Support attach and detach exporters from service

alevy713 committed Apr 22, 2024
1 parent ad79304 commit 64da386
Showing 27 changed files with 845 additions and 61 deletions.
24 changes: 24 additions & 0 deletions docs/data-sources/exporter.md
@@ -0,0 +1,24 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "timescale_exporter Data Source - terraform-provider-timescale"
subcategory: ""
description: |-
Exporter data source
---

# timescale_exporter (Data Source)

Exporter data source



<!-- schema generated by tfplugindocs -->
## Schema

### Required

- `name` (String) The name of this exporter. Exporter names must be unique in order to manage them using Terraform.

### Read-Only

- `id` (String) The exporter ID is the unique identifier for an exporter.
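
A minimal usage sketch, assuming an exporter named `my-exporter` already exists in the project (the name is a placeholder). The resulting `id` can be passed to a service's `metric_exporter_id` or `log_exporter_id` attribute:

```
data "timescale_exporter" "example" {
  # "my-exporter" is a placeholder; use the name of an existing exporter.
  name = "my-exporter"
}

output "exporter_id" {
  value = data.timescale_exporter.example.id
}
```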
22 changes: 22 additions & 0 deletions docs/index.md
@@ -96,6 +96,27 @@ import {
As of v1.9.2, the aws_vpc_peering_connection import ID must be added manually. This
value, which always starts with `pcx-...`, will be available in `timescale_peering_connection.pc.provisioned_id` after a `terraform refresh`.

### Exporters

As of v1.10.0, exporters can be attached to and detached from a service using Terraform.
Below is an example configuration:

```
data "timescale_exporter" "metric_exporter" {
  name = "metric_exporter_name"
}

data "timescale_exporter" "log_exporter" {
  name = "log_exporter_name"
}

resource "timescale_service" "service" {
  metric_exporter_id = data.timescale_exporter.metric_exporter.id
  log_exporter_id    = data.timescale_exporter.log_exporter.id
}
```

## Supported Service Configurations
### Compute
- 500m CPU / 2 GB Memory
@@ -119,6 +140,7 @@ See more info in our [blogpost](https://www.timescale.com/blog/savings-unlocked-
✅ Enable High Availability replicas <br />
✅ Enable read replicas <br />
✅ VPC peering <br />
✅ Attach and detach exporters <br />

## Billing
Services are currently billed for hourly usage. If a service is running for less than an hour,
3 changes: 3 additions & 0 deletions docs/resources/service.md
@@ -38,7 +38,10 @@ resource "timescale_service" "read_replica" {

- `connection_pooler_enabled` (Boolean) Set connection pooler status for this service.
- `enable_ha_replica` (Boolean) Enable HA Replica
- `log_exporter_id` (String) The log exporter ID attached to this service.
WARNING: To complete the log exporter attachment, a service restart is required.
- `memory_gb` (Number) Memory GB
- `metric_exporter_id` (String) The metric exporter ID attached to this service.
- `milli_cpu` (Number) Milli CPU
- `name` (String) Service Name is the configurable name assigned to this resource. If none is provided, a default will be generated by the provider.
- `paused` (Boolean) Paused status of the service.
34 changes: 34 additions & 0 deletions examples/data-sources/timescale_exporter/exporter.tf
@@ -0,0 +1,34 @@
terraform {
required_providers {
timescale = {
source = "registry.terraform.io/providers/timescale"
version = "~> 1.0"
}
}
}

variable "ts_access_key" {
type = string
}

variable "ts_secret_key" {
type = string
}

variable "ts_project_id" {
type = string
}

provider "timescale" {
access_key = var.ts_access_key
secret_key = var.ts_secret_key
project_id = var.ts_project_id
}

data "timescale_exporter" "exporter" {
name = "exporter_name"
}

output "exporter_id" {
value = data.timescale_exporter.exporter.id
}
3 changes: 2 additions & 1 deletion go.mod
@@ -11,7 +11,9 @@ require (
github.com/hashicorp/terraform-plugin-log v0.9.0
github.com/hashicorp/terraform-plugin-sdk/v2 v2.33.0
github.com/hashicorp/terraform-plugin-testing v1.7.0
github.com/samber/lo v1.39.0
github.com/stretchr/testify v1.9.0
github.com/vektah/gqlparser/v2 v2.5.11
)

require (
@@ -64,7 +66,6 @@ require (
github.com/russross/blackfriday v1.6.0 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/vektah/gqlparser/v2 v2.5.11 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
53 changes: 8 additions & 45 deletions go.sum

Large diffs are not rendered by default.

26 changes: 18 additions & 8 deletions internal/client/client.go
@@ -60,6 +60,24 @@ var (
OpenPeerRequestMutation string
//go:embed queries/delete_peer_request.graphql
DeletePeeringConnectionMutation string

// Exporters
//go:embed queries/attach_metric_exporter.graphql
AttachMetricExporterMutation string
//go:embed queries/attach_generic_exporter.graphql
AttachGenericExporterMutation string
//go:embed queries/detach_metric_exporter.graphql
DetachMetricExporterMutation string
//go:embed queries/detach_generic_metric_exporter.graphql
DetachGenericMetricExporterMutation string
//go:embed queries/get_all_metric_exporters.graphql
GetAllMetricExporters string
//go:embed queries/get_all_generic_exporters.graphql
GetAllGenericMetricExporters string
)

var (
errNotFound = errors.New("resource not found")
)

type Client struct {
@@ -76,10 +94,6 @@ type Response[T any] struct {
Errors []*Error `json:"errors"`
}

type Error struct {
Message string `json:"message"`
}

func NewClient(token, projectID, env, terraformVersion string) *Client {
c := &http.Client{
Timeout: 30 * time.Second,
@@ -132,10 +146,6 @@ func JWTFromCC(c *Client, accessKey, secretKey string) error {
return nil
}

func (e *Error) Error() string {
return e.Message
}

func (c *Client) do(ctx context.Context, req map[string]interface{}, resp interface{}) error {
tflog.Trace(ctx, "Client.do")
jsonValue, err := json.Marshal(req)
179 changes: 179 additions & 0 deletions internal/client/exporter.go
@@ -0,0 +1,179 @@
package client

import (
"context"
"encoding/json"
"errors"
"fmt"
"strconv"
"time"

"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/samber/lo"
)

type Exporter struct {
ID string `json:"id"`
ProjectID string `json:"projectId"`
Created time.Time `json:"created"`
Name string `json:"name"`
Type string `json:"type"`
RegionCode string `json:"regionCode"`
Config json.RawMessage `json:"config"`
}

type GetAllMetricExportersResponse struct {
Exporters []*Exporter `json:"getAllMetricExporters"`
}

type GetAllGenericExporterResponse struct {
Exporters []*Exporter `json:"getAllGenericExporters"`
}

type GetExporterByNameRequest struct {
Name string
}

type AttachExporterRequest struct {
ServiceID string
ExporterID string
}

type DetachExporterRequest struct {
ServiceID string
ExporterID string
}

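// getAllMetricExporters lists all metric exporters in the project.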
func (c *Client) getAllMetricExporters(ctx context.Context) ([]*Exporter, error) {
tflog.Trace(ctx, "MetricExporter.GetAll")
req := graphQLRequest{
operationName: "GetAllMetricExporters",
query: GetAllMetricExporters,
variables: map[string]interface{}{
"projectId": c.projectID,
},
}
var resp Response[GetAllMetricExportersResponse]
err := c.do(ctx, req.build(), &resp)
if err = coalesceErrors(resp, err); err != nil {
return nil, err
}
return resp.Data.Exporters, nil
}

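// getAllLogExporters lists all log (generic) exporters in the project.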
func (c *Client) getAllLogExporters(ctx context.Context) ([]*Exporter, error) {
tflog.Trace(ctx, "Client.getAllLogExporters")
req := graphQLRequest{
operationName: "GetAllGenericExporters",
query: GetAllGenericMetricExporters,
variables: map[string]interface{}{
"projectId": c.projectID,
},
}
var resp Response[GetAllGenericExporterResponse]
err := c.do(ctx, req.build(), &resp)
if err = coalesceErrors(resp, err); err != nil {
return nil, err
}
return resp.Data.Exporters, nil
}

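// getAllExporters returns the project's metric and log exporters combined into a single slice.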
func (c *Client) getAllExporters(ctx context.Context) ([]*Exporter, error) {
tflog.Trace(ctx, "Client.getAllExporters")
metricExporters, err := c.getAllMetricExporters(ctx)
if err != nil {
return nil, err
}
logExporters, err := c.getAllLogExporters(ctx)
if err != nil {
return nil, err
}
tflog.Debug(ctx, "found "+strconv.Itoa(len(logExporters))+" log exporters")
for _, exporter := range logExporters {
tflog.Debug(ctx, "log exporter: "+exporter.Name)
}
return append(metricExporters, logExporters...), nil
}

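// GetExporterByName finds an exporter by name across both metric and log exporters.
// It returns errNotFound when no exporter matches, and an error when the name is ambiguous.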
func (c *Client) GetExporterByName(ctx context.Context, request *GetExporterByNameRequest) (*Exporter, error) {
tflog.Trace(ctx, "Client.GetExporterByName")
exporters, err := c.getAllExporters(ctx)
if err != nil {
return nil, err
}
e := lo.Filter(exporters, func(e *Exporter, _ int) bool {
return e.Name == request.Name
})
if len(e) == 0 {
return nil, errNotFound
}
if len(e) > 1 {
return nil, errors.New("exporter names must be unique for importing")
}
return e[0], nil
}

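// AttachMetricExporter attaches a metric exporter to the given service.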
func (c *Client) AttachMetricExporter(ctx context.Context, request *AttachExporterRequest) error {
tflog.Trace(ctx, "Client.AttachMetricExporter")
req := &graphQLRequest{
operationName: "AttachServiceToMetricExporter",
query: AttachMetricExporterMutation,
variables: map[string]interface{}{
"projectId": c.projectID,
"serviceId": request.ServiceID,
"exporterId": request.ExporterID,
},
}
var resp Response[any]
err := c.do(ctx, req.build(), &resp)
return coalesceErrors(resp, err)
}

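// AttachLogExporter attaches a log (generic) exporter to the given service.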
func (c *Client) AttachLogExporter(ctx context.Context, request *AttachExporterRequest) error {
tflog.Trace(ctx, "Client.AttachLogExporter")
req := &graphQLRequest{
operationName: "AttachServiceToGenericExporter",
query: AttachGenericExporterMutation,
variables: map[string]interface{}{
"projectId": c.projectID,
"serviceId": request.ServiceID,
"exporterId": request.ExporterID,
},
}
var resp Response[any]
err := c.do(ctx, req.build(), &resp)
tflog.Debug(ctx, "attach log exporter response: "+fmt.Sprintf("%+v", resp))
return coalesceErrors(resp, err)
}

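// DetachLogExporter detaches a log (generic) exporter from the given service.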
func (c *Client) DetachLogExporter(ctx context.Context, request *DetachExporterRequest) error {
tflog.Trace(ctx, "Client.DetachLogExporter")
req := &graphQLRequest{
operationName: "DetachServiceFromGenericExporter",
query: DetachGenericMetricExporterMutation,
variables: map[string]interface{}{
"projectId": c.projectID,
"serviceId": request.ServiceID,
"exporterId": request.ExporterID,
},
}
var resp Response[any]
err := c.do(ctx, req.build(), &resp)
return coalesceErrors(resp, err)
}

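// DetachMetricExporter detaches a metric exporter from the given service.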
func (c *Client) DetachMetricExporter(ctx context.Context, request *DetachExporterRequest) error {
tflog.Trace(ctx, "Client.DetachMetricExporter")
req := &graphQLRequest{
operationName: "DetachServiceFromMetricExporter",
query: DetachMetricExporterMutation,
variables: map[string]interface{}{
"projectId": c.projectID,
"serviceId": request.ServiceID,
"exporterId": request.ExporterID,
},
}
var resp Response[any]
err := c.do(ctx, req.build(), &resp)
return coalesceErrors(resp, err)
}
46 changes: 46 additions & 0 deletions internal/client/graphql.go
@@ -0,0 +1,46 @@
package client

import (
"errors"
"strings"
)

type graphQLRequest struct {
operationName string
query string
variables map[string]interface{}
}

func (g *graphQLRequest) build() map[string]interface{} {
return map[string]interface{}{
"operationName": g.operationName,
"query": g.query,
"variables": g.variables,
}
}

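// Error is a single error entry returned in a GraphQL response.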
type Error struct {
Message string `json:"message"`
Path []string `json:"path"`
}

func (e *Error) Error() string {
return e.Message + " " + strings.Join(e.Path, ".")
}

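// coalesceErrors combines a transport error with any GraphQL errors in the response,
// and treats a response with no data as errNotFound.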
func coalesceErrors[T any](resp Response[T], err error) error {
if err != nil {
return err
}
if len(resp.Errors) > 0 {
errs := make([]error, len(resp.Errors))
for idx, e := range resp.Errors {
errs[idx] = e
}
return errors.Join(errs...)
}
if resp.Data == nil {
return errNotFound
}
return nil
}
