From 8a8be59b916f5f60bc8bd9b48eb4611d40a54d1a Mon Sep 17 00:00:00 2001 From: Douglas Harcourt Parsons Date: Thu, 18 Apr 2024 17:25:06 +0100 Subject: [PATCH] Add support for Log Drains Closes #128 --- client/log_drain.go | 100 +++++ docs/data-sources/endpoint_verification.md | 29 ++ docs/data-sources/log_drain.md | 46 ++ docs/resources/log_drain.md | 87 ++++ docs/resources/webhook.md | 4 +- .../data-source.tf | 1 + .../vercel_log_drain/data-source.tf | 3 + examples/resources/vercel_log_drain/import.sh | 9 + .../resources/vercel_log_drain/resource.tf | 21 + vercel/data_source_endpoint_verification.go | 104 +++++ .../data_source_endpoint_verification_test.go | 31 ++ vercel/data_source_log_drain.go | 196 +++++++++ vercel/provider.go | 19 +- vercel/resource_log_drain.go | 398 ++++++++++++++++++ vercel/resource_log_drain_test.go | 131 ++++++ vercel/resource_webhook.go | 3 +- vercel/types_conversions.go | 7 + vercel/validator_float64_greater_than.go | 40 ++ vercel/validator_float64_less_than.go | 40 ++ vercel/validator_map_max_count.go | 42 ++ .../validator_string_length_greater_than.go | 42 ++ 21 files changed, 1342 insertions(+), 11 deletions(-) create mode 100644 client/log_drain.go create mode 100644 docs/data-sources/endpoint_verification.md create mode 100644 docs/data-sources/log_drain.md create mode 100644 docs/resources/log_drain.md create mode 100644 examples/data-sources/vercel_endpoint_verification/data-source.tf create mode 100644 examples/data-sources/vercel_log_drain/data-source.tf create mode 100644 examples/resources/vercel_log_drain/import.sh create mode 100644 examples/resources/vercel_log_drain/resource.tf create mode 100644 vercel/data_source_endpoint_verification.go create mode 100644 vercel/data_source_endpoint_verification_test.go create mode 100644 vercel/data_source_log_drain.go create mode 100644 vercel/resource_log_drain.go create mode 100644 vercel/resource_log_drain_test.go create mode 100644 vercel/validator_float64_greater_than.go create 
mode 100644 vercel/validator_float64_less_than.go create mode 100644 vercel/validator_map_max_count.go create mode 100644 vercel/validator_string_length_greater_than.go diff --git a/client/log_drain.go b/client/log_drain.go new file mode 100644 index 00000000..fe5be299 --- /dev/null +++ b/client/log_drain.go @@ -0,0 +1,100 @@ +package client + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +type LogDrain struct { + ID string `json:"id"` + TeamID string `json:"ownerId"` + DeliveryFormat string `json:"deliveryFormat"` + Environments []string `json:"environments"` + Headers map[string]string `json:"headers"` + ProjectIDs []string `json:"projectIds"` + SamplingRate *float64 `json:"samplingRate"` + Secret string `json:"secret"` + Sources []string `json:"sources"` + Endpoint string `json:"url"` +} + +type CreateLogDrainRequest struct { + TeamID string `json:"-"` + DeliveryFormat string `json:"deliveryFormat"` + Environments []string `json:"environments"` + Headers map[string]string `json:"headers,omitempty"` + ProjectIDs []string `json:"projectIds,omitempty"` + SamplingRate float64 `json:"samplingRate,omitempty"` + Secret string `json:"secret,omitempty"` + Sources []string `json:"sources"` + Endpoint string `json:"url"` +} + +func (c *Client) CreateLogDrain(ctx context.Context, request CreateLogDrainRequest) (l LogDrain, err error) { + url := fmt.Sprintf("%s/v1/log-drains", c.baseURL) + if c.teamID(request.TeamID) != "" { + url = fmt.Sprintf("%s?teamId=%s", url, c.teamID(request.TeamID)) + } + payload := string(mustMarshal(request)) + tflog.Info(ctx, "creating log drain", map[string]interface{}{ + "url": url, + "payload": payload, + }) + err = c.doRequest(clientRequest{ + ctx: ctx, + method: "POST", + url: url, + body: payload, + }, &l) + return l, err +} + +func (c *Client) DeleteLogDrain(ctx context.Context, id, teamID string) error { + url := fmt.Sprintf("%s/v1/log-drains/%s", c.baseURL, id) + if c.teamID(teamID) != "" { + url 
= fmt.Sprintf("%s?teamId=%s", url, c.teamID(teamID)) + } + tflog.Info(ctx, "deleting log drain", map[string]interface{}{ + "url": url, + }) + return c.doRequest(clientRequest{ + ctx: ctx, + method: "DELETE", + url: url, + }, nil) +} + +func (c *Client) GetLogDrain(ctx context.Context, id, teamID string) (l LogDrain, err error) { + url := fmt.Sprintf("%s/v1/log-drains/%s", c.baseURL, id) + if c.teamID(teamID) != "" { + url = fmt.Sprintf("%s?teamId=%s", url, c.teamID(teamID)) + } + tflog.Info(ctx, "reading log drain", map[string]interface{}{ + "url": url, + }) + err = c.doRequest(clientRequest{ + ctx: ctx, + method: "GET", + url: url, + }, &l) + return l, err +} + +func (c *Client) GetEndpointVerificationCode(ctx context.Context, teamID string) (code string, err error) { + url := fmt.Sprintf("%s/v1/verify-endpoint", c.baseURL) + if c.teamID(teamID) != "" { + url = fmt.Sprintf("%s?teamId=%s", url, c.teamID(teamID)) + } + + var l struct { + Code string `json:"verificationCode"` + } + err = c.doRequest(clientRequest{ + ctx: ctx, + method: "GET", + url: url, + }, &l) + return l.Code, err +} diff --git a/docs/data-sources/endpoint_verification.md b/docs/data-sources/endpoint_verification.md new file mode 100644 index 00000000..e44db279 --- /dev/null +++ b/docs/data-sources/endpoint_verification.md @@ -0,0 +1,29 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "vercel_endpoint_verification Data Source - terraform-provider-vercel" +subcategory: "" +description: |- + Provides a verification code that can be used to prove ownership over an API. +--- + +# vercel_endpoint_verification (Data Source) + +Provides a verification code that can be used to prove ownership over an API. + +## Example Usage + +```terraform +data "vercel_endpoint_verification" "example" {} +``` + + +## Schema + +### Optional + +- `team_id` (String) The ID of the team the Edge Config should exist under. 
Required when configuring a team resource if a default team has not been set in the provider. + +### Read-Only + +- `id` (String) The ID of this resource. +- `verification_code` (String) A verification code that should be set in the `x-vercel-verify` response header for your API. This is used to verify that the endpoint belongs to you. diff --git a/docs/data-sources/log_drain.md b/docs/data-sources/log_drain.md new file mode 100644 index 00000000..bb2a9c19 --- /dev/null +++ b/docs/data-sources/log_drain.md @@ -0,0 +1,46 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "vercel_log_drain Data Source - terraform-provider-vercel" +subcategory: "" +description: |- + Provides information about an existing Log Drain. + Log Drains collect all of your logs using a service specializing in storing app logs. + Teams on Pro and Enterprise plans can subscribe to log drains that are generic and configurable from the Vercel dashboard without creating an integration. This allows you to use a HTTP service to receive logs through Vercel's log drains. +--- + +# vercel_log_drain (Data Source) + +Provides information about an existing Log Drain. + +Log Drains collect all of your logs using a service specializing in storing app logs. + +Teams on Pro and Enterprise plans can subscribe to log drains that are generic and configurable from the Vercel dashboard without creating an integration. This allows you to use a HTTP service to receive logs through Vercel's log drains. + +## Example Usage + +```terraform +data "vercel_log_drain" "example" { + id = "lg_xxxxxxx_xxxxxx_xxxxx" +} +``` + + +## Schema + +### Required + +- `endpoint` (String) Logs will be sent as POST requests to this URL. The endpoint will be verified, and must return a `200` status code and an `x-vercel-verify` header taken from the endpoint_verification data source. The value the `x-vercel-verify` header should be can be read from the `vercel_endpoint_verification` data source. 
+- `id` (String) The ID of the Log Drain. + +### Optional + +- `team_id` (String) The ID of the team the Log Drain should exist under. Required when configuring a team resource if a default team has not been set in the provider. + +### Read-Only + +- `delivery_format` (String) The format log data should be delivered in. Can be `json` or `ndjson`. +- `environments` (Set of String) Logs from the selected environments will be forwarded to your webhook. At least one must be present. +- `headers` (Map of String) Custom headers to include in requests to the log drain endpoint. +- `project_ids` (Set of String) A list of project IDs that the log drain should be associated with. Logs from these projects will be sent log events to the specified endpoint. If omitted, logs will be sent for all projects. +- `sampling_rate` (Number) A ratio of logs matching the sampling rate will be sent to your log drain. Should be a value between 0 and 1. If unspecified, all logs are sent. +- `sources` (Set of String) A set of sources that the log drain should send logs for. Valid values are `static`, `edge`, `external`, `build` and `function`. diff --git a/docs/resources/log_drain.md b/docs/resources/log_drain.md new file mode 100644 index 00000000..410feb97 --- /dev/null +++ b/docs/resources/log_drain.md @@ -0,0 +1,87 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "vercel_log_drain Resource - terraform-provider-vercel" +subcategory: "" +description: |- + Provides a Configurable Log Drain resource. + ~> For Log Drain integrations, please see the Integration Log Drain docs https://vercel.com/docs/observability/log-drains#log-drains-integration. + Log Drains collect all of your logs using a service specializing in storing app logs. + Teams on Pro and Enterprise plans can subscribe to log drains that are generic and configurable from the Vercel dashboard without creating an integration. 
This allows you to use a HTTP service to receive logs through Vercel's log drains. + ~> Only Pro and Enterprise teams can create Configurable Log Drains. +--- + +# vercel_log_drain (Resource) + +Provides a Configurable Log Drain resource. + +~> For Log Drain integrations, please see the [Integration Log Drain docs](https://vercel.com/docs/observability/log-drains#log-drains-integration). + +Log Drains collect all of your logs using a service specializing in storing app logs. + +Teams on Pro and Enterprise plans can subscribe to log drains that are generic and configurable from the Vercel dashboard without creating an integration. This allows you to use a HTTP service to receive logs through Vercel's log drains. + +~> Only Pro and Enterprise teams can create Configurable Log Drains. + +## Example Usage + +```terraform +// Use the vercel_endpoint_verification data source to work out the verification code needed to +// verify the log drain endpoint. +data "vercel_endpoint_verification" "example" { +} + +resource "vercel_log_drain" "example" { + delivery_format = "json" + environments = ["production"] + headers = { + some-key = "some-value" + } + project_ids = [vercel_project.example.id] + sampling_rate = 0.8 + secret = "a_very_long_and_very_well_specified_secret" + sources = ["static"] + endpoint = "https://example.com/my-log-drain-endpoint" +} + +resource "vercel_project" "example" { + name = "example" +} +``` + + +## Schema + +### Required + +- `delivery_format` (String) The format log data should be delivered in. Can be `json` or `ndjson`. +- `endpoint` (String) Logs will be sent as POST requests to this URL. The endpoint will be verified, and must return a `200` status code and an `x-vercel-verify` header taken from the endpoint_verification data source. The value the `x-vercel-verify` header should be can be read from the `vercel_endpoint_verification_code` data source. 
+- `environments` (Set of String) Logs from the selected environments will be forwarded to your webhook. At least one must be present. +- `sources` (Set of String) A set of sources that the log drain should send logs for. Valid values are `static`, `edge`, `external`, `build` and `lambda`. + +### Optional + +- `headers` (Map of String) Custom headers to include in requests to the log drain endpoint. +- `project_ids` (Set of String) A list of project IDs that the log drain should be associated with. Logs from these projects will be sent log events to the specified endpoint. If omitted, logs will be sent for all projects. +- `sampling_rate` (Number) A ratio of logs matching the sampling rate will be sent to your log drain. Should be a value between 0 and 1. If unspecified, all logs are sent. +- `secret` (String, Sensitive) A custom secret to be used for signing log events. You can use this secret to verify that log events are coming from Vercel and are not tampered with. See https://vercel.com/docs/observability/log-drains/log-drains-reference#secure-log-drains for full info. +- `team_id` (String) The ID of the team the Log Drain should exist under. Required when configuring a team resource if a default team has not been set in the provider. + +### Read-Only + +- `id` (String) The ID of the Log Drain. + +## Import + +Import is supported using the following syntax: + +```shell +# If importing into a personal account, or with a team configured on +# the provider, simply use the log_drain_id. +# - log_drain_id can be found by querying the Vercel REST API (https://vercel.com/docs/rest-api/endpoints/logDrains#retrieves-a-list-of-all-the-log-drains). +terraform import vercel_log_drain.example ecfg_xxxxxxxxxxxxxxxxxxxxxxxxxxxx/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + +# Alternatively, you can import via the team_id and edge_config_id. +# - team_id can be found in the team `settings` tab in the Vercel UI. 
+# - log_drain_id can be found by querying the Vercel REST API (https://vercel.com/docs/rest-api/endpoints/logDrains#retrieves-a-list-of-all-the-log-drains). +terraform import vercel_log_drain.example team_xxxxxxxxxxxxxxxxxxxxxxxx/ecfg_xxxxxxxxxxxxxxxxxxxxxxxxxxxx/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +``` diff --git a/docs/resources/webhook.md b/docs/resources/webhook.md index 21d5f513..6d9b7a36 100644 --- a/docs/resources/webhook.md +++ b/docs/resources/webhook.md @@ -44,7 +44,7 @@ resource "vercel_webhook" "without_project_ids" { ### Required -- `endpoint` (String) Webhooks events will be sent as POST request to this URL. +- `endpoint` (String) Webhooks events will be sent as POST requests to this URL. - `events` (Set of String) A list of the events the webhook will listen to. At least one must be present. ### Optional @@ -55,4 +55,4 @@ resource "vercel_webhook" "without_project_ids" { ### Read-Only - `id` (String) The ID of the Webhook. -- `secret` (String) A secret value which will be provided in the `x-vercel-signature` header and can be used to verify the authenticity of the webhook. See https://vercel.com/docs/observability/webhooks-overview/webhooks-api#securing-webhooks for further details. +- `secret` (String, Sensitive) A secret value which will be provided in the `x-vercel-signature` header and can be used to verify the authenticity of the webhook. See https://vercel.com/docs/observability/webhooks-overview/webhooks-api#securing-webhooks for further details. 
diff --git a/examples/data-sources/vercel_endpoint_verification/data-source.tf b/examples/data-sources/vercel_endpoint_verification/data-source.tf new file mode 100644 index 00000000..ed923201 --- /dev/null +++ b/examples/data-sources/vercel_endpoint_verification/data-source.tf @@ -0,0 +1 @@ +data "vercel_endpoint_verification" "example" {} diff --git a/examples/data-sources/vercel_log_drain/data-source.tf b/examples/data-sources/vercel_log_drain/data-source.tf new file mode 100644 index 00000000..f04e7320 --- /dev/null +++ b/examples/data-sources/vercel_log_drain/data-source.tf @@ -0,0 +1,3 @@ +data "vercel_log_drain" "example" { + id = "lg_xxxxxxx_xxxxxx_xxxxx" +} diff --git a/examples/resources/vercel_log_drain/import.sh b/examples/resources/vercel_log_drain/import.sh new file mode 100644 index 00000000..bca3b2fc --- /dev/null +++ b/examples/resources/vercel_log_drain/import.sh @@ -0,0 +1,9 @@ +# If importing into a personal account, or with a team configured on +# the provider, simply use the log_drain_id. +# - log_drain_id can be found by querying the Vercel REST API (https://vercel.com/docs/rest-api/endpoints/logDrains#retrieves-a-list-of-all-the-log-drains). +terraform import vercel_log_drain.example ecfg_xxxxxxxxxxxxxxxxxxxxxxxxxxxx/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + +# Alternatively, you can import via the team_id and log_drain_id. +# - team_id can be found in the team `settings` tab in the Vercel UI. +# - log_drain_id can be found by querying the Vercel REST API (https://vercel.com/docs/rest-api/endpoints/logDrains#retrieves-a-list-of-all-the-log-drains). 
+terraform import vercel_log_drain.example team_xxxxxxxxxxxxxxxxxxxxxxxx/ecfg_xxxxxxxxxxxxxxxxxxxxxxxxxxxx/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx diff --git a/examples/resources/vercel_log_drain/resource.tf b/examples/resources/vercel_log_drain/resource.tf new file mode 100644 index 00000000..61243e6e --- /dev/null +++ b/examples/resources/vercel_log_drain/resource.tf @@ -0,0 +1,21 @@ +// Use the vercel_endpoint_verification data source to work out the verification code needed to +// verify the log drain endpoint. +data "vercel_endpoint_verification" "example" { +} + +resource "vercel_log_drain" "example" { + delivery_format = "json" + environments = ["production"] + headers = { + some-key = "some-value" + } + project_ids = [vercel_project.example.id] + sampling_rate = 0.8 + secret = "a_very_long_and_very_well_specified_secret" + sources = ["static"] + endpoint = "https://example.com/my-log-drain-endpoint" +} + +resource "vercel_project" "example" { + name = "example" +} diff --git a/vercel/data_source_endpoint_verification.go b/vercel/data_source_endpoint_verification.go new file mode 100644 index 00000000..65dbc7f8 --- /dev/null +++ b/vercel/data_source_endpoint_verification.go @@ -0,0 +1,104 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/vercel/terraform-provider-vercel/client" +) + +// Ensure the implementation satisfies the expected interfaces. 
+var ( + _ datasource.DataSource = &endpointVerificationDataSource{} +) + +func newEndpointVerificationDataSource() datasource.DataSource { + return &endpointVerificationDataSource{} +} + +type endpointVerificationDataSource struct { + client *client.Client +} + +func (d *endpointVerificationDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_endpoint_verification" +} + +func (d *endpointVerificationDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*client.Client) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = client +} + +// Schema returns the schema information for a file data source +func (d *endpointVerificationDataSource) Schema(_ context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Provides a verification code that can be used to prove ownership over an API.", + Attributes: map[string]schema.Attribute{ + "verification_code": schema.StringAttribute{ + Description: "A verification code that should be set in the `x-vercel-verify` response header for your API. This is used to verify that the endpoint belongs to you.", + Computed: true, + }, + "id": schema.StringAttribute{ + Computed: true, + }, + "team_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the team the Edge Config should exist under. 
Required when configuring a team resource if a default team has not been set in the provider.", + }, + }, + } +} + +// EndpointVerification represents the information terraform knows about an endpoint verification data source +type EndpointVerification struct { + ID types.String `tfsdk:"id"` + TeamID types.String `tfsdk:"team_id"` + VerificationCode types.String `tfsdk:"verification_code"` +} + +// Read will fetch the endpoint verification code from the Vercel API and provide terraform with information about it. +// It is called by the provider whenever data source values should be read to update state. +func (d *endpointVerificationDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var config EndpointVerification + diags := req.Config.Get(ctx, &config) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + code, err := d.client.GetEndpointVerificationCode(ctx, config.TeamID.ValueString()) + if err != nil { + resp.Diagnostics.AddError( + "Failed to get verification code", + fmt.Sprintf("Failed to get verification code, unexpected error: %s", err), + ) + return + } + + diags = resp.State.Set(ctx, EndpointVerification{ + TeamID: config.TeamID, + ID: types.StringValue(code), + VerificationCode: types.StringValue(code), + }) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } +} diff --git a/vercel/data_source_endpoint_verification_test.go b/vercel/data_source_endpoint_verification_test.go new file mode 100644 index 00000000..fcb6f11e --- /dev/null +++ b/vercel/data_source_endpoint_verification_test.go @@ -0,0 +1,31 @@ +package vercel_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestAcc_EndpointVerificationDataSource(t *testing.T) { + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccEndpointVerificationDataSourceConfig(teamIDConfig()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet("data.vercel_endpoint_verification.test", "verification_code"), + ), + }, + }, + }) +} + +func testAccEndpointVerificationDataSourceConfig(teamID string) string { + return fmt.Sprintf(` +data "vercel_endpoint_verification" "test" { + %[1]s +} +`, teamID) +} diff --git a/vercel/data_source_log_drain.go b/vercel/data_source_log_drain.go new file mode 100644 index 00000000..bd6251dc --- /dev/null +++ b/vercel/data_source_log_drain.go @@ -0,0 +1,196 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/vercel/terraform-provider-vercel/client" +) + +// Ensure the implementation satisfies the expected interfaces. 
+var ( + _ datasource.DataSource = &logDrainDataSource{} + _ datasource.DataSourceWithConfigure = &logDrainDataSource{} +) + +func newLogDrainDataSource() datasource.DataSource { + return &logDrainDataSource{} +} + +type logDrainDataSource struct { + client *client.Client +} + +func (d *logDrainDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_log_drain" +} + +func (d *logDrainDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*client.Client) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = client +} + +// Schema returns the schema information for an logDrain data source +func (r *logDrainDataSource) Schema(_ context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: ` +Provides information about an existing Log Drain. + +Log Drains collect all of your logs using a service specializing in storing app logs. + +Teams on Pro and Enterprise plans can subscribe to log drains that are generic and configurable from the Vercel dashboard without creating an integration. This allows you to use a HTTP service to receive logs through Vercel's log drains. +`, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "The ID of the Log Drain.", + Required: true, + }, + "team_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the team the Log Drain should exist under. 
Required when configuring a team resource if a default team has not been set in the provider.", + }, + "delivery_format": schema.StringAttribute{ + Description: "The format log data should be delivered in. Can be `json` or `ndjson`.", + Computed: true, + }, + "environments": schema.SetAttribute{ + Description: "Logs from the selected environments will be forwarded to your webhook. At least one must be present.", + ElementType: types.StringType, + Computed: true, + }, + "headers": schema.MapAttribute{ + Description: "Custom headers to include in requests to the log drain endpoint.", + ElementType: types.StringType, + Computed: true, + }, + "project_ids": schema.SetAttribute{ + Description: "A list of project IDs that the log drain should be associated with. Logs from these projects will be sent log events to the specified endpoint. If omitted, logs will be sent for all projects.", + Computed: true, + ElementType: types.StringType, + }, + "sampling_rate": schema.Float64Attribute{ + Description: "A ratio of logs matching the sampling rate will be sent to your log drain. Should be a value between 0 and 1. If unspecified, all logs are sent.", + Computed: true, + }, + "sources": schema.SetAttribute{ + Description: "A set of sources that the log drain should send logs for. Valid values are `static`, `edge`, `external`, `build` and `function`.", + Computed: true, + ElementType: types.StringType, + }, + "endpoint": schema.StringAttribute{ + Description: "Logs will be sent as POST requests to this URL. The endpoint will be verified, and must return a `200` status code and an `x-vercel-verify` header taken from the endpoint_verification data source. 
The value the `x-vercel-verify` header should be can be read from the `vercel_endpoint_verification_code` data source.", + Required: true, + }, + }, + } +} + +type LogDrainWithoutSecret struct { + ID types.String `tfsdk:"id"` + TeamID types.String `tfsdk:"team_id"` + DeliveryFormat types.String `tfsdk:"delivery_format"` + Environments types.Set `tfsdk:"environments"` + Headers types.Map `tfsdk:"headers"` + ProjectIDs types.Set `tfsdk:"project_ids"` + SamplingRate types.Float64 `tfsdk:"sampling_rate"` + Sources types.Set `tfsdk:"sources"` + Endpoint types.String `tfsdk:"endpoint"` +} + +func responseToLogDrainWithoutSecret(ctx context.Context, out client.LogDrain) (l LogDrainWithoutSecret, diags diag.Diagnostics) { + projectIDs, diags := types.SetValueFrom(ctx, types.StringType, out.ProjectIDs) + if diags.HasError() { + return l, diags + } + + environments, diags := types.SetValueFrom(ctx, types.StringType, out.Environments) + if diags.HasError() { + return l, diags + } + + sources, diags := types.SetValueFrom(ctx, types.StringType, out.Sources) + if diags.HasError() { + return l, diags + } + + headers, diags := types.MapValueFrom(ctx, types.StringType, out.Headers) + if diags.HasError() { + return l, diags + } + + return LogDrainWithoutSecret{ + ID: types.StringValue(out.ID), + TeamID: toTeamID(out.TeamID), + DeliveryFormat: types.StringValue(out.DeliveryFormat), + SamplingRate: fromFloat64Pointer(out.SamplingRate), + Endpoint: types.StringValue(out.Endpoint), + Environments: environments, + Headers: headers, + Sources: sources, + ProjectIDs: projectIDs, + }, nil +} + +// Read will read the logDrain information by requesting it from the Vercel API, and will update terraform +// with this information. +// It is called by the provider whenever data source values should be read to update state. 
+func (d *logDrainDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var config LogDrainWithoutSecret + diags := req.Config.Get(ctx, &config) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + out, err := d.client.GetLogDrain(ctx, config.ID.ValueString(), config.TeamID.ValueString()) + if client.NotFound(err) { + resp.State.RemoveResource(ctx) + return + } + if err != nil { + resp.Diagnostics.AddError( + "Error reading Log Drain", + fmt.Sprintf("Could not get Log Drain %s %s, unexpected error: %s", + config.TeamID.ValueString(), + config.ID.ValueString(), + err, + ), + ) + return + } + + result, diags := responseToLogDrainWithoutSecret(ctx, out) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "read log drain", map[string]interface{}{ + "team_id": result.TeamID.ValueString(), + "log_drain_id": result.ID.ValueString(), + }) + + diags = resp.State.Set(ctx, result) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} diff --git a/vercel/provider.go b/vercel/provider.go index feddff6d..3988ed6a 100644 --- a/vercel/provider.go +++ b/vercel/provider.go @@ -51,15 +51,16 @@ Use the navigation to the left to read about the available resources. 
func (p *vercelProvider) Resources(_ context.Context) []func() resource.Resource { return []func() resource.Resource{ newAliasResource, - newDeploymentResource, newDNSRecordResource, - newProjectResource, + newDeploymentResource, + newEdgeConfigResource, + newEdgeConfigSchemaResource, + newEdgeConfigTokenResource, + newLogDrainResource, newProjectDomainResource, newProjectEnvironmentVariableResource, + newProjectResource, newSharedEnvironmentVariableResource, - newEdgeConfigResource, - newEdgeConfigTokenResource, - newEdgeConfigSchemaResource, newWebhookResource, } } @@ -67,14 +68,16 @@ func (p *vercelProvider) Resources(_ context.Context) []func() resource.Resource func (p *vercelProvider) DataSources(_ context.Context) []func() datasource.DataSource { return []func() datasource.DataSource{ newAliasDataSource, + newEdgeConfigDataSource, + newEdgeConfigSchemaDataSource, + newEdgeConfigTokenDataSource, + newEndpointVerificationDataSource, newFileDataSource, newPrebuiltProjectDataSource, newProjectDataSource, newProjectDirectoryDataSource, newSharedEnvironmentVariableDataSource, - newEdgeConfigDataSource, - newEdgeConfigTokenDataSource, - newEdgeConfigSchemaDataSource, + newLogDrainDataSource, } } diff --git a/vercel/resource_log_drain.go b/vercel/resource_log_drain.go new file mode 100644 index 00000000..35d61a35 --- /dev/null +++ b/vercel/resource_log_drain.go @@ -0,0 +1,398 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/mapplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" + 
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/vercel/terraform-provider-vercel/client" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ resource.Resource = &logDrainResource{} + _ resource.ResourceWithConfigure = &logDrainResource{} + _ resource.ResourceWithImportState = &logDrainResource{} +) + +func newLogDrainResource() resource.Resource { + return &logDrainResource{} +} + +type logDrainResource struct { + client *client.Client +} + +func (r *logDrainResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_log_drain" +} + +func (r *logDrainResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*client.Client) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + r.client = client +} + +func (r *logDrainResource) Schema(_ context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: ` +Provides a Configurable Log Drain resource. + +~> For Log Drain integrations, please see the [Integration Log Drain docs](https://vercel.com/docs/observability/log-drains#log-drains-integration). + +Log Drains collect all of your logs using a service specializing in storing app logs. 
+ +Teams on Pro and Enterprise plans can subscribe to log drains that are generic and configurable from the Vercel dashboard without creating an integration. This allows you to use a HTTP service to receive logs through Vercel's log drains. + +~> Only Pro and Enterprise teams can create Configurable Log Drains.`, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "The ID of the Log Drain.", + Computed: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, + }, + "team_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the team the Log Drain should exist under. Required when configuring a team resource if a default team has not been set in the provider.", + PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplaceIfConfigured(), stringplanmodifier.UseStateForUnknown()}, + }, + "delivery_format": schema.StringAttribute{ + Description: "The format log data should be delivered in. Can be `json` or `ndjson`.", + Required: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplace()}, + Validators: []validator.String{ + stringOneOf("json", "ndjson"), + }, + }, + "environments": schema.SetAttribute{ + Description: "Logs from the selected environments will be forwarded to your webhook. 
At least one must be present.", + ElementType: types.StringType, + PlanModifiers: []planmodifier.Set{setplanmodifier.RequiresReplace()}, + Required: true, + Validators: []validator.Set{ + stringSetItemsIn("production", "preview"), + stringSetMinCount(1), + }, + }, + "headers": schema.MapAttribute{ + Description: "Custom headers to include in requests to the log drain endpoint.", + ElementType: types.StringType, + PlanModifiers: []planmodifier.Map{mapplanmodifier.RequiresReplace()}, + Optional: true, + Validators: []validator.Map{ + mapMaxCount(5), + }, + }, + "project_ids": schema.SetAttribute{ + Description: "A list of project IDs that the log drain should be associated with. Logs from these projects will be sent log events to the specified endpoint. If omitted, logs will be sent for all projects.", + Optional: true, + ElementType: types.StringType, + PlanModifiers: []planmodifier.Set{setplanmodifier.RequiresReplace()}, + }, + "sampling_rate": schema.Float64Attribute{ + Description: "A ratio of logs matching the sampling rate will be sent to your log drain. Should be a value between 0 and 1. If unspecified, all logs are sent.", + Optional: true, + PlanModifiers: []planmodifier.Float64{float64planmodifier.RequiresReplace()}, + Validators: []validator.Float64{ + float64GreaterThan(0), + float64LessThan(1), + }, + }, + "secret": schema.StringAttribute{ + Description: "A custom secret to be used for signing log events. You can use this secret to verify that log events are coming from Vercel and are not tampered with. 
See https://vercel.com/docs/observability/log-drains/log-drains-reference#secure-log-drains for full info.", + Optional: true, + Computed: true, + Sensitive: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplaceIfConfigured(), stringplanmodifier.UseStateForUnknown()}, + Validators: []validator.String{ + stringLengthGreaterThan(32), + }, + }, + "sources": schema.SetAttribute{ + Description: "A set of sources that the log drain should send logs for. Valid values are `static`, `edge`, `external`, `build` and `lambda`.", + Required: true, + ElementType: types.StringType, + PlanModifiers: []planmodifier.Set{setplanmodifier.RequiresReplace()}, + Validators: []validator.Set{ + stringSetItemsIn("static", "edge", "external", "build", "lambda"), + stringSetMinCount(1), + }, + }, + "endpoint": schema.StringAttribute{ + Description: "Logs will be sent as POST requests to this URL. The endpoint will be verified, and must return a `200` status code and an `x-vercel-verify` header taken from the endpoint_verification data source. 
The value the `x-vercel-verify` header should be can be read from the `vercel_endpoint_verification_code` data source.", + Required: true, + PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplace()}, + }, + }, + } +} + +type LogDrain struct { + ID types.String `tfsdk:"id"` + TeamID types.String `tfsdk:"team_id"` + DeliveryFormat types.String `tfsdk:"delivery_format"` + Environments types.Set `tfsdk:"environments"` + Headers types.Map `tfsdk:"headers"` + ProjectIDs types.Set `tfsdk:"project_ids"` + SamplingRate types.Float64 `tfsdk:"sampling_rate"` + Secret types.String `tfsdk:"secret"` + Sources types.Set `tfsdk:"sources"` + Endpoint types.String `tfsdk:"endpoint"` +} + +func responseToLogDrain(ctx context.Context, out client.LogDrain, secret types.String) (LogDrain, diag.Diagnostics) { + projectIDs, diags := types.SetValueFrom(ctx, types.StringType, out.ProjectIDs) + if diags.HasError() { + return LogDrain{}, diags + } + + environments, diags := types.SetValueFrom(ctx, types.StringType, out.Environments) + if diags.HasError() { + return LogDrain{}, diags + } + + sources, diags := types.SetValueFrom(ctx, types.StringType, out.Sources) + if diags.HasError() { + return LogDrain{}, diags + } + + headers, diags := types.MapValueFrom(ctx, types.StringType, out.Headers) + if diags.HasError() { + return LogDrain{}, diags + } + + if secret.IsNull() || secret.IsUnknown() { + secret = types.StringValue(out.Secret) + } + + return LogDrain{ + ID: types.StringValue(out.ID), + TeamID: toTeamID(out.TeamID), + DeliveryFormat: types.StringValue(out.DeliveryFormat), + SamplingRate: fromFloat64Pointer(out.SamplingRate), + Secret: secret, + Endpoint: types.StringValue(out.Endpoint), + Environments: environments, + Headers: headers, + Sources: sources, + ProjectIDs: projectIDs, + }, nil +} + +func (r *logDrainResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan LogDrain + diags := req.Plan.Get(ctx, &plan) + 
resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var environments []string + diags = plan.Environments.ElementsAs(ctx, &environments, false) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var headers map[string]string + diags = plan.Headers.ElementsAs(ctx, &headers, false) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var projectIDs []string + diags = plan.ProjectIDs.ElementsAs(ctx, &projectIDs, false) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var sources []string + diags = plan.Sources.ElementsAs(ctx, &sources, false) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + out, err := r.client.CreateLogDrain(ctx, client.CreateLogDrainRequest{ + TeamID: plan.TeamID.ValueString(), + DeliveryFormat: plan.DeliveryFormat.ValueString(), + Environments: environments, + Headers: headers, + ProjectIDs: projectIDs, + SamplingRate: plan.SamplingRate.ValueFloat64(), + Secret: plan.Secret.ValueString(), + Sources: sources, + Endpoint: plan.Endpoint.ValueString(), + }) + if err != nil { + resp.Diagnostics.AddError( + "Error creating Log Drain", + "Could not create Log Drain, unexpected error: "+err.Error(), + ) + return + } + + result, diags := responseToLogDrain(ctx, out, plan.Secret) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "created Log Drain", map[string]interface{}{ + "team_id": plan.TeamID.ValueString(), + "log_drain_id": result.ID.ValueString(), + }) + + diags = resp.State.Set(ctx, result) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *logDrainResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state LogDrain + diags := req.State.Get(ctx, &state) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + out, err := r.client.GetLogDrain(ctx, state.ID.ValueString(), state.TeamID.ValueString()) + if client.NotFound(err) { + resp.State.RemoveResource(ctx) + return + } + if err != nil { + resp.Diagnostics.AddError( + "Error reading Log Drain", + fmt.Sprintf("Could not get Log Drain %s %s, unexpected error: %s", + state.TeamID.ValueString(), + state.ID.ValueString(), + err, + ), + ) + return + } + + result, diags := responseToLogDrain(ctx, out, state.Secret) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "read log drain", map[string]interface{}{ + "team_id": result.TeamID.ValueString(), + "log_drain_id": result.ID.ValueString(), + }) + + diags = resp.State.Set(ctx, result) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *logDrainResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + resp.Diagnostics.AddError( + "Updating a Log Drain is not supported", + "Updating a Log Drain is not supported", + ) +} + +func (r *logDrainResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var state LogDrain + diags := req.State.Get(ctx, &state) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + err := r.client.DeleteLogDrain(ctx, state.ID.ValueString(), state.TeamID.ValueString()) + if client.NotFound(err) { + return + } + if err != nil { + resp.Diagnostics.AddError( + "Error deleting log drain", + fmt.Sprintf( + "Could not delete Log Drain %s %s, unexpected error: %s", + state.TeamID.ValueString(), + state.ID.ValueString(), + err, + ), + ) + return + } + + tflog.Info(ctx, "deleted Log Drain", map[string]interface{}{ + "team_id": state.TeamID.ValueString(), + "log_drain_id": state.ID.ValueString(), + }) +} + +func (r *logDrainResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + teamID, id, ok := splitInto1Or2(req.ID) + if !ok { + resp.Diagnostics.AddError( + "Error importing Log Drain", + fmt.Sprintf("Invalid id '%s' specified. should be in format \"team_id/log_drain_id\" or \"log_drain_id\"", req.ID), + ) + } + + out, err := r.client.GetLogDrain(ctx, id, teamID) + if client.NotFound(err) { + resp.State.RemoveResource(ctx) + return + } + if err != nil { + resp.Diagnostics.AddError( + "Error reading Log Drain", + fmt.Sprintf("Could not get Log Drain %s %s, unexpected error: %s", + teamID, + id, + err, + ), + ) + return + } + + result, diags := responseToLogDrain(ctx, out, types.StringNull()) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "import log drain", map[string]interface{}{ + "team_id": result.TeamID.ValueString(), + "log_drain_id": result.ID.ValueString(), + }) + + diags = resp.State.Set(ctx, result) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } +} diff --git a/vercel/resource_log_drain_test.go b/vercel/resource_log_drain_test.go new file mode 100644 index 00000000..e2017a02 --- /dev/null +++ b/vercel/resource_log_drain_test.go @@ -0,0 +1,131 @@ +package vercel_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/vercel/terraform-provider-vercel/client" +) + +func testCheckLogDrainExists(teamID, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("no ID is set") + } + + _, err := testClient().GetLogDrain(context.TODO(), rs.Primary.ID, teamID) + return err + } +} + +func testCheckLogDrainDeleted(n, teamID string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("no ID is set") + } + + _, err := testClient().GetLogDrain(context.TODO(), rs.Primary.ID, teamID) + if err == nil { + return fmt.Errorf("expected not_found error, but got no error") + } + if !client.NotFound(err) { + return fmt.Errorf("Unexpected error checking for deleted log drain: %s", err) + } + + return nil + } +} + +func TestAcc_LogDrainResource(t *testing.T) { + name := acctest.RandString(16) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testAccProtoV6ProviderFactories, + CheckDestroy: testCheckLogDrainDeleted("vercel_log_drain.minimal", testTeam()), + Steps: []resource.TestStep{ + { + Config: testAccResourceLogDrain(name, teamIDConfig()), + Check: resource.ComposeAggregateTestCheckFunc( + 
testCheckLogDrainExists(testTeam(), "vercel_log_drain.minimal"), + resource.TestCheckResourceAttr("vercel_log_drain.minimal", "delivery_format", "json"), + resource.TestCheckResourceAttr("vercel_log_drain.minimal", "environments.#", "1"), + resource.TestCheckResourceAttr("vercel_log_drain.minimal", "environments.0", "production"), + resource.TestCheckResourceAttr("vercel_log_drain.minimal", "sources.#", "1"), + resource.TestCheckResourceAttr("vercel_log_drain.minimal", "sources.0", "static"), + resource.TestCheckResourceAttrSet("vercel_log_drain.minimal", "endpoint"), + resource.TestCheckResourceAttrSet("vercel_log_drain.minimal", "id"), + resource.TestCheckResourceAttrSet("vercel_log_drain.minimal", "team_id"), + resource.TestCheckResourceAttrSet("vercel_log_drain.maximal", "secret"), + + testCheckLogDrainExists(testTeam(), "vercel_log_drain.maximal"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "delivery_format", "json"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "environments.#", "2"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "environments.0", "preview"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "environments.1", "production"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "sources.#", "5"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "sources.0", "build"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "sources.1", "edge"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "sources.2", "external"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "sources.3", "lambda"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "sources.4", "static"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "secret", "a_very_long_and_very_well_specified_secret"), + resource.TestCheckResourceAttr("vercel_log_drain.maximal", "headers.%", "1"), + resource.TestCheckResourceAttrSet("vercel_log_drain.maximal", 
"endpoint"), + resource.TestCheckResourceAttrSet("vercel_log_drain.maximal", "id"), + resource.TestCheckResourceAttrSet("vercel_log_drain.maximal", "team_id"), + ), + }, + }, + }) +} + +func testAccResourceLogDrain(name, team string) string { + return fmt.Sprintf(` +data "vercel_endpoint_verification" "test" { + %[2]s +} + +resource "vercel_log_drain" "minimal" { + delivery_format = "json" + environments = ["production"] + sources = ["static"] + endpoint = "https://verify-test-rouge.vercel.app/api?${data.vercel_endpoint_verification.test.verification_code}" + + %[2]s +} + +resource "vercel_project" "test" { + name = "test-acc-%[1]s" + %[2]s +} + +resource "vercel_log_drain" "maximal" { + delivery_format = "json" + environments = ["production", "preview"] + headers = { + some-key = "some-value" + } + project_ids = [vercel_project.test.id] + sampling_rate = 0.8 + secret = "a_very_long_and_very_well_specified_secret" + sources = ["static", "edge", "external", "build", "lambda"] + endpoint = "https://verify-test-rouge.vercel.app/api?${data.vercel_endpoint_verification.test.verification_code}" + + %[2]s +} +`, name, team) +} diff --git a/vercel/resource_webhook.go b/vercel/resource_webhook.go index 0703e0da..fa8c928b 100644 --- a/vercel/resource_webhook.go +++ b/vercel/resource_webhook.go @@ -91,7 +91,7 @@ When an event happens, a webhook is sent to a third-party app, which can then ta PlanModifiers: []planmodifier.Set{setplanmodifier.RequiresReplace()}, }, "endpoint": schema.StringAttribute{ - Description: "Webhooks events will be sent as POST request to this URL.", + Description: "Webhooks events will be sent as POST requests to this URL.", Required: true, PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplace()}, }, @@ -104,6 +104,7 @@ When an event happens, a webhook is sent to a third-party app, which can then ta "secret": schema.StringAttribute{ Description: "A secret value which will be provided in the `x-vercel-signature` header and can be used 
to verify the authenticity of the webhook. See https://vercel.com/docs/observability/webhooks-overview/webhooks-api#securing-webhooks for further details.", Computed: true, + Sensitive: true, PlanModifiers: []planmodifier.String{stringplanmodifier.UseStateForUnknown()}, }, }, diff --git a/vercel/types_conversions.go b/vercel/types_conversions.go index 514e3e39..efe05eb4 100644 --- a/vercel/types_conversions.go +++ b/vercel/types_conversions.go @@ -48,6 +48,13 @@ func fromInt64Pointer(v *int64) types.Int64 { return types.Int64Value(*v) } +func fromFloat64Pointer(v *float64) types.Float64 { + if v == nil { + return types.Float64Null() + } + return types.Float64Value(*v) +} + func toTeamID(v string) types.String { if v == "" { return types.StringNull() diff --git a/vercel/validator_float64_greater_than.go b/vercel/validator_float64_greater_than.go new file mode 100644 index 00000000..47c12aa2 --- /dev/null +++ b/vercel/validator_float64_greater_than.go @@ -0,0 +1,40 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +func float64GreaterThan(val float64) validatorFloat64GreaterThan { + return validatorFloat64GreaterThan{ + Min: val, + } +} + +type validatorFloat64GreaterThan struct { + Min float64 +} + +func (v validatorFloat64GreaterThan) Description(ctx context.Context) string { + return fmt.Sprintf("Value must be greater than %.2f", v.Min) +} +func (v validatorFloat64GreaterThan) MarkdownDescription(ctx context.Context) string { + return fmt.Sprintf("Value must be greater than `%.2f`", v.Min) +} + +func (v validatorFloat64GreaterThan) ValidateFloat64(ctx context.Context, req validator.Float64Request, resp *validator.Float64Response) { + if req.ConfigValue.IsUnknown() || req.ConfigValue.IsNull() { + return + } + + if req.ConfigValue.ValueFloat64() < v.Min { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid value provided", + fmt.Sprintf("Value must be greater than %.2f, got: 
%.2f.", v.Min, req.ConfigValue.ValueFloat64()), + ) + return + } +} diff --git a/vercel/validator_float64_less_than.go b/vercel/validator_float64_less_than.go new file mode 100644 index 00000000..4183d65e --- /dev/null +++ b/vercel/validator_float64_less_than.go @@ -0,0 +1,40 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +func float64LessThan(val float64) validatorFloat64LessThan { + return validatorFloat64LessThan{ + Max: val, + } +} + +type validatorFloat64LessThan struct { + Max float64 +} + +func (v validatorFloat64LessThan) Description(ctx context.Context) string { + return fmt.Sprintf("Value must be less than %.2f", v.Max) +} +func (v validatorFloat64LessThan) MarkdownDescription(ctx context.Context) string { + return fmt.Sprintf("Value must be less than `%.2f`", v.Max) +} + +func (v validatorFloat64LessThan) ValidateFloat64(ctx context.Context, req validator.Float64Request, resp *validator.Float64Response) { + if req.ConfigValue.IsUnknown() || req.ConfigValue.IsNull() { + return + } + + if req.ConfigValue.ValueFloat64() > v.Max { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid value provided", + fmt.Sprintf("Value must be less than %.2f, got: %.2f.", v.Max, req.ConfigValue.ValueFloat64()), + ) + return + } +} diff --git a/vercel/validator_map_max_count.go b/vercel/validator_map_max_count.go new file mode 100644 index 00000000..c2e5f709 --- /dev/null +++ b/vercel/validator_map_max_count.go @@ -0,0 +1,42 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.Map = validatorMapMaxCount{} + +func mapMaxCount(max int) validatorMapMaxCount { + return validatorMapMaxCount{ + Max: max, + } +} + +type validatorMapMaxCount struct { + Max int +} + +func (v validatorMapMaxCount) Description(ctx context.Context) string { + return fmt.Sprintf("Map must contain fewer than %d items", v.Max) +} 
+func (v validatorMapMaxCount) MarkdownDescription(ctx context.Context) string { + return fmt.Sprintf("Map must contain fewer than %d items", v.Max) +} + +func (v validatorMapMaxCount) ValidateMap(ctx context.Context, req validator.MapRequest, resp *validator.MapResponse) { + if req.ConfigValue.IsUnknown() || req.ConfigValue.IsNull() { + return + } + + if len(req.ConfigValue.Elements()) > v.Max { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid value provided", + v.Description(ctx), + ) + return + } +} diff --git a/vercel/validator_string_length_greater_than.go b/vercel/validator_string_length_greater_than.go new file mode 100644 index 00000000..dfbe3ab7 --- /dev/null +++ b/vercel/validator_string_length_greater_than.go @@ -0,0 +1,42 @@ +package vercel + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.String = validatorStringLengthGreaterThan{} + +func stringLengthGreaterThan(min int) validatorStringLengthGreaterThan { + return validatorStringLengthGreaterThan{ + Min: min, + } +} + +type validatorStringLengthGreaterThan struct { + Min int +} + +func (v validatorStringLengthGreaterThan) Description(ctx context.Context) string { + return fmt.Sprintf("String length must be greater than %d", v.Min) +} + +func (v validatorStringLengthGreaterThan) MarkdownDescription(ctx context.Context) string { + return fmt.Sprintf("String length must be greater than %d", v.Min) +} + +func (v validatorStringLengthGreaterThan) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsUnknown() || req.ConfigValue.IsNull() { + return + } + strLen := len(req.ConfigValue.ValueString()) + if strLen < v.Min { + resp.Diagnostics.AddError( + "Invalid value provided", + fmt.Sprintf("String length must be greater than %d, got: %d.", v.Min, strLen), + ) + return + } +}