From 64e4747a1d243ec5bc11e6c17d3a41b96060838d Mon Sep 17 00:00:00 2001 From: Nohv Date: Mon, 23 Sep 2024 14:51:00 +0900 Subject: [PATCH] fix(objectStorage): Fix invalid key format (#446) * fix: Fix invalid key format (now can cover longer path seperated with "/") * fix: Fix invalid regex format * chore: Remvoe redundant log * fix: Set fin endpoint to do not parse regionCode * fix: Fix object source update logic for proper acl apply * test: Add update test code * chore: Fix regex typo * feat: Remove redundant Update logic (update with planmodifier) * Revert "fix: Set fin endpoint to do not parse regionCode" This reverts commit d42390ebbf12c20f68a58c3a4708549e7da9b691. * fix: Remove unavailable optional properties * feat: Add copy object update logic & testing * feat: Add object update logic & testing * fix: Modify optional input attributes * feat: integrate duplicated PutObject logic * fix: Remove unprovided property from docs * feat: Add Close() contrast to Open() * refactor: Remove dependency of local source path at update logic * fix: Modify to operate GetObject only if source is not changed --- docs/data-sources/object_storage_object.md | 1 - docs/resources/object_storage_object.md | 7 +- docs/resources/object_storage_object_copy.md | 8 +- .../objectstorage/objectstorage_bucket_acl.go | 4 - .../objectstorage_bucket_acl_test.go | 30 +++ .../objectstorage/objectstorage_object.go | 103 +++++++---- .../objectstorage/objectstorage_object_acl.go | 6 +- .../objectstorage_object_acl_test.go | 41 +++- .../objectstorage_object_copy.go | 62 ++++++- .../objectstorage_object_copy_test.go | 175 +++++++++++++++++- .../objectstorage_object_data_source_test.go | 2 +- .../objectstorage_object_test.go | 109 ++++++++++- 12 files changed, 475 insertions(+), 73 deletions(-) diff --git a/docs/data-sources/object_storage_object.md b/docs/data-sources/object_storage_object.md index 5c69a8b65..6dcf74844 100644 --- a/docs/data-sources/object_storage_object.md +++ 
b/docs/data-sources/object_storage_object.md @@ -35,7 +35,6 @@ This data source exports the following attributes in addition to the arguments a * `content_length` - How long the object is. * `content_type` - Type of the object. * `body` - Saved content of the object. -* `bucket_key_enabled` - Whether this resource uses Ncloud KMS Keys for SSE. * `content_encoding` - Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. * `accept_ranges` - Indicates that a range of bytes was specified. * `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). diff --git a/docs/resources/object_storage_object.md b/docs/resources/object_storage_object.md index 0af2fb2ad..1454bcab3 100644 --- a/docs/resources/object_storage_object.md +++ b/docs/resources/object_storage_object.md @@ -40,11 +40,7 @@ The following arguments are required: The following arguments are optional: -* `bucket_key_enabled` - (Optional) Whether this resource uses Ncloud KMS Keys for SSE. -* `content_encoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. 
-* `content_language` - (Optional) Language the content is in e.g., en-US or en-GB. * `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g., application/octet-stream. All Valid MIME Types are valid for this input. -* `website_redirect_location` - (Optional) Target URL for website redirect. ## Attribute Reference. @@ -54,10 +50,13 @@ This resource exports the following attributes in addition to the arguments abov * `accept_ranges` - Indicates that a range of bytes was specified. * `content_length` - Size of the body in bytes. +* `content_encoding` - Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. +* `content_language` - Language the content is in e.g., en-US or en-GB. * `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). * `expiration` - the object expiration is configured, the response includes this header. It includes the expiry-date and rule-id key-value pairs providing object expiration information. The value of the rule-id is URL-encoded. * `last_modified` - Date and time when the object was last modified. * `parts_count` - The count of parts this object has. This value is only returned if you specify partNumber in your request and the object was uploaded as a multipart upload. 
+* `website_redirect_location` - Target URL for website redirect. * `version_id` - Unique version ID value for the object, if bucket versioning is enabled. ## Import diff --git a/docs/resources/object_storage_object_copy.md b/docs/resources/object_storage_object_copy.md index ec9406efc..514386643 100644 --- a/docs/resources/object_storage_object_copy.md +++ b/docs/resources/object_storage_object_copy.md @@ -50,10 +50,7 @@ The following arguments are required: The following arguments are supported: -* `content_encoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. -* `content_language` - (Optional) Language the content is in e.g., en-US or en-GB. -* `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g., application/octet-stream. All Valid MIME Types are valid for this input. -* `website_redirect_location` - (Optional) Target URL for website redirect. +* `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g., application/octet-stream. All Valid MIME Types are valid for this input. This attribute is only available in update operation. ## Attribute Reference. @@ -62,11 +59,14 @@ The following arguments are supported: This resource exports the following attributes in addition to the arguments above: * `accept_ranges` - Indicates that a range of bytes was specified. +* `content_encoding` - Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. 
+* `content_language` - Language the content is in e.g., en-US or en-GB. * `content_length` - Size of the body in bytes. * `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). * `expiration` - the object expiration is configured, the response includes this header. It includes the expiry-date and rule-id key-value pairs providing object expiration information. The value of the rule-id is URL-encoded. * `last_modified` - Date and time when the object was last modified. * `parts_count` - The count of parts this object has. This value is only returned if you specify partNumber in your request and the object was uploaded as a multipart upload. +* `website_redirect_location` - Target URL for website redirect. * `version_id` - Unique version ID value for the object, if bucket versioning is enabled. 
## Import diff --git a/internal/service/objectstorage/objectstorage_bucket_acl.go b/internal/service/objectstorage/objectstorage_bucket_acl.go index cd249fcff..f62e79562 100644 --- a/internal/service/objectstorage/objectstorage_bucket_acl.go +++ b/internal/service/objectstorage/objectstorage_bucket_acl.go @@ -86,19 +86,15 @@ func (b *bucketACLResource) Schema(_ context.Context, req resource.SchemaRequest }, "display_name": schema.StringAttribute{ Computed: true, - Optional: true, }, "email_address": schema.StringAttribute{ Computed: true, - Optional: true, }, "id": schema.StringAttribute{ Computed: true, - Optional: true, }, "uri": schema.StringAttribute{ Computed: true, - Optional: true, }, }, }, diff --git a/internal/service/objectstorage/objectstorage_bucket_acl_test.go b/internal/service/objectstorage/objectstorage_bucket_acl_test.go index eb8f79274..19e0ed32d 100644 --- a/internal/service/objectstorage/objectstorage_bucket_acl_test.go +++ b/internal/service/objectstorage/objectstorage_bucket_acl_test.go @@ -42,6 +42,36 @@ func TestAccResourceNcloudObjectStorage_bucket_acl_basic(t *testing.T) { }) } +func TestAccResourceNcloudObjectStorage_bucket_acl_update(t *testing.T) { + var aclOutput s3.GetBucketAclOutput + bucketName := fmt.Sprintf("tf-test-%s", acctest.RandString(5)) + + acl := "public-read" + newACL := "private" + resourceName := "ncloud_objectstorage_bucket_acl.testing_acl" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { TestAccPreCheck(t) }, + ProtoV6ProviderFactories: ProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccBucketACLConfig(bucketName, acl), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckBucketACLExists(resourceName, &aclOutput, GetTestProvider(true)), + resource.TestCheckResourceAttr(resourceName, "rule", acl), + ), + }, + { + Config: testAccBucketACLConfig(bucketName, newACL), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckBucketACLExists(resourceName, &aclOutput, 
GetTestProvider(true)), + resource.TestCheckResourceAttr(resourceName, "rule", newACL), + ), + }, + }, + }) +} + func testAccCheckBucketACLExists(n string, object *s3.GetBucketAclOutput, provider *schema.Provider) resource.TestCheckFunc { return func(s *terraform.State) error { resource, ok := s.RootModule().Resources[n] diff --git a/internal/service/objectstorage/objectstorage_object.go b/internal/service/objectstorage/objectstorage_object.go index 032b81eb6..5b4194fbe 100644 --- a/internal/service/objectstorage/objectstorage_object.go +++ b/internal/service/objectstorage/objectstorage_object.go @@ -49,6 +49,7 @@ func (o *objectResource) Create(ctx context.Context, req resource.CreateRequest, resp.Diagnostics.AddError("CREATING ERROR", "invalid source path") return } + defer file.Close() reqParams := &s3.PutObjectInput{ Bucket: plan.Bucket.ValueStringPointer(), @@ -163,42 +164,42 @@ func (o *objectResource) Schema(_ context.Context, req resource.SchemaRequest, r Description: "(Required) Name of the object once it is in the bucket", }, "source": schema.StringAttribute{ - Required: true, + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, Description: "(Required) Path of the object", }, "accept_ranges": schema.StringAttribute{ Computed: true, - Optional: true, }, "content_encoding": schema.StringAttribute{ - Optional: true, + Computed: true, }, "content_language": schema.StringAttribute{ - Optional: true, + Computed: true, }, "content_length": schema.Int64Attribute{ Computed: true, }, "content_type": schema.StringAttribute{ Computed: true, + Optional: true, }, "etag": schema.StringAttribute{ Computed: true, }, "expiration": schema.StringAttribute{ Computed: true, - Optional: true, }, "parts_count": schema.Int64Attribute{ Computed: true, - Optional: true, }, "version_id": schema.StringAttribute{ Computed: true, - Optional: true, }, "website_redirect_location": schema.StringAttribute{ - Optional: true, + Computed: 
true, }, "last_modified": schema.StringAttribute{ Computed: true, @@ -217,58 +218,82 @@ func (o *objectResource) Update(ctx context.Context, req resource.UpdateRequest, return } + reqParams := &s3.PutObjectInput{ + Bucket: state.Bucket.ValueStringPointer(), + Key: state.Key.ValueStringPointer(), + } + + // get body from plan with source path or existing object if !plan.Source.Equal(state.Source) { file, err := os.Open(plan.Source.ValueString()) if err != nil { resp.Diagnostics.AddError("UPDATING ERROR", "invalid source path") return } + defer file.Close() - reqParams := &s3.PutObjectInput{ + reqParams.Body = file + } else { + getReqParams := &s3.GetObjectInput{ Bucket: state.Bucket.ValueStringPointer(), Key: state.Key.ValueStringPointer(), - Body: file, - } - - // attributes that has dependancies with source - if !plan.ContentEncoding.IsNull() && !plan.ContentEncoding.IsUnknown() { - reqParams.ContentEncoding = plan.ContentEncoding.ValueStringPointer() - } - - if !plan.ContentLanguage.IsNull() && !plan.ContentLanguage.IsUnknown() { - reqParams.ContentLanguage = plan.ContentLanguage.ValueStringPointer() } - if !plan.ContentType.IsNull() && !plan.ContentType.IsUnknown() { - reqParams.ContentType = plan.ContentType.ValueStringPointer() - } + tflog.Info(ctx, "GetObject at update operation reqParams="+common.MarshalUncheckedString(getReqParams)) - if !plan.WebsiteRedirectLocation.IsNull() && !plan.WebsiteRedirectLocation.IsUnknown() { - reqParams.WebsiteRedirectLocation = plan.WebsiteRedirectLocation.ValueStringPointer() - } - - tflog.Info(ctx, "PutObject at update operation reqParams="+common.MarshalUncheckedString(reqParams)) - - output, err := o.config.Client.ObjectStorage.PutObject(ctx, reqParams) + getOutput, err := o.config.Client.ObjectStorage.GetObject(ctx, getReqParams) if err != nil { resp.Diagnostics.AddError("UPDATING ERROR", err.Error()) return } - if output == nil { - resp.Diagnostics.AddError("UPDATING ERROR", "response invalid") + if getOutput == nil { + 
resp.Diagnostics.AddError("UPDATING ERROR", "response invalid at get object") return } - tflog.Info(ctx, "PutObject at update operation response="+common.MarshalUncheckedString(output)) + tflog.Info(ctx, "GetObject at update operation response="+common.MarshalUncheckedString(getOutput)) - if err := waitObjectUploaded(ctx, o.config, plan.Bucket.ValueString(), plan.Key.ValueString()); err != nil { - resp.Diagnostics.AddError("UPDATING ERROR", err.Error()) - return - } + reqParams.Body = getOutput.Body + } - plan.refreshFromOutput(ctx, o.config, &resp.Diagnostics) - resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) + // attributes that have dependencies with source + if !plan.ContentEncoding.IsNull() && !plan.ContentEncoding.IsUnknown() { + reqParams.ContentEncoding = plan.ContentEncoding.ValueStringPointer() + } + + if !plan.ContentLanguage.IsNull() && !plan.ContentLanguage.IsUnknown() { + reqParams.ContentLanguage = plan.ContentLanguage.ValueStringPointer() + } + + if !plan.WebsiteRedirectLocation.IsNull() && !plan.WebsiteRedirectLocation.IsUnknown() { + reqParams.WebsiteRedirectLocation = plan.WebsiteRedirectLocation.ValueStringPointer() + } + + if !plan.ContentType.Equal(state.ContentType) && !plan.ContentType.IsNull() && !plan.ContentType.IsUnknown() { + reqParams.ContentType = plan.ContentType.ValueStringPointer() + } + + tflog.Info(ctx, "PutObject at update operation reqParams="+common.MarshalUncheckedString(reqParams)) + + output, err := o.config.Client.ObjectStorage.PutObject(ctx, reqParams) + if err != nil { + resp.Diagnostics.AddError("UPDATING ERROR", err.Error()) + return } + if output == nil { + resp.Diagnostics.AddError("UPDATING ERROR", "response invalid at put object") + return + } + + tflog.Info(ctx, "PutObject at update operation response="+common.MarshalUncheckedString(output)) + + if err := waitObjectUploaded(ctx, o.config, plan.Bucket.ValueString(), plan.Key.ValueString()); err != nil { + resp.Diagnostics.AddError("UPDATING ERROR", 
err.Error()) + return + } + + plan.refreshFromOutput(ctx, o.config, &resp.Diagnostics) + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) } func (o *objectResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { @@ -445,5 +470,5 @@ func ObjectIDParser(id string) (bucketName, key string) { return "", "" } - return parts[0], parts[1] + return parts[0], strings.Join(parts[1:], "/") } diff --git a/internal/service/objectstorage/objectstorage_object_acl.go b/internal/service/objectstorage/objectstorage_object_acl.go index a7cf5495a..482b18a72 100644 --- a/internal/service/objectstorage/objectstorage_object_acl.go +++ b/internal/service/objectstorage/objectstorage_object_acl.go @@ -113,7 +113,7 @@ func (o *objectACLResource) Schema(_ context.Context, req resource.SchemaRequest }, Validators: []validator.String{ stringvalidator.All( - stringvalidator.RegexMatches(regexp.MustCompile(`^[a-z0-9-_]+\/[a-zA-Z0-9_.-]+$`), "Requires pattern with link of target object"), + stringvalidator.RegexMatches(regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`), "Requires pattern with link of target object"), ), }, Description: "Target object id", @@ -143,19 +143,15 @@ func (o *objectACLResource) Schema(_ context.Context, req resource.SchemaRequest }, "display_name": schema.StringAttribute{ Computed: true, - Optional: true, }, "email_address": schema.StringAttribute{ Computed: true, - Optional: true, }, "id": schema.StringAttribute{ Computed: true, - Optional: true, }, "uri": schema.StringAttribute{ Computed: true, - Optional: true, }, }, }, diff --git a/internal/service/objectstorage/objectstorage_object_acl_test.go b/internal/service/objectstorage/objectstorage_object_acl_test.go index fca6b8aff..5265f7aab 100644 --- a/internal/service/objectstorage/objectstorage_object_acl_test.go +++ b/internal/service/objectstorage/objectstorage_object_acl_test.go @@ -21,7 +21,8 @@ import ( func 
TestAccResourceNcloudObjectStorage_object_acl_basic(t *testing.T) { bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) - key := fmt.Sprintf("%s.md", acctest.RandString(5)) + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) + key := "test/key/path" + sourceName content := "content for file upload testing" aclOptions := []string{string(awsTypes.ObjectCannedACLPrivate), string(awsTypes.ObjectCannedACLPublicRead), @@ -30,7 +31,7 @@ func TestAccResourceNcloudObjectStorage_object_acl_basic(t *testing.T) { acl := aclOptions[acctest.RandIntRange(0, len(aclOptions)-1)] resourceName := "ncloud_objectstorage_object_acl.testing_acl" - tmpFile := CreateTempFile(t, content, key) + tmpFile := CreateTempFile(t, content, sourceName) source := tmpFile.Name() defer os.Remove(source) @@ -49,6 +50,42 @@ func TestAccResourceNcloudObjectStorage_object_acl_basic(t *testing.T) { }) } +func TestAccResourceNcloudObjectStorage_object_acl_update(t *testing.T) { + bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) + key := "test/key/path" + sourceName + content := "content for file upload testing" + + acl := "public-read" + newACL := "private" + resourceName := "ncloud_objectstorage_object_acl.testing_acl" + + tmpFile := CreateTempFile(t, content, sourceName) + source := tmpFile.Name() + defer os.Remove(source) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { TestAccPreCheck(t) }, + ProtoV6ProviderFactories: ProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccObjectACLConfig(bucketName, key, source, acl), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectACLExists(resourceName, GetTestProvider(true)), + resource.TestCheckResourceAttr(resourceName, "rule", acl), + ), + }, + { + Config: testAccObjectACLConfig(bucketName, key, source, newACL), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectACLExists(resourceName, 
GetTestProvider(true)), + resource.TestCheckResourceAttr(resourceName, "rule", newACL), + ), + }, + }, + }) +} + func testAccCheckObjectACLExists(n string, provider *schema.Provider) resource.TestCheckFunc { return func(s *terraform.State) error { resource, ok := s.RootModule().Resources[n] diff --git a/internal/service/objectstorage/objectstorage_object_copy.go b/internal/service/objectstorage/objectstorage_object_copy.go index d9a24dea8..439635e0f 100644 --- a/internal/service/objectstorage/objectstorage_object_copy.go +++ b/internal/service/objectstorage/objectstorage_object_copy.go @@ -182,37 +182,34 @@ func (o *objectCopyResource) Schema(ctx context.Context, req resource.SchemaRequ }, "accept_ranges": schema.StringAttribute{ Computed: true, - Optional: true, }, "content_encoding": schema.StringAttribute{ - Optional: true, + Computed: true, }, "content_language": schema.StringAttribute{ - Optional: true, + Computed: true, }, "content_length": schema.Int64Attribute{ Computed: true, }, "content_type": schema.StringAttribute{ Computed: true, + Optional: true, }, "etag": schema.StringAttribute{ Computed: true, }, "expiration": schema.StringAttribute{ Computed: true, - Optional: true, }, "parts_count": schema.Int64Attribute{ Computed: true, - Optional: true, }, "version_id": schema.StringAttribute{ Computed: true, - Optional: true, }, "website_redirect_location": schema.StringAttribute{ - Optional: true, + Computed: true, }, "last_modified": schema.StringAttribute{ Computed: true, @@ -276,6 +273,57 @@ func (o *objectCopyResource) Update(ctx context.Context, req resource.UpdateRequ plan.refreshFromOutput(ctx, o.config, &resp.Diagnostics) } + + if !plan.ContentType.Equal(state.ContentType) { + + getReqParams := &s3.GetObjectInput{ + Bucket: state.Bucket.ValueStringPointer(), + Key: state.Key.ValueStringPointer(), + } + + tflog.Info(ctx, "GetObject at update operation reqParams="+common.MarshalUncheckedString(getReqParams)) + + getOutput, err := 
o.config.Client.ObjectStorage.GetObject(ctx, getReqParams) + if err != nil { + resp.Diagnostics.AddError("UPDATING ERROR", err.Error()) + return + } + if getOutput == nil { + resp.Diagnostics.AddError("UPDATING ERROR", "response invalid at get object") + return + } + + tflog.Info(ctx, "GetObject at update operation response="+common.MarshalUncheckedString(getOutput)) + + reqParams := &s3.PutObjectInput{ + Bucket: plan.Bucket.ValueStringPointer(), + Key: plan.Key.ValueStringPointer(), + Body: getOutput.Body, + ContentType: plan.ContentType.ValueStringPointer(), + } + + tflog.Info(ctx, "PutObject at update operation reqParams="+common.MarshalUncheckedString(reqParams)) + + output, err := o.config.Client.ObjectStorage.PutObject(ctx, reqParams) + if err != nil { + resp.Diagnostics.AddError("UPDATING ERROR", err.Error()) + return + } + if output == nil { + resp.Diagnostics.AddError("UPDATING ERROR", "response invalid") + return + } + + tflog.Info(ctx, "PutObject at update operation response="+common.MarshalUncheckedString(output)) + + if err := waitObjectUploaded(ctx, o.config, plan.Bucket.ValueString(), plan.Key.ValueString()); err != nil { + resp.Diagnostics.AddError("UPDATING ERROR", err.Error()) + return + } + + plan.refreshFromOutput(ctx, o.config, &resp.Diagnostics) + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) 
+ } } func waitObjectCopied(ctx context.Context, config *conn.ProviderConfig, bucketName string, key string) error { diff --git a/internal/service/objectstorage/objectstorage_object_copy_test.go b/internal/service/objectstorage/objectstorage_object_copy_test.go index 0a40c3c65..c7262beeb 100644 --- a/internal/service/objectstorage/objectstorage_object_copy_test.go +++ b/internal/service/objectstorage/objectstorage_object_copy_test.go @@ -20,11 +20,12 @@ import ( func TestAccResourceNcloudObjectStorage_object_copy_basic(t *testing.T) { bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) - key := fmt.Sprintf("%s.md", acctest.RandString(5)) + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) resourceName := "ncloud_objectstorage_object_copy.testing_copy" content := "content for file upload testing" + key := "test/key/" + sourceName - tmpFile := CreateTempFile(t, content, key) + tmpFile := CreateTempFile(t, content, sourceName) source := tmpFile.Name() defer os.Remove(source) @@ -37,7 +38,7 @@ func TestAccResourceNcloudObjectStorage_object_copy_basic(t *testing.T) { Config: testAccObjectCopyConfig(bucketName, key, source), Check: resource.ComposeAggregateTestCheckFunc( testAccCheckObjectCopyExists(resourceName, GetTestProvider(true)), - resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_]+\/[a-zA-Z0-9_.-]+$`)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), resource.TestCheckResourceAttr(resourceName, "bucket", bucketName+"-to"), resource.TestCheckResourceAttr(resourceName, "key", key), ), @@ -46,6 +47,89 @@ func TestAccResourceNcloudObjectStorage_object_copy_basic(t *testing.T) { }) } +func TestAccResourceNcloudObjectStorage_object_copy_update_source(t *testing.T) { + bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) + resourceName := "ncloud_objectstorage_object_copy.testing_copy" + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) 
+ content := "content for file upload testing" + preObjectkey := "test/key/" + sourceName + + tmpFile := CreateTempFile(t, content, sourceName) + source := tmpFile.Name() + defer os.Remove(source) + + preObjectResourceName := "ncloud_objectstorage_object.testing_object_pre" + postObjectResourceName := "ncloud_objectstorage_object.testing_object_post" + postObjectKey := "test/post-key/" + sourceName + + resource.Test(t, resource.TestCase{ + PreCheck: func() { TestAccPreCheck(t) }, + ProtoV6ProviderFactories: ProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccObjectCopySourcePreUpdateConfig(bucketName, preObjectkey, source, postObjectKey), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectCopyExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName+"-to"), + resource.TestCheckResourceAttr(resourceName, "key", preObjectkey), + resource.TestCheckResourceAttrPair(resourceName, "source", preObjectResourceName, "id"), + ), + }, + { + Config: testAccObjectCopySourcePostUpdateConfig(bucketName, preObjectkey, source, postObjectKey), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectCopyExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName+"-to"), + resource.TestCheckResourceAttr(resourceName, "key", postObjectKey), + resource.TestCheckResourceAttrPair(resourceName, "source", postObjectResourceName, "id"), + ), + }, + }, + }) +} + +func TestAccResourceNcloudObjectStorage_object_copy_update_content_type(t *testing.T) { + bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) + resourceName := "ncloud_objectstorage_object_copy.testing_copy" + sourceName := 
fmt.Sprintf("%s.md", acctest.RandString(5)) + content := "content for file upload testing" + key := "test/key/" + sourceName + + tmpFile := CreateTempFile(t, content, sourceName) + source := tmpFile.Name() + defer os.Remove(source) + + newContentType := "application/json" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { TestAccPreCheck(t) }, + ProtoV6ProviderFactories: ProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccObjectCopyConfig(bucketName, key, source), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName+"-to"), + resource.TestCheckResourceAttr(resourceName, "key", key), + ), + }, + { + Config: testAccObjectCopyContentTypeConfig(bucketName, key, source, newContentType), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName+"-to"), + resource.TestCheckResourceAttr(resourceName, "key", key), + resource.TestCheckResourceAttr(resourceName, "content_type", newContentType), + ), + }, + }, + }) +} + func testAccCheckObjectCopyExists(n string, provider *schema.Provider) resource.TestCheckFunc { return func(s *terraform.State) error { resource, ok := s.RootModule().Resources[n] @@ -121,3 +205,88 @@ func testAccObjectCopyConfig(bucketName, key, source string) string { } `, bucketName, key, source) } + +func testAccObjectCopyContentTypeConfig(bucketName, key, source, contentType string) string { + return fmt.Sprintf(` + resource "ncloud_objectstorage_bucket" "testing_bucket_from" { + bucket_name = "%[1]s-from" + } + + resource "ncloud_objectstorage_bucket" 
"testing_bucket_to" { + bucket_name = "%[1]s-to" + } + + resource "ncloud_objectstorage_object" "testing_object" { + bucket = ncloud_objectstorage_bucket.testing_bucket_from.bucket_name + key = "%[2]s" + source = "%[3]s" + } + + resource "ncloud_objectstorage_object_copy" "testing_copy" { + bucket = ncloud_objectstorage_bucket.testing_bucket_to.bucket_name + key = "%[2]s" + source = ncloud_objectstorage_object.testing_object.id + content_type = "%[4]s" + } + `, bucketName, key, source, contentType) +} + +func testAccObjectCopySourcePreUpdateConfig(bucketName, key, source, postObjectKey string) string { + return fmt.Sprintf(` + resource "ncloud_objectstorage_bucket" "testing_bucket_from" { + bucket_name = "%[1]s-from" + } + + resource "ncloud_objectstorage_bucket" "testing_bucket_to" { + bucket_name = "%[1]s-to" + } + + resource "ncloud_objectstorage_object" "testing_object_pre" { + bucket = ncloud_objectstorage_bucket.testing_bucket_from.bucket_name + key = "%[2]s" + source = "%[3]s" + } + + resource "ncloud_objectstorage_object" "testing_object_post" { + bucket = ncloud_objectstorage_bucket.testing_bucket_from.bucket_name + key = "%[4]s" + source = "%[3]s" + } + + resource "ncloud_objectstorage_object_copy" "testing_copy" { + bucket = ncloud_objectstorage_bucket.testing_bucket_to.bucket_name + key = "%[2]s" + source = ncloud_objectstorage_object.testing_object_pre.id + } + `, bucketName, key, source, postObjectKey) +} + +func testAccObjectCopySourcePostUpdateConfig(bucketName, key, source, postObjectKey string) string { + return fmt.Sprintf(` + resource "ncloud_objectstorage_bucket" "testing_bucket_from" { + bucket_name = "%[1]s-from" + } + + resource "ncloud_objectstorage_bucket" "testing_bucket_to" { + bucket_name = "%[1]s-to" + } + + resource "ncloud_objectstorage_object" "testing_object_pre" { + bucket = ncloud_objectstorage_bucket.testing_bucket_from.bucket_name + key = "%[2]s" + source = "%[3]s" + } + + resource "ncloud_objectstorage_object" 
"testing_object_post" { + bucket = ncloud_objectstorage_bucket.testing_bucket_from.bucket_name + key = "%[4]s" + source = "%[3]s" + } + + resource "ncloud_objectstorage_object_copy" "testing_copy" { + bucket = ncloud_objectstorage_bucket.testing_bucket_to.bucket_name + key = "%[4]s" + source = ncloud_objectstorage_object.testing_object_post.id + } + `, bucketName, key, source, postObjectKey) +} diff --git a/internal/service/objectstorage/objectstorage_object_data_source_test.go b/internal/service/objectstorage/objectstorage_object_data_source_test.go index dd8c7671f..9d8daadf7 100644 --- a/internal/service/objectstorage/objectstorage_object_data_source_test.go +++ b/internal/service/objectstorage/objectstorage_object_data_source_test.go @@ -29,7 +29,7 @@ func TestAccDataSourceNcloudObjectStorage_object_basic(t *testing.T) { { Config: testAccDataSourceObjectConfig(bucket, key, source), Check: resource.ComposeTestCheckFunc( - resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_]+\/[a-zA-Z0-9_.-]+$`)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), resource.TestCheckResourceAttrPair(dataName, "object_id", resourceName, "id"), ), }, diff --git a/internal/service/objectstorage/objectstorage_object_test.go b/internal/service/objectstorage/objectstorage_object_test.go index da1b30888..21b9e8974 100644 --- a/internal/service/objectstorage/objectstorage_object_test.go +++ b/internal/service/objectstorage/objectstorage_object_test.go @@ -20,11 +20,12 @@ import ( func TestAccResourceNcloudObjectStorage_object_basic(t *testing.T) { bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) - key := fmt.Sprintf("%s.md", acctest.RandString(5)) + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) resourceName := "ncloud_objectstorage_object.testing_object" content := "content for file upload testing" + key := "test/key/" + sourceName - tmpFile := CreateTempFile(t, content, 
key) + tmpFile := CreateTempFile(t, content, sourceName) source := tmpFile.Name() defer os.Remove(source) @@ -37,7 +38,7 @@ func TestAccResourceNcloudObjectStorage_object_basic(t *testing.T) { Config: testAccObjectConfig(bucketName, key, source), Check: resource.ComposeAggregateTestCheckFunc( testAccCheckObjectExists(resourceName, GetTestProvider(true)), - resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_]+\/[a-zA-Z0-9_.-]+$`)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), resource.TestCheckResourceAttr(resourceName, "bucket", bucketName), resource.TestCheckResourceAttr(resourceName, "key", key), resource.TestCheckResourceAttr(resourceName, "source", source), @@ -47,6 +48,94 @@ func TestAccResourceNcloudObjectStorage_object_basic(t *testing.T) { }) } +func TestAccResourceNcloudObjectStorage_object_update_source(t *testing.T) { + bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) + newSourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) + key := "test/key/" + sourceName + + content := "content for file upload testing" + newContent := "new content for file update testing" + resourceName := "ncloud_objectstorage_object.testing_object" + + tmpFile := CreateTempFile(t, content, sourceName) + source := tmpFile.Name() + defer os.Remove(source) + + newTmpFile := CreateTempFile(t, newContent, newSourceName) + newSource := newTmpFile.Name() + defer os.Remove(newSource) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { TestAccPreCheck(t) }, + ProtoV6ProviderFactories: ProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccObjectConfig(bucketName, key, source), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", 
regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName), + resource.TestCheckResourceAttr(resourceName, "key", key), + resource.TestCheckResourceAttr(resourceName, "source", source), + ), + }, + { + Config: testAccObjectConfig(bucketName, key, newSource), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName), + resource.TestCheckResourceAttr(resourceName, "key", key), + resource.TestCheckResourceAttr(resourceName, "source", newSource), + ), + }, + }, + }) +} + +func TestAccResourceNcloudObjectStorage_object_update_content_type(t *testing.T) { + bucketName := fmt.Sprintf("tf-bucket-%s", acctest.RandString(5)) + sourceName := fmt.Sprintf("%s.md", acctest.RandString(5)) + content := "content for file upload testing" + resourceName := "ncloud_objectstorage_object.testing_object" + key := "test/key/" + sourceName + + tmpFile := CreateTempFile(t, content, sourceName) + source := tmpFile.Name() + defer os.Remove(source) + + newContentType := "application/json" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { TestAccPreCheck(t) }, + ProtoV6ProviderFactories: ProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccObjectConfig(bucketName, key, source), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName), + resource.TestCheckResourceAttr(resourceName, "key", key), + resource.TestCheckResourceAttr(resourceName, "source", source), + ), + }, + { + Config: testAccObjectContentType(bucketName, key, 
source, newContentType), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckObjectExists(resourceName, GetTestProvider(true)), + resource.TestMatchResourceAttr(resourceName, "id", regexp.MustCompile(`^[a-z0-9-_.-]+(\/[a-z0-9-_.-]+)+$`)), + resource.TestCheckResourceAttr(resourceName, "bucket", bucketName), + resource.TestCheckResourceAttr(resourceName, "key", key), + resource.TestCheckResourceAttr(resourceName, "source", source), + resource.TestCheckResourceAttr(resourceName, "content_type", newContentType), + ), + }, + }, + }) +} + func testAccCheckObjectExists(n string, provider *schema.Provider) resource.TestCheckFunc { return func(s *terraform.State) error { resource, ok := s.RootModule().Resources[n] @@ -112,6 +201,20 @@ func testAccObjectConfig(bucketName, key, source string) string { }`, bucketName, key, source) } +func testAccObjectContentType(bucketName, key, source, contentType string) string { + return fmt.Sprintf(` + resource "ncloud_objectstorage_bucket" "testing_bucket" { + bucket_name = "%[1]s" + } + + resource "ncloud_objectstorage_object" "testing_object" { + bucket = ncloud_objectstorage_bucket.testing_bucket.bucket_name + key = "%[2]s" + source = "%[3]s" + content_type = "%[4]s" + }`, bucketName, key, source, contentType) +} + func CreateTempFile(t *testing.T, content, key string) *os.File { tmpFile, err := os.CreateTemp("", key) if err != nil {