Skip to content

Commit

Permalink
cos satellite bucket
Browse files Browse the repository at this point in the history
cos-satellite does not support firewall — update docs based on it

added conflict for satellite location id

added conflict for satellite location id

cos-satellite does not support allowedip

cos-satellite commit

updated files

updated based on comments

committed

updated based on the changes
  • Loading branch information
Your Name committed May 24, 2022
1 parent 3dbc25a commit 9929884
Show file tree
Hide file tree
Showing 10 changed files with 1,003 additions and 118 deletions.
41 changes: 41 additions & 0 deletions examples/ibm-cos-bucket/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,46 @@ data "ibm_cos_bucket" "standard-ams03" {

* [Cloud Object Storage](https://github.com/IBM-Cloud/terraform-provider-ibm/tree/master/examples/ibm-cos-bucket)

<!-- COS SATELLITE PROJECT -->

## COS SATELLITE

The following example creates a bucket and adds object versioning and expiration features on a COS Satellite location. As of now we use an existing COS instance to create the bucket, so there is no need to create a COS instance via Terraform. Resource groups are not available in Satellite, and `storage_class` cannot be used together with a Satellite location ID.

* [IBM Satellite](https://cloud.ibm.com/docs/satellite?topic=satellite-getting-started)
* [IBM COS Satellite](https://cloud.ibm.com/docs/cloud-object-storage?topic=cloud-object-storage-about-cos-satellite)

## Example Usage

```terraform
# Look up the existing resource group the Satellite location is created in.
data "ibm_resource_group" "group" {
  name = "Default"
}

# Create the Satellite location that will host the COS bucket.
resource "ibm_satellite_location" "create_location" {
  location          = var.location
  zones             = var.location_zones
  managed_from      = var.managed_from
  resource_group_id = data.ibm_resource_group.group.id
}

# Create a COS bucket on the Satellite location. An existing COS service
# instance is used (via a data source), so no instance is created here.
# Note: storage_class cannot be combined with satellite_location_id.
resource "ibm_cos_bucket" "cos_bucket" {
  bucket_name          = "cos-sat-terraform"
  resource_instance_id = data.ibm_resource_instance.cos_instance.id
  # Reference the managed resource declared above — a resource is referenced
  # without the "data." prefix (the original "data.ibm_satellite_location"
  # reference would fail: no such data block exists in this example).
  satellite_location_id = ibm_satellite_location.create_location.id

  # Keep every version of each object in the bucket.
  object_versioning {
    enable = true
  }

  # Expire objects under the "logs/" prefix after 20 days (rule disabled here).
  expire_rule {
    rule_id = "bucket-tf-rule1"
    enable  = false
    days    = 20
    prefix  = "logs/"
  }
}
```


<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->

## Requirements
Expand All @@ -213,6 +253,7 @@ data "ibm_cos_bucket" "standard-ams03" {
|------|-------------|------|---------|
| bucket_name | Name of the bucket. | `string` | yes |
| resource_group_name | Name of the resource group. | `string` | yes |
| satellite_location_id | The ID of the IBM Cloud Satellite location hosting the bucket. | `string` | no |
| storage | The storage class that you want to use for the bucket. Supported values are **standard, vault, cold, flex, and smart**.| `string` | no |
| region | The location for a cross-regional bucket. Supported values are **us, eu, and ap**. | `string` | no |
| read_data_events | Enables sending log data to Activity Tracker and LogDNA to provide visibility into object read and write events. | `array` | no
Expand Down
16 changes: 16 additions & 0 deletions examples/ibm-cos-bucket/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -112,3 +112,19 @@ resource "ibm_cos_bucket_object" "base64" {
content_base64 = "RW5jb2RlZCBpbiBiYXNlNjQ="
key = "base64.txt"
}

// Satellite Location bucket example: creates a COS bucket on an IBM Cloud
// Satellite location, with object versioning enabled and an (inactive)
// 20-day expiration rule for objects under the "logs/" prefix.
// NOTE(review): resource_instance_id is a hard-coded service-instance CRN —
// consider moving it to a variable so the example is reusable.
resource "ibm_cos_bucket" "cos_bucket" {
bucket_name = var.bucket_name
resource_instance_id = "crn:v1:bluemix:public:cloud-object-storage:satloc_wdc_c8jh7hfw0ppoapdqrmpg:a/d0c259a490e4488c83b62707ad3f5182:756ad6b6-72a6-4e55-8c94-b02e51e708b3::"
satellite_location_id = var.satellite_location_id
// Retain every version of each object stored in the bucket.
object_versioning {
enable = true
}
// Lifecycle rule: expire "logs/" objects after 20 days; disabled (enable = false).
expire_rule {
rule_id = "bucket-tf-rule1"
enable = false
days = 20
prefix = "logs/"
}
}
2 changes: 1 addition & 1 deletion examples/ibm-cos-bucket/provider.tf
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@ provider "ibm" {
iaas_classic_username = var.iaas_classic_username
iaas_classic_api_key = var.iaas_classic_api_key
ibmcloud_api_key = var.ibmcloud_api_key
}
}
8 changes: 6 additions & 2 deletions examples/ibm-cos-bucket/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ variable "expire_days" {
}

variable "expire_date" {
default = ""
default = "2022-06-09"
}

variable "expire_prefix" {
Expand Down Expand Up @@ -88,4 +88,8 @@ variable "maximum_retention" {

variable "quota" {
default = "1"
}
}

# Satellite location used by the ibm_cos_bucket example; leave empty when not
# targeting a Satellite location.
variable "satellite_location_id" {
  description = "ID of the IBM Cloud Satellite location hosting the COS bucket (empty when unused)."
  default     = ""
}
14 changes: 14 additions & 0 deletions ibm/acctest/acctest.go
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,10 @@ var Scc_posture_collector_id_scope_update []string
//ROKS Cluster
var ClusterName string

// Satellite acceptance-test configuration, populated in init() from the
// SATELLITE_LOCATION_ID and SATELLITE_RESOURCE_INSTANCE_ID environment
// variables for the ibm_cos_bucket Satellite tests.
var Satellite_location_id string
var Satellite_Resource_instance_id string

func init() {
testlogger := os.Getenv("TF_LOG")
if testlogger != "" {
Expand Down Expand Up @@ -891,6 +895,16 @@ func init() {
if ClusterName == "" {
fmt.Println("[INFO] Set the environment variable IBM_CONTAINER_CLUSTER_NAME for ibm_container_nlb_dns resource or datasource else tests will fail if this is not set correctly")
}

Satellite_location_id = os.Getenv("SATELLITE_LOCATION_ID")
if Satellite_location_id == "" {
fmt.Println("[INFO] Set the environment variable SATELLITE_LOCATION_ID for ibm_cos_bucket satellite location resource or datasource else tests will fail if this is not set correctly")
}

Satellite_Resource_instance_id = os.Getenv("SATELLITE_RESOURCE_INSTANCE_ID")
if Satellite_Resource_instance_id == "" {
fmt.Println("[INFO] Set the environment variable SATELLITE_RESOURCE_INSTANCE_ID for ibm_cos_bucket satellite location resource or datasource else tests will fail if this is not set correctly")
}
}

var TestAccProviders map[string]*schema.Provider
Expand Down
120 changes: 81 additions & 39 deletions ibm/service/cos/data_source_ibm_cos_bucket.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,6 @@ package cos

import (
"fmt"
"strings"
"time"

"github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns"
"github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex"
"github.com/IBM-Cloud/terraform-provider-ibm/ibm/validate"
Expand All @@ -18,6 +15,8 @@ import (
"github.com/IBM/ibm-cos-sdk-go/aws/session"
"github.com/IBM/ibm-cos-sdk-go/service/s3"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"strings"
"time"
)

var bucketTypes = []string{"single_site_location", "region_location", "cross_region_location"}
Expand All @@ -32,24 +31,31 @@ func DataSourceIBMCosBucket() *schema.Resource {
Required: true,
},
"bucket_type": {
Type: schema.TypeString,
ValidateFunc: validate.ValidateAllowedStringValues(bucketTypes),
Required: true,
Type: schema.TypeString,
ValidateFunc: validate.ValidateAllowedStringValues(bucketTypes),
Optional: true,
ConflictsWith: []string{"satellite_location_id"},
},
"bucket_region": {
Type: schema.TypeString,
Required: true,
Type: schema.TypeString,
Optional: true,
ConflictsWith: []string{"satellite_location_id"},
},
"resource_instance_id": {
Type: schema.TypeString,
Required: true,
},
"satellite_location_id": {
Type: schema.TypeString,
Computed: true,
},
"endpoint_type": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validate.ValidateAllowedStringValues([]string{"public", "private", "direct"}),
Description: "public or private",
Default: "public",
Type: schema.TypeString,
Optional: true,
ValidateFunc: validate.ValidateAllowedStringValues([]string{"public", "private", "direct"}),
Description: "public or private",
ConflictsWith: []string{"satellite_location_id"},
Default: "public",
},
"crn": {
Type: schema.TypeString,
Expand Down Expand Up @@ -327,14 +333,32 @@ func dataSourceIBMCosBucketRead(d *schema.ResourceData, meta interface{}) error
serviceID := d.Get("resource_instance_id").(string)
bucketType := d.Get("bucket_type").(string)
bucketRegion := d.Get("bucket_region").(string)
var endpointType = d.Get("endpoint_type").(string)
apiEndpoint, apiEndpointPrivate, directApiEndpoint := SelectCosApi(bucketLocationConvert(bucketType), bucketRegion)
if endpointType == "private" {
apiEndpoint = apiEndpointPrivate
endpointType := d.Get("endpoint_type").(string)

var satlc_id, apiEndpoint, apiEndpointPrivate, directApiEndpoint string

if satlc, ok := d.GetOk("satellite_location_id"); ok {
satlc_id = satlc.(string)
satloc_guid := strings.Split(serviceID, ":")
bucketsatcrn := satloc_guid[7]
serviceID = bucketsatcrn
bucketType = "sl"
}
if endpointType == "direct" {
apiEndpoint = directApiEndpoint

if bucketType == "sl" {
apiEndpoint = SelectSatlocCosApi(bucketLocationConvert(bucketType), serviceID, satlc_id)

} else {
apiEndpoint, apiEndpointPrivate, directApiEndpoint = SelectCosApi(bucketLocationConvert(bucketType), bucketRegion)
if endpointType == "private" {
apiEndpoint = apiEndpointPrivate
}
if endpointType == "direct" {
apiEndpoint = directApiEndpoint
}

}

apiEndpoint = conns.EnvFallBack([]string{"IBMCLOUD_COS_ENDPOINT"}, apiEndpoint)
if apiEndpoint == "" {
return fmt.Errorf("[ERROR] The endpoint doesn't exists for given location %s and endpoint type %s", bucketRegion, endpointType)
Expand Down Expand Up @@ -372,26 +396,32 @@ func dataSourceIBMCosBucketRead(d *schema.ResourceData, meta interface{}) error
return fmt.Errorf("failed waiting for bucket %s to be created, %v",
bucketName, err)
}
bucketLocationInput := &s3.GetBucketLocationInput{
Bucket: aws.String(bucketName),
}
bucketLocationConstraint, err := s3Client.GetBucketLocation(bucketLocationInput)
if err != nil {
return err
}
bLocationConstraint := *bucketLocationConstraint.LocationConstraint

if singleSiteLocationRegex.MatchString(bLocationConstraint) {
d.Set("single_site_location", strings.Split(bLocationConstraint, "-")[0])
d.Set("storage_class", strings.Split(bLocationConstraint, "-")[1])
}
if regionLocationRegex.MatchString(bLocationConstraint) {
d.Set("region_location", fmt.Sprintf("%s-%s", strings.Split(bLocationConstraint, "-")[0], strings.Split(bLocationConstraint, "-")[1]))
d.Set("storage_class", strings.Split(bLocationConstraint, "-")[2])
}
if crossRegionLocationRegex.MatchString(bLocationConstraint) {
d.Set("cross_region_location", strings.Split(bLocationConstraint, "-")[0])
d.Set("storage_class", strings.Split(bLocationConstraint, "-")[1])
if bucketType != "sl" {

bucketLocationInput := &s3.GetBucketLocationInput{
Bucket: aws.String(bucketName),
}
bucketLocationConstraint, err := s3Client.GetBucketLocation(bucketLocationInput)
if err != nil {
return err
}
bLocationConstraint := *bucketLocationConstraint.LocationConstraint

if singleSiteLocationRegex.MatchString(bLocationConstraint) {
d.Set("single_site_location", strings.Split(bLocationConstraint, "-")[0])
d.Set("storage_class", strings.Split(bLocationConstraint, "-")[1])
}
if regionLocationRegex.MatchString(bLocationConstraint) {
d.Set("region_location", fmt.Sprintf("%s-%s", strings.Split(bLocationConstraint, "-")[0], strings.Split(bLocationConstraint, "-")[1]))
d.Set("storage_class", strings.Split(bLocationConstraint, "-")[2])
}
if crossRegionLocationRegex.MatchString(bLocationConstraint) {
d.Set("cross_region_location", strings.Split(bLocationConstraint, "-")[0])
d.Set("storage_class", strings.Split(bLocationConstraint, "-")[1])
}
} else {
d.Set("satellite_location_id", satlc_id)
}

head, err := s3Client.HeadBucket(headInput)
Expand Down Expand Up @@ -420,6 +450,15 @@ func dataSourceIBMCosBucketRead(d *schema.ResourceData, meta interface{}) error
if endpointType == "private" {
sess.SetServiceURL("https://config.private.cloud-object-storage.cloud.ibm.com/v1")
}

if bucketType == "sl" {

satconfig := fmt.Sprintf("https://config.%s.%s.cloud-object-storage.appdomain.cloud/v1", serviceID, bucketType)

sess.SetServiceURL(satconfig)

}

bucketPtr, response, err := sess.GetBucketConfig(getBucketConfigOptions)

if err != nil {
Expand Down Expand Up @@ -520,7 +559,10 @@ func bucketLocationConvert(locationtype string) string {
return "rl"
}
if locationtype == "single_site_location" {
return "crl"
return "ssl"
}
if locationtype == "satellite_location_id" {
return "sl"
}
return ""
}
Loading

0 comments on commit 9929884

Please sign in to comment.