diff --git a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3.go b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3.go index fe54f94..89f0674 100644 --- a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3.go +++ b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3.go @@ -1,5 +1,5 @@ /** - * (C) Copyright IBM Corp. 2022. + * (C) Copyright IBM Corp. 2023. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -1280,6 +1280,7 @@ func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) GetLogForwardingConfigWith // ConfigurePlatformLogging : Enable or disable log forwarding // Enable or disable log forwarding from IBM Analytics Engine to IBM Log Analysis server. +// *Note:* Deprecated. Use the log forwarding config api instead. func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) ConfigurePlatformLogging(configurePlatformLoggingOptions *ConfigurePlatformLoggingOptions) (result *LoggingConfigurationResponse, response *core.DetailedResponse, err error) { return ibmAnalyticsEngineApi.ConfigurePlatformLoggingWithContext(context.Background(), configurePlatformLoggingOptions) } @@ -1350,6 +1351,7 @@ func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) ConfigurePlatformLoggingWi // GetLoggingConfiguration : Retrieve the logging configuration for a given instance id // Retrieve the logging configuration of a given Analytics Engine instance. +// *Note:* Deprecated. Use the log forwarding config api instead. func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) GetLoggingConfiguration(getLoggingConfigurationOptions *GetLoggingConfigurationOptions) (result *LoggingConfigurationResponse, response *core.DetailedResponse, err error) { return ibmAnalyticsEngineApi.GetLoggingConfigurationWithContext(context.Background(), getLoggingConfigurationOptions) } @@ -1408,6 +1410,174 @@ func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) GetLoggingConfigurationWit return } +// StartSparkHistoryServer : Start Spark history server +// Start the Spark history server for the given Analytics Engine instance. 
+func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) StartSparkHistoryServer(startSparkHistoryServerOptions *StartSparkHistoryServerOptions) (result *SparkHistoryServerResponse, response *core.DetailedResponse, err error) { + return ibmAnalyticsEngineApi.StartSparkHistoryServerWithContext(context.Background(), startSparkHistoryServerOptions) +} + +// StartSparkHistoryServerWithContext is an alternate form of the StartSparkHistoryServer method which supports a Context parameter +func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) StartSparkHistoryServerWithContext(ctx context.Context, startSparkHistoryServerOptions *StartSparkHistoryServerOptions) (result *SparkHistoryServerResponse, response *core.DetailedResponse, err error) { + err = core.ValidateNotNil(startSparkHistoryServerOptions, "startSparkHistoryServerOptions cannot be nil") + if err != nil { + return + } + err = core.ValidateStruct(startSparkHistoryServerOptions, "startSparkHistoryServerOptions") + if err != nil { + return + } + + pathParamsMap := map[string]string{ + "instance_id": *startSparkHistoryServerOptions.InstanceID, + } + + builder := core.NewRequestBuilder(core.POST) + builder = builder.WithContext(ctx) + builder.EnableGzipCompression = ibmAnalyticsEngineApi.GetEnableGzipCompression() + _, err = builder.ResolveRequestURL(ibmAnalyticsEngineApi.Service.Options.URL, `/v3/analytics_engines/{instance_id}/spark_history_server`, pathParamsMap) + if err != nil { + return + } + + for headerName, headerValue := range startSparkHistoryServerOptions.Headers { + builder.AddHeader(headerName, headerValue) + } + + sdkHeaders := common.GetSdkHeaders("ibm_analytics_engine_api", "V3", "StartSparkHistoryServer") + for headerName, headerValue := range sdkHeaders { + builder.AddHeader(headerName, headerValue) + } + builder.AddHeader("Accept", "application/json") + + request, err := builder.Build() + if err != nil { + return + } + + var rawResponse map[string]json.RawMessage + response, err = ibmAnalyticsEngineApi.Service.Request(request, &rawResponse) + if err != nil { + return + } + if rawResponse != nil { + err = core.UnmarshalModel(rawResponse, "", &result, UnmarshalSparkHistoryServerResponse) + if err != nil { + return + } + response.Result = result + } + + return +} + +// GetSparkHistoryServer : Get Spark history server details +// Get the details of the Spark history server of the given Analytics Engine instance. 
+func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) GetSparkHistoryServer(getSparkHistoryServerOptions *GetSparkHistoryServerOptions) (result *SparkHistoryServerResponse, response *core.DetailedResponse, err error) { + return ibmAnalyticsEngineApi.GetSparkHistoryServerWithContext(context.Background(), getSparkHistoryServerOptions) +} + +// GetSparkHistoryServerWithContext is an alternate form of the GetSparkHistoryServer method which supports a Context parameter +func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) GetSparkHistoryServerWithContext(ctx context.Context, getSparkHistoryServerOptions *GetSparkHistoryServerOptions) (result *SparkHistoryServerResponse, response *core.DetailedResponse, err error) { + err = core.ValidateNotNil(getSparkHistoryServerOptions, "getSparkHistoryServerOptions cannot be nil") + if err != nil { + return + } + err = core.ValidateStruct(getSparkHistoryServerOptions, "getSparkHistoryServerOptions") + if err != nil { + return + } + + pathParamsMap := map[string]string{ + "instance_id": *getSparkHistoryServerOptions.InstanceID, + } + + builder := core.NewRequestBuilder(core.GET) + builder = builder.WithContext(ctx) + builder.EnableGzipCompression = ibmAnalyticsEngineApi.GetEnableGzipCompression() + _, err = builder.ResolveRequestURL(ibmAnalyticsEngineApi.Service.Options.URL, `/v3/analytics_engines/{instance_id}/spark_history_server`, pathParamsMap) + if err != nil { + return + } + + for headerName, headerValue := range getSparkHistoryServerOptions.Headers { + builder.AddHeader(headerName, headerValue) + } + + sdkHeaders := common.GetSdkHeaders("ibm_analytics_engine_api", "V3", "GetSparkHistoryServer") + for headerName, headerValue := range sdkHeaders { + builder.AddHeader(headerName, headerValue) + } + builder.AddHeader("Accept", "application/json") + + request, err := builder.Build() + if err != nil { + return + } + + var rawResponse map[string]json.RawMessage + response, err = ibmAnalyticsEngineApi.Service.Request(request, &rawResponse) + if err != nil { + return + } + if rawResponse != nil { + err = core.UnmarshalModel(rawResponse, "", &result, UnmarshalSparkHistoryServerResponse) + if err != nil { + return + } + response.Result = result + } + + return +} + +// StopSparkHistoryServer : Stop Spark history server +// Stop the Spark history server of the given Analytics Engine instance. 
+func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) StopSparkHistoryServer(stopSparkHistoryServerOptions *StopSparkHistoryServerOptions) (response *core.DetailedResponse, err error) { + return ibmAnalyticsEngineApi.StopSparkHistoryServerWithContext(context.Background(), stopSparkHistoryServerOptions) +} + +// StopSparkHistoryServerWithContext is an alternate form of the StopSparkHistoryServer method which supports a Context parameter +func (ibmAnalyticsEngineApi *IbmAnalyticsEngineApiV3) StopSparkHistoryServerWithContext(ctx context.Context, stopSparkHistoryServerOptions *StopSparkHistoryServerOptions) (response *core.DetailedResponse, err error) { + err = core.ValidateNotNil(stopSparkHistoryServerOptions, "stopSparkHistoryServerOptions cannot be nil") + if err != nil { + return + } + err = core.ValidateStruct(stopSparkHistoryServerOptions, "stopSparkHistoryServerOptions") + if err != nil { + return + } + + pathParamsMap := map[string]string{ + "instance_id": *stopSparkHistoryServerOptions.InstanceID, + } + + builder := core.NewRequestBuilder(core.DELETE) + builder = builder.WithContext(ctx) + builder.EnableGzipCompression = ibmAnalyticsEngineApi.GetEnableGzipCompression() + _, err = builder.ResolveRequestURL(ibmAnalyticsEngineApi.Service.Options.URL, `/v3/analytics_engines/{instance_id}/spark_history_server`, pathParamsMap) + if err != nil { + return + } + + for headerName, headerValue := range stopSparkHistoryServerOptions.Headers { + builder.AddHeader(headerName, headerValue) + } + + sdkHeaders := common.GetSdkHeaders("ibm_analytics_engine_api", "V3", "StopSparkHistoryServer") + for headerName, headerValue := range sdkHeaders { + builder.AddHeader(headerName, headerValue) + } + + request, err := builder.Build() + if err != nil { + return + } + + response, err = ibmAnalyticsEngineApi.Service.Request(request, nil) + + return +} + // Application : Details of a Spark application. type Application struct { // Identifier provided by Analytics Engine service for the Spark application. @@ -1428,14 +1598,23 @@ type Application struct { // State of the Spark application. State *string `json:"state,omitempty"` + // URL of the Apache Spark web UI that is available when the application is running. + SparkUi *string `json:"spark_ui,omitempty"` + + // Time when the application was submitted. + SubmissionTime *strfmt.DateTime `json:"submission_time,omitempty"` + // Time when the application was started. - StartTime *string `json:"start_time,omitempty"` + StartTime *strfmt.DateTime `json:"start_time,omitempty"` // Time when the application run ended in success, failure or was stopped. - EndTime *string `json:"end_time,omitempty"` + EndTime *strfmt.DateTime `json:"end_time,omitempty"` - // Time when the application was completed. - FinishTime *string `json:"finish_time,omitempty"` + // (deprecated) Time when the application was completed. + FinishTime *strfmt.DateTime `json:"finish_time,omitempty"` + + // Time when the application will be automatically stopped by the service. + AutoTerminationTime *strfmt.DateTime `json:"auto_termination_time,omitempty"` } // Constants associated with the Application.State property. 
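The three operations added above (StartSparkHistoryServer, GetSparkHistoryServer, StopSparkHistoryServer) are new in this patch. A minimal usage sketch follows; the IAM API key placeholder, the instance GUID, and the module import path are assumptions for illustration and should be adjusted for your environment.

package main

import (
	"fmt"
	"log"

	"github.com/IBM/go-sdk-core/v5/core"
	"github.com/IBM/ibm-iae-go-sdk/ibmanalyticsengineapiv3" // assumption: adjust to this repository's module path
)

func main() {
	// Construct the service client; the API key value is a placeholder.
	service, err := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{
		Authenticator: &core.IamAuthenticator{ApiKey: "<apikey>"},
	})
	if err != nil {
		log.Fatal(err)
	}

	instanceID := "dc0e9889-eab2-4t9e-9441-566209499546" // example GUID reused from the tests below

	// Start the history server for the instance and print its reported state.
	status, _, err := service.StartSparkHistoryServer(service.NewStartSparkHistoryServerOptions(instanceID))
	if err != nil {
		log.Fatal(err)
	}
	if status.State != nil {
		fmt.Println("history server state:", *status.State)
	}

	// Fetch its details later; AutoTerminationTime reports when the service will stop it automatically.
	status, _, err = service.GetSparkHistoryServer(service.NewGetSparkHistoryServerOptions(instanceID))
	if err != nil {
		log.Fatal(err)
	}
	if status.AutoTerminationTime != nil {
		fmt.Println("auto termination at:", status.AutoTerminationTime.String())
	}

	// Stop it when it is no longer needed; this operation returns no result body.
	if _, err := service.StopSparkHistoryServer(service.NewStopSparkHistoryServerOptions(instanceID)); err != nil {
		log.Fatal(err)
	}
}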
@@ -1443,15 +1622,11 @@ type Application struct { const ( Application_State_Accepted = "accepted" Application_State_AutoTerminated = "auto_terminated" - Application_State_Error = "error" Application_State_Failed = "failed" Application_State_Finished = "finished" Application_State_OpsTerminated = "ops_terminated" Application_State_Running = "running" Application_State_Stopped = "stopped" - Application_State_Submitted = "submitted" - Application_State_Unknown = "unknown" - Application_State_Waiting = "waiting" ) // UnmarshalApplication unmarshals an instance of Application from the specified map of raw messages. @@ -1481,6 +1656,14 @@ func UnmarshalApplication(m map[string]json.RawMessage, result interface{}) (err if err != nil { return } + err = core.UnmarshalPrimitive(m, "spark_ui", &obj.SparkUi) + if err != nil { + return + } + err = core.UnmarshalPrimitive(m, "submission_time", &obj.SubmissionTime) + if err != nil { + return + } err = core.UnmarshalPrimitive(m, "start_time", &obj.StartTime) if err != nil { return @@ -1493,6 +1676,10 @@ func UnmarshalApplication(m map[string]json.RawMessage, result interface{}) (err if err != nil { return } + err = core.UnmarshalPrimitive(m, "auto_termination_time", &obj.AutoTerminationTime) + if err != nil { + return + } reflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj)) return } @@ -1629,17 +1816,26 @@ type ApplicationGetResponse struct { // State of the Spark application. State *string `json:"state,omitempty"` + // URL of the Apache Spark web UI that is available when the application is running. + SparkUi *string `json:"spark_ui,omitempty"` + // List of additional information messages on the current state of the application. StateDetails []ApplicationGetResponseStateDetailsItem `json:"state_details,omitempty"` - // Application start time in the format YYYY-MM-DDTHH:mm:ssZ. + // Time when the application was submitted. + SubmissionTime *strfmt.DateTime `json:"submission_time,omitempty"` + + // Time when the application started, in the format YYYY-MM-DDTHH:mm:ssZ. StartTime *strfmt.DateTime `json:"start_time,omitempty"` - // Application end time in the format YYYY-MM-DDTHH:mm:ssZ. + // Time when the application ended either in success or failure, in the format YYYY-MM-DDTHH:mm:ssZ. EndTime *strfmt.DateTime `json:"end_time,omitempty"` - // Application finish time in the format YYYY-MM-DDTHH:mm:ssZ. + // (deprecated) Time when the application completed successfully, in the format YYYY-MM-DDTHH:mm:ssZ. FinishTime *strfmt.DateTime `json:"finish_time,omitempty"` + + // Time when the application will be automatically stopped by the service. + AutoTerminationTime *strfmt.DateTime `json:"auto_termination_time,omitempty"` } // Constants associated with the ApplicationGetResponse.State property. @@ -1647,15 +1843,11 @@ type ApplicationGetResponse struct { const ( ApplicationGetResponse_State_Accepted = "accepted" ApplicationGetResponse_State_AutoTerminated = "auto_terminated" - ApplicationGetResponse_State_Error = "error" ApplicationGetResponse_State_Failed = "failed" ApplicationGetResponse_State_Finished = "finished" ApplicationGetResponse_State_OpsTerminated = "ops_terminated" ApplicationGetResponse_State_Running = "running" ApplicationGetResponse_State_Stopped = "stopped" - ApplicationGetResponse_State_Submitted = "submitted" - ApplicationGetResponse_State_Unknown = "unknown" - ApplicationGetResponse_State_Waiting = "waiting" ) // UnmarshalApplicationGetResponse unmarshals an instance of ApplicationGetResponse from the specified map of raw messages. 
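Because the Application timestamp fields above change from *string to *strfmt.DateTime (and SubmissionTime and AutoTerminationTime are added), callers can now do time arithmetic on them directly. A small illustrative helper, assuming an Application value obtained from this SDK; the package name and import path are placeholders:

package example

import (
	"fmt"
	"time"

	"github.com/IBM/ibm-iae-go-sdk/ibmanalyticsengineapiv3" // assumption: adjust to this repository's module path
)

// printRunDuration converts the strfmt.DateTime fields to time.Time
// (strfmt.DateTime is a defined type over time.Time) to compute a run duration.
// FinishTime is kept for backward compatibility but is deprecated; EndTime is preferred.
func printRunDuration(app *ibmanalyticsengineapiv3.Application) {
	if app.StartTime != nil && app.EndTime != nil {
		start := time.Time(*app.StartTime)
		end := time.Time(*app.EndTime)
		fmt.Printf("run duration: %s\n", end.Sub(start))
	}
	if app.AutoTerminationTime != nil {
		fmt.Println("will be auto-terminated at:", app.AutoTerminationTime.String())
	}
}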
@@ -1681,10 +1873,18 @@ func UnmarshalApplicationGetResponse(m map[string]json.RawMessage, result interf if err != nil { return } + err = core.UnmarshalPrimitive(m, "spark_ui", &obj.SparkUi) + if err != nil { + return + } err = core.UnmarshalModel(m, "state_details", &obj.StateDetails, UnmarshalApplicationGetResponseStateDetailsItem) if err != nil { return } + err = core.UnmarshalPrimitive(m, "submission_time", &obj.SubmissionTime) + if err != nil { + return + } err = core.UnmarshalPrimitive(m, "start_time", &obj.StartTime) if err != nil { return @@ -1697,6 +1897,10 @@ func UnmarshalApplicationGetResponse(m map[string]json.RawMessage, result interf if err != nil { return } + err = core.UnmarshalPrimitive(m, "auto_termination_time", &obj.AutoTerminationTime) + if err != nil { + return + } reflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj)) return } @@ -1749,13 +1953,16 @@ type ApplicationGetStateResponse struct { State *string `json:"state,omitempty"` // Time when the application was started. - StartTime *string `json:"start_time,omitempty"` + StartTime *strfmt.DateTime `json:"start_time,omitempty"` // Time when the application run ended in success, failure or was stopped. - EndTime *string `json:"end_time,omitempty"` + EndTime *strfmt.DateTime `json:"end_time,omitempty"` + + // (deprecated) Time when the application was completed. + FinishTime *strfmt.DateTime `json:"finish_time,omitempty"` - // Time when the application was completed. - FinishTime *string `json:"finish_time,omitempty"` + // Time when the application will be automatically stopped by the service. + AutoTerminationTime *strfmt.DateTime `json:"auto_termination_time,omitempty"` } // Constants associated with the ApplicationGetStateResponse.State property. @@ -1763,15 +1970,11 @@ type ApplicationGetStateResponse struct { const ( ApplicationGetStateResponse_State_Accepted = "accepted" ApplicationGetStateResponse_State_AutoTerminated = "auto_terminated" - ApplicationGetStateResponse_State_Error = "error" ApplicationGetStateResponse_State_Failed = "failed" ApplicationGetStateResponse_State_Finished = "finished" ApplicationGetStateResponse_State_OpsTerminated = "ops_terminated" ApplicationGetStateResponse_State_Running = "running" ApplicationGetStateResponse_State_Stopped = "stopped" - ApplicationGetStateResponse_State_Submitted = "submitted" - ApplicationGetStateResponse_State_Unknown = "unknown" - ApplicationGetStateResponse_State_Waiting = "waiting" ) // UnmarshalApplicationGetStateResponse unmarshals an instance of ApplicationGetStateResponse from the specified map of raw messages. 
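The application state constant set is trimmed in this patch to accepted, running, finished, failed, stopped, auto_terminated and ops_terminated. A polling sketch against GetApplicationState that waits for a terminal state is shown below; the options constructor signature, polling interval and attempt limit are assumptions made for illustration:

package example

import (
	"fmt"
	"time"

	"github.com/IBM/ibm-iae-go-sdk/ibmanalyticsengineapiv3" // assumption: adjust to this repository's module path
)

// waitForTerminalState polls the application state until it reaches one of the
// terminal values that remain after this change, or the attempt limit is hit.
func waitForTerminalState(service *ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3, instanceID, applicationID string) (string, error) {
	opts := service.NewGetApplicationStateOptions(instanceID, applicationID)
	for attempt := 0; attempt < 60; attempt++ {
		stateResp, _, err := service.GetApplicationState(opts)
		if err != nil {
			return "", err
		}
		if stateResp.State != nil {
			switch *stateResp.State {
			case ibmanalyticsengineapiv3.ApplicationGetStateResponse_State_Finished,
				ibmanalyticsengineapiv3.ApplicationGetStateResponse_State_Failed,
				ibmanalyticsengineapiv3.ApplicationGetStateResponse_State_Stopped,
				ibmanalyticsengineapiv3.ApplicationGetStateResponse_State_AutoTerminated,
				ibmanalyticsengineapiv3.ApplicationGetStateResponse_State_OpsTerminated:
				return *stateResp.State, nil
			}
		}
		time.Sleep(10 * time.Second)
	}
	return "", fmt.Errorf("application %s did not reach a terminal state", applicationID)
}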
@@ -1797,6 +2000,10 @@ func UnmarshalApplicationGetStateResponse(m map[string]json.RawMessage, result i if err != nil { return } + err = core.UnmarshalPrimitive(m, "auto_termination_time", &obj.AutoTerminationTime) + if err != nil { + return + } reflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj)) return } @@ -1815,15 +2022,11 @@ type ApplicationResponse struct { const ( ApplicationResponse_State_Accepted = "accepted" ApplicationResponse_State_AutoTerminated = "auto_terminated" - ApplicationResponse_State_Error = "error" ApplicationResponse_State_Failed = "failed" ApplicationResponse_State_Finished = "finished" ApplicationResponse_State_OpsTerminated = "ops_terminated" ApplicationResponse_State_Running = "running" ApplicationResponse_State_Stopped = "stopped" - ApplicationResponse_State_Submitted = "submitted" - ApplicationResponse_State_Unknown = "unknown" - ApplicationResponse_State_Waiting = "waiting" ) // UnmarshalApplicationResponse unmarshals an instance of ApplicationResponse from the specified map of raw messages. @@ -2380,6 +2583,34 @@ func (options *GetResourceConsumptionLimitsOptions) SetHeaders(param map[string] return options } +// GetSparkHistoryServerOptions : The GetSparkHistoryServer options. +type GetSparkHistoryServerOptions struct { + // The ID of the Analytics Engine instance to which the Spark history server belongs. + InstanceID *string `json:"instance_id" validate:"required,ne="` + + // Allows users to set headers on API requests + Headers map[string]string +} + +// NewGetSparkHistoryServerOptions : Instantiate GetSparkHistoryServerOptions +func (*IbmAnalyticsEngineApiV3) NewGetSparkHistoryServerOptions(instanceID string) *GetSparkHistoryServerOptions { + return &GetSparkHistoryServerOptions{ + InstanceID: core.StringPtr(instanceID), + } +} + +// SetInstanceID : Allow user to set InstanceID +func (_options *GetSparkHistoryServerOptions) SetInstanceID(instanceID string) *GetSparkHistoryServerOptions { + _options.InstanceID = core.StringPtr(instanceID) + return _options +} + +// SetHeaders : Allow user to set Headers +func (options *GetSparkHistoryServerOptions) SetHeaders(param map[string]string) *GetSparkHistoryServerOptions { + options.Headers = param + return options +} + // Instance : Details of Analytics Engine instance. type Instance struct { // GUID of the Analytics Engine instance. @@ -2646,15 +2877,11 @@ type ListApplicationsOptions struct { const ( ListApplicationsOptions_State_Accepted = "accepted" ListApplicationsOptions_State_AutoTerminated = "auto_terminated" - ListApplicationsOptions_State_Error = "error" ListApplicationsOptions_State_Failed = "failed" ListApplicationsOptions_State_Finished = "finished" ListApplicationsOptions_State_OpsTerminated = "ops_terminated" ListApplicationsOptions_State_Running = "running" ListApplicationsOptions_State_Stopped = "stopped" - ListApplicationsOptions_State_Submitted = "submitted" - ListApplicationsOptions_State_Unknown = "unknown" - ListApplicationsOptions_State_Waiting = "waiting" ) // NewListApplicationsOptions : Instantiate ListApplicationsOptions @@ -2737,7 +2964,7 @@ func UnmarshalLogForwardingConfigResponseLogServer(m map[string]json.RawMessage, return } -// LoggingConfigurationResponse : Response of logging API. +// LoggingConfigurationResponse : (deprecated) Response of logging API. type LoggingConfigurationResponse struct { // component array. 
Components []string `json:"components,omitempty"` @@ -3048,6 +3275,121 @@ func (options *SetInstanceHomeOptions) SetHeaders(param map[string]string) *SetI return options } +// SparkHistoryServerResponse : Status of the Spark history server. +type SparkHistoryServerResponse struct { + // State of the Spark history server. + State *string `json:"state,omitempty"` + + // Number of cpu cores used by the Spark history server. + Cores *string `json:"cores,omitempty"` + + // Amount of memory used by the Spark history server. + Memory *string `json:"memory,omitempty"` + + // Time when the Spark history server was started. + StartTime *strfmt.DateTime `json:"start_time,omitempty"` + + // Time when the Spark history server was stopped. + StopTime *strfmt.DateTime `json:"stop_time,omitempty"` + + // Time when the Spark history server will be stopped automatically. + AutoTerminationTime *strfmt.DateTime `json:"auto_termination_time,omitempty"` +} + +// Constants associated with the SparkHistoryServerResponse.State property. +// State of the Spark history server. +const ( + SparkHistoryServerResponse_State_Started = "started" + SparkHistoryServerResponse_State_Stopped = "stopped" +) + +// UnmarshalSparkHistoryServerResponse unmarshals an instance of SparkHistoryServerResponse from the specified map of raw messages. +func UnmarshalSparkHistoryServerResponse(m map[string]json.RawMessage, result interface{}) (err error) { + obj := new(SparkHistoryServerResponse) + err = core.UnmarshalPrimitive(m, "state", &obj.State) + if err != nil { + return + } + err = core.UnmarshalPrimitive(m, "cores", &obj.Cores) + if err != nil { + return + } + err = core.UnmarshalPrimitive(m, "memory", &obj.Memory) + if err != nil { + return + } + err = core.UnmarshalPrimitive(m, "start_time", &obj.StartTime) + if err != nil { + return + } + err = core.UnmarshalPrimitive(m, "stop_time", &obj.StopTime) + if err != nil { + return + } + err = core.UnmarshalPrimitive(m, "auto_termination_time", &obj.AutoTerminationTime) + if err != nil { + return + } + reflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj)) + return +} + +// StartSparkHistoryServerOptions : The StartSparkHistoryServer options. +type StartSparkHistoryServerOptions struct { + // The ID of the Analytics Engine instance to which the Spark history server belongs. + InstanceID *string `json:"instance_id" validate:"required,ne="` + + // Allows users to set headers on API requests + Headers map[string]string +} + +// NewStartSparkHistoryServerOptions : Instantiate StartSparkHistoryServerOptions +func (*IbmAnalyticsEngineApiV3) NewStartSparkHistoryServerOptions(instanceID string) *StartSparkHistoryServerOptions { + return &StartSparkHistoryServerOptions{ + InstanceID: core.StringPtr(instanceID), + } +} + +// SetInstanceID : Allow user to set InstanceID +func (_options *StartSparkHistoryServerOptions) SetInstanceID(instanceID string) *StartSparkHistoryServerOptions { + _options.InstanceID = core.StringPtr(instanceID) + return _options +} + +// SetHeaders : Allow user to set Headers +func (options *StartSparkHistoryServerOptions) SetHeaders(param map[string]string) *StartSparkHistoryServerOptions { + options.Headers = param + return options +} + +// StopSparkHistoryServerOptions : The StopSparkHistoryServer options. +type StopSparkHistoryServerOptions struct { + // The ID of the Analytics Engine instance to which the Spark history server belongs. 
+ InstanceID *string `json:"instance_id" validate:"required,ne="` + + // Allows users to set headers on API requests + Headers map[string]string +} + +// NewStopSparkHistoryServerOptions : Instantiate StopSparkHistoryServerOptions +func (*IbmAnalyticsEngineApiV3) NewStopSparkHistoryServerOptions(instanceID string) *StopSparkHistoryServerOptions { + return &StopSparkHistoryServerOptions{ + InstanceID: core.StringPtr(instanceID), + } +} + +// SetInstanceID : Allow user to set InstanceID +func (_options *StopSparkHistoryServerOptions) SetInstanceID(instanceID string) *StopSparkHistoryServerOptions { + _options.InstanceID = core.StringPtr(instanceID) + return _options +} + +// SetHeaders : Allow user to set Headers +func (options *StopSparkHistoryServerOptions) SetHeaders(param map[string]string) *StopSparkHistoryServerOptions { + options.Headers = param + return options +} + // UpdateInstanceDefaultConfigsOptions : The UpdateInstanceDefaultConfigs options. type UpdateInstanceDefaultConfigsOptions struct { // The ID of the Analytics Engine instance. diff --git a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_examples_test.go b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_examples_test.go index 21c6389..94180bb 100644 --- a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_examples_test.go +++ b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_examples_test.go @@ -150,7 +150,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3 Examples Tests`, func() { setInstanceHomeOptions := ibmAnalyticsEngineApiService.NewSetInstanceHomeOptions( "dc0e9889-eab2-4t9e-9441-566209499546", - ) + ) setInstanceHomeOptions.SetNewHmacAccessKey("b9****************************4b") setInstanceHomeOptions.SetNewHmacSecretKey("fa********************************************8a") @@ -318,7 +318,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3 Examples Tests`, func() { listApplicationsOptions := ibmAnalyticsEngineApiService.NewListApplicationsOptions( "dc0e9889-eab2-4t9e-9441-566209499546", ) - listApplicationsOptions.SetState([]string{"accepted", "submitted", "waiting", "running", "finished", "failed"}) + listApplicationsOptions.SetState([]string{"accepted", "running", "finished", "failed"}) applicationCollection, response, err := ibmAnalyticsEngineApiService.ListApplications(listApplicationsOptions) if err != nil { @@ -504,6 +504,68 @@ var _ = Describe(`IbmAnalyticsEngineApiV3 Examples Tests`, func() { Expect(response.StatusCode).To(Equal(200)) Expect(loggingConfigurationResponse).ToNot(BeNil()) }) + It(`StartSparkHistoryServer request example`, func() { + fmt.Println("\nStartSparkHistoryServer() result:") + // begin-start_spark_history_server + + startSparkHistoryServerOptions := ibmAnalyticsEngineApiService.NewStartSparkHistoryServerOptions( + "dc0e9889-eab2-4t9e-9441-566209499546", + ) + + sparkHistoryServerResponse, response, err := ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptions) + if err != nil { + panic(err) + } + b, _ := json.MarshalIndent(sparkHistoryServerResponse, "", " ") + fmt.Println(string(b)) + + // end-start_spark_history_server + + Expect(err).To(BeNil()) + Expect(response.StatusCode).To(Equal(200)) + Expect(sparkHistoryServerResponse).ToNot(BeNil()) + }) + It(`GetSparkHistoryServer request example`, func() { + fmt.Println("\nGetSparkHistoryServer() result:") + // begin-get_spark_history_server + + getSparkHistoryServerOptions := ibmAnalyticsEngineApiService.NewGetSparkHistoryServerOptions( + "dc0e9889-eab2-4t9e-9441-566209499546", + ) + + 
sparkHistoryServerResponse, response, err := ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptions) + if err != nil { + panic(err) + } + b, _ := json.MarshalIndent(sparkHistoryServerResponse, "", " ") + fmt.Println(string(b)) + + // end-get_spark_history_server + + Expect(err).To(BeNil()) + Expect(response.StatusCode).To(Equal(200)) + Expect(sparkHistoryServerResponse).ToNot(BeNil()) + }) + It(`StopSparkHistoryServer request example`, func() { + // begin-stop_spark_history_server + + stopSparkHistoryServerOptions := ibmAnalyticsEngineApiService.NewStopSparkHistoryServerOptions( + "dc0e9889-eab2-4t9e-9441-566209499546", + ) + + response, err := ibmAnalyticsEngineApiService.StopSparkHistoryServer(stopSparkHistoryServerOptions) + if err != nil { + panic(err) + } + if response.StatusCode != 204 { + fmt.Printf("\nUnexpected response status code received from StopSparkHistoryServer(): %d\n", response.StatusCode) + } + + // end-stop_spark_history_server + + Expect(err).To(BeNil()) + Expect(response.StatusCode).To(Equal(204)) + }) It(`DeleteApplication request example`, func() { // begin-delete_application diff --git a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_integration_test.go b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_integration_test.go index cfb6cf5..0d355a8 100644 --- a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_integration_test.go +++ b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_integration_test.go @@ -267,14 +267,14 @@ var _ = Describe(`IbmAnalyticsEngineApiV3 Integration Tests`, func() { }) }) - Describe(`ListApplications - Retrieve all Spark applications`, func() { + Describe(`ListApplications - List all Spark applications`, func() { BeforeEach(func() { shouldSkipTest() }) It(`ListApplications(listApplicationsOptions *ListApplicationsOptions)`, func() { listApplicationsOptions := &ibmanalyticsengineapiv3.ListApplicationsOptions{ InstanceID: core.StringPtr(instanceGuid), - State: []string{"accepted","submitted","waiting","finished","failed"}, + State: []string{"accepted","running","finished","failed"}, } applicationCollection, response, err := ibmAnalyticsEngineApiService.ListApplications(listApplicationsOptions) @@ -418,6 +418,53 @@ var _ = Describe(`IbmAnalyticsEngineApiV3 Integration Tests`, func() { }) }) + Describe(`StartSparkHistoryServer - Start Spark history server`, func() { + BeforeEach(func() { + shouldSkipTest() + }) + It(`StartSparkHistoryServer(startSparkHistoryServerOptions *StartSparkHistoryServerOptions)`, func() { + startSparkHistoryServerOptions := &ibmanalyticsengineapiv3.StartSparkHistoryServerOptions{ + InstanceID: core.StringPtr(instanceGuid), + } + + sparkHistoryServerResponse, response, err := ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptions) + Expect(err).To(BeNil()) + Expect(response.StatusCode).To(Equal(202)) + Expect(sparkHistoryServerResponse).ToNot(BeNil()) + }) + }) + + Describe(`GetSparkHistoryServer - Get Spark history server details`, func() { + BeforeEach(func() { + shouldSkipTest() + }) + It(`GetSparkHistoryServer(getSparkHistoryServerOptions *GetSparkHistoryServerOptions)`, func() { + getSparkHistoryServerOptions := &ibmanalyticsengineapiv3.GetSparkHistoryServerOptions{ + InstanceID: core.StringPtr(instanceGuid), + } + + sparkHistoryServerResponse, response, err := ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptions) + Expect(err).To(BeNil()) + Expect(response.StatusCode).To(Equal(200)) + 
Expect(sparkHistoryServerResponse).ToNot(BeNil()) + }) + }) + + Describe(`StopSparkHistoryServer - Stop Spark history server`, func() { + BeforeEach(func() { + shouldSkipTest() + }) + It(`StopSparkHistoryServer(stopSparkHistoryServerOptions *StopSparkHistoryServerOptions)`, func() { + stopSparkHistoryServerOptions := &ibmanalyticsengineapiv3.StopSparkHistoryServerOptions{ + InstanceID: core.StringPtr(instanceGuid), + } + + response, err := ibmAnalyticsEngineApiService.StopSparkHistoryServer(stopSparkHistoryServerOptions) + Expect(err).To(BeNil()) + Expect(response.StatusCode).To(Equal(204)) + }) + }) + Describe(`DeleteApplication - Stop application`, func() { BeforeEach(func() { shouldSkipTest() diff --git a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_test.go b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_test.go index b994358..4646354 100644 --- a/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_test.go +++ b/ibmanalyticsengineapiv3/ibm_analytics_engine_api_v3_test.go @@ -1,5 +1,5 @@ /** - * (C) Copyright IBM Corp. 2022. + * (C) Copyright IBM Corp. 2023. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ import ( "context" "fmt" "io" + "io/ioutil" "net/http" "net/http/httptest" "os" @@ -626,8 +627,8 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { setInstanceHomeOptionsModel.NewType = core.StringPtr("objectstore") setInstanceHomeOptionsModel.NewRegion = core.StringPtr("us-south") setInstanceHomeOptionsModel.NewEndpoint = core.StringPtr("s3.direct.us-south.cloud-object-storage.appdomain.cloud") - setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("821**********0ae") - setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("03e****************4fc3") + setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("b9****************************4b") + setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("fa********************************************8a") setInstanceHomeOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} // Expect response parsing to fail since we are receiving a text/plain response result, response, operationErr := ibmAnalyticsEngineApiService.SetInstanceHome(setInstanceHomeOptionsModel) @@ -700,8 +701,8 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { setInstanceHomeOptionsModel.NewType = core.StringPtr("objectstore") setInstanceHomeOptionsModel.NewRegion = core.StringPtr("us-south") setInstanceHomeOptionsModel.NewEndpoint = core.StringPtr("s3.direct.us-south.cloud-object-storage.appdomain.cloud") - setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("821**********0ae") - setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("03e****************4fc3") + setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("b9****************************4b") + setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("fa********************************************8a") setInstanceHomeOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} // Invoke operation with a Context to test a timeout error @@ -782,8 +783,8 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { setInstanceHomeOptionsModel.NewType = core.StringPtr("objectstore") setInstanceHomeOptionsModel.NewRegion = core.StringPtr("us-south") setInstanceHomeOptionsModel.NewEndpoint = core.StringPtr("s3.direct.us-south.cloud-object-storage.appdomain.cloud") - 
setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("821**********0ae") - setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("03e****************4fc3") + setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("b9****************************4b") + setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("fa********************************************8a") setInstanceHomeOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} // Invoke operation with valid options model (positive test) @@ -809,8 +810,8 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { setInstanceHomeOptionsModel.NewType = core.StringPtr("objectstore") setInstanceHomeOptionsModel.NewRegion = core.StringPtr("us-south") setInstanceHomeOptionsModel.NewEndpoint = core.StringPtr("s3.direct.us-south.cloud-object-storage.appdomain.cloud") - setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("821**********0ae") - setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("03e****************4fc3") + setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("b9****************************4b") + setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("fa********************************************8a") setInstanceHomeOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} // Invoke operation with empty URL (negative test) err := ibmAnalyticsEngineApiService.SetServiceURL("") @@ -857,8 +858,8 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { setInstanceHomeOptionsModel.NewType = core.StringPtr("objectstore") setInstanceHomeOptionsModel.NewRegion = core.StringPtr("us-south") setInstanceHomeOptionsModel.NewEndpoint = core.StringPtr("s3.direct.us-south.cloud-object-storage.appdomain.cloud") - setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("821**********0ae") - setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("03e****************4fc3") + setInstanceHomeOptionsModel.NewHmacAccessKey = core.StringPtr("b9****************************4b") + setInstanceHomeOptionsModel.NewHmacSecretKey = core.StringPtr("fa********************************************8a") setInstanceHomeOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} // Invoke operation @@ -1938,15 +1939,15 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Construct an instance of the CreateApplicationOptions model createApplicationOptionsModel := new(ibmanalyticsengineapiv3.CreateApplicationOptions) createApplicationOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") - createApplicationOptionsModel.Application = core.StringPtr("cos://bucket_name.my_cos/my_spark_app.py") + createApplicationOptionsModel.Application = core.StringPtr("testString") createApplicationOptionsModel.Runtime = runtimeModel - createApplicationOptionsModel.Jars = core.StringPtr("cos://cloud-object-storage/jars/tests.jar") + createApplicationOptionsModel.Jars = core.StringPtr("testString") createApplicationOptionsModel.Packages = core.StringPtr("testString") createApplicationOptionsModel.Repositories = core.StringPtr("testString") createApplicationOptionsModel.Files = core.StringPtr("testString") createApplicationOptionsModel.Archives = core.StringPtr("testString") - createApplicationOptionsModel.Name = core.StringPtr("spark-app") - createApplicationOptionsModel.Class = core.StringPtr("com.company.path.ClassName") + createApplicationOptionsModel.Name = core.StringPtr("testString") + createApplicationOptionsModel.Class = 
core.StringPtr("testString") createApplicationOptionsModel.Arguments = []string{"[arg1, arg2, arg3]"} createApplicationOptionsModel.Conf = make(map[string]interface{}) createApplicationOptionsModel.Env = make(map[string]interface{}) @@ -2021,15 +2022,15 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Construct an instance of the CreateApplicationOptions model createApplicationOptionsModel := new(ibmanalyticsengineapiv3.CreateApplicationOptions) createApplicationOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") - createApplicationOptionsModel.Application = core.StringPtr("cos://bucket_name.my_cos/my_spark_app.py") + createApplicationOptionsModel.Application = core.StringPtr("testString") createApplicationOptionsModel.Runtime = runtimeModel - createApplicationOptionsModel.Jars = core.StringPtr("cos://cloud-object-storage/jars/tests.jar") + createApplicationOptionsModel.Jars = core.StringPtr("testString") createApplicationOptionsModel.Packages = core.StringPtr("testString") createApplicationOptionsModel.Repositories = core.StringPtr("testString") createApplicationOptionsModel.Files = core.StringPtr("testString") createApplicationOptionsModel.Archives = core.StringPtr("testString") - createApplicationOptionsModel.Name = core.StringPtr("spark-app") - createApplicationOptionsModel.Class = core.StringPtr("com.company.path.ClassName") + createApplicationOptionsModel.Name = core.StringPtr("testString") + createApplicationOptionsModel.Class = core.StringPtr("testString") createApplicationOptionsModel.Arguments = []string{"[arg1, arg2, arg3]"} createApplicationOptionsModel.Conf = make(map[string]interface{}) createApplicationOptionsModel.Env = make(map[string]interface{}) @@ -2112,15 +2113,15 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Construct an instance of the CreateApplicationOptions model createApplicationOptionsModel := new(ibmanalyticsengineapiv3.CreateApplicationOptions) createApplicationOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") - createApplicationOptionsModel.Application = core.StringPtr("cos://bucket_name.my_cos/my_spark_app.py") + createApplicationOptionsModel.Application = core.StringPtr("testString") createApplicationOptionsModel.Runtime = runtimeModel - createApplicationOptionsModel.Jars = core.StringPtr("cos://cloud-object-storage/jars/tests.jar") + createApplicationOptionsModel.Jars = core.StringPtr("testString") createApplicationOptionsModel.Packages = core.StringPtr("testString") createApplicationOptionsModel.Repositories = core.StringPtr("testString") createApplicationOptionsModel.Files = core.StringPtr("testString") createApplicationOptionsModel.Archives = core.StringPtr("testString") - createApplicationOptionsModel.Name = core.StringPtr("spark-app") - createApplicationOptionsModel.Class = core.StringPtr("com.company.path.ClassName") + createApplicationOptionsModel.Name = core.StringPtr("testString") + createApplicationOptionsModel.Class = core.StringPtr("testString") createApplicationOptionsModel.Arguments = []string{"[arg1, arg2, arg3]"} createApplicationOptionsModel.Conf = make(map[string]interface{}) createApplicationOptionsModel.Env = make(map[string]interface{}) @@ -2148,15 +2149,15 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Construct an instance of the CreateApplicationOptions model createApplicationOptionsModel := new(ibmanalyticsengineapiv3.CreateApplicationOptions) createApplicationOptionsModel.InstanceID = 
core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") - createApplicationOptionsModel.Application = core.StringPtr("cos://bucket_name.my_cos/my_spark_app.py") + createApplicationOptionsModel.Application = core.StringPtr("testString") createApplicationOptionsModel.Runtime = runtimeModel - createApplicationOptionsModel.Jars = core.StringPtr("cos://cloud-object-storage/jars/tests.jar") + createApplicationOptionsModel.Jars = core.StringPtr("testString") createApplicationOptionsModel.Packages = core.StringPtr("testString") createApplicationOptionsModel.Repositories = core.StringPtr("testString") createApplicationOptionsModel.Files = core.StringPtr("testString") createApplicationOptionsModel.Archives = core.StringPtr("testString") - createApplicationOptionsModel.Name = core.StringPtr("spark-app") - createApplicationOptionsModel.Class = core.StringPtr("com.company.path.ClassName") + createApplicationOptionsModel.Name = core.StringPtr("testString") + createApplicationOptionsModel.Class = core.StringPtr("testString") createApplicationOptionsModel.Arguments = []string{"[arg1, arg2, arg3]"} createApplicationOptionsModel.Conf = make(map[string]interface{}) createApplicationOptionsModel.Env = make(map[string]interface{}) @@ -2205,15 +2206,15 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Construct an instance of the CreateApplicationOptions model createApplicationOptionsModel := new(ibmanalyticsengineapiv3.CreateApplicationOptions) createApplicationOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") - createApplicationOptionsModel.Application = core.StringPtr("cos://bucket_name.my_cos/my_spark_app.py") + createApplicationOptionsModel.Application = core.StringPtr("testString") createApplicationOptionsModel.Runtime = runtimeModel - createApplicationOptionsModel.Jars = core.StringPtr("cos://cloud-object-storage/jars/tests.jar") + createApplicationOptionsModel.Jars = core.StringPtr("testString") createApplicationOptionsModel.Packages = core.StringPtr("testString") createApplicationOptionsModel.Repositories = core.StringPtr("testString") createApplicationOptionsModel.Files = core.StringPtr("testString") createApplicationOptionsModel.Archives = core.StringPtr("testString") - createApplicationOptionsModel.Name = core.StringPtr("spark-app") - createApplicationOptionsModel.Class = core.StringPtr("com.company.path.ClassName") + createApplicationOptionsModel.Name = core.StringPtr("testString") + createApplicationOptionsModel.Class = core.StringPtr("testString") createApplicationOptionsModel.Arguments = []string{"[arg1, arg2, arg3]"} createApplicationOptionsModel.Conf = make(map[string]interface{}) createApplicationOptionsModel.Env = make(map[string]interface{}) @@ -2295,7 +2296,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Set mock response res.Header().Set("Content-type", "application/json") res.WriteHeader(200) - fmt.Fprintf(res, "%s", `{"applications": [{"id": "ID", "href": "Href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "start_time": "StartTime", "end_time": "EndTime", "finish_time": "FinishTime"}]}`) + fmt.Fprintf(res, "%s", `{"applications": [{"id": "ID", "href": "Href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "spark_ui": "SparkUi", "submission_time": "2021-01-30T08:30:00.000Z", "start_time": "2021-01-30T08:30:00.000Z", 
"end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z", "auto_termination_time": "2021-01-30T08:30:00.000Z"}]}`) })) }) It(`Invoke ListApplications successfully with retries`, func() { @@ -2350,7 +2351,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Set mock response res.Header().Set("Content-type", "application/json") res.WriteHeader(200) - fmt.Fprintf(res, "%s", `{"applications": [{"id": "ID", "href": "Href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "start_time": "StartTime", "end_time": "EndTime", "finish_time": "FinishTime"}]}`) + fmt.Fprintf(res, "%s", `{"applications": [{"id": "ID", "href": "Href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "spark_ui": "SparkUi", "submission_time": "2021-01-30T08:30:00.000Z", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z", "auto_termination_time": "2021-01-30T08:30:00.000Z"}]}`) })) }) It(`Invoke ListApplications successfully`, func() { @@ -2512,7 +2513,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Set mock response res.Header().Set("Content-type", "application/json") res.WriteHeader(200) - fmt.Fprintf(res, "%s", `{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "runtime": {"spark_version": "3.1"}, "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "Packages", "repositories": "Repositories", "files": "Files", "archives": "Archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "state_details": [{"type": "server_error", "code": "server_error", "message": "Message"}], "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z"}`) + fmt.Fprintf(res, "%s", `{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "runtime": {"spark_version": "3.1"}, "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "Packages", "repositories": "Repositories", "files": "Files", "archives": "Archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "spark_ui": "SparkUi", "state_details": [{"type": "server_error", "code": "server_error", "message": "Message"}], "submission_time": "2021-01-30T08:30:00.000Z", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z", "auto_termination_time": "2021-01-30T08:30:00.000Z"}`) })) }) It(`Invoke GetApplication successfully with retries`, func() { @@ -2567,7 +2568,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Set mock response res.Header().Set("Content-type", "application/json") res.WriteHeader(200) - fmt.Fprintf(res, "%s", `{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", 
"runtime": {"spark_version": "3.1"}, "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "Packages", "repositories": "Repositories", "files": "Files", "archives": "Archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "state_details": [{"type": "server_error", "code": "server_error", "message": "Message"}], "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z"}`) + fmt.Fprintf(res, "%s", `{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "runtime": {"spark_version": "3.1"}, "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "Packages", "repositories": "Repositories", "files": "Files", "archives": "Archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "SparkApplicationID", "spark_application_name": "SparkApplicationName", "state": "finished", "spark_ui": "SparkUi", "state_details": [{"type": "server_error", "code": "server_error", "message": "Message"}], "submission_time": "2021-01-30T08:30:00.000Z", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z", "auto_termination_time": "2021-01-30T08:30:00.000Z"}`) })) }) It(`Invoke GetApplication successfully`, func() { @@ -2799,7 +2800,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Set mock response res.Header().Set("Content-type", "application/json") res.WriteHeader(200) - fmt.Fprintf(res, "%s", `{"id": "ID", "state": "finished", "start_time": "StartTime", "end_time": "EndTime", "finish_time": "FinishTime"}`) + fmt.Fprintf(res, "%s", `{"id": "ID", "state": "finished", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z", "auto_termination_time": "2021-01-30T08:30:00.000Z"}`) })) }) It(`Invoke GetApplicationState successfully with retries`, func() { @@ -2854,7 +2855,7 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { // Set mock response res.Header().Set("Content-type", "application/json") res.WriteHeader(200) - fmt.Fprintf(res, "%s", `{"id": "ID", "state": "finished", "start_time": "StartTime", "end_time": "EndTime", "finish_time": "FinishTime"}`) + fmt.Fprintf(res, "%s", `{"id": "ID", "state": "finished", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z", "auto_termination_time": "2021-01-30T08:30:00.000Z"}`) })) }) It(`Invoke GetApplicationState successfully`, func() { @@ -4309,6 +4310,498 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { }) }) }) + Describe(`StartSparkHistoryServer(startSparkHistoryServerOptions *StartSparkHistoryServerOptions) - Operation response error`, func() { + startSparkHistoryServerPath := "/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_history_server" + Context(`Using mock server endpoint with invalid JSON response`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + 
+ // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(startSparkHistoryServerPath)) + Expect(req.Method).To(Equal("POST")) + res.Header().Set("Content-type", "application/json") + res.WriteHeader(202) + fmt.Fprint(res, `} this is not valid json {`) + })) + }) + It(`Invoke StartSparkHistoryServer with error: Operation response processing error`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the StartSparkHistoryServerOptions model + startSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StartSparkHistoryServerOptions) + startSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + startSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + // Expect response parsing to fail since we are receiving a text/plain response + result, response, operationErr := ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).To(BeNil()) + + // Enable retries and test again + ibmAnalyticsEngineApiService.EnableRetries(0, 0) + result, response, operationErr = ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + }) + Describe(`StartSparkHistoryServer(startSparkHistoryServerOptions *StartSparkHistoryServerOptions)`, func() { + startSparkHistoryServerPath := "/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_history_server" + Context(`Using mock server endpoint with timeout`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(startSparkHistoryServerPath)) + Expect(req.Method).To(Equal("POST")) + + // Sleep a short time to support a timeout test + time.Sleep(100 * time.Millisecond) + + // Set mock response + res.Header().Set("Content-type", "application/json") + res.WriteHeader(202) + fmt.Fprintf(res, "%s", `{"state": "started", "cores": "1", "memory": "4G", "start_time": "2022-12-02T08:30:00.000Z", "stop_time": "2022-12-02T10:30:00.000Z", "auto_termination_time": "2022-12-05T08:30:00.000Z"}`) + })) + }) + It(`Invoke StartSparkHistoryServer successfully with retries`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + ibmAnalyticsEngineApiService.EnableRetries(0, 0) + + // Construct an instance of the StartSparkHistoryServerOptions model + startSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StartSparkHistoryServerOptions) + startSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + startSparkHistoryServerOptionsModel.Headers = 
map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation with a Context to test a timeout error + ctx, cancelFunc := context.WithTimeout(context.Background(), 80*time.Millisecond) + defer cancelFunc() + _, _, operationErr := ibmAnalyticsEngineApiService.StartSparkHistoryServerWithContext(ctx, startSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring("deadline exceeded")) + + // Disable retries and test again + ibmAnalyticsEngineApiService.DisableRetries() + result, response, operationErr := ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).ToNot(BeNil()) + + // Re-test the timeout error with retries disabled + ctx, cancelFunc2 := context.WithTimeout(context.Background(), 80*time.Millisecond) + defer cancelFunc2() + _, _, operationErr = ibmAnalyticsEngineApiService.StartSparkHistoryServerWithContext(ctx, startSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring("deadline exceeded")) + }) + AfterEach(func() { + testServer.Close() + }) + }) + Context(`Using mock server endpoint`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(startSparkHistoryServerPath)) + Expect(req.Method).To(Equal("POST")) + + // Set mock response + res.Header().Set("Content-type", "application/json") + res.WriteHeader(202) + fmt.Fprintf(res, "%s", `{"state": "started", "cores": "1", "memory": "4G", "start_time": "2022-12-02T08:30:00.000Z", "stop_time": "2022-12-02T10:30:00.000Z", "auto_termination_time": "2022-12-05T08:30:00.000Z"}`) + })) + }) + It(`Invoke StartSparkHistoryServer successfully`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Invoke operation with nil options model (negative test) + result, response, operationErr := ibmAnalyticsEngineApiService.StartSparkHistoryServer(nil) + Expect(operationErr).NotTo(BeNil()) + Expect(response).To(BeNil()) + Expect(result).To(BeNil()) + + // Construct an instance of the StartSparkHistoryServerOptions model + startSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StartSparkHistoryServerOptions) + startSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + startSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation with valid options model (positive test) + result, response, operationErr = ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).ToNot(BeNil()) + + }) + It(`Invoke StartSparkHistoryServer with error: Operation validation and request error`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + 
Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the StartSparkHistoryServerOptions model + startSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StartSparkHistoryServerOptions) + startSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + startSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + // Invoke operation with empty URL (negative test) + err := ibmAnalyticsEngineApiService.SetServiceURL("") + Expect(err).To(BeNil()) + result, response, operationErr := ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring(core.ERRORMSG_SERVICE_URL_MISSING)) + Expect(response).To(BeNil()) + Expect(result).To(BeNil()) + // Construct a second instance of the StartSparkHistoryServerOptions model with no property values + startSparkHistoryServerOptionsModelNew := new(ibmanalyticsengineapiv3.StartSparkHistoryServerOptions) + // Invoke operation with invalid model (negative test) + result, response, operationErr = ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModelNew) + Expect(operationErr).ToNot(BeNil()) + Expect(response).To(BeNil()) + Expect(result).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + Context(`Using mock server endpoint with missing response body`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Set success status code with no response body + res.WriteHeader(202) + })) + }) + It(`Invoke StartSparkHistoryServer successfully`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the StartSparkHistoryServerOptions model + startSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StartSparkHistoryServerOptions) + startSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + startSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation + result, response, operationErr := ibmAnalyticsEngineApiService.StartSparkHistoryServer(startSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + + // Verify a nil result + Expect(result).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + }) + Describe(`GetSparkHistoryServer(getSparkHistoryServerOptions *GetSparkHistoryServerOptions) - Operation response error`, func() { + getSparkHistoryServerPath := "/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_history_server" + Context(`Using mock server endpoint with invalid JSON response`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(getSparkHistoryServerPath)) + Expect(req.Method).To(Equal("GET")) + res.Header().Set("Content-type", "application/json") + 
res.WriteHeader(200) + fmt.Fprint(res, `} this is not valid json {`) + })) + }) + It(`Invoke GetSparkHistoryServer with error: Operation response processing error`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the GetSparkHistoryServerOptions model + getSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.GetSparkHistoryServerOptions) + getSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + getSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + // Expect response parsing to fail since the mock returns invalid JSON + result, response, operationErr := ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).To(BeNil()) + + // Enable retries and test again + ibmAnalyticsEngineApiService.EnableRetries(0, 0) + result, response, operationErr = ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + }) + Describe(`GetSparkHistoryServer(getSparkHistoryServerOptions *GetSparkHistoryServerOptions)`, func() { + getSparkHistoryServerPath := "/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_history_server" + Context(`Using mock server endpoint with timeout`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(getSparkHistoryServerPath)) + Expect(req.Method).To(Equal("GET")) + + // Sleep a short time to support a timeout test + time.Sleep(100 * time.Millisecond) + + // Set mock response + res.Header().Set("Content-type", "application/json") + res.WriteHeader(200) + fmt.Fprintf(res, "%s", `{"state": "started", "cores": "1", "memory": "4G", "start_time": "2022-12-02T08:30:00.000Z", "stop_time": "2022-12-02T10:30:00.000Z", "auto_termination_time": "2022-12-05T08:30:00.000Z"}`) + })) + }) + It(`Invoke GetSparkHistoryServer successfully with retries`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + ibmAnalyticsEngineApiService.EnableRetries(0, 0) + + // Construct an instance of the GetSparkHistoryServerOptions model + getSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.GetSparkHistoryServerOptions) + getSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + getSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation with a Context to test a timeout error + ctx, cancelFunc := context.WithTimeout(context.Background(), 80*time.Millisecond) + defer cancelFunc() + _, _, operationErr := 
ibmAnalyticsEngineApiService.GetSparkHistoryServerWithContext(ctx, getSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring("deadline exceeded")) + + // Disable retries and test again + ibmAnalyticsEngineApiService.DisableRetries() + result, response, operationErr := ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).ToNot(BeNil()) + + // Re-test the timeout error with retries disabled + ctx, cancelFunc2 := context.WithTimeout(context.Background(), 80*time.Millisecond) + defer cancelFunc2() + _, _, operationErr = ibmAnalyticsEngineApiService.GetSparkHistoryServerWithContext(ctx, getSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring("deadline exceeded")) + }) + AfterEach(func() { + testServer.Close() + }) + }) + Context(`Using mock server endpoint`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(getSparkHistoryServerPath)) + Expect(req.Method).To(Equal("GET")) + + // Set mock response + res.Header().Set("Content-type", "application/json") + res.WriteHeader(200) + fmt.Fprintf(res, "%s", `{"state": "started", "cores": "1", "memory": "4G", "start_time": "2022-12-02T08:30:00.000Z", "stop_time": "2022-12-02T10:30:00.000Z", "auto_termination_time": "2022-12-05T08:30:00.000Z"}`) + })) + }) + It(`Invoke GetSparkHistoryServer successfully`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Invoke operation with nil options model (negative test) + result, response, operationErr := ibmAnalyticsEngineApiService.GetSparkHistoryServer(nil) + Expect(operationErr).NotTo(BeNil()) + Expect(response).To(BeNil()) + Expect(result).To(BeNil()) + + // Construct an instance of the GetSparkHistoryServerOptions model + getSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.GetSparkHistoryServerOptions) + getSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + getSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation with valid options model (positive test) + result, response, operationErr = ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + Expect(result).ToNot(BeNil()) + + }) + It(`Invoke GetSparkHistoryServer with error: Operation validation and request error`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the GetSparkHistoryServerOptions model + getSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.GetSparkHistoryServerOptions) + 
getSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + getSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + // Invoke operation with empty URL (negative test) + err := ibmAnalyticsEngineApiService.SetServiceURL("") + Expect(err).To(BeNil()) + result, response, operationErr := ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring(core.ERRORMSG_SERVICE_URL_MISSING)) + Expect(response).To(BeNil()) + Expect(result).To(BeNil()) + // Construct a second instance of the GetSparkHistoryServerOptions model with no property values + getSparkHistoryServerOptionsModelNew := new(ibmanalyticsengineapiv3.GetSparkHistoryServerOptions) + // Invoke operation with invalid model (negative test) + result, response, operationErr = ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModelNew) + Expect(operationErr).ToNot(BeNil()) + Expect(response).To(BeNil()) + Expect(result).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + Context(`Using mock server endpoint with missing response body`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Set success status code with no response body + res.WriteHeader(200) + })) + }) + It(`Invoke GetSparkHistoryServer successfully`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the GetSparkHistoryServerOptions model + getSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.GetSparkHistoryServerOptions) + getSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + getSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation + result, response, operationErr := ibmAnalyticsEngineApiService.GetSparkHistoryServer(getSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + + // Verify a nil result + Expect(result).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + }) + Describe(`StopSparkHistoryServer(stopSparkHistoryServerOptions *StopSparkHistoryServerOptions)`, func() { + stopSparkHistoryServerPath := "/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_history_server" + Context(`Using mock server endpoint`, func() { + BeforeEach(func() { + testServer = httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + defer GinkgoRecover() + + // Verify the contents of the request + Expect(req.URL.EscapedPath()).To(Equal(stopSparkHistoryServerPath)) + Expect(req.Method).To(Equal("DELETE")) + + res.WriteHeader(204) + })) + }) + It(`Invoke StopSparkHistoryServer successfully`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) 
+ + // Invoke operation with nil options model (negative test) + response, operationErr := ibmAnalyticsEngineApiService.StopSparkHistoryServer(nil) + Expect(operationErr).NotTo(BeNil()) + Expect(response).To(BeNil()) + + // Construct an instance of the StopSparkHistoryServerOptions model + stopSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StopSparkHistoryServerOptions) + stopSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + stopSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + + // Invoke operation with valid options model (positive test) + response, operationErr = ibmAnalyticsEngineApiService.StopSparkHistoryServer(stopSparkHistoryServerOptionsModel) + Expect(operationErr).To(BeNil()) + Expect(response).ToNot(BeNil()) + }) + It(`Invoke StopSparkHistoryServer with error: Operation validation and request error`, func() { + ibmAnalyticsEngineApiService, serviceErr := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ + URL: testServer.URL, + Authenticator: &core.NoAuthAuthenticator{}, + }) + Expect(serviceErr).To(BeNil()) + Expect(ibmAnalyticsEngineApiService).ToNot(BeNil()) + + // Construct an instance of the StopSparkHistoryServerOptions model + stopSparkHistoryServerOptionsModel := new(ibmanalyticsengineapiv3.StopSparkHistoryServerOptions) + stopSparkHistoryServerOptionsModel.InstanceID = core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09") + stopSparkHistoryServerOptionsModel.Headers = map[string]string{"x-custom-header": "x-custom-value"} + // Invoke operation with empty URL (negative test) + err := ibmAnalyticsEngineApiService.SetServiceURL("") + Expect(err).To(BeNil()) + response, operationErr := ibmAnalyticsEngineApiService.StopSparkHistoryServer(stopSparkHistoryServerOptionsModel) + Expect(operationErr).ToNot(BeNil()) + Expect(operationErr.Error()).To(ContainSubstring(core.ERRORMSG_SERVICE_URL_MISSING)) + Expect(response).To(BeNil()) + // Construct a second instance of the StopSparkHistoryServerOptions model with no property values + stopSparkHistoryServerOptionsModelNew := new(ibmanalyticsengineapiv3.StopSparkHistoryServerOptions) + // Invoke operation with invalid model (negative test) + response, operationErr = ibmAnalyticsEngineApiService.StopSparkHistoryServer(stopSparkHistoryServerOptionsModelNew) + Expect(operationErr).ToNot(BeNil()) + Expect(response).To(BeNil()) + }) + AfterEach(func() { + testServer.Close() + }) + }) + }) Describe(`Model constructor tests`, func() { Context(`Using a service client instance`, func() { ibmAnalyticsEngineApiService, _ := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{ @@ -4338,30 +4831,30 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09" createApplicationOptionsModel := ibmAnalyticsEngineApiService.NewCreateApplicationOptions(instanceID) createApplicationOptionsModel.SetInstanceID("e64c907a-e82f-46fd-addc-ccfafbd28b09") - createApplicationOptionsModel.SetApplication("cos://bucket_name.my_cos/my_spark_app.py") + createApplicationOptionsModel.SetApplication("testString") createApplicationOptionsModel.SetRuntime(runtimeModel) - createApplicationOptionsModel.SetJars("cos://cloud-object-storage/jars/tests.jar") + createApplicationOptionsModel.SetJars("testString") createApplicationOptionsModel.SetPackages("testString") 
createApplicationOptionsModel.SetRepositories("testString") createApplicationOptionsModel.SetFiles("testString") createApplicationOptionsModel.SetArchives("testString") - createApplicationOptionsModel.SetName("spark-app") - createApplicationOptionsModel.SetClass("com.company.path.ClassName") + createApplicationOptionsModel.SetName("testString") + createApplicationOptionsModel.SetClass("testString") createApplicationOptionsModel.SetArguments([]string{"[arg1, arg2, arg3]"}) createApplicationOptionsModel.SetConf(make(map[string]interface{})) createApplicationOptionsModel.SetEnv(make(map[string]interface{})) createApplicationOptionsModel.SetHeaders(map[string]string{"foo": "bar"}) Expect(createApplicationOptionsModel).ToNot(BeNil()) Expect(createApplicationOptionsModel.InstanceID).To(Equal(core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09"))) - Expect(createApplicationOptionsModel.Application).To(Equal(core.StringPtr("cos://bucket_name.my_cos/my_spark_app.py"))) + Expect(createApplicationOptionsModel.Application).To(Equal(core.StringPtr("testString"))) Expect(createApplicationOptionsModel.Runtime).To(Equal(runtimeModel)) - Expect(createApplicationOptionsModel.Jars).To(Equal(core.StringPtr("cos://cloud-object-storage/jars/tests.jar"))) + Expect(createApplicationOptionsModel.Jars).To(Equal(core.StringPtr("testString"))) Expect(createApplicationOptionsModel.Packages).To(Equal(core.StringPtr("testString"))) Expect(createApplicationOptionsModel.Repositories).To(Equal(core.StringPtr("testString"))) Expect(createApplicationOptionsModel.Files).To(Equal(core.StringPtr("testString"))) Expect(createApplicationOptionsModel.Archives).To(Equal(core.StringPtr("testString"))) - Expect(createApplicationOptionsModel.Name).To(Equal(core.StringPtr("spark-app"))) - Expect(createApplicationOptionsModel.Class).To(Equal(core.StringPtr("com.company.path.ClassName"))) + Expect(createApplicationOptionsModel.Name).To(Equal(core.StringPtr("testString"))) + Expect(createApplicationOptionsModel.Class).To(Equal(core.StringPtr("testString"))) Expect(createApplicationOptionsModel.Arguments).To(Equal([]string{"[arg1, arg2, arg3]"})) Expect(createApplicationOptionsModel.Conf).To(Equal(make(map[string]interface{}))) Expect(createApplicationOptionsModel.Env).To(Equal(make(map[string]interface{}))) @@ -4486,6 +4979,16 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { Expect(getResourceConsumptionLimitsOptionsModel.InstanceID).To(Equal(core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09"))) Expect(getResourceConsumptionLimitsOptionsModel.Headers).To(Equal(map[string]string{"foo": "bar"})) }) + It(`Invoke NewGetSparkHistoryServerOptions successfully`, func() { + // Construct an instance of the GetSparkHistoryServerOptions model + instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09" + getSparkHistoryServerOptionsModel := ibmAnalyticsEngineApiService.NewGetSparkHistoryServerOptions(instanceID) + getSparkHistoryServerOptionsModel.SetInstanceID("e64c907a-e82f-46fd-addc-ccfafbd28b09") + getSparkHistoryServerOptionsModel.SetHeaders(map[string]string{"foo": "bar"}) + Expect(getSparkHistoryServerOptionsModel).ToNot(BeNil()) + Expect(getSparkHistoryServerOptionsModel.InstanceID).To(Equal(core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09"))) + Expect(getSparkHistoryServerOptionsModel.Headers).To(Equal(map[string]string{"foo": "bar"})) + }) It(`Invoke NewListApplicationsOptions successfully`, func() { // Construct an instance of the ListApplicationsOptions model instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09" @@ -4549,8 
+5052,8 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { setInstanceHomeOptionsModel.SetNewType("objectstore") setInstanceHomeOptionsModel.SetNewRegion("us-south") setInstanceHomeOptionsModel.SetNewEndpoint("s3.direct.us-south.cloud-object-storage.appdomain.cloud") - setInstanceHomeOptionsModel.SetNewHmacAccessKey("821**********0ae") - setInstanceHomeOptionsModel.SetNewHmacSecretKey("03e****************4fc3") + setInstanceHomeOptionsModel.SetNewHmacAccessKey("b9****************************4b") + setInstanceHomeOptionsModel.SetNewHmacSecretKey("fa********************************************8a") setInstanceHomeOptionsModel.SetHeaders(map[string]string{"foo": "bar"}) Expect(setInstanceHomeOptionsModel).ToNot(BeNil()) Expect(setInstanceHomeOptionsModel.InstanceID).To(Equal(core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09"))) @@ -4559,10 +5062,30 @@ var _ = Describe(`IbmAnalyticsEngineApiV3`, func() { Expect(setInstanceHomeOptionsModel.NewType).To(Equal(core.StringPtr("objectstore"))) Expect(setInstanceHomeOptionsModel.NewRegion).To(Equal(core.StringPtr("us-south"))) Expect(setInstanceHomeOptionsModel.NewEndpoint).To(Equal(core.StringPtr("s3.direct.us-south.cloud-object-storage.appdomain.cloud"))) - Expect(setInstanceHomeOptionsModel.NewHmacAccessKey).To(Equal(core.StringPtr("821**********0ae"))) - Expect(setInstanceHomeOptionsModel.NewHmacSecretKey).To(Equal(core.StringPtr("03e****************4fc3"))) + Expect(setInstanceHomeOptionsModel.NewHmacAccessKey).To(Equal(core.StringPtr("b9****************************4b"))) + Expect(setInstanceHomeOptionsModel.NewHmacSecretKey).To(Equal(core.StringPtr("fa********************************************8a"))) Expect(setInstanceHomeOptionsModel.Headers).To(Equal(map[string]string{"foo": "bar"})) }) + It(`Invoke NewStartSparkHistoryServerOptions successfully`, func() { + // Construct an instance of the StartSparkHistoryServerOptions model + instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09" + startSparkHistoryServerOptionsModel := ibmAnalyticsEngineApiService.NewStartSparkHistoryServerOptions(instanceID) + startSparkHistoryServerOptionsModel.SetInstanceID("e64c907a-e82f-46fd-addc-ccfafbd28b09") + startSparkHistoryServerOptionsModel.SetHeaders(map[string]string{"foo": "bar"}) + Expect(startSparkHistoryServerOptionsModel).ToNot(BeNil()) + Expect(startSparkHistoryServerOptionsModel.InstanceID).To(Equal(core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09"))) + Expect(startSparkHistoryServerOptionsModel.Headers).To(Equal(map[string]string{"foo": "bar"})) + }) + It(`Invoke NewStopSparkHistoryServerOptions successfully`, func() { + // Construct an instance of the StopSparkHistoryServerOptions model + instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09" + stopSparkHistoryServerOptionsModel := ibmAnalyticsEngineApiService.NewStopSparkHistoryServerOptions(instanceID) + stopSparkHistoryServerOptionsModel.SetInstanceID("e64c907a-e82f-46fd-addc-ccfafbd28b09") + stopSparkHistoryServerOptionsModel.SetHeaders(map[string]string{"foo": "bar"}) + Expect(stopSparkHistoryServerOptionsModel).ToNot(BeNil()) + Expect(stopSparkHistoryServerOptionsModel.InstanceID).To(Equal(core.StringPtr("e64c907a-e82f-46fd-addc-ccfafbd28b09"))) + Expect(stopSparkHistoryServerOptionsModel.Headers).To(Equal(map[string]string{"foo": "bar"})) + }) It(`Invoke NewUpdateInstanceDefaultConfigsOptions successfully`, func() { // Construct an instance of the UpdateInstanceDefaultConfigsOptions model instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09" @@ -4618,7 +5141,7 @@ func 
CreateMockUUID(mockData string) *strfmt.UUID { } func CreateMockReader(mockData string) io.ReadCloser { - return io.NopCloser(bytes.NewReader([]byte(mockData))) + return ioutil.NopCloser(bytes.NewReader([]byte(mockData))) } func CreateMockDate(mockData string) *strfmt.Date {
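For reviewers, the tests above exercise the three new Spark history server operations end to end. The following is a minimal usage sketch, not part of the generated diff: the service URL, API key, instance ID, and the SDK module import path are placeholders (assumptions), only the constructors and the StartSparkHistoryServer / GetSparkHistoryServer / StopSparkHistoryServer methods shown in this diff are relied on, and error handling is reduced to log.Fatal.

package main

import (
	"fmt"
	"log"

	"github.com/IBM/go-sdk-core/v5/core"
	// Assumed import path; use the module path of this SDK from your go.mod.
	"github.com/IBM/ibm-iae-go-sdk/v2/ibmanalyticsengineapiv3"
)

func main() {
	// Placeholder endpoint and credentials -- replace with real values.
	service, err := ibmanalyticsengineapiv3.NewIbmAnalyticsEngineApiV3(&ibmanalyticsengineapiv3.IbmAnalyticsEngineApiV3Options{
		URL:           "https://api.us-south.ae.cloud.ibm.com",
		Authenticator: &core.IamAuthenticator{ApiKey: "<apikey>"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Example instance GUID, matching the value used throughout the tests.
	instanceID := "e64c907a-e82f-46fd-addc-ccfafbd28b09"

	// Start the Spark history server for the instance (tests expect HTTP 202).
	startOpts := service.NewStartSparkHistoryServerOptions(instanceID)
	_, startResp, err := service.StartSparkHistoryServer(startOpts)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("start status:", startResp.StatusCode)

	// Retrieve the current history server details (state, cores, memory, timestamps).
	getOpts := service.NewGetSparkHistoryServerOptions(instanceID)
	details, _, err := service.GetSparkHistoryServer(getOpts)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("history server: %+v\n", details)

	// Stop the history server; this operation returns no response body (tests expect HTTP 204).
	stopOpts := service.NewStopSparkHistoryServerOptions(instanceID)
	stopResp, err := service.StopSparkHistoryServer(stopOpts)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("stop status:", stopResp.StatusCode)
}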