Merge pull request #440 from HumairAK/issue_244
Add Custom CA Bundle Injection into DSP via DSPA
openshift-merge-bot[bot] authored Nov 6, 2023
2 parents 42d6619 + 31ed583 commit 6206cf2
Showing 22 changed files with 591 additions and 33 deletions.
16 changes: 16 additions & 0 deletions api/v1alpha1/dspipeline_types.go
@@ -102,6 +102,22 @@ type APIServer struct {
AutoUpdatePipelineDefaultVersion bool `json:"autoUpdatePipelineDefaultVersion"`
// Specify custom Pod resource requirements for this component.
Resources *ResourceRequirements `json:"resources,omitempty"`

// If the Object store/DB is behind a TLS secured connection that is
// unrecognized by the host OpenShift/K8s cluster, then you can
// provide a PEM formatted CA bundle to be injected into the DSP
// server pod to trust this connection. CA Bundle should be provided
// as values within configmaps, mapped to keys.
CABundle *CABundle `json:"cABundle,omitempty"`
}

type CABundle struct {
// +kubebuilder:validation:Required
ConfigMapName string `json:"configMapName"`
// Key should map to a CA bundle. The key is also used to name
// the CA bundle file (e.g. ca-bundle.crt)
// +kubebuilder:validation:Required
ConfigMapKey string `json:"configMapKey"`
}

type ArtifactScriptConfigMap struct {
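For reference, a minimal sketch of how this new field would be set from a DSPA custom resource. The apiVersion shown for the DSPA is an assumption based on the operator's api/v1alpha1 package, the field is assumed to sit under spec.apiServer (matching the APIServer struct above), and all resource and ConfigMap names are illustrative:

# Hypothetical ConfigMap holding the PEM-formatted CA bundle
apiVersion: v1
kind: ConfigMap
metadata:
  name: custom-ca-bundle          # illustrative name
data:
  ca-bundle.crt: |
    -----BEGIN CERTIFICATE-----
    ...PEM-encoded CA certificate(s)...
    -----END CERTIFICATE-----
---
# Hypothetical DSPA referencing the bundle; configMapKey must match the data key above
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1   # assumed group/version
kind: DataSciencePipelinesApplication
metadata:
  name: sample
spec:
  apiServer:
    cABundle:
      configMapName: custom-ca-bundle
      configMapKey: ca-bundle.crt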
20 changes: 20 additions & 0 deletions api/v1alpha1/zz_generated.deepcopy.go

Some generated files are not rendered by default.

@@ -61,6 +61,23 @@ spec:
default: true
description: 'Default: true'
type: boolean
cABundle:
description: If the Object store/DB is behind a TLS secured connection
that is unrecognized by the host OpenShift/K8s cluster, then
you can provide a PEM formatted CA bundle to be injected into
the DSP server pod to trust this connection. CA Bundle should
be provided as values within configmaps, mapped to keys.
properties:
configMapKey:
description: Key should map to a CA bundle. The key is also
used to name the CA bundle file (e.g. ca-bundle.crt)
type: string
configMapName:
type: string
required:
- configMapKey
- configMapName
type: object
cacheImage:
type: string
collectMetrics:
13 changes: 11 additions & 2 deletions config/internal/apiserver/artifact_script.yaml.tmpl
@@ -6,13 +6,22 @@ data:
workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
artifact_name=$(basename $2)

aws_cp() {
{{ if .APIServer.CABundle }}
aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} --ca-bundle {{ .PiplinesCABundleMountPath }}/{{ .APIServer.CABundle.ConfigMapKey }} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
{{ else }}
aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
{{ end }}
}

if [ -f "$workspace_dest/$artifact_name" ]; then
echo sending to: ${workspace_dest}/${artifact_name}
tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
aws_cp $1
elif [ -f "$2" ]; then
tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
aws_cp $1
else
echo "$2 file does not exist. Skip artifact tracking for $1"
fi
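Rendered for a DSPA with a CA bundle configured, the resulting artifact-script ConfigMap carries an aws_cp that passes --ca-bundle to the AWS CLI. A rough sketch of an excerpt follows; the endpoint, bucket, and "sample" suffix are placeholders, while the ConfigMap name prefix, data key, and mount path come from controllers/config/defaults.go later in this diff:

# Sketch of the rendered artifact script (excerpt) when spec.apiServer.cABundle is set;
# endpoint, bucket, and resource name suffix are illustrative
apiVersion: v1
kind: ConfigMap
metadata:
  name: ds-pipeline-artifact-script-sample
data:
  artifact_script: |
    aws_cp() {
      aws s3 --endpoint https://minio.example.com \
        --ca-bundle /etc/pki/tls/certs/ca-bundle.crt \
        cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
    }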
24 changes: 23 additions & 1 deletion config/internal/apiserver/deployment.yaml.tmpl
@@ -50,6 +50,14 @@ spec:
value: "{{.APIServer.ArtifactImage}}"
- name: ARCHIVE_LOGS
value: "{{.APIServer.ArchiveLogs}}"
{{ if .APIServer.CABundle }}
- name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_NAME
value: "{{.APIServer.CABundle.ConfigMapName}}"
- name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_KEY
value: "{{.APIServer.CABundle.ConfigMapKey}}"
- name: ARTIFACT_COPY_STEP_CABUNDLE_MOUNTPATH
value: {{ .PiplinesCABundleMountPath }}
{{ end }}
- name: TRACK_ARTIFACTS
value: "{{.APIServer.TrackArtifacts}}"
- name: STRIP_EOF
@@ -145,13 +153,19 @@ spec:
memory: {{.APIServer.Resources.Limits.Memory}}
{{ end }}
{{ end }}
{{ if .APIServer.EnableSamplePipeline }}
{{ if or .APIServer.EnableSamplePipeline .APIServer.CABundle }}
volumeMounts:
{{ if .APIServer.EnableSamplePipeline }}
- name: sample-config
mountPath: /config/sample_config.json
subPath: sample_config.json
- name: sample-pipeline
mountPath: /samples/
{{ end }}
{{ if .APIServer.CABundle }}
- mountPath: {{ .APIServerPiplinesCABundleMountPath }}
name: ca-bundle
{{ end }}
{{ end }}
{{ if .APIServer.EnableRoute }}
- name: oauth-proxy
@@ -206,6 +220,14 @@ spec:
- name: proxy-tls
secret:
secretName: ds-pipelines-proxy-tls-{{.Name}}
{{ if .APIServer.CABundle }}
- name: ca-bundle
configMap:
name: {{ .APIServer.CABundle.ConfigMapName }}
items:
- key: {{ .APIServer.CABundle.ConfigMapKey }}
path: {{ .APIServer.CABundle.ConfigMapKey }}
{{ end }}
{{ if .APIServer.EnableSamplePipeline }}
- name: sample-config
configMap:
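Taken together, the env, volumeMounts, and volumes hunks above render roughly the following fragments of the API server Deployment when a CA bundle is configured. The ConfigMap name and key are illustrative; the mount paths resolve to the constants defined in controllers/config/defaults.go below:

# Sketch of the rendered Deployment fragments for
# spec.apiServer.cABundle: {configMapName: custom-ca-bundle, configMapKey: ca-bundle.crt}
env:
  - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_NAME
    value: "custom-ca-bundle"
  - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_KEY
    value: "ca-bundle.crt"
  - name: ARTIFACT_COPY_STEP_CABUNDLE_MOUNTPATH
    value: /etc/pki/tls/certs
volumeMounts:
  - name: ca-bundle
    mountPath: /etc/pki/tls/certs
volumes:
  - name: ca-bundle
    configMap:
      name: custom-ca-bundle
      items:
        - key: ca-bundle.crt
          path: ca-bundle.crt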
13 changes: 7 additions & 6 deletions controllers/config/defaults.go
@@ -24,12 +24,13 @@ import (
)

const (
DefaultImageValue = "MustSetInConfig"

MLPipelineUIConfigMapPrefix = "ds-pipeline-ui-configmap-"
ArtifactScriptConfigMapNamePrefix = "ds-pipeline-artifact-script-"
ArtifactScriptConfigMapKey = "artifact_script"
DSPServicePrefix = "ds-pipeline"
DefaultImageValue = "MustSetInConfig"
APIServerPiplinesCABundleMountPath = "/etc/pki/tls/certs"
PiplinesCABundleMountPath = "/etc/pki/tls/certs"
MLPipelineUIConfigMapPrefix = "ds-pipeline-ui-configmap-"
ArtifactScriptConfigMapNamePrefix = "ds-pipeline-artifact-script-"
ArtifactScriptConfigMapKey = "artifact_script"
DSPServicePrefix = "ds-pipeline"

DBSecretNamePrefix = "ds-pipeline-db-"
DBSecretKey = "password"
2 changes: 1 addition & 1 deletion controllers/dspipeline_controller.go
@@ -226,7 +226,7 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.
// Get Prereq Status (DB and ObjStore Ready)
dbAvailable := r.isDatabaseAccessible(ctx, dspa, params)
objStoreAvailable := r.isObjectStorageAccessible(ctx, dspa, params)
dspaPrereqsReady := (dbAvailable && objStoreAvailable)
dspaPrereqsReady := dbAvailable && objStoreAvailable

if dspaPrereqsReady {
// Manage Common Manifests
20 changes: 18 additions & 2 deletions controllers/dspipeline_params.go
@@ -40,8 +40,11 @@ type DSPAParams struct {
Namespace string
Owner mf.Owner
APIServer *dspa.APIServer
APIServerPiplinesCABundleMountPath string
PiplinesCABundleMountPath string
APIServerDefaultResourceName string
APIServerServiceName string
APICustomPemCerts []byte
OAuthProxy string
ScheduledWorkflow *dspa.ScheduledWorkflow
ScheduledWorkflowDefaultResourceName string
@@ -441,8 +444,8 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip
p.Minio = dsp.Spec.ObjectStorage.Minio.DeepCopy()
p.OAuthProxy = config.GetStringConfigWithDefault(config.OAuthProxyImagePath, config.DefaultImageValue)
p.MLMD = dsp.Spec.MLMD.DeepCopy()

// TODO: If p.<component> is nil we should create defaults
p.APIServerPiplinesCABundleMountPath = config.APIServerPiplinesCABundleMountPath
p.PiplinesCABundleMountPath = config.PiplinesCABundleMountPath

if p.APIServer != nil {

@@ -464,7 +467,20 @@
Key: config.ArtifactScriptConfigMapKey,
}
}

// If a custom CA bundle is specified for injection into the DSP API Server pod,
// then retrieve the bundle to use during the storage health check
if p.APIServer.CABundle != nil {
cfgKey, cfgName := p.APIServer.CABundle.ConfigMapKey, p.APIServer.CABundle.ConfigMapName
err, val := util.GetConfigMapValue(ctx, cfgKey, cfgName, p.Namespace, client, log)
if err != nil {
log.Error(err, "Encountered error when attempting to retrieve CABundle from configmap")
return err
}
p.APICustomPemCerts = []byte(val)
}
}

if p.PersistenceAgent != nil {
persistenceAgentImageFromConfig := config.GetStringConfigWithDefault(config.PersistenceAgentImagePath, config.DefaultImageValue)
setStringDefault(persistenceAgentImageFromConfig, &p.PersistenceAgent.Image)
53 changes: 49 additions & 4 deletions controllers/storage.go
@@ -18,6 +18,7 @@ package controllers

import (
"context"
"crypto/x509"
"encoding/base64"
"errors"
"fmt"
@@ -26,6 +27,7 @@ import (
"github.com/minio/minio-go/v7/pkg/credentials"
dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
"github.com/opendatahub-io/data-science-pipelines-operator/controllers/util"
"net/http"
)

@@ -67,12 +69,46 @@ func createCredentialProvidersChain(accessKey, secretKey string) *credentials.Cr
return credentials.New(&credentials.Chain{Providers: providers})
}

var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint, bucket string, accesskey, secretkey []byte, secure bool) bool {
func getHttpsTransportWithCACert(log logr.Logger, pemCerts []byte) (*http.Transport, error) {
transport, err := minio.DefaultTransport(true)
if err != nil {
return nil, fmt.Errorf("Error creating default transport : %s", err)
}

if transport.TLSClientConfig.RootCAs == nil {
pool, err := x509.SystemCertPool()
if err != nil {
log.Error(err, "error initializing TLS Pool: %s")
transport.TLSClientConfig.RootCAs = x509.NewCertPool()
} else {
transport.TLSClientConfig.RootCAs = pool
}
}

if ok := transport.TLSClientConfig.RootCAs.AppendCertsFromPEM(pemCerts); !ok {
return nil, fmt.Errorf("error parsing CA Certificate, ensure provided certs are in valid PEM format")
}
return transport, nil
}

var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint, bucket string, accesskey, secretkey []byte, secure bool, pemCerts []byte) bool {
cred := createCredentialProvidersChain(string(accesskey), string(secretkey))
minioClient, err := minio.New(endpoint, &minio.Options{

opts := &minio.Options{
Creds: cred,
Secure: secure,
})
}

if len(pemCerts) != 0 {
tr, err := getHttpsTransportWithCACert(log, pemCerts)
if err != nil {
log.Error(err, "Encountered error when processing custom ca bundle.")
return false
}
opts.Transport = tr
}

minioClient, err := minio.New(endpoint, opts)
if err != nil {
log.Info(fmt.Sprintf("Could not connect to object storage endpoint: %s", endpoint))
return false
@@ -92,6 +128,15 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin
return true
}
}

if util.IsX509UnknownAuthorityError(err) {
log.Error(err, "Encountered x509 UnknownAuthorityError when connecting to ObjectStore. "+
"If using an tls S3 connection with self-signed certs, you may specify a custom CABundle "+
"to mount on the DSP API Server via the DSPA cr under the spec.cABundle field. If you have already "+
"provided a CABundle, verify the validity of the provided CABundle.")
return false
}

// Every other error means the endpoint is inaccessible, or the credentials provided do not have, at a minimum, GetObject permissions
log.Info(fmt.Sprintf("Could not connect to (%s), Error: %s", endpoint, err.Error()))
return false
@@ -129,7 +174,7 @@ func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dsp
return false
}

verified := ConnectAndQueryObjStore(ctx, log, endpoint, params.ObjectStorageConnection.Bucket, accesskey, secretkey, *params.ObjectStorageConnection.Secure)
verified := ConnectAndQueryObjStore(ctx, log, endpoint, params.ObjectStorageConnection.Bucket, accesskey, secretkey, *params.ObjectStorageConnection.Secure, params.APICustomPemCerts)
if verified {
log.Info("Object Storage Health Check Successful")
} else {
