V1alpha2 KFService Spec for predict, explain, transform (kubeflow#300)
* v1alpha2 spec for predict,explain,transform

* Fix tests

* Change endpoints to nouns

* Fix default tests

* AlibiExplainSpec to AlibiExplainerSpec

* Update codegen
yuzisun authored and k8s-ci-robot committed Aug 26, 2019
1 parent 22ed6a3 commit a9f3608
Showing 19 changed files with 899 additions and 440 deletions.
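
Taken together, the diffs below replace the flat ModelSpec with an EndpointSpec: a required Predictor plus optional Explainer and Transformer services, with replica and service-account settings moved into an inline DeploymentSpec. A hedged sketch of the new shape, written as if inside the v1alpha2 package and using only types and fields visible in the diffs (the values are illustrative, not taken from the PR):

// Sketch: a default endpoint with a Tensorflow predictor and an optional Alibi explainer.
spec := KFServiceSpec{
    Default: EndpointSpec{
        Predictor: PredictorSpec{
            // "1-of" semantic: exactly one framework spec is set.
            Tensorflow:     &TensorflowSpec{ModelURI: "gs://testbucket/testmodel"},
            DeploymentSpec: DeploymentSpec{MinReplicas: 1, MaxReplicas: 3},
        },
        Explainer: &ExplainerSpec{
            Alibi: &AlibiExplainerSpec{Type: AlibiContrastiveExplainer},
        },
    },
}
_ = spec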
1 change: 0 additions & 1 deletion cmd/spec-gen/main.go
@@ -73,4 +73,3 @@ func swaggify(name string) string {
     name = strings.Replace(name, "/", ".", -1)
     return name
 }
-
429 changes: 266 additions & 163 deletions config/default/crds/serving_v1alpha2_kfservice.yaml

Large diffs are not rendered by default.

36 changes: 18 additions & 18 deletions pkg/apis/serving/v1alpha2/framework.go
@@ -42,19 +42,19 @@ var (
 )
 
 // Returns a URI to the model. This URI is passed to the model-initializer via the ModelInitializerSourceUriInternalAnnotationKey
-func (m *ModelSpec) GetModelSourceUri() string {
+func (m *PredictorSpec) GetModelSourceUri() string {
     return getHandler(m).GetModelSourceUri()
 }
 
-func (m *ModelSpec) CreateModelServingContainer(modelName string, config *FrameworksConfig) *v1.Container {
+func (m *PredictorSpec) CreateModelServingContainer(modelName string, config *FrameworksConfig) *v1.Container {
     return getHandler(m).CreateModelServingContainer(modelName, config)
 }
 
-func (m *ModelSpec) ApplyDefaults() {
+func (m *PredictorSpec) ApplyDefaults() {
     getHandler(m).ApplyDefaults()
 }
 
-func (m *ModelSpec) Validate() error {
+func (m *PredictorSpec) Validate() error {
     handler, err := makeHandler(m)
     if err != nil {
         return err
@@ -104,7 +104,7 @@ func isGPUEnabled(requirements v1.ResourceRequirements) bool {
     return ok
 }
 
-func getHandler(modelSpec *ModelSpec) FrameworkHandler {
+func getHandler(modelSpec *PredictorSpec) FrameworkHandler {
     handler, err := makeHandler(modelSpec)
     if err != nil {
         klog.Fatal(err)
@@ -113,25 +113,25 @@ func getHandler(modelSpec *ModelSpec) FrameworkHandler {
     return handler
 }
 
-func makeHandler(modelSpec *ModelSpec) (FrameworkHandler, error) {
+func makeHandler(predictorSpec *PredictorSpec) (FrameworkHandler, error) {
     handlers := []FrameworkHandler{}
-    if modelSpec.Custom != nil {
-        handlers = append(handlers, modelSpec.Custom)
+    if predictorSpec.Custom != nil {
+        handlers = append(handlers, predictorSpec.Custom)
     }
-    if modelSpec.XGBoost != nil {
-        handlers = append(handlers, modelSpec.XGBoost)
+    if predictorSpec.XGBoost != nil {
+        handlers = append(handlers, predictorSpec.XGBoost)
     }
-    if modelSpec.SKLearn != nil {
-        handlers = append(handlers, modelSpec.SKLearn)
+    if predictorSpec.SKLearn != nil {
+        handlers = append(handlers, predictorSpec.SKLearn)
     }
-    if modelSpec.Tensorflow != nil {
-        handlers = append(handlers, modelSpec.Tensorflow)
+    if predictorSpec.Tensorflow != nil {
+        handlers = append(handlers, predictorSpec.Tensorflow)
     }
-    if modelSpec.PyTorch != nil {
-        handlers = append(handlers, modelSpec.PyTorch)
+    if predictorSpec.PyTorch != nil {
+        handlers = append(handlers, predictorSpec.PyTorch)
     }
-    if modelSpec.TensorRT != nil {
-        handlers = append(handlers, modelSpec.TensorRT)
+    if predictorSpec.TensorRT != nil {
+        handlers = append(handlers, predictorSpec.TensorRT)
     }
     if len(handlers) == 0 {
         return nil, fmt.Errorf(AtLeastOneModelSpecViolatedError)
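The rename above is mechanical, but it makes the predictor the carrier of the framework "1-of" handler resolution. A hypothetical in-package usage sketch, relying only on the methods and fields shown in this diff:

predictor := &PredictorSpec{
    Tensorflow: &TensorflowSpec{ModelURI: "gs://testbucket/testmodel"},
}
predictor.ApplyDefaults() // delegates to the single configured framework handler
err := predictor.Validate()
_ = err // an empty PredictorSpec would instead hit the len(handlers) == 0 branch
        // and surface AtLeastOneModelSpecViolatedError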
4 changes: 2 additions & 2 deletions pkg/apis/serving/v1alpha2/kfservice_defaults.go
@@ -19,8 +19,8 @@ package v1alpha2
 // Default implements https://godoc.org/sigs.k8s.io/controller-runtime/pkg/webhook/admission#Defaulter
 func (kfsvc *KFService) Default() {
     logger.Info("Defaulting KFService", "namespace", kfsvc.Namespace, "name", kfsvc.Name)
-    kfsvc.Spec.Default.ApplyDefaults()
+    kfsvc.Spec.Default.Predictor.ApplyDefaults()
     if kfsvc.Spec.Canary != nil {
-        kfsvc.Spec.Canary.ApplyDefaults()
+        kfsvc.Spec.Canary.Predictor.ApplyDefaults()
     }
 }
26 changes: 14 additions & 12 deletions pkg/apis/serving/v1alpha2/kfservice_defaults_test.go
@@ -33,22 +33,24 @@ func TestTensorflowDefaults(t *testing.T) {
             Namespace: "default",
         },
         Spec: KFServiceSpec{
-            Default: ModelSpec{
-                Tensorflow: &TensorflowSpec{ModelURI: "gs://testbucket/testmodel"},
+            Default: EndpointSpec{
+                Predictor: PredictorSpec{
+                    Tensorflow: &TensorflowSpec{ModelURI: "gs://testbucket/testmodel"},
+                },
             },
         },
     }
     kfsvc.Spec.Canary = kfsvc.Spec.Default.DeepCopy()
-    kfsvc.Spec.Canary.Tensorflow.RuntimeVersion = "1.11"
-    kfsvc.Spec.Canary.Tensorflow.Resources.Requests = v1.ResourceList{v1.ResourceMemory: resource.MustParse("3Gi")}
+    kfsvc.Spec.Canary.Predictor.Tensorflow.RuntimeVersion = "1.11"
+    kfsvc.Spec.Canary.Predictor.Tensorflow.Resources.Requests = v1.ResourceList{v1.ResourceMemory: resource.MustParse("3Gi")}
     kfsvc.Default()
 
-    g.Expect(kfsvc.Spec.Default.Tensorflow.RuntimeVersion).To(gomega.Equal(DefaultTensorflowRuntimeVersion))
-    g.Expect(kfsvc.Spec.Default.Tensorflow.Resources.Requests[v1.ResourceCPU]).To(gomega.Equal(DefaultCPU))
-    g.Expect(kfsvc.Spec.Default.Tensorflow.Resources.Requests[v1.ResourceMemory]).To(gomega.Equal(DefaultMemory))
-    g.Expect(kfsvc.Spec.Default.Tensorflow.Resources.Limits[v1.ResourceCPU]).To(gomega.Equal(DefaultCPU))
-    g.Expect(kfsvc.Spec.Default.Tensorflow.Resources.Limits[v1.ResourceMemory]).To(gomega.Equal(DefaultMemory))
-    g.Expect(kfsvc.Spec.Canary.Tensorflow.RuntimeVersion).To(gomega.Equal("1.11"))
-    g.Expect(kfsvc.Spec.Canary.Tensorflow.Resources.Requests[v1.ResourceCPU]).To(gomega.Equal(DefaultCPU))
-    g.Expect(kfsvc.Spec.Canary.Tensorflow.Resources.Requests[v1.ResourceMemory]).To(gomega.Equal(resource.MustParse("3Gi")))
+    g.Expect(kfsvc.Spec.Default.Predictor.Tensorflow.RuntimeVersion).To(gomega.Equal(DefaultTensorflowRuntimeVersion))
+    g.Expect(kfsvc.Spec.Default.Predictor.Tensorflow.Resources.Requests[v1.ResourceCPU]).To(gomega.Equal(DefaultCPU))
+    g.Expect(kfsvc.Spec.Default.Predictor.Tensorflow.Resources.Requests[v1.ResourceMemory]).To(gomega.Equal(DefaultMemory))
+    g.Expect(kfsvc.Spec.Default.Predictor.Tensorflow.Resources.Limits[v1.ResourceCPU]).To(gomega.Equal(DefaultCPU))
+    g.Expect(kfsvc.Spec.Default.Predictor.Tensorflow.Resources.Limits[v1.ResourceMemory]).To(gomega.Equal(DefaultMemory))
+    g.Expect(kfsvc.Spec.Canary.Predictor.Tensorflow.RuntimeVersion).To(gomega.Equal("1.11"))
+    g.Expect(kfsvc.Spec.Canary.Predictor.Tensorflow.Resources.Requests[v1.ResourceCPU]).To(gomega.Equal(DefaultCPU))
+    g.Expect(kfsvc.Spec.Canary.Predictor.Tensorflow.Resources.Requests[v1.ResourceMemory]).To(gomega.Equal(resource.MustParse("3Gi")))
 }
67 changes: 52 additions & 15 deletions pkg/apis/serving/v1alpha2/kfservice_types.go
@@ -21,36 +21,73 @@ import (
 
 // KFServiceSpec defines the desired state of KFService
 type KFServiceSpec struct {
-    Default ModelSpec `json:"default"`
-    // Canary defines an alternate configuration to route a percentage of traffic.
-    Canary *ModelSpec `json:"canary,omitempty"`
-    CanaryTrafficPercent int `json:"canaryTrafficPercent,omitempty"`
+    // Default defines default KFService endpoints
+    // +required
+    Default EndpointSpec `json:"default"`
+    // Canary defines an alternate endpoints to route a percentage of traffic.
+    // +optional
+    Canary *EndpointSpec `json:"canary,omitempty"`
+    // CanaryTrafficPercent defines the percentage of traffic going to canary KFService endpoints
+    // +optional
+    CanaryTrafficPercent int `json:"canaryTrafficPercent,omitempty"`
 }
 
-// ModelSpec defines the configuration to route traffic to a predictor.
-type ModelSpec struct {
+type EndpointSpec struct {
+    // Predictor defines the model serving spec
+    // +required
+    Predictor PredictorSpec `json:"predictor"`
+
+    // Explainer defines the model explanation service spec
+    // explainer service calls to transformer or predictor service
+    // +optional
+    Explainer *ExplainerSpec `json:"explainer,omitempty"`
+
+    // Transformer defines the transformer service spec for pre/post processing
+    // transformer service calls to predictor service
+    // +optional
+    Transformer *TransformerSpec `json:"transformer,omitempty"`
+}
+
+// DeploymentSpec defines the configuration for a given KFService service component
+type DeploymentSpec struct {
+    // ServiceAccountName is the name of the ServiceAccount to use to run the service
+    // +optional
     ServiceAccountName string `json:"serviceAccountName,omitempty"`
+    // Minimum number of replicas, pods won't scale down to 0 in case of no traffic
+    // +optional
     MinReplicas int `json:"minReplicas,omitempty"`
+    // This is the up bound for autoscaler to scale to
+    // +optional
     MaxReplicas int `json:"maxReplicas,omitempty"`
+}
+
+// PredictorSpec defines the configuration to route traffic to a predictor.
+type PredictorSpec struct {
     // The following fields follow a "1-of" semantic. Users must specify exactly one spec.
     Custom *CustomSpec `json:"custom,omitempty"`
     Tensorflow *TensorflowSpec `json:"tensorflow,omitempty"`
     TensorRT *TensorRTSpec `json:"tensorrt,omitempty"`
     XGBoost *XGBoostSpec `json:"xgboost,omitempty"`
     SKLearn *SKLearnSpec `json:"sklearn,omitempty"`
     PyTorch *PyTorchSpec `json:"pytorch,omitempty"`
-    // Optional Explain specification to add a model explainer next to the chosen predictor.
-    // In future v1alpha2 the above model predictors would be moved down a level.
-    Explain *ExplainSpec `json:"explain,omitempty"`
 
+    DeploymentSpec `json:",inline"`
 }
 
-// ExplainSpec defines the arguments for a model explanation server
-type ExplainSpec struct {
+// ExplainerSpec defines the arguments for a model explanation server
+type ExplainerSpec struct {
     // The following fields follow a "1-of" semantic. Users must specify exactly one spec.
-    Alibi  *AlibiExplainSpec `json:"alibi,omitempty"`
-    Custom *CustomSpec       `json:"custom,omitempty"`
+    Alibi  *AlibiExplainerSpec `json:"alibi,omitempty"`
+    Custom *CustomSpec         `json:"custom,omitempty"`
 
+    DeploymentSpec `json:",inline"`
 }
 
+// TransformerSpec defines transformer service for pre/post processing
+type TransformerSpec struct {
+    Custom *CustomSpec `json:"custom,omitempty"`
+
+    DeploymentSpec `json:",inline"`
+}
+
 type AlibiExplainerType string
@@ -63,8 +100,8 @@ const (
     AlibiContrastiveExplainer AlibiExplainerType = "Contrastive"
 )
 
-// AlibiExplainSpec defines the arguments for configuring an Alibi Explanation Server
-type AlibiExplainSpec struct {
+// AlibiExplainerSpec defines the arguments for configuring an Alibi Explanation Server
+type AlibiExplainerSpec struct {
     // The type of Alibi explainer
     Type AlibiExplainerType `json:"type"`
     // The location of a trained explanation model
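One design point worth calling out: DeploymentSpec is embedded into each component with `json:",inline"`, so minReplicas, maxReplicas, and serviceAccountName surface directly on the predictor/explainer/transformer rather than under a nested key. A hedged illustration using plain encoding/json (anonymous embedding inlines the fields the same way the CRD presents them; TensorflowSpec's own field names are elided since that type is not part of this diff):

p := PredictorSpec{
    Tensorflow:     &TensorflowSpec{ModelURI: "gs://testbucket/testmodel"},
    DeploymentSpec: DeploymentSpec{MinReplicas: 1, MaxReplicas: 3},
}
b, _ := json.Marshal(p) // import "encoding/json"
fmt.Println(string(b))  // roughly: {"tensorflow":{...},"minReplicas":1,"maxReplicas":3}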
32 changes: 20 additions & 12 deletions pkg/apis/serving/v1alpha2/kfservice_types_test.go
@@ -36,21 +36,29 @@ func TestKFService(t *testing.T) {
             Namespace: "default",
         },
         Spec: KFServiceSpec{
-            Default: ModelSpec{
-                MinReplicas: 1,
-                MaxReplicas: 3,
-                Tensorflow: &TensorflowSpec{
-                    ModelURI:       "s3://test/mnist/export",
-                    RuntimeVersion: "1.13.0",
+            Default: EndpointSpec{
+                Predictor: PredictorSpec{
+                    DeploymentSpec: DeploymentSpec{
+                        MinReplicas: 1,
+                        MaxReplicas: 3,
+                    },
+                    Tensorflow: &TensorflowSpec{
+                        ModelURI:       "s3://test/mnist/export",
+                        RuntimeVersion: "1.13.0",
+                    },
                 },
             },
             CanaryTrafficPercent: 20,
-            Canary: &ModelSpec{
-                MinReplicas: 1,
-                MaxReplicas: 3,
-                Tensorflow: &TensorflowSpec{
-                    ModelURI:       "s3://test/mnist-2/export",
-                    RuntimeVersion: "1.13.0",
+            Canary: &EndpointSpec{
+                Predictor: PredictorSpec{
+                    DeploymentSpec: DeploymentSpec{
+                        MinReplicas: 1,
+                        MaxReplicas: 3,
+                    },
+                    Tensorflow: &TensorflowSpec{
+                        ModelURI:       "s3://test/mnist-2/export",
+                        RuntimeVersion: "1.13.0",
+                    },
                 },
             },
         },
10 changes: 6 additions & 4 deletions pkg/apis/serving/v1alpha2/kfservice_validation.go
@@ -64,12 +64,14 @@ func validateKFService(kfsvc *KFService) error {
     if kfsvc == nil {
         return fmt.Errorf("Unable to validate, KFService is nil")
     }
-    if err := validateModelSpec(&kfsvc.Spec.Default); err != nil {
+    if err := validateModelSpec(&kfsvc.Spec.Default.Predictor); err != nil {
         return err
     }
 
-    if err := validateModelSpec(kfsvc.Spec.Canary); err != nil {
-        return err
+    if kfsvc.Spec.Canary != nil {
+        if err := validateModelSpec(&kfsvc.Spec.Canary.Predictor); err != nil {
+            return err
+        }
     }
 
     if err := validateCanaryTrafficPercent(kfsvc.Spec); err != nil {
@@ -78,7 +80,7 @@ func validateKFService(kfsvc *KFService) error {
     return nil
 }
 
-func validateModelSpec(spec *ModelSpec) error {
+func validateModelSpec(spec *PredictorSpec) error {
     if spec == nil {
         return nil
     }
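The added nil check means a KFService without a canary endpoint now skips canary validation entirely; only the default predictor (plus the canary traffic percentage) is checked. A hedged in-package sketch:

kfsvc := &KFService{
    Spec: KFServiceSpec{
        Default: EndpointSpec{
            Predictor: PredictorSpec{
                Tensorflow: &TensorflowSpec{ModelURI: "s3://test/mnist/export"},
            },
        },
        // Canary stays nil, so validateModelSpec only runs for Default.Predictor.
    },
}
err := validateKFService(kfsvc)
_ = err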