Commit b341d58
fix: Skip InferenceService patching for KServe RawDeployment
1 parent e9a0204
1 file changed: controllers/inference_services.go (+24, -12 lines)

@@ -12,6 +12,12 @@ import (
     "strings"
 )
 
+const (
+    DEPLOYMENT_MODE_MODELMESH  = "ModelMesh"
+    DEPLOYMENT_MODE_RAW        = "RawDeployment"
+    DEPLOYMENT_MODE_SERVERLESS = "Serverless"
+)
+
 func (r *TrustyAIServiceReconciler) patchEnvVarsForDeployments(ctx context.Context, instance *trustyaiopendatahubiov1alpha1.TrustyAIService, deployments []appsv1.Deployment, envVarName string, url string, remove bool) (bool, error) {
     // Create volume and volume mount for this intance's TLS secrets
     certVolumes := TLSCertVolumes{}
@@ -199,20 +205,26 @@ func (r *TrustyAIServiceReconciler) handleInferenceServices(ctx context.Context,
 
     for _, infService := range inferenceServices.Items {
         annotations := infService.GetAnnotations()
-        // Check the annotation "serving.kserve.io/deploymentMode: ModelMesh"
-        if val, ok := annotations["serving.kserve.io/deploymentMode"]; ok && val == "ModelMesh" {
-            shouldContinue, err := r.patchEnvVarsByLabelForDeployments(ctx, instance, namespace, labelKey, labelValue, envVarName, crName, remove)
-            if err != nil {
-                log.FromContext(ctx).Error(err, "Could not patch environment variables for ModelMesh deployments.")
-                return shouldContinue, err
-            }
-        } else {
-            err := r.patchKServe(ctx, instance, infService, namespace, crName, remove)
-            if err != nil {
-                log.FromContext(ctx).Error(err, "Could not path InferenceLogger for KServe deployment.")
-                return false, err
+
+        // Check the annotation "serving.kserve.io/deploymentMode"
+        if val, ok := annotations["serving.kserve.io/deploymentMode"]; ok {
+            if val == DEPLOYMENT_MODE_RAW {
+                log.FromContext(ctx).Info("RawDeployment mode not supported by TrustyAI")
+                continue
+            } else if val == DEPLOYMENT_MODE_MODELMESH {
+                shouldContinue, err := r.patchEnvVarsByLabelForDeployments(ctx, instance, namespace, labelKey, labelValue, envVarName, crName, remove)
+                if err != nil {
+                    log.FromContext(ctx).Error(err, "could not patch environment variables for ModelMesh deployments")
+                    return shouldContinue, err
+                }
+                continue
             }
         }
+        err := r.patchKServe(ctx, instance, infService, namespace, crName, remove)
+        if err != nil {
+            log.FromContext(ctx).Error(err, "could not patch InferenceLogger for KServe deployment")
+            return false, err
+        }
     }
     return true, nil
 }
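
For reference, the control flow this commit introduces can be exercised in isolation. The sketch below is not part of the commit: it is a minimal standalone Go program, with a hypothetical decideAction helper, that mirrors the added constants and the serving.kserve.io/deploymentMode annotation check to show how each mode is handled (RawDeployment skipped, ModelMesh patched via Deployment environment variables, anything else routed to the KServe InferenceLogger patch).

package main

import "fmt"

// Deployment-mode values mirrored from the constants added in this commit.
const (
    DEPLOYMENT_MODE_MODELMESH  = "ModelMesh"
    DEPLOYMENT_MODE_RAW        = "RawDeployment"
    DEPLOYMENT_MODE_SERVERLESS = "Serverless"
)

// deploymentModeAnnotation is the KServe annotation inspected by the reconciler.
const deploymentModeAnnotation = "serving.kserve.io/deploymentMode"

// decideAction is a hypothetical helper (not in the commit) that reproduces the
// commit's dispatch: RawDeployment is skipped, ModelMesh gets its Deployments'
// env vars patched, and everything else, including a missing annotation, falls
// through to the KServe InferenceLogger patch.
func decideAction(annotations map[string]string) string {
    if val, ok := annotations[deploymentModeAnnotation]; ok {
        switch val {
        case DEPLOYMENT_MODE_RAW:
            return "skip (RawDeployment not supported by TrustyAI)"
        case DEPLOYMENT_MODE_MODELMESH:
            return "patch ModelMesh deployment env vars"
        }
    }
    return "patch KServe InferenceLogger"
}

func main() {
    // One example annotation map per deployment mode, plus the unannotated case.
    for _, annotations := range []map[string]string{
        {deploymentModeAnnotation: DEPLOYMENT_MODE_RAW},
        {deploymentModeAnnotation: DEPLOYMENT_MODE_MODELMESH},
        {deploymentModeAnnotation: DEPLOYMENT_MODE_SERVERLESS},
        {},
    } {
        fmt.Printf("%v -> %s\n", annotations, decideAction(annotations))
    }
}

Running the sketch prints one decision per annotation map, including the case where the annotation is absent, which the patched reconciler also routes to the KServe InferenceLogger patch.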
