diff --git a/pkg/scalers/scalersconfig/scalersconfig.go b/pkg/scalers/scalersconfig/scalersconfig.go
new file mode 100644
index 00000000000..67ed86cd7a0
--- /dev/null
+++ b/pkg/scalers/scalersconfig/scalersconfig.go
@@ -0,0 +1,71 @@
+/*
+Copyright 2024 The KEDA Authors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package scalersconfig
+
+import (
+	"time"
+
+	v2 "k8s.io/api/autoscaling/v2"
+
+	kedav1alpha1 "github.com/kedacore/keda/v2/apis/keda/v1alpha1"
+)
+
+// ScalerConfig contains config fields common for all scalers
+type ScalerConfig struct {
+	// ScalableObjectName specifies the name of the ScaledObject/ScaledJob that owns this scaler
+	ScalableObjectName string
+
+	// ScalableObjectNamespace specifies the namespace of the ScaledObject/ScaledJob that owns this scaler
+	ScalableObjectNamespace string
+
+	// ScalableObjectType specifies whether this Scaler is owned by a ScaledObject or a ScaledJob
+	ScalableObjectType string
+
+	// The timeout to be used on all HTTP requests from the controller
+	GlobalHTTPTimeout time.Duration
+
+	// Name of the trigger
+	TriggerName string
+
+	// Marks whether we should query metrics only during the polling interval
+	// Any requests for metrics in between are read from the cache
+	TriggerUseCachedMetrics bool
+
+	// TriggerMetadata
+	TriggerMetadata map[string]string
+
+	// ResolvedEnv
+	ResolvedEnv map[string]string
+
+	// AuthParams
+	AuthParams map[string]string
+
+	// PodIdentity
+	PodIdentity kedav1alpha1.AuthPodIdentity
+
+	// TriggerIndex
+	TriggerIndex int
+
+	// TriggerUniqueKey for the scaler across KEDA. Useful to uniquely identify the scaler, e.g. for the AWS credentials cache
+	TriggerUniqueKey string
+
+	// MetricType
+	MetricType v2.MetricTargetType
+
+	// When the scaler is used as a metric source for a composite scaler, the value is not required because it will be ignored
+	AsMetricSource bool
+}
diff --git a/pkg/scalers/scalersconfig/typed_config_test.go b/pkg/scalers/scalersconfig/typed_config_test.go
new file mode 100644
index 00000000000..866311f574a
--- /dev/null
+++ b/pkg/scalers/scalersconfig/typed_config_test.go
@@ -0,0 +1,585 @@
+/*
+Copyright 2024 The KEDA Authors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package scalersconfig
+
+import (
+	"net/url"
+	"testing"
+
+	. 
"github.com/onsi/gomega" +) + +// TestBasicTypedConfig tests the basic types for typed config +func TestBasicTypedConfig(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + "intVal": "1", + "boolValFromEnv": "boolVal", + "floatValFromEnv": "floatVal", + }, + ResolvedEnv: map[string]string{ + "boolVal": "true", + "floatVal": "1.1", + }, + AuthParams: map[string]string{ + "auth": "authValue", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, order=triggerMetadata"` + IntVal int `keda:"name=intVal, order=triggerMetadata"` + BoolVal bool `keda:"name=boolVal, order=resolvedEnv"` + FloatVal float64 `keda:"name=floatVal, order=resolvedEnv"` + AuthVal string `keda:"name=auth, order=authParams"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.IntVal).To(Equal(1)) + Expect(ts.BoolVal).To(BeTrue()) + Expect(ts.FloatVal).To(Equal(1.1)) + Expect(ts.AuthVal).To(Equal("authValue")) +} + +// TestParsingOrder tests the parsing order +func TestParsingOrder(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + "intVal": "1", + "intValFromEnv": "intVal", + "floatVal": "1.1", + "floatValFromEnv": "floatVal", + }, + ResolvedEnv: map[string]string{ + "stringVal": "value2", + "intVal": "2", + "floatVal": "2.2", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, order=resolvedEnv;triggerMetadata"` + IntVal int `keda:"name=intVal, order=triggerMetadata;resolvedEnv"` + FloatVal float64 `keda:"name=floatVal, order=resolvedEnv;triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.IntVal).To(Equal(1)) + Expect(ts.FloatVal).To(Equal(2.2)) +} + +// TestOptional tests the optional tag +func TestOptional(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, order=triggerMetadata"` + IntValOptional int `keda:"name=intVal, order=triggerMetadata, optional"` + IntValAlsoOptional int `keda:"name=intVal, order=triggerMetadata, optional=true"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.IntValOptional).To(Equal(0)) + Expect(ts.IntValAlsoOptional).To(Equal(0)) +} + +// TestMissing tests the missing parameter for compulsory tag +func TestMissing(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{} + + type testStruct struct { + StringVal string `keda:"name=stringVal, order=triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(MatchError(`missing required parameter "stringVal" in [triggerMetadata]`)) +} + +// TestDeprecated tests the deprecated tag +func TestDeprecated(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, order=triggerMetadata, deprecated=deprecated"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(MatchError(`parameter "stringVal" is deprecated`)) + + sc2 := &ScalerConfig{ + TriggerMetadata: map[string]string{}, + } + + ts2 := testStruct{} + err = 
sc2.TypedConfig(&ts2) + Expect(err).To(BeNil()) +} + +// TestDefaultValue tests the default tag +func TestDefaultValue(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + }, + } + + type testStruct struct { + BoolVal bool `keda:"name=boolVal, order=triggerMetadata, optional, default=true"` + StringVal string `keda:"name=stringVal, order=triggerMetadata, optional, default=d"` + StringVal2 string `keda:"name=stringVal2, order=triggerMetadata, optional, default=d"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.BoolVal).To(Equal(true)) + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.StringVal2).To(Equal("d")) +} + +// TestMap tests the map type +func TestMap(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "mapVal": "key1=1,key2=2", + }, + } + + type testStruct struct { + MapVal map[string]int `keda:"name=mapVal, order=triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.MapVal).To(HaveLen(2)) + Expect(ts.MapVal["key1"]).To(Equal(1)) + Expect(ts.MapVal["key2"]).To(Equal(2)) +} + +// TestSlice tests the slice type +func TestSlice(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "sliceVal": "1,2,3", + "sliceValWithSpaces": "1, 2, 3", + "sliceValWithOtherSeparator": "1;2;3", + }, + } + + type testStruct struct { + SliceVal []int `keda:"name=sliceVal, order=triggerMetadata"` + SliceValWithSpaces []int `keda:"name=sliceValWithSpaces, order=triggerMetadata"` + SliceValWithOtherSeparator []int `keda:"name=sliceValWithOtherSeparator, order=triggerMetadata, separator=;"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.SliceVal).To(HaveLen(3)) + Expect(ts.SliceVal[0]).To(Equal(1)) + Expect(ts.SliceVal[1]).To(Equal(2)) + Expect(ts.SliceVal[2]).To(Equal(3)) + Expect(ts.SliceValWithSpaces).To(HaveLen(3)) + Expect(ts.SliceValWithSpaces[0]).To(Equal(1)) + Expect(ts.SliceValWithSpaces[1]).To(Equal(2)) + Expect(ts.SliceValWithSpaces[2]).To(Equal(3)) + Expect(ts.SliceValWithOtherSeparator).To(HaveLen(3)) + Expect(ts.SliceValWithOtherSeparator[0]).To(Equal(1)) + Expect(ts.SliceValWithOtherSeparator[1]).To(Equal(2)) + Expect(ts.SliceValWithOtherSeparator[2]).To(Equal(3)) +} + +// TestEnum tests the enum type +func TestEnum(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "enumVal": "value1", + "enumSlice": "value1, value2", + }, + } + + type testStruct struct { + EnumVal string `keda:"name=enumVal, order=triggerMetadata, enum=value1;value2"` + EnumSlice []string `keda:"name=enumSlice, order=triggerMetadata, enum=value1;value2, optional"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.EnumVal).To(Equal("value1")) + Expect(ts.EnumSlice).To(HaveLen(2)) + Expect(ts.EnumSlice).To(ConsistOf("value1", "value2")) + + sc2 := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "enumVal": "value3", + }, + } + + ts2 := testStruct{} + err = sc2.TypedConfig(&ts2) + Expect(err).To(MatchError(`parameter "enumVal" value "value3" must be one of [value1 value2]`)) +} + +// TestExclusive tests the exclusiveSet type +func TestExclusive(t *testing.T) { + RegisterTestingT(t) + + type testStruct struct { + IntVal []int `keda:"name=intVal, order=triggerMetadata, exclusiveSet=1;4;5"` + } + + sc := 
&ScalerConfig{ + TriggerMetadata: map[string]string{ + "intVal": "1,2,3", + }, + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + sc2 := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "intVal": "1,4", + }, + } + + ts2 := testStruct{} + err = sc2.TypedConfig(&ts2) + Expect(err).To(MatchError(`parameter "intVal" value "1,4" must contain only one of [1 4 5]`)) +} + +// TestURLValues tests the url.Values type +func TestURLValues(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "endpointParams": "key1=value1&key2=value2&key1=value3", + }, + } + + type testStruct struct { + EndpointParams url.Values `keda:"name=endpointParams, order=authParams"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.EndpointParams).To(HaveLen(2)) + Expect(ts.EndpointParams).To(HaveKey("key1")) + Expect(ts.EndpointParams).To(HaveKey("key2")) + Expect(ts.EndpointParams["key1"]).To(ConsistOf("value1", "value3")) + Expect(ts.EndpointParams["key2"]).To(ConsistOf("value2")) +} + +// TestGenericMap tests the generic map type that is structurally similar to url.Values +func TestGenericMap(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "endpointParams": "key1=value1,key2=value2,key3=value3", + }, + } + + // structurally similar to url.Values but should behave as generic map + type testStruct struct { + EndpointParams map[string][]string `keda:"name=endpointParams, order=authParams"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.EndpointParams).To(HaveLen(3)) + Expect(ts.EndpointParams).To(HaveKey("key1")) + Expect(ts.EndpointParams).To(HaveKey("key2")) + Expect(ts.EndpointParams).To(HaveKey("key3")) + Expect(ts.EndpointParams["key1"]).To(ConsistOf("value1")) + Expect(ts.EndpointParams["key2"]).To(ConsistOf("value2")) + Expect(ts.EndpointParams["key3"]).To(ConsistOf("value3")) +} + +// TestNestedStruct tests the nested struct type +func TestNestedStruct(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type basicAuth struct { + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` + } + + type testStruct struct { + BA basicAuth `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BA.Username).To(Equal("user")) + Expect(ts.BA.Password).To(Equal("pass")) +} + +// TestEmbeddedStruct tests the embedded struct type +func TestEmbeddedStruct(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type testStruct struct { + BasicAuth struct { + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` + } `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BasicAuth.Username).To(Equal("user")) + Expect(ts.BasicAuth.Password).To(Equal("pass")) +} + +// TestWrongNestedStruct tests the wrong nested type +func TestWrongNestedStruct(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type testStruct struct { + WrongNesting int `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + 
Expect(err).To(MatchError(`nested parameter "WrongNesting" must be a struct, has kind "int"`)) +} + +// TestNestedOptional tests the nested optional type +func TestNestedOptional(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + }, + } + + type basicAuth struct { + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams, optional"` + AlsoOptionalThanksToParent string `keda:"name=optional, order=authParams"` + } + + type testStruct struct { + BA basicAuth `keda:"optional"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BA.Username).To(Equal("user")) + Expect(ts.BA.Password).To(Equal("")) + Expect(ts.BA.AlsoOptionalThanksToParent).To(Equal("")) +} + +// TestNestedPointer tests the nested pointer type +func TestNestedPointer(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type basicAuth struct { + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` + } + + type testStruct struct { + BA *basicAuth `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BA).ToNot(BeNil()) + Expect(ts.BA.Username).To(Equal("user")) + Expect(ts.BA.Password).To(Equal("pass")) +} + +// TestNoParsingOrder tests when no parsing order is provided +func TestNoParsingOrder(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "strVal": "value1", + "defaultVal": "value2", + }, + } + + type testStructMissing struct { + StrVal string `keda:"name=strVal, enum=value1;value2"` + } + tsm := testStructMissing{} + err := sc.TypedConfig(&tsm) + Expect(err).To(MatchError(ContainSubstring(`missing required parameter "strVal", no 'order' tag, provide any from [authParams resolvedEnv triggerMetadata]`))) + + type testStructDefault struct { + DefaultVal string `keda:"name=defaultVal, default=dv"` + } + tsd := testStructDefault{} + err = sc.TypedConfig(&tsd) + Expect(err).To(BeNil()) + Expect(tsd.DefaultVal).To(Equal("dv")) + + type testStructDefaultMissing struct { + DefaultVal2 string `keda:"name=defaultVal2, default=dv"` + } + tsdm := testStructDefaultMissing{} + err = sc.TypedConfig(&tsdm) + Expect(err).To(BeNil()) + Expect(tsdm.DefaultVal2).To(Equal("dv")) +} + +// TestRange tests the range param +func TestRange(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "range": "5-10", + "multiRange": "5-10, 15-20", + "dottedRange": "2..7", + "wrongRange": "5..3", + }, + } + + type testStruct struct { + Range []int `keda:"name=range, order=triggerMetadata, range=-"` + MultiRange []int `keda:"name=multiRange, order=triggerMetadata, range"` + DottedRange []int `keda:"name=dottedRange, order=triggerMetadata, range=.."` + WrongRange []int `keda:"name=wrongRange, order=triggerMetadata, range=.."` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.Range).To(HaveLen(6)) + Expect(ts.Range).To(ConsistOf(5, 6, 7, 8, 9, 10)) + Expect(ts.MultiRange).To(HaveLen(12)) + Expect(ts.MultiRange).To(ConsistOf(5, 6, 7, 8, 9, 10, 15, 16, 17, 18, 19, 20)) + Expect(ts.DottedRange).To(HaveLen(6)) + Expect(ts.DottedRange).To(ConsistOf(2, 3, 4, 5, 6, 7)) + Expect(ts.WrongRange).To(HaveLen(0)) +} + +// TestMultiName tests the multi name 
param +func TestMultiName(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "property1": "aaa", + }, + } + + sc2 := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "property2": "bbb", + }, + } + + type testStruct struct { + Property string `keda:"name=property1;property2, order=triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.Property).To(Equal("aaa")) + + err = sc2.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.Property).To(Equal("bbb")) +} diff --git a/pkg/scalers/scalersconfig/typedconfig.go b/pkg/scalers/scalersconfig/typedconfig.go new file mode 100644 index 00000000000..4e61f3e288d --- /dev/null +++ b/pkg/scalers/scalersconfig/typedconfig.go @@ -0,0 +1,505 @@ +/* +Copyright 2024 The KEDA Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package scalersconfig + +import ( + "encoding/json" + "errors" + "fmt" + "net/url" + "reflect" + "runtime/debug" + "strconv" + "strings" + + "golang.org/x/exp/maps" + "golang.org/x/exp/slices" +) + +// CustomValidator is an interface that can be implemented to validate the configuration of the typed config +type CustomValidator interface { + Validate() error +} + +// ParsingOrder is a type that represents the order in which the parameters are parsed +type ParsingOrder string + +// Constants that represent the order in which the parameters are parsed +const ( + TriggerMetadata ParsingOrder = "triggerMetadata" + ResolvedEnv ParsingOrder = "resolvedEnv" + AuthParams ParsingOrder = "authParams" +) + +// allowedParsingOrderMap is a map with set of valid parsing orders +var allowedParsingOrderMap = map[ParsingOrder]bool{ + TriggerMetadata: true, + ResolvedEnv: true, + AuthParams: true, +} + +// separators for field tag structure +// e.g. 
name=stringVal,order=triggerMetadata;resolvedEnv;authParams,optional
+const (
+	tagSeparator      = ","
+	tagKeySeparator   = "="
+	tagValueSeparator = ";"
+)
+
+// separators for map and slice elements
+const (
+	elemKeyValSeparator = "="
+)
+
+// field tag parameters
+const (
+	optionalTag     = "optional"
+	deprecatedTag   = "deprecated"
+	defaultTag      = "default"
+	orderTag        = "order"
+	nameTag         = "name"
+	enumTag         = "enum"
+	exclusiveSetTag = "exclusiveSet"
+	rangeTag        = "range"
+	separatorTag    = "separator"
+)
+
+// Params is a struct that represents the parameter list that can be used in the keda tag
+type Params struct {
+	// FieldName is the name of the field in the struct
+	FieldName string
+
+	// Names is the 'name' tag parameter defining the key in triggerMetadata, resolvedEnv or authParams
+	Names []string
+
+	// Optional is the 'optional' tag parameter defining if the parameter is optional
+	Optional bool
+
+	// Order is the 'order' tag parameter defining the parsing order in which the parameter is looked up
+	// in the triggerMetadata, resolvedEnv or authParams maps
+	Order []ParsingOrder
+
+	// Default is the 'default' tag parameter defining the default value of the parameter if it's not found
+	// in any of the maps from ParsingOrder
+	Default string
+
+	// Deprecated is the 'deprecated' tag parameter; if any of the maps contains this parameter, it is considered
+	// an error and the DeprecatedMessage is returned to the user
+	Deprecated string
+
+	// Enum is the 'enum' tag parameter defining the list of possible values for the parameter
+	Enum []string
+
+	// ExclusiveSet is the 'exclusiveSet' tag parameter defining the list of values that are mutually exclusive
+	ExclusiveSet []string
+
+	// RangeSeparator is the 'range' tag parameter defining the separator for range values
+	RangeSeparator string
+
+	// Separator is the 'separator' tag parameter defining the separator used for slice and map elements
+	Separator string
+}
+
+// Name returns the name of the parameter (or a comma-separated list of names if it has multiple)
+func (p Params) Name() string {
+	return strings.Join(p.Names, ",")
+}
+
+// IsNested is a function that returns true if the parameter is nested
+func (p Params) IsNested() bool {
+	return len(p.Names) == 0
+}
+
+// IsDeprecated is a function that returns true if the parameter is deprecated
+func (p Params) IsDeprecated() bool {
+	return p.Deprecated != ""
+}
+
+// DeprecatedMessage is a function that returns the optional deprecated message if the parameter is deprecated
+func (p Params) DeprecatedMessage() string {
+	if p.Deprecated == deprecatedTag {
+		return ""
+	}
+	return fmt.Sprintf(": %s", p.Deprecated)
+}
+
+// TypedConfig is a function that is used to unmarshal the TriggerMetadata, ResolvedEnv and AuthParams,
+// populating the provided typedConfig whose structure fields along with complementary field tags declaratively
+// define the parsing rules
+func (sc *ScalerConfig) TypedConfig(typedConfig any) (err error) {
+	defer func() {
+		if r := recover(); r != nil {
+			// this shouldn't happen, but calling certain reflection functions may result in a panic;
+			// if it does, it's better to return an error with a stack trace and reject the config
+			// rather than crash KEDA
+			err = fmt.Errorf("failed to parse typed config %T: recovered from panic %v\n%v", typedConfig, r, string(debug.Stack()))
+		}
+	}()
+	err = sc.parseTypedConfig(typedConfig, false)
+	return
+}
+
+// parseTypedConfig is a function that is used to unmarshal the TriggerMetadata, ResolvedEnv and AuthParams
+// this can be called recursively to parse 
nested structures +func (sc *ScalerConfig) parseTypedConfig(typedConfig any, parentOptional bool) error { + t := reflect.TypeOf(typedConfig) + if t.Kind() != reflect.Pointer { + return fmt.Errorf("typedConfig must be a pointer") + } + t = t.Elem() + v := reflect.ValueOf(typedConfig).Elem() + + errs := []error{} + for i := 0; i < t.NumField(); i++ { + fieldType := t.Field(i) + fieldValue := v.Field(i) + tag, exists := fieldType.Tag.Lookup("keda") + if !exists { + continue + } + tagParams, err := paramsFromTag(tag, fieldType) + if err != nil { + errs = append(errs, err) + continue + } + tagParams.Optional = tagParams.Optional || parentOptional + if err := sc.setValue(fieldValue, tagParams); err != nil { + errs = append(errs, err) + } + } + if validator, ok := typedConfig.(CustomValidator); ok { + if err := validator.Validate(); err != nil { + errs = append(errs, err) + } + } + return errors.Join(errs...) +} + +// setValue is a function that sets the value of the field based on the provided params +func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error { + valFromConfig, exists := sc.configParamValue(params) + if exists && params.IsDeprecated() { + return fmt.Errorf("parameter %q is deprecated%v", params.Name(), params.DeprecatedMessage()) + } + if !exists && params.Default != "" { + exists = true + valFromConfig = params.Default + } + if !exists && (params.Optional || params.IsDeprecated()) { + return nil + } + if !exists && !(params.Optional || params.IsDeprecated()) { + if len(params.Order) == 0 { + apo := maps.Keys(allowedParsingOrderMap) + slices.Sort(apo) + return fmt.Errorf("missing required parameter %q, no 'order' tag, provide any from %v", params.Name(), apo) + } + return fmt.Errorf("missing required parameter %q in %v", params.Name(), params.Order) + } + if params.Enum != nil { + enumMap := make(map[string]bool) + for _, e := range params.Enum { + enumMap[e] = true + } + missingMap := make(map[string]bool) + split := splitWithSeparator(valFromConfig, params.Separator) + for _, s := range split { + s := strings.TrimSpace(s) + if !enumMap[s] { + missingMap[s] = true + } + } + if len(missingMap) > 0 { + return fmt.Errorf("parameter %q value %q must be one of %v", params.Name(), valFromConfig, params.Enum) + } + } + if params.ExclusiveSet != nil { + exclusiveMap := make(map[string]bool) + for _, e := range params.ExclusiveSet { + exclusiveMap[e] = true + } + split := splitWithSeparator(valFromConfig, params.Separator) + exclusiveCount := 0 + for _, s := range split { + s := strings.TrimSpace(s) + if exclusiveMap[s] { + exclusiveCount++ + } + } + if exclusiveCount > 1 { + return fmt.Errorf("parameter %q value %q must contain only one of %v", params.Name(), valFromConfig, params.ExclusiveSet) + } + } + if params.IsNested() { + for field.Kind() == reflect.Ptr { + field.Set(reflect.New(field.Type().Elem())) + field = field.Elem() + } + if field.Kind() != reflect.Struct { + return fmt.Errorf("nested parameter %q must be a struct, has kind %q", params.FieldName, field.Kind()) + } + return sc.parseTypedConfig(field.Addr().Interface(), params.Optional) + } + if err := setConfigValueHelper(params, valFromConfig, field); err != nil { + return fmt.Errorf("unable to set param %q value %q: %w", params.Name(), valFromConfig, err) + } + return nil +} + +// setConfigValueURLParams is a function that sets the value of the url.Values field +func setConfigValueURLParams(params Params, valFromConfig string, field reflect.Value) error { + 
field.Set(reflect.MakeMap(reflect.MapOf(field.Type().Key(), field.Type().Elem()))) + vals, err := url.ParseQuery(valFromConfig) + if err != nil { + return fmt.Errorf("expected url.Values, unable to parse query %q: %w", valFromConfig, err) + } + for k, vs := range vals { + ifcMapKeyElem := reflect.New(field.Type().Key()).Elem() + ifcMapValueElem := reflect.New(field.Type().Elem()).Elem() + if err := setConfigValueHelper(params, k, ifcMapKeyElem); err != nil { + return fmt.Errorf("map key %q: %w", k, err) + } + for _, v := range vs { + ifcMapValueElem.Set(reflect.Append(ifcMapValueElem, reflect.ValueOf(v))) + } + field.SetMapIndex(ifcMapKeyElem, ifcMapValueElem) + } + return nil +} + +// setConfigValueMap is a function that sets the value of the map field +func setConfigValueMap(params Params, valFromConfig string, field reflect.Value) error { + field.Set(reflect.MakeMap(reflect.MapOf(field.Type().Key(), field.Type().Elem()))) + split := splitWithSeparator(valFromConfig, params.Separator) + for _, s := range split { + s := strings.TrimSpace(s) + kv := strings.Split(s, elemKeyValSeparator) + if len(kv) != 2 { + return fmt.Errorf("expected format key%vvalue, got %q", elemKeyValSeparator, s) + } + key := strings.TrimSpace(kv[0]) + val := strings.TrimSpace(kv[1]) + ifcKeyElem := reflect.New(field.Type().Key()).Elem() + if err := setConfigValueHelper(params, key, ifcKeyElem); err != nil { + return fmt.Errorf("map key %q: %w", key, err) + } + ifcValueElem := reflect.New(field.Type().Elem()).Elem() + if err := setConfigValueHelper(params, val, ifcValueElem); err != nil { + return fmt.Errorf("map key %q, value %q: %w", key, val, err) + } + field.SetMapIndex(ifcKeyElem, ifcValueElem) + } + return nil +} + +// canRange is a function that checks if the value can be ranged +func canRange(valFromConfig, elemRangeSeparator string, field reflect.Value) bool { + if elemRangeSeparator == "" { + return false + } + if field.Kind() != reflect.Slice { + return false + } + elemIfc := reflect.New(field.Type().Elem()).Interface() + elemVal := reflect.ValueOf(elemIfc).Elem() + if !elemVal.CanInt() { + return false + } + return strings.Contains(valFromConfig, elemRangeSeparator) +} + +// splitWithSeparator is a function that splits on default or custom separator +func splitWithSeparator(valFromConfig, customSeparator string) []string { + separator := "," + if customSeparator != "" { + separator = customSeparator + } + return strings.Split(valFromConfig, separator) +} + +// setConfigValueRange is a function that sets the value of the range field +func setConfigValueRange(params Params, valFromConfig string, field reflect.Value) error { + rangeSplit := strings.Split(valFromConfig, params.RangeSeparator) + if len(rangeSplit) != 2 { + return fmt.Errorf("expected format start%vend, got %q", params.RangeSeparator, valFromConfig) + } + start := reflect.New(field.Type().Elem()).Interface() + end := reflect.New(field.Type().Elem()).Interface() + if err := json.Unmarshal([]byte(rangeSplit[0]), &start); err != nil { + return fmt.Errorf("unable to parse start value %q: %w", rangeSplit[0], err) + } + if err := json.Unmarshal([]byte(rangeSplit[1]), &end); err != nil { + return fmt.Errorf("unable to parse end value %q: %w", rangeSplit[1], err) + } + + startVal := reflect.ValueOf(start).Elem() + endVal := reflect.ValueOf(end).Elem() + for i := startVal.Int(); i <= endVal.Int(); i++ { + elemVal := reflect.New(field.Type().Elem()).Elem() + elemVal.SetInt(i) + field.Set(reflect.Append(field, elemVal)) + } + return nil +} + +// 
setConfigValueSlice is a function that sets the value of the slice field
+func setConfigValueSlice(params Params, valFromConfig string, field reflect.Value) error {
+	elemIfc := reflect.New(field.Type().Elem()).Interface()
+	split := splitWithSeparator(valFromConfig, params.Separator)
+	for i, s := range split {
+		s := strings.TrimSpace(s)
+		if canRange(s, params.RangeSeparator, field) {
+			if err := setConfigValueRange(params, s, field); err != nil {
+				return fmt.Errorf("slice element %d: %w", i, err)
+			}
+		} else {
+			if err := setConfigValueHelper(params, s, reflect.ValueOf(elemIfc).Elem()); err != nil {
+				return fmt.Errorf("slice element %d: %w", i, err)
+			}
+			field.Set(reflect.Append(field, reflect.ValueOf(elemIfc).Elem()))
+		}
+	}
+	return nil
+}
+
+// setConfigValueHelper is a function that sets the value of the field based on its type
+func setConfigValueHelper(params Params, valFromConfig string, field reflect.Value) error {
+	paramValue := reflect.ValueOf(valFromConfig)
+	if paramValue.Type().AssignableTo(field.Type()) {
+		field.SetString(valFromConfig)
+		return nil
+	}
+	if paramValue.Type().ConvertibleTo(field.Type()) {
+		field.Set(paramValue.Convert(field.Type()))
+		return nil
+	}
+	if field.Type() == reflect.TypeOf(url.Values{}) {
+		return setConfigValueURLParams(params, valFromConfig, field)
+	}
+	if field.Kind() == reflect.Map {
+		return setConfigValueMap(params, valFromConfig, field)
+	}
+	if field.Kind() == reflect.Slice {
+		return setConfigValueSlice(params, valFromConfig, field)
+	}
+	if field.CanInterface() {
+		ifc := reflect.New(field.Type()).Interface()
+		if err := json.Unmarshal([]byte(valFromConfig), &ifc); err != nil {
+			return fmt.Errorf("unable to unmarshal to field type %v: %w", field.Type(), err)
+		}
+		field.Set(reflect.ValueOf(ifc).Elem())
+		return nil
+	}
+	return fmt.Errorf("unable to find matching parser for field type %v", field.Type())
+}
+
+// configParamValue is a function that returns the value of the parameter based on the parsing order
+func (sc *ScalerConfig) configParamValue(params Params) (string, bool) {
+	for _, po := range params.Order {
+		var m map[string]string
+		for _, key := range params.Names {
+			switch po {
+			case TriggerMetadata:
+				m = sc.TriggerMetadata
+			case AuthParams:
+				m = sc.AuthParams
+			case ResolvedEnv:
+				m = sc.ResolvedEnv
+				key = sc.TriggerMetadata[fmt.Sprintf("%sFromEnv", key)]
+			default:
+				// this is checked when parsing the tags, but adding a default case to avoid any potential future problems
+				return "", false
+			}
+			param, ok := m[key]
+			param = strings.TrimSpace(param)
+			if ok && param != "" {
+				return param, true
+			}
+		}
+	}
+	return "", params.IsNested()
+}
+
+// paramsFromTag is a function that returns the Params struct based on the field tag
+func paramsFromTag(tag string, field reflect.StructField) (Params, error) {
+	params := Params{FieldName: field.Name}
+	tagSplit := strings.Split(tag, tagSeparator)
+	for _, ts := range tagSplit {
+		tsplit := strings.Split(ts, tagKeySeparator)
+		tsplit[0] = strings.TrimSpace(tsplit[0])
+		switch tsplit[0] {
+		case optionalTag:
+			if len(tsplit) == 1 {
+				params.Optional = true
+			}
+			if len(tsplit) > 1 {
+				params.Optional, _ = strconv.ParseBool(strings.TrimSpace(tsplit[1]))
+			}
+		case orderTag:
+			if len(tsplit) > 1 {
+				order := strings.Split(tsplit[1], tagValueSeparator)
+				for _, po := range order {
+					poTyped := ParsingOrder(strings.TrimSpace(po))
+					if !allowedParsingOrderMap[poTyped] {
+						apo := maps.Keys(allowedParsingOrderMap)
+						slices.Sort(apo)
+						return params, fmt.Errorf("unknown parsing order 
value %s, has to be one of %s", po, apo) + } + params.Order = append(params.Order, poTyped) + } + } + case nameTag: + if len(tsplit) > 1 { + params.Names = strings.Split(strings.TrimSpace(tsplit[1]), tagValueSeparator) + } + case deprecatedTag: + if len(tsplit) == 1 { + params.Deprecated = deprecatedTag + } else { + params.Deprecated = strings.TrimSpace(tsplit[1]) + } + case defaultTag: + if len(tsplit) > 1 { + params.Default = strings.TrimSpace(tsplit[1]) + } + case enumTag: + if len(tsplit) > 1 { + params.Enum = strings.Split(tsplit[1], tagValueSeparator) + } + case exclusiveSetTag: + if len(tsplit) > 1 { + params.ExclusiveSet = strings.Split(tsplit[1], tagValueSeparator) + } + case rangeTag: + if len(tsplit) == 1 { + params.RangeSeparator = "-" + } + if len(tsplit) == 2 { + params.RangeSeparator = strings.TrimSpace(tsplit[1]) + } + case separatorTag: + if len(tsplit) > 1 { + params.Separator = strings.TrimSpace(tsplit[1]) + } + case "": + continue + default: + return params, fmt.Errorf("unknown tag param %s: %s", tsplit[0], tag) + } + } + return params, nil +}
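Reviewer note, not part of the patch: a minimal sketch of how a scaler would consume the new API under the tag grammar introduced above (name, order, optional, default, enum, plus the default ',' separator for slices). The demoMetadata struct, its field names, and the trigger values are hypothetical and only illustrative.

package main

import (
	"fmt"

	"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
)

// demoMetadata is a hypothetical scaler metadata struct; each field is resolved
// declaratively from TriggerMetadata/ResolvedEnv/AuthParams by TypedConfig.
type demoMetadata struct {
	Endpoint string   `keda:"name=endpoint, order=triggerMetadata"`
	Password string   `keda:"name=password, order=authParams;resolvedEnv"`
	Timeout  int      `keda:"name=timeout,  order=triggerMetadata, optional, default=10"`
	Mode     string   `keda:"name=mode,     order=triggerMetadata, enum=fast;safe, default=safe"`
	Hosts    []string `keda:"name=hosts,    order=triggerMetadata"`
}

func main() {
	cfg := &scalersconfig.ScalerConfig{
		TriggerMetadata: map[string]string{
			"endpoint": "http://example.local",
			"timeout":  "30",
			"hosts":    "a, b, c",
		},
		AuthParams: map[string]string{"password": "secret"},
	}

	meta := demoMetadata{}
	if err := cfg.TypedConfig(&meta); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Printf("%+v\n", meta)
}

With those inputs the struct should come back with Endpoint and Password filled from their maps, Timeout parsed to 30, Mode falling back to the "safe" default (validated against the enum), and Hosts split into the three elements.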
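Also not part of the patch: because parseTypedConfig invokes the optional CustomValidator hook after all tagged fields are populated, cross-field checks can live on the metadata struct itself. A hypothetical sketch continuing the example above:

// demoMetadata could additionally implement scalersconfig.CustomValidator;
// TypedConfig calls Validate once parsing is done and joins any error it returns
// with the per-field parsing errors.
func (m *demoMetadata) Validate() error {
	if m.Timeout <= 0 {
		return fmt.Errorf("timeout must be positive, got %d", m.Timeout)
	}
	return nil
}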