fix: health hpa when only the second condition is False
Signed-off-by: Arnaud Farbos <[email protected]>
afarbos committed Sep 5, 2024
1 parent bd7681a commit e460207
Showing 2 changed files with 47 additions and 4 deletions.
11 changes: 7 additions & 4 deletions pkg/health/health_hpa.go
@@ -3,6 +3,7 @@ package health
 import (
 	"encoding/json"
 	"fmt"
+	"strings"
 
 	autoscalingv1 "k8s.io/api/autoscaling/v1"
 	autoscalingv2 "k8s.io/api/autoscaling/v2"
@@ -132,6 +133,7 @@ func getAutoScalingV1HPAHealth(hpa *autoscalingv1.HorizontalPodAutoscaler) (*Hea
 }
 
 func checkConditions(conditions []hpaCondition, progressingStatus *HealthStatus) (*HealthStatus, error) {
+	healthyMessages := []string{}
 	for _, condition := range conditions {
 		if isDegraded(&condition) {
 			return &HealthStatus{
@@ -141,13 +143,14 @@ func checkConditions(conditions []hpaCondition, progressingStatus *HealthStatus)
 		}
 
 		if isHealthy(&condition) {
-			return &HealthStatus{
-				Status:  HealthStatusHealthy,
-				Message: condition.Message,
-			}, nil
+			healthyMessages = append(healthyMessages, condition.Message)
 		}
 	}
 
+	if len(conditions) == len(healthyMessages) {
+		return &HealthStatus{Status: HealthStatusHealthy, Message: strings.Join(healthyMessages, ",")}, nil
+	}
+
 	return progressingStatus, nil
 }
 
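To make the change easier to follow, below is a sketch of checkConditions as it reads after this commit, wrapped in simplified stand-ins for the package's HealthStatus type, status constants, and hpaCondition struct. The isDegraded and isHealthy bodies are placeholders (the diff only shows them being called), so treat this as an illustration of the new control flow rather than a copy of the actual file: a healthy first condition no longer short-circuits the loop, and Healthy is only reported once every condition is healthy.

package main

import (
	"fmt"
	"strings"
)

// Simplified stand-ins for the real definitions in pkg/health.
type HealthStatusCode string

const (
	HealthStatusHealthy     HealthStatusCode = "Healthy"
	HealthStatusDegraded    HealthStatusCode = "Degraded"
	HealthStatusProgressing HealthStatusCode = "Progressing"
)

type HealthStatus struct {
	Status  HealthStatusCode
	Message string
}

type hpaCondition struct {
	Type    string
	Reason  string
	Status  string
	Message string
}

// Placeholder predicates: the real checks in health_hpa.go are not part of
// this diff, so these only capture the rough idea (a False condition counts
// as degraded, a True condition counts as healthy).
func isDegraded(condition *hpaCondition) bool { return condition.Status == "False" }
func isHealthy(condition *hpaCondition) bool  { return condition.Status == "True" }

// checkConditions after this commit: healthy messages are collected and
// Healthy is only reported when every condition is healthy. Previously the
// function returned Healthy as soon as the first healthy condition was seen,
// so a failing second condition was never inspected.
func checkConditions(conditions []hpaCondition, progressingStatus *HealthStatus) (*HealthStatus, error) {
	healthyMessages := []string{}
	for _, condition := range conditions {
		if isDegraded(&condition) {
			return &HealthStatus{
				Status:  HealthStatusDegraded,
				Message: condition.Message,
			}, nil
		}

		if isHealthy(&condition) {
			healthyMessages = append(healthyMessages, condition.Message)
		}
	}

	if len(conditions) == len(healthyMessages) {
		return &HealthStatus{Status: HealthStatusHealthy, Message: strings.Join(healthyMessages, ",")}, nil
	}

	return progressingStatus, nil
}

func main() {
	// All conditions healthy: the messages are joined with "," and the HPA is Healthy.
	conditions := []hpaCondition{
		{Type: "AbleToScale", Status: "True", Message: "able to get the target's current scale"},
		{Type: "ScalingLimited", Status: "True", Message: "the desired count is within the acceptable range"},
	}
	status, _ := checkConditions(conditions, &HealthStatus{Status: HealthStatusProgressing})
	fmt.Printf("%s: %s\n", status.Status, status.Message)
}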
40 changes: 40 additions & 0 deletions pkg/health/testdata/hpa-v2-degraded-partially.yaml
@@ -0,0 +1,40 @@
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  creationTimestamp: "2022-01-17T14:22:27Z"
  name: sample
  uid: 0e6d855e-83ed-4ed5-b80a-461a750f14db
spec:
  maxReplicas: 2
  minReplicas: 1
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: argocd-server
  targetCPUUtilizationPercentage: 80
status:
  conditions:
    - lastTransitionTime: '2024-09-05T20:20:41Z'
      message: the HPA controller was able to get the target's current scale
      reason: SucceededGetScale
      status: 'True'
      type: AbleToScale
    - lastTransitionTime: '2024-09-05T20:20:56Z'
      message: >-
        the HPA was unable to compute the replica count: failed to get memory
        utilization: unable to get metrics for resource memory: unable to fetch
        metrics from resource metrics API: the server could not find the
        requested resource (get pods.metrics.k8s.io)
      reason: FailedGetResourceMetric
      status: 'False'
      type: ScalingActive
      type: ScalingLimited
  currentMetrics:
    - resource:
        current:
          averageUtilization: 6
          averageValue: 12m
        name: cpu
      type: Resource
  currentReplicas: 1
  desiredReplicas: 1
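Only the fixture above and the code change are part of this commit; no test file is touched. The snippet below is therefore a hypothetical package-internal test sketching what the fixture exercises, with the hpaCondition field names and the HealthStatusProgressing constant assumed from the surrounding package. Because the second condition block above ends with two type keys (ScalingActive and ScalingLimited), the sketch only asserts that the result is no longer Healthy rather than pinning down Degraded versus Progressing.

package health

import "testing"

// Hypothetical test for the new fixture: before this commit the early return
// on the first healthy condition (AbleToScale=True) reported the HPA as
// Healthy; with the fix the second, failing condition is also inspected.
func TestCheckConditionsPartiallyDegraded(t *testing.T) {
	conditions := []hpaCondition{
		{
			Type:    "AbleToScale",
			Reason:  "SucceededGetScale",
			Status:  "True",
			Message: "the HPA controller was able to get the target's current scale",
		},
		{
			Type:    "ScalingActive",
			Reason:  "FailedGetResourceMetric",
			Status:  "False",
			Message: "the HPA was unable to compute the replica count",
		},
	}

	status, err := checkConditions(conditions, &HealthStatus{Status: HealthStatusProgressing})
	if err != nil {
		t.Fatal(err)
	}
	if status.Status == HealthStatusHealthy {
		t.Fatalf("expected a non-Healthy status, got %s: %s", status.Status, status.Message)
	}
}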
