SS 1233 Kubernetes manifest yaml validation #255

Merged · 20 commits · Dec 10, 2024
Changes from 16 commits
Commits (20)
f12e315
Ignoring dot files from model diagrams
alfredeen Nov 22, 2024
268a62d
Added validation of user app k8s deployment manifest yaml.
alfredeen Nov 22, 2024
eff7571
Minor changes to tasks
alfredeen Nov 26, 2024
cf50975
Merge branch 'develop' into ss-1073-shinyproxy-volume-support
alfredeen Nov 26, 2024
90113d0
Created a class and test for k8s deployment manifests.
alfredeen Nov 27, 2024
cc704a7
Added more methods and tests to deployment manifest class. Refactorin…
alfredeen Nov 28, 2024
4703e66
Added more manifest validation
alfredeen Nov 29, 2024
1f50a25
Trying to fix helm template command permission error by setting repo ca…
alfredeen Nov 29, 2024
59d730f
Creating custom values file for unit test runs.
alfredeen Nov 29, 2024
b03b3df
Added manifest validation with python lib kubernetes-validate
alfredeen Nov 29, 2024
fd6043f
Cleaned up some of the manifest validation logic.
alfredeen Dec 2, 2024
aefc983
Added more tests of an invalid manifest
alfredeen Dec 2, 2024
33a95ba
Added function to extract and validate the kubernetes-pod-patches sec…
alfredeen Dec 2, 2024
9aabe19
Cleaned up tasks and made chart version dynamic
alfredeen Dec 3, 2024
4e7b747
Merge branch 'develop' into ss-1073-shinyproxy-volume-support
alfredeen Dec 3, 2024
70e4dbd
Merge branch 'develop' into ss-1073-shinyproxy-volume-support
alfredeen Dec 5, 2024
b86c844
Added logging and removed an unused line
alfredeen Dec 9, 2024
da85fcd
Using named tuples for more structured method return values.
alfredeen Dec 9, 2024
6d7b13d
Added a new Django setting CLUSTER_VERSION. Made some return types mo…
alfredeen Dec 9, 2024
7a5ec60
Merge branch 'develop' into ss-1073-shinyproxy-volume-support
alfredeen Dec 9, 2024
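
The validation itself (commit b03b3df) is backed by the kubernetes-validate Python library, with the target cluster version supplied via the new CLUSTER_VERSION Django setting (commit 6d7b13d). A minimal sketch of how that library is typically called — the manifest content and version string below are illustrative, not taken from this PR:

import yaml
import kubernetes_validate  # pip install kubernetes-validate

# Illustrative manifest; the PR validates `helm template` output instead.
manifest = """
apiVersion: v1
kind: ConfigMap
metadata:
  name: demo-config
data:
  key: value
"""

for doc in yaml.safe_load_all(manifest):
    if not doc:
        continue
    try:
        # In the PR, the version would come from settings.CLUSTER_VERSION.
        kubernetes_validate.validate(doc, "1.28", strict=True)
        print(f"{doc['kind']} is valid")
    except kubernetes_validate.ValidationError as e:
        print(f"Invalid manifest: {e.message}")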
3 changes: 3 additions & 0 deletions .gitignore
@@ -63,6 +63,9 @@ coverage.xml
.hypothesis/
.pytest_cache/

# Model diagrams from graphviz
*.dot

# Translations
*.mo
*.pot
128 changes: 123 additions & 5 deletions apps/tasks.py
@@ -4,6 +4,7 @@
import yaml
from celery import shared_task
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.utils import timezone
@@ -108,8 +109,10 @@ def helm_install(release_name, chart, namespace="default", values_file=None, ver


@shared_task
def helm_delete(release_name: str, namespace: str = "default") -> tuple[str | None, str | None]:
"""
Executes a Helm delete command.
"""
command = f"helm uninstall {release_name} --namespace {namespace} --wait"
# Execute the command
try:
@@ -119,24 +122,129 @@ def helm_delete(release_name, namespace="default"):
return e.stdout, e.stderr


@shared_task
def helm_template(
    chart: str, values_file: str, namespace: str = "default", version: str | None = None
) -> tuple[str | None, str | None]:
"""
Executes a Helm template command.
"""
command = f"helm template tmp-release-name {chart} -f {values_file} --namespace {namespace}"

# Append version if deploying via ghcr
if version:
command += f" --version {version} --repository-cache /app/charts/.cache/helm/repository"

# Execute the command
try:
result = subprocess.run(command.split(" "), check=True, text=True, capture_output=True)
return result.stdout, None
except subprocess.CalledProcessError as e:
return e.stdout, e.stderr


@shared_task
def helm_lint(chart: str, values_file: str, namespace: str) -> tuple[str | None, str | None]:
"""
Executes a Helm lint command.
"""
command = f"helm lint {chart} -f {values_file} --namespace {namespace}"
# Execute the command
try:
result = subprocess.run(command.split(" "), check=True, text=True, capture_output=True)
return result.stdout, None
except subprocess.CalledProcessError as e:
return e.stdout, e.stderr


@shared_task
def _kubectl_apply_dry(deployment_file: str, target_strategy: str = "client") -> tuple[str | None, str | None]:
"""
Executes a kubectl apply --dry-run command.
NOTE: This does not appear to be working, but kept for continued testing.
"""
command = f"kubectl apply --dry-run={target_strategy} -f {deployment_file}"
# Execute the command
try:
        result = subprocess.run(command.split(" "), check=True, text=True, capture_output=True)
        return result.stdout, None
except subprocess.CalledProcessError as e:
return e.stdout, e.stderr


def get_manifest_yaml(release_name: str, namespace: str = "default") -> tuple[str | None, str | None]:
command = f"helm get manifest {release_name} --namespace {namespace}"
# command = f"kubectl get configmap cm -n default -o yaml | yq eval '.data[\"application.yml\"]'"
# Execute the command
logger.debug(f"Executing command: {command}")
try:
result = subprocess.run(command.split(" "), check=True, text=True, capture_output=True)
return result.stdout, None
except subprocess.CalledProcessError as e:
return e.stdout, e.stderr


@shared_task
@transaction.atomic
def deploy_resource(serialized_instance):
instance = deserialize(serialized_instance)
logger.info("Deploying resource for instance %s", instance)
values = instance.k8s_values
release = values["subdomain"]
if "ghcr" in instance.chart:
version = instance.chart.split(":")[-1]
chart = "oci://" + instance.chart.split(":")[0]
else:
version = None
chart = instance.chart
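    # For example (illustrative values): an instance.chart of
    # "ghcr.io/org/charts/app:1.2.3" resolves to chart
    # "oci://ghcr.io/org/charts/app" and version "1.2.3".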

# Use a KubernetesDeploymentManifest to manage the manifest validation and files
from apps.types_.kubernetes_deployment_manifest import KubernetesDeploymentManifest

kdm = KubernetesDeploymentManifest()

# Save helm values file for internal reference
values_file = f"charts/values/{str(uuid.uuid4())}.yaml"
values_file = kdm.get_filepaths()["values_file"]
with open(values_file, "w") as f:
f.write(yaml.dump(values))

valid_deployment = True
deployment_file = None

# In development, also generate and validate the k8s deployment manifest
if settings.DEBUG:
logger.debug(f"Generating and validating k8s deployment yaml for release {release} before deployment.")

output, error = kdm.generate_manifest_yaml_from_template(
chart, values_file, values["namespace"], version, save_to_file=True
)

deployment_file = kdm.get_filepaths()["deployment_file"]

# Validate the manifest yaml documents
is_valid, validation_output = kdm.validate_manifest(output)

if is_valid:
logger.debug(f"The deployment manifest file is valid for release {release}")

# Also validate the kubernetes-pod-patches section
kpp_data = kdm.extract_kubernetes_pod_patches_from_manifest(output)

if kpp_data:
is_valid, message = kdm.validate_kubernetes_pod_patches_yaml(kpp_data)

if not is_valid:
logger.debug(f"The kubernetes-pod-patches section is invalid for release {release}. {message}")
valid_deployment = False
else:
valid_deployment = False

if not valid_deployment:
logger.warning(f"The deployment manifest file is INVALID for release {release}. {validation_output}")

# Install the app using Helm install
output, error = helm_install(release, chart, values["namespace"], values_file, version)
success = not error

helm_info = {"success": success, "info": {"stdout": output, "stderr": error}}
@@ -147,7 +255,17 @@ def deploy_resource(serialized_instance):
instance.app_status.save()
instance.save()

subprocess.run(["rm", "-f", values_file])
# In development, also generate and validate the k8s deployment manifest
if settings.DEBUG:
# Previously, we generated and validated the deployment after creation
# output, error = get_manifest_yaml(release)
pass

if valid_deployment:
        # If valid, delete both the values file and the deployment file (if one exists)
subprocess.run(["rm", "-f", values_file])
if deployment_file:
subprocess.run(["rm", "-f", deployment_file])


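
Taken together, the DEBUG-only branch of deploy_resource boils down to the following call sequence on KubernetesDeploymentManifest. This condensed sketch uses only methods that appear in the diff above; the chart reference, values path, namespace, and version are placeholders:

from apps.types_.kubernetes_deployment_manifest import KubernetesDeploymentManifest

kdm = KubernetesDeploymentManifest()

# Render the chart to manifest yaml (wraps the helm_template task) and save it to file.
output, error = kdm.generate_manifest_yaml_from_template(
    "oci://ghcr.io/example-org/example-chart",  # placeholder chart
    "charts/values/example.yaml",               # placeholder values file
    "default",                                  # placeholder namespace
    "1.0.0",                                    # placeholder chart version
    save_to_file=True,
)

# Validate every document in the rendered manifest.
is_valid, validation_output = kdm.validate_manifest(output)

# If the manifest passes, also validate the kubernetes-pod-patches section, if present.
if is_valid:
    kpp_data = kdm.extract_kubernetes_pod_patches_from_manifest(output)
    if kpp_data:
        is_valid, message = kdm.validate_kubernetes_pod_patches_yaml(kpp_data)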