Skip to content

Commit

Permalink
fix: generate the bundle path including prefix/suffix if available (#20)
Browse files Browse the repository at this point in the history
* generate the bundle path including prefix/suffix if available
  • Loading branch information
pariksheet authored Aug 25, 2023
1 parent 515b2f3 commit e925c58
Show file tree
Hide file tree
Showing 3 changed files with 341 additions and 0 deletions.
16 changes: 16 additions & 0 deletions brickflow/codegen/databricks_bundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -617,6 +617,22 @@ def proj_to_bundle(self) -> DatabricksAssetBundles:
/ self.env
)

# Optional workflow suffix, read from the BRICKFLOW_WORKFLOW_SUFFIX env var
# (None when unset, so the bundle path is unchanged by default).
bundle_suffix = config(
    BrickflowEnvVars.BRICKFLOW_WORKFLOW_SUFFIX.value,
    default=None,
)

# Optional workflow prefix, read from the BRICKFLOW_WORKFLOW_PREFIX env var.
bundle_prefix = config(
    BrickflowEnvVars.BRICKFLOW_WORKFLOW_PREFIX.value,
    default=None,
)

# Despite the names, both values are appended as path segments AFTER the
# env segment: <root>/<env>/<prefix>/<suffix> (prefix first, then suffix),
# matching the expected fixture path ".../local/_prefix/_suffix".
if bundle_prefix is not None:
    bundle_root_path = bundle_root_path / bundle_prefix

if bundle_suffix is not None:
    bundle_root_path = bundle_root_path / bundle_suffix

env_content = Environments(
workspace=Workspace(
root_path=str(bundle_root_path.as_posix()),
Expand Down
276 changes: 276 additions & 0 deletions tests/codegen/expected_bundles/local_bundle_prefix_suffix.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,276 @@
bundle:
name: test-project
environments:
test-project-local:
resources:
jobs:
test:
email_notifications: null
notification_settings: null
webhook_notifications: null
git_source: null
job_clusters: []
max_concurrent_runs: 1.0
name: test_user_test
run_as:
user_name: [email protected]
permissions:
- level: IS_OWNER
user_name: [email protected]
- level: CAN_MANAGE
user_name: [email protected]
- level: CAN_MANAGE_RUN
user_name: [email protected]
- level: CAN_VIEW
user_name: [email protected]
schedule:
quartz_cron_expression: '* * * * *'
timezone_id: UTC
tags:
brickflow_project_name: test-project
brickflow_deployment_mode: Databricks Asset Bundles
deployed_by: test_user
brickflow_version: 1.0.0
environment: local
test: test2
tasks:
- depends_on: []
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: custom_python_task_push
timeout_seconds: null
- depends_on: []
pipeline_task:
pipeline_id: ${resources.pipelines.test_hello_world.id}
task_key: dlt_pipeline
- depends_on: []
pipeline_task:
pipeline_id: ${resources.pipelines.test_hello_world.id}
task_key: dlt_pipeline_2
- depends_on: [ ]
email_notifications: { }
existing_cluster_id: existing_cluster_id
libraries: [ ]
max_retries: null
min_retry_interval_millis: null
notebook_task:
notebook_path: notebooks/notebook_a
retry_on_timeout: null
task_key: notebook_task_a
timeout_seconds: null
- depends_on: []
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
test: var
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: task_function
timeout_seconds: null
- depends_on:
- task_key: task_function
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: task_function_2
timeout_seconds: null
- depends_on:
- task_key: task_function_2
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: task_function_3
timeout_seconds: null
- depends_on:
- task_key: task_function_3
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: task_function_4
timeout_seconds: null
- depends_on: []
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
test: var
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: task_function_no_deco_args
timeout_seconds: null
- depends_on: []
email_notifications: {}
existing_cluster_id: existing_cluster_id
libraries: []
max_retries: null
min_retry_interval_millis: null
notebook_task:
base_parameters:
all_tasks1: test
all_tasks3: '123'
brickflow_env: local
brickflow_internal_only_run_tasks: ''
brickflow_internal_task_name: '{{task_key}}'
brickflow_internal_workflow_name: test
brickflow_internal_workflow_prefix: ''
brickflow_internal_workflow_suffix: ''
brickflow_job_id: '{{job_id}}'
brickflow_parent_run_id: '{{parent_run_id}}'
brickflow_run_id: '{{run_id}}'
brickflow_start_date: '{{start_date}}'
brickflow_start_time: '{{start_time}}'
brickflow_task_key: '{{task_key}}'
brickflow_task_retry_count: '{{task_retry_count}}'
notebook_path: test_databricks_bundle.py
source: WORKSPACE
retry_on_timeout: null
task_key: task_function_nokwargs
timeout_seconds: null
pipelines:
test_hello_world:
catalog: null
channel: current
configuration: {}
continuous: false
development: true
edition: advanced
libraries:
- notebook:
path: scripts/spark_script_2.py
name: test_user_hello world
photon: false
storage: '123'
target: null
workspace:
file_path: /Users/${workspace.current_user.userName}/.brickflow_bundles/test-project/local/_prefix/_suffix/files
root_path: /Users/${workspace.current_user.userName}/.brickflow_bundles/test-project/local/_prefix/_suffix
state_path: /Users/${workspace.current_user.userName}/.brickflow_bundles/test-project/local/_prefix/_suffix/state
workspace: {}
49 changes: 49 additions & 0 deletions tests/codegen/test_databricks_bundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,55 @@ def test_generate_bundle_local(
if os.path.exists(BUNDLE_FILE_NAME):
os.remove(BUNDLE_FILE_NAME)

@patch.dict(
    os.environ,
    {
        BrickflowEnvVars.BRICKFLOW_MODE.value: Stage.deploy.value,
        BrickflowEnvVars.BRICKFLOW_ENV.value: "local",
        BrickflowEnvVars.BRICKFLOW_DEPLOYMENT_MODE.value: BrickflowDeployMode.BUNDLE.value,
        BrickflowEnvVars.BRICKFLOW_WORKFLOW_PREFIX.value: "_prefix",
        BrickflowEnvVars.BRICKFLOW_WORKFLOW_SUFFIX.value: "_suffix",
    },
)
@patch("subprocess.check_output")
@patch("brickflow.context.ctx.get_parameter")
@patch("importlib.metadata.version")
def test_generate_bundle_local_prefix_suffix(
    self,
    bf_version_mock: Mock,
    dbutils: Mock,
    sub_proc_mock: Mock,
):
    """Deploying locally with prefix/suffix env vars set must emit a bundle
    whose workspace paths include the /_prefix/_suffix segments."""
    dbutils.return_value = None
    sub_proc_mock.return_value = b""
    bf_version_mock.return_value = "1.0.0"
    workspace_client = get_workspace_client_mock()

    # NOTE(review): caller introspection misbehaves past this point.
    # Only the tags/name mutator is wired in; the import mutator is
    # deliberately left out of this test.
    project_ctx = Project(
        "test-project",
        entry_point_path="test_databricks_bundle.py",
        codegen_kwargs={
            "mutators": [
                DatabricksBundleTagsAndNameMutator(
                    databricks_client=workspace_client
                )
            ]
        },
    )
    with project_ctx as proj:
        proj.add_workflow(wf)

    # The bundle file must exist and be non-empty.
    with open(BUNDLE_FILE_NAME, "r", encoding="utf-8") as bundle_file:
        bundle_content = bundle_file.read()
    assert bundle_content is not None
    assert len(bundle_content) > 0

    # Generated YAML must match the prefix/suffix expected fixture.
    actual = read_yaml_file(BUNDLE_FILE_NAME)
    expected = get_expected_bundle_yaml("local_bundle_prefix_suffix.yml")
    bf_version_mock.assert_called_once()
    assert_equal_dicts(actual, expected)

    # Clean up the generated bundle so later tests start fresh.
    if os.path.exists(BUNDLE_FILE_NAME):
        os.remove(BUNDLE_FILE_NAME)

@patch.dict(
os.environ,
{
Expand Down

0 comments on commit e925c58

Please sign in to comment.