diff --git a/src/dbt_jobs_as_code/exporter/export.py b/src/dbt_jobs_as_code/exporter/export.py
index 50d5887..e26aee2 100644
--- a/src/dbt_jobs_as_code/exporter/export.py
+++ b/src/dbt_jobs_as_code/exporter/export.py
@@ -10,7 +10,8 @@ def export_jobs_yml(jobs: list[JobDefinition], include_linked_id: bool = False):
 
     export_yml = {"jobs": {}}
     for id, cloud_job in enumerate(jobs):
-        export_yml["jobs"][f"import_{id + 1}"] = cloud_job.to_load_format(include_linked_id)
+        yaml_key = cloud_job.identifier if cloud_job.identifier else f"import_{id + 1}"
+        export_yml["jobs"][yaml_key] = cloud_job.to_load_format(include_linked_id)
 
     print(
         "# yaml-language-server: $schema=https://raw.githubusercontent.com/dbt-labs/dbt-jobs-as-code/main/src/dbt_jobs_as_code/schemas/load_job_schema.json"
diff --git a/src/dbt_jobs_as_code/loader/load.py b/src/dbt_jobs_as_code/loader/load.py
index f7119eb..7da3ea3 100644
--- a/src/dbt_jobs_as_code/loader/load.py
+++ b/src/dbt_jobs_as_code/loader/load.py
@@ -23,7 +23,7 @@ def load_job_configuration(config_files: List[str], vars_file: Optional[List[str
     else:
         config = _load_yaml_no_template(config_files)
 
-    if not config["jobs"]:
+    if config.get("jobs", {}) == {}:
         return Config(jobs={})
 
     date_config = [job.get("schedule", {}).get("date", None) for job in config["jobs"].values()]
@@ -61,7 +61,7 @@ def _load_yaml_no_template(config_files: List[str]) -> dict:
         config = yaml.safe_load(config_string)
         if config:
             # Merge the jobs from each file into combined_config
-            if "jobs" in config:
+            if config.get("jobs", {}) != {}:
                 if "jobs" not in combined_config:
                     combined_config["jobs"] = {}
                 combined_config["jobs"].update(config["jobs"])
diff --git a/tests/exporter/test_export.py b/tests/exporter/test_export.py
index 818fdfc..bfaf9ad 100644
--- a/tests/exporter/test_export.py
+++ b/tests/exporter/test_export.py
@@ -1,5 +1,9 @@
 import json
 
+import pytest
+from jsonschema import validate
+from ruamel.yaml import YAML
+
 from dbt_jobs_as_code.exporter.export import export_jobs_yml
 from dbt_jobs_as_code.schemas.common_types import (
     Date,
@@ -10,8 +14,22 @@
     Triggers,
 )
 from dbt_jobs_as_code.schemas.job import JobDefinition
-from jsonschema import validate
-from ruamel.yaml import YAML
+
+
+@pytest.fixture
+def base_job_definition():
+    return JobDefinition(
+        account_id=1,
+        project_id=1,
+        environment_id=1,
+        name="Test Job",
+        settings={},
+        run_generate_sources=False,
+        execute_steps=[],
+        generate_docs=False,
+        schedule={"cron": "0 14 * * 0,1,2,3,4,5,6"},
+        triggers={},
+    )
 
 
 def test_export_jobs_yml(capsys):
@@ -99,3 +117,57 @@
     yaml = YAML(typ="safe")
     yaml_data = yaml.load(captured.out.strip())
     validate(instance=yaml_data, schema=json.loads(schema))
+
+
+def test_export_jobs_yml_with_identifier(base_job_definition, capsys):
+    # Create a job with identifier
+    job_with_identifier = base_job_definition.model_copy()
+    job_with_identifier.identifier = "existing_identifier"
+
+    # Create a job without identifier
+    job_without_identifier = base_job_definition.model_copy()
+
+    jobs = [job_with_identifier, job_without_identifier]
+
+    # Export jobs and capture output
+    export_jobs_yml(jobs)
+    captured = capsys.readouterr()
+
+    # Parse the YAML output (skipping the first two lines which contain the schema)
+    yaml = YAML()
+    exported_jobs = yaml.load("\n".join(captured.out.split("\n")[2:]))
+
+    # Verify the job keys
+    assert "existing_identifier" in exported_jobs["jobs"]
+    assert "import_2" in exported_jobs["jobs"]
+
+    # Verify the job contents
+    assert exported_jobs["jobs"]["existing_identifier"]["name"] == "Test Job"
+    assert exported_jobs["jobs"]["import_2"]["name"] == "Test Job"
+
+
+def test_export_jobs_yml_with_linked_id(base_job_definition, capsys):
+    # Create a job with both identifier and id
+    job = base_job_definition.model_copy()
+    job.identifier = "test_identifier"
+    job.id = 123
+
+    # Export with include_linked_id=True
+    export_jobs_yml([job], include_linked_id=True)
+    captured = capsys.readouterr()
+
+    yaml = YAML()
+    exported_jobs = yaml.load("\n".join(captured.out.split("\n")[2:]))
+
+    # Verify linked_id is included and matches the id
+    assert exported_jobs["jobs"]["test_identifier"]["linked_id"] == 123
+
+    # Export with include_linked_id=False
+    export_jobs_yml([job], include_linked_id=False)
+    captured = capsys.readouterr()
+
+    yaml = YAML()
+    exported_jobs = yaml.load("\n".join(captured.out.split("\n")[2:]))
+
+    # Verify linked_id is not included
+    assert "linked_id" not in exported_jobs["jobs"]["test_identifier"]