Skip to content

Commit

Permalink
Fixing tests
Browse files — browse the repository at this point in the history
Loading branch information…
skrydal committed Sep 1, 2024
1 parent 52be907 commit 5918411
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import boto3
import pytest
from freezegun import freeze_time
from moto import mock_dynamodb
from moto import mock_dynamodb, mock_sts

from datahub.ingestion.glossary.classification_mixin import ClassificationConfig
from datahub.ingestion.glossary.classifier import DynamicTypedClassifierConfig
Expand All @@ -21,6 +21,7 @@

@freeze_time(FROZEN_TIME)
@mock_dynamodb
@mock_sts
@pytest.mark.integration
def test_dynamodb(pytestconfig, tmp_path):
boto3.setup_default_session()
Expand Down
5 changes: 4 additions & 1 deletion metadata-ingestion/tests/integration/s3/test_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import moto.s3
import pytest
from boto3.session import Session
from moto import mock_s3
from moto import mock_s3, mock_sts
from pydantic import ValidationError

from datahub.ingestion.run.pipeline import Pipeline, PipelineContext
Expand Down Expand Up @@ -158,6 +158,7 @@ def touch_local_files(pytestconfig):
s3_source_files = [(S3_SOURCE_FILES_PATH, p) for p in os.listdir(S3_SOURCE_FILES_PATH)]


@mock_sts
@pytest.mark.integration
@pytest.mark.parametrize("source_file_tuple", shared_source_files + s3_source_files)
def test_data_lake_s3_ingest(
Expand Down Expand Up @@ -195,6 +196,7 @@ def test_data_lake_s3_ingest(
)


@mock_sts
@pytest.mark.integration
@pytest.mark.parametrize("source_file_tuple", shared_source_files)
def test_data_lake_local_ingest(
Expand Down Expand Up @@ -253,6 +255,7 @@ def test_data_lake_local_ingest(
)


@mock_sts
def test_data_lake_incorrect_config_raises_error(tmp_path, mock_time):
ctx = PipelineContext(run_id="test-s3")

Expand Down

0 comments on commit 5918411

Please sign in to comment.