diff --git a/tests/conftest.py b/tests/conftest.py
index 988e66e..e8c4e3c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -2,6 +2,7 @@
 
 import contextlib
 import os
+import random
 import time
 from datetime import datetime, timedelta, timezone
 from pathlib import Path
@@ -33,22 +34,9 @@ def faker():
     return Faker()
 
 
-def cleanup(cache_path):
-    for file in cache_path.rglob("*"):
-        if file.is_dir():
-            cleanup(cache_path)
-            continue
-        file.unlink()
-    cache_path.rmdir()
-
-
 @pytest.fixture(scope="session")
-def cache_path():
-    path = Path(__file__).resolve().parents[0] / Path("cache")
-    yield path
-    if path.exists():
-        with contextlib.suppress(PermissionError):  # Windows Compatibility Issues
-            cleanup(path)
+def number():
+    return random.randint(1, 100_000)
 
 
 @pytest.fixture(scope="session")
@@ -56,20 +44,14 @@ def config_setup() -> BasicAuth:
     return generate_authentication()
 
 
-@pytest.fixture(scope="session")
-def get_sqlite_cache(cache_path):
-    return SqliteCache(cache_path, timedelta(days=1))
-
-
-@pytest.fixture(scope="session")
-def get_json_cache(cache_path):
-    cache_path.mkdir(parents=True, exist_ok=True)
-    return JSONCache(cache_path, timedelta(days=1))
+@pytest.fixture()
+def get_sqlite_cache(tmp_path):
+    return SqliteCache(tmp_path, timedelta(days=1))
 
 
-@pytest.fixture(scope="session")
-def cache_list(cache_path, get_json_cache, get_sqlite_cache):
-    return (get_json_cache, get_sqlite_cache)
+@pytest.fixture()
+def get_json_cache(tmp_path):
+    return JSONCache(tmp_path, timedelta(days=1))
 
 
 @pytest.fixture(scope="session")
@@ -77,57 +59,34 @@ def get_workspace_id() -> int:
     return int(os.getenv("TOGGL_WORKSPACE_ID", "0"))
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def workspace_object(get_workspace_id, config_setup, get_json_cache):
     return WorkspaceEndpoint(get_workspace_id, config_setup, get_json_cache)
 
 
-def _project_cleanup(endpoint):
-    projects = endpoint.get_projects(refresh=True)
-    for project in projects:
-        with contextlib.suppress(HTTPError):
-            endpoint.delete_project(project)
-
-
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def project_object(get_workspace_id, config_setup, get_json_cache):
-    endpoint = ProjectEndpoint(get_workspace_id, config_setup, get_json_cache)
-    _project_cleanup(endpoint)
-    yield endpoint
-    _project_cleanup(endpoint)
+    return ProjectEndpoint(get_workspace_id, config_setup, get_json_cache)
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def user_object(get_workspace_id, config_setup, get_json_cache) -> UserEndpoint:
     return UserEndpoint(get_workspace_id, config_setup, get_json_cache)
 
 
-def _tracker_cleanup(endpoint, user_object):
-    trackers = user_object.get_trackers(refresh=True)
-    for tracker in trackers:
-        with contextlib.suppress(HTTPError):
-            endpoint.delete_tracker(tracker)
-
-
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def tracker_object(get_workspace_id, config_setup, get_json_cache, user_object):
-    endpoint = TrackerEndpoint(get_workspace_id, config_setup, get_json_cache)
-    _tracker_cleanup(endpoint, user_object)
-    yield endpoint
-    _tracker_cleanup(endpoint, user_object)
+    return TrackerEndpoint(get_workspace_id, config_setup, get_json_cache)
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def tracker_object_sqlite(
     get_workspace_id,
     config_setup,
     get_sqlite_cache,
     user_object,
 ):
-    endpoint = TrackerEndpoint(get_workspace_id, config_setup, get_sqlite_cache)
-    _tracker_cleanup(endpoint, user_object)
-    yield endpoint
-    _tracker_cleanup(endpoint, user_object)
+    return TrackerEndpoint(get_workspace_id, config_setup, get_sqlite_cache)
 
 
 @pytest.fixture()
@@ -142,35 +101,14 @@ def add_tracker(tracker_object, faker):
     tracker_object.delete_tracker(tracker)
 
 
-def _client_cleanup(endpoint):
-    clients = endpoint.get_clients(refresh=True)
-
-    for client in clients:
-        with contextlib.suppress(HTTPError):
-            endpoint.delete_client(client)
-
-
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def client_object(get_json_cache, get_workspace_id, config_setup):
-    endpoint = ClientEndpoint(get_workspace_id, config_setup, get_json_cache)
-    _client_cleanup(endpoint)
-    yield endpoint
-    _client_cleanup(endpoint)
+    return ClientEndpoint(get_workspace_id, config_setup, get_json_cache)
 
 
-def _tag_cleanup(endpoint):
-    tags = endpoint.get_tags(refresh=True)
-    for tag in tags:
-        with contextlib.suppress(HTTPError):
-            endpoint.delete_tag(tag)
-
-
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def tag_object(get_workspace_id, config_setup, get_json_cache):
-    endpoint = TagEndpoint(get_workspace_id, config_setup, get_json_cache)
-    _tag_cleanup(endpoint)
-    yield endpoint
-    _tag_cleanup(endpoint)
+    return TagEndpoint(get_workspace_id, config_setup, get_json_cache)
 
 
 class ModelTest(TogglClass):
@@ -192,22 +130,82 @@ def model(self) -> type[TogglTracker]:
         return TogglTracker
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def meta_object(config_setup, get_workspace_id, get_json_cache):
     return EndPointTest(get_workspace_id, config_setup, get_json_cache)
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def meta_object_sqlite(config_setup, get_workspace_id, get_sqlite_cache):
     return EndPointTest(get_workspace_id, config_setup, get_sqlite_cache)
 
 
+def _path_cleanup(cache_path):
+    for file in cache_path.rglob("*"):
+        if file.is_dir():
+            _path_cleanup(file)
+            continue
+        file.unlink()
+    cache_path.rmdir()
+
+
+def _tracker_cleanup(cache, wid, config):
+    user_object = UserEndpoint(wid, config, cache)
+    trackers = user_object.get_trackers(refresh=True)
+    endpoint = TrackerEndpoint(wid, config, cache)
+    for tracker in trackers:
+        with contextlib.suppress(HTTPError):
+            endpoint.delete_tracker(tracker)
+
+
+def _project_cleanup(cache, wid, config):
+    endpoint = ProjectEndpoint(wid, config, cache)
+    projects = endpoint.get_projects(refresh=True)
+    for project in projects:
+        with contextlib.suppress(HTTPError):
+            endpoint.delete_project(project)
+
+
+def _client_cleanup(cache, wid, config):
+    endpoint = ClientEndpoint(wid, config, cache)
+    clients = endpoint.get_clients(refresh=True)
+
+    for client in clients:
+        with contextlib.suppress(HTTPError):
+            endpoint.delete_client(client)
+
+
+def _tag_cleanup(cache, wid, config):
+    endpoint = TagEndpoint(wid, config, cache)
+    tags = endpoint.get_tags(refresh=True)
+    for tag in tags:
+        with contextlib.suppress(HTTPError):
+            endpoint.delete_tag(tag)
+
+
+def cleanup():
+    path = Path(__file__).parent / "cache"
+    wid = int(os.getenv("TOGGL_WORKSPACE_ID", "0"))
+    config = generate_authentication()
+    cache = JSONCache(path, timedelta(days=1))
+
+    _project_cleanup(cache, wid, config)
+    _tracker_cleanup(cache, wid, config)
+    _client_cleanup(cache, wid, config)
+    _tag_cleanup(cache, wid, config)
+    _path_cleanup(path)
+
+
 def pytest_sessionstart(session: pytest.Session):
-    pass
+    marks = session.config.getoption("-m", default="")
+    if not marks or "integration" in marks:
+        cleanup()
 
 
 def pytest_sessionfinish(session, exitstatus):
-    pass
+    marks = session.config.getoption("-m", default="")
+    if not marks or "integration" in marks:
+        cleanup()
 
 
 @pytest.fixture()
@@ -216,12 +214,12 @@ def model_data(get_workspace_id, faker):
 
     client = TogglClient(
         id=1,
-        name="test_client",
+        name=faker.name(),
         workspace=workspace.id,
     )
     project = TogglProject(
         id=1,
-        name="test_project",
+        name=faker.name(),
         workspace=workspace.id,
         color="000000",
         client=client.id,
@@ -229,47 +227,47 @@ def model_data(get_workspace_id, faker):
     )
     tag = TogglTag(
         id=1,
-        name="test_tag",
+        name=faker.name(),
         workspace=workspace.id,
     )
     return {
         "workspace": workspace,
-        "model": ModelTest(id=1, name="test_model"),
+        "model": ModelTest(id=1, name=faker.name()),
         "client": client,
         "project": project,
         "tracker": TogglTracker.from_kwargs(
             id=1,
-            name="test_tracker",
+            name=faker.name(),
            workspace=workspace.id,
             start="2020-01-01T00:00:00Z",
             duration=3600,
             stop="2020-01-01T01:00:00Z",
             project=project.id,
-            tags=[tag],
+            tags=[],
         ),
         "tag": tag,
     }
 
 
-@pytest.fixture(scope="module")
-def get_test_data(get_workspace_id):
+@pytest.fixture()
+def get_test_data(get_workspace_id, faker):
     return [
         {
             "id": 2,
             "workspace_id": get_workspace_id,
-            "description": "test",
+            "description": faker.name(),
             "start": "2020-01-01T00:00:00Z",
             "stop": "2020-01-01T01:00:00Z",
             "duration": 3600,
-            "tags": ["tag1", "tag2"],
+            "tags": [faker.name(), faker.name()],
         },
         {
             "id": 3,
             "workspace_id": get_workspace_id,
-            "description": "test2",
+            "description": faker.name(),
             "start": "2020-01-01T00:00:00Z",
             "stop": "2020-01-01T00:30:00Z",
             "duration": 1800,
-            "tags": ["tag1", "tag2"],
+            "tags": [faker.name(), faker.name()],
         },
     ]
diff --git a/tests/test_clients.py b/tests/test_clients.py
index 6ae8abb..4cb76ed 100644
--- a/tests/test_clients.py
+++ b/tests/test_clients.py
@@ -4,12 +4,12 @@
 from toggl_api.modules.models import TogglClient
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def create_client_body(get_workspace_id, faker):
     return ClientBody(name=faker.name(), workspace_id=get_workspace_id)
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def create_client(client_object, create_client_body):
     return client_object.create_client(create_client_body)
 
diff --git a/tests/test_meta/test_cache.py b/tests/test_meta/test_cache.py
index 9bc7999..e30cb90 100644
--- a/tests/test_meta/test_cache.py
+++ b/tests/test_meta/test_cache.py
@@ -55,9 +55,9 @@ def test_expiration_json(meta_object, model_data):
 
 
 @pytest.mark.unit()
-def test_encoder_json(model_data, cache_path):
+def test_encoder_json(model_data, tmp_path):
     model_data.pop("model")
-    cache_file = cache_path / "encoder.json"
+    cache_file = tmp_path / "encoder.json"
     with cache_file.open("w", encoding="utf-8") as f:
         json.dump(model_data, f, cls=CustomEncoder)
     with cache_file.open("r", encoding="utf-8") as f:
diff --git a/tests/test_meta/test_sqlite.py b/tests/test_meta/test_sqlite.py
index 8001fed..f4d99e5 100644
--- a/tests/test_meta/test_sqlite.py
+++ b/tests/test_meta/test_sqlite.py
@@ -12,9 +12,8 @@
 
 
 @pytest.fixture()
-def db_conn(cache_path):
-    cache_path.mkdir(parents=True, exist_ok=True)
-    cache_path /= "cache.sqlite"
+def db_conn(tmp_path):
+    cache_path = tmp_path / "cache.sqlite"
     engine = db.create_engine(f"sqlite:///{cache_path}")
     conn = engine.connect()
     yield engine
@@ -72,7 +71,6 @@ def test_db_creation(meta_object_sqlite):
 def test_add_entries_sqlite(meta_object_sqlite, model_data):
     tracker = model_data["tracker"]
     meta_object_sqlite.cache.add_entries(tracker)
-
     assert tracker in meta_object_sqlite.cache.load_cache()
 
 
@@ -98,30 +96,28 @@ def test_delete_entries_sqlite(meta_object_sqlite, model_data):
 @pytest.mark.unit()
 def test_find_sqlite(meta_object_sqlite, model_data):
     tracker = model_data["tracker"]
+    tracker.id += random.randint(50, 100_000)
     meta_object_sqlite.cache.add_entries(tracker)
-    tracker_data = {"id": tracker.id, "name": tracker.name}
-    assert tracker == meta_object_sqlite.cache.find_entry(tracker_data)
+
+    assert tracker == meta_object_sqlite.cache.find_entry(tracker, expire=False)
 
 
 @pytest.mark.unit()
 def test_query_sqlite(tracker_object_sqlite, model_data, faker):
-    tracker_object_sqlite.cache.session.data = []
     names = [faker.name() for _ in range(10)]
     tracker = model_data.pop("tracker")
     tracker.timestamp = datetime.now(timezone.utc)
     tracker_object_sqlite.save_cache(tracker, RequestMethod.GET)
 
+    d = asdict(tracker)
     for i in range(1, 11):
-        t = TogglTracker(**asdict(tracker))
-        t.id += i
-        t.name = names[i - 1]
-        t.timestamp = datetime.now(timezone.utc)
-        tracker_object_sqlite.save_cache(t, RequestMethod.GET)
+        d["id"] += i + 1
+        d["name"] = names[i - 1]
+        d["timestamp"] = datetime.now(timezone.utc)
+        tracker_object_sqlite.save_cache(tracker.from_kwargs(**d), RequestMethod.GET)
 
     tracker_object_sqlite.cache.commit()
 
-    assert tracker_object_sqlite.load_cache().count() == 11  # noqa: PLR2004
-
     assert tracker_object_sqlite.query(name=tracker.name)[0] == tracker
 
 
@@ -134,15 +130,10 @@ def test_expiration_sqlite(meta_object_sqlite, model_data):
     meta_object_sqlite.cache.add_entries(tracker)
     time.sleep(delay.total_seconds() + 2)
     tracker_data = {"id": tracker.id, "name": tracker.name}
-    assert (
-        meta_object_sqlite.cache.find_entry(
-            tracker_data,
-        )
-        is None
-    )
+    assert meta_object_sqlite.cache.find_entry(tracker_data) is None
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def user_object_sqlite(user_object, get_sqlite_cache):
     user_object.cache = get_sqlite_cache
     return user_object
@@ -155,6 +146,7 @@ def test_tracker_cache(
     httpx_mock,
 ):
     tracker = get_test_data[1]
+    tracker["tag_ids"] = [random.randint(1000, 100_000) for _ in range(2)]
     tracker_id = tracker["id"]
     httpx_mock.add_response(
         json=tracker,
diff --git a/tests/test_projects.py b/tests/test_projects.py
index ffda69a..ada3f85 100644
--- a/tests/test_projects.py
+++ b/tests/test_projects.py
@@ -7,7 +7,7 @@
 from toggl_api.utility import format_iso
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def project_body(faker, get_workspace_id):
     return ProjectBody(
         workspace_id=get_workspace_id,
@@ -17,7 +17,7 @@ def project_body(faker, get_workspace_id):
     )
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture()
 def create_project(
     project_object,
     project_body,
diff --git a/tests/test_tags.py b/tests/test_tags.py
index 09242a9..7816b39 100644
--- a/tests/test_tags.py
+++ b/tests/test_tags.py
@@ -3,14 +3,14 @@
 from toggl_api.modules.models import TogglTag
 
 
-@pytest.fixture(scope="session")
-def add_tag(tag_object):
-    return tag_object.create_tag(name="test_tag")
+@pytest.fixture()
+def add_tag(tag_object, faker):
+    return tag_object.create_tag(name=faker.name())
 
 
 @pytest.mark.unit()
-def test_tag_model(get_workspace_id):
-    data = {"id": 1, "name": "Test", "workspace_id": get_workspace_id}
+def test_tag_model(get_workspace_id, faker):
+    data = {"id": 1, "name": faker.name(), "workspace_id": get_workspace_id}
     tag = TogglTag.from_kwargs(**data)
     assert isinstance(tag, TogglTag)
     assert tag.id == data["id"]
@@ -18,16 +18,16 @@
 
 
 @pytest.mark.integration()
-def test_tag_creation(tag_object, get_workspace_id):
-    name = "test_tag_creation"
+def test_tag_creation(tag_object, get_workspace_id, faker):
+    name = faker.name()
     tag = tag_object.create_tag(name)
     assert isinstance(tag, TogglTag)
     assert tag.name == name
 
 
 @pytest.mark.integration()
-def test_tag_update(tag_object, get_workspace_id, add_tag, monkeypatch):
-    monkeypatch.setattr(add_tag, "name", "test_tag_update_2")
+def test_tag_update(tag_object, get_workspace_id, add_tag, monkeypatch, faker):
+    monkeypatch.setattr(add_tag, "name", faker.name())
     tag = tag_object.update_tag(add_tag)
     assert isinstance(tag, TogglTag)
     assert tag.name == add_tag.name
diff --git a/tests/test_trackers/conftest.py b/tests/test_trackers/conftest.py
index b9a10d4..2a01014 100644
--- a/tests/test_trackers/conftest.py
+++ b/tests/test_trackers/conftest.py
@@ -5,7 +5,7 @@
 from toggl_api import TrackerBody
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def create_body(get_workspace_id, faker):
     start = datetime.now(tz=timezone.utc)
     delta = timedelta(hours=1)
diff --git a/tests/test_trackers/test_trackers.py b/tests/test_trackers/test_trackers.py
index 3526081..c521ef0 100644
--- a/tests/test_trackers/test_trackers.py
+++ b/tests/test_trackers/test_trackers.py
@@ -30,8 +30,8 @@ def test_tracker_kwargs(get_workspace_id, faker):
     assert all(tag in tracker.tags for tag in data["tags"])
 
     data["tags"] = [
-        {"name": "awd1", "id": 1, "workspace_id": get_workspace_id},
-        {"name": "awd2", "id": 2, "workspace_id": get_workspace_id},
+        {"name": faker.name(), "id": 1, "workspace_id": get_workspace_id},
+        {"name": faker.name(), "id": 2, "workspace_id": get_workspace_id},
     ]
     assert all(TogglTag.from_kwargs(**tag) for tag in tracker.tags for tag in data["tags"])
 
@@ -44,7 +44,7 @@ def test_tracker_creation(add_tracker):
 @pytest.mark.integration()
 def test_tracker_editing(tracker_object, add_tracker, faker):
     new_description = TrackerBody(description=faker.name())
-    new_description.tags = ["new_tag", "new_tag2"]
+    new_description.tags = [faker.name(), faker.name()]
     data = tracker_object.edit_tracker(
         tracker=add_tracker,
         body=new_description,