Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(api): add quality score to api #279

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""add-score-qualite

Revision ID: 68fe052dc63c
Revises: e3f3dfa4ad01
Create Date: 2024-08-13 15:13:29.690054

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "68fe052dc63c"  # unique id of this migration (matches the file's docstring)
down_revision = "e3f3dfa4ad01"  # previous migration in the linear chain
branch_labels = None  # this migration does not start a named branch
depends_on = None  # no cross-branch dependency required before running


def upgrade() -> None:
    """Add the mandatory ``score_qualite`` column to ``api__services``.

    The column is introduced in three steps so the migration also works
    on a table that already contains rows:

    1. create the column as nullable (existing rows would otherwise
       violate a NOT NULL constraint),
    2. backfill every pre-existing row with a neutral default score,
    3. tighten the column to NOT NULL once every row has a value.
    """
    # Step 1: nullable column, safe to add on a populated table.
    op.add_column(
        "api__services",
        sa.Column("score_qualite", sa.Float(), nullable=True),
    )
    # Step 2: backfill — 0.5 is the neutral default quality score.
    op.execute("UPDATE api__services SET score_qualite = 0.5")
    # Step 3: enforce the final NOT NULL contract.
    op.alter_column("api__services", "score_qualite", nullable=False)


def downgrade() -> None:
    """Revert the migration: drop ``score_qualite`` from ``api__services``.

    Dropping the column discards the backfilled scores; re-running
    :func:`upgrade` afterwards resets every row to the 0.5 default.
    """
    op.drop_column("api__services", column_name="score_qualite")
1 change: 1 addition & 0 deletions api/src/data_inclusion/api/inclusion_data/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,7 @@ class Service(Base):
zone_diffusion_code: Mapped[str | None]
zone_diffusion_nom: Mapped[str | None]
zone_diffusion_type: Mapped[str | None]
score_qualite: Mapped[float]

commune_: Mapped[Commune] = relationship(back_populates="services")

Expand Down
16 changes: 15 additions & 1 deletion api/src/data_inclusion/api/inclusion_data/schemas.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
from pydantic import BaseModel, ConfigDict
from textwrap import dedent
from typing import Annotated

from pydantic import BaseModel, ConfigDict, Field

from data_inclusion import schema

Expand All @@ -17,6 +20,17 @@ class Service(schema.Service):
formulaire_en_ligne: str | None = None
lien_source: str | None = None

score_qualite: Annotated[
float,
Field(
ge=0,
le=1,
description=dedent("""\
Score de qualité du service, défini et calculé par data·inclusion.
"""),
),
]


class Structure(schema.Structure):
model_config = ConfigDict(from_attributes=True, populate_by_name=True)
Expand Down
18 changes: 17 additions & 1 deletion api/tests/e2e/api/__snapshots__/test_inclusion_data.ambr
Original file line number Diff line number Diff line change
Expand Up @@ -1876,6 +1876,13 @@
],
"title": "Modes Orientation Accompagnateur Autres"
},
"score_qualite": {
"type": "number",
"maximum": 1.0,
"minimum": 0.0,
"title": "Score Qualite",
"description": "Score de qualité du service, défini et calculé par data·inclusion.\n"
},
"structure": {
"$ref": "#/components/schemas/Structure"
}
Expand All @@ -1886,6 +1893,7 @@
"structure_id",
"source",
"nom",
"score_qualite",
"structure"
],
"title": "DetailedService"
Expand Down Expand Up @@ -2961,14 +2969,22 @@
}
],
"title": "Modes Orientation Accompagnateur Autres"
},
"score_qualite": {
"type": "number",
"maximum": 1.0,
"minimum": 0.0,
"title": "Score Qualite",
"description": "Score de qualité du service, défini et calculé par data·inclusion.\n"
}
},
"type": "object",
"required": [
"id",
"structure_id",
"source",
"nom"
"nom",
"score_qualite"
],
"title": "Service"
},
Expand Down
1 change: 1 addition & 0 deletions api/tests/e2e/api/test_inclusion_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,6 +289,7 @@ def test_list_services_all(api_client):
"prise_rdv": "https://teixeira.fr/",
"profils": ["femmes"],
"recurrence": None,
"score_qualite": 0.5,
"source": "dora",
"structure_id": "prince-point-monde",
"telephone": "0102030405",
Expand Down
2 changes: 2 additions & 0 deletions api/tests/factories.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,3 +164,5 @@ class Meta:
zone_diffusion_type = None
zone_diffusion_code = None
zone_diffusion_nom = None

score_qualite = 0.5
1 change: 1 addition & 0 deletions datawarehouse/processings/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ dependencies = [
"pandas~=2.2",
"requests~=2.31",
"tenacity",
"data-inclusion-schema",
]

[project.optional-dependencies]
Expand Down
20 changes: 19 additions & 1 deletion datawarehouse/processings/requirements/dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,19 +1,29 @@
# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml --extra=dev --output-file=requirements/dev-requirements.txt
annotated-types==0.7.0
# via pydantic
certifi==2024.8.30
# via requests
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.3.2
# via requests
data-inclusion-schema @ https://github.com/gip-inclusion/data-inclusion-schema/archive/vmttn/feat/score-qualite.zip#sha256=148145139b64888d885ea33ccac08d044000c983fcbb0bd3298af579e0931f31
# via data-inclusion-processings (pyproject.toml)
distlib==0.3.8
# via virtualenv
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
filelock==3.16.0
# via virtualenv
identify==2.6.0
# via pre-commit
idna==3.8
# via requests
# via
# email-validator
# requests
nodeenv==1.9.1
# via pre-commit
numpy==2.1.1
Expand All @@ -26,6 +36,10 @@ platformdirs==4.3.2
# via virtualenv
pre-commit==3.8.0
# via data-inclusion-processings (pyproject.toml)
pydantic==2.9.1
# via data-inclusion-schema
pydantic-core==2.23.3
# via pydantic
python-dateutil==2.9.0.post0
# via pandas
pytz==2024.2
Expand All @@ -40,6 +54,10 @@ six==1.16.0
# via python-dateutil
tenacity==9.0.0
# via data-inclusion-processings (pyproject.toml)
typing-extensions==4.12.2
# via
# pydantic
# pydantic-core
tzdata==2024.1
# via pandas
urllib3==2.2.2
Expand Down
20 changes: 19 additions & 1 deletion datawarehouse/processings/requirements/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,17 +1,31 @@
# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml --output-file=requirements/requirements.txt
annotated-types==0.7.0
# via pydantic
certifi==2024.8.30
# via requests
charset-normalizer==3.3.2
# via requests
data-inclusion-schema @ https://github.com/gip-inclusion/data-inclusion-schema/archive/vmttn/feat/score-qualite.zip#sha256=148145139b64888d885ea33ccac08d044000c983fcbb0bd3298af579e0931f31
# via data-inclusion-processings (pyproject.toml)
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
idna==3.8
# via requests
# via
# email-validator
# requests
numpy==2.1.1
# via
# data-inclusion-processings (pyproject.toml)
# pandas
pandas==2.2.2
# via data-inclusion-processings (pyproject.toml)
pydantic==2.9.1
# via data-inclusion-schema
pydantic-core==2.23.3
# via pydantic
python-dateutil==2.9.0.post0
# via pandas
pytz==2024.2
Expand All @@ -22,6 +36,10 @@ six==1.16.0
# via python-dateutil
tenacity==9.0.0
# via data-inclusion-processings (pyproject.toml)
typing-extensions==4.12.2
# via
# pydantic
# pydantic-core
tzdata==2024.1
# via pandas
urllib3==2.2.2
Expand Down
20 changes: 19 additions & 1 deletion datawarehouse/processings/requirements/test-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,11 +1,21 @@
# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml --extra=test --output-file=requirements/test-requirements.txt
annotated-types==0.7.0
# via pydantic
certifi==2024.8.30
# via requests
charset-normalizer==3.3.2
# via requests
data-inclusion-schema @ https://github.com/gip-inclusion/data-inclusion-schema/archive/vmttn/feat/score-qualite.zip#sha256=148145139b64888d885ea33ccac08d044000c983fcbb0bd3298af579e0931f31
# via data-inclusion-processings (pyproject.toml)
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
idna==3.8
# via requests
# via
# email-validator
# requests
iniconfig==2.0.0
# via pytest
numpy==2.1.1
Expand All @@ -18,6 +28,10 @@ pandas==2.2.2
# via data-inclusion-processings (pyproject.toml)
pluggy==1.5.0
# via pytest
pydantic==2.9.1
# via data-inclusion-schema
pydantic-core==2.23.3
# via pydantic
pytest==8.3.3
# via data-inclusion-processings (pyproject.toml)
python-dateutil==2.9.0.post0
Expand All @@ -30,6 +44,10 @@ six==1.16.0
# via python-dateutil
tenacity==9.0.0
# via data-inclusion-processings (pyproject.toml)
typing-extensions==4.12.2
# via
# pydantic
# pydantic-core
tzdata==2024.1
# via pandas
urllib3==2.2.2
Expand Down
41 changes: 18 additions & 23 deletions pipeline/CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,19 +12,12 @@ pip install -U pip setuptools wheel

# Install the dev dependencies
pip install -r requirements/dev/requirements.txt
```

## Running the test suite

```bash
# Copy (and optionally edit) the template .env
cp .template.env .env

# simply use tox (for a reproducible environment, packaging errors, etc.)
tox
# Install dbt
pip install -r requirements/tasks/dbt/requirements.txt
```

## dbt
## Running `dbt`

* dbt is configured to target the `target-db` postgres container (see the root `docker-compose.yml`).
* all dbt commands must be run in the `pipeline/dbt` directory.
Expand All @@ -44,28 +37,20 @@ dbt run-operation create_udfs
# run commands
dbt ls

# staging, basic processing/mapping:
# - retrieve data from datalake table
# - retrieve data from raw dedicated source tables
# - retrieve data from the Soliguide S3
dbt run --select staging

# intermediate, specific transformations
dbt run --select intermediate

# marts, last touch
dbt run --select marts
dbt build --select models/staging
dbt build --select models/intermediate
dbt build --select models/marts
```

## Update schema in dbt seeds
## Updating schema in dbt seeds

* Required when the schema changes.

```bash
python scripts/update_schema_seeds.py
```

## Manage the pipeline requirements
## Managing the pipeline requirements

In order to prevent conflicts:

Expand All @@ -84,3 +69,13 @@ make all
# to upgrade dependencies
make upgrade all
```

## Running the test suite

```bash
# Copy (and optionally edit) the template .env
cp .template.env .env

# simply use tox (for reproducible environnement, packaging errors, etc.)
tox
```
1 change: 1 addition & 0 deletions pipeline/dags/dag_utils/dbt.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@ def get_intermediate_tasks():
# main since the beginning as it required intermediate data to be
# present ?
"path:models/intermediate/int__geocodages.sql",
"path:models/intermediate/int__criteres_qualite.sql",
"path:models/intermediate/int__union_contacts.sql",
"path:models/intermediate/int__union_adresses.sql",
"path:models/intermediate/int__union_services.sql",
Expand Down
Loading
Loading