Skip to content

Commit

Permalink
feat(storage): make S3 URL expiration customizable (#345)
Browse files Browse the repository at this point in the history
* feat(storage): make S3 URL expiration customizable

* feat(config): make URL expiration an env var

* test(storage): add test cases

* style(test): remove legacy import

* fix(config): fix type casting
  • Loading branch information
Ronan committed Jul 17, 2024
1 parent 7d9d613 commit f4a3131
Show file tree
Hide file tree
Showing 8 changed files with 29 additions and 33 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ jobs:
TELEGRAM_TEST_CHAT_ID: ${{ secrets.TELEGRAM_TEST_CHAT_ID }}
run: |
docker compose -f docker-compose.test.yml up -d --build --wait
docker compose -f docker-compose.test.yml exec localstack awslocal s3 mb s3://sample-bucket
docker compose -f docker-compose.test.yml exec localstack awslocal s3api put-object --bucket sample-bucket --key media-folder
docker compose -f docker-compose.test.yml exec localstack awslocal s3 mb s3://bucket
docker compose -f docker-compose.test.yml exec localstack awslocal s3api put-object --bucket bucket --key media-folder
docker compose -f docker-compose.test.yml exec -T backend pytest --cov=app --cov-report xml tests/
docker compose -f docker-compose.test.yml cp backend:/app/coverage.xml ./coverage-src.xml
- name: Upload coverage to Codecov
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ make run-dev

To enable a smoother development experience, we are using [localstack](https://docs.localstack.cloud/overview/) to create a local S3 bucket.

We automatically create a s3 bucket called `sample-bucket`, which you can check by running:
We automatically create an S3 bucket called `bucket`, which you can check by running:
```shell
docker-compose exec localstack awslocal s3 ls
```
Expand Down
7 changes: 3 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -32,17 +32,16 @@ stop:
run-dev:
poetry export -f requirements.txt --without-hashes --with dev --output src/app/requirements.txt
docker compose -f docker-compose.test.yml up -d --build
docker compose exec localstack awslocal s3 mb s3://sample-bucket
docker compose exec localstack awslocal s3api put-object --bucket sample-bucket --key media-folder
docker compose exec localstack awslocal s3 mb s3://bucket
docker compose exec localstack awslocal s3api put-object --bucket bucket --key media-folder

stop-dev:
docker compose -f docker-compose.test.yml down

# Run tests for the library
test:
poetry export -f requirements.txt --without-hashes --with dev --output src/app/requirements.txt
docker compose -f docker-compose.test.yml up -d --build
docker compose exec localstack awslocal s3 mb s3://sample-bucket
docker compose -f docker-compose.test.yml up -d --build --wait
docker compose exec -T backend pytest --cov=app
docker compose -f docker-compose.test.yml down

Expand Down
8 changes: 5 additions & 3 deletions docker-compose.test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@ services:
- SQLALCHEMY_SILENCE_UBER_WARNING=1
- SUPERUSER_LOGIN=dummy_login
- SUPERUSER_PWD=dummy&P@ssw0rd!
- BUCKET_NAME=sample-bucket
- S3_ENDPOINT_URL=http://s3.localhost.localstack.cloud:4566
- BUCKET_NAME=bucket
- S3_ENDPOINT_URL=http://localstack:4566
- S3_ACCESS_KEY=fake
- S3_SECRET_KEY=fake
- S3_REGION=us-east-1
Expand Down Expand Up @@ -51,8 +51,10 @@ services:
environment:
- EDGE_PORT=4566
- SERVICES=s3
volumes:
- ./scripts/localstack:/etc/localstack/init/ready.d
healthcheck:
test: ["CMD-SHELL", "awslocal --endpoint-url=http://localhost:4566 s3 ls"]
test: ["CMD-SHELL", "awslocal --endpoint-url=http://localhost:4566 s3 ls s3://bucket"]
interval: 10s
timeout: 5s
retries: 5
4 changes: 4 additions & 0 deletions scripts/localstack/setup-s3.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Localstack ready-hook: provision the S3 bucket used by the app/tests
# and seed it with a single empty object under a known key.
awslocal s3 mb s3://bucket
# Create an empty local file to upload as the seed object.
: > my_file
awslocal s3 cp my_file s3://bucket/my_file
27 changes: 8 additions & 19 deletions src/app/api/endpoints/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from app.api.deps import get_current_access, get_db
from app.db import alerts, events
from app.models import Access, AccessType, Alert, Device, Event, Media
from app.schemas import AccessRead, Acknowledgement, AcknowledgementOut, AlertOut, EventIn, EventOut, EventUpdate, MediaUrl
from app.schemas import AccessRead, Acknowledgement, AcknowledgementOut, AlertOut, EventIn, EventOut, EventUpdate
from app.services.telemetry import telemetry_client

router = APIRouter(redirect_slashes=True)
Expand Down Expand Up @@ -140,40 +140,29 @@ async def fetch_unacknowledged_events(
"""
telemetry_client.capture(requester.id, event="events-fetch-unacnkowledged")
if await is_admin_access(requester.id):
retrieved_events = (
session.query(
Event,
Media.bucket_key
)
retrieved_events = (
session.query(Event, Media.bucket_key)
.select_from(Event)
.join(Alert, Event.id == Alert.event_id)
.join(Media, Alert.media_id == Media.id)
.filter(and_(
Event.is_acknowledged.is_(False)
))
.filter(and_(Event.is_acknowledged.is_(False)))
)
else:
retrieved_events = (
session.query(
Event,
Media.bucket_key
)
session.query(Event, Media.bucket_key)
.select_from(Event)
.join(Alert, Event.id == Alert.event_id)
.join(Media, Alert.media_id == Media.id)
.join(Device, Alert.device_id == Device.id)
.join(Access, Device.access_id == Access.id)
.filter(and_(
Access.group_id == requester.group_id,
Event.is_acknowledged.is_(False)
))
.filter(and_(Access.group_id == requester.group_id, Event.is_acknowledged.is_(False)))
)
results = []
for event, bucket_key in retrieved_events.all():
event_dict = event.__dict__.copy()
event_dict['bucket_key'] = bucket_key
event_dict["bucket_key"] = await s3_bucket.get_public_url(bucket_key)
results.append(event_dict)

return results


Expand Down
8 changes: 4 additions & 4 deletions src/tests/routes/test_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,10 +86,10 @@
]

MEDIA_TABLE = [
{"id": 1, "device_id": 1, "type": "image", "created_at": "2020-10-13T08:18:45.447773"},
{"id": 2, "device_id": 1, "type": "video", "created_at": "2020-10-13T09:18:45.447773"},
{"id": 3, "device_id": 1, "type": "image", "created_at": "2020-10-13T09:18:45.447773"},
{"id": 4, "device_id": 1, "type": "video", "created_at": "2020-10-13T09:18:45.447773"},
{"id": 1, "device_id": 1, "type": "image", "created_at": "2020-10-13T08:18:45.447773", "bucket_key": "my_file"},
{"id": 2, "device_id": 1, "type": "video", "created_at": "2020-10-13T09:18:45.447773", "bucket_key": "my_file"},
{"id": 3, "device_id": 1, "type": "image", "created_at": "2020-10-13T09:18:45.447773", "bucket_key": "my_file"},
{"id": 4, "device_id": 1, "type": "video", "created_at": "2020-10-13T09:18:45.447773", "bucket_key": "my_file"},
]

ALERT_TABLE = [
Expand Down
2 changes: 2 additions & 0 deletions src/tests/test_services.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from app.services.utils import cfg, send_telegram_msg
from httpx import AsyncClient


def test_resolve_bucket_key(monkeypatch):
file_name = "myfile.jpg"
bucket_subfolder = "my/bucket/subfolder"
Expand Down Expand Up @@ -58,6 +59,7 @@ def test_send_telegram_msg(chat_id, msg, expected_status_code):
else:
assert response.status_code == expected_status_code, response.text


@pytest.mark.asyncio
async def test_s3_bucket(async_client: AsyncClient, mock_img: bytes):
assert isinstance(s3_bucket, S3Bucket)
Expand Down

0 comments on commit f4a3131

Please sign in to comment.