Release 2024-02-15 (#91)
tordans authored Feb 15, 2024
2 parents e48bda0 + f039b2a commit fb40f16
Showing 9 changed files with 164 additions and 139 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/deployment.production.yml
@@ -10,7 +10,7 @@ jobs:
uses: ./.github/workflows/deployment.yml
with:
ENVIRONMENT: production
URL: tiles.radverkehrsatlas.de
TILES_URL: tiles.radverkehrsatlas.de
secrets:
SERVICE_NAME: ${{ secrets.SERVICE_NAME }}
DATABASE_NAME: ${{ secrets.DATABASE_NAME }}
@@ -24,4 +24,4 @@ jobs:
SSH_USERNAME: ${{ secrets.SSH_USERNAME }}
SSH_PASSWORD: ${{ secrets.SSH_PASSWORD }}
SYNOLOGY_LOG_TOKEN: ${{ secrets.SYNOLOGY_LOG_TOKEN }}
SYNOLOGY_ERROR_LOG_TOKEN: ${{ secrets.SYNOLOGY_ERROR_LOG_TOKEN }}
SYNOLOGY_ERROR_LOG_TOKEN: ${{ secrets.SYNOLOGY_ERROR_LOG_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/deployment.staging.yml
@@ -10,7 +10,7 @@ jobs:
uses: ./.github/workflows/deployment.yml
with:
ENVIRONMENT: staging
URL: staging-tiles.radverkehrsatlas.de
TILES_URL: staging-tiles.radverkehrsatlas.de
secrets:
SERVICE_NAME: ${{ secrets.SERVICE_NAME }}
DATABASE_NAME: ${{ secrets.DATABASE_NAME }}
74 changes: 58 additions & 16 deletions .github/workflows/deployment.yml
@@ -33,7 +33,7 @@ on:
ENVIRONMENT:
type: string
required: true
URL:
TILES_URL:
type: string
required: true

@@ -42,11 +42,40 @@ jobs:
runs-on: ubuntu-latest
environment:
name: ${{ inputs.ENVIRONMENT }}
url: https://${{ inputs.URL }}
url: https://${{ inputs.TILES_URL }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 6

- name: Get last run commit SHA
id: last_run
run: |
LAST_RUN_SHA=$(curl --request GET \
--url https://api.github.com/repos/${{ github.repository }}/actions/runs \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \
| jq -r '.workflow_runs[1].head_sha')
echo "::set-output name=sha::$LAST_RUN_SHA"
shell: bash

- name: Check if LAST_RUN_SHA is one of the last 6 commits and check changes
id: git_changes
run: |
LAST_6_COMMITS=$(git log -n 6 --pretty=format:"%H")
if echo "$LAST_6_COMMITS" | grep -q "${{ steps.last_run.outputs.sha }}"; then
if git diff --quiet ${{ steps.last_run.outputs.sha }} HEAD -- app/ helpers/ app.Dockerfile docker-compose.yml; then
echo "::set-output name=changes::false"
else
echo "::set-output name=changes::true"
fi
else
echo "::set-output name=changes::true"
fi
shell: bash


- name: Upgrade AWS CLI version and setup lightsailctl
if: steps.git_changes.outputs.changes == 'true'
run: |
aws --version
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
@@ -58,16 +87,19 @@
sudo chmod +x /usr/local/bin/lightsailctl
- name: Configure AWS credentials
if: steps.git_changes.outputs.changes == 'true'
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}

- name: Setup buildx
if: steps.git_changes.outputs.changes == 'true'
uses: docker/setup-buildx-action@v3

- name: Build app image
if: steps.git_changes.outputs.changes == 'true'
uses: docker/build-push-action@v5
with:
context: .
@@ -79,6 +111,7 @@
tags: public.ecr.aws/n0p8j4k5/atlas/app:${{ github.sha }}

- name: Push the app image
if: steps.git_changes.outputs.changes == 'true'
run: |
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/n0p8j4k5/
docker push public.ecr.aws/n0p8j4k5/atlas/app:${{ github.sha }}
@@ -93,6 +126,17 @@
target: "/srv/processing/"
overwrite: true

- name: Update GITHUB_SHA
if: steps.git_changes.outputs.changes == 'true'
uses: appleboy/ssh-action@master
with:
host: ${{ secrets.SSH_HOST }}
username: ${{ secrets.SSH_USERNAME }}
password: ${{ secrets.SSH_PASSWORD }}
script: |
cd /srv/processing/
sed -i "s|^GITHUB_SHA=.*$|GITHUB_SHA='${{ github.sha }}'|" .env
- name: Stop & Start containers on VPS
uses: appleboy/ssh-action@master
with:
@@ -101,20 +145,18 @@
password: ${{ secrets.SSH_PASSWORD }}
script: |
cd /srv/processing/
echo "Updating '.env'"
rm .env
touch .env
echo PGHOST='${{ vars.DATABASE_HOST }}' >> .env
echo ENVIRONMENT='${{ inputs.ENVIRONMENT }}' >> .env
echo SYNOLOGY_LOG_TOKEN='${{ secrets.SYNOLOGY_LOG_TOKEN }}' >> .env
echo SYNOLOGY_ERROR_LOG_TOKEN='${{ secrets.SYNOLOGY_ERROR_LOG_TOKEN }}' >> .env
echo PGUSER='${{ secrets.DATABASE_USER }}' >> .env
echo PGPASSWORD='${{ secrets.DATABASE_PASSWORD }}' >> .env
echo PGDATABASE='${{ secrets.DATABASE_NAME }}' >> .env
echo OSM_DOWNLOAD_URL='${{ vars.OSM_DOWNLOAD_URL }}' >> .env
echo URL='${{ inputs.URL }}' >> .env
echo GITHUB_SHA='${{ github.sha }}' >> .env
sed -i \
-e "s|^PGHOST=.*$|PGHOST='${{ vars.DATABASE_HOST }}'|" \
-e "s|^ENVIRONMENT=.*$|ENVIRONMENT='${{ inputs.ENVIRONMENT }}'|" \
-e "s|^SYNOLOGY_LOG_TOKEN=.*$|SYNOLOGY_LOG_TOKEN='${{ secrets.SYNOLOGY_LOG_TOKEN }}'|" \
-e "s|^SYNOLOGY_ERROR_LOG_TOKEN=.*$|SYNOLOGY_ERROR_LOG_TOKEN='${{ secrets.SYNOLOGY_ERROR_LOG_TOKEN }}'|" \
-e "s|^PGUSER=.*$|PGUSER='${{ secrets.DATABASE_USER }}'|" \
-e "s|^PGPASSWORD=.*$|PGPASSWORD='${{ secrets.DATABASE_PASSWORD }}'|" \
-e "s|^PGDATABASE=.*$|PGDATABASE='${{ secrets.DATABASE_NAME }}'|" \
-e "s|^OSM_DOWNLOAD_URL=.*$|OSM_DOWNLOAD_URL='${{ vars.OSM_DOWNLOAD_URL }}'|" \
-e "s|^TILES_URL=.*$|TILES_URL='${{ inputs.TILES_URL }}'|" \
.env
echo "Reload containers"
docker compose -f docker-compose.traefik.yml up -d
docker compose pull
docker compose up -d
docker image prune -fa
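
Note on the new build gating above: the workflow now looks up the head SHA of the previous workflow run and skips the image build, push, and GITHUB_SHA update when none of the watched paths changed since that commit; the later steps are gated with `if: steps.git_changes.outputs.changes == 'true'`. A minimal standalone sketch of the check, assuming GITHUB_TOKEN and GITHUB_REPOSITORY are set and the checkout has at least 6 commits of history (the watched paths mirror the workflow):

# Sketch only: same logic as the "last_run" and "git_changes" steps above.
LAST_RUN_SHA=$(curl -s \
  -H "Authorization: Bearer ${GITHUB_TOKEN}" \
  "https://api.github.com/repos/${GITHUB_REPOSITORY}/actions/runs" \
  | jq -r '.workflow_runs[1].head_sha')

if git log -n 6 --pretty=format:'%H' | grep -q "$LAST_RUN_SHA"; then
  # Previous run's commit is within the fetched history: diff only the watched paths.
  if git diff --quiet "$LAST_RUN_SHA" HEAD -- app/ helpers/ app.Dockerfile docker-compose.yml; then
    echo "changes=false"
  else
    echo "changes=true"
  fi
else
  # Previous run's commit is unknown or outside the fetched history: rebuild to be safe.
  echo "changes=true"
fi
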
57 changes: 10 additions & 47 deletions app/process/barriers/barriers.lua
@@ -14,16 +14,6 @@ local lineBarriers = osm2pgsql.define_table({
}
})

-- local excludedLineBarriers = osm2pgsql.define_table({
-- name = 'barrierLines_excluded',
-- ids = { type = 'any', id_column = 'osm_id', type_column = 'osm_type' },
-- columns = {
-- { column = 'tags', type = 'jsonb' },
-- { column = 'meta', type = 'jsonb' },
-- { column = 'geom', type = 'linestring' },
-- }
-- })

local areaBarriers = osm2pgsql.define_table({
name = 'barrierAreas',
ids = { type = 'any', id_column = 'osm_id', type_column = 'osm_type' },
@@ -34,17 +24,6 @@ local areaBarriers = osm2pgsql.define_table({
}
})

-- local excludedAreaBarriers = osm2pgsql.define_table({
-- name = 'barrierAreas_excluded',
-- ids = { type = 'any', id_column = 'osm_id', type_column = 'osm_type' },
-- columns = {
-- { column = 'tags', type = 'jsonb' },
-- { column = 'meta', type = 'jsonb' },
-- { column = 'geom', type = 'multipolygon' },
-- }
-- })


local allowedTags = Set({
'tunnel',
'waterway',
@@ -91,44 +70,33 @@ function osm2pgsql.process_way(object)
FilterTags(object.tags, allowedTags)
areaBarriers:insert({
tags = object.tags,
meta=Metadata(object),
geom=object:as_multipolygon()
meta = Metadata(object),
geom = object:as_multipolygon()
})
return
end
-- excludedAreaBarriers:insert({
-- tags=object.tags,
-- meta=Metadata(object),
-- geom=object:as_multipolygon()
-- })
else --process as linestring
local tags = object.tags
-- if tags.tunnel =='yes' then return end -- we don't consider tunnels as barriers

local isBarrier = HighwayClasses[tags.highway]

-- only need for low zoom levels
local waterBarriers = Set({"river", "canal"})
-- waterways as lines are used for low zoom levels
local waterBarriers = Set({ "river", "canal" })
isBarrier = isBarrier or waterBarriers[tags.waterway]

local trainBarriers = Set({"main", "branch"})
local trainBarriers = Set({ "main", "branch" })
if (tags.railway == 'rail' or tags.railway == 'light_rail') then
isBarrier = isBarrier or trainBarriers[tags.usage]
end

if isBarrier then
FilterTags(object.tags, allowedTags)
lineBarriers:insert({
tags = object.tags,
meta=Metadata(object),
geom=object:as_linestring(),
meta = Metadata(object),
geom = object:as_linestring(),
})
return
end
-- excludedLineBarriers:insert({
-- tags=object.tags,
-- meta=Metadata(object),
-- geom=object:as_linestring()
-- })
end
end

@@ -137,14 +105,9 @@ function osm2pgsql.process_relation(object)
FilterTags(object.tags, allowedTags)
areaBarriers:insert({
tags = object.tags,
meta=Metadata(object),
geom=object:as_multipolygon()
meta = Metadata(object),
geom = object:as_multipolygon()
})
return
end
-- excludedAreaBarriers:insert({
-- tags=object.tags,
-- meta=Metadata(object),
-- geom=object:as_multipolygon()
-- })
end
15 changes: 9 additions & 6 deletions app/process/roads_bikelanes/bikelanes/categories.lua
@@ -89,7 +89,8 @@ local function footAndCyclewaySharedCases(tags)
local taggedWithAccessTagging = tags.bicycle == "designated" and tags.foot == "designated" and tags.segregated == "no"
local taggedWithTrafficsign = osm2pgsql.has_prefix(tags.traffic_sign, "DE:240")
if taggedWithAccessTagging or taggedWithTrafficsign then
if tags.is_sidepath == "yes" or tags.footway == "sidewalk" then
-- `_parent_highway` indicates that this way was split off from the centerline; in this case, we consider it a sidepath.
if tags.is_sidepath == "yes" or tags._parent_highway or tags.footway == "sidewalk" then
return "footAndCyclewayShared_adjoining"
end
-- Eg https://www.openstreetmap.org/way/440072364 highway=service
@@ -108,7 +109,8 @@ local function footAndCyclewaySegregatedCases(tags)
local taggedWithAccessTagging = tags.bicycle == "designated" and tags.foot == "designated" and tags.segregated == "yes"
local taggedWithTrafficsign = osm2pgsql.has_prefix(tags.traffic_sign, "DE:241")
if taggedWithAccessTagging or taggedWithTrafficsign then
if tags.is_sidepath == "yes" or tags.footway == "sidewalk" then
-- `_parent_highway` indicates that this way was split off from the centerline; in this case, we consider it a sidepath.
if tags.is_sidepath == "yes" or tags._parent_highway or tags.footway == "sidewalk" then
return "footAndCyclewaySegregated_adjoining"
end
if tags.is_sidepath == "no" then
@@ -129,12 +131,13 @@ local function footwayBicycleYesCases(tags)

if tags.highway == "footway" or tags.highway == "path" then
if tags.bicycle == "yes" or IsTermInString("1022-10", tags.traffic_sign) then
-- https://www.openstreetmap.org/way/946438663
if tags.is_sidepath == "yes" or tags.footway == "sidewalk" then
return "footwayBicycleYes_isolated"
-- `_parent_highway` indicates that this way was split off from the centerline; in this case, we consider it a sidepath.
if tags.is_sidepath == "yes" or tags._parent_highway or tags.footway == "sidewalk" then
return "footwayBicycleYes_adjoining"
end
-- https://www.openstreetmap.org/way/946438663
if tags.is_sidepath == "no" then
return "footwayBicycleYes_adjoining"
return "footwayBicycleYes_isolated"
end
return "footwayBicycleYes_adjoiningOrIsolated"
end
60 changes: 30 additions & 30 deletions app/run.sh
@@ -26,33 +26,33 @@ alert() {
curl -X POST $url -d "payload=$payload" --silent --output "/dev/null"
}

if ! ./run-1-download.sh; then
alert '*ERROR*: #run-1-download exited with non-zero status code'
fi

if ! ./run-2-filter.sh; then
alert '*ERROR*: #run-2-filter exited with non-zero status code'
fi

if ! ./run-3-migration.sh; then
alert '*ERROR*: #run-3-migration exited with non-zero status code'
fi

process_start_time=$(date +%s)
if ! ./run-4-process.sh; then
alert '*ERROR*: #run-4-process exited with non-zero status code'
fi
process_end_time=$(date +%s)
export PROCESS_RUN_TIME_DIFF=$((process_end_time - process_start_time)) # used by metadata.sh

if ! ./run-5-postprocess.sh; then
alert '*ERROR*: #run-5-postprocess exited with non-zero status code'
fi

if ! ./run-6-analysis.sh; then
alert '*ERROR*: #run-6-analysis exited with non-zero status code'
fi

if ! ./run-7-metadata.sh; then
alert '*ERROR*: #run-7-metadata exited with non-zero status code'
fi
# if ! ./run-1-download.sh; then
# alert '*ERROR*: #run-1-download exited with non-zero status code'
# fi

# if ! ./run-2-filter.sh; then
# alert '*ERROR*: #run-2-filter exited with non-zero status code'
# fi

# if ! ./run-3-migration.sh; then
# alert '*ERROR*: #run-3-migration exited with non-zero status code'
# fi

# process_start_time=$(date +%s)
# if ! ./run-4-process.sh; then
# alert '*ERROR*: #run-4-process exited with non-zero status code'
# fi
# process_end_time=$(date +%s)
# export PROCESS_RUN_TIME_DIFF=$((process_end_time - process_start_time)) # used by metadata.sh

# if ! ./run-5-postprocess.sh; then
# alert '*ERROR*: #run-5-postprocess exited with non-zero status code'
# fi

# if ! ./run-6-analysis.sh; then
# alert '*ERROR*: #run-6-analysis exited with non-zero status code'
# fi

# if ! ./run-7-metadata.sh; then
# alert '*ERROR*: #run-7-metadata exited with non-zero status code'
# fi
4 changes: 2 additions & 2 deletions configs/nginx.conf
@@ -35,7 +35,7 @@ http {

proxy_cache_path /var/cache/nginx/
levels=1:2
max_size=15g
max_size=50g
inactive=60m
use_temp_path=off
keys_zone=backend_cache:240m;
@@ -61,7 +61,7 @@ http {
proxy_cache backend_cache;
proxy_cache_lock on;
proxy_cache_revalidate on;
proxy_cache_valid 200 204 302 1d;
proxy_cache_valid 200 204 302 12h;
proxy_cache_valid 404 1m;
proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504;
add_header X-Cache-Status $upstream_cache_status;
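
With the nginx changes above the tile cache may grow to 50g (was 15g) and cached 200/204/302 responses expire after 12h instead of 1d. A quick way to spot-check caching after a deploy; the hostname comes from the production workflow in this release, the tile path is purely illustrative:

# Expect "X-Cache-Status: MISS" on the first request and "HIT" on a repeat within the 12h window,
# assuming the upstream answered with a cacheable status.
curl -sI "https://tiles.radverkehrsatlas.de/example/tile.pbf" | grep -i 'x-cache-status'
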