
Commit

Integrate k8s production (data-for-change#1328)
* Push DB image to hub

* Enable downloading dump on DB startup

This patch adds a dump download stage to container startup when the dump is missing
but GDRIVE_FILE_ID is provided and RESTORE_DB is set to TRUE.

* Update docker docs

* Finalize DB Docker setup

* Further production finalization

* Finalize production env vars and DB dump/restore/build

* CI

* final changes

Co-authored-by: Dima Kuznetsov <[email protected]>
OriHoch and dimakuz authored Jun 9, 2020
1 parent 30971d9 commit 018700c
Showing 18 changed files with 416 additions and 105 deletions.
13 changes: 11 additions & 2 deletions .dockerignore
@@ -1,6 +1,15 @@
**/Dockerfile
**/docker-compose.yml
.dockerignore
Dockerfile
docker-compose-*
db_docker/*
db_docker
**/.git
**/__pycache__
**/pytest_cache
.env*
.vscode
.venv
venv
.DS_Store
.idea
.*
115 changes: 110 additions & 5 deletions .github/workflows/deploy.yml
@@ -4,17 +4,122 @@ on:
branches:
- master
- dev
env:
DOCKER_REPOSITORY_ANYWAY: "anywayteam/anyway"
DOCKER_REPOSITORY_DB: "anywayteam/db"
DOCKER_REPOSITORY_DB_BACKUP: "anywayteam/db_backup"
DOCKER_REPOSITORY_NGINX: "anywayteam/nginx"
jobs:
Deploy:
Build-anyway:
if: github.repository == 'hasadna/anyway'
runs-on: ubuntu-latest
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- name: Build and push Docker images
- name: Build and push anyway Docker image
uses: docker/[email protected]
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: anywayteam/anyway
tag_with_ref: false
repository: ${{ env.DOCKER_REPOSITORY_ANYWAY }}
tag_with_ref: true
tag_with_sha: true
cache_froms: ${{ env.DOCKER_REPOSITORY_ANYWAY }}:dev
Build-db:
if: github.repository == 'hasadna/anyway'
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- name: Build and push database Docker image
uses: docker/[email protected]
with:
path: 'db_docker'
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: ${{ env.DOCKER_REPOSITORY_DB }}
tag_with_ref: true
tag_with_sha: true
cache_froms: ${{ env.DOCKER_REPOSITORY_DB }}:dev
Build-db-backup:
if: github.repository == 'hasadna/anyway'
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- name: Build and push database backup Docker image
uses: docker/[email protected]
with:
path: 'db_docker'
dockerfile: 'db_docker/backup.Dockerfile'
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: ${{ env.DOCKER_REPOSITORY_DB_BACKUP }}
tag_with_ref: true
tag_with_sha: true
cache_froms: ${{ env.DOCKER_REPOSITORY_DB_BACKUP }}:dev
Build-nginx:
if: github.repository == 'hasadna/anyway'
needs: Build-anyway
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
DOCKER_REPOSITORY_ANYWAY: ${{ env.DOCKER_REPOSITORY_ANYWAY }}
DOCKER_REPOSITORY_NGINX: ${{ env.DOCKER_REPOSITORY_NGINX }}
run: |
REF_TAG="${GITHUB_REF##*/}" &&\
SHA_TAG=sha-`git rev-parse --short $GITHUB_SHA` &&\
echo REF_TAG=$REF_TAG &&\
echo SHA_TAG=$SHA_TAG &&\
docker pull "${DOCKER_REPOSITORY_ANYWAY}:${SHA_TAG}" &&\
if docker pull "${DOCKER_REPOSITORY_NGINX}:${REF_TAG}"; then
CACHE_FROM=" --cache-from ${DOCKER_REPOSITORY_NGINX}:${REF_TAG} "
else
CACHE_FROM=""
fi &&\
docker tag "${DOCKER_REPOSITORY_ANYWAY}:${SHA_TAG}" anywayteam/anyway:latest &&\
docker build $CACHE_FROM -t "${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" nginx_docker &&\
docker tag "${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" "${DOCKER_REPOSITORY_NGINX}:${REF_TAG}" &&\
echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin &&\
docker push "${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" &&\
docker push "${DOCKER_REPOSITORY_NGINX}:${REF_TAG}"
deploy:
if: github.repository == 'hasadna/anyway' && github.ref == 'refs/heads/master'
needs:
- Build-anyway
- Build-db
- Build-db-backup
- Build-nginx
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- env:
DOCKER_REPOSITORY_ANYWAY: ${{ env.DOCKER_REPOSITORY_ANYWAY }}
DOCKER_REPOSITORY_DB: ${{ env.DOCKER_REPOSITORY_DB }}
DOCKER_REPOSITORY_DB_BACKUP: ${{ env.DOCKER_REPOSITORY_DB_BACKUP }}
DOCKER_REPOSITORY_NGINX: ${{ env.DOCKER_REPOSITORY_NGINX }}
HASADNA_K8S_DEPLOY_KEY: ${{ secrets.HASADNA_K8S_DEPLOY_KEY }}
run: |
SHA_TAG=sha-`git rev-parse --short $GITHUB_SHA` &&\
echo SHA_TAG=$SHA_TAG &&\
ANYWAY_IMAGE="${DOCKER_REPOSITORY_ANYWAY}:${SHA_TAG}" &&\
echo ANYWAY_IMAGE=$ANYWAY_IMAGE &&\
DB_IMAGE="${DOCKER_REPOSITORY_DB}:${SHA_TAG}" &&\
echo DB_IMAGE=$DB_IMAGE &&\
DB_BACKUP_IMAGE="${DOCKER_REPOSITORY_DB_BACKUP}:${SHA_TAG}" &&\
echo DB_BACKUP_IMAGE=$DB_BACKUP_IMAGE &&\
NGINX_IMAGE="${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" &&\
echo NGINX_IMAGE=$NGINX_IMAGE &&\
if ! git log -1 --pretty=format:"%s" | grep -- --no-deploy; then
cd `mktemp -d` &&\
echo "${HASADNA_K8S_DEPLOY_KEY}" > hasadna_k8s_deploy_key &&\
chmod 400 hasadna_k8s_deploy_key &&\
export GIT_SSH_COMMAND="ssh -i `pwd`/hasadna_k8s_deploy_key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" &&\
git clone [email protected]:hasadna/hasadna-k8s.git &&\
cd hasadna-k8s &&\
python update_yaml.py '{"anyway":{"image":"'${ANYWAY_IMAGE}'","dbImage":"'${DB_IMAGE}'","dbBackupImage":"'${DB_BACKUP_IMAGE}'","nginxImage":"'${NGINX_IMAGE}'"}}' ./values.auto-updated.yaml &&\
git config --global user.name "Anyway CI" &&\
git config --global user.email "anyway-ci@localhost" &&\
git add ./values.auto-updated.yaml && git commit -m "automatic update of anyway" &&\
git push origin master
fi
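
Every build job above tags its image with both the branch name (tag_with_ref) and a sha-<short-sha> tag, and the deploy job rewrites the hasadna-k8s values file only when the merge commit subject does not contain --no-deploy. A hedged sketch of using those conventions locally, assuming the local short SHA matches the one CI computed:

# Pull the images CI pushed for the current commit, using the same sha-<short> tag scheme.
SHA_TAG="sha-$(git rev-parse --short HEAD)"
docker pull "anywayteam/anyway:${SHA_TAG}"
docker pull "anywayteam/db:${SHA_TAG}"
docker pull "anywayteam/db_backup:${SHA_TAG}"
docker pull "anywayteam/nginx:${SHA_TAG}"

# Skip the k8s deploy step for a merge by including --no-deploy in the commit subject,
# which the workflow checks with: git log -1 --pretty=format:"%s" | grep -- --no-deploy
git commit --allow-empty -m "docs update --no-deploy"
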
1 change: 1 addition & 0 deletions .gitignore
@@ -57,3 +57,4 @@ venv
/static/js/years.js

/.vscode
/.env*
4 changes: 4 additions & 0 deletions Dockerfile
@@ -40,3 +40,7 @@ EXPOSE 5000
ENTRYPOINT ["/anyway/docker-entrypoint.sh"]

CMD FLASK_APP=anyway flask run --host 0.0.0.0

ENV ALLOW_ALEMBIC_UPGRADE=yes
ENV FLASK_APP=anyway
RUN flask assets clean
25 changes: 24 additions & 1 deletion anyway/flask_app.py
@@ -33,7 +33,7 @@
from sqlalchemy import and_, not_, or_
from sqlalchemy import func
from sqlalchemy.orm import load_only
from webassets import Environment as AssetsEnvironment
from webassets import Environment as AssetsEnvironment, Bundle as AssetsBundle
from webassets.ext.jinja2 import AssetsExtension
from werkzeug.security import check_password_hash
from wtforms import form, fields, validators, StringField, PasswordField, Form
@@ -95,6 +95,29 @@
assets = Environment()
assets.init_app(app)
assets_env = AssetsEnvironment(os.path.join(utilities._PROJECT_ROOT, "static"), "/static")
assets.register('css_all', AssetsBundle(
"css/jquery.smartbanner.css", "css/bootstrap.rtl.css", "css/style.css",
"css/daterangepicker.css", "css/accordion.css", "css/bootstrap-tour.min.css",
"css/jquery-ui.min.css", "css/jquery.jspanel.min.css", "css/markers.css",
filters='yui_css', output='css/app.min.css'
))
assets.register("js_all", AssetsBundle(
"js/libs/jquery-1.11.3.min.js", "js/libs/spin.js", "js/libs/oms.min.js",
"js/libs/markerclusterer.js", "js/markerClustererAugment.js", "js/libs/underscore.js",
"js/libs/backbone.js", "js/libs/backbone.paginator.min.js", "js/libs/bootstrap.js",
"js/libs/notify-combined.min.js", "js/libs/moment-with-langs.min.js", "js/libs/date.js",
"js/libs/daterangepicker.js", "js/libs/js-itm.js", "js/constants.js",
"js/marker.js", "js/clusterView.js", "js/featuredialog.js", "js/subscriptiondialog.js",
"js/preferencesdialog.js", "js/logindialog.js", "js/sidebar.js", "js/contextmenu.js",
"js/map_style.js", "js/clipboard.js", "js/libs/bootstrap-tour.min.js",
"js/app.js", "js/localization.js", "js/inv_dict.js", "js/veh_dict.js",
"js/retina.js", "js/statPanel.js", "js/reports.js",
filters="rjsmin", output="js/app.min.js"
))
assets.register("email_all", AssetsBundle(
"js/libs/jquery-1.11.3.min.js", "js/libs/notify-combined.min.js",
filters="rjsmin", output="js/app_send_email.min.js"
))

CORS(
app,
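
With the css_all, js_all and email_all bundles now registered in flask_app.py, the minified assets can be rebuilt from the Flask-Assets CLI; the Dockerfile change above runs flask assets clean at build time so stale compiled bundles are not baked into the image. A minimal sketch, assuming the Flask-Assets command-line integration is available in the environment:

export FLASK_APP=anyway          # same value the Dockerfile sets via ENV
flask assets clean               # drop previously compiled bundles (as done in the Dockerfile)
flask assets build               # regenerate static/css/app.min.css and static/js/app.min.js
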
30 changes: 12 additions & 18 deletions db_docker/Dockerfile
@@ -1,18 +1,12 @@
FROM postgis/postgis:9.6-2.5-alpine

RUN apk add --update-cache python3 sudo &&\
pip3 install gdown

ARG GDRIVE_FILE_ID=''
ARG RESTORE_DB='FALSE'
ARG GDRIVE_URL='https://drive.google.com/uc?id='
ARG DB_DUMP_PATH=/tmp/anyway_public.pgdump

ENV GDRIVE_FILE_ID=$GDRIVE_FILE_ID
ENV RESTORE_DB=$RESTORE_DB

ADD ./download_dump.py /

RUN if [ "$RESTORE_DB" == "TRUE" ] ; then python3 download_dump.py && chmod +rwx $DB_DUMP_PATH; else echo "RESTORE_DB!=TRUE, not downloading dump" ; fi

ADD ./restore_db.sh /docker-entrypoint-initdb.d/
FROM postgis/postgis:9.6-2.5

RUN apt-get update && apt-get install -y curl unzip &&\
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" &&\
unzip awscliv2.zip && ./aws/install && rm -rf aws && aws --version
COPY restore_db.sh /docker-entrypoint-initdb.d/

# values suitable for docker-compose environment
ENV POSTGRES_PASSWORD anyway
ENV POSTGRES_USER anyway
ENV POSTGRES_DB anyway
ENV DBRESTORE_AWS_BUCKET=anyway-partial-db-dumps
6 changes: 6 additions & 0 deletions db_docker/backup.Dockerfile
@@ -0,0 +1,6 @@
FROM postgres:9.6
RUN apt-get update && apt-get install -y curl unzip &&\
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" &&\
unzip awscliv2.zip && ./aws/install && rm -rf aws && aws --version
COPY dumpdb.sh /
ENTRYPOINT ["/dumpdb.sh"]
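
The Build-db-backup job builds this image with db_docker as the context and backup.Dockerfile as the Dockerfile; the same build can be reproduced locally. A hedged example (the :dev tag here is arbitrary):

docker build -f db_docker/backup.Dockerfile -t anywayteam/db_backup:dev db_docker
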
36 changes: 36 additions & 0 deletions db_docker/dumpdb.sh
@@ -0,0 +1,36 @@
#!/bin/bash

export TZ=Asia/Jerusalem

( [ "${DBDUMP_AWS_ACCESS_KEY_ID}" == "" ] || [ "${DBDUMP_AWS_SECRET_ACCESS_KEY}" == "" ] ) && echo missing AWS env vars && exit 1
( [ "${DBDUMP_PASSWORD}" == "" ] || [ "${DBDUMP_HOST}" == "" ] || [ "${DBDUMP_USER}" == "" ] ) && echo missing DBDUMP env vars && exit 1

export AWS_ACCESS_KEY_ID="${DBDUMP_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DBDUMP_AWS_SECRET_ACCESS_KEY}"

dumpdb() {
PG_DUMP_ARGS="${1}"
DUMP_FILE="${2}"
BUCKET="${3}"
TEMPDIR=`mktemp -d`
pushd $TEMPDIR
echo "Dumping into dump file: ${DUMP_FILE}"
! PGPASSWORD=$DBDUMP_PASSWORD $PG_DUMP_ARGS -h $DBDUMP_HOST -U $DBDUMP_USER > "${DUMP_FILE}" && echo failed to pg_dump && return 1
echo "Zipping down the dump file"
! gzip "${DUMP_FILE}" && echo failed to gzip && return 1
echo "Uploading to S3"
! aws s3 cp "${DUMP_FILE}.gz" "s3://${BUCKET}/" && echo failed to s3 cp && return 1
popd
rm -rf "${TEMPDIR}"
}

echo dumping full db &&\
dumpdb "pg_dumpall" \
"`date +%Y-%m-%d`_${DBDUMP_S3_FILE_PREFIX}anyway.pgdump" \
"anyway-full-db-dumps" &&\
echo dumping partial db &&\
dumpdb "pg_dump -d anyway --no-privileges -N topology -T users -T roles -T roles_users -T locationsubscribers -T report_preferences -T general_preferences" \
"`date +%Y-%m-%d`_${DBDUMP_S3_FILE_PREFIX}anyway_partial.pgdump" \
"anyway-partial-db-dumps" &&\
echo Great Success && exit 0
echo Failed && exit 1
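
dumpdb.sh takes all of its credentials and connection details from DBDUMP_* environment variables and aborts early if any required one is missing, so the backup image can be run as a one-off job. A sketch of a manual run with placeholder credentials, assuming the database host is reachable from the container:

# Dumps are uploaded to the hard-coded anyway-full-db-dumps and anyway-partial-db-dumps S3 buckets.
docker run --rm \
  --network <network-of-the-db-container> \
  -e DBDUMP_AWS_ACCESS_KEY_ID="<aws-access-key>" \
  -e DBDUMP_AWS_SECRET_ACCESS_KEY="<aws-secret-key>" \
  -e DBDUMP_HOST="db" \
  -e DBDUMP_USER="anyway" \
  -e DBDUMP_PASSWORD="<db-password>" \
  -e DBDUMP_S3_FILE_PREFIX="manual_" \
  anywayteam/db_backup:dev
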
32 changes: 25 additions & 7 deletions db_docker/restore_db.sh
@@ -1,12 +1,30 @@
#!/bin/sh

set -e
set -x

if [ "$RESTORE_DB" == "TRUE" ]; then
echo "******DEV Env - PostgreSQL initialisation******"
pg_restore -Fc "$DB_DUMP_PATH" -d "$POSTGRES_DB" --no-owner
else
echo "Not DEV environment, not restoring db"
fi
( [ "${DBRESTORE_AWS_ACCESS_KEY_ID}" == "" ] || [ "${DBRESTORE_AWS_SECRET_ACCESS_KEY}" == "" ] || [ "${DBRESTORE_AWS_BUCKET}" == "" ] ) && echo missing AWS env vars && exit 1
[ "${DBRESTORE_FILE_NAME}" == "" ] && export DBRESTORE_FILE_NAME="`date +%Y-%m-%d`_anyway_partial.pgdump"

export AWS_ACCESS_KEY_ID="${DBRESTORE_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DBRESTORE_AWS_SECRET_ACCESS_KEY}"

# to create a dump from old anyway DB server:
# cd `mktemp -d`
# chown postgres .
# su postgres -c "pg_dumpall" > "`date +%Y-%m-%d`_anyway.pgdump"
# gzip *.pgdump
# s3cmd -c /etc/anyway-s3cfg put *.gz s3://anyway-db-dumps
# rm *.gz

TEMPDIR=`mktemp -d`
pushd $TEMPDIR
aws s3 cp "s3://${DBRESTORE_AWS_BUCKET}/${DBRESTORE_FILE_NAME}.gz" ./ &&\
gzip -d "${DBRESTORE_FILE_NAME}.gz" &&\
psql -f "${DBRESTORE_FILE_NAME}" &&\
if [ "${DBRESTORE_SET_ANYWAY_PASSWORD}" != "" ]; then
echo setting anyway role password &&\
echo "alter role anyway with password '${DBRESTORE_SET_ANYWAY_PASSWORD}'" | psql
fi
[ "$?" != "0" ] && echo failed && exit 1
popd
rm -rf $TEMPDIR
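
Because restore_db.sh is installed under /docker-entrypoint-initdb.d/, it only runs on the first start of an empty data directory, downloading DBRESTORE_FILE_NAME.gz from DBRESTORE_AWS_BUCKET and piping it into psql. A sketch of a first-time restore against the db image, with placeholder credentials (POSTGRES_* defaults are already baked into the image):

docker run --rm -p 5432:5432 \
  -e DBRESTORE_AWS_ACCESS_KEY_ID="<aws-access-key>" \
  -e DBRESTORE_AWS_SECRET_ACCESS_KEY="<aws-secret-key>" \
  -e DBRESTORE_FILE_NAME="2020-06-09_anyway_partial.pgdump" \
  anywayteam/db:dev   # DBRESTORE_AWS_BUCKET defaults to anyway-partial-db-dumps in the image
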
53 changes: 53 additions & 0 deletions docker-compose-production.override.yml
@@ -0,0 +1,53 @@
version: '3'

services:
nginx:
build: nginx_docker
image: anywayteam/nginx:latest
depends_on:
- anyway
ports:
- "8000:80"

anyway:
environment:
- DATABASE_URL
- GOOGLE_MAPS_KEY
- TWITTER_CONSUMER_KEY
- TWITTER_CONSUMER_SECRET
- TWITTER_ACCESS_KEY
- TWITTER_ACCESS_SECRET
- FACEBOOK_KEY
- FACEBOOK_SECRET
- GOOGLE_LOGIN_CLIENT_ID
- GOOGLE_LOGIN_CLIENT_SECRET
- MAILUSER
- MAILPASS
- GOOGLE_APPLICATION_CREDENTIALS
- GOOGLE_APPLICATION_CREDENTIALS_HOST_PATH
- APP_SECRET_KEY
volumes:
- "${GOOGLE_APPLICATION_CREDENTIALS_HOST_PATH}:${GOOGLE_APPLICATION_CREDENTIALS}"
command: "gunicorn -b 0.0.0.0:5000 -w 4 -t 120 anyway:app"

db:
environment:
- DBRESTORE_AWS_BUCKET=anyway-full-db-dumps
- DBRESTORE_FILE_NAME
- DBRESTORE_SET_ANYWAY_PASSWORD
- POSTGRES_USER
- POSTGRES_PASSWORD
- POSTGRES_DB

db-backup:
build:
context: db_docker
dockerfile: backup.Dockerfile
environment:
- DBDUMP_AWS_ACCESS_KEY_ID
- DBDUMP_AWS_SECRET_ACCESS_KEY
- DBDUMP_S3_FILE_PREFIX
- DBDUMP_PASSWORD
- DBDUMP_HOST
- DBDUMP_USER
restart: "no"
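
Because this file is not named docker-compose.override.yml, Compose will not merge it automatically; both files have to be passed explicitly. A hedged example of the intended production usage, assuming the referenced environment variables are exported or provided via an .env file:

docker-compose -f docker-compose.yml -f docker-compose-production.override.yml up -d nginx anyway db
# run a one-off backup using the db-backup service defined above
docker-compose -f docker-compose.yml -f docker-compose-production.override.yml run --rm db-backup
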
11 changes: 6 additions & 5 deletions docker-compose.yml
@@ -3,8 +3,8 @@ version: '3'
services:
anyway:
build: .
image: anywayteam/anyway:latest
container_name: anyway
image: hasadna/anyway:latest
ports:
- "8080:5000"
environment:
@@ -17,12 +17,13 @@ services:

db:
build: db_docker
image: anywayteam/db:latest
container_name: db
environment:
- POSTGRES_PASSWORD=anyway
- POSTGRES_USER=anyway
- POSTGRES_DB=anyway
- DB_DUMP_PATH=/tmp/anyway_public.pgdump
- DBRESTORE_AWS_ACCESS_KEY_ID
- DBRESTORE_AWS_SECRET_ACCESS_KEY
# the following line can be removed once the db backup job is running daily on kubernetes
- DBRESTORE_FILE_NAME=2020-06-09_anyway_partial.pgdump
volumes:
- db_data:/var/lib/postgresql/data
ports:
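
For local development the S3 restore is wired straight into docker-compose.yml: exporting the DBRESTORE_* credentials before the first start of the db service makes restore_db.sh pull and load the pinned partial dump. A minimal sketch, with placeholder credentials:

export DBRESTORE_AWS_ACCESS_KEY_ID="<aws-access-key>"
export DBRESTORE_AWS_SECRET_ACCESS_KEY="<aws-secret-key>"
docker-compose up -d db     # restore runs only while the db_data volume is still empty
docker-compose logs -f db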