From 018700c424206ea233ff3a96346d6ffb1bd4350c Mon Sep 17 00:00:00 2001
From: Ori Hoch
Date: Tue, 9 Jun 2020 19:31:42 +0300
Subject: [PATCH] Integrate k8s production (#1328)

* Push DB image to hub

* Enable downloading dump on DB startup

  This patch adds a dump download stage to container startup if the dump is
  missing but GDRIVE_FILE_ID is provided and RESTORE_DB is set to TRUE

* Update docker docs

* finalize db docker

* more finalizations of productionizations

* finalize production env vars + db dump/restore/build

* CI

* final changes

Co-authored-by: Dima Kuznetsov
---
 .dockerignore                          |  13 ++-
 .github/workflows/deploy.yml           | 115 +++++++++++++++++++++++--
 .gitignore                             |   1 +
 Dockerfile                             |   4 +
 anyway/flask_app.py                    |  25 +++++-
 db_docker/Dockerfile                   |  30 +++----
 db_docker/backup.Dockerfile            |   6 ++
 db_docker/dumpdb.sh                    |  36 ++++++++
 db_docker/restore_db.sh                |  32 +++++--
 docker-compose-production.override.yml |  53 ++++++++++++
 docker-compose.yml                     |  11 +--
 docker-entrypoint.sh                   |   6 +-
 docs/DOCKER.md                         | 103 ++++++++++++++++++----
 nginx_docker/Dockerfile                |   7 ++
 nginx_docker/anyway_upstream.conf      |   1 +
 nginx_docker/default.conf              |  21 +++++
 templates/index.html                   |  53 ++----------
 templates/sendemail.html               |   4 +-
 18 files changed, 416 insertions(+), 105 deletions(-)
 create mode 100644 db_docker/backup.Dockerfile
 create mode 100755 db_docker/dumpdb.sh
 create mode 100644 docker-compose-production.override.yml
 create mode 100644 nginx_docker/Dockerfile
 create mode 100644 nginx_docker/anyway_upstream.conf
 create mode 100644 nginx_docker/default.conf

diff --git a/.dockerignore b/.dockerignore
index 2647a4b83..97225104a 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,6 +1,15 @@
-**/Dockerfile
-**/docker-compose.yml
+.dockerignore
+Dockerfile
+docker-compose-*
+db_docker/*
 db_docker
 **/.git
 **/__pycache__
 **/pytest_cache
+.env*
+.vscode
+.venv
+venv
+.DS_Store
+.idea
+.*
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 699aaf749..0d62c3112 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -4,17 +4,122 @@ on:
     branches:
       - master
       - dev
+env:
+  DOCKER_REPOSITORY_ANYWAY: "anywayteam/anyway"
+  DOCKER_REPOSITORY_DB: "anywayteam/db"
+  DOCKER_REPOSITORY_DB_BACKUP: "anywayteam/db_backup"
+  DOCKER_REPOSITORY_NGINX: "anywayteam/nginx"
 jobs:
-  Deploy:
+  Build-anyway:
     if: github.repository == 'hasadna/anyway'
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
     - uses: actions/checkout@v2
-    - name: Build and push Docker images
+    - name: Build and push anyway Docker image
       uses: docker/build-push-action@v1.0.1
       with:
         username: ${{ secrets.DOCKER_USERNAME }}
         password: ${{ secrets.DOCKER_PASSWORD }}
-        repository: anywayteam/anyway
-        tag_with_ref: false
+        repository: ${{ env.DOCKER_REPOSITORY_ANYWAY }}
+        tag_with_ref: true
         tag_with_sha: true
+        cache_froms: ${{ env.DOCKER_REPOSITORY_ANYWAY }}:dev
+  Build-db:
+    if: github.repository == 'hasadna/anyway'
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v2
+    - name: Build and push database Docker image
+      uses: docker/build-push-action@v1.0.1
+      with:
+        path: 'db_docker'
+        username: ${{ secrets.DOCKER_USERNAME }}
+        password: ${{ secrets.DOCKER_PASSWORD }}
+        repository: ${{ env.DOCKER_REPOSITORY_DB }}
+        tag_with_ref: true
+        tag_with_sha: true
+        cache_froms: ${{ env.DOCKER_REPOSITORY_DB }}:dev
+  Build-db-backup:
+    if: github.repository == 'hasadna/anyway'
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v2
+    - name: Build and push database backup Docker image
+      uses: docker/build-push-action@v1.0.1
+      with:
+        path: 'db_docker'
+        dockerfile: 'db_docker/backup.Dockerfile'
+        username: ${{ secrets.DOCKER_USERNAME }}
+        password: ${{ secrets.DOCKER_PASSWORD }}
+        repository: ${{ env.DOCKER_REPOSITORY_DB_BACKUP }}
+        tag_with_ref: true
+        tag_with_sha: true
+        cache_froms: ${{ env.DOCKER_REPOSITORY_DB_BACKUP }}:dev
+  Build-nginx:
+    if: github.repository == 'hasadna/anyway'
+    needs: Build-anyway
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v2
+    - env:
+        DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+        DOCKER_REPOSITORY_ANYWAY: ${{ env.DOCKER_REPOSITORY_ANYWAY }}
+        DOCKER_REPOSITORY_NGINX: ${{ env.DOCKER_REPOSITORY_NGINX }}
+      run: |
+        REF_TAG="${GITHUB_REF##*/}" &&\
+        SHA_TAG=sha-`git rev-parse --short $GITHUB_SHA` &&\
+        echo REF_TAG=$REF_TAG &&\
+        echo SHA_TAG=$SHA_TAG &&\
+        docker pull "${DOCKER_REPOSITORY_ANYWAY}:${SHA_TAG}" &&\
+        if docker pull "${DOCKER_REPOSITORY_NGINX}:${REF_TAG}"; then
+          CACHE_FROM=" --cache-from ${DOCKER_REPOSITORY_NGINX}:${REF_TAG} "
+        else
+          CACHE_FROM=""
+        fi &&\
+        docker tag "${DOCKER_REPOSITORY_ANYWAY}:${SHA_TAG}" anywayteam/anyway:latest &&\
+        docker build $CACHE_FROM -t "${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" nginx_docker &&\
+        docker tag "${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" "${DOCKER_REPOSITORY_NGINX}:${REF_TAG}" &&\
+        echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin &&\
+        docker push "${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" &&\
+        docker push "${DOCKER_REPOSITORY_NGINX}:${REF_TAG}"
+  deploy:
+    if: github.repository == 'hasadna/anyway' && github.ref == 'refs/heads/master'
+    needs:
+    - Build-anyway
+    - Build-db
+    - Build-db-backup
+    - Build-nginx
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v2
+    - env:
+        DOCKER_REPOSITORY_ANYWAY: ${{ env.DOCKER_REPOSITORY_ANYWAY }}
+        DOCKER_REPOSITORY_DB: ${{ env.DOCKER_REPOSITORY_DB }}
+        DOCKER_REPOSITORY_DB_BACKUP: ${{ env.DOCKER_REPOSITORY_DB_BACKUP }}
+        DOCKER_REPOSITORY_NGINX: ${{ env.DOCKER_REPOSITORY_NGINX }}
+        HASADNA_K8S_DEPLOY_KEY: ${{ secrets.HASADNA_K8S_DEPLOY_KEY }}
+      run: |
+        SHA_TAG=sha-`git rev-parse --short $GITHUB_SHA` &&\
+        echo SHA_TAG=$SHA_TAG &&\
+        ANYWAY_IMAGE="${DOCKER_REPOSITORY_ANYWAY}:${SHA_TAG}" &&\
+        echo ANYWAY_IMAGE=$ANYWAY_IMAGE &&\
+        DB_IMAGE="${DOCKER_REPOSITORY_DB}:${SHA_TAG}" &&\
+        echo DB_IMAGE=$DB_IMAGE &&\
+        DB_BACKUP_IMAGE="${DOCKER_REPOSITORY_DB_BACKUP}:${SHA_TAG}" &&\
+        echo DB_BACKUP_IMAGE=$DB_BACKUP_IMAGE &&\
+        NGINX_IMAGE="${DOCKER_REPOSITORY_NGINX}:${SHA_TAG}" &&\
+        echo NGINX_IMAGE=$NGINX_IMAGE &&\
+        if ! git log -1 --pretty=format:"%s" | grep -- --no-deploy; then
+          cd `mktemp -d` &&\
+          echo "${HASADNA_K8S_DEPLOY_KEY}" > hasadna_k8s_deploy_key &&\
+          chmod 400 hasadna_k8s_deploy_key &&\
+          export GIT_SSH_COMMAND="ssh -i `pwd`/hasadna_k8s_deploy_key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" &&\
+          git clone git@github.com:hasadna/hasadna-k8s.git &&\
+          cd hasadna-k8s &&\
+          python update_yaml.py '{"anyway":{"image":"'${ANYWAY_IMAGE}'","dbImage":"'${DB_IMAGE}'","dbBackupImage":"'${DB_BACKUP_IMAGE}'","nginxImage":"'${NGINX_IMAGE}'"}}' ./values.auto-updated.yaml &&\
+          git config --global user.name "Anyway CI" &&\
+          git config --global user.email "anyway-ci@localhost" &&\
+          git add ./values.auto-updated.yaml && git commit -m "automatic update of anyway" &&\
+          git push origin master
+        fi
diff --git a/.gitignore b/.gitignore
index 75fec1ccb..df7be25c6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -57,3 +57,4 @@
 venv
 /static/js/years.js
 /.vscode
+/.env*
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 3837b1520..33cac5351 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -40,3 +40,7 @@ EXPOSE 5000
 
 ENTRYPOINT ["/anyway/docker-entrypoint.sh"]
 CMD FLASK_APP=anyway flask run --host 0.0.0.0
+
+ENV ALLOW_ALEMBIC_UPGRADE=yes
+ENV FLASK_APP=anyway
+RUN flask assets clean
\ No newline at end of file
diff --git a/anyway/flask_app.py b/anyway/flask_app.py
index d617c4157..e9132b24e 100755
--- a/anyway/flask_app.py
+++ b/anyway/flask_app.py
@@ -33,7 +33,7 @@
 from sqlalchemy import and_, not_, or_
 from sqlalchemy import func
 from sqlalchemy.orm import load_only
-from webassets import Environment as AssetsEnvironment
+from webassets import Environment as AssetsEnvironment, Bundle as AssetsBundle
 from webassets.ext.jinja2 import AssetsExtension
 from werkzeug.security import check_password_hash
 from wtforms import form, fields, validators, StringField, PasswordField, Form
@@ -95,6 +95,29 @@
 assets = Environment()
 assets.init_app(app)
 assets_env = AssetsEnvironment(os.path.join(utilities._PROJECT_ROOT, "static"), "/static")
+assets.register('css_all', AssetsBundle(
+    "css/jquery.smartbanner.css", "css/bootstrap.rtl.css", "css/style.css",
+    "css/daterangepicker.css", "css/accordion.css", "css/bootstrap-tour.min.css",
+    "css/jquery-ui.min.css", "css/jquery.jspanel.min.css", "css/markers.css",
+    filters='yui_css', output='css/app.min.css'
+))
+assets.register("js_all", AssetsBundle(
+    "js/libs/jquery-1.11.3.min.js", "js/libs/spin.js", "js/libs/oms.min.js",
+    "js/libs/markerclusterer.js", "js/markerClustererAugment.js", "js/libs/underscore.js",
+    "js/libs/backbone.js", "js/libs/backbone.paginator.min.js", "js/libs/bootstrap.js",
+    "js/libs/notify-combined.min.js", "js/libs/moment-with-langs.min.js", "js/libs/date.js",
+    "js/libs/daterangepicker.js", "js/libs/js-itm.js", "js/constants.js",
+    "js/marker.js", "js/clusterView.js", "js/featuredialog.js", "js/subscriptiondialog.js",
+    "js/preferencesdialog.js", "js/logindialog.js", "js/sidebar.js", "js/contextmenu.js",
+    "js/map_style.js", "js/clipboard.js", "js/libs/bootstrap-tour.min.js",
+    "js/app.js", "js/localization.js", "js/inv_dict.js", "js/veh_dict.js",
+    "js/retina.js", "js/statPanel.js", "js/reports.js",
+    filters="rjsmin", output="js/app.min.js"
+))
+assets.register("email_all", AssetsBundle(
+    "js/libs/jquery-1.11.3.min.js", "js/libs/notify-combined.min.js",
+    filters="rjsmin", output="js/app_send_email.min.js"
+))
 
 CORS(
     app,
diff --git a/db_docker/Dockerfile b/db_docker/Dockerfile
index 4f6b87e90..116eff941 100644
--- a/db_docker/Dockerfile
+++ b/db_docker/Dockerfile
@@ -1,18 +1,12 @@
-FROM postgis/postgis:9.6-2.5-alpine
-
-RUN apk add --update-cache python3 sudo &&\
-    pip3 install gdown
-
-ARG GDRIVE_FILE_ID=''
-ARG RESTORE_DB='FALSE'
-ARG GDRIVE_URL='https://drive.google.com/uc?id='
-ARG DB_DUMP_PATH=/tmp/anyway_public.pgdump
-
-ENV GDRIVE_FILE_ID=$GDRIVE_FILE_ID
-ENV RESTORE_DB=$RESTORE_DB
-
-ADD ./download_dump.py /
-
-RUN if [ "$RESTORE_DB" == "TRUE" ] ; then python3 download_dump.py && chmod +rwx $DB_DUMP_PATH; else echo "RESTORE_DB!=TRUE, not downloading dump" ; fi
-
-ADD ./restore_db.sh /docker-entrypoint-initdb.d/
+FROM postgis/postgis:9.6-2.5
+
+RUN apt-get update && apt-get install -y curl unzip &&\
+    curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" &&\
+    unzip awscliv2.zip && ./aws/install && rm -rf aws && aws --version
+COPY restore_db.sh /docker-entrypoint-initdb.d/
+
+# values suitable for docker-compose environment
+ENV POSTGRES_PASSWORD anyway
+ENV POSTGRES_USER anyway
+ENV POSTGRES_DB anyway
+ENV DBRESTORE_AWS_BUCKET=anyway-partial-db-dumps
diff --git a/db_docker/backup.Dockerfile b/db_docker/backup.Dockerfile
new file mode 100644
index 000000000..75eb4208b
--- /dev/null
+++ b/db_docker/backup.Dockerfile
@@ -0,0 +1,6 @@
+FROM postgres:9.6
+RUN apt-get update && apt-get install -y curl unzip &&\
+    curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" &&\
+    unzip awscliv2.zip && ./aws/install && rm -rf aws && aws --version
+COPY dumpdb.sh /
+ENTRYPOINT ["/dumpdb.sh"]
diff --git a/db_docker/dumpdb.sh b/db_docker/dumpdb.sh
new file mode 100755
index 000000000..c53fe1c28
--- /dev/null
+++ b/db_docker/dumpdb.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+export TZ=Asia/Jerusalem
+
+( [ "${DBDUMP_AWS_ACCESS_KEY_ID}" == "" ] || [ "${DBDUMP_AWS_SECRET_ACCESS_KEY}" == "" ] ) && echo missing AWS env vars && exit 1
+( [ "${DBDUMP_PASSWORD}" == "" ] || [ "${DBDUMP_HOST}" == "" ] || [ "${DBDUMP_USER}" == "" ] ) && echo missing DBDUMP env vars && exit 1
+
+export AWS_ACCESS_KEY_ID="${DBDUMP_AWS_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DBDUMP_AWS_SECRET_ACCESS_KEY}"
+
+dumpdb() {
+  PG_DUMP_ARGS="${1}"
+  DUMP_FILE="${2}"
+  BUCKET="${3}"
+  TEMPDIR=`mktemp -d`
+  pushd $TEMPDIR
+  echo "Dumping into dump file: ${DUMP_FILE}"
+  ! PGPASSWORD=$DBDUMP_PASSWORD $PG_DUMP_ARGS -h $DBDUMP_HOST -U $DBDUMP_USER > "${DUMP_FILE}" && echo failed to pg_dump && return 1
+  echo "Zipping down the dump file"
+  ! gzip "${DUMP_FILE}" && echo failed to gzip && return 1
+  echo "Uploading to S3"
+  ! aws s3 cp "${DUMP_FILE}.gz" "s3://${BUCKET}/" && echo failed to s3 cp && return 1
+  popd
+  rm -rf "${TEMPDIR}"
+}
+
+echo dumping full db &&\
+dumpdb "pg_dumpall" \
+  "`date +%Y-%m-%d`_${DBDUMP_S3_FILE_PREFIX}anyway.pgdump" \
+  "anyway-full-db-dumps" &&\
+  echo dumping partial db &&\
+dumpdb "pg_dump -d anyway --no-privileges -N topology -T users -T roles -T roles_users -T locationsubscribers -T report_preferences -T general_preferences" \
+  "`date +%Y-%m-%d`_${DBDUMP_S3_FILE_PREFIX}anyway_partial.pgdump" \
+  "anyway-partial-db-dumps" &&\
+echo Great Success && exit 0
+echo Failed && exit 1
diff --git a/db_docker/restore_db.sh b/db_docker/restore_db.sh
index dd25039f1..a8ee01c4f 100644
--- a/db_docker/restore_db.sh
+++ b/db_docker/restore_db.sh
@@ -1,12 +1,30 @@
 #!/bin/sh
 set -e
-set -x
 
-if [ "$RESTORE_DB" == "TRUE" ]; then
-    echo "******DEV Env - PostgreSQL initialisation******"
-    pg_restore -Fc "$DB_DUMP_PATH" -d "$POSTGRES_DB" --no-owner
-else
-    echo "Not DEV environment, not restoring db"
-fi
+( [ "${DBRESTORE_AWS_ACCESS_KEY_ID}" == "" ] || [ "${DBRESTORE_AWS_SECRET_ACCESS_KEY}" == "" ] || [ "${DBRESTORE_AWS_BUCKET}" == "" ] ) && echo missing AWS env vars && exit 1
+[ "${DBRESTORE_FILE_NAME}" == "" ] && export DBRESTORE_FILE_NAME="`date +%Y-%m-%d`_anyway_partial.pgdump"
+export AWS_ACCESS_KEY_ID="${DBRESTORE_AWS_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DBRESTORE_AWS_SECRET_ACCESS_KEY}"
+
+# to create a dump from old anyway DB server:
+# cd `mktemp -d`
+# chown postgres .
+# su postgres -c "pg_dumpall" > "`date +%Y-%m-%d`_anyway.pgdump"
+# gzip *.pgdump
+# s3cmd -c /etc/anyway-s3cfg put *.gz s3://anyway-db-dumps
+# rm *.gz
+
+TEMPDIR=`mktemp -d`
+pushd $TEMPDIR
+  aws s3 cp "s3://${DBRESTORE_AWS_BUCKET}/${DBRESTORE_FILE_NAME}.gz" ./ &&\
+  gzip -d "${DBRESTORE_FILE_NAME}.gz" &&\
+  psql -f "${DBRESTORE_FILE_NAME}" &&\
+  if [ "${DBRESTORE_SET_ANYWAY_PASSWORD}" != "" ]; then
+    echo setting anyway role password &&\
+    echo "alter role anyway with password '${DBRESTORE_SET_ANYWAY_PASSWORD}'" | psql
+  fi
+  [ "$?" != "0" ] && echo failed && exit 1
+popd
+rm -rf $TEMPDIR
diff --git a/docker-compose-production.override.yml b/docker-compose-production.override.yml
new file mode 100644
index 000000000..a58f970fa
--- /dev/null
+++ b/docker-compose-production.override.yml
@@ -0,0 +1,53 @@
+version: '3'
+
+services:
+  nginx:
+    build: nginx_docker
+    image: anywayteam/nginx:latest
+    depends_on:
+      - anyway
+    ports:
+      - "8000:80"
+
+  anyway:
+    environment:
+      - DATABASE_URL
+      - GOOGLE_MAPS_KEY
+      - TWITTER_CONSUMER_KEY
+      - TWITTER_CONSUMER_SECRET
+      - TWITTER_ACCESS_KEY
+      - TWITTER_ACCESS_SECRET
+      - FACEBOOK_KEY
+      - FACEBOOK_SECRET
+      - GOOGLE_LOGIN_CLIENT_ID
+      - GOOGLE_LOGIN_CLIENT_SECRET
+      - MAILUSER
+      - MAILPASS
+      - GOOGLE_APPLICATION_CREDENTIALS
+      - GOOGLE_APPLICATION_CREDENTIALS_HOST_PATH
+      - APP_SECRET_KEY
+    volumes:
+      - "${GOOGLE_APPLICATION_CREDENTIALS_HOST_PATH}:${GOOGLE_APPLICATION_CREDENTIALS}"
+    command: "gunicorn -b 0.0.0.0:5000 -w 4 -t 120 anyway:app"
+
+  db:
+    environment:
+      - DBRESTORE_AWS_BUCKET=anyway-full-db-dumps
+      - DBRESTORE_FILE_NAME
+      - DBRESTORE_SET_ANYWAY_PASSWORD
+      - POSTGRES_USER
+      - POSTGRES_PASSWORD
+      - POSTGRES_DB
+
+  db-backup:
+    build:
+      context: db_docker
+      dockerfile: backup.Dockerfile
+    environment:
+      - DBDUMP_AWS_ACCESS_KEY_ID
+      - DBDUMP_AWS_SECRET_ACCESS_KEY
+      - DBDUMP_S3_FILE_PREFIX
+      - DBDUMP_PASSWORD
+      - DBDUMP_HOST
+      - DBDUMP_USER
+    restart: "no"
diff --git a/docker-compose.yml b/docker-compose.yml
index eb8409109..084bcca3d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,8 +3,8 @@ version: '3'
 services:
   anyway:
     build: .
+    image: anywayteam/anyway:latest
     container_name: anyway
-    image: hasadna/anyway:latest
     ports:
       - "8080:5000"
     environment:
@@ -17,12 +17,13 @@ services:
 
   db:
     build: db_docker
+    image: anywayteam/db:latest
     container_name: db
     environment:
-      - POSTGRES_PASSWORD=anyway
-      - POSTGRES_USER=anyway
-      - POSTGRES_DB=anyway
-      - DB_DUMP_PATH=/tmp/anyway_public.pgdump
+      - DBRESTORE_AWS_ACCESS_KEY_ID
+      - DBRESTORE_AWS_SECRET_ACCESS_KEY
+      # the following line can be removed once the db backup job is running daily on kubernetes
+      - DBRESTORE_FILE_NAME=2020-06-09_anyway_partial.pgdump
     volumes:
       - db_data:/var/lib/postgresql/data
     ports:
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index 64a350d01..be27c3993 100755
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -7,6 +7,10 @@ until psql $DATABASE_URL -c "select 1" > /dev/null 2>&1 || [ $RETRIES -eq 0 ]; d
   sleep 10
 done
 
-! alembic upgrade head && echo failed to upgrade head && sleep 10 && exit 1
+if [ "${ALLOW_ALEMBIC_UPGRADE}" == "yes" ]; then
+  ! alembic upgrade head && echo failed to upgrade head && sleep 10 && exit 1
+else
+  echo not running alembic upgrade
+fi
 
 exec "$@"
diff --git a/docs/DOCKER.md b/docs/DOCKER.md
index 203e01f4e..677fdbd4d 100644
--- a/docs/DOCKER.md
+++ b/docs/DOCKER.md
@@ -26,7 +26,7 @@ Instructions
 **2.** [Install Docker](https://docs.docker.com/install/) and [Install Docker Compose](https://docs.docker.com/compose/install/)
 
-**3.** Copy the GDRIVE_FILE_ID from [this file](https://drive.google.com/file/d/1IRnSsRwwHFtmGTNlSOfChg-H6R8JKMpl/view?usp=sharing) (You need to request access)
+**3.** Get the `.env` file with the required secret values and place it in the project root directory.
 
 **Continue with your OS, See below**
 
 **For Mac:**
@@ -35,19 +35,17 @@ Instructions
 
 Otherwise, to build an existing environment with the most updated DB, remove DB volume by running `docker volume rm anyway_db_data`. Note - this will delete all of your local DB data!
 
-**5.** Build anyway container with updated DB data (in anyway main directory): `docker-compose -f docker-compose.yml build --build-arg RESTORE_DB=TRUE --build-arg GDRIVE_FILE_ID=""`
-- To get - see part 3
-- If you're having this kind of ERROR:Need service name for --build-arg option, make sure the docker-compose version is 1.25.2 and above (check with `docker-compose --version`)
-
-**6.** Start the container, go to the **anyway** directory and run:
+**5.** Start the container, go to the **anyway** directory and run:
 `docker-compose up`
 It will take a few minutes until it's done.
 
-**7.** **You're all set!** ANYWAY is up and running with the DB data - connect to http://127.0.0.1:8080
+**6.** **You're all set!** ANYWAY is up and running with the DB data - connect to http://127.0.0.1:8080
 Note - you won't see the map since the key works in production.
 If you need to see the map contact atalya via slack to get a developer key.
 
-**8.** To stop the containers run: `docker-compose down`
+**7.** To stop the containers run: `docker-compose down`
+
+**8.** To restore fresh DB data, delete all existing volumes: `docker-compose down -v` then restart from step 5
 
 **For Ubuntu:**
 
@@ -55,19 +53,17 @@ If you need to see the map contact atalya via slack to get a developer key.
 
 Otherwise, to build an existing environment with the most updated DB, remove DB volume by running `sudo docker volume rm anyway_db_data`. Note - this will delete all of your local DB data!
 
-**5.** Build anyway container with updated DB data (in anyway main directory): `sudo docker-compose -f docker-compose.yml build --build-arg RESTORE_DB=TRUE --build-arg GDRIVE_FILE_ID=""`
-- To get - see part 3
-- If you're having this kind of ERROR:Need service name for --build-arg option, make sure the docker-compose version is 1.25.2 and above (check with `sudo docker-compose --version`)
-
-**6.** Start the container, go to the **anyway** directory and run:
+**5.** Start the container, go to the **anyway** directory and run:
 `sudo docker-compose up`
 It will take a few minutes until it's done.
 
-**7.** **You're all set!** ANYWAY is up and running with the DB data - connect to http://127.0.0.1:8080
+**6.** **You're all set!** ANYWAY is up and running with the DB data - connect to http://127.0.0.1:8080
 Note - you won't see the map since the key works in production.
 If you need to see the map for development email us [anyway@anyway.co.il](mailto:anyway@anyway.co.il) to get a developer key.
 
-**8.** To stop the containers run: `sudo docker-compose down`
+**7.** To stop the containers run: `sudo docker-compose down`
+
+**8.** To restore fresh DB data, delete all existing volumes: `sudo docker-compose down -v` then restart from step 5
 
 ## Additional Docker commands
 Use `sudo` before each docker commands if you are using ubuntu.
@@ -151,3 +147,80 @@ This loads the ./anyway dir (relative to the docker-compose file) as /anyway/any
 Questions and ideas
 -----------------
 Talk to Atalya on HASADNA's Slack (atalya) or email us [anyway@anyway.co.il](mailto:anyway@anyway.co.il).
+
+
+Testing production environment locally
+--------------------------------------
+
+This process allows you to emulate a full production environment locally for testing. This is an advanced operation and not needed for normal development.
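+
+The production stack is the regular `docker-compose.yml` with `docker-compose-production.override.yml` layered on top, and docker-compose picks up the `.env` file from the project directory automatically. As a quick sanity check (a sketch, assuming a reasonably recent docker-compose), you can print the merged configuration before starting anything:
+
+```
+docker-compose -f docker-compose.yml -f docker-compose-production.override.yml config
+```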
+
+Create a .env file for production (set relevant values):
+
+```
+# app env vars
+DATABASE_URL=postgresql://anyway:12345678@db/anyway
+GOOGLE_MAPS_KEY=
+TWITTER_CONSUMER_KEY=
+TWITTER_CONSUMER_SECRET=
+TWITTER_ACCESS_KEY=
+TWITTER_ACCESS_SECRET=
+FACEBOOK_KEY=
+FACEBOOK_SECRET=
+GOOGLE_LOGIN_CLIENT_ID=
+GOOGLE_LOGIN_CLIENT_SECRET=
+MAILUSER=
+MAILPASS=
+GOOGLE_APPLICATION_CREDENTIALS=/secrets/GOOGLE_APPLICATION_CREDENTIALS_KEY.json
+GOOGLE_APPLICATION_CREDENTIALS_HOST_PATH=/host/path/to/google_application_credentials.json
+APP_SECRET_KEY=
+
+# db env vars
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=123456
+POSTGRES_DB=postgres
+# aws access/secret with permissions to read from full db dumps bucket
+DBRESTORE_AWS_ACCESS_KEY_ID=
+DBRESTORE_AWS_SECRET_ACCESS_KEY=
+DBRESTORE_AWS_BUCKET=anyway-full-db-dumps
+DBRESTORE_FILE_NAME=2020-06-09_anyway.pgdump
+# should match the password set in app env vars
+DBRESTORE_SET_ANYWAY_PASSWORD=12345678
+
+# db-backup env vars
+DBDUMP_S3_FILE_PREFIX=testing_
+# aws access/secret with permissions to write to both full and partial db dumps buckets
+DBDUMP_AWS_ACCESS_KEY_ID=
+DBDUMP_AWS_SECRET_ACCESS_KEY=
+# db connection details to the postgres user
+DBDUMP_USER=postgres
+DBDUMP_PASSWORD=123456
+DBDUMP_HOST=db
+```
+
+Create a shell alias to run docker-compose for production
+
+```
+alias docker-compose-prod="docker-compose -f docker-compose.yml -f docker-compose-production.override.yml"
+```
+
+Restore the DB
+
+```
+docker-compose-prod down -v
+docker-compose-prod up --build db
+```
+
+Start the app
+
+```
+docker-compose-prod up --build nginx anyway
+```
+
+Access the app at http://localhost:8000
+
+Run the backup job
+
+```
+docker-compose-prod build db-backup
+docker-compose-prod run db-backup
+```
\ No newline at end of file
diff --git a/nginx_docker/Dockerfile b/nginx_docker/Dockerfile
new file mode 100644
index 000000000..c916b8227
--- /dev/null
+++ b/nginx_docker/Dockerfile
@@ -0,0 +1,7 @@
+FROM anywayteam/anyway:latest AS builder
+RUN flask assets build
+
+FROM nginx:stable
+COPY --from=builder /anyway/static /opt/anyway/static
+COPY default.conf /etc/nginx/conf.d/default.conf
+COPY anyway_upstream.conf /etc/nginx/anyway_upstream.conf
diff --git a/nginx_docker/anyway_upstream.conf b/nginx_docker/anyway_upstream.conf
new file mode 100644
index 000000000..3c19262b4
--- /dev/null
+++ b/nginx_docker/anyway_upstream.conf
@@ -0,0 +1 @@
+server anyway:5000;
diff --git a/nginx_docker/default.conf b/nginx_docker/default.conf
new file mode 100644
index 000000000..0059d1ed1
--- /dev/null
+++ b/nginx_docker/default.conf
@@ -0,0 +1,21 @@
+upstream anyway {
+    include anyway_upstream.conf;
+}
+
+server {
+    listen 80;
+    server_name _;
+
+    location / {
+        proxy_pass http://anyway;
+        proxy_pass_header Server;
+        proxy_redirect off;
+        proxy_connect_timeout 10;
+        proxy_read_timeout 120;
+    }
+
+    location /static/ {
+        alias /opt/anyway/static/;
+        expires 30d;
+    }
+}
diff --git a/templates/index.html b/templates/index.html
index 5ff42400d..bbb92e8b9 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -17,18 +17,9 @@
-    {% assets filters="yui_css", output="css/app.min.css",
-    "css/jquery.smartbanner.css",
-    "css/bootstrap.rtl.css",
-    "css/style.css",
-    "css/daterangepicker.css",
-    "css/accordion.css",
-    "css/bootstrap-tour.min.css",
-    "css/jquery-ui.min.css",
-    "css/jquery.jspanel.min.css",
-    "css/markers.css" %}
-
-
+    {% assets "css_all" %}
+
+    {% endassets %}
@@ -1123,42 +1114,8 @@
-{% assets filters="rjsmin", output="js/app.min.js",
-"js/libs/jquery-1.11.3.min.js",
-"js/libs/spin.js",
-"js/libs/oms.min.js",
-"js/libs/markerclusterer.js",
-"js/markerClustererAugment.js",
-"js/libs/underscore.js",
-"js/libs/backbone.js",
-"js/libs/backbone.paginator.min.js",
-"js/libs/bootstrap.js",
-"js/libs/notify-combined.min.js",
-"js/libs/moment-with-langs.min.js",
-"js/libs/date.js",
-"js/libs/daterangepicker.js",
-"js/libs/js-itm.js",
-"js/constants.js",
-"js/marker.js",
-"js/clusterView.js",
-"js/featuredialog.js",
-"js/subscriptiondialog.js",
-"js/preferencesdialog.js",
-"js/logindialog.js",
-"js/sidebar.js",
-"js/contextmenu.js",
-"js/map_style.js",
-"js/clipboard.js",
-"js/libs/bootstrap-tour.min.js",
-"js/app.js",
-"js/localization.js",
-"js/inv_dict.js",
-"js/veh_dict.js",
-"js/retina.js",
-"js/statPanel.js",
-"js/reports.js"
-%}
-
+{% assets "js_all" %}
+
+{% endassets %}
 
 {% endassets %}