diff --git a/.eslintrc.json b/.eslintrc.json index 92360b85..26f3f4aa 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -5,13 +5,24 @@ "sourceType": "module", "project": "./tsconfig.json" }, - "extends": ["next/core-web-vitals", "plugin:react/recommended", "prettier"], + "extends": [ + "next/core-web-vitals", + "plugin:react/recommended", + "plugin:@typescript-eslint/recommended", + "prettier" + ], "rules": { "react/react-in-jsx-scope": "off", "react/prop-types": "off", - "custom-rules/no-text-size-class": "warn" + "custom-rules/no-text-size-class": "warn", + "prettier/prettier": "error" }, - "plugins": ["react", "eslint-plugin-custom-rules"], + "plugins": [ + "react", + "eslint-plugin-custom-rules", + "@typescript-eslint", + "prettier" + ], "settings": { "react": { "version": "detect" diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..efd3d715 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,16 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "pip" + directory: "/data" + schedule: + interval: "weekly" diff --git a/.github/workflows/pr_checks.yaml b/.github/workflows/pr_checks.yaml index c802adac..81ab11bb 100644 --- a/.github/workflows/pr_checks.yaml +++ b/.github/workflows/pr_checks.yaml @@ -1,27 +1,40 @@ -name: Frontend PR Checks -on: # see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows - workflow_dispatch: - pull_request: - paths: - - "src/**/*" - - "package.json" - - "package-lock.json" - - "*.js" +name: PR Checks + +on: + pull_request: + branches: + - staging + jobs: - lint_and_build: + lint: runs-on: ubuntu-latest + steps: - - name: Check out repository code + - name: Checkout repository uses: actions/checkout@v4 - - name: Setup Node.js + + - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 
"20" - cache: "npm" - cache-dependency-path: "package-lock.json" + node-version: '20' + - name: Install dependencies - run: npm ci - - name: Run lints + run: npm ci + + - name: Run ESLint run: npm run lint - - name: Check the build - run: npm run build + continue-on-error: true + + - name: Run Prettier Check + run: npm run format:check + continue-on-error: true + + - name: Check linting and formatting + if: failure() + run: | + echo "Linting or formatting issues found. Please run 'npm run lint:fix' and 'npm run format' to fix them." + exit 1 + + - name: Linting and formatting success + if: success() + run: echo "No linting or formatting issues found. Good job!" diff --git a/.github/workflows/pr_checks_backend.yml b/.github/workflows/pr_checks_backend.yml new file mode 100644 index 00000000..8baf2a3c --- /dev/null +++ b/.github/workflows/pr_checks_backend.yml @@ -0,0 +1,81 @@ +name: PR Checks Backend + +on: + pull_request: + branches: + - staging + paths: + - "data/**" + - "Dockerfile-pg" + - "init_pg.sql" + - "docker-compose.yml" + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11.4" + + - name: Run Black Formatter in Docker + run: | + cd data + docker-compose run --rm formatter + continue-on-error: true + + - name: Run Flake8 + run: | + cd data + pip install flake8 + flake8 . 
+ continue-on-error: true + + - name: Install dependencies and build Docker images + run: | + cd data + docker-compose pull + docker-compose build + + - name: Run vacant-lots-proj + run: | + cd data + docker-compose up -d vacant-lots-proj + continue-on-error: true + + - name: Build and check Postgres container + run: | + cd data + docker-compose build postgres + continue-on-error: true + + - name: Check build status + if: failure() + run: | + echo "The vacant-lots-proj or postgres container failed to build and run." + exit 1 + + - name: Report success + if: success() + run: echo "The vacant-lots-proj and postgres container built and ran successfully." + + - name: Check formatter and linter status + if: failure() + run: | + echo "Formatting or linting issues found. Please fix the issues." + exit 1 + + - name: Formatter and linter success + if: success() + run: echo "Formatting and linting passed successfully." diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..c3e03287 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,5 @@ +{ + "semi": true, + "singleQuote": true, + "trailingComma": "es5" +} diff --git a/.vscode/settings.json b/.vscode/settings.json index a52adfa0..67c22a62 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,13 +1,21 @@ { "editor.formatOnSave": true, "editor.defaultFormatter": "esbenp.prettier-vscode", - "python.analysis.extraPaths": ["${workspaceFolder}/data/src","${workspaceFolder}/data/src/awkde"], + "python.analysis.extraPaths": [ + "${workspaceFolder}/data/src", + "${workspaceFolder}/data/src/awkde" + ], "python.testing.pytestEnabled": true, "python.testing.cwd": "${workspaceFolder}/data/src", "[python]": { "editor.formatOnSave": true, "editor.codeActionsOnSave": { - "source.organizeImports": "explicit" + "source.organizeImports": "explicit" }, - } + "editor.defaultFormatter": "ms-python.black-formatter" + }, + "python.formatting.provider": "black", + "python.linting.flake8Enabled": true, + 
"python.linting.enabled": true, + "python.linting.flake8Args": ["--max-line-length=88"] } diff --git a/data/Dockerfile b/data/Dockerfile index 0e119436..d5cf8d40 100644 --- a/data/Dockerfile +++ b/data/Dockerfile @@ -29,6 +29,9 @@ ENV GDAL_CONFIG=/usr/bin/gdal-config # Install Pipenv RUN pip install pipenv +# update pipfile +RUN pipenv lock + # Copy the Pipfile and Pipfile.lock from the src directory COPY src/Pipfile src/Pipfile.lock ./ diff --git a/docs/SETUP/BACK_END.md b/docs/SETUP/BACK_END.md index 2e0eb872..0487891b 100644 --- a/docs/SETUP/BACK_END.md +++ b/docs/SETUP/BACK_END.md @@ -2,84 +2,130 @@ ## Overview -If you are planning to contribute to the data wrangling and database management on this project and will need to run the Python script, follow the installation and setup instructions below. +If you plan to contribute to the data wrangling and database management on this project and need to run the Python script, follow the installation and setup instructions below. ## Setup ### Fork the Repository -1. Navigate to [our GitHub repository](https://github.com/CodeForPhilly/vacant-lots-proj). -2. Create a fork of the repository by clicking the "Fork" button in the top right corner of the page. This will create a copy of the repository in your own GitHub account. +1. Navigate to [our GitHub repository](https://github.com/CodeForPhilly/clean-and-green-philly). +2. Create a fork of the repository by clicking the "Fork" button in the top right corner of the page. 3. Clone your fork of the repository to your local machine using `git clone`. -Note: make sure to keep your fork up to date with the original repository by following the instructions [here](https://docs.github.com/en/get-started/quickstart/fork-a-repo#keep-your-fork-synced). +**Note:** Keep your fork up to date with the original repository by following the instructions [here](https://docs.github.com/en/get-started/quickstart/fork-a-repo#keep-your-fork-synced). 
### Set Environment Variables -The project needs certain specific and sensitive pieces of information to run. We store these in the user's development environment and not in source control. +The project requires specific and sensitive information to run, which should be stored in the user's development environment rather than in source control. Here are instructions for setting environment variables locally on your machine or using a `.env` file. -For Mac and Linux, you can permanently store the environmental variables in your command line shell's configuration file, e.g. `~/.bashrc`, `~/.bash_profile`, `~/.zshrc` or `~/.profile`. Add a line `export VAR_NAME=VALUE` in your file and run `source file` to read it in when newly created. Any new shells will automatically have the new environment. +#### Using a .env File -For Windows, you can set environmental variables under System -> Advanced or you can download a terminal emulator such as [Git Bash](https://gitforwindows.org/) and follow the instructions for Mac and Linux above. A terminal emulator is recommended. +1. Create a file named `.env` in the `/data` subdirectory of your project. +2. Add the following environment variables to the `.env` file: -All of your local environmental variables will be passed through to docker-compose so if you have them locally you should not have to hard-code them as indicated below. +```sh +POSTGRES_PASSWORD=a-strong-password-here +VACANT_LOTS_DB=postgresql://postgres:${POSTGRES_PASSWORD}@localhost:5433/vacantlotdb +``` + +All local environment variables will be passed through to docker-compose, so if you have them set up in the `.env` file, you should not need to hard-code them elsewhere. + +#### Setting Environment Variables Locally + +For Mac and Linux, you can permanently store the environment variables in your command line shell's configuration file, e.g., `~/.bashrc`, `~/.bash_profile`, `~/.zshrc`, or `~/.profile`. 
Add a line `export VAR_NAME=VALUE` in your file and run `source <file>` to read it in when newly created. Any new shells will automatically have the new environment. -#### PostgreSQL +For Windows, you can set environment variables under System -> Advanced or you can download a terminal emulator such as [Git Bash](https://gitforwindows.org/) and follow the instructions for Mac and Linux above. A terminal emulator is recommended. -Create an environmental variable called `POSTGRES_PASSWORD` and set its value to a new, strong password to use for your local postgres installation in Docker. After that, add the below variable to configure the full postgres connection string: -`VACANT_LOTS_DB="postgresql://postgres:${POSTGRES_PASSWORD}@localhost:5433/vacantlotdb"` +```sh +export POSTGRES_PASSWORD=a-strong-password-here +export VACANT_LOTS_DB=postgresql://postgres:${POSTGRES_PASSWORD}@localhost:5433/vacantlotdb +``` + +All of your local environment variables will be passed through to docker-compose, so if you have them locally, you should not have to hard-code them. ### Docker Build Docker is a platform that allows you to containerize and run applications in isolated environments, making it easier to manage dependencies and ensure consistent deployments. Download the [latest version of Docker Desktop for your operating system](https://www.docker.com/products/docker-desktop/). -We use [docker-compose](https://docs.docker.com/compose/) to manage the backend Docker services. The `data/docker-compose.yml` file defines the services. The only service that runs perpetually in Docker is `postgres`. The other services are one-time batch jobs to build the data sets. +We use [docker compose](https://docs.docker.com/compose/) to manage the backend Docker services. The `data/docker-compose.yml` file defines the services. The only service that runs perpetually in Docker is `postgres`. The other services are one-time batch jobs to build the data sets. + +1. 
The first time you set up your backend, or any time either of the two Docker files change, build the Docker services by running: + + ```sh + docker compose build + ``` -The first time you set up your backend, or any time either of the two Docker files change, you should build the Docker services by running `docker-compose build`. It may take a while to install the dependencies but you will only need to do this once. You can rebuild only one container, such as `postgres`, with `docker-compose build postgres`. For first-time runs, you should set `FORCE_RELOAD = True` in `config.py` and optionally `log_level: int = logging.DEBUG` to get more verbose output. + This should correctly build both containers. However, if it does not, you can explicitly build the postgres container with the following: -All Docker commands should be run from the `data/` directory. There is one main `Dockerfile` for the batch scripts and one called `Dockerfile-pg` for the PostgreSQL and postgis installation. There is also a file called `init_pg.sql` that is run one time by Docker when the postgres data volume is empty to create the database and install postgis. You should not have to touch any of the above three files. + ```sh + docker compose build postgres + ``` + +2. When both containers are built, connect to the PG database in the container by running: + ```sh + docker compose up -d postgres + ``` + +For first-time runs, set `FORCE_RELOAD=True` in `config.py` and optionally `log_level: int = logging.DEBUG` to get more verbose output. + +All Docker commands should be run from the `data/` directory. There is one main `Dockerfile` for the batch scripts and one called `Dockerfile-pg` for the PostgreSQL and postgis installation. There is also a file called `init_pg.sql` that is run one time by Docker when the postgres data volume is empty to create the database and install postgis. You should not have to touch any of the above three files. #### Windows 1. 
Make sure Docker is running by opening the Docker Desktop app. -2. Open the command prompt. Navigate to the location of the `vacant-lots-proj` repository. Run `cd data` and then `docker-compose run vacant-lots-proj`. -3. When the script is done running, you’ll get a notification. When you’re done, to shut off the Docker container (which uses memory), run `docker-compose down`. +2. Open the command prompt. Navigate to the location of the `clean-and-green-philly` repository. Run `cd data` and then `docker compose run vacant-lots-proj`. +3. When the script is done running, you’ll get a notification. When you’re done, to shut off the Docker container (which uses memory), run `docker compose down`. #### Linux -1. In the terminal, navigate to your repository location using `cd path/to/repository`. Then run `cd data` to move into the `data` directory. -2. Run `docker-compose run vacant-lots-proj`. Enter your password if requested. If you run into an error message related to "KEY_ID" or something like similar, you may have to do the following: - -- Hard-code your VACANT_LOTS_DB variable in `docker-compose.yml`. +1. In the terminal, navigate to your repository location using `cd path/to/clean-and-green-philly`. Then run `cd data` to move into the `data` directory. +2. Run `docker compose run vacant-lots-proj`. Enter your password if requested. If you run into an error message related to "KEY_ID" or something similar, you may have to do the following: - Hard-code your `VACANT_LOTS_DB` variable in `docker-compose.yml`. The backend also works on WSL Ubuntu running Docker for Linux on Windows 10. -3. When you're finished, and you want to shut down the Docker container, run `docker-compose down`. +3. When you're finished, and you want to shut down the Docker container, run `docker compose down`. #### macOS -In the terminal, use the `cd` command to navigate to your repository location, and then into the `data` directory. Run `docker-compose run vacant-lots-proj`. 
This command starts Docker Compose and sets up your environment as defined in your `docker-compose.yml` file. When you're finished and want to shut down the Docker containers, run `docker-compose down`. +In the terminal, use the `cd` command to navigate to your repository location, and then into the `data` directory. Run `docker compose run vacant-lots-proj`. This command starts Docker Compose and sets up your environment as defined in your `docker-compose.yml` file. When you're finished and want to shut down the Docker containers, run `docker compose down`. ### PostgreSQL -[PostgreSQL](https://www.postgresql.org/) AKA postgres, pg, psql is an open-source relational database management system. It is used in this project only by the data load script to stage data and by the data diff process to compare new data with backed up data. It is not needed by the front-end to run. We run Postgres with the [Postgis](https://postgis.net/) extension for geospatial data in a Docker container. +[PostgreSQL](https://www.postgresql.org/) AKA postgres, pg, psql is an open-source relational database management system. It is used in this project only by the data load script to stage data and by the data diff process to compare new data with backed up data. It is not needed by the front-end to run. We run Postgres with the [Postgis](https://postgis.net/) extension for geospatial data in a Docker container. -We are running postgres on the non-standard port 5433 instead of the default of 5432. This is so our Docker postgres will not conflict with any native postgres already running on the developer's PC. +We are running postgres on the non-standard port 5433 instead of the default of 5432. This is so our Docker postgres will not conflict with any native postgres already running on the developer's PC. To start the postgres Docker container, run: -`docker compose up -d postgres`. 
You can access the psql command line in your container to work with the database with this command: `docker exec -it cagp-postgres psql -U postgres -d vacantlotdb`. To stop the postgres container run `docker compose down postgres`. + +```sh +docker compose up -d postgres +``` + +You can access the psql command line in your container to work with the database with this command: + +```sh +docker exec -it cagp-postgres psql -U postgres -d vacantlotdb +``` + +To stop the postgres container run: + +```sh +docker compose down postgres +``` ## Python Development -You can set up your local Python environment so you can develop and run the backend `script.py` and create and run unit tests outside of Docker. Build your local environment to match what is defined in the `Dockerfile`. Install the same python version as is in the Dockerfile, using `pyenv` to manage multiple distributions if needed. Use `pipenv` to create a virtual environment. Install the pip dependencies that are defined in the `Pipfile` into your virtual environment. Install the executables with `apt-get`. Now you can develop in Python in your terminal and IDE and run unit tests with `pytest`. +You can set up your local Python environment so you can develop and run the backend `script.py` and create and run unit tests outside of Docker. Build your local environment to match what is defined in the `Dockerfile`. Install the same python version as is in the Dockerfile, using `pyenv` to manage multiple distributions if needed. Use `pipenv` to create a virtual environment. Install the pip dependencies that are defined in the `Pipfile` into your virtual environment. Install the executables with `apt-get`. Now you can develop in Python in your terminal and IDE and run unit tests with `pytest`. ## Configuration -There are numerous configuration variables in `data/src/config/config.py`. See the documentation in that file for each variable. 
You will also have to set up environmental variables for keys and database connection parameters as defined throughout this document. +There are numerous configuration variables in `data/src/config/config.py`. See the documentation in that file for each variable. You will also have to set up environmental variables for keys and database connection parameters as defined throughout this document. There are the following secrets that may be securely shared with you by the project leads: -- The password for the project's Google account to access the cloud platform. For development purposes you can work in your personal cloud account, see the GCP section below. -- The Slack API key to post diff reports to the project Slack via the messenger bot. See the 'Backup and difference reporting' section below. You can set up your own Slack bot for your personal workspace and use that API key for local testing. See [this link](https://www.datacamp.com/tutorial/how-to-send-slack-messages-with-python) for instructions or do a Google search on how to do it. + +- The password for the project's Google account to access the cloud platform. For development purposes, you can work in your personal cloud account, see the GCP section below. +- The Slack API key to post diff reports to the project Slack via the messenger bot. See the 'Backup and difference reporting' section below. You can set up your own Slack bot for your personal workspace and use that API key for local testing. See [this link](https://www.datacamp.com/tutorial/how-to-send-slack-messages-with-python) for instructions or do a Google search on how to do it. 
#### Making code changes @@ -89,18 +135,28 @@ Changes to our codebase should always address an [issue](https://github.com/Code Format all python files by running: -``` -docker-compose run formatter +```sh +docker compose run formatter ``` #### Google Cloud (GCP) The map data is converted to the [pmtiles](https://docs.protomaps.com/pmtiles/) format and served from Google Cloud. For access to production credentials, contact the project lead. -You can run the tile build locally with `docker-compose run vacant-lots-proj` to create a tile file and upload it to your own GCP bucket. First, create your own GCP account using their free trial. You will need to create the following assets in your GCP account and configure them in the environment variables in docker-compose.yml: -- Under APIs and Services -> Credentials, create an API key and put that in the CLEAN_GREEN_GOOGLE_KEY variable -- Under APIs and Services -> Credentials, create a service account. After you create the service account you will download the service account private key file named like encoded-keyword-ddd-xxx.json. Copy that to ~/.config/gcloud/application_default_credentials.json. This path is specified by default in the volumes section of the docker compose file. -- Go to Cloud storage -> Buckets and create a new bucket. Name it logically, e.g. cleanandgreenphl-{your_initials}. It has to be globally unique. Grant access to at least write to the bucket to your service account. Put your bucket name in the GOOGLE_CLOUD_BUCKET_NAME variable. Make sure the tiles file in your bucket is publicly accessible by following Google's instructions online. +You can run the tile build locally with `docker compose run vacant-lots-proj` to create a tile file and upload it to your own GCP bucket. First, create your own GCP account using their free trial. You will need to create the following assets in your GCP account and configure them in the environment variables in the `.env` file: + +1. 
Under APIs and Services -> Credentials, create an API key and put that in the CLEAN_GREEN_GOOGLE_KEY variable. +2. Under APIs and Services -> Credentials, create a service account. After you create the service account you will download the service account private key file named like encoded-keyword-ddd-xxx.json. Copy that to `~/.config/gcloud/application_default_credentials.json`. This path is specified by default in the volumes section of the docker compose file. +3. Go to Cloud storage -> Buckets and create a new bucket. Name it logically, e.g. cleanandgreenphl-{your_initials}. It has to be globally unique. Grant access to at least write to the bucket to your service account. Put your bucket name in the GOOGLE_CLOUD_BUCKET_NAME variable. Make sure the tiles file in your bucket is publicly accessible by following Google's instructions online. + +Your `/data/.env` file should now look like this: + +```sh +POSTGRES_PASSWORD=a-strong-password-here +VACANT_LOTS_DB=postgresql://postgres:${POSTGRES_PASSWORD}@localhost:5433/vacantlotdb +CLEAN_GREEN_GOOGLE_KEY=your-api-key-here +GOOGLE_CLOUD_BUCKET_NAME=your-bucket-name-here +``` The python script loads the tiles to Google Cloud as `vacant_properties_tiles_staging.pmtiles`. You can check this tileset by changing the config setting on the frontend `useStagingTiles` to `true`. If the tiles look OK, manually change the name in Google Cloud to remove the `_staging` and archive the previous copy. @@ -108,22 +164,23 @@ The python script loads the tiles to Google Cloud as `vacant_properties_tiles_st To update streetview images, after running the full data script run: -``` -docker-compose run streetview +```sh +docker compose run streetview ``` The script should only load new images that aren't in the bucket already (new properties added to list). 
#### Backup and difference reporting -Whenever the data load script is run in force reload mode, the old data set is backed up and a report of any differences is sent to the team via Slack. Differences in data are calculated using the [data-diff](https://github.com/datafold/data-diff) package. See [issue 520](https://github.com/CodeForPhilly/clean-and-green-philly/issues/520) in Github. -Backups are done in PostgreSQL in the vacantlotsdb database by copying the whole public schema to a backup schema named backup_{timestamp}. Besides the original tables, the backup schema includes a '{table_name}_diff' table with details of the differences from data-diff for each table. +Whenever the data load script is run in force reload mode, the old data set is backed up and a report of any differences is sent to the team via Slack. Differences in data are calculated using the [data-diff](https://github.com/datafold/data-diff) package. See [issue 520](https://github.com/CodeForPhilly/clean-and-green-philly/issues/520) in Github. + +Backups are done in PostgreSQL in the vacantlotsdb database by copying the whole public schema to a backup schema named backup\_{timestamp}. Besides the original tables, the backup schema includes a '{table_name}\_diff' table with details of the differences from data-diff for each table. -Backup schemas are only kept for one year by default. Backup schemas older than a year are deleted at the end of the load script. +Backup schemas are only kept for one year by default. Backup schemas older than a year are deleted at the end of the load script. -After all runs of the back-end script, the tiles file is backed up to the backup/ directory in the GCP bucket with a timestamp. If the main tiles file ever gets corrupted, it can be rolled back to a backup file. +After all runs of the back-end script, the tiles file is backed up to the backup/ directory in the GCP bucket with a timestamp. 
If the main tiles file ever gets corrupted, it can be rolled back to a backup file. -When a diff is performed, an html file of the contents of the '{table_name}_diff' table is generated for each table and uploaded to the public GCP bucket so it can be viewed in a web browser. The location of the html files is in the format: https://storage.googleapis.com/cleanandgreenphl/diff/{backup_timestamp}/{table_name}.html The link to the detail diff page is included in the Slack report message. +When a diff is performed, an html file of the contents of the '{table_name}\_diff' table is generated for each table and uploaded to the public GCP bucket so it can be viewed in a web browser. The location of the html files is in the format: https://storage.googleapis.com/cleanandgreenphl/diff/{backup_timestamp}/{table_name}.html The link to the detail diff page is included in the Slack report message. The `CAGP_SLACK_API_TOKEN` environmental variable must be set with the API key for the Slack app that can write messages to the channel as configured in the config.py `report_to_slack_channel` variable. @@ -133,29 +190,28 @@ The report will also be emailed to any emails configured in the config.py `repor The job to reload the backend data has been scheduled in the Google Cloud to run on a weekly basis. -A virtual machine running Debian Linux named `backend` is set up in the compute engine of the CAGP GCP account. The staging branch of the git project has been cloned here into the home directory of the `cleanandgreenphl` user. All required software such as docker and git has been installed on this vm. +A virtual machine running Debian Linux named `backend` is set up in the compute engine of the CAGP GCP account. The staging branch of the git project has been cloned here into the home directory of the `cleanandgreenphl` user. All required software such as docker and git has been installed on this vm. 
-To access the Linux terminal of this vm instance via SSH you can use the 'SSH-in-browser' GCP tool on the web. Go to Compute Engine -> VM instances and select SSH next to the `backend` instance, then select 'Open in browser window'. +To access the Linux terminal of this vm instance via SSH you can use the 'SSH-in-browser' GCP tool on the web. Go to Compute Engine -> VM instances and select SSH next to the `backend` instance, then select 'Open in browser window'. + +You can also connect to the vm with the terminal ssh client on your pc. This is recommended for more advanced use cases as the web UI is limited. To set this up, follow the steps below: -You can also connect to the vm with the terminal ssh client on your pc. This is recommended for more advanced use cases as the web UI is limited. To set this up, follow the steps below: - In GCP, go to IAM and Admin -> Service Accounts -> Keys and click on the `1065311260334-compute@developer.gserviceaccount.com ` account. -- Click 'Add key'. You can only download the service account JSON key file when you create a key so you will have to create a new key. Select 'JSON' and save the .json file to your local machine. +- Click 'Add key'. You can only download the service account JSON key file when you create a key so you will have to create a new key. Select 'JSON' and save the .json file to your local machine. - Download and install the [Google Cloud Command Line Interface (CLI)](https://cloud.google.com/sdk/docs/install) for your OS. -- In your terminal, navigate to the folder with your saved .json file. Run the command: -`gcloud auth activate-service-account --key-file=your-key.json` +- In your terminal, navigate to the folder with your saved .json file. 
Run the command: + `gcloud auth activate-service-account --key-file=your-key.json` - Now you can ssh into the vm with: -`gcloud compute ssh --zone "us-east1-b" "cleanandgreenphl@backend" --project "clean-and-green-philly"` -- You will land in the home directory of the `cleanandgreenphl` user. The project has been cloned to this directory. + `gcloud compute ssh --zone "us-east1-b" "cleanandgreenphl@backend" --project "clean-and-green-philly"` +- You will land in the home directory of the `cleanandgreenphl` user. The project has been cloned to this directory. -The job to regenerate and upload the tiles file and street images to the GCP bucket has been scheduled in `cron` to run weekly on Wednesday at 5 AM. You can run `crontab -l` to see the job. Currently it looks like this: +The job to regenerate and upload the tiles file and street images to the GCP bucket has been scheduled in `cron` to run weekly on Wednesday at 5 AM. You can run `crontab -l` to see the job. Currently it looks like this: `0 5 * * 3 . /home/cleanandgreenphl/.cagp_env && cd clean-and-green-philly/data && docker compose run vacant-lots-proj && docker compose run streetview` -The specific production environmental variables are stored in `/home/cleanandgreenphl/.cagp_env`. Some variables in the `data/src/config/config.py` project file have been edited locally for the scheduled run. Be careful when running this job in this environment because the production web site could be affected. +The specific production environmental variables are stored in `/home/cleanandgreenphl/.cagp_env`. Some variables in the `data/src/config/config.py` project file have been edited locally for the scheduled run. Be careful when running this job in this environment because the production web site could be affected. The message with the diff report will be sent to the `clean-and-green-philly-back-end` Slack channel. To troubleshoot any errors you can look at the docker logs of the last run container. 
e.g.: `docker logs data-vacant-lots-proj-run-8c5e7639c386 | grep -i error` - - diff --git a/src/components/SinglePropertyDetail.tsx b/src/components/SinglePropertyDetail.tsx index 9013d78c..bfb3b551 100644 --- a/src/components/SinglePropertyDetail.tsx +++ b/src/components/SinglePropertyDetail.tsx @@ -266,7 +266,7 @@ const SinglePropertyDetail = ({ aria-label="Open full screen street view map" id="outside-iframe-element" > - + diff --git a/src/components/icons/Rake.tsx b/src/components/icons/Rake.tsx index f103c7ff..8b06e33a 100644 --- a/src/components/icons/Rake.tsx +++ b/src/components/icons/Rake.tsx @@ -1,52 +1,61 @@ export const Rake = () => ( - - - - - - - - - + <> + + + + + + + + + + + );