diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index d12c2708..897e94ce 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -5,6 +5,23 @@ RUN apt-get update RUN apt-get install fping # install gcloud api RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-cli -y - + +# Install Docker CE CLI +RUN apt-get update \ + && apt-get install -y apt-transport-https ca-certificates curl gnupg2 lsb-release \ + && curl -fsSL https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/gpg | apt-key add - 2>/dev/null \ + && echo "deb [arch=amd64] https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]') $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list \ + && apt-get update \ + && apt-get install -y docker-ce-cli + +# Install Docker Compose +RUN LATEST_COMPOSE_VERSION=$(curl -sSL "https://api.github.com/repos/docker/compose/releases/latest" | grep -o -P '(?<="tag_name": ").+(?=")') \ + && curl -sSL "https://github.com/docker/compose/releases/download/${LATEST_COMPOSE_VERSION}/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose \ + && chmod +x /usr/local/bin/docker-compose + +# Install Heroku CLI +RUN curl https://cli-assets.heroku.com/install-ubuntu.sh | sh + +# Install app dependencies COPY requirements-dev.txt ./ RUN pip install --no-cache-dir -r requirements-dev.txt \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7fceb3ef..c98efb16 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -18,6 +18,9 @@ "github.vscode-pull-request-github", // Github interaction 
"GitHub.copilot", "mikoz.black-py"] - } - "forwardPorts": [3000] + }, + "runArgs": ["--init", "--privileged"], + "extensions": ["ms-python.python", "wholroyd.jinja","ms-python.vscode-pylance"], + "forwardPorts": [3000], + "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ] } \ No newline at end of file diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a63b2659..cb95db24 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -38,7 +38,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index c4a57cca..6441cdba 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -6,8 +6,8 @@ jobs: runs-on: ubuntu-latest steps: - name: ⤵️ Check out code from GitHub - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 🚀 Run Home Assistant Add-on Linter - uses: frenck/action-addon-linter@v2 + uses: frenck/action-addon-linter@v2.13 with: path: "./hassio-google-drive-backup" diff --git a/.github/workflows/prod_push.yaml b/.github/workflows/prod_push.yaml index 65b4a45d..941f4382 100644 --- a/.github/workflows/prod_push.yaml +++ b/.github/workflows/prod_push.yaml @@ -12,13 +12,13 @@ jobs: python-version: '3.11' - name: Check out dev repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: dev persist-credentials: false - name: Login to DockerHub - uses: docker/login-action@v2.1.0 + uses: docker/login-action@v3.0.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }} diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1ee34ca4..6b4e8893 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -7,7 +7,7 @@ jobs: run-pytest: runs-on: 
ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -41,7 +41,7 @@ jobs: with: files: junit/**/*.xml - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3.1.3 + uses: codecov/codecov-action@v3.1.4 with: token: ${{ secrets.CODECOV_TOKEN }} files: ./coverage.xml diff --git a/.github/workflows/server_image_push.yml b/.github/workflows/server_image_push.yml index 77334b7c..2cb0e601 100644 --- a/.github/workflows/server_image_push.yml +++ b/.github/workflows/server_image_push.yml @@ -16,7 +16,7 @@ jobs: contents: read steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Build image run: docker build -f hassio-google-drive-backup/Dockerfile-server --tag $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}" hassio-google-drive-backup/. diff --git a/.github/workflows/staging_push.yml b/.github/workflows/staging_push.yml index ca611bc8..774ac31f 100644 --- a/.github/workflows/staging_push.yml +++ b/.github/workflows/staging_push.yml @@ -24,11 +24,11 @@ jobs: with: python-version: '3.11' # Check out the current branch - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: path: dev # Check out the staging barnch - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: path: staging repository: sabeechen/hgdb-dev-staging diff --git a/.github/workflows/test_staging_push.yml b/.github/workflows/test_staging_push.yml index 743e37e4..094b60e3 100644 --- a/.github/workflows/test_staging_push.yml +++ b/.github/workflows/test_staging_push.yml @@ -15,13 +15,13 @@ jobs: python-version: '3.11' - name: Check out dev repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: dev persist-credentials: false - name: Checkout Staging Repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: staging repository: sabeechen/hgdb-dev-staging @@ -33,7 +33,7 @@ jobs: python3 staging/update.py dev staging - name: Login to DockerHub 
- uses: docker/login-action@v2.1.0 + uses: docker/login-action@v3.0.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }} diff --git a/README.md b/README.md index c05fcd34..dc49296a 100644 --- a/README.md +++ b/README.md @@ -265,3 +265,15 @@ The add-on will only delete an old backup if a new one exists to replace it, so ### Can I exclude specific sub-folders from my backup? The add-on uses the supervisor to create backups, and the supervisor only permits you to include or exclude the 5 main folders (home assistant configuration, share, SSL, media, and local add-ons). Excluding specific subfolders, or only including specific subfolders from a backup isn't possible today. + +### I'm getting weird errors. Where do I look for more details about an error (Supervisor logs). + +The addon uses Home Assistant's "supervisor" to create and delete backups on Home Assistant's side. In case you don't know, the supervisor is something that runs in the background on Home Assistant and manages stuff like backups, connections to hardware, and setting up the environment that Home Assistant Core (eg the UI) and addons run in. Because of this a lot of errors you run into (problems with the NAS, HD corruption, etc) only show up in the supervisor's logs. The supervisor's logs are kind of hidden by default, to view them: + +- Go to your Home Assistant user profile by clicking the user icon in the bottom left of Home Assistant's main UI. +- Enable "Advanced Mode" in your profile. +- Navigate to Settings > System > Logs +- Select "Supervisor" from the drop down at the top right of the page. + +The logs there keep a pretty short history, so if you have a lot of other errors/warnings happening (which is common) you might need to go check the logs right after you see errors in the addon. 
+ diff --git a/hassio-google-drive-backup/backup/config/settings.py b/hassio-google-drive-backup/backup/config/settings.py index 42b1fc9b..7e5d9e85 100644 --- a/hassio-google-drive-backup/backup/config/settings.py +++ b/hassio-google-drive-backup/backup/config/settings.py @@ -250,7 +250,7 @@ def key(self): # Remote endpoints Setting.AUTHORIZATION_HOST: "https://habackup.io", - Setting.TOKEN_SERVER_HOSTS: "https://token1.habackup.io,https://habackup.io", + Setting.TOKEN_SERVER_HOSTS: "https://token2.habackup.io,https://token1.habackup.io,https://habackup.io", Setting.SUPERVISOR_URL: "", Setting.SUPERVISOR_TOKEN: "", Setting.DRIVE_URL: "https://www.googleapis.com", diff --git a/hassio-google-drive-backup/backup/server/server.py b/hassio-google-drive-backup/backup/server/server.py index b092acf9..bad3bcaa 100644 --- a/hassio-google-drive-backup/backup/server/server.py +++ b/hassio-google-drive-backup/backup/server/server.py @@ -15,6 +15,7 @@ from yarl import URL from backup.config import Version from urllib.parse import unquote +from backup.time import Time NEW_AUTH_MINIMUM = Version(0, 101, 3) @@ -26,7 +27,9 @@ def __init__(self, config: Config, exchanger_builder: ClassAssistedBuilder[Exchanger], logger: CloudLogger, - error_store: ErrorStore): + error_store: ErrorStore, + time: Time): + self._time = time self.exchanger = exchanger_builder.build( client_id=config.get(Setting.DEFAULT_DRIVE_CLIENT_ID), client_secret=config.get(Setting.DEFAULT_DRIVE_CLIENT_SECRET), @@ -128,14 +131,6 @@ async def refresh(self, request: Request): return json_response({ "error": "Couldn't connect to Google's servers" }, status=503) - except ServerDisconnectedError: - return json_response({ - "error": "Couldn't connect to Google's servers" - }, status=503) - except ServerTimeoutError: - return json_response({ - "error": "Google's servers timed out" - }, status=503) except GoogleCredentialsExpired: return json_response({ "error": "expired" @@ -203,16 +198,18 @@ def logError(self, request: 
Request, exception: Exception): self.logger.log_struct(data) def logReport(self, request, report): - data = self.getRequestInfo(request) + data = self.getRequestInfo(request, include_timestamp=True) data['report'] = report - self.logger.log_struct(data) self.error_store.store(data) - def getRequestInfo(self, request: Request): - return { + def getRequestInfo(self, request: Request, include_timestamp=False): + data = { 'client': request.headers.get('client', "unknown"), 'version': request.headers.get('addon_version', "unknown"), 'address': request.remote, 'url': str(request.url), - 'length': request.content_length + 'length': request.content_length, } + if include_timestamp: + data['server_time'] = self._time.now() + return data diff --git a/hassio-google-drive-backup/dev/error_tools.py b/hassio-google-drive-backup/dev/error_tools.py new file mode 100644 index 00000000..3da939c1 --- /dev/null +++ b/hassio-google-drive-backup/dev/error_tools.py @@ -0,0 +1,57 @@ +import argparse +from google.cloud import firestore +from datetime import datetime, timedelta +DELETE_BATCH_SIZE = 200 +STORE_NAME = "error_reports" + + +def delete_old_data(): + # Initialize Firestore + db = firestore.Client() + collection_ref = db.collection(STORE_NAME) + + # Define the datetime for one week ago + week_ago = datetime.now() - timedelta(days=7) + + # Query to find all documents older than a week + total_deleted = 0 + while True: + to_delete = 0 + batch = db.batch() + docs = collection_ref.where('server_time', '<', week_ago).stream() + for doc in docs: + to_delete += 1 + batch.delete(doc.reference) + if to_delete >= DELETE_BATCH_SIZE: + break + if to_delete > 0: + batch.commit() + total_deleted += to_delete + print(f"Deleted {to_delete} documents ({total_deleted} total)") + else: + break + print(f"Success: All documents older than a week deleted ({total_deleted} total)") + + +def main(): + # Create command line argument parser + parser = argparse.ArgumentParser() + + # Add purge argument + 
parser.add_argument("--purge", help="Delete all documents older than a week.", action="store_true") + + # Add any other argument you want in future. For example: + # parser.add_argument("--future_arg", help="Perform some future operation.") + + args = parser.parse_args() + + # Respond to arguments + if args.purge: + confirm = input('Are you sure you want to delete all documents older than a week? (y/n): ') + if confirm.lower() == 'y': + delete_old_data() + else: + print("Abort: No documents were deleted.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/hassio-google-drive-backup/tests/test_server.py b/hassio-google-drive-backup/tests/test_server.py index 9f88bc1f..b530d989 100644 --- a/hassio-google-drive-backup/tests/test_server.py +++ b/hassio-google-drive-backup/tests/test_server.py @@ -4,7 +4,8 @@ from dev.simulationserver import SimulationServer from aiohttp import ClientSession, hdrs from backup.config import Config - +from .faketime import FakeTime +import json @pytest.mark.asyncio async def test_refresh_known_error(server: SimulationServer, session: ClientSession, config: Config, server_url: URL): @@ -48,3 +49,11 @@ async def test_old_auth_method(server: SimulationServer, session: ClientSession, redirect = URL(r.headers[hdrs.LOCATION]) assert redirect.query.get("creds") is not None assert redirect.host == "example.com" + + +async def test_log_to_firestore(time: FakeTime, server: SimulationServer, session: ClientSession, server_url: URL): + data = {"info": "testing"} + async with session.post(server_url.with_path("logerror"), data=json.dumps(data)) as r: + assert r.status == 200 + assert server._authserver.error_store.last_error is not None + assert server._authserver.error_store.last_error['report'] == data diff --git a/heroku.yml b/heroku.yml new file mode 100644 index 00000000..b88ba005 --- /dev/null +++ b/heroku.yml @@ -0,0 +1,3 @@ +build: + docker: + web: hassio-google-drive-backup/Dockerfile-server \ No newline at end 
of file