Skip to content

Commit

Permalink
Merge branch 'master' into upload-thorttling
Browse files Browse the repository at this point in the history
  • Loading branch information
sabeechen committed Nov 13, 2023
2 parents f8d846d + b2c274c commit 8293db8
Show file tree
Hide file tree
Showing 15 changed files with 130 additions and 32 deletions.
19 changes: 18 additions & 1 deletion .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,23 @@ RUN apt-get update
RUN apt-get install fping
# install gcloud api
RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-cli -y


# Install Docker CE CLI
RUN apt-get update \
&& apt-get install -y apt-transport-https ca-certificates curl gnupg2 lsb-release \
&& curl -fsSL https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/gpg | apt-key add - 2>/dev/null \
&& echo "deb [arch=amd64] https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]') $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list \
&& apt-get update \
&& apt-get install -y docker-ce-cli

# Install Docker Compose
RUN LATEST_COMPOSE_VERSION=$(curl -sSL "https://api.github.com/repos/docker/compose/releases/latest" | grep -o -P '(?<="tag_name": ").+(?=")') \
&& curl -sSL "https://github.com/docker/compose/releases/download/${LATEST_COMPOSE_VERSION}/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose \
&& chmod +x /usr/local/bin/docker-compose

# Install Heroku CLI
RUN curl https://cli-assets.heroku.com/install-ubuntu.sh | sh

# Install app dependencies
COPY requirements-dev.txt ./
RUN pip install --no-cache-dir -r requirements-dev.txt
7 changes: 5 additions & 2 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@
"github.vscode-pull-request-github", // Github interaction
"GitHub.copilot",
"mikoz.black-py"]
}
"forwardPorts": [3000]
},
"runArgs": ["--init", "--privileged"],
"extensions": ["ms-python.python", "wholroyd.jinja","ms-python.vscode-pylance"],
"forwardPorts": [3000],
"mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ]
}
2 changes: 1 addition & 1 deletion .github/workflows/codeql-analysis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: ⤵️ Check out code from GitHub
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: 🚀 Run Home Assistant Add-on Linter
uses: frenck/action-addon-linter@v2
uses: frenck/action-addon-linter@v2.13
with:
path: "./hassio-google-drive-backup"
4 changes: 2 additions & 2 deletions .github/workflows/prod_push.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ jobs:
python-version: '3.11'

- name: Check out dev repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: dev
persist-credentials: false

- name: Login to DockerHub
uses: docker/login-action@v2.1.0
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/pytest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ jobs:
run-pytest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
Expand Down Expand Up @@ -41,7 +41,7 @@ jobs:
with:
files: junit/**/*.xml
- name: Upload coverage to Codecov
uses: codecov/[email protected].3
uses: codecov/[email protected].4
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./coverage.xml
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/server_image_push.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
contents: read

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Build image
run: docker build -f hassio-google-drive-backup/Dockerfile-server --tag $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}" hassio-google-drive-backup/.
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/staging_push.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,11 @@ jobs:
with:
python-version: '3.11'
# Check out the current branch
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
path: dev
# Check out the staging branch
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
path: staging
repository: sabeechen/hgdb-dev-staging
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/test_staging_push.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,13 @@ jobs:
python-version: '3.11'

- name: Check out dev repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: dev
persist-credentials: false

- name: Checkout Staging Repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: staging
repository: sabeechen/hgdb-dev-staging
Expand All @@ -33,7 +33,7 @@ jobs:
python3 staging/update.py dev staging
- name: Login to DockerHub
uses: docker/login-action@v2.1.0
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
Expand Down
12 changes: 12 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -265,3 +265,15 @@ The add-on will only delete an old backup if a new one exists to replace it, so
### Can I exclude specific sub-folders from my backup?

The add-on uses the supervisor to create backups, and the supervisor only permits you to include or exclude the 5 main folders (home assistant configuration, share, SSL, media, and local add-ons). Excluding specific subfolders, or only including specific subfolders from a backup isn't possible today.

### <a id="supervisor_logs"></a>I'm getting weird errors. Where do I look for more details about an error? (Supervisor logs)

The addon uses Home Assistant's "supervisor" to create and delete backups on Home Assistant's side. In case you don't know, the supervisor is something that runs in the background on Home Assistant and manages stuff like backups, connections to hardware, and setting up the environment that Home Assistant Core (e.g. the UI) and addons run in. Because of this a lot of errors you run into (problems with the NAS, HD corruption, etc.) only show up in the supervisor's logs. The supervisor's logs are kind of hidden by default; to view them:

- Go to your Home Assistant user profile by clicking the user icon in the bottom left of Home Assistant's main UI.
- Enable "Advanced Mode" in your profile.
- Navigate to Settings > System > Logs
- Select "Supervisor" from the drop down at the top right of the page.

The logs there keep a pretty short history, so if you have a lot of other errors/warnings happening (which is common) you might need to go check the logs right after you see errors in the addon.

2 changes: 1 addition & 1 deletion hassio-google-drive-backup/backup/config/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def key(self):

# Remote endpoints
Setting.AUTHORIZATION_HOST: "https://habackup.io",
Setting.TOKEN_SERVER_HOSTS: "https://token1.habackup.io,https://habackup.io",
Setting.TOKEN_SERVER_HOSTS: "https://token2.habackup.io,https://token1.habackup.io,https://habackup.io",
Setting.SUPERVISOR_URL: "",
Setting.SUPERVISOR_TOKEN: "",
Setting.DRIVE_URL: "https://www.googleapis.com",
Expand Down
25 changes: 11 additions & 14 deletions hassio-google-drive-backup/backup/server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from yarl import URL
from backup.config import Version
from urllib.parse import unquote
from backup.time import Time

NEW_AUTH_MINIMUM = Version(0, 101, 3)

Expand All @@ -26,7 +27,9 @@ def __init__(self,
config: Config,
exchanger_builder: ClassAssistedBuilder[Exchanger],
logger: CloudLogger,
error_store: ErrorStore):
error_store: ErrorStore,
time: Time):
self._time = time
self.exchanger = exchanger_builder.build(
client_id=config.get(Setting.DEFAULT_DRIVE_CLIENT_ID),
client_secret=config.get(Setting.DEFAULT_DRIVE_CLIENT_SECRET),
Expand Down Expand Up @@ -128,14 +131,6 @@ async def refresh(self, request: Request):
return json_response({
"error": "Couldn't connect to Google's servers"
}, status=503)
except ServerDisconnectedError:
return json_response({
"error": "Couldn't connect to Google's servers"
}, status=503)
except ServerTimeoutError:
return json_response({
"error": "Google's servers timed out"
}, status=503)
except GoogleCredentialsExpired:
return json_response({
"error": "expired"
Expand Down Expand Up @@ -203,16 +198,18 @@ def logError(self, request: Request, exception: Exception):
self.logger.log_struct(data)

def logReport(self, request, report):
data = self.getRequestInfo(request)
data = self.getRequestInfo(request, include_timestamp=True)
data['report'] = report
self.logger.log_struct(data)
self.error_store.store(data)

def getRequestInfo(self, request: Request):
return {
def getRequestInfo(self, request: Request, include_timestamp=False):
data = {
'client': request.headers.get('client', "unknown"),
'version': request.headers.get('addon_version', "unknown"),
'address': request.remote,
'url': str(request.url),
'length': request.content_length
'length': request.content_length,
}
if include_timestamp:
data['server_time'] = self._time.now()
return data
57 changes: 57 additions & 0 deletions hassio-google-drive-backup/dev/error_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import argparse
from google.cloud import firestore
from datetime import datetime, timedelta
DELETE_BATCH_SIZE = 200
STORE_NAME = "error_reports"


def delete_old_data():
    """Delete every document in the error-report collection older than one week.

    Deletion happens in batches of at most DELETE_BATCH_SIZE documents. After
    each batch commits, the query is re-run from scratch; the loop ends once a
    query returns no matching documents.
    """
    # Initialize Firestore
    db = firestore.Client()
    collection_ref = db.collection(STORE_NAME)

    # Define the datetime for one week ago
    # NOTE(review): datetime.now() is timezone-naive while Firestore stores
    # timezone-aware (UTC) timestamps — confirm the client library compares
    # these the way the 'server_time' field expects.
    week_ago = datetime.now() - timedelta(days=7)

    # Query to find all documents older than a week, deleting in capped
    # batches (presumably to stay under Firestore's per-batch write limit).
    total_deleted = 0
    while True:
        to_delete = 0
        batch = db.batch()
        # Fresh query each pass; already-deleted documents no longer match.
        docs = collection_ref.where('server_time', '<', week_ago).stream()
        for doc in docs:
            to_delete += 1
            batch.delete(doc.reference)
            # Cap this batch; any remaining documents are picked up on the
            # next pass of the outer loop.
            if to_delete >= DELETE_BATCH_SIZE:
                break
        if to_delete > 0:
            batch.commit()
            total_deleted += to_delete
            print(f"Deleted {to_delete} documents ({total_deleted} total)")
        else:
            # Nothing matched — all old documents have been purged.
            break
    print(f"Success: All documents older than a week deleted ({total_deleted} total)")


def main():
    """Command-line entry point for error-report maintenance tasks.

    Currently supports a single flag, --purge, which (after an interactive
    confirmation) deletes all stored error reports older than one week.
    """
    arg_parser = argparse.ArgumentParser()

    # --purge: remove week-old documents from the error store.
    arg_parser.add_argument("--purge", help="Delete all documents older than a week.", action="store_true")

    # Additional maintenance flags can be registered here later, e.g.:
    # arg_parser.add_argument("--future_arg", help="Perform some future operation.")

    options = arg_parser.parse_args()

    # Nothing requested — exit quietly.
    if not options.purge:
        return

    # Destructive operation: require an explicit interactive confirmation.
    answer = input('Are you sure you want to delete all documents older than a week? (y/n): ')
    if answer.lower() == 'y':
        delete_old_data()
    else:
        print("Abort: No documents were deleted.")


if __name__ == "__main__":
    main()
11 changes: 10 additions & 1 deletion hassio-google-drive-backup/tests/test_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
from dev.simulationserver import SimulationServer
from aiohttp import ClientSession, hdrs
from backup.config import Config

from .faketime import FakeTime
import json

@pytest.mark.asyncio
async def test_refresh_known_error(server: SimulationServer, session: ClientSession, config: Config, server_url: URL):
Expand Down Expand Up @@ -48,3 +49,11 @@ async def test_old_auth_method(server: SimulationServer, session: ClientSession,
redirect = URL(r.headers[hdrs.LOCATION])
assert redirect.query.get("creds") is not None
assert redirect.host == "example.com"


@pytest.mark.asyncio
async def test_log_to_firestore(time: FakeTime, server: SimulationServer, session: ClientSession, server_url: URL):
    """Posting a report to /logerror should record it in the server's error store.

    Fix: the sibling tests in this file (e.g. test_refresh_known_error) carry
    @pytest.mark.asyncio; without the marker, pytest-asyncio in strict mode
    never awaits this coroutine and the test silently passes/skips.
    """
    data = {"info": "testing"}
    async with session.post(server_url.with_path("logerror"), data=json.dumps(data)) as r:
        assert r.status == 200
        # The simulated auth server exposes its error store for inspection.
        assert server._authserver.error_store.last_error is not None
        assert server._authserver.error_store.last_error['report'] == data
3 changes: 3 additions & 0 deletions heroku.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Heroku container-stack build configuration: builds the image for the "web"
# process type from the server Dockerfile.
build:
  docker:
    web: hassio-google-drive-backup/Dockerfile-server

0 comments on commit 8293db8

Please sign in to comment.