diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..166f935 --- /dev/null +++ b/.env.example @@ -0,0 +1,7 @@ +FLASK_APP= +DB_USER= +DB_PASSWORD= +DB_NAME= +DB_HOST= +DB_PORT= +LOGGING_LEVEL= diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 0000000..ad1a6ee --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,18 @@ +name: pre-commit + +on: + pull_request: + push: + branches: + - develop + - main + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.12.1' + - uses: pre-commit/action@v2.0.0 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e72b932 --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +*.pyc +__pycache__/ +.mypy_cache + +# Environments +.env +.venv +env/ +venv/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..f8457b9 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,35 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-json + - id: check-yaml + - id: check-merge-conflict + - id: check-added-large-files + - id: debug-statements + - id: requirements-txt-fixer +- repo: https://github.com/pre-commit/mirrors-isort + rev: 'v5.10.1' + hooks: + - id: isort +- repo: https://github.com/pre-commit/mirrors-mypy + rev: 'v1.9.0' + hooks: + - id: mypy + exclude: alembic +- repo: https://github.com/psf/black + rev: 24.4.0 + hooks: + - id: black + args: [--line-length=79] +- repo: https://github.com/PyCQA/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + exclude: __init__.py +- repo: https://github.com/PyCQA/docformatter + rev: v1.5.0 + hooks: + - id: docformatter diff --git a/README.md b/README.md index 0cb4ac5..fd429c5 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,63 @@ -### WhatsApp Webhook Analytics 
+### WhatsApp Webhook Analytics -Handling and processing Incoming webhook request configured at Glific. \ No newline at end of file +Handling and processing Incoming webhook request configured at Glific. + +## Installation + +### Prerequisite +1. pyenv +2. python 3.12 + +### Steps +1. Clone the repository + ```sh + git clone https://github.com/DostEducation/whatsapp-webhook-analytics.git + ``` +2. Switch to project folder and set up the virtual environment + ```sh + cd whatsapp-webhook-analytics + python -m venv venv + ``` +3. Activate the virtual environment + + **For Windows** + ```sh + venv\Scripts\Activate.ps1 + ``` + **For Mac** + ```sh + source ./venv/bin/activate + ``` +4. Install the dependencies: + ```sh + pip install -r requirements.txt + ``` +5. Set up your .env file by copying .env.example + ```sh + cp .env.example .env + ``` +6. Add/update variables in your `.env` file for your environment. +7. Run these commands to add environment variables in the system. + + **For Windows** + ```sh + $env:FLASK_APP="manage.py" + $env:PYTHONPATH="" + ``` + **For Mac** + ```sh + export FLASK_APP=manage.py + export PYTHONPATH=path-of-the-project + ``` +8. Upgrade DB to the latest version using this command. + ```sh + flask db upgrade + ``` +9. Run the following command to get started with pre-commit + ```sh + pre-commit install + ``` +10. 
Start the server with the following command + ```sh + functions-framework --target=handle_payload --debug + ``` diff --git a/api/__init__.py b/api/__init__.py new file mode 100644 index 0000000..dc3a534 --- /dev/null +++ b/api/__init__.py @@ -0,0 +1,6 @@ +from flask import Flask +from flask_sqlalchemy import SQLAlchemy + +app = Flask(__name__) +app.config.from_object("config") +db = SQLAlchemy(app) diff --git a/api/mixins.py b/api/mixins.py new file mode 100644 index 0000000..886c46c --- /dev/null +++ b/api/mixins.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from datetime import datetime + +from api import db + + +class TimestampMixin: + created_on = db.Column(db.DateTime, default=datetime.now) + updated_on = db.Column( + db.DateTime, onupdate=datetime.now, default=datetime.now + ) diff --git a/api/models/__init__.py b/api/models/__init__.py new file mode 100644 index 0000000..a478b05 --- /dev/null +++ b/api/models/__init__.py @@ -0,0 +1,5 @@ +from sqlalchemy.ext.declarative import declarative_base + +Base = declarative_base() + +from .webhook_transaction_log import * diff --git a/api/models/webhook_transaction_log.py b/api/models/webhook_transaction_log.py new file mode 100644 index 0000000..c7a8720 --- /dev/null +++ b/api/models/webhook_transaction_log.py @@ -0,0 +1,11 @@ +from api import db +from api.mixins import TimestampMixin + + +class WebhookTransactionLog(TimestampMixin, db.Model): + + __tablename__ = "webhook_transaction_log" + id = db.Column(db.Integer, primary_key=True) + payload = db.Column(db.Text) + processed = db.Column(db.Boolean, nullable=False) + attempts = db.Column(db.Integer, nullable=False, default="0") diff --git a/api/services/__init__.py b/api/services/__init__.py new file mode 100644 index 0000000..097f248 --- /dev/null +++ b/api/services/__init__.py @@ -0,0 +1 @@ +from .webhook_transaction_log_service import * diff --git a/api/services/webhook_transaction_log_service.py b/api/services/webhook_transaction_log_service.py new 
file mode 100644 index 0000000..b3cb880 --- /dev/null +++ b/api/services/webhook_transaction_log_service.py @@ -0,0 +1,35 @@ +import json + +from api import models +from api.utils import db_utils +from api.utils.loggingutils import logger + + +class WebhookTransactionLogService: + def create_new_webhook_log(self, jsonData): + try: + data = json.dumps(jsonData) + new_webhook_log = models.WebhookTransactionLog( + payload=data, + processed=False, + attempts=0, + ) + db_utils.save(new_webhook_log) + return new_webhook_log + except Exception as e: + logger.error( + f"Error while creating new webhook log. Webhook: {jsonData}." + f"Error message: {e}" + ) + return None + + def mark_webhook_log_as_processed(self, webhook_log): + try: + webhook_log.processed = True + db_utils.save(webhook_log) + except Exception as e: + logger.error( + f"Error while marking webhook log as processed." + f"Error message: {e}" + ) + return None diff --git a/api/utils/__init__.py b/api/utils/__init__.py new file mode 100644 index 0000000..e9792ab --- /dev/null +++ b/api/utils/__init__.py @@ -0,0 +1,2 @@ +from .db_utils import * +from .loggingutils import * diff --git a/api/utils/db_utils.py b/api/utils/db_utils.py new file mode 100644 index 0000000..45648d2 --- /dev/null +++ b/api/utils/db_utils.py @@ -0,0 +1,17 @@ +import traceback + +from api import db +from api.utils.loggingutils import logger + + +def save(data): + try: + db.session.add(data) + db.session.commit() + except Exception as e: + logger.error( + "Error occurred while committing the data in the database." 
+ f"Error message: {e}" + ) + logger.debug(traceback.format_exc()) + db.session.rollback() diff --git a/api/utils/loggingutils.py b/api/utils/loggingutils.py new file mode 100644 index 0000000..87ba28a --- /dev/null +++ b/api/utils/loggingutils.py @@ -0,0 +1,19 @@ +import logging +import os + +from google.cloud import logging as gcloud_logging + +from api import app +from config import LOGGING_LEVEL + +logger = logging.getLogger() +logging.basicConfig(level=LOGGING_LEVEL) + +if os.environ.get("FLASK_ENV", "development"): + log_handler = logger.handlers[0] + logger.addHandler(log_handler) +else: + log_client = gcloud_logging.Client() + log_client.setup_logging() + log_handler = log_client.get_default_handler() + app.logger.addHandler(log_handler) diff --git a/config.py b/config.py new file mode 100644 index 0000000..ffa94d8 --- /dev/null +++ b/config.py @@ -0,0 +1,35 @@ +"""Flask configuration.""" + +import os + +FLASK_APP = os.environ.get("FLASK_APP", "development") + +if FLASK_APP == "development": + from dotenv import load_dotenv + + load_dotenv() + + +# Database configuration +POSTGRES = { + "user": os.environ.get("DB_USER"), + "password": os.environ.get("DB_PASSWORD"), + "database": os.environ.get("DB_NAME"), + "host": os.environ.get("DB_HOST"), + "port": os.environ.get("DB_PORT"), + "conn_str": os.environ.get("CONNECTION_NAME"), +} + +SQLALCHEMY_DATABASE_URI = ( + "postgresql://%(user)s:%(password)s@%(host)s:%(port)s/%(database)s" + % POSTGRES +) + +# For socket based connection +if FLASK_APP in ("production", "staging"): + SQLALCHEMY_DATABASE_URI = ( + "postgresql://%(user)s:%(password)s@/%(database)s?host=%(conn_str)s/" + % POSTGRES + ) + +LOGGING_LEVEL = os.environ.get("LOGGING_LEVEL", "DEBUG") diff --git a/main.py b/main.py new file mode 100644 index 0000000..897bbdf --- /dev/null +++ b/main.py @@ -0,0 +1,30 @@ +import functions_framework + +from api import app +from api.services import WebhookTransactionLogService +from api.utils.loggingutils import 
logger + + +# Endpoint for Cloud function +@functions_framework.http +def handle_payload(request): + if request.method == "POST": + with app.app_context(): + try: + jsonData = request.get_json() + if jsonData: + handle_webhook(jsonData) + except Exception as e: + logger.error( + f"Exception while handling the webhook payload: {jsonData}" + f"Error: {e}" + ) + return "Success" + else: + return "Currently, the system does not accept a GET request" + + +def handle_webhook(jsonData): + transaction_log_service = WebhookTransactionLogService() + webhook_log = transaction_log_service.create_new_webhook_log(jsonData) + transaction_log_service.mark_webhook_log_as_processed(webhook_log) diff --git a/manage.py b/manage.py new file mode 100644 index 0000000..cdad6ed --- /dev/null +++ b/manage.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from flask.cli import FlaskGroup +from flask_migrate import Migrate + +from api import app, db + +migrate = Migrate(app, db) +cli = FlaskGroup(app) + +if __name__ == "__main__": + cli() diff --git a/migrations/README b/migrations/README new file mode 100644 index 0000000..0e04844 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..ec9d45c --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..18e0bb4 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,113 @@ +import logging +from logging.config import fileConfig + +from alembic import context +from flask import current_app + +from api import models + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) +logger = logging.getLogger("alembic.env") + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions["migrate"].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions["migrate"].db.engine + + +def get_engine_url(): + try: + return ( + get_engine() + .url.render_as_string(hide_password=False) + .replace("%", "%%") + ) + except AttributeError: + return str(get_engine().url).replace("%", "%%") + + +# add your model's MetaData object here +# for 'autogenerate' support + +target_metadata = models.Base.metadata +config.set_main_option("sqlalchemy.url", get_engine_url()) +target_db = current_app.extensions["migrate"].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, "metadatas"): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=get_metadata(), literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine and associate a + connection with the context. 
+ """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, "autogenerate", False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info("No changes in schema detected.") + + conf_args = current_app.extensions["migrate"].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=get_metadata(), **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/70bea50174bf_initial_migration.py b/migrations/versions/70bea50174bf_initial_migration.py new file mode 100644 index 0000000..992373d --- /dev/null +++ b/migrations/versions/70bea50174bf_initial_migration.py @@ -0,0 +1,36 @@ +"""Initial migration. 
+ +Revision ID: 70bea50174bf +Revises: +Create Date: 2024-04-18 17:18:19.882178 +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "70bea50174bf" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "webhook_transaction_log", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("payload", sa.Text(), nullable=True), + sa.Column("processed", sa.Boolean(), nullable=False), + sa.Column("attempts", sa.Integer(), nullable=False), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("updated_on", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("webhook_transaction_log") + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..a968a88 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,10 @@ +Flask==3.0.3 +Flask-Migrate==4.0.7 +Flask-SQLAlchemy==3.1.1 +functions-framework==3.5.0 +google-cloud-logging==3.10.0 +pre-commit==3.7.0 +psycopg2==2.9.9 +python-dotenv==1.0.1 +sqlalchemy==2.0.29 +Werkzeug==3.0.2