diff --git a/.env.example b/.env.example
index f70fa19..5f2f039 100644
--- a/.env.example
+++ b/.env.example
@@ -1 +1,23 @@
-GITHUB_TOKEN=daolytics_access_token
+AUTOMATION_DB_COLLECTION=
+AUTOMATION_DB_NAME=
+MONGODB_HOST=
+MONGODB_PASS=
+MONGODB_PORT=
+MONGODB_USER=
+NEO4J_DB=
+NEO4J_HOST=
+NEO4J_PASSWORD=
+NEO4J_PORT=
+NEO4J_PROTOCOL=
+NEO4J_USER=
+RABBIT_HOST=
+RABBIT_PASSWORD=
+RABBIT_PORT=
+RABBIT_USER=
+REDIS_HOST=
+REDIS_PASSWORD=
+REDIS_PORT=
+SAGA_DB_COLLECTION=
+SAGA_DB_NAME=
+SENTRY_DSN=
+SENTRY_ENV=
diff --git a/Dockerfile b/Dockerfile
index 701dce2..3f02042 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,15 +2,11 @@ FROM python:3.10-bullseye AS base
 WORKDIR /project
 COPY . .
-ARG GITHUB_TOKEN
 RUN pip3 install -r requirements.txt
 
 FROM base AS test
 RUN chmod +x docker-entrypoint.sh
 CMD ["./docker-entrypoint.sh"]
 
-FROM base AS prod-server
-CMD ["python3", "start_rabbit_mq.py"]
-
-FROM base as prod-worker
-CMD ["python3", "redis_worker.py"]
\ No newline at end of file
+FROM base AS prod
+CMD ["python3", "server.py"]
\ No newline at end of file
diff --git a/automation/__init__.py b/automation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/automation/automation_workflow.py b/automation/automation_workflow.py
new file mode 100644
index 0000000..98ae719
--- /dev/null
+++ b/automation/automation_workflow.py
@@ -0,0 +1,226 @@
+import logging
+from typing import Any
+
+from automation.utils.automation_base import AutomationBase
+from automation.utils.model import AutomationDB
+from pybars import Compiler
+from tc_messageBroker.rabbit_mq.event import Event
+from tc_messageBroker.rabbit_mq.queue import Queue
+
+
+class AutomationWorkflow(AutomationBase):
+    def __init__(self) -> None:
+        super().__init__()
+        self.automation_db = AutomationDB()
+
+    def start(self, guild_id: str):
+        """
+        start the automation workflow for a guild
+
+        Parameters
+        -----------
+        guild_id : str
+            to select the right automation
+        """
+        log_prefix = f"GUILDID: {guild_id}: "
+        automations = self.automation_db.load_from_db(guild_id)
+        if len(automations) == 0:
+            logging.info(f"{log_prefix}No automation available for this guild!")
+        else:
+            msg = f"{log_prefix}Starting automation!"
+            logging.info(f"{msg} number of automations fetched: {len(automations)}")
+
+            for at in automations:
+                at_pre = f"{log_prefix}: automation id: {at.id}: "
+                if at.enabled:
+                    members_by_category: dict[str, list[dict[str, str]]] = {}
+                    for trigger in at.triggers:
+                        if trigger.enabled:
+                            category: str | None = trigger.options["category"]
+                            if category is None or not isinstance(category, str):
+                                logging.error(
+                                    f"{at_pre}No category specified for one of the triggers!"
+                                )
+                                logging.error(f"{at_pre}Skipping the trigger!")
+                                break
+
+                            members_by_category[category] = []
+
+                            users1, users2 = self._get_users_from_memberactivities(
+                                guild_id, category
+                            )
+                            users = self._subtract_users(users1, users2)
+
+                            for action in at.actions:
+                                if action.enabled:
+                                    type = self._get_handlebar_type(action.template)
+                                    prepared_id_name: list[tuple[str, str]]
+                                    if type is None:
+                                        logging.warning(
+                                            f"{at_pre}No type specified in the action template!"
+                                        )
+                                        logging.warning(
+                                            f"{at_pre}Sending raw action.template to users!"
+                                        )
+                                        # adding a dummy variable for user_name
+                                        prepared_id_name = list(zip(users, users))
+                                    else:
+                                        prepared_id_name = self.prepare_names(
+                                            guild_id, list(users), user_field=type
+                                        )
+
+                                    for user_id, user_name in prepared_id_name:
+                                        compiled_message: str
+                                        if type is not None:
+                                            compiled_message = self._compile_message(
+                                                data={type: user_name},
+                                                message=action.template,
+                                            )
+                                        else:
+                                            compiled_message = action.template
+
+                                        data = self._prepare_saga_data(
+                                            guild_id, user_id, compiled_message
+                                        )
+                                        saga_id = self._create_manual_saga(data)
+                                        logging.info(
+                                            f"{at_pre}Started to fire events for user {user_id}!"
+                                        )
+                                        # firing the event
+                                        self.fire_event(saga_id, data)
+
+                                        members_by_category[category].append(
+                                            {"user_id": user_id, "user_name": user_name}
+                                        )
+
+                            if at.report.enabled:
+                                # setting up the names to send in the message
+                                # to avoid duplicates we used a dictionary
+                                report_users: dict[str, str] = {}
+                                for member in members_by_category[category]:  # type: ignore
+                                    if member["user_name"] is not None:
+                                        report_users[member["user_id"]] = member["user_name"]
+                                    else:
+                                        # in case no user info was available, just send the ids
+                                        report_users[member["user_id"]] = member["user_id"]
+
+                                compiled_message = self._prepare_report_compiled_message(
+                                    list(report_users.values()), at.report.template
+                                )
+
+                                for recipient in at.report.recipientIds:
+                                    data = self._prepare_saga_data(
+                                        guild_id, recipient, compiled_message
+                                    )
+                                    saga_id = self._create_manual_saga(data)
+
+                                    # firing the event
+                                    self.fire_event(saga_id, data)
+
+    def _prepare_report_compiled_message(
+        self, user_names: list[str], template: str
+    ) -> str:
+        """
+        prepare the message for the report
+
+        Note: we're hardcoding the report template so that each user
+        ends up on its own line, and we only support the `usernames` handlebar.
+        """
+        # hardcoding the template type for the report!
+        # we have to change it in the future to support more types.
+        type = "usernames"
+        users_prepared = [f"- {user}\n" for user in user_names]
+
+        compiled_message = self._compile_message(
+            data={type: users_prepared}, message=template  # type: ignore
+        )
+
+        return compiled_message
+
+    def _get_handlebar_type(self, template: str) -> str | None:
+        """
+        get the handlebar type.
+        for example the template could be
+        "hello {{username}}!"
+        and the output would be `username`
+
+        Note: for now it just supports returning one handlebar.
+
+        Parameters
+        ------------
+        template : str
+            the template message to extract the type from
+
+        Returns
+        ---------
+        type : str
+            the handlebar type to use
+        """
+        start_index = template.find("{{")
+        end_index = template.find("}}")
+        if start_index == -1 or end_index == -1:
+            return None
+        return template[start_index + 2 : end_index]
+
+    def _compile_message(self, data: dict[str, str], message: str) -> str:
+        """
+        compile the message to be sent to the user
+
+        Parameters
+        -----------
+        data : dict[str, str]
+            the dictionary to be compiled for the handlebars
+        message : str
+            the string message that contains the handlebars
+        """
+        compiler = Compiler()
+        template = compiler.compile(message)
+        compiled_message = template(data)
+
+        return compiled_message
+
+    def _prepare_saga_data(
+        self, guild_id: str, user_id: str, message: str
+    ) -> dict[str, Any]:
+        """
+        prepare the data needed for the saga
+
+        Parameters:
+        ------------
+        guild_id : str
+            the guild_id having the user
+        user_id : str
+            the user_id to send the message to
+        message : str
+            the message to send the user
+        """
+        data = {
+            "guildId": guild_id,
+            "created": False,
+            "discordId": user_id,
+            "message": message,
+            "userFallback": True,
+        }
+
+        return data
+
+    def fire_event(self, saga_id: str, data: dict[str, Any]) -> None:
+        """
+        fire the event `SEND_MESSAGE` to the user of a guild
+
+        Parameters:
+        ------------
+        saga_id : str
+            the saga_id of the event
+        data : str
+            the data to fire
+        """
+
+        self.rabbitmq.publish(
+            queue_name=Queue.DISCORD_BOT,
+            event=Event.DISCORD_BOT.SEND_MESSAGE,
+            content={
+                "uuid": saga_id,
+                "data": data,
+            },
+        )
diff --git a/automation/utils/__init__.py b/automation/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/automation/utils/automation_base.py b/automation/utils/automation_base.py
new file mode 100644
index 0000000..41e28f3
--- /dev/null
+++ b/automation/utils/automation_base.py
@@ -0,0 +1,237 @@
+from datetime import datetime, timedelta, timezone
+from typing import Any
+from uuid import uuid1
+
+from utils.get_mongo_client import MongoSingleton
+from utils.get_rabbitmq import prepare_rabbit_mq
+
+
+class AutomationBase:
+    def __init__(self) -> None:
+        """
+        utilities for the automation workflow
+        """
+        mongo_singleton = MongoSingleton.get_instance()
+        self.mongo_client = mongo_singleton.get_client()
+        self.rabbitmq = prepare_rabbit_mq()
+
+    def _get_users_from_guildmembers(
+        self, guild_id: str, user_ids: list[str], strategy: str = "ngu"
+    ) -> list[dict[str, str | None]]:
+        """
+        get the name of the users based on a strategy
+        - `n`: nickname
+        - `g`: global name
+        - `u`: username
+
+        Parameters
+        -------------
+        guild_id : str
+            the guild_id to find users from
+        user_ids : list[str]
+            a list of user ids to get the data for
+        strategy : str
+            which fields of the user to select
+            can be any combination of `n`, `g`, and `u`
+
+        Returns
+        ----------
+        users_data : list[dict[str, str | None]]
+            a list of users with the ngu names to use
+        """
+        user_fields = {"discordId": 1}
+        for field in strategy:
+            if field == "n":
+                user_fields["nickname"] = 1
+            elif field == "g":
+                user_fields["globalName"] = 1
+            elif field == "u":
+                user_fields["username"] = 1
+            else:
+                msg = "Wrong strategy given!"
+                msg += " should be either one of `n`, `g`, or `u`!"
+                raise ValueError(msg)
+
+        user_fields["_id"] = 0
+
+        cursor = self.mongo_client[guild_id]["guildmembers"].find(
+            {"discordId": {"$in": user_ids}},
+            user_fields,
+        )
+
+        users_data = list(cursor)
+        return users_data
+
+    def _get_users_from_memberactivities(
+        self, guild_id: str, category: str
+    ) -> tuple[list[str], list[str]]:
+        """
+        get the users of a specific memberactivities category
+        for the previous day and the previous two days
+
+        Parameters:
+        -------------
+        guild_id : str
+            the guild id to get people's ids from
+        category : str
+            the category of memberactivities
+
+        Returns:
+        ----------
+        users1: list[str]
+            the users for yesterday
+        users2: list[str]
+            the users from two days ago
+        """
+        projection = {category: 1, "date": 1, "_id": 0}
+        date_yesterday = (
+            (datetime.now() - timedelta(days=1))
+            .replace(hour=0, minute=0, second=0)
+            .strftime("%Y-%m-%dT%H:%M:%S")
+        )
+        date_two_past_days = (
+            (datetime.now() - timedelta(days=2))
+            .replace(hour=0, minute=0, second=0)
+            .strftime("%Y-%m-%dT%H:%M:%S")
+        )
+
+        users = (
+            self.mongo_client[guild_id]["memberactivities"]
+            .find(
+                {
+                    "$or": [
+                        {"date": date_yesterday},
+                        {"date": date_two_past_days},
+                    ]
+                },
+                projection,
+            )
+            .limit(2)
+        )
+
+        users1: list[str] = []
+        users2: list[str] = []
+        for users_data in users:
+            if users_data["date"] == date_yesterday:
+                users1 = users_data[category]
+            else:
+                users2 = users_data[category]
+
+        return users1, users2
+
+    def _subtract_users(self, users1: list[str], users2: list[str]) -> set[str]:
+        """
+        subtract two lists of users
+
+        Parameters:
+        ------------
+        users1: list[str]
+            a list of user ids
+        users2: list[str]
+            a list of user ids for another day
+
+        Returns:
+        ---------
+        results: set[str]
+            the set of users in users1 that are not in users2
+        """
+        results = set(users1) - set(users2)
+
+        return results
+
+    def _create_manual_saga(self, data: dict[str, Any]) -> str:
+        """
+        manually create a saga for the discord-bot to be able to work.
+        NOTE: THIS FUNCTION IS FOR THE MVP AND IN THE FUTURE WE HAVE TO ADD A NEW SAGA
+
+        Parameters:
+        ------------
+        data : dict[str, Any]
+            the data we want to have on the saga
+
+        Returns:
+        ---------
+        saga_id : str
+            the id of the created saga
+        """
+
+        saga_id = str(uuid1())
+        self.mongo_client["Saga"]["sagas"].insert_one(
+            {
+                "choreography": {
+                    "name": "DISCORD_NOTIFY_USERS",
+                    "transactions": [
+                        {
+                            "queue": "DISCORD_BOT",
+                            "event": "SEND_MESSAGE",
+                            "order": 1,
+                            "status": "NOT_STARTED",
+                        }
+                    ],
+                },
+                "status": "IN_PROGRESS",
+                "data": data,
+                "sagaId": saga_id,
+                "createdAt": datetime.now(timezone.utc),
+                "updatedAt": datetime.now(timezone.utc),
+            }
+        )
+
+        return saga_id
+
+    def prepare_names(
+        self, guild_id: str, user_ids: list[str], user_field: str = "username"
+    ) -> list[tuple[str, str]]:
+        """
+        prepare the names to use in the message
+
+        Parameters
+        ------------
+        guild_id : str
+            the guild to access the data of
+        user_ids : list[str]
+            a list of user ids to prepare the names for
+        user_field : str
+            the field to choose from the user
+            can be one of
+            - `username`
+            - `nickname`
+            - `globalName`
+            - `ngu` -> the combination of the above
+            default is `username`
+
+        Returns:
+        --------
+        prepared_id_name : list[tuple[str, str]]
+            the prepared ids and names of the users to use
+            the reason we're returning the id again is that we want
+            to keep each id aligned with its name
+        """
+        # strategy selection
+        fields: str
+        if user_field == "ngu":
+            fields = "ngu"
+        else:
+            fields = user_field[0]  # choose the first character
+
+        users_data = self._get_users_from_guildmembers(
+            guild_id=guild_id, user_ids=user_ids, strategy=fields
+        )
+
+        prepared_id_name: list[tuple[str, str]] = []
+
+        if user_field == "ngu":
+            for user in users_data:
+                if user["nickname"] is not None:
+                    prepared_id_name.append((user["discordId"], user["nickname"]))  # type: ignore
+                elif user["globalName"] is not None:
+                    prepared_id_name.append((user["discordId"], user["globalName"]))  # type: ignore
+                else:
+                    # this would never be None
+                    prepared_id_name.append((user["discordId"], user["username"]))  # type: ignore
+        else:
+            for user in users_data:
+                prepared_id_name.append((user["discordId"], user[user_field]))  # type: ignore
+
+        return prepared_id_name
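Editor's note: a minimal, illustrative sketch (not part of the patch) of how the AutomationBase helpers above are meant to compose; the guild id and category values below are made up.

    base = AutomationBase()
    # members that entered the category yesterday vs. the day before
    yesterday, two_days_ago = base._get_users_from_memberactivities(
        "1234", category="all_new_disengaged"
    )
    new_users = base._subtract_users(yesterday, two_days_ago)
    # resolve display names with the nickname -> globalName -> username fallback
    id_names = base.prepare_names("1234", list(new_users), user_field="ngu")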
diff --git a/automation/utils/interfaces.py b/automation/utils/interfaces.py
new file mode 100644
index 0000000..4cb3529
--- /dev/null
+++ b/automation/utils/interfaces.py
@@ -0,0 +1,78 @@
+from datetime import datetime
+from uuid import uuid1
+
+
+class AutomationTrigger:
+    def __init__(self, options: dict, enabled: bool):
+        # self.options = {"category": category}
+        self.options = options
+        self.enabled = enabled
+
+
+class AutomationAction:
+    def __init__(self, template: str, options: dict, enabled: bool):
+        self.template = template
+        self.options = options
+        self.enabled = enabled
+
+
+class AutomationReport:
+    def __init__(
+        self, recipientIds: list[str], template: str, options: dict, enabled: bool
+    ):
+        self.recipientIds = recipientIds
+        self.template = template
+        self.options = options
+        self.enabled = enabled
+
+
+class Automation:
+    def __init__(
+        self,
+        guild_id: str,
+        triggers: list[AutomationTrigger],
+        actions: list[AutomationAction],
+        report: AutomationReport,
+        enabled: bool,
+        createdAt: datetime,
+        updatedAt: datetime,
+        id: str = str(uuid1()),
+    ):
+        self.id = id
+        self.guild_id = guild_id
+        self.triggers = triggers
+        self.actions = actions
+        self.report = report
+        self.enabled = enabled
+        self.createdAt = createdAt
+        self.updatedAt = updatedAt
+
+    @classmethod
+    def from_dict(cls, data: dict):
+        triggers = [AutomationTrigger(**trigger) for trigger in data["triggers"]]
+        actions = [AutomationAction(**action) for action in data["actions"]]
+        report = AutomationReport(**data["report"])
+        created_at = data["createdAt"]
+        updated_at = data["updatedAt"]
+        return cls(
+            data["guildId"],
+            triggers,
+            actions,
+            report,
+            data["enabled"],
+            created_at,
+            updated_at,
+            data["id"],
+        )
+
+    def to_dict(self):
+        return {
+            "id": self.id,
+            "guildId": self.guild_id,
+            "triggers": [trigger.__dict__ for trigger in self.triggers],
+            "actions": [action.__dict__ for action in self.actions],
+            "report": self.report.__dict__,
+            "enabled": self.enabled,
+            "createdAt": self.createdAt,
+            "updatedAt": self.updatedAt,
+        }
diff --git a/automation/utils/model.py b/automation/utils/model.py
new file mode 100644
index 0000000..4456bc4
--- /dev/null
+++ b/automation/utils/model.py
@@ -0,0 +1,61 @@
+from utils.get_automation_env import get_automations_env
+from utils.get_mongo_client import MongoSingleton
+
+from .interfaces import Automation
+
+
+class AutomationDB:
+    def __init__(self):
+        """
+        the automation db instance
+        """
+        instance = MongoSingleton.get_instance()
+        self.client = instance.get_client()
+        at_env_vars = get_automations_env()
+        self.db_name = at_env_vars["DB_NAME"]
+        self.collection_name = at_env_vars["COLLECTION_NAME"]
+
+    def load_from_db(self, guild_id: str) -> list[Automation]:
+        """
+        load all the automations for a guild from the database, based on guild_id
+
+        Returns
+        ---------
+        guild_automations : list[Automation]
+            a list of automations related to one guild
+        """
+
+        cursor = self.client[self.db_name][self.collection_name].find(
+            {"guildId": guild_id}
+        )
+
+        automations = list(cursor)
+
+        guild_automations = []
+        for at in automations:
+            guild_at = Automation.from_dict(at)
+            guild_automations.append(guild_at)
+
+        return guild_automations
+
+    def save_to_db(self, automation: Automation | dict) -> None:
+        """
+        save one automation into the database
+
+        Parameters
+        ------------
+        automation : Automation | dict
+            an automation to insert for a guild
+            in case of an Automation instance,
+            we first convert it to a dictionary
+        """
+        if isinstance(automation, dict):
+            self.client[self.db_name][self.collection_name].insert_one(automation)
+        elif isinstance(automation, Automation):
+            self.client[self.db_name][self.collection_name].insert_one(
+                automation.to_dict()
+            )
+        else:
+            msg = "The type of the given object is not supported!"
+            msg += f" given type is: {type(automation)}"
+            raise TypeError(msg)
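Editor's note: a minimal sketch (all field values are hypothetical) of the document shape that `Automation.from_dict` and `AutomationDB.save_to_db` above operate on.

    from datetime import datetime

    automation = Automation.from_dict(
        {
            "id": "some-uuid",
            "guildId": "1234",
            "triggers": [{"options": {"category": "all_new_disengaged"}, "enabled": True}],
            "actions": [{"template": "Hi {{username}}!", "options": {}, "enabled": True}],
            "report": {
                "recipientIds": ["999"],
                "template": "Members:\n{{usernames}}",
                "options": {},
                "enabled": True,
            },
            "enabled": True,
            "createdAt": datetime.now(),
            "updatedAt": datetime.now(),
        }
    )
    AutomationDB().save_to_db(automation)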
diff --git a/discord_analyzer/analysis/neo4j_analysis/louvain.py b/discord_analyzer/analysis/neo4j_analysis/louvain.py
new file mode 100644
index 0000000..82eb060
--- /dev/null
+++ b/discord_analyzer/analysis/neo4j_analysis/louvain.py
@@ -0,0 +1,151 @@
+import logging
+from uuid import uuid1
+
+from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils
+from tc_neo4j_lib.neo4j_ops import Neo4jOps
+
+
+class Louvain:
+    def __init__(self, neo4j_ops: Neo4jOps) -> None:
+        """
+        a wrapper to compute the louvain algorithm
+        """
+        self.neo4j_ops = neo4j_ops
+
+    def compute(self, guild_id: str, from_start: bool = False) -> None:
+        """
+        compute the louvain modularity score for a guild
+
+        Parameters
+        ------------
+        guild_id : str
+            the guild_id to compute the algorithm for
+        from_start : bool
+            whether to compute the metric from the first day or not
+            if True, it would compute from the start
+            default is False
+        """
+        projection_utils = ProjectionUtils(gds=self.neo4j_ops.gds, guildId=guild_id)
+
+        computable_dates = projection_utils.get_dates(guildId=guild_id)
+
+        # compute for each date
+        to_compute: set[float]
+        if from_start:
+            to_compute = computable_dates
+        else:
+            computed_dates = self.get_computed_dates(projection_utils, guild_id)
+            to_compute = computable_dates - computed_dates
+
+        for date in to_compute:
+            try:
+                self.louvain_computation_wrapper(projection_utils, guild_id, date)
+            except Exception as exp:
+                msg = f"GUILDID: {guild_id} "
+                logging.error(
+                    f"{msg}Louvain Modularity computation for date: {date}, exp: {exp}"
+                )
+
+    def louvain_computation_wrapper(
+        self, projection_utils: ProjectionUtils, guild_id: str, date: float
+    ) -> None:
+        """
+        a wrapper for the louvain modularity computation process
+        we do the projection here and compute on that,
+        then we drop the projection
+
+        Parameters:
+        ------------
+        projection_utils : ProjectionUtils
+            the utils needed to get the work done
+        guild_id : str
+            the guild we want the temp relationships
+            between its members
+        date : float
+            timestamp of the relation
+        """
+        graph_projected_name = f"GraphLouvain_{uuid1()}"
+        projection_utils.project_temp_graph(
+            guildId=guild_id,
+            graph_name=graph_projected_name,
+            weighted=True,
+            date=date,
+            relation_direction="NATURAL",
+        )
+
+        # compute the scores and save them back into the db
+        self.compute_graph_louvain(
+            date=date, graph_name=graph_projected_name, guild_id=guild_id
+        )
+
+        # dropping the projected graph
+        _ = self.neo4j_ops.gds.run_cypher(
+            f"""
+            CALL gds.graph.drop("{graph_projected_name}")
+            """
+        )
+
+    def get_computed_dates(
+        self, projection_utils: ProjectionUtils, guildId: str
+    ) -> set[float]:
+        """
+        get the dates for which the louvain modularity score was already computed
+
+        Parameters:
+        ------------
+        guildId : str
+            the guild we want the temp relationships
+            between its members
+        projection_utils : ProjectionUtils
+            the utils needed to get the work done
+
+        Returns:
+        ----------
+        computed_dates : set[float]
+            the computation dates
+        """
+        # getting the dates computed before
+        query = f"""
+            MATCH (g:Guild {{guildId: '{guildId}'}})
+                -[r:HAVE_METRICS]->(g)
+            WHERE r.louvainModularityScore IS NOT NULL
+            RETURN r.date as computed_dates
+            """
+        computed_dates = projection_utils.get_computed_dates(query)
+
+        return computed_dates
+
+    def compute_graph_louvain(
+        self, date: float, graph_name: str, guild_id: str
+    ) -> None:
+        """
+        compute the louvain algorithm for the projected graph and
+        save the results back into the db
+
+        Parameters:
+        ------------
+        date : float
+            timestamp of the relation
+        graph_name : str
+            the graph the operation would be done on
+        guild_id : str
+            the guild_id to save the data for
+        """
+        msg = f"GUILDID: {guild_id}"
+        try:
+            _ = self.neo4j_ops.gds.run_cypher(
+                f"""
+                CALL gds.louvain.stats("{graph_name}")
+                YIELD modularity
+                WITH modularity
+                MATCH (g:Guild {{guildId: '{guild_id}'}})
+                MERGE (g) -[r:HAVE_METRICS {{
+                    date: {date}
+                }}]-> (g)
+                SET r.louvainModularityScore = modularity
+                """
+            )
+        except Exception as exp:
+            logging.error(
+                f"{msg} Error in computing louvain modularity algorithm, {exp}"
+            )
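Editor's note: the modularity score is stored on a per-date HAVE_METRICS self-relationship of the Guild node (see the MERGE above). A minimal, illustrative read-back sketch, assuming an existing Neo4jOps instance and a hypothetical guild id:

    neo4j_ops.gds.run_cypher(
        """
        MATCH (g:Guild {guildId: '1234'})-[r:HAVE_METRICS]->(g)
        WHERE r.louvainModularityScore IS NOT NULL
        RETURN r.date AS date, r.louvainModularityScore AS score
        """
    )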
diff --git a/discord_analyzer/analyzer/neo4j_analytics.py b/discord_analyzer/analyzer/neo4j_analytics.py
index a0b8d48..3672dae 100644
--- a/discord_analyzer/analyzer/neo4j_analytics.py
+++ b/discord_analyzer/analyzer/neo4j_analytics.py
@@ -6,6 +6,7 @@
 from discord_analyzer.analysis.neo4j_analysis.local_clustering_coefficient import (
     LocalClusteringCoeff,
 )
+from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain
 from tc_neo4j_lib.neo4j_ops import Neo4jOps
 
 
@@ -30,9 +31,11 @@ def compute_metrics(self, guildId: str, from_start: bool) -> None:
         Note: only some metrics support this
         others would be computed from_start=True
         """
-        if from_start:
-            self._remove_analytics_interacted_in(guildId)
+        # we don't need this, as the data will be replaced after
+        # if from_start:
+        #     self._remove_analytics_interacted_in(guildId)
 
+        self.compute_louvain_algorithm(guildId, from_start)
         self.compute_local_clustering_coefficient(guildId, from_start)
         self.compute_network_decentrality(guildId, from_start)
         self.compute_node_stats(guildId, from_start)
@@ -155,3 +158,18 @@ def _remove_analytics_interacted_in(self, guildId: str) -> None:
             DELETE r
             """
         session.run(query=query, guildId=guildId)
+
+    def compute_louvain_algorithm(self, guild_id: str, from_start: bool) -> None:
+        """
+        compute the louvain algorithm and save the results within the db
+
+        Parameters
+        ------------
+        guild_id : str
+            the guild string that the algorithm would be computed on
+        from_start : bool
+            compute from the start of the data available or continue the previous
+        """
+        louvain = Louvain(self.neo4j_ops)
+
+        louvain.compute(guild_id, from_start)
diff --git a/discord_analyzer/models/RawInfoModel.py b/discord_analyzer/models/RawInfoModel.py
index 9deda62..ed29447 100644
--- a/discord_analyzer/models/RawInfoModel.py
+++ b/discord_analyzer/models/RawInfoModel.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 import logging
-from datetime import timedelta
+from datetime import datetime, timedelta
+from typing import Any
 
 import pymongo
 from discord_analyzer.models.BaseModel import BaseModel
@@ -69,7 +70,7 @@ def get_first_date(self):
             print(f"{self.guild_msg} No documents found in the collection")
             return None
 
-    def get_day_entries(self, day, msg=""):
+    def get_day_entries(self, day: datetime, msg: str = "") -> list[dict[str, Any]]:
         """
         Gets the list of entries for the stated day
         This is RawInfo specific method
@@ -85,12 +86,12 @@ def get_day_entries(self, day, msg=""):
         logg_msg += f" {self.collection_name}: {start_day} -> {end_day}"
         logging.info(logg_msg)
 
-        # date_str = day.strftime("%Y-%m-%d")
-
-        # entries = self.database[self.collection_name].find(
-        #     {"datetime": {"$regex": "^" + date_str}}
-        # )
         entries = self.database[self.collection_name].find(
-            {"createdDate": {"$gte": start_day, "$lte": end_day}}
+            {
+                "$and": [
+                    {"createdDate": {"$gte": start_day, "$lte": end_day}},
+                    {"isGeneratedByWebhook": False},
+                ]
+            }
         )
         return list(entries)
diff --git a/discord_utils.py b/discord_utils.py
index 9b3721e..dbe7b28 100644
--- a/discord_utils.py
+++ b/discord_utils.py
@@ -2,6 +2,7 @@
 from typing import Any
 
 from analyzer_init import AnalyzerInit
+from automation.automation_workflow import AutomationWorkflow
 from tc_messageBroker.rabbit_mq.saga.saga_base import get_saga
 from utils.get_rabbitmq import prepare_rabbit_mq
 from utils.transactions_ordering import sort_transactions
@@ -81,7 +82,9 @@ def get_saga_instance(sagaId: str, connection: str, saga_db: str, saga_collectio
 def publish_on_success(connection, result, *args, **kwargs):
     # we must get these three things
     try:
-        rabbit_creds = args[0][0]
+        # rabbitmq creds
+        # TODO: remove sending it in future
+        _ = args[0][0]
         sagaId = args[0][1]
         mongo_creds = args[0][2]
         logging.info(f"SAGAID: {sagaId}: ON_SUCCESS callback!")
@@ -92,17 +95,18 @@ def publish_on_success(connection, result, *args, **kwargs):
             saga_db=mongo_creds["db_name"],
             saga_collection=mongo_creds["collection_name"],
         )
-        rabbitmq = prepare_rabbit_mq(rabbit_creds)
+        rabbitmq = prepare_rabbit_mq()
 
         transactions = saga.choreography.transactions
         (transactions_ordered, tx_not_started_count) = sort_transactions(transactions)
 
+        guildId = saga.data["guildId"]
+        msg = f"GUILDID: {guildId}: "
         if tx_not_started_count != 0:
-            guildId = saga.data["guildId"]
             tx = transactions_ordered[0]
 
-            logging.info(f"GUILDID: {guildId}: Publishing for {tx.queue}")
+            logging.info(f"{msg}Publishing for {tx.queue}")
 
             rabbitmq.connect(tx.queue)
 
             rabbitmq.publish(
@@ -110,5 +114,9 @@ def publish_on_success(connection, result, *args, **kwargs):
                 event=tx.event,
                 content={"uuid": sagaId, "data": saga.data},
             )
+
+        automation_workflow = AutomationWorkflow()
+        automation_workflow.start(guild_id=guildId)
+
     except Exception as exp:
         logging.info(f"Exception occured in job on_success callback: {exp}")
diff --git a/docker-compose.example.yml b/docker-compose.example.yml
index db8a3de..e42b039 100644
--- a/docker-compose.example.yml
+++ b/docker-compose.example.yml
@@ -6,7 +6,7 @@ services:
       context: .
       target: prod
       dockerfile: Dockerfile
-    command: python3 start_rabbit_mq.py
+    command: python3 server.py
   worker:
     build:
       context: .
diff --git a/docker-compose.test.yml b/docker-compose.test.yml
index f796239..504399e 100644
--- a/docker-compose.test.yml
+++ b/docker-compose.test.yml
@@ -26,7 +26,9 @@ services:
       - NEO4J_PASSWORD=password
       - NEO4J_DB=neo4j
       - SAGA_DB_NAME=Saga
-      - SAGA_DB_COLLECTION=saga
+      - SAGA_DB_COLLECTION=sagas
+      - AUTOMATION_DB_NAME=Automation
+      - AUTOMATION_DB_COLLECTION=automations
       - SENTRY_DSN=sample_dsn
       - SENTRY_ENV=local
     volumes:
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index b31af7c..c10c66b 100644
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -1,3 +1,3 @@
 #!/usr/bin/env bash
 python3 -m coverage run --omit=tests/* -m pytest tests
-python3 -m coverage lcov -o coverage/lcov.info
\ No newline at end of file
+python3 -m coverage lcov -i -o coverage/lcov.info
\ No newline at end of file
diff --git a/redis_worker.py b/redis_worker.py
deleted file mode 100644
index 01fae69..0000000
--- a/redis_worker.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import logging
-
-import redis
-from rq import Worker
-from utils.daolytics_uitls import get_redis_credentials
-
-
-def worker_exception_handler(job, exc_type, exc_value, traceback):
-    logging.error(" ========= RQ Exception =========")
-    logging.error(f"JOB: {job}")
-    logging.error(f"exc_type: {exc_type}")
-    logging.error(f"exc_value: {exc_value}")
-    logging.error(f"traceback: {traceback}")
-
-
-if __name__ == "__main__":
-    redis_creds = get_redis_credentials()
-
-    logging.basicConfig()
-    logging.getLogger().setLevel(logging.INFO)
-
-    host = redis_creds["host"]
-    port = redis_creds["port"]
-    password = redis_creds["pass"]
-
-    r = redis.Redis(host=host, port=port, password=password)
-    worker = Worker(
-        queues=["default"], connection=r, exception_handlers=worker_exception_handler
-    )
-    try:
-        worker.work(with_scheduler=True, max_jobs=1)
-    except KeyboardInterrupt:
-        worker.clean_registries()
-        worker.stop_scheduler()
diff --git a/requirements.txt b/requirements.txt
index 1929f8b..09a2a38 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -22,5 +22,6 @@ tc-messageBroker==1.4.0
 sentry-sdk
 rq
 redis
-tc-core-analyzer-lib==1.0.1
+tc-core-analyzer-lib==1.0.2
 tc-neo4j-lib==1.0.0
+pybars3
diff --git a/start_rabbit_mq.py
b/server.py similarity index 100% rename from start_rabbit_mq.py rename to server.py diff --git a/tests/integration/test_all_active_period.py b/tests/integration/test_all_active_period.py index 0f400eb..9b016f0 100644 --- a/tests/integration/test_all_active_period.py +++ b/tests/integration/test_all_active_period.py @@ -45,6 +45,7 @@ def test_two_weeks_period_active_members(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) @@ -64,6 +65,7 @@ def test_two_weeks_period_active_members(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) @@ -83,6 +85,7 @@ def test_two_weeks_period_active_members(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) @@ -102,6 +105,7 @@ def test_two_weeks_period_active_members(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_all_joined_day_members.py b/tests/integration/test_all_joined_day_members.py index 5fe7927..78728a5 100644 --- a/tests/integration/test_all_joined_day_members.py +++ b/tests/integration/test_all_joined_day_members.py @@ -55,6 +55,7 @@ def test_all_joined_day_members(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_init.py b/tests/integration/test_analyzer_init.py index ce09b1f..7fd7899 100644 --- a/tests/integration/test_analyzer_init.py +++ b/tests/integration/test_analyzer_init.py @@ -77,6 +77,7 @@ def test_analyzer_init(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py b/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py index 3475de8..2d9a6d4 100644 --- a/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_1year_recompute_available_analytics.py @@ -67,6 +67,7 @@ def test_analyzer_one_year_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py index 5b57020..0842d51 100644 --- a/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_1year_recompute_empty_analytics.py @@ -47,6 +47,7 @@ def test_analyzer_one_year_period_recompute_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py b/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py index ec7de27..949fdf7 100644 --- a/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_1year_run_once_available_analytics.py @@ -67,6 +67,7 @@ def test_analyzer_one_year_period_run_once_available_analytics(): "channelName": "general", "threadId": 
None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py index e3b8de1..1a4ce15 100644 --- a/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_1year_run_once_empty_analytics.py @@ -47,6 +47,7 @@ def test_analyzer_one_year_period_run_once_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py index 5bb228c..35dfba6 100644 --- a/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_3month_recompute_available_analytics.py @@ -67,6 +67,7 @@ def test_analyzer_three_month_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py index 427c971..dd75b2d 100644 --- a/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_3month_recompute_empty_analytics.py @@ -47,6 +47,7 @@ def test_analyzer_three_month_period_recompute_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py index 3383018..9504978 100644 --- a/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_3month_run_once_available_analytics.py @@ -67,6 +67,7 @@ def test_analyzer_three_month_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py index 053dfbe..93d6ae2 100644 --- a/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_3month_run_once_empty_analytics.py @@ -47,6 +47,7 @@ def test_analyzer_three_month_period_run_once_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py index cf12e33..9d3a531 100644 --- a/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_6month_recompute_available_analytics.py @@ -67,6 +67,7 @@ def test_analyzer_six_month_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) 
diff --git a/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py index 47657b8..3bc0ebf 100644 --- a/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_6month_recompute_empty_analytics.py @@ -47,6 +47,7 @@ def test_analyzer_six_month_period_recompute_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py index 377dedd..4174abf 100644 --- a/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_6month_run_once_available_analytics.py @@ -67,6 +67,7 @@ def test_analyzer_six_month_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py index 5e4e726..df451a0 100644 --- a/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_6month_run_once_empty_analytics.py @@ -47,6 +47,7 @@ def test_analyzer_six_month_period_run_once_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_month_recompute_available_analytics.py b/tests/integration/test_analyzer_period_month_recompute_available_analytics.py index 689fdad..b44039d 100644 --- a/tests/integration/test_analyzer_period_month_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_month_recompute_available_analytics.py @@ -66,6 +66,7 @@ def test_analyzer_month_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py index a6ccc31..8fee78c 100644 --- a/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_month_recompute_empty_analytics.py @@ -46,6 +46,7 @@ def test_analyzer_month_period_recompute_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_month_run_once_available_analytics.py b/tests/integration/test_analyzer_period_month_run_once_available_analytics.py index a6403ab..36f07fe 100644 --- a/tests/integration/test_analyzer_period_month_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_month_run_once_available_analytics.py @@ -66,6 +66,7 @@ def test_analyzer_month_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py 
b/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py index e0ade20..1510889 100644 --- a/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_month_run_once_empty_analytics.py @@ -46,6 +46,7 @@ def test_analyzer_month_period_run_once_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_week_recompute_available_analytics.py b/tests/integration/test_analyzer_period_week_recompute_available_analytics.py index ab29b80..ad4a412 100644 --- a/tests/integration/test_analyzer_period_week_recompute_available_analytics.py +++ b/tests/integration/test_analyzer_period_week_recompute_available_analytics.py @@ -62,6 +62,7 @@ def test_analyzer_week_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py b/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py index 157d042..b1b55ab 100644 --- a/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py +++ b/tests/integration/test_analyzer_period_week_recompute_empty_analytics.py @@ -46,6 +46,7 @@ def test_analyzer_week_period_recompute_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_week_run_once_available_analytics.py b/tests/integration/test_analyzer_period_week_run_once_available_analytics.py index 29e1346..c6814ed 100644 --- a/tests/integration/test_analyzer_period_week_run_once_available_analytics.py +++ b/tests/integration/test_analyzer_period_week_run_once_available_analytics.py @@ -61,6 +61,7 @@ def test_analyzer_week_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py b/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py index 4ca0941..81f791f 100644 --- a/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py +++ b/tests/integration/test_analyzer_period_week_run_once_empty_analytics.py @@ -45,6 +45,7 @@ def test_analyzer_week_period_run_once_empty_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_automation_base_create_manual_saga.py b/tests/integration/test_automation_base_create_manual_saga.py new file mode 100644 index 0000000..72f1c6b --- /dev/null +++ b/tests/integration/test_automation_base_create_manual_saga.py @@ -0,0 +1,44 @@ +import unittest + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +class TestManualSagaCreation(unittest.TestCase): + def test_create_manual_saga(self): + guild_id = "1234" + + db_access = launch_db_access(guild_id) + db_access.db_mongo_client["Saga"].drop_collection("sagas") + automation_base = AutomationBase() + + data = { + "guildId": guild_id, + "created": False, + "discordId": "user1", + "message": "This message is sent you for notifications!", + "userFallback": True, + 
} + + saga_id = automation_base._create_manual_saga(data) + + manual_saga = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"sagaId": saga_id} + ) + + self.assertEqual(manual_saga["choreography"]["name"], "DISCORD_NOTIFY_USERS") + self.assertEqual( + manual_saga["choreography"]["transactions"], + [ + { + "queue": "DISCORD_BOT", + "event": "SEND_MESSAGE", + "order": 1, + "status": "NOT_STARTED", + } + ], + ) + self.assertEqual(manual_saga["status"], "IN_PROGRESS") + self.assertEqual(manual_saga["data"], data) + self.assertEqual(manual_saga["sagaId"], saga_id) diff --git a/tests/integration/test_automation_base_get_users_from_memberactivities_empty_database.py b/tests/integration/test_automation_base_get_users_from_memberactivities_empty_database.py new file mode 100644 index 0000000..d68d294 --- /dev/null +++ b/tests/integration/test_automation_base_get_users_from_memberactivities_empty_database.py @@ -0,0 +1,205 @@ +from datetime import datetime, timedelta + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +def test_automation_base_get_users_no_data_new_disengaged(): + """ + try to get the users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_disengaged" + ) + + assert users1 == [] + assert users2 == [] + + +def test_automation_base_get_users_no_data_new_active(): + """ + try to get the users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_active" + ) + + assert users1 == [] + assert users2 == [] + + +def test_automation_base_get_users_empty_new_disengaged(): + """ + get empty users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + db_access.db_mongo_client[guildId]["memberactivities"].delete_many({}) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guildId]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": [], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": [], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + 
"all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_disengaged" + ) + + assert users1 == [] + assert users2 == [] + + +def test_automation_base_get_users_empty_new_active(): + """ + get empty users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + db_access.db_mongo_client[guildId]["memberactivities"].delete_many({}) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guildId]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": ["user5", "user8"], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": [], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["user1", "user2"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_active" + ) + + assert users1 == [] + assert users2 == [] diff --git a/tests/integration/test_automation_base_get_users_from_memberactivities_filled_database.py b/tests/integration/test_automation_base_get_users_from_memberactivities_filled_database.py new file mode 100644 index 0000000..6ace0da --- /dev/null +++ b/tests/integration/test_automation_base_get_users_from_memberactivities_filled_database.py @@ -0,0 +1,249 @@ +from datetime import datetime, timedelta + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +def test_automation_base_get_users_one_user_new_disengaged(): + """ + get empty users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, 
second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guildId]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["user1"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": [], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_disengaged" + ) + + assert users1 == ["user1"] + assert users2 == [] + + +def test_automation_base_get_users_multiple_user_new_disengaged(): + """ + get empty users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guildId]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["user1", "user2"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["user3", "user6", "user9"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_disengaged" + ) + + assert users1 == ["user1", "user2"] + assert users2 == ["user3", "user6", "user9"] + + +def 
test_automation_base_get_users_multiple_user_new_active(): + """ + get empty users in case of no data available + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("memberactivities") + db_access.db_mongo_client[guildId]["memberactivities"].delete_many({}) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guildId]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": ["user10"], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["user1", "user2"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": ["user5", "user8", "user12"], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["user3", "user6", "user9"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": ["user10"], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_base = AutomationBase() + users1, users2 = automation_base._get_users_from_memberactivities( + guildId, category="all_new_active" + ) + + assert users1 == ["user5", "user8", "user12"] + assert users2 == ["user10"] diff --git a/tests/integration/test_automation_base_prepare_names_globalname.py b/tests/integration/test_automation_base_prepare_names_globalname.py new file mode 100644 index 0000000..d0d3831 --- /dev/null +++ b/tests/integration/test_automation_base_prepare_names_globalname.py @@ -0,0 +1,103 @@ +from datetime import datetime, timedelta + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +def test_prepare_ngu_some_data_globalname_strategy(): + """ + test the preparation module in case of some data available + the output should be have the names of the field `globalName` + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("guildmembers") + + db_access.db_mongo_client[guildId]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", + "nickname": None, 
+ }, + { + "discordId": "1113", + "username": "user3", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + automation_base = AutomationBase() + id_names = automation_base.prepare_names( + guild_id=guildId, + user_ids=["1111", "1112", "1113", "1116"], + user_field="globalName", + ) + + assert id_names == [ + ( + "1111", + "User1GlobalName", + ), + ("1112", "User2GlobalName"), + ("1113", None), + ("1116", "User6GlobalName"), + ] diff --git a/tests/integration/test_automation_base_prepare_names_ngu.py b/tests/integration/test_automation_base_prepare_names_ngu.py new file mode 100644 index 0000000..e73af3a --- /dev/null +++ b/tests/integration/test_automation_base_prepare_names_ngu.py @@ -0,0 +1,117 @@ +from datetime import datetime, timedelta + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +def test_prepare_ngu_no_data(): + """ + test the ngu preparation module in case of no data available + the output should be an empty list + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("guildmembers") + + automation_base = AutomationBase() + names = automation_base.prepare_names(guild_id=guildId, user_ids=[]) + + assert names == [] + + +def test_prepare_ngu_some_data_ngu_strategy(): + """ + test the name preparation module in case of some data available + the output should be have the names with the priority of ngu + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("guildmembers") + + db_access.db_mongo_client[guildId]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", # this will be used for the message + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", # this will be used for the message + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", # this will be used for the message + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": 
"0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + automation_base = AutomationBase() + id_names = automation_base.prepare_names( + guild_id=guildId, + user_ids=["1111", "1112", "1113", "1116", "1119"], + user_field="ngu", + ) + + assert id_names == [ + ("1111", "User1NickName"), + ("1112", "User2GlobalName"), + ("1113", "user3"), + ("1116", "User6NickName"), + ("1119", "User9GlobalName"), + ] diff --git a/tests/integration/test_automation_base_prepare_names_nickname.py b/tests/integration/test_automation_base_prepare_names_nickname.py new file mode 100644 index 0000000..c121a18 --- /dev/null +++ b/tests/integration/test_automation_base_prepare_names_nickname.py @@ -0,0 +1,100 @@ +from datetime import datetime, timedelta + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +def test_prepare_ngu_some_data_nickname_strategy(): + """ + test the preparation module in case of some data available + the output should be have the names of the field `nickname` + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("guildmembers") + + db_access.db_mongo_client[guildId]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + automation_base = AutomationBase() + id_names = automation_base.prepare_names( + guild_id=guildId, + user_ids=["1111", "1112", "1113", "1116"], + user_field="nickname", + ) + + assert id_names == [ + ("1111", "User1NickName"), + ("1112", None), + ("1113", None), + ("1116", "User6NickName"), + ] diff --git a/tests/integration/test_automation_base_prepare_names_usernames.py b/tests/integration/test_automation_base_prepare_names_usernames.py new file mode 100644 index 0000000..6c5c0ba --- /dev/null +++ 
b/tests/integration/test_automation_base_prepare_names_usernames.py @@ -0,0 +1,195 @@ +from datetime import datetime, timedelta + +from automation.utils.automation_base import AutomationBase + +from .utils.analyzer_setup import launch_db_access + + +def test_prepare_ngu_some_data_nickname_strategy(): + """ + test the preparation module in case of some data available + the output should be have the names of the field `username` + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("guildmembers") + + db_access.db_mongo_client[guildId]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + automation_base = AutomationBase() + id_names = automation_base.prepare_names( + guild_id=guildId, + user_ids=["1111", "1112", "1113", "1116"], + user_field="username", + ) + + assert id_names == [ + ("1111", "user1"), + ("1112", "user2"), + ("1113", "user3"), + ("1116", "user6"), + ] + + +def test_prepare_ngu_some_data_username_strategy(): + """ + test the username preparation module in case of some data available + the output should be have the names of the field `username` + """ + guildId = "1234" + db_access = launch_db_access(guildId) + + db_access.db_mongo_client[guildId].drop_collection("guildmembers") + + db_access.db_mongo_client[guildId]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": 
None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + automation_base = AutomationBase() + id_names = automation_base.prepare_names( + guild_id=guildId, + user_ids=["1111", "1112", "1113", "1116"], + user_field="username", + ) + + assert id_names == [ + ("1111", "user1"), + ("1112", "user2"), + ("1113", "user3"), + ("1116", "user6"), + ] diff --git a/tests/integration/test_automation_db_load_from_db.py b/tests/integration/test_automation_db_load_from_db.py new file mode 100644 index 0000000..213bd60 --- /dev/null +++ b/tests/integration/test_automation_db_load_from_db.py @@ -0,0 +1,278 @@ +import os +import unittest +from datetime import datetime, timedelta + +from automation.utils.model import AutomationDB +from dotenv import load_dotenv +from utils.get_mongo_client import MongoSingleton + + +class TestAutomationDBLoadFromDB(unittest.TestCase): + def test_load_from_db_no_results(self): + instance = MongoSingleton.get_instance() + client = instance.get_client() + + load_dotenv() + db_name = os.getenv("AUTOMATION_DB_NAME") + collection_name = os.getenv("AUTOMATION_DB_COLLECTION") + + client[db_name].drop_collection(collection_name) + + automation_db = AutomationDB() + automations = automation_db.load_from_db(guild_id="123") + self.assertEqual(automations, []) + + def test_load_from_db(self): + instance = MongoSingleton.get_instance() + client = instance.get_client() + + load_dotenv() + db_name = os.getenv("AUTOMATION_DB_NAME") + collection_name = os.getenv("AUTOMATION_DB_COLLECTION") + + client[db_name].drop_collection(collection_name) + past_two_day_created_at = datetime.now() - timedelta(days=2) + yesterday_created_at = datetime.now() - timedelta(days=1) + today_created_at = datetime.now() - timedelta(days=0) + + automations_dict = [ + { + "guildId": "123", + "triggers": [ + {"options": {"category": "all_new_disengaged"}, "enabled": True}, + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + "actions": [ + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + ], + "report": { + "recipientIds": ["111"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + "enabled": True, + "createdAt": past_two_day_created_at, + "updatedAt": past_two_day_created_at, + "id": "dsajhf2390j0wadjc", + }, + { + "guildId": "124", + "triggers": [ + {"options": {"category": "all_new_disengaged"}, "enabled": True}, + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + "actions": [ + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! 
please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + ], + "report": { + "recipientIds": ["111"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + "enabled": True, + "createdAt": yesterday_created_at, + "updatedAt": yesterday_created_at, + "id": "328qujmajdsoiwur", + }, + { + "guildId": "123", + "triggers": [ + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + "actions": [ + { + "template": "hello {{username}}!", + "options": {}, + "enabled": True, + }, + { + "template": "hi {{username}}!", + "options": {}, + "enabled": True, + }, + ], + "report": { + "recipientIds": ["111", "113"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + "enabled": True, + "createdAt": today_created_at, + "updatedAt": today_created_at, + "id": "uahfoiewrj8979832", + }, + ] + + client[db_name][collection_name].insert_many(automations_dict) + + automation_db = AutomationDB() + + automations = automation_db.load_from_db(guild_id="123") + + self.assertEqual(len(automations), 2) + for at in automations: + at_dict = at.to_dict() + + self.assertEqual(at_dict["guildId"], "123") + + assert ( + at_dict["triggers"] + == [ + {"options": {"category": "all_new_disengaged"}, "enabled": True}, + {"options": {"category": "all_new_active"}, "enabled": True}, + ] + ) or ( + at_dict["triggers"] + == [ + {"options": {"category": "all_new_active"}, "enabled": True}, + ] + ) + + assert ( + at_dict["actions"] + == [ + { + "template": "hello {{username}}!", + "options": {}, + "enabled": True, + }, + { + "template": "hi {{username}}!", + "options": {}, + "enabled": True, + }, + ] + ) or ( + at_dict["actions"] + == [ + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + ] + ) + + self.assertIn( + at_dict["report"], + [ + { + "recipientIds": ["111", "113"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + { + "recipientIds": ["111"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + ], + ) + self.assertEqual(at_dict["enabled"], True) + self.assertIn( + at_dict["createdAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + [ + past_two_day_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + today_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ], + ) + self.assertIn( + at_dict["updatedAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + [ + past_two_day_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + today_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ], + ) + self.assertIn(at_dict["id"], ["dsajhf2390j0wadjc", "uahfoiewrj8979832"]) + automations = automation_db.load_from_db(guild_id="124") + + self.assertEqual(len(automations), 1) + for at in automations: + at_dict = at.to_dict() + + self.assertEqual(at_dict["guildId"], "124") + self.assertEqual( + at_dict["triggers"], + [ + {"options": {"category": "all_new_disengaged"}, "enabled": True}, + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + ) + self.assertEqual( + at_dict["actions"], + [ + { + "template": "hey {{username}}! 
please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + ], + ) + self.assertEqual( + at_dict["report"], + { + "recipientIds": ["111"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + ) + self.assertEqual(at_dict["enabled"], True) + self.assertEqual( + at_dict["createdAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + yesterday_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ) + self.assertEqual(at_dict["id"], "328qujmajdsoiwur") diff --git a/tests/integration/test_automation_db_save_to_db.py b/tests/integration/test_automation_db_save_to_db.py new file mode 100644 index 0000000..dd3fa64 --- /dev/null +++ b/tests/integration/test_automation_db_save_to_db.py @@ -0,0 +1,205 @@ +import os +import unittest +from datetime import datetime, timezone + +from automation.utils.interfaces import Automation +from automation.utils.model import AutomationDB +from dotenv import load_dotenv +from utils.get_mongo_client import MongoSingleton + + +class TestAutomationDBSaveToDB(unittest.TestCase): + def test_save_to_db_automation_instance(self): + instance = MongoSingleton.get_instance() + client = instance.get_client() + + load_dotenv() + db_name = os.getenv("AUTOMATION_DB_NAME") + collection_name = os.getenv("AUTOMATION_DB_COLLECTION") + + client[db_name].drop_collection(collection_name) + + today_created_at = datetime.now(tz=timezone.utc) + + automation_dict = { + "guildId": "123", + "triggers": [ + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + "actions": [ + { + "template": "hello {{username}}!", + "options": {}, + "enabled": True, + }, + { + "template": "hi {{username}}!", + "options": {}, + "enabled": True, + }, + ], + "report": { + "recipientIds": ["111", "113"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + "enabled": True, + "createdAt": today_created_at, + "updatedAt": today_created_at, + "id": "hafujwe09023", + } + + automation = Automation.from_dict(automation_dict) + automation_db = AutomationDB() + + automation_db.save_to_db(automation) + + cursor = client[db_name][collection_name].find({"guildId": "123"}, {"_id": 0}) + + at_from_db = list(cursor) + + self.assertEqual(len(at_from_db), 1) + self.assertEqual(at_from_db[0]["guildId"], "123") + self.assertEqual( + at_from_db[0]["triggers"], + [ + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + ) + self.assertEqual( + at_from_db[0]["actions"], + [ + { + "template": "hello {{username}}!", + "options": {}, + "enabled": True, + }, + { + "template": "hi {{username}}!", + "options": {}, + "enabled": True, + }, + ], + ) + self.assertEqual( + at_from_db[0]["report"], + { + "recipientIds": ["111", "113"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + ) + self.assertEqual(at_from_db[0]["enabled"], True) + self.assertEqual( + at_from_db[0]["createdAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + today_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ) + self.assertEqual( + at_from_db[0]["updatedAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + today_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ) + self.assertEqual(at_from_db[0]["id"], "hafujwe09023") + + def test_save_to_db_dict_instance(self): + instance = MongoSingleton.get_instance() + client = 
instance.get_client() + + load_dotenv() + db_name = os.getenv("AUTOMATION_DB_NAME") + collection_name = os.getenv("AUTOMATION_DB_COLLECTION") + today_created_at = datetime.now(tz=timezone.utc) + + client[db_name].drop_collection(collection_name) + + automation_dict = { + "guildId": "123", + "triggers": [ + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + "actions": [ + { + "template": "hello {{username}}!", + "options": {}, + "enabled": True, + }, + { + "template": "hi {{username}}!", + "options": {}, + "enabled": True, + }, + ], + "report": { + "recipientIds": ["111", "113"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + "enabled": True, + "createdAt": today_created_at, + "updatedAt": today_created_at, + } + + automation_db = AutomationDB() + + automation_db.save_to_db(automation_dict) + + cursor = client[db_name][collection_name].find({"guildId": "123"}, {"_id": 0}) + + at_from_db = list(cursor) + + self.assertEqual(len(at_from_db), 1) + self.assertEqual(at_from_db[0]["guildId"], "123") + self.assertEqual( + at_from_db[0]["triggers"], + [ + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + ) + self.assertEqual( + at_from_db[0]["actions"], + [ + { + "template": "hello {{username}}!", + "options": {}, + "enabled": True, + }, + { + "template": "hi {{username}}!", + "options": {}, + "enabled": True, + }, + ], + ) + self.assertEqual( + at_from_db[0]["report"], + { + "recipientIds": ["111", "113"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + ) + self.assertEqual(at_from_db[0]["enabled"], True) + self.assertEqual( + at_from_db[0]["createdAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + today_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ) + self.assertEqual( + at_from_db[0]["updatedAt"].strftime("%Y-%m-%dT%H:%M:%S+00:00"), + today_created_at.strftime("%Y-%m-%dT%H:%M:%S+00:00"), + ) + + def test_raise_error(self): + """ + give the save_to_db a type that it raises an error + """ + automation_db = AutomationDB() + + try: + automation_db.save_to_db(automation="this was an automation") + except TypeError as exp: + msg = "Not supported the type of entered object!," + msg += f"given type is: {type('ss')}" + self.assertEqual(str(exp), msg) diff --git a/tests/integration/test_automation_fire_messages_mongo_saga_message_ngu_strategy.py b/tests/integration/test_automation_fire_messages_mongo_saga_message_ngu_strategy.py new file mode 100644 index 0000000..4cfda71 --- /dev/null +++ b/tests/integration/test_automation_fire_messages_mongo_saga_message_ngu_strategy.py @@ -0,0 +1,239 @@ +from datetime import datetime, timedelta + +from automation.automation_workflow import AutomationWorkflow +from automation.utils.interfaces import ( + Automation, + AutomationAction, + AutomationReport, + AutomationTrigger, +) + +from .utils.analyzer_setup import launch_db_access + + +def test_automation_fire_message_check_mongodb_document_messages_ngu_strategy(): + """ + check the created messages in saga + """ + guild_id = "1234" + db_access = launch_db_access(guild_id) + + db_access.db_mongo_client[guild_id].drop_collection("memberactivities") + db_access.db_mongo_client["Saga"].drop_collection("sagas") + db_access.db_mongo_client[guild_id].drop_collection("guildmembers") + db_access.db_mongo_client["Automation"].drop_collection("automations") + + db_access.db_mongo_client[guild_id]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + 
"joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", # this will be used for the message + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", # this will be used for the message + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", # this will be used for the message + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + { + "discordId": "999", + "username": "community_manager", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + triggers = [ + AutomationTrigger(options={"category": "all_new_disengaged"}, enabled=True), + AutomationTrigger(options={"category": "all_new_active"}, enabled=False), + ] + actions = [ + AutomationAction( + template="hey {{ngu}}! please get back to us!", + options={}, + enabled=True, + ), + AutomationAction( + template="hey {{ngu}}! please get back to us2!", + options={}, + enabled=False, + ), + ] + + report = AutomationReport( + recipientIds=["999"], + template="hey body! 
This users were messaged:\n{{#each usernames}}{{this}}{{/each}}", + options={}, + enabled=True, + ) + today_time = datetime.now() + + automation = Automation( + guild_id, + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + db_access.db_mongo_client["Automation"]["automations"].insert_one( + automation.to_dict() + ) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guild_id]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1111", "1112", "1113"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1116", "1119"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_workflow = AutomationWorkflow() + automation_workflow.start(guild_id) + + count = db_access.db_mongo_client["Saga"]["sagas"].count_documents({}) + assert count == 4 + + user1_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1111"} + ) + assert user1_doc["data"]["message"] == ("hey User1NickName! please get back to us!") + + user2_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1112"} + ) + assert user2_doc["data"]["message"] == ( + "hey User2GlobalName! please get back to us!" + ) + + user3_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1113"} + ) + assert user3_doc["data"]["message"] == ("hey user3! please get back to us!") + + user_cm_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "999"} + ) + expected_msg = "hey body! 
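The report text asserted below is presumably produced by turning each messaged user into a "- <name>\n" line and rendering that list through the handlebars template. A small pybars sketch (illustrative only; the variable names are assumptions):

from pybars import Compiler

template_src = (
    "hey body! This users were messaged:\n"
    "{{#each usernames}}{{this}}{{/each}}"
)
lines = [f"- {name}\n" for name in ("User1NickName", "User2GlobalName", "user3")]
rendered = str(Compiler().compile(template_src)({"usernames": lines}))
# rendered ends with "- User1NickName\n- User2GlobalName\n- user3\n"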
This users were messaged:\n" + expected_msg += "- User1NickName\n- User2GlobalName\n- user3\n" + assert user_cm_doc["data"]["message"] == expected_msg diff --git a/tests/integration/test_automation_fire_messages_mongo_saga_message_nickname_strategy.py b/tests/integration/test_automation_fire_messages_mongo_saga_message_nickname_strategy.py new file mode 100644 index 0000000..932e6d6 --- /dev/null +++ b/tests/integration/test_automation_fire_messages_mongo_saga_message_nickname_strategy.py @@ -0,0 +1,237 @@ +from datetime import datetime, timedelta + +from automation.automation_workflow import AutomationWorkflow +from automation.utils.interfaces import ( + Automation, + AutomationAction, + AutomationReport, + AutomationTrigger, +) + +from .utils.analyzer_setup import launch_db_access + + +def test_automation_fire_message_check_mongodb_document_messages_username_strategy(): + """ + check the created messages in saga + """ + guild_id = "1234" + db_access = launch_db_access(guild_id) + + db_access.db_mongo_client[guild_id].drop_collection("memberactivities") + db_access.db_mongo_client["Saga"].drop_collection("sagas") + db_access.db_mongo_client[guild_id].drop_collection("guildmembers") + db_access.db_mongo_client["Automation"].drop_collection("automations") + + db_access.db_mongo_client[guild_id]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", # this will be used for the message + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", # this will be used for the message + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", # this will be used for the message + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + { + "discordId": "999", + "username": "community_manager", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + triggers = [ + AutomationTrigger(options={"category": "all_new_disengaged"}, enabled=True), + AutomationTrigger(options={"category": "all_new_active"}, enabled=False), + ] + actions = [ + AutomationAction( + template="hey {{nickname}}! please get back to us!", + options={}, + enabled=True, + ), + AutomationAction( + template="hey {{nickname}}! 
please get back to us2!", + options={}, + enabled=False, + ), + ] + + report = AutomationReport( + recipientIds=["999"], + template="hey body! This users were messaged:\n{{#each usernames}}{{this}}{{/each}}", + options={}, + enabled=True, + ) + today_time = datetime.now() + + automation = Automation( + guild_id, + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + db_access.db_mongo_client["Automation"]["automations"].insert_one( + automation.to_dict() + ) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guild_id]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1111", "1112", "1113"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1116", "1119"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_workflow = AutomationWorkflow() + automation_workflow.start(guild_id) + + count = db_access.db_mongo_client["Saga"]["sagas"].count_documents({}) + assert count == 4 + + user1_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1111"} + ) + assert user1_doc["data"]["message"] == ("hey User1NickName! please get back to us!") + + user2_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1112"} + ) + assert user2_doc["data"]["message"] == ("hey ! please get back to us!") + + user3_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1113"} + ) + assert user3_doc["data"]["message"] == ("hey ! please get back to us!") + + user_cm_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "999"} + ) + expected_msg = "hey body! 
This users were messaged:\n" + expected_msg += "- User1NickName\n- 1112\n- 1113\n" + assert user_cm_doc["data"]["message"] == expected_msg diff --git a/tests/integration/test_automation_fire_messages_mongo_saga_message_no_handlebar.py b/tests/integration/test_automation_fire_messages_mongo_saga_message_no_handlebar.py new file mode 100644 index 0000000..9de908b --- /dev/null +++ b/tests/integration/test_automation_fire_messages_mongo_saga_message_no_handlebar.py @@ -0,0 +1,232 @@ +from datetime import datetime, timedelta + +from automation.automation_workflow import AutomationWorkflow +from automation.utils.interfaces import ( + Automation, + AutomationAction, + AutomationReport, + AutomationTrigger, +) + +from .utils.analyzer_setup import launch_db_access + + +def test_automation_fire_message_check_mongodb_document_messages_ngu_strategy(): + """ + check the created messages in saga + """ + guild_id = "1234" + db_access = launch_db_access(guild_id) + + db_access.db_mongo_client[guild_id].drop_collection("memberactivities") + db_access.db_mongo_client["Saga"].drop_collection("sagas") + db_access.db_mongo_client[guild_id].drop_collection("guildmembers") + db_access.db_mongo_client["Automation"].drop_collection("automations") + + db_access.db_mongo_client[guild_id]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", # this will be used for the message + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", # this will be used for the message + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", # this will be used for the message + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + { + "discordId": "999", + "username": "community_manager", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + triggers = [ + AutomationTrigger(options={"category": "all_new_disengaged"}, enabled=True), + AutomationTrigger(options={"category": "all_new_active"}, enabled=False), + ] + actions = [ + AutomationAction( + template="hey! please get back to us!", + options={}, + enabled=True, + ), + ] + + report = AutomationReport( + recipientIds=["999"], + template="hey body! 
This users were messaged:\n{{#each usernames}}{{this}}{{/each}}", + options={}, + enabled=True, + ) + today_time = datetime.now() + + automation = Automation( + guild_id, + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + db_access.db_mongo_client["Automation"]["automations"].insert_one( + automation.to_dict() + ) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guild_id]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1111", "1112", "1113"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1116", "1119"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_workflow = AutomationWorkflow() + automation_workflow.start(guild_id) + + count = db_access.db_mongo_client["Saga"]["sagas"].count_documents({}) + assert count == 4 + + user1_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1111"} + ) + assert user1_doc["data"]["message"] == "hey! please get back to us!" + + user2_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1112"} + ) + assert user2_doc["data"]["message"] == "hey! please get back to us!" + + user3_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1113"} + ) + assert user3_doc["data"]["message"] == "hey! please get back to us!" + + user_cm_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "999"} + ) + expected_msg = "hey body! 
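This test's action template carries no `{{...}}` placeholder, so no per-user name is compiled and the raw template is sent to every user, as asserted above. A hypothetical detection helper (the regex and function name are assumptions, not the workflow's own handlebar parser):

import re


def first_handlebar(template: str) -> str | None:
    # return the first {{placeholder}} name, or None when the template has none
    match = re.search(r"\{\{\s*(\w+)\s*\}\}", template)
    return match.group(1) if match else None


assert first_handlebar("hey! please get back to us!") is None
assert first_handlebar("hey {{username}}! please get back to us!") == "username"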
This users were messaged:\n" + expected_msg += "- 1111\n- 1112\n- 1113\n" + assert user_cm_doc["data"]["message"] == expected_msg diff --git a/tests/integration/test_automation_fire_messages_mongo_saga_message_username_strategy.py b/tests/integration/test_automation_fire_messages_mongo_saga_message_username_strategy.py new file mode 100644 index 0000000..0c7d744 --- /dev/null +++ b/tests/integration/test_automation_fire_messages_mongo_saga_message_username_strategy.py @@ -0,0 +1,237 @@ +from datetime import datetime, timedelta + +from automation.automation_workflow import AutomationWorkflow +from automation.utils.interfaces import ( + Automation, + AutomationAction, + AutomationReport, + AutomationTrigger, +) + +from .utils.analyzer_setup import launch_db_access + + +def test_automation_fire_message_check_mongodb_document_messages_username_strategy(): + """ + check the created messages in saga + """ + guild_id = "1234" + db_access = launch_db_access(guild_id) + + db_access.db_mongo_client[guild_id].drop_collection("memberactivities") + db_access.db_mongo_client["Saga"].drop_collection("sagas") + db_access.db_mongo_client[guild_id].drop_collection("guildmembers") + db_access.db_mongo_client["Automation"].drop_collection("automations") + + db_access.db_mongo_client[guild_id]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", # this will be used for the message + }, + { + "discordId": "1112", + "username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", # this will be used for the message + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", # this will be used for the message + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + { + "discordId": "999", + "username": "community_manager", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + triggers = [ + AutomationTrigger(options={"category": "all_new_disengaged"}, enabled=True), + AutomationTrigger(options={"category": "all_new_active"}, enabled=False), + ] + actions = [ + AutomationAction( + template="hey {{username}}! please get back to us!", + options={}, + enabled=True, + ), + AutomationAction( + template="hey {{username}}! 
please get back to us2!", + options={}, + enabled=False, + ), + ] + + report = AutomationReport( + recipientIds=["999"], + template="hey body! This users were messaged:\n{{#each usernames}}{{this}}{{/each}}", + options={}, + enabled=True, + ) + today_time = datetime.now() + + automation = Automation( + guild_id, + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + db_access.db_mongo_client["Automation"]["automations"].insert_one( + automation.to_dict() + ) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guild_id]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1111", "1112", "1113"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1116", "1119"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + automation_workflow = AutomationWorkflow() + automation_workflow.start(guild_id) + + count = db_access.db_mongo_client["Saga"]["sagas"].count_documents({}) + assert count == 4 + + user1_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1111"} + ) + assert user1_doc["data"]["message"] == ("hey user1! please get back to us!") + + user2_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1112"} + ) + assert user2_doc["data"]["message"] == ("hey user2! please get back to us!") + + user3_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "1113"} + ) + assert user3_doc["data"]["message"] == ("hey user3! please get back to us!") + + user_cm_doc = db_access.db_mongo_client["Saga"]["sagas"].find_one( + {"data.discordId": "999"} + ) + expected_msg = "hey body! 
This users were messaged:\n" + expected_msg += "- user1\n- user2\n- user3\n" + assert user_cm_doc["data"]["message"] == expected_msg diff --git a/tests/integration/test_exclude_bots.py b/tests/integration/test_exclude_bots.py index 874a8d2..973e23d 100644 --- a/tests/integration/test_exclude_bots.py +++ b/tests/integration/test_exclude_bots.py @@ -59,6 +59,7 @@ def test_excluding_bots_heatmaps(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_1_year_run_once.py b/tests/integration/test_generated_graph_period_1_year_run_once.py index 5c17584..4f2e63e 100644 --- a/tests/integration/test_generated_graph_period_1_year_run_once.py +++ b/tests/integration/test_generated_graph_period_1_year_run_once.py @@ -75,6 +75,7 @@ def test_networkgraph_one_year_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_1year.py b/tests/integration/test_generated_graph_period_1year.py index 50e63b7..84c4af3 100644 --- a/tests/integration/test_generated_graph_period_1year.py +++ b/tests/integration/test_generated_graph_period_1year.py @@ -75,6 +75,7 @@ def test_networkgraph_one_year_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_35_days.py b/tests/integration/test_generated_graph_period_35_days.py index 602ee38..4701d7e 100644 --- a/tests/integration/test_generated_graph_period_35_days.py +++ b/tests/integration/test_generated_graph_period_35_days.py @@ -75,6 +75,7 @@ def test_networkgraph_35_days_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_35_days_run_once.py b/tests/integration/test_generated_graph_period_35_days_run_once.py index d996bd6..aea2f77 100644 --- a/tests/integration/test_generated_graph_period_35_days_run_once.py +++ b/tests/integration/test_generated_graph_period_35_days_run_once.py @@ -75,6 +75,7 @@ def test_networkgraph_35_days_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_3_months.py b/tests/integration/test_generated_graph_period_3_months.py index 8d08b4a..fec5821 100644 --- a/tests/integration/test_generated_graph_period_3_months.py +++ b/tests/integration/test_generated_graph_period_3_months.py @@ -75,6 +75,7 @@ def test_networkgraph_three_months_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_3_months_run_once.py b/tests/integration/test_generated_graph_period_3_months_run_once.py index ad86f9a..c079294 100644 --- a/tests/integration/test_generated_graph_period_3_months_run_once.py +++ b/tests/integration/test_generated_graph_period_3_months_run_once.py @@ -75,6 +75,7 @@ def test_networkgraph_three_months_period_run_once_available_analytics(): "channelName": 
"general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_6_months.py b/tests/integration/test_generated_graph_period_6_months.py index a320ae0..b2f969e 100644 --- a/tests/integration/test_generated_graph_period_6_months.py +++ b/tests/integration/test_generated_graph_period_6_months.py @@ -75,6 +75,7 @@ def test_networkgraph_six_months_period_recompute_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_generated_graph_period_6_months_run_once.py b/tests/integration/test_generated_graph_period_6_months_run_once.py index 1352146..0146b54 100644 --- a/tests/integration/test_generated_graph_period_6_months_run_once.py +++ b/tests/integration/test_generated_graph_period_6_months_run_once.py @@ -75,6 +75,7 @@ def test_networkgraph_six_months_period_run_once_available_analytics(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_louvain_algorithm_computation.py b/tests/integration/test_louvain_algorithm_computation.py new file mode 100644 index 0000000..b08c224 --- /dev/null +++ b/tests/integration/test_louvain_algorithm_computation.py @@ -0,0 +1,120 @@ +from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain + +from .utils.neo4j_conn import neo4j_setup + + +def test_louvain_algorithm_available_data(): + """ + test the louvain algorithm with some nodes connected + """ + neo4j_ops = neo4j_setup() + # deleting all data + neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") + + # timestamps + today = 1689280200.0 + yesterday = 1689193800.0 + guild_id = "1234" + + # creating some nodes with data + neo4j_ops.gds.run_cypher( + f""" + CREATE (a:DiscordAccount) -[:IS_MEMBER]->(g:Guild {{guildId: '{guild_id}'}}) + CREATE (b:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE (c:DiscordAccount) -[:IS_MEMBER]->(g) + SET a.userId = "1000" + SET b.userId = "1001" + SET c.userId = "1002" + MERGE (a) -[r:INTERACTED_WITH {{weight: 1, date: {yesterday}}}]->(b) + MERGE (a) -[r2:INTERACTED_WITH {{weight: 2, date: {today}}}]->(b) + MERGE (a) -[r3:INTERACTED_WITH {{weight: 3, date: {yesterday}}}]->(c) + MERGE (b) -[r4:INTERACTED_WITH {{weight: 2, date: {yesterday}}}]->(c) + SET r.guildId = '{guild_id}' + SET r2.guildId = '{guild_id}' + SET r3.guildId = '{guild_id}' + SET r4.guildId = '{guild_id}' + """ + ) + louvain = Louvain(neo4j_ops) + + louvain.compute(guild_id=guild_id, from_start=False) + + results = neo4j_ops.gds.run_cypher( + f""" + MATCH (g:Guild {{guildId: '{guild_id}'}})-[r:HAVE_METRICS]->(g) + RETURN r.date as date, r.louvainModularityScore as modularityScore + """ + ) + + assert len(results) == 2 + assert results["date"].iloc[0] in [yesterday, today] + assert results["date"].iloc[1] in [yesterday, today] + + +def test_louvain_algorithm_more_available_data(): + """ + test the louvain algorithm with some more data available + """ + neo4j_ops = neo4j_setup() + # deleting all data + neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") + + # timestamps + today = 1689280200.0 + yesterday = 1689193800.0 + guild_id = "1234" + + # creating some nodes with data + neo4j_ops.gds.run_cypher( + f""" + CREATE (a:DiscordAccount) -[:IS_MEMBER]->(g:Guild {{guildId: '{guild_id}'}}) + CREATE (b:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE 
(c:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE (d:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE (e:DiscordAccount) -[:IS_MEMBER]->(g) + SET a.userId = "1000" + SET b.userId = "1001" + SET c.userId = "1002" + SET d.userId = "1003" + SET e.userId = "1004" + MERGE (a) -[r:INTERACTED_WITH {{date: {yesterday}, weight: 1}}]->(b) + MERGE (a) -[r2:INTERACTED_WITH {{date: {today}, weight: 2}}]->(b) + MERGE (a) -[r3:INTERACTED_WITH {{date: {yesterday}, weight: 3}}]->(d) + MERGE (c) -[r4:INTERACTED_WITH {{date: {yesterday}, weight: 2}}]->(b) + MERGE (c) -[r5:INTERACTED_WITH {{date: {today}, weight: 1}}]->(b) + MERGE (c) -[r6:INTERACTED_WITH {{date: {yesterday}, weight: 2}}]->(d) + MERGE (d) -[r7:INTERACTED_WITH {{date: {yesterday}, weight: 1}}]->(b) + MERGE (c) -[r8:INTERACTED_WITH {{date: {today}, weight: 2}}]->(a) + MERGE (d) -[r9:INTERACTED_WITH {{date: {today}, weight: 1}}]->(c) + MERGE (b) -[r10:INTERACTED_WITH {{date: {today}, weight: 2}}]->(d) + MERGE (d) -[r11:INTERACTED_WITH {{date: {today}, weight: 1}}]->(c) + MERGE (e) -[r12:INTERACTED_WITH {{date: {today}, weight: 3}}]->(b) + + SET r.guildId = '{guild_id}' + SET r2.guildId = '{guild_id}' + SET r3.guildId = '{guild_id}' + SET r4.guildId = '{guild_id}' + SET r5.guildId = '{guild_id}' + SET r6.guildId = '{guild_id}' + SET r7.guildId = '{guild_id}' + SET r8.guildId = '{guild_id}' + SET r9.guildId = '{guild_id}' + SET r10.guildId = '{guild_id}' + SET r11.guildId = '{guild_id}' + SET r12.guildId = '{guild_id}' + """ + ) + louvain = Louvain(neo4j_ops) + + louvain.compute(guild_id=guild_id, from_start=False) + + results = neo4j_ops.gds.run_cypher( + f""" + MATCH (g:Guild {{guildId: '{guild_id}'}})-[r:HAVE_METRICS]->(g) + RETURN r.date as date, r.louvainModularityScore as modularityScore + """ + ) + print(results) + assert len(results) == 2 + assert results["date"].iloc[0] in [yesterday, today] + assert results["date"].iloc[1] in [yesterday, today] diff --git a/tests/integration/test_louvain_algorithm_get_computed_dates.py b/tests/integration/test_louvain_algorithm_get_computed_dates.py new file mode 100644 index 0000000..c2a1a13 --- /dev/null +++ b/tests/integration/test_louvain_algorithm_get_computed_dates.py @@ -0,0 +1,132 @@ +from discord_analyzer.analysis.neo4j_analysis.louvain import Louvain +from discord_analyzer.analysis.neo4j_utils.projection_utils import ProjectionUtils + +from .utils.neo4j_conn import neo4j_setup + + +def test_louvain_get_computed_dates_empty_data(): + """ + test with empty data for getting the computed dates + """ + neo4j_ops = neo4j_setup() + # deleting all data + neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") + + # timestamps + today = 1689280200.0 + yesterday = 1689193800.0 + guild_id = "1234" + + # creating some nodes with data + neo4j_ops.gds.run_cypher( + f""" + CREATE (a:DiscordAccount) -[:IS_MEMBER]->(g:Guild {{guildId: '{guild_id}'}}) + CREATE (b:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE (c:DiscordAccount) -[:IS_MEMBER]->(g) + SET a.userId = "1000" + SET b.userId = "1001" + SET c.userId = "1002" + MERGE (a) -[r:INTERACTED_WITH {{weight: 1, date: {yesterday}}}]->(b) + MERGE (a) -[r2:INTERACTED_WITH {{weight: 2, date: {today}}}]->(b) + MERGE (a) -[r3:INTERACTED_WITH {{weight: 3, date: {yesterday}}}]->(c) + MERGE (b) -[r4:INTERACTED_WITH {{weight: 2, date: {yesterday}}}]->(c) + SET r.guildId = '{guild_id}' + SET r2.guildId = '{guild_id}' + SET r3.guildId = '{guild_id}' + SET r4.guildId = '{guild_id}' + """ + ) + louvain = Louvain(neo4j_ops) + projection_utils = ProjectionUtils(neo4j_ops.gds, 
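These Louvain tests expect one HAVE_METRICS self-relationship per computed date on the Guild node, carrying a louvainModularityScore. Below is a hedged sketch of such a per-date computation with the GDS Python client; the graph name, projection queries, and parameters are assumptions for illustration, not the Louvain.compute implementation.

from graphdatascience import GraphDataScience


def compute_louvain_for_date_sketch(
    gds: GraphDataScience, guild_id: str, date: float
) -> None:
    # project the guild's interaction network for a single date
    graph, _ = gds.graph.project.cypher(
        f"louvain_{guild_id}_{int(date)}",
        f"""
        MATCH (a:DiscordAccount)-[:IS_MEMBER]->(:Guild {{guildId: '{guild_id}'}})
        RETURN id(a) AS id
        """,
        f"""
        MATCH (a:DiscordAccount)-[r:INTERACTED_WITH {{guildId: '{guild_id}', date: {date}}}]->(b:DiscordAccount)
        RETURN id(a) AS source, id(b) AS target, r.weight AS weight
        """,
    )
    # stats mode: only the modularity is needed, no community write-back
    stats = gds.louvain.stats(graph, relationshipWeightProperty="weight")
    gds.run_cypher(
        f"""
        MATCH (g:Guild {{guildId: '{guild_id}'}})
        MERGE (g)-[r:HAVE_METRICS {{date: {date}}}]->(g)
        SET r.louvainModularityScore = {stats['modularity']}
        """
    )
    graph.drop()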
guildId=guild_id) + + projection_utils = ProjectionUtils(gds=neo4j_ops.gds, guildId=guild_id) + + computed_dates = louvain.get_computed_dates(projection_utils, guildId=guild_id) + + assert computed_dates == set() + + +def test_louvain_get_computed_dates_empty_data_with_have_metrics_relation(): + """ + test with empty data for getting the computed dates + """ + neo4j_ops = neo4j_setup() + # deleting all data + neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") + + # timestamps + today = 1689280200.0 + yesterday = 1689193800.0 + guild_id = "1234" + + # creating some nodes with data + neo4j_ops.gds.run_cypher( + f""" + CREATE (a:DiscordAccount) -[:IS_MEMBER]->(g:Guild {{guildId: '{guild_id}'}}) + CREATE (b:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE (c:DiscordAccount) -[:IS_MEMBER]->(g) + SET a.userId = "1000" + SET b.userId = "1001" + SET c.userId = "1002" + MERGE (a) -[r:INTERACTED_WITH {{weight: 1, date: {yesterday}}}]->(b) + MERGE (a) -[r2:INTERACTED_WITH {{weight: 2, date: {today}}}]->(b) + MERGE (a) -[r3:INTERACTED_WITH {{weight: 3, date: {yesterday}}}]->(c) + MERGE (b) -[r4:INTERACTED_WITH {{weight: 2, date: {yesterday}}}]->(c) + MERGE (g)-[:HAVE_METRICS {{date: {yesterday}}}]->(g) + SET r.guildId = '{guild_id}' + SET r2.guildId = '{guild_id}' + SET r3.guildId = '{guild_id}' + SET r4.guildId = '{guild_id}' + """ + ) + louvain = Louvain(neo4j_ops) + projection_utils = ProjectionUtils(neo4j_ops.gds, guildId=guild_id) + + projection_utils = ProjectionUtils(gds=neo4j_ops.gds, guildId=guild_id) + + computed_dates = louvain.get_computed_dates(projection_utils, guildId=guild_id) + + assert computed_dates == set() + + +def test_louvain_get_computed_dates_one_data(): + """ + test with empty data for getting the computed dates + """ + neo4j_ops = neo4j_setup() + # deleting all data + neo4j_ops.gds.run_cypher("MATCH (n) DETACH DELETE (n)") + + # timestamps + today = 1689280200.0 + yesterday = 1689193800.0 + guild_id = "1234" + + # creating some nodes with data + neo4j_ops.gds.run_cypher( + f""" + CREATE (a:DiscordAccount) -[:IS_MEMBER]->(g:Guild {{guildId: '{guild_id}'}}) + CREATE (b:DiscordAccount) -[:IS_MEMBER]->(g) + CREATE (c:DiscordAccount) -[:IS_MEMBER]->(g) + SET a.userId = "1000" + SET b.userId = "1001" + SET c.userId = "1002" + MERGE (a) -[r:INTERACTED_WITH {{weight: 1, date: {yesterday}}}]->(b) + MERGE (a) -[r2:INTERACTED_WITH {{weight: 2, date: {today}}}]->(b) + MERGE (a) -[r3:INTERACTED_WITH {{weight: 3, date: {yesterday}}}]->(c) + MERGE (b) -[r4:INTERACTED_WITH {{weight: 2, date: {yesterday}}}]->(c) + MERGE (g)-[:HAVE_METRICS {{date: {yesterday}, louvainModularityScore: 0.0}}]->(g) + SET r.guildId = '{guild_id}' + SET r2.guildId = '{guild_id}' + SET r3.guildId = '{guild_id}' + SET r4.guildId = '{guild_id}' + """ + ) + louvain = Louvain(neo4j_ops) + projection_utils = ProjectionUtils(neo4j_ops.gds, guildId=guild_id) + + projection_utils = ProjectionUtils(gds=neo4j_ops.gds, guildId=guild_id) + + computed_dates = louvain.get_computed_dates(projection_utils, guildId=guild_id) + + assert computed_dates == {yesterday} diff --git a/tests/integration/test_member_activity_from_start_no_past_data.py b/tests/integration/test_member_activity_from_start_no_past_data.py index 441bad6..23ee60c 100644 --- a/tests/integration/test_member_activity_from_start_no_past_data.py +++ b/tests/integration/test_member_activity_from_start_no_past_data.py @@ -68,6 +68,7 @@ def test_analyzer_member_activities_from_start_empty_memberactivities(): "channelName": "general", "threadId": None, "threadName": None, 
+ "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py b/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py index 2b9f1dc..e195209 100644 --- a/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py +++ b/tests/integration/test_member_activity_from_start_with_guild_heatmaps_available.py @@ -50,6 +50,7 @@ def test_analyzer_member_activities_from_start_available_heatmaps(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py b/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py index 5de588d..d857d0e 100644 --- a/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py +++ b/tests/integration/test_member_activity_from_start_with_guild_memberactivities_available.py @@ -44,6 +44,7 @@ def test_analyzer_member_activities_from_start_available_member_activity(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_member_activity_from_start_with_one_interval.py b/tests/integration/test_member_activity_from_start_with_one_interval.py index e065b7c..7dfea94 100644 --- a/tests/integration/test_member_activity_from_start_with_one_interval.py +++ b/tests/integration/test_member_activity_from_start_with_one_interval.py @@ -32,6 +32,7 @@ def test_analyzer_from_start_one_interval(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_member_activity_utils.py b/tests/integration/test_member_activity_utils.py index ce3c1f0..776261d 100644 --- a/tests/integration/test_member_activity_utils.py +++ b/tests/integration/test_member_activity_utils.py @@ -30,6 +30,7 @@ def test_utils_get_members(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_mentioned_active_members_from_message.py b/tests/integration/test_mentioned_active_members_from_message.py index def6ba9..d0b94f3 100644 --- a/tests/integration/test_mentioned_active_members_from_message.py +++ b/tests/integration/test_mentioned_active_members_from_message.py @@ -43,6 +43,7 @@ def test_mention_active_members_from_rawinfo(): "channelName": "general", "threadId": None, "threadName": None, + "isGeneratedByWebhook": False, } rawinfo_samples.append(sample) diff --git a/tests/integration/test_neo4j_compute_metrics.py b/tests/integration/test_neo4j_compute_metrics.py index 9eb185e..e2d0295 100644 --- a/tests/integration/test_neo4j_compute_metrics.py +++ b/tests/integration/test_neo4j_compute_metrics.py @@ -98,3 +98,17 @@ def test_guild_results_available(): assert row["date"] in [yesterday, today] assert row["guildId"] == guildId assert bool(np.isnan(row["decentralizationScore"])) is False + + results = neo4j_ops.gds.run_cypher( + f""" + MATCH (g:Guild {{guildId: '{guildId}'}})-[r:HAVE_METRICS]->(g) + RETURN r.date as date, r.louvainModularityScore as modularityScore + """ + ) + + assert len(results) == 2 + print(results) + assert results["date"].iloc[0] in [yesterday, today] + assert results["date"].iloc[1] in 
[yesterday, today] + assert results["modularityScore"].iloc[0] is not None + assert results["modularityScore"].iloc[1] is not None diff --git a/tests/integration/test_publish_on_success.py b/tests/integration/test_publish_on_success.py new file mode 100644 index 0000000..7839ccd --- /dev/null +++ b/tests/integration/test_publish_on_success.py @@ -0,0 +1,339 @@ +import os +from datetime import datetime, timedelta + +from automation.utils.interfaces import ( + Automation, + AutomationAction, + AutomationReport, + AutomationTrigger, +) +from discord_utils import publish_on_success +from dotenv import load_dotenv +from utils.daolytics_uitls import get_mongo_credentials + +from .utils.analyzer_setup import launch_db_access + + +def test_publish_on_success_check_notification_choreographies(): + """ + test the publish on success functions + we want to check the database if the notify choreographies are created + """ + load_dotenv() + + guild_id = "1234" + saga_id = "000000011111113333377777ie0w" + expected_owner_id = "334461287892" + db_access = launch_db_access(guild_id) + saga_db = os.getenv("SAGA_DB_NAME") + saga_collection = os.getenv("SAGA_DB_COLLECTION") + at_db = os.getenv("AUTOMATION_DB_NAME") + at_collection = os.getenv("AUTOMATION_DB_COLLECTION") + + db_access.db_mongo_client["RnDAO"].drop_collection("guilds") + + db_access.db_mongo_client[guild_id].drop_collection("memberactivities") + db_access.db_mongo_client[saga_db].drop_collection(saga_collection) + db_access.db_mongo_client[guild_id].drop_collection("guildmembers") + db_access.db_mongo_client[at_db].drop_collection(at_collection) + + db_access.db_mongo_client["RnDAO"]["guilds"].insert_one( + { + "guildId": guild_id, + "user": expected_owner_id, + "name": "Sample Guild", + "connectedAt": datetime.now(), + "isInProgress": False, + "isDisconnected": False, + "icon": "4256asdiqwjo032", + "window": [7, 1], + "action": [1, 1, 1, 4, 3, 5, 5, 4, 3, 2, 2, 2, 1], + "selectedChannels": [ + { + "channelId": "11111111111111", + "channelName": "general", + }, + ], + } + ) + + # Adding sample memberactivities + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[saga_db][saga_collection].insert_one( + { + "choreography": { + "name": "DISCORD_UPDATE_CHANNELS", + "transactions": [ + { + "queue": "DISCORD_BOT", + "event": "FETCH", + "order": 1, + "status": "SUCCESS", + "start": datetime.now(), + "end": datetime.now(), + "runtime": 1, + }, + { + "queue": "DISCORD_ANALYZER", + "event": "RUN", + "order": 2, + "status": "SUCCESS", + "start": datetime.now(), + "end": datetime.now(), + "runtime": 1, + }, + ], + }, + "status": "IN_PROGRESS", + "data": { + "guildId": guild_id, + "created": False, + "discordId": expected_owner_id, + "message": "data is ready", + "useFallback": True, + }, + "sagaId": saga_id, + "createdAt": datetime.now(), + "updatedAt": datetime.now(), + } + ) + + db_access.db_mongo_client[guild_id]["guildmembers"].insert_many( + [ + { + "discordId": "1111", + "username": "user1", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User1GlobalName", + "nickname": "User1NickName", # this will be used for the message + }, + { + "discordId": "1112", + 
"username": "user2", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User2GlobalName", # this will be used for the message + "nickname": None, + }, + { + "discordId": "1113", + "username": "user3", # this will be used for the message + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": None, + "nickname": None, + }, + { + "discordId": "1116", + "username": "user6", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User6GlobalName", + "nickname": "User6NickName", + }, + { + "discordId": "1119", + "username": "user9", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + { + "discordId": "999", + "username": "community_manager", + "roles": [], + "joinedAt": datetime.now() - timedelta(days=10), + "avatar": None, + "isBot": False, + "discriminator": "0", + "permissions": "6677", + "deletedAt": None, + "globalName": "User9GlobalName", + "nickname": None, + }, + ] + ) + + triggers = [ + AutomationTrigger(options={"category": "all_new_disengaged"}, enabled=True), + AutomationTrigger(options={"category": "all_new_active"}, enabled=False), + ] + actions = [ + AutomationAction( + template="hey {{ngu}}! please get back to us!", + options={}, + enabled=True, + ), + AutomationAction( + template="hey {{ngu}}! please get back to us2!", + options={}, + enabled=False, + ), + ] + + report = AutomationReport( + recipientIds=["999"], + template="hey body! 
This users were messaged:\n{{#each usernames}}{{this}}{{/each}}", + options={}, + enabled=True, + ) + today_time = datetime.now() + + automation = Automation( + guild_id, + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + db_access.db_mongo_client[at_db][at_collection].insert_one(automation.to_dict()) + + date_yesterday = ( + (datetime.now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + date_two_past_days = ( + (datetime.now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0) + .strftime("%Y-%m-%dT%H:%M:%S") + ) + + db_access.db_mongo_client[guild_id]["memberactivities"].insert_many( + [ + { + "date": date_yesterday, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1111", "1112", "1113"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + { + "date": date_two_past_days, + "all_joined": [], + "all_joined_day": [], + "all_consistent": [], + "all_vital": [], + "all_active": [], + "all_connected": [], + "all_paused": [], + "all_new_disengaged": ["1116", "1119"], + "all_disengaged": [], + "all_unpaused": [], + "all_returned": [], + "all_new_active": [], + "all_still_active": [], + "all_dropped": [], + "all_disengaged_were_newly_active": [], + "all_disengaged_were_consistently_active": [], + "all_disengaged_were_vital": [], + "all_lurker": [], + "all_about_to_disengage": [], + "all_disengaged_in_past": [], + }, + ] + ) + + # preparing the data for publish_on_success function + mongo_creds = get_mongo_credentials() + user = mongo_creds["user"] + password = mongo_creds["password"] + host = mongo_creds["host"] + port = mongo_creds["port"] + connection_uri = f"mongodb://{user}:{password}@{host}:{port}" + mongo_creds = { + "connection_str": connection_uri, + "db_name": saga_db, + "collection_name": saga_collection, + } + + sample_args_data = ["sample", saga_id, mongo_creds] + publish_on_success(None, None, sample_args_data) + + notification_count = db_access.db_mongo_client[saga_db][ + saga_collection + ].count_documents({"choreography.name": "DISCORD_NOTIFY_USERS"}) + + assert notification_count == 4 + + user1_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + {"data.discordId": "1111"} + ) + assert user1_doc["data"]["message"] == ("hey User1NickName! please get back to us!") + + user2_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + {"data.discordId": "1112"} + ) + assert user2_doc["data"]["message"] == ( + "hey User2GlobalName! please get back to us!" + ) + + user3_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + {"data.discordId": "1113"} + ) + assert user3_doc["data"]["message"] == ("hey user3! please get back to us!") + + user_cm_doc = db_access.db_mongo_client[saga_db][saga_collection].find_one( + {"data.discordId": "999"} + ) + expected_msg = "hey body! 
This users were messaged:\n" + expected_msg += "- User1NickName\n- User2GlobalName\n- user3\n" + assert user_cm_doc["data"]["message"] == expected_msg diff --git a/tests/integration/test_rawinfo_webhook_fetching.py b/tests/integration/test_rawinfo_webhook_fetching.py new file mode 100644 index 0000000..681cff9 --- /dev/null +++ b/tests/integration/test_rawinfo_webhook_fetching.py @@ -0,0 +1,113 @@ +from datetime import datetime, timedelta + +from discord_analyzer.models.RawInfoModel import RawInfoModel +from utils.get_mongo_client import MongoSingleton + + +def test_rawinfo_get_day_entry_empty_data(): + """ + test rawinfo dailty data fetching with no data avaialbe + """ + guildId = "1234" + + mongo_singleton = MongoSingleton.get_instance() + client = mongo_singleton.get_client() + + client[guildId].drop_collection("rawinfos") + + rawinfo_model = RawInfoModel(client[guildId]) + + today = datetime.now() + data = rawinfo_model.get_day_entries(today) + + assert data == [] + + +def test_rawinfo_get_day_entry_data_avaialble(): + """ + test the rawinfo daily data fetching in case of some data available + """ + guildId = "1234" + + mongo_singleton = MongoSingleton.get_instance() + client = mongo_singleton.get_client() + + client[guildId].drop_collection("rawinfos") + + specific_midday = datetime(2023, 3, 3, 12) + + # generating rawinfo samples + rawinfo_samples = [ + { + "type": 19, + "author": "user1", + "content": "test_message", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": "user3", + "createdDate": (specific_midday - timedelta(hours=1)), + "messageId": "222222", + "channelId": "1115555666777889", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + }, + { + "type": 19, + "author": "This is a test!", + "content": "test_message", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": "user3", + "createdDate": (specific_midday - timedelta(hours=2)), + "messageId": "222223", + "channelId": "1115555666777889", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": True, + }, + { + "type": 19, + "author": "This is a test!", + "content": "test_message", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": "user3", + "createdDate": (specific_midday - timedelta(hours=3)), + "messageId": "222224", + "channelId": "1115555666777889", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": True, + }, + { + "type": 19, + "author": "Hello", + "content": "test_message", + "user_mentions": [], + "role_mentions": [], + "reactions": [], + "replied_user": "user3", + "createdDate": (specific_midday - timedelta(hours=4)), + "messageId": "222225", + "channelId": "1115555666777889", + "channelName": "general", + "threadId": None, + "threadName": None, + "isGeneratedByWebhook": False, + }, + ] + + client[guildId]["rawinfos"].insert_many(rawinfo_samples) + + rawinfo_model = RawInfoModel(client[guildId]) + + data = rawinfo_model.get_day_entries(specific_midday) + + assert len(data) == 2 diff --git a/tests/unit/test_automation_class.py b/tests/unit/test_automation_class.py new file mode 100644 index 0000000..b7e02d4 --- /dev/null +++ b/tests/unit/test_automation_class.py @@ -0,0 +1,263 @@ +import unittest +from datetime import datetime + +from automation.utils.interfaces import ( + Automation, + AutomationAction, + AutomationReport, + AutomationTrigger, +) + + +class 
TestAutomation(unittest.TestCase): + def test_automation(self): + triggers = [ + AutomationTrigger( + options={"category": "all_new_disengaged"}, enabled=False + ), + AutomationTrigger(options={"category": "all_new_active"}, enabled=True), + ] + actions = [ + AutomationAction( + template="hey {{username}}! please get back to us!", + options={}, + enabled=True, + ), + AutomationAction( + template="hey {{username}}! please get back to us2!", + options={}, + enabled=False, + ), + ] + + report = AutomationReport( + recipientIds=["111"], + template="hey {{username}}, this is a report!", + options={}, + enabled=True, + ) + today_time = datetime.now() + + automation = Automation( + "123", + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + self.assertIsInstance(automation.triggers[0], AutomationTrigger) + self.assertEqual( + automation.triggers[0].options, + {"category": "all_new_disengaged"}, + ) + self.assertEqual( + automation.triggers[0].enabled, + False, + ) + + self.assertIsInstance(automation.triggers[1], AutomationTrigger) + self.assertEqual( + automation.triggers[1].options, + {"category": "all_new_active"}, + ) + self.assertEqual( + automation.triggers[1].enabled, + True, + ) + + self.assertIsInstance(automation.actions[0], AutomationAction) + self.assertEqual( + automation.actions[0].template, + "hey {{username}}! please get back to us!", + ) + self.assertEqual( + automation.actions[0].options, + {}, + ) + self.assertEqual( + automation.actions[0].enabled, + True, + ) + + self.assertIsInstance(automation.actions[1], AutomationAction) + self.assertEqual( + automation.actions[1].template, + "hey {{username}}! please get back to us2!", + ) + self.assertEqual( + automation.actions[1].options, + {}, + ) + self.assertEqual( + automation.actions[1].enabled, + False, + ) + self.assertIsInstance(automation.id, str) + + def test_to_dict(self): + triggers = [ + AutomationTrigger( + options={"category": "all_new_disengaged"}, enabled=False + ), + AutomationTrigger(options={"category": "all_new_active"}, enabled=True), + ] + actions = [ + AutomationAction( + template="hey {{username}}! please get back to us!", + options={}, + enabled=True, + ), + AutomationAction( + template="hey {{username}}! please get back to us2!", + options={}, + enabled=False, + ), + ] + + report = AutomationReport( + recipientIds=["111"], + template="hey {{username}}, this is a report!", + options={}, + enabled=True, + ) + + today_time = datetime.now() + automation = Automation( + "123", + triggers, + actions, + report, + enabled=True, + createdAt=today_time, + updatedAt=today_time, + ) + + automation_dict = automation.to_dict() + + self.assertEqual(automation_dict["guildId"], "123") + + self.assertEqual( + automation_dict["triggers"], + [ + {"options": {"category": "all_new_disengaged"}, "enabled": False}, + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + ) + + self.assertEqual( + automation_dict["actions"], + [ + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! 
please get back to us2!", + "options": {}, + "enabled": False, + }, + ], + ) + + self.assertEqual( + automation_dict["report"], + { + "recipientIds": ["111"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + ) + self.assertEqual(automation_dict["enabled"], True) + self.assertEqual(automation_dict["createdAt"], today_time) + self.assertEqual(automation_dict["updatedAt"], today_time) + self.assertIsInstance(automation_dict["id"], str) + + def test_from_dict(self): + today_time = datetime.now() + automation_dict = { + "guildId": "123", + "triggers": [ + {"options": {"category": "all_new_disengaged"}, "enabled": False}, + {"options": {"category": "all_new_active"}, "enabled": True}, + ], + "actions": [ + { + "template": "hey {{username}}! please get back to us!", + "options": {}, + "enabled": True, + }, + { + "template": "hey {{username}}! please get back to us2!", + "options": {}, + "enabled": False, + }, + ], + "report": { + "recipientIds": ["111"], + "template": "hey {{username}}, this is a report!", + "options": {}, + "enabled": True, + }, + "enabled": False, + "createdAt": today_time, + "updatedAt": today_time, + "id": "uiasdbfjiasn12e237878h", + } + + automation = Automation.from_dict(automation_dict) + + self.assertIsInstance(automation.triggers[0], AutomationTrigger) + self.assertEqual( + automation.triggers[0].options, + {"category": "all_new_disengaged"}, + ) + self.assertEqual( + automation.triggers[0].enabled, + False, + ) + + self.assertIsInstance(automation.triggers[1], AutomationTrigger) + self.assertEqual( + automation.triggers[1].options, + {"category": "all_new_active"}, + ) + self.assertEqual( + automation.triggers[1].enabled, + True, + ) + + self.assertIsInstance(automation.actions[0], AutomationAction) + self.assertEqual( + automation.actions[0].template, + "hey {{username}}! please get back to us!", + ) + self.assertEqual( + automation.actions[0].options, + {}, + ) + self.assertEqual( + automation.actions[0].enabled, + True, + ) + + self.assertIsInstance(automation.actions[1], AutomationAction) + self.assertEqual( + automation.actions[1].template, + "hey {{username}}! please get back to us2!", + ) + self.assertEqual( + automation.actions[1].options, + {}, + ) + self.assertEqual( + automation.actions[1].enabled, + False, + ) + self.assertEqual(automation.createdAt, today_time) + self.assertEqual(automation.updatedAt, today_time) + self.assertEqual(automation.id, "uiasdbfjiasn12e237878h") diff --git a/tests/unit/test_automation_get_handler_type.py b/tests/unit/test_automation_get_handler_type.py new file mode 100644 index 0000000..ccbd8bd --- /dev/null +++ b/tests/unit/test_automation_get_handler_type.py @@ -0,0 +1,31 @@ +import unittest + +from automation.automation_workflow import AutomationWorkflow + + +class TestGetHandlerType(unittest.TestCase): + def test_empty_string(self): + template = "" + at_workflow = AutomationWorkflow() + type = at_workflow._get_handlebar_type(template) + + self.assertEqual(type, None) + + def test_sample_string(self): + template = "Hi {{username}}!" + + at_workflow = AutomationWorkflow() + type = at_workflow._get_handlebar_type(template) + + self.assertEqual(type, "username") + + def test_sample_string_multiple_handlebar(self): + """ + for now we're supporting have the only first handlebar + """ + template = "Hello {{username}} and {{nickname}}!" 
+ + at_workflow = AutomationWorkflow() + type = at_workflow._get_handlebar_type(template) + + self.assertEqual(type, "username") diff --git a/tests/unit/test_automation_trigger_action_report.py b/tests/unit/test_automation_trigger_action_report.py new file mode 100644 index 0000000..4f2ec9f --- /dev/null +++ b/tests/unit/test_automation_trigger_action_report.py @@ -0,0 +1,69 @@ +import unittest + +from automation.utils.interfaces import ( + AutomationAction, + AutomationReport, + AutomationTrigger, +) + + +class TestAutomationTrigger(unittest.TestCase): + def test_automation_trigger_trigger(self): + at_trigger = AutomationTrigger( + options={"category": "all_new_disengaged"}, enabled=True + ) + + self.assertEqual(at_trigger.options, {"category": "all_new_disengaged"}) + self.assertEqual(at_trigger.enabled, True) + + +class TestAutomationAction(unittest.TestCase): + def test_automation_action_true_enabled(self): + at_action = AutomationAction( + template="hey {{username}}! please get back to us!", + options={}, + enabled=True, + ) + + self.assertEqual(at_action.template, "hey {{username}}! please get back to us!") + self.assertEqual(at_action.options, {}) + self.assertEqual(at_action.enabled, True) + + def test_automation_action_false_enabled(self): + at_action = AutomationAction( + template="hey {{username}}! please get back to us!", + options={}, + enabled=False, + ) + + self.assertEqual(at_action.template, "hey {{username}}! please get back to us!") + self.assertEqual(at_action.options, {}) + self.assertEqual(at_action.enabled, False) + + +class TestAutomationReport(unittest.TestCase): + def test_automation_report_true_enabled(self): + at_report = AutomationReport( + recipientIds=["111"], + template="hey {{username}}, this is a report!", + options={}, + enabled=True, + ) + + self.assertEqual(at_report.recipientIds, ["111"]) + self.assertEqual(at_report.template, "hey {{username}}, this is a report!") + self.assertEqual(at_report.options, {}) + self.assertEqual(at_report.enabled, True) + + def test_automation_report_false_enabled(self): + at_report = AutomationReport( + recipientIds=["111", "112"], + template="hey {{username}}, this is a report!", + options={}, + enabled=False, + ) + + self.assertEqual(at_report.recipientIds, ["111", "112"]) + self.assertEqual(at_report.template, "hey {{username}}, this is a report!") + self.assertEqual(at_report.options, {}) + self.assertEqual(at_report.enabled, False) diff --git a/tests/unit/test_compile_message.py b/tests/unit/test_compile_message.py new file mode 100644 index 0000000..57dae41 --- /dev/null +++ b/tests/unit/test_compile_message.py @@ -0,0 +1,36 @@ +import unittest + +from automation.automation_workflow import AutomationWorkflow + + +class CompileTemplateMessage(unittest.TestCase): + def test_action_compile_message(self): + """ + test the templates that could be found in actions + """ + template = "Hi {{username}}!"
+ user_name = "user1" + + at_workflow = AutomationWorkflow() + type = at_workflow._get_handlebar_type(template) + compiled_message = at_workflow._compile_message( + data={type: user_name}, message=template + ) + + self.assertEqual(compiled_message, "Hi user1!") + + def test_report_compile_message(self): + """ + test the templates that could be found in reports + """ + template = "This users were messaged\n{{#each usernames}}{{this}}{{/each}}" + user_names = ["user1", "user2"] + + at_workflow = AutomationWorkflow() + compiled_message = at_workflow._prepare_report_compiled_message( + user_names, template + ) + + expected_message = "This users were messaged\n" + expected_message += "- user1\n- user2\n" + self.assertEqual(compiled_message, expected_message) diff --git a/tests/unit/test_engagement_notifier_subtract_users.py b/tests/unit/test_engagement_notifier_subtract_users.py new file mode 100644 index 0000000..8140903 --- /dev/null +++ b/tests/unit/test_engagement_notifier_subtract_users.py @@ -0,0 +1,87 @@ +from automation.utils.automation_base import AutomationBase + + +def test_subtract_users_empty_data(): + """ + in case of no data for days ago + """ + automation_base = AutomationBase() + + users1 = [] + users2 = [] + + users = automation_base._subtract_users(users1, users2) + + assert users == set([]) + + +def test_subtract_users_some_data_past_two_days(): + """ + in case of having some data for two days ago + """ + automation_base = AutomationBase() + + users1 = [] + users2 = ["user2", "user3"] + + users = automation_base._subtract_users(users1, users2) + + assert users == set([]) + + +def test_subtract_users_one_user_yesterday(): + """ + in case of having one users for yesterday + """ + automation_base = AutomationBase() + + users1 = ["user2"] + users2 = [] + + users = automation_base._subtract_users(users1, users2) + + assert users == set(["user2"]) + + +def test_subtract_users_multiple_users_yesterday(): + """ + in case of having multiple users for yesterday + """ + automation_base = AutomationBase() + + users1 = ["user2", "user3", "user4"] + users2 = [] + + users = automation_base._subtract_users(users1, users2) + + assert users == set(["user2", "user3", "user4"]) + + +def test_subtract_users_multiple_users_non_overlapping_both_days(): + """ + in case of having multiple users for + both yesterday and two days ago but non overlapping users + """ + automation_base = AutomationBase() + + users1 = ["user2", "user3", "user4"] + users2 = ["user6", "user7"] + + users = automation_base._subtract_users(users1, users2) + + assert users == set(["user2", "user3", "user4"]) + + +def test_subtract_users_multiple_users_overlapping_both_days(): + """ + in case of having multiple users for + both yesterday and two days ago with overlapping users + """ + automation_base = AutomationBase() + + users1 = ["user2", "user3", "user4"] + users2 = ["user2", "user7"] + + users = automation_base._subtract_users(users1, users2) + + assert users == set(["user3", "user4"]) diff --git a/tests/unit/test_mongo_singleton.py b/tests/unit/test_mongo_singleton.py new file mode 100644 index 0000000..518c02e --- /dev/null +++ b/tests/unit/test_mongo_singleton.py @@ -0,0 +1,16 @@ +import unittest + +from pymongo import MongoClient +from utils.get_mongo_client import MongoSingleton + + +class TestMongoSingleton(unittest.TestCase): + def test_singleton_instance(self): + mongo_singleton_1 = MongoSingleton.get_instance() + mongo_singleton_2 = MongoSingleton.get_instance() + self.assertEqual(mongo_singleton_1, 
mongo_singleton_2) + + def test_mongo_client(self): + mongo_singleton = MongoSingleton.get_instance() + mongo_client = mongo_singleton.get_client() + self.assertIsInstance(mongo_client, MongoClient) diff --git a/utils/get_automation_env.py b/utils/get_automation_env.py new file mode 100644 index 0000000..6e5041d --- /dev/null +++ b/utils/get_automation_env.py @@ -0,0 +1,21 @@ +import os + +from dotenv import load_dotenv + + +def get_automations_env() -> dict[str, str]: + """ + get the automations env variables + + Returns + --------- + env_vars : dict[str, str] + the environment variables for the automation service + the keys are `DB_NAME` and `COLLECTION_NAME` + and the values are read from the environment + """ + load_dotenv() + db_name = os.getenv("AUTOMATION_DB_NAME", "") + collection_name = os.getenv("AUTOMATION_DB_COLLECTION", "") + + return {"DB_NAME": db_name, "COLLECTION_NAME": collection_name} diff --git a/utils/get_mongo_client.py b/utils/get_mongo_client.py new file mode 100644 index 0000000..450f1c7 --- /dev/null +++ b/utils/get_mongo_client.py @@ -0,0 +1,37 @@ +from typing import Any + +from pymongo import MongoClient +from utils.daolytics_uitls import get_mongo_credentials + + +class MongoSingleton: + __instance = None + + def __init__(self): + if MongoSingleton.__instance is not None: + raise Exception("This class is a singleton!") + else: + creds = get_mongo_credentials() + connection_uri = config_mongo_creds(creds) + self.client = MongoClient(connection_uri) + MongoSingleton.__instance = self + + @staticmethod + def get_instance(): + if MongoSingleton.__instance is None: + MongoSingleton() + return MongoSingleton.__instance + + def get_client(self): + return self.client + + +def config_mongo_creds(mongo_creds: dict[str, Any]): + user = mongo_creds["user"] + password = mongo_creds["password"] + host = mongo_creds["host"] + port = mongo_creds["port"] + + connection = f"mongodb://{user}:{password}@{host}:{port}" + + return connection diff --git a/utils/get_rabbitmq.py b/utils/get_rabbitmq.py index 71a5f22..3a34727 100644 --- a/utils/get_rabbitmq.py +++ b/utils/get_rabbitmq.py @@ -1,8 +1,18 @@ from tc_messageBroker import RabbitMQ from tc_messageBroker.rabbit_mq.queue import Queue +from utils.daolytics_uitls import get_rabbit_mq_credentials -def prepare_rabbit_mq(rabbit_creds): +def prepare_rabbit_mq(): + """ + Prepare a connection to RabbitMQ + + Returns: + ---------- + rabbitmq : tc_messageBroker.RabbitMQ + an instance connected to the broker + """ + rabbit_creds = get_rabbit_mq_credentials() rabbitmq = RabbitMQ( broker_url=rabbit_creds["broker_url"], port=rabbit_creds["port"], diff --git a/worker.py b/worker.py index 5920945..01fae69 100644 --- a/worker.py +++ b/worker.py @@ -1,11 +1,8 @@ import logging -import os import redis -from dotenv import load_dotenv from rq import Worker - -load_dotenv() +from utils.daolytics_uitls import get_redis_credentials def worker_exception_handler(job, exc_type, exc_value, traceback): @@ -17,18 +14,18 @@ if __name__ == "__main__": - host = os.getenv("REDIS_HOST", "") - port = int(os.getenv("REDIS_PORT", 6379)) - password = os.getenv("REDIS_PASSWORD") + redis_creds = get_redis_credentials() logging.basicConfig() logging.getLogger().setLevel(logging.INFO) - redis_connection = redis.Redis(host=host, port=port, password=password) + host = redis_creds["host"] + port = redis_creds["port"] + password = redis_creds["pass"] + + r = redis.Redis(host=host, port=port, password=password) worker = Worker( -
queues=["default"], - connection=redis_connection, - exception_handlers=worker_exception_handler, + queues=["default"], connection=r, exception_handlers=worker_exception_handler ) try: worker.work(with_scheduler=True, max_jobs=1)
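Reviewer note (not part of the diff): test_publish_on_success above calls publish_on_success(None, None, sample_args_data) directly, which mirrors the (job, connection, result) signature that RQ uses for on_success callbacks. The sketch below shows one plausible way the callback would be wired when the analyzer job is enqueued; the job function name, its return value, and the Redis connection details are illustrative assumptions, not code taken from this repository.

import redis
from rq import Queue

from discord_utils import publish_on_success


def run_analyzer(guild_id: str, saga_id: str, mongo_creds: dict) -> list:
    # hypothetical job body; only the return value matters here, since it is
    # what RQ passes to publish_on_success as the third ("result") argument
    return ["sample", saga_id, mongo_creds]


# assumed local broker purely for illustration
redis_connection = redis.Redis(host="localhost", port=6379)
queue = Queue("default", connection=redis_connection)

# RQ invokes on_success callbacks as callback(job, connection, result),
# which is why the integration test calls publish_on_success(None, None, args)
queue.enqueue(
    run_analyzer,
    args=("1234", "000000011111113333377777ie0w", {"connection_str": "..."}),
    on_success=publish_on_success,
)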