Merge pull request #133 from satisfactorymodding/dev
2.22.1
Borketh authored Sep 10, 2024
2 parents f95180c + bf179f0 commit 23888aa
Showing 5 changed files with 284 additions and 319 deletions.
84 changes: 53 additions & 31 deletions fred/cogs/crashes.py
@@ -10,7 +10,10 @@
from urllib.parse import urlparse
from zipfile import ZipFile

import regex
import re2

re2.set_fallback_notification(re2.FALLBACK_WARNING)

from PIL import Image, ImageEnhance
from aiohttp import ClientResponseError
from attr import dataclass
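Context for the new import: pyre2 mirrors the stdlib re API on top of Google's RE2 engine, which matches in linear time but cannot compile some constructs (for example the lookbehinds used later in this file); for those it falls back to Python's own engine. With FALLBACK_WARNING set, each fallback emits a warning instead of happening silently. An illustrative sketch of that behaviour, not part of the diff, assuming the pyre2 package is importable as re2:

import re2

# Warn, rather than stay silent, whenever a pattern falls back to the stdlib engine.
re2.set_fallback_notification(re2.FALLBACK_WARNING)

# RE2 compiles this: linear-time scan, no backtracking.
re2.search(r"Net CL: (?P<game_version>\d+)", "Net CL: 366202")

# RE2 has no lookbehind support, so this pattern falls back and warns.
re2.search(r"(?<=v\.)[\d.]+", "SML v.3.7.0")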
@@ -23,23 +26,26 @@
from ..libraries.common import FredCog, new_logger
from ..libraries.createembed import CrashResponse

REGEX_LIMIT: float = 6.9
REGEX_LIMIT: float = 0.002
DOWNLOAD_SIZE_LIMIT = 104857600 # 100 MiB

EMOJI_CRASHES_ANALYZING = "<:FredAnalyzingFile:1283182945019891712>"
EMOJI_CRASHES_TIMEOUT = "<:FredAnalyzingTimedOut:1283183010967195730>"

logger = new_logger(__name__)


async def regex_with_timeout(*args, **kwargs):
try:
return await asyncio.wait_for(asyncio.to_thread(regex.search, *args, **kwargs), REGEX_LIMIT)
return await asyncio.wait_for(asyncio.to_thread(re2.search, *args, **kwargs), REGEX_LIMIT)
except asyncio.TimeoutError:
raise TimeoutError(
f"A regex timed out after {REGEX_LIMIT} seconds! \n"
f"pattern: ({args[0]}) \n"
f"flags: {kwargs['flags']} \n"
f"on text of length {len(args[1])}"
)
except re2.RegexError as e:
raise ValueError(args[0]) from e
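The much tighter REGEX_LIMIT (0.002 s, down from 6.9 s) follows from the engine swap: RE2 matches in linear time, so a couple of milliseconds is a generous budget, and anything slower points at a runaway fallback pattern. A usage sketch of the wrapper above, not part of the diff, with an illustrative pattern and text; note that asyncio.wait_for abandons the await on timeout but cannot kill the worker thread itself:

async def demo() -> None:
    try:
        match = await regex_with_timeout(r"Net CL: (?P<cl>\d+)", "LogInit: Net CL: 366202", flags=re2.IGNORECASE)
        if match:
            print(match.group("cl"))
    except TimeoutError as slow:
        print(f"over budget: {slow}")  # message carries the pattern, flags and text length
    except ValueError as bad_pattern:
        print(f"RE2 rejected the pattern: {bad_pattern}")

asyncio.run(demo())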


class Crashes(FredCog):
@@ -169,7 +175,7 @@ def formatted_chunks_of_100_mod_references() -> Generator[str, None, None]:

async def mass_regex(self, text: str) -> AsyncIterator[CrashResponse]:
for crash in config.Crashes.fetch_all():
if match := await regex_with_timeout(crash["crash"], text, flags=regex.IGNORECASE | regex.S):
if match := await regex_with_timeout(crash["crash"], text, flags=re2.IGNORECASE | re2.S):
if str(crash["response"]).startswith(self.bot.command_prefix):
if command := config.Commands.fetch(crash["response"].strip(self.bot.command_prefix)):
command_response = command["content"]
@@ -182,13 +188,13 @@ async def mass_regex(self, text: str) -> AsyncIterator[CrashResponse]:
inline=True,
)
else:
response = regex.sub(r"{(\d+)}", lambda m: match.group(int(m.group(1))), str(crash["response"]))
response = re2.sub(r"{(\d+)}", lambda m: match.group(int(m.group(1))), str(crash["response"]))
yield CrashResponse(name=crash["name"], value=response, inline=True)
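The re2.sub above back-fills numbered placeholders in a stored response with capture groups from the crash match, so a template like "{1} needs {2}" picks up groups 1 and 2 of whatever pattern matched. A reduced sketch, not part of the diff, with a made-up pattern and template:

import re2

match = re2.search(r"Mod (\w+) requires (\d+)", "Mod FicsIt requires 366202")
template = "{1} needs game CL {2} or newer."
print(re2.sub(r"{(\d+)}", lambda m: match.group(int(m.group(1))), template))
# -> FicsIt needs game CL 366202 or newer.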

async def detect_and_fetch_pastebin_content(self, text: str) -> str:
if match := regex.search(r"(https://pastebin.com/\S+)", text):
if match := re2.search(r"(https://pastebin.com/\S+)", text):
self.logger.info("Found a pastebin link! Fetching text.")
url = regex.sub(r"(?<=bin.com)/", "/raw/", match.group(1))
url = re2.sub(r"(?<=bin.com)/", "/raw/", match.group(1))
async with self.bot.web_session.get(url) as response:
return await response.text()
else:
@@ -226,12 +232,12 @@ async def process_image(self, file: IO) -> list[CrashResponse]:
self.logger.exception(oops)
return []

def _create_debug_messages(self, debug_zip: ZipFile) -> Optional[CrashJob]:
def _create_debug_messages(self, debug_zip: ZipFile, filename: str) -> Optional[CrashJob]:
files = debug_zip.namelist()
info: Optional[InstallInfo] = None
if "metadata.json" in files:
with debug_zip.open("metadata.json") as f:
info = InstallInfo.from_metadata_json(f)
info = InstallInfo.from_metadata_json(f, filename)

if info is None:
return
@@ -250,7 +256,7 @@ def _get_file_jobs(self, filename: str, file: IO) -> CrashJobGenerator:
case "zip":
self.logger.info(f"Adding jobs from zip file {filename}")
zip_file = ZipFile(file)
if res := self._create_debug_messages(zip_file):
if res := self._create_debug_messages(zip_file, filename):
yield res
for zipped_item_filename in zip_file.namelist():
with zip_file.open(zipped_item_filename) as zip_item:
@@ -273,7 +279,7 @@ def _ext_filter(ext: str) -> bool:
return ext in ("png", "log", "txt", "zip", "json")

async def _obtain_attachments(self, message: Message) -> AsyncGenerator[tuple[str, IO | Exception], None]:
cdn_links = regex.findall(r"(https://cdn.discordapp.com/attachments/\S+)", message.content)
cdn_links = re2.findall(r"(https://cdn.discordapp.com/attachments/\S+)", message.content)

yield bool(cdn_links or message.attachments)

@@ -322,7 +328,7 @@ async def process_message(self, message: Message) -> bool:
# get the first yield, which is just whether there's anything to do
if there_were_files := await file_getter.asend(None):
self.logger.info("Indicating interest in message")
await message.add_reaction("👀")
await message.add_reaction(EMOJI_CRASHES_ANALYZING)

responses: list[CrashResponse] = []
files: list[IO] = []
@@ -346,36 +352,49 @@ async def process_message(self, message: Message) -> bool:
files.append(file)
jobs.extend((task_group.create_task(job) for job in self._get_file_jobs(name, file)))
except ExceptionGroup as eg:
raise eg.exceptions[0]
for ex in eg.exceptions:
if isinstance(ex, TimeoutError):
self.logger.exception(ex)
await message.remove_reaction(EMOJI_CRASHES_ANALYZING, self.bot.user)
await message.add_reaction(EMOJI_CRASHES_TIMEOUT)
for j in jobs:
j.cancel()
else:
raise ex

self.logger.info("Collecting job results")
for job in jobs:
responses.extend(job.result())

self.logger.info("Closing files")
for file in files:
file.close()
if files:
self.logger.info("Closing files")
for file in files:
file.close()

if there_were_files:
self.logger.info("Removing reaction")
await message.remove_reaction("👀", self.bot.user)

if responses:
if len(responses) == 1:
if filtered_responses := list(set(responses)): # remove dupes

if len(filtered_responses) == 1:
self.logger.info("Found only one response to message, sending.")
await self.bot.reply_to_msg(
message,
f"{responses[0].value}\n-# Responding to `{responses[0].name}` triggered by {message.author.mention}",
f"{filtered_responses[0].value}\n-# Responding to `{filtered_responses[0].name}` triggered by {message.author.mention}",
)

else:

self.logger.info("Found responses to message, sending.")
embed = createembed.crashes(responses)
embed = createembed.crashes(filtered_responses)
embed.set_author(
name=f"Automated responses for {message.author.global_name or message.author.display_name} ({message.author.id})",
icon_url=message.author.avatar.url,
)
await self.bot.reply_to_msg(message, embed=embed)
return True

else:
self.logger.info("No responses to message, skipping.")
return False
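The rewritten except block no longer re-raises only the first exception in the group: each TimeoutError is logged, the analysing reaction is swapped for the timeout emoji, and outstanding jobs are cancelled, while any other exception type still propagates. A standalone sketch of that TaskGroup pattern (Python 3.11+), not part of the diff, with a placeholder job:

import asyncio

async def slow_job() -> str:
    return await asyncio.wait_for(asyncio.sleep(10, result="done"), timeout=0.01)

async def main() -> None:
    jobs: list[asyncio.Task] = []
    try:
        async with asyncio.TaskGroup() as task_group:
            jobs.append(task_group.create_task(slow_job()))
    except ExceptionGroup as eg:
        for ex in eg.exceptions:
            if isinstance(ex, TimeoutError):
                print(f"job timed out: {ex!r}")
                for j in jobs:
                    j.cancel()  # no-op for tasks the group has already finalised
            else:
                raise ex

asyncio.run(main())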
@@ -390,6 +409,7 @@ class InstallInfo:

type InstalledMods = dict[str, str] # key: mod reference, value: mod version

filename: str
game_version: str = ""
game_type: str = ""
smm_version: str = ""
@@ -401,7 +421,7 @@ class InstallInfo:
mismatches: list[str] = []

@classmethod
def from_metadata_json(cls: Type[InstallInfo], file: IO[bytes]) -> Optional[InstallInfo]:
def from_metadata_json(cls: Type[InstallInfo], file: IO[bytes], filename: str) -> Optional[InstallInfo]:
metadata: dict = json.load(file)
match metadata:
case {
@@ -418,6 +438,7 @@ def from_metadata_json(cls: Type[InstallInfo], file: IO[bytes]) -> Optional[Inst
# if there is no install everything can default to None
selected_installation = selected_installation or {}
return cls(
filename,
smm_version=smm_version,
sml_version=sml_version or "",
game_version=selected_installation.get("version", ""),
@@ -441,6 +462,7 @@ def from_metadata_json(cls: Type[InstallInfo], file: IO[bytes]) -> Optional[Inst
# if there is no install everything can default to None
selected_installation = selected_installation or {}
return cls(
filename,
smm_version=smm_version,
sml_version=sml_version or "",
game_version=selected_installation.get("version", ""),
@@ -495,14 +517,14 @@ def update_from_fg_log(self, log_file: IO[bytes]):
def _get_fg_log_details(log_file: IO[bytes]):
lines = log_file.readlines()

vanilla_info_search_area = filter(lambda l: regex.match("^LogInit", str(l)), lines)
vanilla_info_search_area = filter(lambda l: re2.match("^LogInit", str(l)), lines)

info = {}
patterns = [
regex.compile(r"Net CL: (?P<game_version>\d+)"),
regex.compile(r"Command Line: (?P<cli>.*)"),
regex.compile(r"Base Directory: (?P<path>.+)"),
regex.compile(r"Launcher ID: (?P<launcher>\w+)"),
re2.compile(r"Net CL: (?P<game_version>\d+)"),
re2.compile(r"Command Line: (?P<cli>.*)"),
re2.compile(r"Base Directory: (?P<path>.+)"),
re2.compile(r"Launcher ID: (?P<launcher>\w+)"),
]

# This loop sequentially finds information,
@@ -513,15 +535,15 @@ def _get_fg_log_details(log_file: IO[bytes]):
for line in vanilla_info_search_area:
if not patterns:
break
elif match := regex.search(patterns[0], line):
elif match := re2.search(patterns[0], line):
info |= match.groupdict()
patterns.pop(0)
else:
logger.info("Didn't find all four pieces of information normally found in a log")

mod_loader_logs = filter(lambda l: regex.match("LogSatisfactoryModLoader", str(l)), lines)
mod_loader_logs = filter(lambda l: re2.match("LogSatisfactoryModLoader", str(l)), lines)
for line in mod_loader_logs:
if match := regex.search(r"(?<=v\.)(?P<sml>[\d.]+)", line):
if match := re2.search(r"(?<=v\.)(?P<sml>[\d.]+)", line):
info |= match.groupdict()
break

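The loop above exploits the fixed order of these LogInit lines: it only ever tests the head of the pattern list and pops it after a hit, giving one linear pass instead of one pass per pattern. A reduced sketch of the technique, not part of the diff, with fabricated log lines:

import re2

lines = [
    "LogInit: Net CL: 366202",
    "LogInit: Command Line: -EpicPortal",
    "LogInit: Base Directory: C:/Satisfactory/",
    "LogInit: Launcher ID: epic",
]
patterns = [
    re2.compile(r"Net CL: (?P<game_version>\d+)"),
    re2.compile(r"Command Line: (?P<cli>.*)"),
    re2.compile(r"Base Directory: (?P<path>.+)"),
    re2.compile(r"Launcher ID: (?P<launcher>\w+)"),
]

info: dict[str, str] = {}
for line in lines:
    if not patterns:
        break
    if match := patterns[0].search(line):
        info |= match.groupdict()
        patterns.pop(0)

print(info)  # all four keys when the log is complete and ordered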
@@ -562,6 +584,6 @@ def _version_info(self) -> str:

def format(self) -> CrashResponse:
return CrashResponse(
name="Key Details",
name=f"Key Details for {self.filename}",
value=self._version_info(),
)
2 changes: 1 addition & 1 deletion fred/fred.py
@@ -18,7 +18,7 @@
from .fred_commands import Commands, FredHelpEmbed
from .libraries import createembed, common

__version__ = "2.22.0"
__version__ = "2.22.1"


class Bot(commands.Bot):
10 changes: 5 additions & 5 deletions fred/libraries/createembed.py
@@ -395,7 +395,7 @@ async def mod_embed(name: str, bot: Bot) -> tuple[nextcord.Embed | None, nextcor
return embed, file, multiple_mods


@dataclass
@dataclass(unsafe_hash=True)
class CrashResponse:

name: str
@@ -407,12 +407,12 @@ def add_self_as_field(self, embed: nextcord.Embed):
logger.debug(self.value)
embed.add_field(name=self.name, value=self.value, inline=self.inline)

def __hash__(self):
return hash(self.name)


def crashes(responses: list[CrashResponse]) -> nextcord.Embed:
embed = nextcord.Embed(
# title=f"{len(responses)} automated responses found: ",
colour=config.ActionColours.fetch("Purple")
)
embed = nextcord.Embed(colour=config.ActionColours.fetch("Purple"))
# sort the responses by size, so they display in a more efficient order
responses = sorted(responses, key=lambda r: len(r.value), reverse=True) # smaller = less important, can be cut

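These two changes work together with the new list(set(responses)) dedupe in crashes.py: hashing by name buckets duplicate responses together, while the generated __eq__ still compares all fields, so only fully identical responses collapse. A small illustration, not part of the diff, assuming the explicit name-based __hash__ is the one in effect:

a = CrashResponse(name="OutdatedSML", value="Update SML.", inline=True)
b = CrashResponse(name="OutdatedSML", value="Update SML.", inline=True)
c = CrashResponse(name="VulkanCrash", value="Switch to DX12.", inline=True)

assert hash(a) == hash(b)          # same name, same hash bucket
assert len(list({a, b, c})) == 2   # a and b match in every field, so the set keeps one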
