From d27f0361f7f1304b0911f623a39bfe5aaffc2016 Mon Sep 17 00:00:00 2001 From: JMousqueton Date: Thu, 8 Aug 2024 18:39:33 +0000 Subject: [PATCH] nightly build --- .github/DOC.md | 197 ++++++++++++++++++++++++++++++ .github/TODO.md | 12 ++ docs/index.html | 1 - exceptions.lst | 3 +- libs/generatesite.py | 162 +++++++++++++++++++++++- libs/graph.py | 94 +++++++++++++- libs/mystripe.py | 173 ++++++++++++++++++++++++++ libs/ransomwarelive.py | 3 +- parsers/alphalocker.py | 2 +- parsers/blacksuit.py | 10 +- ransomwarelive.py => ransomcmd.py | 34 ++---- run.sh | 8 +- 12 files changed, 655 insertions(+), 44 deletions(-) create mode 100644 .github/DOC.md create mode 100644 .github/TODO.md create mode 100644 libs/mystripe.py rename ransomwarelive.py => ransomcmd.py (95%) diff --git a/.github/DOC.md b/.github/DOC.md new file mode 100644 index 0000000..9feb504 --- /dev/null +++ b/.github/DOC.md @@ -0,0 +1,197 @@ + +# Ransomware.live + +The ransomcmd.py is a comprehensive command-line tool designed to manage and monitor ransomware activities. It supports various functionalities including scraping ransomware DLS (Dark Leak Sites), parsing the collected data, generating reports and graphs, taking screenshots of ransomware sites, and more. The program is built with extensibility in mind, allowing for easy addition of new features and integration with existing tools and libraries.
+ +## Table of Contents + +- [Usage](#usage) +- [Dependencies](#dependencies) +- [Environment Variables](#environment-variables) +- [Author](#author) +- [Version](#version) +- [Commands](#commands) + - [scrape](#scrape) + - [parse](#parse) + - [generate](#generate) + - [screenshot](#screenshot) + - [status](#status) + - [search](#search) + - [rss](#rss) + - [infostealer](#infostealer) + - [tools](#tools) + - [duplicate](#duplicate) + - [order](#order) + - [blur](#blur) + - [add](#add) + - [append](#append) + +## Usage + +\`\`\`bash +python3 ransomcmd.py [options] +\`\`\` + +## Dependencies + +- Python 3.x +- Python packages: sys, os, asyncio, argparse, dotenv, hashlib, time, importlib, glob, datetime, atexit, tempfile, subprocess, re + +## Environment Variables + +Managed via a `.env` file, which includes configurations for directories, data files, etc. + +## Author + +Julien Mousqueton + +## Commands + +### scrape + +Scrape ransomware DLS sites. + +\`\`\`bash +python3 ransomcmd.py scrape [options] +\`\`\` + +Options: + +- `-F`, `--force`: Force scraping +- `-g`, `--group`: Specify a specific group to scrape + +### parse + +Parse ransomware DLS sites. + +\`\`\`bash +python3 ransomcmd.py parse [options] +\`\`\` + +Options: + +- `-g`, `--group`: Specify a specific group to parse + +### generate + +Generate Ransomware.live site. + +\`\`\`bash +python3 ransomcmd.py generate +\`\`\` + +### screenshot + +Generate screenshot for ransomware sites. + +\`\`\`bash +python3 ransomcmd.py screenshot [options] +\`\`\` + +Options: + +- `-g`, `--group`: Specify a specific group to screenshot +- `-u`, `--url`: Specify a specific URL to screenshot + +### status + +Show the status of ransomware.live. + +\`\`\`bash +python3 ransomcmd.py status +\`\`\` + +### search + +Search victim in the database. 
+ +\`\`\`bash +python3 ransomcmd.py search [options] +\`\`\` + +Options: + +- `-v`, `--victim`: Specify a victim name +- `-d`, `--domain`: Specify a domain name + +### rss + +Generate RSS feeds. + +\`\`\`bash +python3 ransomcmd.py rss +\`\`\` + +### infostealer + +Query Hudsonrock database. + +\`\`\`bash +python3 ransomcmd.py infostealer [options] +\`\`\` + +Options: + +- `-d`, `--domain`: Specify a victim domain + +### tools + +Tools for Ransomware.live. + +\`\`\`bash +python3 ransomcmd.py tools [options] +\`\`\` + +#### duplicate + +Remove duplicate source files. + +\`\`\`bash +python3 ransomcmd.py tools duplicate +\`\`\` + +#### order + +Order groups by alphabetic order. + +\`\`\`bash +python3 ransomcmd.py tools order +\`\`\` + +#### blur + +Blur a picture. + +\`\`\`bash +python3 ransomcmd.py tools blur [options] +\`\`\` + +Options: + +- `-f`, `--file`: Full path of the image to blur + +### add + +Add a new ransomware group. + +\`\`\`bash +python3 ransomcmd.py add [options] +\`\`\` + +Options: + +- `-n`, `--name`: Specify the ransomware group name +- `-l`, `--location`: Specify the ransomware group site + +### append + +Add a new ransomware site to an existing group. 
+ +\`\`\`bash +python3 ransomcmd.py append [options] +\`\`\` + +Options: + +- `-n`, `--name`: Specify the ransomware group name +- `-l`, `--location`: Specify the ransomware group site diff --git a/.github/TODO.md b/.github/TODO.md new file mode 100644 index 0000000..ad3bfd1 --- /dev/null +++ b/.github/TODO.md @@ -0,0 +1,12 @@ +# To-Do List + +## Migration to NG +- [ ] Remove all hardcoded variables in the code +- [ ] Integrate all external programs into ransomcmd.py + +## Documentation / Installation +- [ ] Write the documentation +- [ ] Write the installation procedure +- [ ] Code an install script + +## Enhancements diff --git a/docs/index.html b/docs/index.html index 634d1bb..7ce7406 100644 --- a/docs/index.html +++ b/docs/index.html @@ -116,5 +116,4 @@ - diff --git a/exceptions.lst b/exceptions.lst index 372e12b..23f08d7 100644 --- a/exceptions.lst +++ b/exceptions.lst @@ -8,4 +8,5 @@ Defaulters;malas HOW TO BUY DATA?;medusalocker skalar.com;medusalocker visitingphysiciansnetwork;threeam -IDF SMS system;malekteam \ No newline at end of file +IDF SMS system;malekteam +Anyone.. 
Who need some bags?;donutleaks \ No newline at end of file diff --git a/libs/generatesite.py b/libs/generatesite.py index 2b8a0ef..1568d70 100644 --- a/libs/generatesite.py +++ b/libs/generatesite.py @@ -4,12 +4,12 @@ from dotenv import load_dotenv import logging import fnmatch -from collections import Counter +from collections import Counter, defaultdict import urllib.parse from urllib.parse import urlparse import xml.etree.ElementTree as ET #import datetime - +import pandas as pd from ransomwarelive import stdlog, errlog, openjson @@ -159,6 +159,24 @@ def recentdiscoveredposts(top): stdlog('recent posts generated') return recentposts +def recentattackedposts(top): + ''' + create a list the last X posts (most recent) + ''' + stdlog('Finding recent posts') + posts = openjson(VICTIMS_FILE) + # sort the posts by timestamp - descending + sorted_posts = sorted(posts, key=lambda x: x['published'], reverse=True) + # create a list of the last X posts + recentposts = [] + for post in sorted_posts: + recentposts.append(post) + if len(recentposts) == top: + break + stdlog('recent posts generated') + return recentposts + + def extract_domain(url): if '://' not in url: url = 'http://' + url # Assumption to handle URLs without a scheme @@ -417,6 +435,10 @@ def generate_sitemapXML(base_url, pages, note_directories, output_file="./docs/s tree = ET.ElementTree(urlset) tree.write(output_file, encoding="UTF-8", xml_declaration=True) +def json2cvs(): + df = pd.read_json (r'./data/victims.json') + df.to_csv (r'docs/victims.csv', index = None) + def generate_sitemapHTML(base_url, pages, note_directories, output_file="./docs/sitemap.xml"): with open(output_file, "w") as file: @@ -1030,10 +1052,10 @@ def allposts(): writeline(allpage, '_All `' + str(postcount()) + '` posts_') writeline(allpage, '') writeline(allpage, '') - writeline(allpage, 'πŸ’Ύ [Download](https://data.ransomware.live/posts.json) full list in **json** format') + writeline(allpage, 'πŸ’Ύ 
[Download](https://data.ransomware.live/victims.json) full list in **json** format') + writeline(allpage, '') + writeline(allpage, 'πŸ’Ύ [Download](https://www.ransomware.live/victims.csv) full list in **csv** format') writeline(allpage, '') - #writeline(allpage, 'πŸ’Ύ [Download](https://www.ransomware.live/posts.csv) full list in **csv** format') - #writeline(allpage, '') writeline(allpage, '') writeline(allpage, '| Discovery Date | Attack Date | Victim | [Country](country) | Group | πŸ“Έ | πŸ•΅πŸ»β€β™‚οΈ | ') writeline(allpage, '|---|---|---|---|---|---|---|') @@ -1339,4 +1361,132 @@ def summaryjson(): writeline(uptime_sheet, '"currentyear": "' + str(datetime.now().year) + '",') writeline(uptime_sheet, '"overallposts": "' + str(postcount()) + '"') writeline(uptime_sheet, '}') - writeline(uptime_sheet, ']') \ No newline at end of file + writeline(uptime_sheet, ']') + + + +def generate_admin_page(directory, output_file): + # Ensure the directory exists + if not os.path.exists(directory): + print("Directory does not exist.") + return + + # Get the list of files in the directory + files = os.listdir(directory) + + # Filter image files (assuming common image file extensions) + image_extensions = ('.png', '.jpg', '.jpeg', '.gif', '.bmp', '.tiff') + image_files = [file for file in files if file.endswith(image_extensions)] + + # Create a dictionary to hold lists of files by topic + images_by_topic = defaultdict(list) + + for file in image_files: + # Split the file name to get the topic + topic = file.split('-')[0] + images_by_topic[topic].append(file) + + # Sort topics alphabetically + sorted_topics = sorted(images_by_topic.keys()) + + # Create the Markdown structure + markdown_content = "# Administration\n" + markdown_content += "> Restricted area \n" + + for topic in sorted_topics: + images = images_by_topic[topic] + topic_name = topic.replace("_", " ").capitalize().replace('.png','') + markdown_content += "## {}\n".format(topic_name) + markdown_content += "\n" + 
for i in range(0, len(images), 2): + markdown_content += " \n" + markdown_content += " \n".format(images[i], images[i]) + if i + 1 < len(images): + markdown_content += " \n".format(images[i + 1], images[i + 1]) + else: + markdown_content += " \n" + markdown_content += " \n" + markdown_content += "
\"{}\"\"{}\"
\n" + markdown_content += "\n" + markdown_content += "## Statistics\n" + markdown_content += "API | web\n" + + + # Write the Markdown content to a file + with open(output_file, "w") as md_file: + md_file.write(markdown_content) + +def recentpublishedposts(top): + ''' + create a list the last X posts (most recent) + ''' + stdlog('finding recent posts') + posts = openjson('./data/victims.json') + # sort the posts by timestamp - descending + sorted_posts = sorted(posts, key=lambda x: x['published'], reverse=True) + # create a list of the last X posts + recentposts = [] + for post in sorted_posts: + recentposts.append(post) + if len(recentposts) == top: + break + stdlog('recent posts generated') + return recentposts + +def recentpublishedpage(): + '''create a markdown table for the last 200 posts based on the published value''' + fetching_count = 200 + stdlog('generating recent published victims page') + recentpage = 'docs/recentvictims.md' + # delete contents of file + with open(recentpage, 'w', encoding='utf-8') as f: + f.close() + writeline(recentpage,'# Recent victims') + writeline(recentpage,'') + writeline(recentpage, '> [!INFO] `Ransomware.live` provides tracking of ransomware groups and their victims. 
Descriptions available in the [group profiles view](profiles.md)') + writeline(recentpage,'') + writeline(recentpage, '**πŸ“° 200 last victims sorted by published date**') + writeline(recentpage, '') + writeline(recentpage, '| Attack Date | Victim | [Country](country) | Ransomware Group | πŸ“Έ |') + writeline(recentpage, '|---|---|---|---|---|') + for post in recentpublishedposts(fetching_count): + # show friendly date for discovered + date = post['published'].split(' ')[0] + attacked_date = post.get('attacked_date', None) + if attacked_date: + date = attacked_date.split(' ')[0] + # replace markdown tampering characters + title = post['post_title'].replace('|', '-').replace('&','&').replace('amp;','') + group = post['group_name'].replace('|', '-') + urlencodedtitle = urllib.parse.quote_plus(title) + grouplink = '[' + group + '](group/' + group + ')' + # screenpost='❌' + screenpost=' ' + if post['post_url'] is not None: + # Create an MD5 hash object + hash_object = hashlib.md5() + # Update the hash object with the string + hash_object.update(post['post_url'].encode('utf-8')) + # Get the hexadecimal representation of the hash + hex_digest = hash_object.hexdigest() + if os.path.exists('docs/screenshots/posts/'+hex_digest+'.png'): + screenpost='πŸ‘€' + if len(post['country']) > 1: + match post['country']: + case 'UK': + flag = 'GB' + case _: + flag = post['country'] + country="[!["+flag+"](https://images.ransomware.live/flags/"+flag+".svg ':size=32x24 :no-zoom')](country/"+flag.lower()+")" + else: + country='' + line = '| ' + date + ' | [`' + title + '`](https://google.com/search?q=' + urlencodedtitle + ') | ' + country + ' | ' + grouplink + ' | ' + screenpost + ' |' + result = get_removal(title, group) + if result: + line = '| ' + date + ' | *' + result + '* | ' + country + ' | ' + grouplink + ' | | |' + writeline(recentpage, line) + writeline(recentpage, '') + writeline(recentpage, '> [!TIP] You can also check the 200 last victims sorted by discovered date by 
`Ransomware.live` [here](recentdiscoveredvictims.md).') + writeline(recentpage, '') + writeline(recentpage, 'Last update : _'+ NowTime.strftime('%A %d/%m/%Y %H.%M') + ' (UTC)_') + stdlog('recent published victims page generated') \ No newline at end of file diff --git a/libs/graph.py b/libs/graph.py index 39b0244..0757e19 100644 --- a/libs/graph.py +++ b/libs/graph.py @@ -833,13 +833,99 @@ def generate_ransomware_map(): stdlog('Writing markdown file ...') current_datetime = datetime.now().isoformat() content = f""" - ### πŸ—ΊοΈ Worldmap for ransomware's attacks in {current_year} +### πŸ—ΊοΈ Worldmap for ransomware's attacks in {current_year} - [filename](map.html ':include') +[filename](map.html ':include') - _Last update: {current_datetime}_ - """ +_Last update: {current_datetime}_ +""" with open(md_path, 'w') as file: file.write(content) +import matplotlib.pyplot as plt +import pandas as pd + +def generate_execution_time_graphs(): + # Define the path to the log file and the output image files + log_file_path = '/var/log/ransomwarelive.log' + output_image_days = './docs/admin/execution_times-days.png' + output_image_daily = './docs/admin/execution_times-daily.png' + output_image_monthly = './docs/admin/execution_times-monthly.png' + + # Warning value in minutes + warning = 120 + + # Read the log file into a pandas DataFrame + try: + # Read the log file with appropriate column names + log_df = pd.read_csv(log_file_path, header=None, names=['datetime', 'scraping_time', 'parsing_time', 'markdown_time', 'total_execution_time']) + log_df['datetime'] = pd.to_datetime(log_df['datetime']) + + # Convert times from seconds to minutes + log_df['scraping_time'] = log_df['scraping_time'] / 60 + log_df['parsing_time'] = log_df['parsing_time'] / 60 + log_df['markdown_time'] = log_df['markdown_time'] / 60 + log_df['total_execution_time'] = log_df['total_execution_time'] / 60 + + # Calculate the other time component + log_df['other_time'] = log_df['total_execution_time'] - 
(log_df['parsing_time'] + log_df['scraping_time'] + log_df['markdown_time']) + + # Set the datetime as the index for easier plotting + log_df.set_index('datetime', inplace=True) + + # Filter data for the last 3 days + last_3_days_df = log_df[log_df.index >= (pd.Timestamp.now() - pd.Timedelta(days=3))] + + # Plot the execution times for the last 3 days as a cumulative bar graph + ax = last_3_days_df[['scraping_time', 'parsing_time', 'markdown_time', 'other_time']].plot(kind='bar', stacked=True, figsize=(12, 8), color=['#1f77b4', '#ff7f0e', '#9467bd', '#2ca02c']) + plt.axhline(y=warning, color='r', linestyle='--', label='Warning Level') + plt.title('Execution Times for the Last 3 Days') + plt.xlabel('Date and Time') + plt.ylabel('Execution Time (minutes)') + plt.xticks(rotation=45) + plt.legend(title='Execution Time Components') + plt.grid(True, axis='y') + plt.tight_layout() + plt.savefig(output_image_days) + plt.close() + + # Resample data to get daily averages + daily_avg_df = log_df.resample('D').mean() + + # Plot the daily average execution times + ax = daily_avg_df[['scraping_time', 'parsing_time', 'markdown_time', 'other_time']].plot(kind='bar', stacked=True, figsize=(12, 8), color=['#1f77b4', '#ff7f0e', '#9467bd', '#2ca02c']) + plt.axhline(y=warning, color='r', linestyle='--', label='Warning Level') + plt.title('Average Daily Execution Times') + plt.xlabel('Date') + plt.ylabel('Average Execution Time (minutes)') + plt.xticks(ticks=range(len(daily_avg_df.index)), labels=[date.strftime('%Y-%m-%d') for date in daily_avg_df.index], rotation=45) + plt.legend(title='Execution Time Components') + plt.grid(True, axis='y') + plt.tight_layout() + plt.savefig(output_image_daily) + plt.close() + + # Resample data to get monthly averages + monthly_avg_df = log_df.resample('M').mean() + + # Plot the monthly average execution times + ax = monthly_avg_df[['scraping_time', 'parsing_time', 'markdown_time', 'other_time']].plot(kind='bar', stacked=True, figsize=(12, 8), 
color=['#1f77b4', '#ff7f0e', '#9467bd', '#2ca02c']) + plt.axhline(y=warning, color='r', linestyle='--', label='Warning Level') + plt.title('Average Monthly Execution Times') + plt.xlabel('Month') + plt.ylabel('Average Execution Time (minutes)') + plt.xticks(ticks=range(len(monthly_avg_df.index)), labels=[date.strftime('%Y-%m') for date in monthly_avg_df.index], rotation=45) + plt.legend(title='Execution Time Components') + plt.grid(True, axis='y') + plt.tight_layout() + plt.savefig(output_image_monthly) + plt.close() + + stdlog(f'Execution time graphs saved to {output_image_days}, {output_image_daily}, and {output_image_monthly}') + except FileNotFoundError: + errlog(f'Log file not found at {log_file_path}') + except pd.errors.ParserError: + errlog(f'Error parsing the log file at {log_file_path}') + except Exception as e: + errlog(f'An unexpected error occurred: {e}') diff --git a/libs/mystripe.py b/libs/mystripe.py new file mode 100644 index 0000000..64115e0 --- /dev/null +++ b/libs/mystripe.py @@ -0,0 +1,173 @@ +import os +import stripe +from dotenv import load_dotenv +from datetime import datetime, timedelta +import calendar +import matplotlib.pyplot as plt +from ransomwarelive import stdlog, errlog + +# Load environment variables from .env file +env_path = os.path.join(os.path.dirname(__file__), '../.env') +load_dotenv(dotenv_path=env_path) +stripe.api_key = os.getenv('STRIPE_API_KEY') + +current_year = datetime.now().year +# Define the output file as a global variable +output_file = './docs/admin/budget_stripe-' + str(current_year) + '.png' + +# Function to get all payments for a specific month +def get_monthly_payments(year, month): + start_date = datetime(year, month, 1) + end_date = datetime(year, month, calendar.monthrange(year, month)[1], 23, 59, 59) + + start_timestamp = int(start_date.timestamp()) + end_timestamp = int(end_date.timestamp()) + + payments = stripe.PaymentIntent.list( + created={ + 'gte': start_timestamp, + 'lte': end_timestamp, + }, + 
limit=100 # Adjust as necessary, Stripe max limit is 100 + ) + + return payments + +# Function to get all payouts for a specific month +def get_monthly_payouts(year, month): + start_date = datetime(year, month, 1) + end_date = datetime(year, month, calendar.monthrange(year, month)[1], 23, 59, 59) + + start_timestamp = int(start_date.timestamp()) + end_timestamp = int(end_date.timestamp()) + + payouts = stripe.Payout.list( + created={ + 'gte': start_timestamp, + 'lte': end_timestamp, + }, + limit=100 # Adjust as necessary, Stripe max limit is 100 + ) + + return payouts + +# Function to get monthly payments and payouts +def get_monthly_financials(year): + months = range(1, 13) + monthly_payments = [] + monthly_payouts = [] + payment_counts = [] + + for month in months: + payments = get_monthly_payments(year, month) + total_payments = sum(payment.amount for payment in payments['data']) / 100 # Convert cents to euros + monthly_payments.append(total_payments) + payment_counts.append(len(payments['data'])) + + payouts = get_monthly_payouts(year, month) + total_payouts = sum(payout.amount for payout in payouts['data']) / 100 # Convert cents to euros + monthly_payouts.append(total_payouts) + + return monthly_payments, monthly_payouts, payment_counts + +# Function to plot the combined graph and save it to a file +def plot_financials(year): + monthly_payments, monthly_payouts, payment_counts = get_monthly_financials(year) + cumulative_payouts = [sum(monthly_payouts[:i+1]) for i in range(len(monthly_payouts))] + months = [calendar.month_name[i] for i in range(1, 13)] + + # Calculate the cumulative payout for December + cumulative_payout_december = cumulative_payouts[-1] + + fig, ax = plt.subplots(figsize=(10, 6)) + + # Bar chart for monthly payments received + bars = ax.bar(months, monthly_payments, color='skyblue', label='Monthly Payments') + + # Line chart for cumulative payouts + ax.plot(months, cumulative_payouts, marker='o', color='red', label='Cumulative Payouts') + + # 
Annotate bars with the number of payments + for bar, count in zip(bars, payment_counts): + if count > 0: # Only annotate if the count is greater than 0 + height = bar.get_height() + ax.annotate(f'{count}', + xy=(bar.get_x() + bar.get_width() / 2, height), + xytext=(0, 3), # 3 points vertical offset + textcoords="offset points", + ha='center', va='bottom', fontsize=10) + + # Display the cumulative payout for December + ax.text(11, cumulative_payout_december, f'€{cumulative_payout_december:.2f}', + color='red', ha='center', va='bottom', fontsize=10, bbox=dict(facecolor='white', alpha=0.5)) + + ax.set_xlabel('Month') + ax.set_ylabel('Amount in Euros') + ax.set_title(f'Financial Overview in {year}') + ax.tick_params(axis='x', rotation=45) + ax.legend() + ax.grid(True) + + fig.tight_layout() + plt.savefig(output_file) + +# Check if the script should run based on the file modification date +def should_run(): + if os.path.exists(output_file): + file_mod_time = datetime.fromtimestamp(os.path.getmtime(output_file)) + if datetime.now() - file_mod_time < timedelta(hours=24): + return False + return True + +# Function to get total paid by email and plot the graph +def plot_payments_by_email(): + # Retrieve all charges + charges = stripe.Charge.list(limit=100) + payments_by_email = {} + + # Extract customer info from charges + for charge in charges.auto_paging_iter(): + if charge.paid: + customer_id = charge.customer + customer = stripe.Customer.retrieve(customer_id) + email = customer.email + amount_paid = charge.amount / 100 # Convert amount to euros + if email in payments_by_email: + payments_by_email[email]['amount'] += amount_paid + payments_by_email[email]['count'] += 1 + else: + payments_by_email[email] = {'amount': amount_paid, 'count': 1} + + # Sort emails by total amount paid + sorted_payments_by_email = dict(sorted(payments_by_email.items(), key=lambda item: item[1]['amount'], reverse=True)) + + # Plot the data + emails = list(sorted_payments_by_email.keys())[:100] # 
Get top 10 emails + amounts = [sorted_payments_by_email[email]['amount'] for email in emails] + counts = [sorted_payments_by_email[email]['count'] for email in emails] + + fig, ax = plt.subplots(figsize=(10, 6)) + bars = ax.barh(emails, amounts, color='skyblue') + ax.set_xlabel('Amount in Euros') + ax.set_ylabel('Sponsors') + ax.set_title('Sponsors by Total Payments') + + # Annotate bars with the payment amounts and counts + for bar, amount, count in zip(bars, amounts, counts): + ax.annotate(f'€{amount:.2f} ({count})', + xy=(amount, bar.get_y() + bar.get_height() / 2), + xytext=(3, 0), # 3 points horizontal offset + textcoords="offset points", + ha='left', va='center', fontsize=10) + + fig.tight_layout() + plt.savefig('./docs/admin/budget_sponsors.png') + +def generatestripe(): + if should_run(): + plot_financials(current_year) + plot_payments_by_email() + stdlog('Stripe graph generated') + else: + errlog("The script has been executed within the last 24 hours. Exiting.") + diff --git a/libs/ransomwarelive.py b/libs/ransomwarelive.py index 3588f61..6f21bcd 100644 --- a/libs/ransomwarelive.py +++ b/libs/ransomwarelive.py @@ -251,8 +251,9 @@ def md5_file(file_path, chunk_size=8192): def clean_string(s): chars_to_remove='|\t\b\n' for char in chars_to_remove: - s = s.replace(char, '') + s = s.replace(char, ' ') s = s.replace('[DISCLOSED]', '') # Remove [DISCLOSED] + s = s.replace('Data Leak', '') # Remove [DISCLOSED] s = s.strip() s = re.sub(' +', ' ', s) # Replace multiple spaces with a single space return s diff --git a/parsers/alphalocker.py b/parsers/alphalocker.py index 016d349..09c5d9e 100644 --- a/parsers/alphalocker.py +++ b/parsers/alphalocker.py @@ -33,7 +33,7 @@ def main(): url = header.get('href') or header['href'] description = div.find('div', style='line-height:20px; padding-top:5px; margin-bottom:30px;').text.strip() try: - url = find_slug_by_md5('alphalocker', extract_md5_from_filename(html_doc)) + "/" + str(url) + url = find_slug_by_md5('alphalocker', 
extract_md5_from_filename(html_doc)).replace('_1','') + "/" + str(url) except: url = 'http://mydatae2d63il5oaxxangwnid5loq2qmtsol2ozr6vtb7yfm5ypzo6id.onion' + "/" + str(url) appender(title, 'alphalocker', description,"","",url) diff --git a/parsers/blacksuit.py b/parsers/blacksuit.py index 95cde1d..0865c11 100644 --- a/parsers/blacksuit.py +++ b/parsers/blacksuit.py @@ -7,7 +7,7 @@ +-----------------------+-----------+----------+ Rappel : def appender(post_title, group_name, description="", website="", published="", post_url="") """ - + import os,datetime,sys,re from bs4 import BeautifulSoup from datetime import datetime @@ -19,7 +19,7 @@ def main(): for filename in os.listdir('source'): - #try: + try: if filename.startswith('blacksuit-'): html_doc='source/'+filename file=open(html_doc,'r') @@ -46,6 +46,6 @@ def main(): appender(title, 'blacksuit', description,website,'',post_url) file.close() - #except: - # errlog('blacksuit : ' + 'parsing fail') - # pass + except: + errlog('blacksuit : ' + 'parsing fail') + pass diff --git a/ransomwarelive.py b/ransomcmd.py similarity index 95% rename from ransomwarelive.py rename to ransomcmd.py index b4a62dd..9796085 100755 --- a/ransomwarelive.py +++ b/ransomcmd.py @@ -11,24 +11,7 @@ existing tools and libraries. 
Usage: - python3 ransomwarelive.py [options] - -Commands: - scrape Scrape ransomware DLS sites - parse Parse ransomware DLS sites - generate Generate Ransomware.live site - screenshot Generate screenshots for ransomware sites - search Search victim in the database - rss Generate RSS feed - infostealer Search for Hudsonrock database - tools Tools for Ransomware.live (subcommands: duplicate, order) - -Options: - -F, --force Force the execution of the command (e.g., force scraping) - -g, --group Specify a specific group - -u, --url Specify a specific URL for screenshots - -v, --victim Specify a victim name - -d, --domain Specify a domain name + python3 ransomcmd.py [options] Dependencies: - Python 3.x @@ -71,6 +54,7 @@ import ransomnotes import hudsonrock import negotiations +import mystripe SOURCE='./source' @@ -275,9 +259,9 @@ def check_lock_file(): LOCK_FILE_PATH = os.path.join(tempfile.gettempdir(), LOCK_FILE_NAME) create_lock_file(LOCK_FILE_PATH) start_time = time.time() - generatesite.mainpage() - generatesite.statuspage() - generatesite.summaryjson() + #generatesite.mainpage() + #generatesite.statuspage() + #generatesite.summaryjson() load_dotenv() DATA_DIR = os.getenv('DATA_DIR') VICTIMS_FILE = os.getenv('VICTIMS_FILE') @@ -365,6 +349,14 @@ def check_lock_file(): negotiations.parse_group(gang) negotiations.generatenegotiationindex() ransomwarelive.stdlog('Ransomware Negotiation generated') + generatesite.json2cvs() + ### BEGIN : ADMIN ### + mystripe.generatestripe() + graph.generate_execution_time_graphs() + directory_path = "./docs/admin" + markdown_file = os.path.join(directory_path, "README.md") + generatesite.generate_admin_page(directory_path, markdown_file) + ### END : ADMIN ### end_time = time.time() execution_time = end_time - start_time ransomwarelive.stdlog(f'Generating execution time {execution_time:.2f} secondes') diff --git a/run.sh b/run.sh index 47729e1..bc15bc9 100755 --- a/run.sh +++ b/run.sh @@ -60,26 +60,26 @@ source .env 
SCRAPE_BEGIN_TIME=$(date +%s) -python3 ransomwarelive.py scrape +python3 ransomcmd.py scrape SCRAPE_END_TIME=$(date +%s) SCRAPE_EXECUTION_TIME=$((SCRAPE_END_TIME - SCRAPE_BEGIN_TIME)) PARSE_BEGIN_TIME=$(date +%s) -python3 ransomwarelive.py parse +python3 ransomcmd.py parse PARSE_END_TIME=$(date +%s) PARSE_EXECUTION_TIME=$((PARSE_END_TIME - PARSE_BEGIN_TIME)) MARKDOWN_BEGIN_TIME=$(date +%s) -python3 ransomwarelive.py generate +python3 ransomcmd.py generate # TODO: Need to be include in ransomwarelive library python3 generateworldmap.py # TODO: Need to be include in ransomwarelive library python3 generatecyberattacks.py # TODO: Need to be include in ransomwarelive library python3 generatecountries.py -python3 ransomwarelive.py rss +python3 ransomcmd.py rss MARKDOWN_END_TIME=$(date +%s) MARKDOWN_EXECUTION_TIME=$((MARKDOWN_END_TIME - MARKDOWN_BEGIN_TIME))