Skip to content

Commit

Permalink
Created a url_helper to get resources from my own domain instead of …
Browse files Browse the repository at this point in the history
…a github repo
  • Loading branch information
vil committed Jul 14, 2023
1 parent 892e2b1 commit ce9e3fd
Show file tree
Hide file tree
Showing 4 changed files with 95 additions and 51 deletions.
9 changes: 4 additions & 5 deletions h4xtools.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from utils import email_search, search_username, ig_scrape, whois_lookup, webhook_spammer, port_scanner, ip_lookup, \
phonenumber_lookup, websearch, smsbomber, web_scrape, wifi_finder, wifi_password_getter, fake_info_generator, \
dirbuster
from helper import printer
from helper import printer, url_helper

if os.name == "nt":
os.system("cls")
Expand Down Expand Up @@ -57,11 +57,10 @@ def version_check():
:return: version
"""
url = "https://raw.githubusercontent.com/V1li/H4X-Tools-ver/master/version.txt"
# Get the version from the url and return it
path = "h4xtools/version.txt"
try:
r = requests.get(url)
return r.text
r = url_helper.read_content(path)
return r
except requests.exceptions.ConnectionError:
printer.error("Failed to check the version..!")

Expand Down
77 changes: 77 additions & 0 deletions helper/url_helper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
"""
Copyright (c) 2022 GNU GENERAL PUBLIC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""

import requests
import random
import json
from helper import printer
from utils import randomuser

BASE_URL = "https://resources.vili.dev/"


def get_file(path):
    """
    Download a file from BASE_URL + path and save it under the same relative path locally.

    :param path: path to the file in BASE_URL (https://resources.vili.dev/);
                 also used as the local destination path
    """
    import os  # local import: directory creation below is the only use in this module

    try:
        # Randomized User-Agent, consistent with the other helpers in this module.
        headers = {
            "User-Agent": random.choice(randomuser.users)
        }
        # Bounded timeout so a stalled server cannot hang the tool indefinitely.
        r = requests.get(BASE_URL + path, headers=headers, timeout=30)
        # open(..., 'wb') does not create intermediate directories (e.g. 'h4xtools/'),
        # so make sure they exist before writing.
        directory = os.path.dirname(path)
        if directory:
            os.makedirs(directory, exist_ok=True)
        with open(path, 'wb') as f:
            f.write(r.content)
        printer.success(f"Successfully downloaded file to '{path}'..!")
    except requests.exceptions.RequestException:
        # RequestException is the base class covering ConnectionError, Timeout, etc.
        printer.error("Unable to connect to the server..!")


def read_content(path):
    """
    Read the text content of a file hosted at BASE_URL + path.

    :param path: path to the file in BASE_URL (https://resources.vili.dev/)
    :return: response body as a str, or None when the request fails
    """
    try:
        # Randomized User-Agent, consistent with the other helpers in this module.
        headers = {
            "User-Agent": random.choice(randomuser.users)
        }
        # Bounded timeout so a stalled server cannot hang the tool indefinitely.
        r = requests.get(BASE_URL + path, headers=headers, timeout=30)
        return r.text
    except requests.exceptions.RequestException:
        # RequestException is the base class covering ConnectionError, Timeout, etc.
        printer.error("Unable to connect to the server..!")
        # Explicit None on failure; callers must check before e.g. .splitlines().
        return None


def read_json_content(path):
    """
    Read and parse a JSON file hosted at BASE_URL + path.

    :param path: path to the file in BASE_URL (https://resources.vili.dev/)
    :return: the parsed JSON object, or None when the request or decoding fails
    """
    try:
        # Randomized User-Agent, consistent with the other helpers in this module.
        headers = {
            "User-Agent": random.choice(randomuser.users)
        }
        # Bounded timeout so a stalled server cannot hang the tool indefinitely.
        r = requests.get(BASE_URL + path, headers=headers, timeout=30)
        # Response.json() is the idiomatic equivalent of json.loads(r.text).
        return r.json()
    except requests.exceptions.RequestException:
        # RequestException is the base class covering ConnectionError, Timeout, etc.
        printer.error("Unable to connect to the server..!")
        return None
    except ValueError:
        # Response.json() raises ValueError (json.JSONDecodeError) on malformed payloads.
        printer.error(f"Failed to parse JSON from '{BASE_URL + path}'..!")
        return None
39 changes: 9 additions & 30 deletions utils/dirbuster.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@

import requests
import random
from helper import printer
from helper import printer, url_helper
from utils import randomuser

url_list = []
wordlist_url = "https://raw.githubusercontent.com/V1li/H4X-Tools-ver/master/wordlist.txt"
PATH = "h4xtools/wordlist.txt"


class Scan:
Expand All @@ -40,43 +40,23 @@ def __init__(self, domain):
printer.success(f"Scan Complete..! Found {len(url_list)} valid URL(s)..!")


def get_wordlist(text_file):
"""
Reads the wordlist from the file and returns a list of names
:param text_file: path to the text file
"""
names = []
try:
with open(text_file, 'r') as f:
for line in f:
line = line.strip()
if len(line) == 0:
continue
else:
names.append(line)
return names
except FileNotFoundError:
printer.error(f"File '{text_file}' not found..!")


def get_wordlist_from_url(url):
def get_wordlist():
"""
Reads the wordlist from the url and returns a list of names
:param url: url to the text file
:return: list of names
"""
names = []
try:
r = requests.get(url)
for line in r.text.splitlines():
content = url_helper.read_content(PATH)
for line in content.splitlines():
line = line.strip()
if len(line) == 0:
continue
else:
names.append(line)
return names
except requests.ConnectionError:
except requests.exceptions.ConnectionError:
printer.error("Connection Error..!")
return None

Expand All @@ -87,8 +67,7 @@ def scan_urls(domain):
:param domain: domain name to scan
"""
# paths = get_wordlist('data/wordlist.txt')
paths = get_wordlist_from_url(wordlist_url)
paths = get_wordlist()
valid_url = 0

try:
Expand All @@ -102,7 +81,7 @@ def scan_urls(domain):
valid_url += 1
printer.success(f"{valid_url} Valid URL(s): {url}")
url_list.append(url)
except requests.ConnectionError:
except requests.exceptions.ConnectionError:
printer.error("Connection Error..!")
continue
except KeyboardInterrupt:
Expand Down
21 changes: 5 additions & 16 deletions utils/search_username.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,11 @@
import time
from datetime import datetime
import aiohttp
import requests
import asyncio
from utils import randomuser
from helper import printer
from helper import printer, url_helper

url = "https://raw.githubusercontent.com/V1li/H4X-Tools-ver/master/data.json"
PATH = "h4xtools/data.json"


class Search:
Expand All @@ -50,7 +49,7 @@ def scan(username):
:param username: The username to scan for.
"""
start_time = time.time()
printer.info(f"Searching for '{username}' across {len(get_data_from_url(url)['sites'])} sites...")
printer.info(f"Searching for '{username}' across {len(url_helper.read_json_content(PATH)['sites'])} sites...")

results = []
loop = asyncio.get_event_loop()
Expand All @@ -61,7 +60,7 @@ def scan(username):
user_json = {
"search-params": {
"username": username,
"sites-number": len(get_data_from_url(url)['sites']),
"sites-number": len(url_helper.read_json_content(PATH)['sites']),
"date": now,
"execution-time": execution_time
},
Expand All @@ -82,7 +81,7 @@ async def make_requests(username, results):
"""
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=20)) as session:
tasks = []
for u in get_data_from_url(url)["sites"]:
for u in url_helper.read_json_content(PATH)["sites"]:
task = asyncio.ensure_future(make_request(session, u, username, results))
tasks.append(task)
await asyncio.gather(*tasks)
Expand Down Expand Up @@ -137,13 +136,3 @@ def print_results(user_json):
printer.success(f"Response Status: {site['response-status']}")
printer.success(f"Status: {site['status']}")
printer.error(f"Error Message: {site['error-message']}\n")


def get_data_from_url(url):
"""
Gets the search data from the given url.
:param url: The url to get the search data from.
"""
headers = {"User-Agent": random.choice(randomuser.users)}
return json.loads(requests.get(url, headers=headers).text)

0 comments on commit ce9e3fd

Please sign in to comment.