Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

GRC price update #59

Merged
merged 4 commits into from
Feb 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
104 changes: 104 additions & 0 deletions grc_price_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import random
from typing import Union, List, Tuple

import requests
from bs4 import BeautifulSoup

# Pool of realistic browser user-agent strings; one is chosen at random per
# request so the price sites are less likely to serve a bot-detection page.
# NOTE: every entry must end with a comma — adjacent string literals without
# one are silently concatenated by Python into a single bogus entry.
AGENTS = (
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 14.5; rv:128.0) Gecko/20100101 Firefox/128.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:128.0) Gecko/20100101 Firefox/128.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_5) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.5 Safari/605.1.15",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.2592.113",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.2592.113",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36",
)

# Sites that publish a GRC price; each needs a matching branch in
# parse_grc_price_soup() that knows its HTML structure.
GRC_PRICE_URLS = (
    "https://www.bybit.com/en/coin-price/gridcoin-research/",
    "https://coinstats.app/coins/gridcoin/",
    "https://marketcapof.com/crypto/gridcoin-research/",
)

def parse_grc_price_soup(url: str, price_soup: Union[str, bytes]) -> Tuple[Union[float, None], str, str]:
    """Extract the GRC price from the HTML of one of the supported price sites.

    Args:
        url: Source URL; must be one of GRC_PRICE_URLS since it selects the
            site-specific HTML structure to search.
        price_soup: Raw page HTML (str or bytes, e.g. ``response.content``).

    Returns:
        Tuple of (price or None, error message or "", info message or "").
        Exactly one of the two message strings is non-empty.
    """
    float_price = None
    info_message = ""
    url_message = ""

    soup = BeautifulSoup(price_soup, "html.parser")

    # Site-specific lookup of the raw price text (still containing "$").
    # Any failure leaves raw_price as None and is reported uniformly below.
    raw_price = None
    if url == "https://www.bybit.com/en/coin-price/gridcoin-research/":
        tag = soup.find("div", attrs={"data-cy": "coinPrice"})
        if tag is not None:
            raw_price = tag.text
    elif url == "https://coinstats.app/coins/gridcoin/":
        tag = soup.find("div", class_="CoinOverview_mainPrice__YygaC")
        if tag is not None and tag.p is not None:
            raw_price = tag.p.text
    elif url == "https://marketcapof.com/crypto/gridcoin-research/":
        tag = soup.find("div", class_="price")
        if tag is not None:
            # The price is the direct text child of the div, not nested markup.
            raw_price = tag.find(string=True, recursive=False)

    if raw_price is not None:
        try:
            float_price = float(raw_price.replace("$", "").strip())
            info_message = f"Found GRC price of {float_price} from {url}"
        except Exception:
            # Text was found but didn't parse as a number (e.g. layout change).
            url_message = f"Error getting info from {url}"
    else:
        # Unknown URL, or the expected element was missing from the page.
        url_message = f"Error getting info from {url}"

    return float_price, url_message, info_message


def get_grc_price_from_sites() -> Tuple[Union[float, None], str, List[str], List[str], List[str]]:
    """Fetch the GRC price from each known price site and average the results.

    Returns:
        Tuple of:
            - average price across all sites that yielded one, or None,
            - one-line status message suitable for the UI status table,
            - per-URL error messages from failed HTML parses,
            - info messages for successful price lookups,
            - error messages for failed HTTP requests.
    """
    headers = requests.utils.default_headers()
    # Rotate the user agent so we look less like a bot to the price sites.
    headers["User-Agent"] = random.choice(AGENTS)
    found_prices = []
    url_messages = []
    info_logger_messages = []
    error_logger_messages = []

    for url in GRC_PRICE_URLS:
        try:
            response = requests.get(url, headers=headers, timeout=5)
        except requests.exceptions.RequestException as error:
            # Catch ALL request failures (timeouts, DNS/connection errors,
            # invalid responses) so one bad site can't abort the whole lookup.
            error_logger_messages.append(f"Error fetching stats from {url}: {error}")
            continue

        price, url_message, info_message = parse_grc_price_soup(url, response.content)

        if price is not None:
            found_prices.append(price)
        # Only keep non-empty messages so callers don't log blank lines.
        if url_message:
            url_messages.append(url_message)
        if info_message:
            info_logger_messages.append(info_message)

    if found_prices:
        average_price = sum(found_prices) / len(found_prices)
        table_message = f"Found GRC price {average_price}"
        return average_price, table_message, url_messages, info_logger_messages, error_logger_messages

    table_message = "Unable to find GRC price"
    return None, table_message, url_messages, info_logger_messages, error_logger_messages
84 changes: 25 additions & 59 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from requests.auth import HTTPBasicAuth
from typing import List, Union, Dict, Tuple, Set, Any
import sys, signal
from grc_price_utils import get_grc_price_from_sites

# This is needed for some async stuff
import nest_asyncio
Expand Down Expand Up @@ -143,21 +144,6 @@
SAVE_STATS_DB = (
{}
) # Keeps cache of saved stats databases so we don't write more often than we need to
# Dictionary for places we query in format key=url, value=Tuple[nickname,regex].
# Note they all must match group 2
PRICE_URL_DICT: Dict[str, Tuple[str, Union[str, re.Pattern]]] = {
"https://finance.yahoo.com/quote/GRC-USD/": (
"yahoo.com",
r'(data-field="regularMarketPrice" data-trend="none" data-pricehint="\d" value=")(\d*\.\d*)',
),
"https://www.coingecko.com/en/coins/gridcoin-research": (
"coingecko",
re.compile(
r'(data-coin-id="243" data-coin-symbol="grc" data-target="price.price">\$)(\d*\.\d*)(</span>)',
flags=re.MULTILINE | re.IGNORECASE,
),
),
}


def resolve_url_database(url: str) -> str:
Expand Down Expand Up @@ -190,7 +176,7 @@ def resolve_url_database(url: str) -> str:
from config import *
except Exception as e:
print("Error opening config.py, using defaults! Error is: {}".format(e))
# Import addl user settings from user_config
# Import additional user settings from user_config
if os.path.isfile("user_config.py"):
try:
from user_config import * # You can ignore an unresolved reference error here in pycharm since user is expected to create this file
Expand Down Expand Up @@ -982,48 +968,28 @@ def get_grc_price(sample_text: str = None) -> Union[float, None]:
Raises:
Exception: An error occurred accessing an online GRC price source.
"""
import requests as req
price, table_message, url_messages, info_log_messages, error_log_messages = get_grc_price_from_sites()

found_prices = []
headers = req.utils.default_headers()
headers.update(
{
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36",
}
)
for url, info in PRICE_URL_DICT.items():
regex = info[1]
name = info[0]
resp = ""
if sample_text:
resp = sample_text
else:
try:
resp = req.get(url, headers=headers).text
except Exception as e:
log.error("Error fetching stats from {}: {}".format(name, e))
regex_result = re.search(regex, resp)
if regex_result:
try:
answer = float(regex_result.group(2))
except Exception as e:
DATABASE["TABLE_STATUS"] = "Error getting info from {}".format(name)
print_and_log("Error getting info from {}".format(name), "ERROR")
else:
log.info("Found GRC price of {} from {}".format(answer, name))
found_prices.append(answer)
else:
DATABASE["TABLE_STATUS"] = "Error getting info from {}".format(name)
print_and_log("Error getting info from {}".format(name), "ERROR")
# Return average of all found prices
if len(found_prices) > 0:
DATABASE["TABLE_STATUS"] = "Found GRC price {}".format(
sum(found_prices) / len(found_prices)
)
return sum(found_prices) / len(found_prices)
else:
DATABASE["TABLE_STATUS"] = "Unable to find GRC price"
return None
for log_message in info_log_messages:
log.info(log_message)

for log_message in error_log_messages:
log.error(log_message)

if price:
DATABASE["TABLE_STATUS"] = table_message

for url_message in url_messages:
print_and_log(url_message, "ERROR")

return price

DATABASE["TABLE_STATUS"] = table_message

for url_message in url_messages:
print_and_log(url_message, "ERROR")

return DATABASE.get("GRCPRICE", 0)


def get_approved_project_urls_web(query_result: str = None) -> Dict[str, str]:
Expand Down Expand Up @@ -2460,8 +2426,8 @@ def print_table(
bottom_bar_2 = left_align("Info: {}".format(status), total_len=60, min_pad=1)
bottom_bar_3 = (
left_align(
"GRC Price: {:.4f}".format(DATABASE.get("GRCPRICE", 0.00000)),
total_len=17,
"GRC Price: {:.6f}".format(DATABASE.get("GRCPRICE", 0.00000)),
total_len=19,
min_pad=1,
)
+ "*"
Expand Down
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ urllib3 = "^2.2.3"
xmltodict = "^0.14.2"
zope-interface = "^7.2"
nest-asyncio = "^1.6.0"
beautifulsoup4 = "^4.13.3"


[build-system]
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,5 @@ typing
urllib3
xmltodict
zope.interface
nest-asyncio
nest-asyncio
beautifulsoup4
28 changes: 18 additions & 10 deletions tests/main_tests.py

Large diffs are not rendered by default.

37 changes: 15 additions & 22 deletions tests/network/network_tests.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import pytest
import main
import grc_price_utils
from typing import Dict,List,Tuple,Union,Any
# Tests that require a network connection and will fail without one
APPROVED_PROJECT_URLS={}

@pytest.fixture()
def test_get_approved_project_urls_web():
"""
Expand All @@ -11,31 +13,22 @@ def test_get_approved_project_urls_web():
"""
global APPROVED_PROJECT_URLS
APPROVED_PROJECT_URLS=main.get_approved_project_urls_web()


def test_get_project_mag_ratios_from_url(test_get_approved_project_urls_web):
result=main.get_project_mag_ratios_from_url(30,APPROVED_PROJECT_URLS)
assert len(result)>3
def test_get_grc_price_regex():
# Function to test the regexes for getting grc price. Note this may fail if you get a "are you a bot?" page.
# Inspect HTML before assuming the regex is broken
import requests as req
import re
headers = req.utils.default_headers()
headers.update({
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36',
})

sample_text=None
for url, info in main.PRICE_URL_DICT.items():
regex = info[1]
name = info[0]
resp = ''
if sample_text:
resp = sample_text
else:
resp = req.get(url, headers=headers).text
regex_result = re.search(regex, resp)
assert regex_result
float(regex_result.group(2))


def test_get_grc_price():
# Function to test the soup finds for getting the grc price. Note this may fail if you get a "are you a bot?" page.
# Inspect the html before assuming that the finds are broken.
price, _, _, _, _ = grc_price_utils.get_grc_price_from_sites()

assert price
assert isinstance(price,float)


def test_grc_grc_price():
answer=main.get_grc_price()
assert isinstance(answer,float)
17 changes: 17 additions & 0 deletions tests/soups.py

Large diffs are not rendered by default.