
Commit 8b9b4e5

pylint
1 parent 5d8a824

21 files changed: +226 -243 lines
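The commit message points at the motivation: pylint. The changes follow one pattern throughout: module-level constants are renamed to UPPER_CASE (pylint's invalid-name check, C0103) and the camelCase helper httpRequestGetContent becomes the snake_case get_http_content. A minimal sketch of the convention, with an illustrative stand-in body (the real helper lives in tests/utils.py and takes additional parameters):

    from datetime import timedelta
    import urllib.request

    # pylint C0103: module-level constants should be UPPER_CASE ...
    REQUEST_TIMEOUT = 30                   # was: request_timeout
    CACHE_TIME_DELTA = timedelta(hours=1)  # was: cache_time_delta

    # ... and functions should be snake_case.
    def get_http_content(url):  # was: httpRequestGetContent
        # Stand-in body for illustration only; not the project's implementation.
        with urllib.request.urlopen(url, timeout=REQUEST_TIMEOUT) as response:
            return response.read().decode('utf-8')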

engines/sitemap.py (+4 -4)

@@ -5,7 +5,7 @@
 import io
 from bs4 import BeautifulSoup
 from engines.utils import use_item
-from tests.utils import get_content_type, httpRequestGetContent, merge_dicts, cache_time_delta
+from tests.utils import get_content_type, get_http_content, merge_dicts, CACHE_TIME_DELTA

 def read_sites(input_sitemap_url, input_skip, input_take):
     ignore_none_html = True

@@ -25,7 +25,7 @@ def read_sitemap(input_sitemap_url, input_skip, input_take, ignore_none_html):

     if input_sitemap_url.endswith('.xml.gz'):
         # unpack gzip:ed sitemap
-        sitemap_content = httpRequestGetContent(input_sitemap_url, True, False)
+        sitemap_content = get_http_content(input_sitemap_url, True, False)
         try:
             if isinstance(sitemap_content, str):
                 return result

@@ -42,7 +42,7 @@ def read_sitemap(input_sitemap_url, input_skip, input_take, ignore_none_html):
         except gzip.BadGzipFile:
             return result
     else:
-        sitemap_content = httpRequestGetContent(input_sitemap_url, True, True)
+        sitemap_content = get_http_content(input_sitemap_url, True, True)
     result = merge_dicts(read_sitemap_xml(input_sitemap_url,
                                           sitemap_content,
                                           input_skip,

@@ -113,7 +113,7 @@ def read_sitemap_xml(key, sitemap_content, input_skip, input_take, ignore_none_html):
             print(f'- skipping because it is of type: {item_type}')
             continue

-        item_content_type = get_content_type(item_url, cache_time_delta)
+        item_content_type = get_content_type(item_url, CACHE_TIME_DELTA)
         print('content-type', item_content_type)
         if item_content_type == 401:
             print(f'- skipping because it is of status-code: {item_content_type}')
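The first hunks above touch the gzipped-sitemap branch: URLs ending in .xml.gz are fetched as raw bytes and unpacked before the XML is parsed, with gzip.BadGzipFile as the bail-out. A standalone sketch of that unpacking step, under the assumption that the raw bytes are already in hand (function name is illustrative):

    import gzip
    import io

    def unpack_gzipped_sitemap(raw_bytes):
        # Returns decoded XML, or None if the payload is not valid gzip
        # (mirrors the except gzip.BadGzipFile branch in read_sitemap).
        try:
            with gzip.open(io.BytesIO(raw_bytes), 'rb') as gz:
                return gz.read().decode('utf-8')
        except gzip.BadGzipFile:
            return None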

engines/webperf.py (+2 -2)

@@ -24,7 +24,7 @@ def read_sites(input_url, input_skip, input_take):
     else:
         raise NotImplementedError('input is incorrect')

-    category_content = httpRequestGetContent(input_url)
+    category_content = get_http_content(input_url)

     category_regex = r"<a href=\"(?P<detail_url>\/site\/[^\"]+)\""
     category_matches = re.finditer(

@@ -43,7 +43,7 @@ def read_sites(input_url, input_skip, input_take):
     detail_regex = r"Webbplats:<\/th>[ \r\n\t]+<td><a href=\"(?P<item_url>[^\"]+)\""
     current_index = 0
     for detail_url in detailed_urls:
-        detail_content = httpRequestGetContent(detail_url)
+        detail_content = get_http_content(detail_url)
         detail_match = re.search(detail_regex, detail_content, re.MULTILINE)
         item_url = detail_match.group('item_url')

test_dnssec.py (+1 -1)

@@ -243,7 +243,7 @@ def validate_dnssec(domain, domain_entry):
 def testdns(key, datatype, use_dnssec):
     print('\ttestdns', key, datatype, use_dnssec)
     cache_key = 'dnslookup://{0}#{1}#{2}'.format(key, datatype, use_dnssec)
-    if has_cache_file(cache_key, True, cache_time_delta):
+    if has_cache_file(cache_key, True, CACHE_TIME_DELTA):
         cache_path = get_cache_path(cache_key, True)
         print('\t- Using dnslookup cache')
         response = dns.message.from_file(cache_path)

tests/a11y_statement.py (+3 -3)

@@ -8,7 +8,7 @@
 from bs4 import BeautifulSoup
 import gettext

-from tests.utils import httpRequestGetContent
+from tests.utils import get_http_content
 _ = gettext.gettext

 review_show_improvements_only = config.review_show_improvements_only

@@ -64,7 +64,7 @@ def run_test(_, langCode, url):


 def get_digg_report_canonical():
-    content = httpRequestGetContent(digg_url)
+    content = get_http_content(digg_url)
     content_match = re.search(
         r'<link rel="canonical" href="(?P<url>[^"]+)', content)
     if content_match:

@@ -83,7 +83,7 @@ def check_item(item, root_item, org_url_start, _):
     statements = list()
     content = None
     if item['url'] not in checked_urls:
-        content = httpRequestGetContent(item['url'], True)
+        content = get_http_content(item['url'], True)
         time.sleep(1)
         checked_urls[item['url']] = content
     else:

tests/css_validator_w3c.py (+7 -7)

@@ -16,8 +16,8 @@
 _local = gettext.gettext

 # DEFAULTS
-request_timeout = config.http_request_timeout
-useragent = config.useragent
+REQUEST_TIMEOUT = config.http_request_timeout
+USERAGENT = config.useragent
 css_review_group_errors = config.css_review_group_errors
 review_show_improvements_only = config.review_show_improvements_only
 sitespeed_use_docker = config.sitespeed_use_docker

@@ -27,12 +27,12 @@
     # If sitespeed timeout is not set in config.py this will be the default
     sitespeed_timeout = 600
 try:
-    use_cache = config.cache_when_possible
-    cache_time_delta = config.cache_time_delta
+    USE_CACHE = config.cache_when_possible
+    CACHE_TIME_DELTA = config.cache_time_delta
 except:
     # If cache_when_possible variable is not set in config.py this will be the default
-    use_cache = False
-    cache_time_delta = timedelta(hours=1)
+    USE_CACHE = False
+    CACHE_TIME_DELTA = timedelta(hours=1)

 global css_features
 global css_properties_doesnt_exist

@@ -306,7 +306,7 @@ def get_mdn_web_docs_css_features():
     css_features = {}
     css_functions = {}

-    html = httpRequestGetContent(
+    html = get_http_content(
         'https://developer.mozilla.org/en-US/docs/Web/CSS/Reference')

     soup = BeautifulSoup(html, 'lxml')
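The try/except fallback renamed in the middle hunk keeps its behavior; only the names change. As an aside, pylint would also flag the bare except (bare-except, W0702), and the same defaulting can be expressed with getattr, assuming config is a plain module, though that rewrite is not part of this commit:

    from datetime import timedelta
    import config

    # Same defaults as the except branch above, without a bare except.
    USE_CACHE = getattr(config, 'cache_when_possible', False)
    CACHE_TIME_DELTA = getattr(config, 'cache_time_delta', timedelta(hours=1))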

tests/email_validator.py (+4 -4)

@@ -15,7 +15,7 @@
 import dns
 import config
 from models import Rating
-from tests.utils import dns_lookup, get_best_country_code, httpRequestGetContent, is_country_code_in_eu_or_on_exception_list
+from tests.utils import dns_lookup, get_best_country_code, get_http_content, is_country_code_in_eu_or_on_exception_list
 import gettext
 _local = gettext.gettext

@@ -152,13 +152,13 @@ def run_test(_, langCode, url):
         hostname, result_dict, _, _local)
     if rating.get_overall() == -1.0:
         # NO MX record found for domain, look for e-mail on website for alternative e-mail domain.
-        content = httpRequestGetContent(url, True)
+        content = get_http_content(url, True)
         time.sleep(1)
         result = search_for_email_domain(content)
         if result == None:
             interesting_urls = get_interesting_urls(content, url, 0)
             for interesting_url in interesting_urls:
-                content = httpRequestGetContent(interesting_url, True)
+                content = get_http_content(interesting_url, True)
                 result = search_for_email_domain(content)
                 if result != None:
                     break

@@ -362,7 +362,7 @@ def Validate_MTA_STS_Policy(_, rating, _local, hostname):
     rating += has_mta_sts_records_rating

     # https://mta-sts.example.com/.well-known/mta-sts.txt
-    content = httpRequestGetContent(
+    content = get_http_content(
         "https://mta-sts.{0}/.well-known/mta-sts.txt".format(hostname))

     has_mta_sts_txt_rating = Rating(_, review_show_improvements_only)
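The last hunk fetches the MTA-STS policy from its well-known URL. Per RFC 8461, the policy file is plain "key: value" lines (version, mode, mx, max_age), so a parser can be very small; a sketch, separate from anything this commit changes (function name is illustrative):

    def parse_mta_sts_policy(text):
        # RFC 8461 policy files are line-oriented "key: value" pairs;
        # the mx key may repeat, so values are collected in lists.
        policy = {}
        for line in text.splitlines():
            key, separator, value = line.partition(':')
            if separator:
                policy.setdefault(key.strip(), []).append(value.strip())
        return policy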

tests/energy_efficiency_websitecarbon.py (+1 -1)

@@ -35,7 +35,7 @@ def run_test(_, langCode, url):
     print(_('TEXT_TEST_START').format(
         datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))

-    result_json = httpRequestGetContent(
+    result_json = get_http_content(
         'https://api.websitecarbon.com/site?url={0}'.format(url))
     result_dict = json.loads(result_json)

tests/frontend_quality_yellow_lab_tools.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
_local = gettext.gettext
1010

1111
# DEFAULTS
12-
request_timeout = config.http_request_timeout
12+
REQUEST_TIMEOUT = config.http_request_timeout
1313
review_show_improvements_only = config.review_show_improvements_only
1414
time_sleep = config.webbkoll_sleep
1515
if time_sleep < 5:
@@ -58,23 +58,23 @@ def run_test(_, langCode, url, device='phone'):
5858

5959
running_status = 'running'
6060
while running_status == 'running':
61-
running_json = httpRequestGetContent(
61+
running_json = get_http_content(
6262
'{0}/api/runs/{1}'.format(ylt_server_address, test_id))
6363
running_info = json.loads(running_json)
6464
running_status = running_info['status']['statusCode']
6565
time.sleep(time_sleep)
6666

6767
result_url = '{0}/api/results/{1}?exclude=toolsResults'.format(
6868
ylt_server_address, test_id)
69-
result_json = httpRequestGetContent(result_url)
69+
result_json = get_http_content(result_url)
7070
else:
7171
import subprocess
7272

7373
# bashCommand = "yellowlabtools {0}".format(url)
7474
bashCommand = "node node_modules{1}yellowlabtools{1}bin{1}cli.js {0}".format(
7575
url, os.path.sep)
7676
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
77-
output, error = process.communicate(timeout=request_timeout * 10)
77+
output, error = process.communicate(timeout=REQUEST_TIMEOUT * 10)
7878

7979
result_json = output
8080

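The larger hunk above is a polling loop: the test re-reads {server}/api/runs/{id} until statusCode leaves 'running', sleeping time_sleep seconds between checks, then fetches the results. The loop shape, distilled (fetch is a hypothetical stand-in for the renamed get_http_content):

    import json
    import time

    def wait_for_run(fetch, server_address, test_id, sleep_seconds=5):
        # Poll the run endpoint until the status is no longer 'running'.
        status = 'running'
        while status == 'running':
            running_json = fetch('{0}/api/runs/{1}'.format(server_address, test_id))
            status = json.loads(running_json)['status']['statusCode']
            time.sleep(sleep_seconds)
        return status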
tests/html_validator_w3c.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,8 @@
1010
_local = gettext.gettext
1111

1212
# DEFAULTS
13-
request_timeout = config.http_request_timeout
14-
useragent = config.useragent
13+
REQUEST_TIMEOUT = config.http_request_timeout
14+
USERAGENT = config.useragent
1515
review_show_improvements_only = config.review_show_improvements_only
1616
sitespeed_use_docker = config.sitespeed_use_docker
1717
try:
@@ -20,12 +20,12 @@
2020
# If sitespeed timeout is not set in config.py this will be the default
2121
sitespeed_timeout = 600
2222
try:
23-
use_cache = config.cache_when_possible
24-
cache_time_delta = config.cache_time_delta
23+
USE_CACHE = config.cache_when_possible
24+
CACHE_TIME_DELTA = config.cache_time_delta
2525
except:
2626
# If cache_when_possible variable is not set in config.py this will be the default
27-
use_cache = False
28-
cache_time_delta = timedelta(hours=1)
27+
USE_CACHE = False
28+
CACHE_TIME_DELTA = timedelta(hours=1)
2929

3030

3131
def run_test(_, langCode, url):

tests/http_validator.py (+12 -12)

@@ -32,9 +32,9 @@


 # DEFAULTS
-request_timeout = config.http_request_timeout
+REQUEST_TIMEOUT = config.http_request_timeout
 sitespeed_timeout = config.sitespeed_timeout
-useragent = config.useragent
+USERAGENT = config.useragent
 review_show_improvements_only = config.review_show_improvements_only
 sitespeed_use_docker = config.sitespeed_use_docker

@@ -44,12 +44,12 @@
     # If browser is not set in config.py this will be the default
     software_browser = 'chrome'
 try:
-    use_cache = config.cache_when_possible
-    cache_time_delta = config.cache_time_delta
+    USE_CACHE = config.cache_when_possible
+    CACHE_TIME_DELTA = config.cache_time_delta
 except:
     # If cache_when_possible variable is not set in config.py this will be the default
-    use_cache = False
-    cache_time_delta = timedelta(hours=1)
+    USE_CACHE = False
+    CACHE_TIME_DELTA = timedelta(hours=1)
 try:
     use_detailed_report = config.use_detailed_report
 except:

@@ -2363,9 +2363,9 @@ def has_weak_cipher(url, protocol_version):
     try:
         allow_redirects = False

-        headers = {'user-agent': useragent}
+        headers = {'user-agent': USERAGENT}
         a = session.get(url, verify=False, allow_redirects=allow_redirects,
-                        headers=headers, timeout=request_timeout)
+                        headers=headers, timeout=REQUEST_TIMEOUT)

         if a.status_code == 200 or a.status_code == 301 or a.status_code == 302 or a.status_code == 404:
             # print('is ok')

@@ -2451,9 +2451,9 @@ def has_insecure_cipher(url, protocol_version):
     try:
         allow_redirects = False

-        headers = {'user-agent': useragent}
+        headers = {'user-agent': USERAGENT}
         a = session.get(url, verify=False, allow_redirects=allow_redirects,
-                        headers=headers, timeout=request_timeout)
+                        headers=headers, timeout=REQUEST_TIMEOUT)

         if a.status_code == 200 or a.status_code == 301 or a.status_code == 302 or a.status_code == 404:
             # print('is ok')

@@ -2529,9 +2529,9 @@ def has_tls_version(url, validate_hostname, protocol_version):
     try:
         allow_redirects = False

-        headers = {'user-agent': useragent}
+        headers = {'user-agent': USERAGENT}
         session.get(url, verify=validate_hostname, allow_redirects=allow_redirects,
-                    headers=headers, timeout=request_timeout)
+                    headers=headers, timeout=REQUEST_TIMEOUT)

         return (True, 'is ok')

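All three probe functions above (has_weak_cipher, has_insecure_cipher, has_tls_version) pick up the renamed USERAGENT and REQUEST_TIMEOUT the same way. As an aside, the chained status-code comparison in the first two probes is itself something pylint flags (consider-using-in, R1714); a membership test is equivalent, though that rewrite is not part of this commit:

    def is_acceptable_status(status_code):
        # Equivalent to: status_code == 200 or status_code == 301
        #             or status_code == 302 or status_code == 404
        return status_code in (200, 301, 302, 404)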
tests/lighthouse_base.py

+11-11
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,14 @@
44
import json
55
from tests.utils import *
66

7-
request_timeout = config.http_request_timeout
7+
REQUEST_TIMEOUT = config.http_request_timeout
88
try:
9-
use_cache = config.cache_when_possible
10-
cache_time_delta = config.cache_time_delta
9+
USE_CACHE = config.cache_when_possible
10+
CACHE_TIME_DELTA = config.cache_time_delta
1111
except:
1212
# If cache_when_possible variable is not set in config.py this will be the default
13-
use_cache = False
14-
cache_time_delta = timedelta(hours=1)
13+
USE_CACHE = False
14+
CACHE_TIME_DELTA = timedelta(hours=1)
1515

1616
def run_test(_, langCode, url, googlePageSpeedApiKey, strategy, category, review_show_improvements_only, lighthouse_use_api):
1717
"""
@@ -165,15 +165,15 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
165165
# print('pagespeed_api_request: {0}'.format(pagespeed_api_request))
166166

167167
try:
168-
get_content = httpRequestGetContent(pagespeed_api_request)
168+
get_content = get_http_content(pagespeed_api_request)
169169
json_content = str_to_json(get_content, check_url)
170170
return json_content
171171
except: # breaking and hoping for more luck with the next URL
172172
print(
173173
'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format(
174174
check_url, sys.exc_info()[0]))
175175
return
176-
elif use_cache:
176+
elif USE_CACHE:
177177
dir = Path(os.path.dirname(
178178
os.path.realpath(__file__)) + os.path.sep).parent
179179
try:
@@ -190,14 +190,14 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
190190
bashCommand = "node node_modules{2}lighthouse{2}cli{2}index.js --output json --output-path {3} --locale {1} --form-factor {0} --chrome-flags=\"--headless\" --quiet".format(
191191
strategy, langCode, os.path.sep, result_file)
192192
artifacts_file = os.path.join(cache_path, 'artifacts.json')
193-
if os.path.exists(result_file) and not is_file_older_than(result_file, cache_time_delta):
193+
if os.path.exists(result_file) and not is_file_older_than(result_file, CACHE_TIME_DELTA):
194194
file_created_timestamp = os.path.getctime(result_file)
195195
file_created_date = time.ctime(file_created_timestamp)
196196
print('Cached entry found from {0}, using it instead of calling website again.'.format(
197197
file_created_date))
198198
with open(result_file, 'r', encoding='utf-8', newline='') as file:
199199
return str_to_json('\n'.join(file.readlines()), check_url)
200-
elif os.path.exists(artifacts_file) and not is_file_older_than(artifacts_file, cache_time_delta):
200+
elif os.path.exists(artifacts_file) and not is_file_older_than(artifacts_file, CACHE_TIME_DELTA):
201201
file_created_timestamp = os.path.getctime(artifacts_file)
202202
file_created_date = time.ctime(file_created_timestamp)
203203
print('Cached entry found from {0}, using it instead of calling website again.'.format(
@@ -209,7 +209,7 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
209209
import subprocess
210210

211211
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
212-
output, error = process.communicate(timeout=request_timeout * 10)
212+
output, error = process.communicate(timeout=REQUEST_TIMEOUT * 10)
213213
with open(result_file, 'r', encoding='utf-8', newline='') as file:
214214
return str_to_json('\n'.join(file.readlines()), check_url)
215215
except:
@@ -221,7 +221,7 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
221221
import subprocess
222222

223223
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
224-
output, error = process.communicate(timeout=request_timeout * 10)
224+
output, error = process.communicate(timeout=REQUEST_TIMEOUT * 10)
225225

226226
get_content = output
227227

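The caching hunks gate on is_file_older_than from tests.utils together with CACHE_TIME_DELTA. A minimal sketch of what such an age check can look like, assuming it compares the file's modification time against a timedelta (illustrative, not the repository's implementation):

    import os
    from datetime import datetime

    def is_file_older_than(path, delta):
        # True when the file was last modified longer ago than `delta`.
        modified_at = datetime.fromtimestamp(os.path.getmtime(path))
        return datetime.now() - modified_at > delta

    # Usage mirroring the diff: reuse the cache only while the entry is fresh.
    # if os.path.exists(result_file) and not is_file_older_than(result_file, CACHE_TIME_DELTA):
    #     ...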