import json
from tests.utils import *

-request_timeout = config.http_request_timeout
+REQUEST_TIMEOUT = config.http_request_timeout
try:
-    use_cache = config.cache_when_possible
-    cache_time_delta = config.cache_time_delta
+    USE_CACHE = config.cache_when_possible
+    CACHE_TIME_DELTA = config.cache_time_delta
except:
    # If cache_when_possible variable is not set in config.py this will be the default
-    use_cache = False
-    cache_time_delta = timedelta(hours=1)
+    USE_CACHE = False
+    CACHE_TIME_DELTA = timedelta(hours=1)

def run_test(_, langCode, url, googlePageSpeedApiKey, strategy, category, review_show_improvements_only, lighthouse_use_api):
    """
@@ -165,15 +165,15 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
        # print('pagespeed_api_request: {0}'.format(pagespeed_api_request))

        try:
-            get_content = httpRequestGetContent(pagespeed_api_request)
+            get_content = get_http_content(pagespeed_api_request)
            json_content = str_to_json(get_content, check_url)
            return json_content
        except:  # breaking and hoping for more luck with the next URL
            print(
                'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format(
                    check_url, sys.exc_info()[0]))
            return
-    elif use_cache:
+    elif USE_CACHE:
        dir = Path(os.path.dirname(
            os.path.realpath(__file__)) + os.path.sep).parent
        try:
@@ -190,14 +190,14 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
            bashCommand = "node node_modules{2}lighthouse{2}cli{2}index.js --output json --output-path {3} --locale {1} --form-factor {0} --chrome-flags=\"--headless\" --quiet".format(
                strategy, langCode, os.path.sep, result_file)
            artifacts_file = os.path.join(cache_path, 'artifacts.json')
-            if os.path.exists(result_file) and not is_file_older_than(result_file, cache_time_delta):
+            if os.path.exists(result_file) and not is_file_older_than(result_file, CACHE_TIME_DELTA):
                file_created_timestamp = os.path.getctime(result_file)
                file_created_date = time.ctime(file_created_timestamp)
                print('Cached entry found from {0}, using it instead of calling website again.'.format(
                    file_created_date))
                with open(result_file, 'r', encoding='utf-8', newline='') as file:
                    return str_to_json('\n'.join(file.readlines()), check_url)
-            elif os.path.exists(artifacts_file) and not is_file_older_than(artifacts_file, cache_time_delta):
+            elif os.path.exists(artifacts_file) and not is_file_older_than(artifacts_file, CACHE_TIME_DELTA):
                file_created_timestamp = os.path.getctime(artifacts_file)
                file_created_date = time.ctime(file_created_timestamp)
                print('Cached entry found from {0}, using it instead of calling website again.'.format(
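
is_file_older_than() is imported from tests.utils and its body is not shown in this diff; a plausible sketch, assuming it compares the file's modification time against the given timedelta, looks like this:

import os
from datetime import datetime, timedelta

def is_file_older_than(file_path, time_delta):
    # Hypothetical reconstruction, not the tests.utils implementation:
    # True when the file was last modified more than time_delta ago.
    modified = datetime.fromtimestamp(os.path.getmtime(file_path))
    return datetime.now() - modified > time_delta
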
@@ -209,7 +209,7 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
                import subprocess

                process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
-                output, error = process.communicate(timeout=request_timeout * 10)
+                output, error = process.communicate(timeout=REQUEST_TIMEOUT * 10)
                with open(result_file, 'r', encoding='utf-8', newline='') as file:
                    return str_to_json('\n'.join(file.readlines()), check_url)
        except:
@@ -221,7 +221,7 @@ def get_json_result(langCode, url, googlePageSpeedApiKey, strategy, category, li
        import subprocess

        process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
-        output, error = process.communicate(timeout=request_timeout * 10)
+        output, error = process.communicate(timeout=REQUEST_TIMEOUT * 10)

        get_content = output
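
Both communicate() calls above give the Lighthouse CLI ten times the configured request timeout; if the child process exceeds that, communicate() raises subprocess.TimeoutExpired, which the surrounding bare except currently swallows. A minimal sketch of the same call with the timeout handled explicitly (the command and timeout value here are placeholders, not taken from the diff):

import subprocess

REQUEST_TIMEOUT = 60  # assumed value; the diff reads it from config

# Placeholder command; the diff builds a node/lighthouse CLI invocation.
bashCommand = "node --version"

process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
try:
    output, error = process.communicate(timeout=REQUEST_TIMEOUT * 10)
except subprocess.TimeoutExpired:
    process.kill()  # reap the child so it does not linger
    output, error = process.communicate()
    print('Lighthouse run exceeded the timeout and was killed.')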