Skip to content

Commit

Permalink
dependency: remove pycurl package (#30771)
Browse files Browse the repository at this point in the history
* Update qcomgpsd.py

* Update url_file.py

* remove pycurl

* requests -> urllib3

* unused

* redundant

* fix import

* Revert "requests -> urllib3"

This reverts commit 7ca39e6.

* headless

* fix trail

* use requests.exceptions.RequestException

* use fp.tell

* fix indents

* reorder imports

* change timeout

* fix debug timing

* remove exception

* add timeout

* missing headers

* move to constructor

* move import

* unused import

* fix debug

* try

* no retries
  • Loading branch information
royjr authored Dec 18, 2023
1 parent 8c1176c commit 70624ff
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 92 deletions.
23 changes: 1 addition & 22 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,6 @@ polyline = "*"
# these should be removed
markdown-it-py = "*"
timezonefinder = "*"
pycurl = "*"
setproctitle = "*"


Expand Down Expand Up @@ -153,7 +152,6 @@ sphinx-rtd-theme = "*"
sphinx-sitemap = "*"
tabulate = "*"
tenacity = "*"
types-pycurl = "*"
types-requests = "*"
types-tabulate = "*"
tqdm = "*"
Expand Down
31 changes: 11 additions & 20 deletions system/qcomgpsd/qcomgpsd.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import itertools
import math
import time
import pycurl
import requests
import shutil
import subprocess
import datetime
Expand Down Expand Up @@ -102,27 +102,18 @@ def gps_enabled() -> bool:

def download_assistance():
  """Download the Qcom GNSS assistance file to ASSIST_DATA_FILE.

  Streams ASSISTANCE_URL to a temp path, aborting if the payload grows
  past the expected size, then atomically renames it into place.
  Failures are logged and swallowed (best-effort download).
  """
  try:
    # stream=True so we can enforce the size cap while downloading
    # instead of trusting a Content-Length header
    response = requests.get(ASSISTANCE_URL, timeout=5, stream=True)

    with open(ASSIST_DATA_FILE_DOWNLOAD, 'wb') as fp:
      for chunk in response.iter_content(chunk_size=8192):
        fp.write(chunk)
        # fp.tell() is the number of bytes written so far; bail out if the
        # file is larger than the ~100kB we expect assistance data to be
        if fp.tell() > 1e5:
          cloudlog.error("Qcom assistance data larger than expected")
          return

    # rename is atomic on POSIX: readers never see a partial file
    os.rename(ASSIST_DATA_FILE_DOWNLOAD, ASSIST_DATA_FILE)

  except requests.exceptions.RequestException:
    cloudlog.exception("Failed to download assistance file")
    return

Expand Down
70 changes: 22 additions & 48 deletions tools/lib/url_file.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import os
import time
import threading
import pycurl
from hashlib import sha256
from io import BytesIO
from urllib3 import PoolManager, Retry
from urllib3.util import Timeout
from tenacity import retry, wait_random_exponential, stop_after_attempt

from openpilot.common.file_helpers import atomic_write_in_dir
from openpilot.system.hardware.hw import Paths
# Cache chunk size
Expand Down Expand Up @@ -35,13 +36,14 @@ def __init__(self, url, debug=False, cache=None):
if cache is not None:
self._force_download = not cache

try:
self._curl = self._tlocal.curl
except AttributeError:
self._curl = self._tlocal.curl = pycurl.Curl()
if not self._force_download:
os.makedirs(Paths.download_cache_root(), exist_ok=True)

try:
self._http_client = URLFile._tlocal.http_client
except AttributeError:
self._http_client = URLFile._tlocal.http_client = PoolManager()

def __enter__(self):
return self

Expand All @@ -53,17 +55,10 @@ def __exit__(self, exc_type, exc_value, traceback):

@retry(wait=wait_random_exponential(multiplier=1, max=5), stop=stop_after_attempt(3), reraise=True)
def get_length_online(self):
  """Return the remote file's size in bytes via a HEAD request.

  Retried up to 3 times with random exponential backoff (tenacity).
  Returns 0 if the server sends no Content-Length header.
  """
  timeout = Timeout(connect=50.0, read=500.0)
  # preload_content=False: we only need the headers, not a body
  response = self._http_client.request('HEAD', self._url, timeout=timeout, preload_content=False)
  length = response.headers.get('content-length', 0)
  return int(length)

def get_length(self):
if self._length is not None:
Expand Down Expand Up @@ -117,58 +112,37 @@ def read(self, ll=None):
@retry(wait=wait_random_exponential(multiplier=1, max=5), stop=stop_after_attempt(3), reraise=True)
def read_aux(self, ll=None):
  """Fetch up to ll bytes from the current position of the remote file.

  Args:
    ll: number of bytes to read; None reads from self._pos to the end.

  Returns:
    The downloaded bytes (b"" when the requested range is empty).
    Advances self._pos by the number of bytes returned.

  Raises:
    URLFileException: on an out-of-bounds range (416), a missing partial
      response (expected 206), or any other non-200 status.
  """
  download_range = False
  headers = {'Connection': 'keep-alive'}
  if self._pos != 0 or ll is not None:
    if ll is None:
      end = self.get_length() - 1
    else:
      end = min(self._pos + ll, self.get_length()) - 1
    if self._pos >= end:
      return b""
    # HTTP Range is inclusive on both ends, hence the -1 above
    headers['Range'] = f"bytes={self._pos}-{end}"
    download_range = True

  if self._debug:
    print("downloading", self._url)

  t1 = time.time()
  timeout = Timeout(connect=50.0, read=500.0)
  response = self._http_client.request('GET', self._url, timeout=timeout, preload_content=False, headers=headers)
  ret = response.data

  if self._debug:
    t2 = time.time()
    # only report requests that took noticeably long
    if t2 - t1 > 0.1:
      print(f"get {self._url} {headers!r} {t2 - t1:.3f} slow")

  response_code = response.status
  if response_code == 416:  # Requested Range Not Satisfiable
    raise URLFileException(f"Error, range out of bounds {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
  if download_range and response_code != 206:  # Partial Content
    raise URLFileException(f"Error, requested range but got unexpected response {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
  if (not download_range) and response_code != 200:  # OK
    raise URLFileException(f"Error {response_code} {headers} ({self._url}): {repr(ret)[:500]}")

  self._pos += len(ret)
  return ret

Expand Down

0 comments on commit 70624ff

Please sign in to comment.