Skip to content

Commit

Permalink
Merge branch 'master' into improve-install-md
Browse files Browse the repository at this point in the history
  • Loading branch information
lyoshenka authored Jul 19, 2021
2 parents 35ec4ee + ac671a0 commit 3c89eca
Show file tree
Hide file tree
Showing 9 changed files with 118 additions and 6 deletions.
1 change: 1 addition & 0 deletions docker/Dockerfile.wallet_server
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ RUN apt-get update && \
wget \
tar unzip \
build-essential \
automake libtool \
pkg-config \
libleveldb-dev \
python3.7 \
Expand Down
1 change: 1 addition & 0 deletions docker/Dockerfile.web
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
RUN apt-get update && \
apt-get -y --no-install-recommends install \
wget \
automake libtool \
tar unzip \
build-essential \
pkg-config \
Expand Down
4 changes: 4 additions & 0 deletions lbry/extras/daemon/components.py
Original file line number Diff line number Diff line change
Expand Up @@ -481,6 +481,10 @@ async def _maintain_redirects(self):
log.info("external ip changed from %s to %s", self.external_ip, external_ip)
if external_ip:
self.external_ip = external_ip
dht_component = self.component_manager.get_component(DHT_COMPONENT)
if dht_component:
dht_node = dht_component.component
dht_node.protocol.external_ip = external_ip
# assert self.external_ip is not None # TODO: handle going/starting offline

if not self.upnp_redirects and self.upnp: # setup missing redirects
Expand Down
2 changes: 1 addition & 1 deletion lbry/extras/daemon/exchange_rate_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ class UPbitBTCFeed(MarketFeed):
params = {"markets": "BTC-LBC"}

def get_rate_from_response(self, json_response):
    """Parse a UPbit BTC-LBC ticker response into an exchange rate.

    Returns ``1 / trade_price`` from the single market entry.

    Raises:
        InvalidExchangeRateResponseError: if the API returned an error
            payload, or the response is not a one-element list containing
            a 'trade_price' field.
    """
    # UPbit signals failure with an "error" key instead of the usual
    # one-element market list, so reject that shape before indexing.
    if "error" in json_response or len(json_response) != 1 or 'trade_price' not in json_response[0]:
        raise InvalidExchangeRateResponseError(self.name, 'result not found')
    return 1.0 / float(json_response[0]['trade_price'])

Expand Down
3 changes: 2 additions & 1 deletion lbry/wallet/account.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,8 @@ async def get_addresses(self, only_usable: bool = False, **constraints) -> List[
return [r['address'] for r in records]

async def get_or_create_usable_address(self) -> str:
addresses = await self.get_addresses(only_usable=True, limit=10)
async with self.address_generator_lock:
addresses = await self.get_addresses(only_usable=True, limit=10)
if addresses:
return random.choice(addresses)
addresses = await self.ensure_address_gap()
Expand Down
22 changes: 21 additions & 1 deletion scripts/download_blob_from_peer.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,19 @@
"""A simple script that attempts to directly download a single blob.
To Do:
------
Currently `lbrynet blob get <hash>` does not work to download single blobs
which are not already present in the system. The function locks up and
never returns.
It only works for blobs that are in the `blobfiles` directory already.
This bug is reported in lbryio/lbry-sdk, issue #2070.
Parts of this script could be investigated and merged into
`lbry.extras.daemon.daemon.jsonrpc_blob_get`
in order to solve the issue above, so that single blobs can finally be
downloaded from the network (peers or reflector servers).
"""
import sys
import os
import asyncio
Expand Down Expand Up @@ -47,7 +63,11 @@ async def main(blob_hash: str, url: str):
print(f"deleted {blob_hash}")


if __name__ == "__main__": # usage: python download_blob_from_peer.py <blob_hash> [host url:port]
if __name__ == "__main__":
if len(sys.argv) < 2:
print("usage: download_blob_from_peer.py <blob_hash> [host_url:port]")
sys.exit(1)

url = 'reflector.lbry.com:5567'
if len(sys.argv) > 2:
url = sys.argv[2]
Expand Down
71 changes: 71 additions & 0 deletions scripts/troubleshoot_p2p_and_dht_webservice.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import asyncio
from aiohttp import web

from lbry.blob_exchange.serialization import BlobRequest, BlobResponse
from lbry.dht.constants import generate_id
from lbry.dht.node import Node
from lbry.dht.peer import make_kademlia_peer, PeerManager
from lbry.extras.daemon.storage import SQLiteStorage

# One shared event loop and a single DHT node, reused by every request handler.
loop = asyncio.get_event_loop()
# NOTE(review): the positional arguments appear to be (loop, peer_manager,
# node_id, udp port, internal udp port, peer/tcp port, external_ip) — confirm
# against lbry.dht.node.Node's signature before relying on the port numbers.
# Storage is an in-memory SQLite database since nothing needs to persist.
NODE = Node(
loop, PeerManager(loop), generate_id(), 60600, 60600, 3333, None,
storage=SQLiteStorage(None, ":memory:", loop, loop.time)
)


async def check_p2p(ip, port):
    """Probe a peer's TCP blob port by requesting a dummy blob hash.

    Returns the lbrycrd payment address from the peer's address response,
    or None if the TCP connection fails with an OSError.
    """
    stream = None
    try:
        reader, stream = await asyncio.open_connection(ip, port)
        request = BlobRequest.make_request_for_blob_hash('0' * 96)
        stream.write(request.serialize())
        raw = await reader.readuntil(b'}')
        response = BlobResponse.deserialize(raw)
        return response.get_address_response().lbrycrd_address
    except OSError:
        return None
    finally:
        if stream:
            stream.close()
            await stream.wait_closed()


async def check_dht(ip, port):
    """Ping *ip*:*port* over the DHT UDP protocol via the shared NODE."""
    target = make_kademlia_peer(None, ip, udp_port=int(port))
    rpc = NODE.protocol.get_rpc_peer(target)
    return await rpc.ping()


async def endpoint_p2p(request):
    """HTTP handler: report whether the caller's P2P (TCP) port is reachable."""
    port = request.match_info.get('p2p_port', "3333")
    address = None
    try:
        # Bound the probe at 3 seconds so a black-holed port can't hang us.
        address = await asyncio.wait_for(check_p2p(request.remote, port), 3)
    except asyncio.TimeoutError:
        pass
    return {"status": address is not None, "port": port, "payment_address": address}


async def endpoint_dht(request):
    """HTTP handler: report whether the caller's DHT (UDP) port answers a ping."""
    port = request.match_info.get('dht_port', "3333")
    pong = None
    try:
        pong = await check_dht(request.remote, port)
    except asyncio.TimeoutError:
        pass
    return {"status": pong == b'pong', "port": port}


async def endpoint_default(request):
    """HTTP handler for '/': run both the DHT and P2P probes for the caller."""
    dht_result = await endpoint_dht(request)
    p2p_result = await endpoint_p2p(request)
    return {"dht_status": dht_result, "p2p_status": p2p_result}


def as_json_response_wrapper(endpoint):
    """Wrap an async handler returning a dict so it responds with JSON."""
    async def wrapped(*args, **kwargs):
        payload = await endpoint(*args, **kwargs)
        return web.json_response(payload)
    return wrapped


# Route table: "/" runs both probes; /dht/<port> and /p2p/<port> run just one,
# each probing the requester's own address (request.remote) at the given port.
app = web.Application()
app.add_routes([web.get('/', as_json_response_wrapper(endpoint_default)),
web.get('/dht/{dht_port}', as_json_response_wrapper(endpoint_dht)),
web.get('/p2p/{p2p_port}', as_json_response_wrapper(endpoint_p2p))])

if __name__ == '__main__':
# Start the DHT node listening on all interfaces, then serve HTTP on 60666.
loop.create_task(NODE.start_listening("0.0.0.0"))
web.run_app(app, port=60666)
7 changes: 4 additions & 3 deletions tests/unit/dht/test_node.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio
import time
import unittest
import typing
from lbry.testcase import AsyncioTestCase
Expand Down Expand Up @@ -92,11 +93,11 @@ async def test_ping_queue_discover(self):


class TestTemporarilyLosingConnection(AsyncioTestCase):
@unittest.SkipTest
TIMEOUT = None # not supported as it advances time
async def test_losing_connection(self):
async def wait_for(check_ok, insist, timeout=20):
start = loop.time()
while loop.time() - start < timeout:
start = time.time()
while time.time() - start < timeout:
if check_ok():
break
await asyncio.sleep(0)
Expand Down
13 changes: 13 additions & 0 deletions tests/unit/wallet/test_account.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import asyncio
from binascii import hexlify
from lbry.testcase import AsyncioTestCase
from lbry.wallet import Wallet, Ledger, Database, Headers, Account, SingleKey, HierarchicalDeterministic
Expand Down Expand Up @@ -37,6 +38,18 @@ async def test_generate_account(self):
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 6)

async def test_unused_address_on_account_creation_does_not_cause_a_race(self):
# Regression test: get_or_create_usable_address() appears to take
# address_generator_lock before querying, so it should not race with a
# concurrent ensure_address_gap() on a freshly created account.
account = Account.generate(self.ledger, Wallet(), 'lbryum')
# Mark every pre-generated address as used so the usable-address query
# comes up empty and generation is forced.
await account.ledger.db.db.executescript("update pubkey_address set used_times=10")
# Hold the generator lock so both tasks below start while it is taken.
await account.receiving.address_generator_lock.acquire()
delayed1 = asyncio.ensure_future(account.receiving.ensure_address_gap())
delayed = asyncio.ensure_future(account.receiving.get_or_create_usable_address())
await asyncio.sleep(0)  # give both tasks a chance to run up to the lock
# wallet being created and queried at the same time
account.receiving.address_generator_lock.release()
await delayed1
await delayed

async def test_generate_keys_over_batch_threshold_saves_it_properly(self):
account = Account.generate(self.ledger, Wallet(), 'lbryum')
async with account.receiving.address_generator_lock:
Expand Down

0 comments on commit 3c89eca

Please sign in to comment.