author      missing    2022-05-23 11:56:34 +0100
committer   missing    2022-05-23 11:56:34 +0100
commit      00e30bb5962302ec0cafa20f42b211c294a6de4f (patch)
tree        6fbebe42ddeb9669ae8151693bca57645ccd3605
download    aur-00e30bb5962302ec0cafa20f42b211c294a6de4f.tar.gz
Initial commit
-rw-r--r--   .SRCINFO                  21
-rw-r--r--   PKGBUILD                  52
-rw-r--r--   lbry-rocksdb.patch        22
-rw-r--r--   lbry-sdk-0.108.0.patch   918
-rw-r--r--   lbry-venv.patch           56
5 files changed, 1069 insertions, 0 deletions
diff --git a/.SRCINFO b/.SRCINFO
new file mode 100644
index 000000000000..d924be22cfaf
--- /dev/null
+++ b/.SRCINFO
@@ -0,0 +1,21 @@
+pkgbase = lbry-sdk
+ pkgdesc = The LBRY SDK for building decentralized, censorship resistant, monetized, digital content apps.
+ pkgver = 0.108.0
+ pkgrel = 1
+ url = https://github.com/lbryio/lbry-sdk
+ arch = x86_64
+ license = MIT
+ makedepends = git
+ makedepends = cmake
+ makedepends = openssl
+ makedepends = python-pip
+ makedepends = python-protobuf
+ provides = lbrynet
+ conflicts = lbrynet
+ options = !buildflags
+ source = lbry-sdk-0.108.0.tar.gz::https://github.com/lbryio/lbry-sdk/archive/refs/tags/v0.108.0.tar.gz
+ source = git+https://github.com/lbryio/lbry-rocksdb
+ sha256sums = 410f92741d87f2ca13df3748d2e593e8dc70f7b5a1420fe6f4f1013b4b03f35d
+ sha256sums = SKIP
+
+pkgname = lbry-sdk
diff --git a/PKGBUILD b/PKGBUILD
new file mode 100644
index 000000000000..d2187fd31893
--- /dev/null
+++ b/PKGBUILD
@@ -0,0 +1,52 @@
+# Maintainer: missing <liri_bucketful at slmail dot me>
+pkgname=lbry-sdk
+pkgver=0.108.0
+pkgrel=1
+pkgdesc="The LBRY SDK for building decentralized, censorship resistant, monetized, digital content apps."
+arch=('x86_64')
+url="https://github.com/lbryio/lbry-sdk"
+license=('MIT')
+makedepends=('git' 'cmake' 'openssl' 'python-pip' 'python-protobuf')
+provides=('lbrynet')
+conflicts=('lbrynet')
+options=('!buildflags')
+source=("${pkgname}-${pkgver}.tar.gz::https://github.com/lbryio/lbry-sdk/archive/refs/tags/v${pkgver}.tar.gz"
+ 'git+https://github.com/lbryio/lbry-rocksdb')
+sha256sums=('410f92741d87f2ca13df3748d2e593e8dc70f7b5a1420fe6f4f1013b4b03f35d'
+ 'SKIP')
+
+prepare() {
+ cp ../*.patch "$srcdir"
+
+ cd lbry-rocksdb
+ git submodule update --init --recursive
+ git pull --recurse-submodules
+}
+
+build() {
+ python -m venv lbry-venv
+ source lbry-venv/bin/activate
+
+ patch --strip=0 --input=lbry-rocksdb.patch
+ cd lbry-rocksdb
+ make
+ pip install -e .
+
+ cd ../
+ patch --strip=0 --input="lbry-sdk-${pkgver}.patch"
+ cd "lbry-sdk-${pkgver}"
+ make install
+
+ cd ..
+ patch --strip=0 --input=lbry-venv.patch
+
+ mkdir build
+ cd build
+ pip install pyinstaller
+ pyinstaller --onefile --name lbrynet "../lbry-sdk-${pkgver}/lbry/extras/cli.py"
+}
+
+package() {
+ install -Dm755 "${srcdir}/build/dist/lbrynet" "${pkgdir}/usr/bin/lbrynet"
+ install -Dm644 "${srcdir}/lbry-sdk-${pkgver}/LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
+}
diff --git a/lbry-rocksdb.patch b/lbry-rocksdb.patch
new file mode 100644
index 000000000000..16f280676b6e
--- /dev/null
+++ b/lbry-rocksdb.patch
@@ -0,0 +1,22 @@
+--- lbry-rocksdb/Makefile.old 2022-05-22 11:44:08.003333325 +0100
++++ lbry-rocksdb/Makefile 2022-05-22 11:45:08.903333331 +0100
+@@ -3,7 +3,7 @@
+ JOBS=8
+
+ src/rocksdb/librocksdb.a:
+- make \
++ make release \
+ -e EXTRA_CXXFLAGS="-fPIC" \
+ -e EXTRA_CFLAGS="-fPIC" \
+ -C src/rocksdb \
+--- lbry-rocksdb/src/rocksdb/trace_replay/trace_replay.cc.old 2022-05-22 1
++++ lbry-rocksdb/src/rocksdb/trace_replay/trace_replay.cc 2022-05-22 1
+@@ -317,7 +317,7 @@
+ cf_ids.reserve(multiget_size);
+ multiget_keys.reserve(multiget_size);
+ for (uint32_t i = 0; i < multiget_size; i++) {
+- uint32_t tmp_cfid;
++ uint32_t tmp_cfid = 0;
+ Slice tmp_key;
+ GetFixed32(&cfids_payload, &tmp_cfid);
+ GetLengthPrefixedSlice(&keys_payload, &tmp_key);
diff --git a/lbry-sdk-0.108.0.patch b/lbry-sdk-0.108.0.patch
new file mode 100644
index 000000000000..61c0066d30e0
--- /dev/null
+++ b/lbry-sdk-0.108.0.patch
@@ -0,0 +1,918 @@
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/blob/blob_file.py lbry-sdk-0.108.0/lbry/blob/blob_file.py
+--- lbry-sdk-0.108.0_orig/lbry/blob/blob_file.py 2022-05-06 21:35:23.283333242 +0200
++++ lbry-sdk-0.108.0/lbry/blob/blob_file.py 2022-05-06 20:11:21.756666643 +0200
+@@ -87,8 +87,8 @@
+ self.blob_completed_callback = blob_completed_callback
+ self.blob_directory = blob_directory
+ self.writers: typing.Dict[typing.Tuple[typing.Optional[str], typing.Optional[int]], HashBlobWriter] = {}
+- self.verified: asyncio.Event = asyncio.Event(loop=self.loop)
+- self.writing: asyncio.Event = asyncio.Event(loop=self.loop)
++ self.verified: asyncio.Event = asyncio.Event()
++ self.writing: asyncio.Event = asyncio.Event()
+ self.readers: typing.List[typing.BinaryIO] = []
+ self.added_on = added_on or time.time()
+ self.is_mine = is_mine
+Only in lbry-sdk-0.108.0/lbry/blob: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/blob_exchange/client.py lbry-sdk-0.108.0/lbry/blob_exchange/client.py
+--- lbry-sdk-0.108.0_orig/lbry/blob_exchange/client.py 2022-05-06 21:35:23.286666576 +0200
++++ lbry-sdk-0.108.0/lbry/blob_exchange/client.py 2022-05-06 23:35:47.749999588 +0200
+@@ -32,7 +32,7 @@
+ self.buf = b''
+
+ # this is here to handle the race when the downloader is closed right as response_fut gets a result
+- self.closed = asyncio.Event(loop=self.loop)
++ self.closed = asyncio.Event()
+
+ def data_received(self, data: bytes):
+ if self.connection_manager:
+@@ -111,7 +111,7 @@
+ self.transport.write(msg)
+ if self.connection_manager:
+ self.connection_manager.sent_data(f"{self.peer_address}:{self.peer_port}", len(msg))
+- response: BlobResponse = await asyncio.wait_for(self._response_fut, self.peer_timeout, loop=self.loop)
++ response: BlobResponse = await asyncio.wait_for(self._response_fut, self.peer_timeout)
+ availability_response = response.get_availability_response()
+ price_response = response.get_price_response()
+ blob_response = response.get_blob_response()
+@@ -151,7 +151,7 @@
+ f" timeout in {self.peer_timeout}"
+ log.debug(msg)
+ msg = f"downloaded {self.blob.blob_hash[:8]} from {self.peer_address}:{self.peer_port}"
+- await asyncio.wait_for(self.writer.finished, self.peer_timeout, loop=self.loop)
++ await asyncio.wait_for(self.writer.finished, self.peer_timeout)
+ # wait for the io to finish
+ await self.blob.verified.wait()
+ log.info("%s at %fMB/s", msg,
+@@ -244,7 +244,7 @@
+ try:
+ if not connected_protocol:
+ await asyncio.wait_for(loop.create_connection(lambda: protocol, address, tcp_port),
+- peer_connect_timeout, loop=loop)
++ peer_connect_timeout)
+ connected_protocol = protocol
+ if blob is None or blob.get_is_verified() or not blob.is_writeable():
+ # blob is None happens when we are just opening a connection
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/blob_exchange/downloader.py lbry-sdk-0.108.0/lbry/blob_exchange/downloader.py
+--- lbry-sdk-0.108.0_orig/lbry/blob_exchange/downloader.py 2022-05-06 21:35:23.286666576 +0200
++++ lbry-sdk-0.108.0/lbry/blob_exchange/downloader.py 2022-05-06 23:34:40.909999593 +0200
+@@ -30,7 +30,7 @@
+ self.failures: typing.Dict['KademliaPeer', int] = {}
+ self.connection_failures: typing.Set['KademliaPeer'] = set()
+ self.connections: typing.Dict['KademliaPeer', 'BlobExchangeClientProtocol'] = {}
+- self.is_running = asyncio.Event(loop=self.loop)
++ self.is_running = asyncio.Event()
+
+ def should_race_continue(self, blob: 'AbstractBlob'):
+ max_probes = self.config.max_connections_per_download * (1 if self.connections else 10)
+@@ -65,7 +65,7 @@
+
+ async def new_peer_or_finished(self):
+ active_tasks = list(self.active_connections.values()) + [asyncio.sleep(1)]
+- await asyncio.wait(active_tasks, loop=self.loop, return_when='FIRST_COMPLETED')
++ await asyncio.wait(active_tasks, return_when='FIRST_COMPLETED')
+
+ def cleanup_active(self):
+ if not self.active_connections and not self.connections:
+@@ -126,7 +126,7 @@
+
+ async def download_blob(loop, config: 'Config', blob_manager: 'BlobManager', dht_node: 'Node',
+ blob_hash: str) -> 'AbstractBlob':
+- search_queue = asyncio.Queue(loop=loop, maxsize=config.max_connections_per_download)
++ search_queue = asyncio.Queue(maxsize=config.max_connections_per_download)
+ search_queue.put_nowait(blob_hash)
+ peer_queue, accumulate_task = dht_node.accumulate_peers(search_queue)
+ fixed_peers = None if not config.fixed_peers else await get_kademlia_peers_from_hosts(config.fixed_peers)
+Only in lbry-sdk-0.108.0/lbry/blob_exchange: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/blob_exchange/server.py lbry-sdk-0.108.0/lbry/blob_exchange/server.py
+--- lbry-sdk-0.108.0_orig/lbry/blob_exchange/server.py 2022-05-06 21:35:23.286666576 +0200
++++ lbry-sdk-0.108.0/lbry/blob_exchange/server.py 2022-05-06 23:33:44.683332936 +0200
+@@ -25,19 +25,19 @@
+ self.idle_timeout = idle_timeout
+ self.transfer_timeout = transfer_timeout
+ self.server_task: typing.Optional[asyncio.Task] = None
+- self.started_listening = asyncio.Event(loop=self.loop)
++ self.started_listening = asyncio.Event()
+ self.buf = b''
+ self.transport: typing.Optional[asyncio.Transport] = None
+ self.lbrycrd_address = lbrycrd_address
+ self.peer_address_and_port: typing.Optional[str] = None
+- self.started_transfer = asyncio.Event(loop=self.loop)
+- self.transfer_finished = asyncio.Event(loop=self.loop)
++ self.started_transfer = asyncio.Event()
++ self.transfer_finished = asyncio.Event()
+ self.close_on_idle_task: typing.Optional[asyncio.Task] = None
+
+ async def close_on_idle(self):
+ while self.transport:
+ try:
+- await asyncio.wait_for(self.started_transfer.wait(), self.idle_timeout, loop=self.loop)
++ await asyncio.wait_for(self.started_transfer.wait(), self.idle_timeout)
+ except asyncio.TimeoutError:
+ log.debug("closing idle connection from %s", self.peer_address_and_port)
+ return self.close()
+@@ -101,7 +101,7 @@
+ log.debug("send %s to %s:%i", blob_hash, peer_address, peer_port)
+ self.started_transfer.set()
+ try:
+- sent = await asyncio.wait_for(blob.sendfile(self), self.transfer_timeout, loop=self.loop)
++ sent = await asyncio.wait_for(blob.sendfile(self), self.transfer_timeout)
+ if sent and sent > 0:
+ self.blob_manager.connection_manager.sent_data(self.peer_address_and_port, sent)
+ log.info("sent %s (%i bytes) to %s:%i", blob_hash, sent, peer_address, peer_port)
+@@ -157,7 +157,7 @@
+ self.loop = loop
+ self.blob_manager = blob_manager
+ self.server_task: typing.Optional[asyncio.Task] = None
+- self.started_listening = asyncio.Event(loop=self.loop)
++ self.started_listening = asyncio.Event()
+ self.lbrycrd_address = lbrycrd_address
+ self.idle_timeout = idle_timeout
+ self.transfer_timeout = transfer_timeout
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/connection_manager.py lbry-sdk-0.108.0/lbry/connection_manager.py
+--- lbry-sdk-0.108.0_orig/lbry/connection_manager.py 2022-05-06 21:35:23.286666576 +0200
++++ lbry-sdk-0.108.0/lbry/connection_manager.py 2022-05-06 20:24:34.796666665 +0200
+@@ -67,7 +67,7 @@
+
+ while True:
+ last = time.perf_counter()
+- await asyncio.sleep(0.1, loop=self.loop)
++ await asyncio.sleep(0.1)
+ self._status['incoming_bps'].clear()
+ self._status['outgoing_bps'].clear()
+ now = time.perf_counter()
+Only in lbry-sdk-0.108.0/lbry/crypto: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/dht/blob_announcer.py lbry-sdk-0.108.0/lbry/dht/blob_announcer.py
+--- lbry-sdk-0.108.0_orig/lbry/dht/blob_announcer.py 2022-05-06 21:35:23.286666576 +0200
++++ lbry-sdk-0.108.0/lbry/dht/blob_announcer.py 2022-05-06 23:36:07.353332914 +0200
+@@ -50,7 +50,7 @@
+ while batch_size:
+ if not self.node.joined.is_set():
+ await self.node.joined.wait()
+- await asyncio.sleep(60, loop=self.loop)
++ await asyncio.sleep(60)
+ if not self.node.protocol.routing_table.get_peers():
+ log.warning("No peers in DHT, announce round skipped")
+ continue
+@@ -59,7 +59,7 @@
+ log.debug("announcer task wake up, %d blobs to announce", len(self.announce_queue))
+ while len(self.announce_queue) > 0:
+ log.info("%i blobs to announce", len(self.announce_queue))
+- await asyncio.gather(*[self._run_consumer() for _ in range(batch_size)], loop=self.loop)
++ await asyncio.gather(*[self._run_consumer() for _ in range(batch_size)])
+ announced = list(filter(None, self.announced))
+ if announced:
+ await self.storage.update_last_announced_blobs(announced)
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/dht/node.py lbry-sdk-0.108.0/lbry/dht/node.py
+--- lbry-sdk-0.108.0_orig/lbry/dht/node.py 2022-05-06 21:35:23.286666576 +0200
++++ lbry-sdk-0.108.0/lbry/dht/node.py 2022-05-06 23:37:18.716666238 +0200
+@@ -37,7 +37,7 @@
+ self.protocol = KademliaProtocol(loop, peer_manager, node_id, external_ip, udp_port, peer_port, rpc_timeout,
+ split_buckets_under_index)
+ self.listening_port: asyncio.DatagramTransport = None
+- self.joined = asyncio.Event(loop=self.loop)
++ self.joined = asyncio.Event()
+ self._join_task: asyncio.Task = None
+ self._refresh_task: asyncio.Task = None
+ self._storage = storage
+@@ -115,7 +115,7 @@
+ for peer in peers:
+ log.debug("store to %s %s %s", peer.address, peer.udp_port, peer.tcp_port)
+ stored_to_tup = await asyncio.gather(
+- *(self.protocol.store_to_peer(hash_value, peer) for peer in peers), loop=self.loop
++ *(self.protocol.store_to_peer(hash_value, peer) for peer in peers)
+ )
+ stored_to = [node_id for node_id, contacted in stored_to_tup if contacted]
+ if stored_to:
+@@ -189,14 +189,14 @@
+ for address, udp_port in known_node_urls or []
+ ]))
+ except socket.gaierror:
+- await asyncio.sleep(30, loop=self.loop)
++ await asyncio.sleep(30)
+ continue
+
+ self.protocol.peer_manager.reset()
+ self.protocol.ping_queue.enqueue_maybe_ping(*seed_peers, delay=0.0)
+ await self.peer_search(self.protocol.node_id, shortlist=seed_peers, count=32)
+
+- await asyncio.sleep(1, loop=self.loop)
++ await asyncio.sleep(1)
+
+ def start(self, interface: str, known_node_urls: typing.Optional[typing.List[typing.Tuple[str, int]]] = None):
+ self._join_task = self.loop.create_task(self.join_network(interface, known_node_urls))
+@@ -279,7 +279,7 @@
+ def accumulate_peers(self, search_queue: asyncio.Queue,
+ peer_queue: typing.Optional[asyncio.Queue] = None
+ ) -> typing.Tuple[asyncio.Queue, asyncio.Task]:
+- queue = peer_queue or asyncio.Queue(loop=self.loop)
++ queue = peer_queue or asyncio.Queue()
+ return queue, self.loop.create_task(self._accumulate_peers_for_value(search_queue, queue))
+
+
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/dht/protocol/iterative_find.py lbry-sdk-0.108.0/lbry/dht/protocol/iterative_find.py
+--- lbry-sdk-0.108.0_orig/lbry/dht/protocol/iterative_find.py 2022-05-06 21:35:23.289999909 +0200
++++ lbry-sdk-0.108.0/lbry/dht/protocol/iterative_find.py 2022-05-06 23:37:21.083332906 +0200
+@@ -92,7 +92,7 @@
+ self.contacted: typing.Set['KademliaPeer'] = set()
+ self.distance = Distance(key)
+
+- self.iteration_queue = asyncio.Queue(loop=self.loop)
++ self.iteration_queue = asyncio.Queue()
+
+ self.running_probes: typing.Dict['KademliaPeer', asyncio.Task] = {}
+ self.iteration_count = 0
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/dht/protocol/protocol.py lbry-sdk-0.108.0/lbry/dht/protocol/protocol.py
+--- lbry-sdk-0.108.0_orig/lbry/dht/protocol/protocol.py 2022-05-06 21:35:23.289999909 +0200
++++ lbry-sdk-0.108.0/lbry/dht/protocol/protocol.py 2022-05-06 20:55:11.973333451 +0200
+@@ -249,7 +249,7 @@
+ del self._pending_contacts[peer]
+ self.maybe_ping(peer)
+ break
+- await asyncio.sleep(1, loop=self._loop)
++ await asyncio.sleep(1)
+
+ def start(self):
+ assert not self._running
+@@ -314,10 +314,10 @@
+ self.ping_queue = PingQueue(self.loop, self)
+ self.node_rpc = KademliaRPC(self, self.loop, self.peer_port)
+ self.rpc_timeout = rpc_timeout
+- self._split_lock = asyncio.Lock(loop=self.loop)
++ self._split_lock = asyncio.Lock()
+ self._to_remove: typing.Set['KademliaPeer'] = set()
+ self._to_add: typing.Set['KademliaPeer'] = set()
+- self._wakeup_routing_task = asyncio.Event(loop=self.loop)
++ self._wakeup_routing_task = asyncio.Event()
+ self.maintaing_routing_task: typing.Optional[asyncio.Task] = None
+
+ @functools.lru_cache(128)
+@@ -443,7 +443,7 @@
+ while self._to_add:
+ async with self._split_lock:
+ await self._add_peer(self._to_add.pop())
+- await asyncio.gather(self._wakeup_routing_task.wait(), asyncio.sleep(.1, loop=self.loop), loop=self.loop)
++ await asyncio.gather(self._wakeup_routing_task.wait(), asyncio.sleep(.1))
+ self._wakeup_routing_task.clear()
+
+ def _handle_rpc(self, sender_contact: 'KademliaPeer', message: RequestDatagram):
+Only in lbry-sdk-0.108.0/lbry/dht/protocol: __pycache__
+Only in lbry-sdk-0.108.0/lbry/dht: __pycache__
+Only in lbry-sdk-0.108.0/lbry/dht/serialization: __pycache__
+Only in lbry-sdk-0.108.0/lbry/error: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/extras/daemon/componentmanager.py lbry-sdk-0.108.0/lbry/extras/daemon/componentmanager.py
+--- lbry-sdk-0.108.0_orig/lbry/extras/daemon/componentmanager.py 2022-05-06 21:35:23.289999909 +0200
++++ lbry-sdk-0.108.0/lbry/extras/daemon/componentmanager.py 2022-05-06 19:52:24.049999905 +0200
+@@ -42,7 +42,7 @@
+ self.analytics_manager = analytics_manager
+ self.component_classes = {}
+ self.components = set()
+- self.started = asyncio.Event(loop=self.loop)
++ self.started = asyncio.Event()
+ self.peer_manager = peer_manager or PeerManager(asyncio.get_event_loop_policy().get_event_loop())
+
+ for component_name, component_class in self.default_component_classes.items():
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/extras/daemon/components.py lbry-sdk-0.108.0/lbry/extras/daemon/components.py
+--- lbry-sdk-0.108.0_orig/lbry/extras/daemon/components.py 2022-05-06 21:35:23.293333242 +0200
++++ lbry-sdk-0.108.0/lbry/extras/daemon/components.py 2022-05-06 23:39:01.399999564 +0200
+@@ -555,13 +555,13 @@
+ while True:
+ if now:
+ await self._maintain_redirects()
+- await asyncio.sleep(360, loop=self.component_manager.loop)
++ await asyncio.sleep(360)
+
+ async def _maintain_redirects(self):
+ # setup the gateway if necessary
+ if not self.upnp:
+ try:
+- self.upnp = await UPnP.discover(loop=self.component_manager.loop)
++ self.upnp = await UPnP.discover()
+ log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string)
+ except Exception as err:
+ if isinstance(err, asyncio.CancelledError): # TODO: remove when updated to 3.8
+@@ -677,7 +677,7 @@
+ log.info("Removing upnp redirects: %s", self.upnp_redirects)
+ await asyncio.wait([
+ self.upnp.delete_port_mapping(port, protocol) for protocol, port in self.upnp_redirects.items()
+- ], loop=self.component_manager.loop)
++ ])
+ if self._maintain_redirects_task and not self._maintain_redirects_task.done():
+ self._maintain_redirects_task.cancel()
+
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/extras/daemon/daemon.py lbry-sdk-0.108.0/lbry/extras/daemon/daemon.py
+--- lbry-sdk-0.108.0_orig/lbry/extras/daemon/daemon.py 2022-05-06 21:35:23.296666576 +0200
++++ lbry-sdk-0.108.0/lbry/extras/daemon/daemon.py 2022-05-06 23:38:30.453332902 +0200
+@@ -4972,7 +4972,7 @@
+ # TODO: use error from lbry.error
+ raise Exception("invalid blob hash")
+ peers = []
+- peer_q = asyncio.Queue(loop=self.component_manager.loop)
++ peer_q = asyncio.Queue()
+ await self.dht_node._peers_for_value_producer(blob_hash, peer_q)
+ while not peer_q.empty():
+ peers.extend(peer_q.get_nowait())
+Only in lbry-sdk-0.108.0/lbry/extras/daemon: __pycache__
+Only in lbry-sdk-0.108.0/lbry/extras: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/file/file_manager.py lbry-sdk-0.108.0/lbry/file/file_manager.py
+--- lbry-sdk-0.108.0_orig/lbry/file/file_manager.py 2022-05-06 21:35:23.299999909 +0200
++++ lbry-sdk-0.108.0/lbry/file/file_manager.py 2022-05-06 23:41:18.193332887 +0200
+@@ -240,8 +240,7 @@
+ claim_info = await self.storage.get_content_claim_for_torrent(stream.identifier)
+ stream.set_claim(claim_info, claim)
+ if save_file:
+- await asyncio.wait_for(stream.save_file(), timeout - (self.loop.time() - before_download),
+- loop=self.loop)
++ await asyncio.wait_for(stream.save_file(), timeout - (self.loop.time() - before_download))
+ return stream
+ except asyncio.TimeoutError:
+ error = DownloadDataTimeoutError(stream.sd_hash)
+Only in lbry-sdk-0.108.0/lbry/file: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/file/source_manager.py lbry-sdk-0.108.0/lbry/file/source_manager.py
+--- lbry-sdk-0.108.0_orig/lbry/file/source_manager.py 2022-05-06 21:35:23.299999909 +0200
++++ lbry-sdk-0.108.0/lbry/file/source_manager.py 2022-05-06 20:12:52.789999975 +0200
+@@ -54,7 +54,7 @@
+ self.storage = storage
+ self.analytics_manager = analytics_manager
+ self._sources: typing.Dict[str, ManagedDownloadSource] = {}
+- self.started = asyncio.Event(loop=self.loop)
++ self.started = asyncio.Event()
+
+ def add(self, source: ManagedDownloadSource):
+ self._sources[source.identifier] = source
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/file/source.py lbry-sdk-0.108.0/lbry/file/source.py
+--- lbry-sdk-0.108.0_orig/lbry/file/source.py 2022-05-06 21:35:23.299999909 +0200
++++ lbry-sdk-0.108.0/lbry/file/source.py 2022-05-06 20:12:22.839999974 +0200
+@@ -46,10 +46,10 @@
+ self._added_on = added_on
+ self.analytics_manager = analytics_manager
+
+- self.saving = asyncio.Event(loop=self.loop)
+- self.finished_writing = asyncio.Event(loop=self.loop)
+- self.started_writing = asyncio.Event(loop=self.loop)
+- self.finished_write_attempt = asyncio.Event(loop=self.loop)
++ self.saving = asyncio.Event()
++ self.finished_writing = asyncio.Event()
++ self.started_writing = asyncio.Event()
++ self.finished_write_attempt = asyncio.Event()
+
+ # @classmethod
+ # async def create(cls, loop: asyncio.AbstractEventLoop, config: 'Config', file_path: str,
+Only in lbry-sdk-0.108.0/lbry: __pycache__
+Only in lbry-sdk-0.108.0/lbry/schema: __pycache__
+Only in lbry-sdk-0.108.0/lbry/schema/types: __pycache__
+Only in lbry-sdk-0.108.0/lbry/schema/types/v1: __pycache__
+Only in lbry-sdk-0.108.0/lbry/schema/types/v2: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/stream/downloader.py lbry-sdk-0.108.0/lbry/stream/downloader.py
+--- lbry-sdk-0.108.0_orig/lbry/stream/downloader.py 2022-05-06 21:35:23.306666576 +0200
++++ lbry-sdk-0.108.0/lbry/stream/downloader.py 2022-05-06 23:32:15.459999609 +0200
+@@ -25,8 +25,8 @@
+ self.config = config
+ self.blob_manager = blob_manager
+ self.sd_hash = sd_hash
+- self.search_queue = asyncio.Queue(loop=loop) # blob hashes to feed into the iterative finder
+- self.peer_queue = asyncio.Queue(loop=loop) # new peers to try
++ self.search_queue = asyncio.Queue() # blob hashes to feed into the iterative finder
++ self.peer_queue = asyncio.Queue() # new peers to try
+ self.blob_downloader = BlobDownloader(self.loop, self.config, self.blob_manager, self.peer_queue)
+ self.descriptor: typing.Optional[StreamDescriptor] = descriptor
+ self.node: typing.Optional['Node'] = None
+@@ -70,7 +70,7 @@
+ now = self.loop.time()
+ sd_blob = await asyncio.wait_for(
+ self.blob_downloader.download_blob(self.sd_hash, connection_id),
+- self.config.blob_download_timeout, loop=self.loop
++ self.config.blob_download_timeout
+ )
+ log.info("downloaded sd blob %s", self.sd_hash)
+ self.time_to_descriptor = self.loop.time() - now
+@@ -108,7 +108,7 @@
+ raise ValueError(f"blob {blob_info.blob_hash} is not part of stream with sd hash {self.sd_hash}")
+ blob = await asyncio.wait_for(
+ self.blob_downloader.download_blob(blob_info.blob_hash, blob_info.length, connection_id),
+- self.config.blob_download_timeout * 10, loop=self.loop
++ self.config.blob_download_timeout * 10
+ )
+ return blob
+
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/stream/managed_stream.py lbry-sdk-0.108.0/lbry/stream/managed_stream.py
+--- lbry-sdk-0.108.0_orig/lbry/stream/managed_stream.py 2022-05-06 21:35:23.306666576 +0200
++++ lbry-sdk-0.108.0/lbry/stream/managed_stream.py 2022-05-06 23:31:42.899999609 +0200
+@@ -62,9 +62,9 @@
+ self.file_output_task: typing.Optional[asyncio.Task] = None
+ self.delayed_stop_task: typing.Optional[asyncio.Task] = None
+ self.streaming_responses: typing.List[typing.Tuple[Request, StreamResponse]] = []
+- self.fully_reflected = asyncio.Event(loop=self.loop)
+- self.streaming = asyncio.Event(loop=self.loop)
+- self._running = asyncio.Event(loop=self.loop)
++ self.fully_reflected = asyncio.Event()
++ self.streaming = asyncio.Event()
++ self._running = asyncio.Event()
+
+ @property
+ def sd_hash(self) -> str:
+@@ -151,7 +151,7 @@
+ log.info("start downloader for stream (sd hash: %s)", self.sd_hash)
+ self._running.set()
+ try:
+- await asyncio.wait_for(self.downloader.start(), timeout, loop=self.loop)
++ await asyncio.wait_for(self.downloader.start(), timeout)
+ except asyncio.TimeoutError:
+ self._running.clear()
+ raise DownloadSDTimeoutError(self.sd_hash)
+@@ -311,7 +311,7 @@
+ await self.update_status(ManagedStream.STATUS_RUNNING)
+ self.file_output_task = self.loop.create_task(self._save_file(self.full_path))
+ try:
+- await asyncio.wait_for(self.started_writing.wait(), self.config.download_timeout, loop=self.loop)
++ await asyncio.wait_for(self.started_writing.wait(), self.config.download_timeout)
+ except asyncio.TimeoutError:
+ log.warning("timeout starting to write data for lbry://%s#%s", self.claim_name, self.claim_id)
+ self.stop_tasks()
+@@ -391,7 +391,7 @@
+ self.sd_hash[:6])
+ await self.stop()
+ return
+- await asyncio.sleep(1, loop=self.loop)
++ await asyncio.sleep(1)
+
+ def _prepare_range_response_headers(self, get_range: str) -> typing.Tuple[typing.Dict[str, str], int, int, int]:
+ if '=' in get_range:
+Only in lbry-sdk-0.108.0/lbry/stream: __pycache__
+Only in lbry-sdk-0.108.0/lbry/stream/reflector: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/stream/reflector/server.py lbry-sdk-0.108.0/lbry/stream/reflector/server.py
+--- lbry-sdk-0.108.0_orig/lbry/stream/reflector/server.py 2022-05-06 21:35:23.306666576 +0200
++++ lbry-sdk-0.108.0/lbry/stream/reflector/server.py 2022-05-06 23:32:40.216666274 +0200
+@@ -21,7 +21,7 @@
+ self.loop = asyncio.get_event_loop()
+ self.blob_manager = blob_manager
+ self.server_task: asyncio.Task = None
+- self.started_listening = asyncio.Event(loop=self.loop)
++ self.started_listening = asyncio.Event()
+ self.buf = b''
+ self.transport: asyncio.StreamWriter = None
+ self.writer: typing.Optional['HashBlobWriter'] = None
+@@ -29,9 +29,9 @@
+ self.descriptor: typing.Optional['StreamDescriptor'] = None
+ self.sd_blob: typing.Optional['BlobFile'] = None
+ self.received = []
+- self.incoming = incoming_event or asyncio.Event(loop=self.loop)
+- self.not_incoming = not_incoming_event or asyncio.Event(loop=self.loop)
+- self.stop_event = stop_event or asyncio.Event(loop=self.loop)
++ self.incoming = incoming_event or asyncio.Event()
++ self.not_incoming = not_incoming_event or asyncio.Event()
++ self.stop_event = stop_event or asyncio.Event()
+ self.chunk_size = response_chunk_size
+ self.wait_for_stop_task: typing.Optional[asyncio.Task] = None
+ self.partial_event = partial_event
+@@ -94,7 +94,7 @@
+ self.incoming.set()
+ self.send_response({"send_sd_blob": True})
+ try:
+- await asyncio.wait_for(self.sd_blob.verified.wait(), 30, loop=self.loop)
++ await asyncio.wait_for(self.sd_blob.verified.wait(), 30)
+ self.descriptor = await StreamDescriptor.from_stream_descriptor_blob(
+ self.loop, self.blob_manager.blob_dir, self.sd_blob
+ )
+@@ -140,7 +140,7 @@
+ self.incoming.set()
+ self.send_response({"send_blob": True})
+ try:
+- await asyncio.wait_for(blob.verified.wait(), 30, loop=self.loop)
++ await asyncio.wait_for(blob.verified.wait(), 30)
+ self.send_response({"received_blob": True})
+ except asyncio.TimeoutError:
+ self.send_response({"received_blob": False})
+@@ -162,10 +162,10 @@
+ self.loop = asyncio.get_event_loop()
+ self.blob_manager = blob_manager
+ self.server_task: typing.Optional[asyncio.Task] = None
+- self.started_listening = asyncio.Event(loop=self.loop)
+- self.stopped_listening = asyncio.Event(loop=self.loop)
+- self.incoming_event = incoming_event or asyncio.Event(loop=self.loop)
+- self.not_incoming_event = not_incoming_event or asyncio.Event(loop=self.loop)
++ self.started_listening = asyncio.Event()
++ self.stopped_listening = asyncio.Event()
++ self.incoming_event = incoming_event or asyncio.Event()
++ self.not_incoming_event = not_incoming_event or asyncio.Event()
+ self.response_chunk_size = response_chunk_size
+ self.stop_event = stop_event
+ self.partial_needs = partial_needs # for testing cases where it doesn't know what it wants
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/stream/stream_manager.py lbry-sdk-0.108.0/lbry/stream/stream_manager.py
+--- lbry-sdk-0.108.0_orig/lbry/stream/stream_manager.py 2022-05-06 21:35:23.306666576 +0200
++++ lbry-sdk-0.108.0/lbry/stream/stream_manager.py 2022-05-06 23:33:22.773332939 +0200
+@@ -54,7 +54,7 @@
+ self.re_reflect_task: Optional[asyncio.Task] = None
+ self.update_stream_finished_futs: typing.List[asyncio.Future] = []
+ self.running_reflector_uploads: typing.Dict[str, asyncio.Task] = {}
+- self.started = asyncio.Event(loop=self.loop)
++ self.started = asyncio.Event()
+
+ @property
+ def streams(self):
+@@ -150,7 +150,7 @@
+ file_info['added_on'], file_info['fully_reflected']
+ )))
+ if add_stream_tasks:
+- await asyncio.gather(*add_stream_tasks, loop=self.loop)
++ await asyncio.gather(*add_stream_tasks)
+ log.info("Started stream manager with %i files", len(self._sources))
+ if not self.node:
+ log.info("no DHT node given, resuming downloads trusting that we can contact reflector")
+@@ -158,8 +158,7 @@
+ log.info("Resuming saving %i files", len(to_resume_saving))
+ self.resume_saving_task = asyncio.ensure_future(asyncio.gather(
+ *(self._sources[sd_hash].save_file(file_name, download_directory)
+- for (file_name, download_directory, sd_hash) in to_resume_saving),
+- loop=self.loop
++ for (file_name, download_directory, sd_hash) in to_resume_saving)
+ ))
+
+ async def reflect_streams(self):
+@@ -186,14 +185,14 @@
+ batch.append(self.reflect_stream(stream))
+ if len(batch) >= self.config.concurrent_reflector_uploads:
+ log.debug("waiting for batch of %s reflecting streams", len(batch))
+- await asyncio.gather(*batch, loop=self.loop)
++ await asyncio.gather(*batch)
+ log.debug("done processing %s streams", len(batch))
+ batch = []
+ if batch:
+ log.debug("waiting for batch of %s reflecting streams", len(batch))
+- await asyncio.gather(*batch, loop=self.loop)
++ await asyncio.gather(*batch)
+ log.debug("done processing %s streams", len(batch))
+- await asyncio.sleep(300, loop=self.loop)
++ await asyncio.sleep(300)
+
+ async def start(self):
+ await super().start()
+Only in lbry-sdk-0.108.0/lbry/torrent: __pycache__
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/torrent/session.py lbry-sdk-0.108.0/lbry/torrent/session.py
+--- lbry-sdk-0.108.0_orig/lbry/torrent/session.py 2022-05-06 21:35:23.306666576 +0200
++++ lbry-sdk-0.108.0/lbry/torrent/session.py 2022-05-06 20:24:52.156666666 +0200
+@@ -56,9 +56,9 @@
+ self._loop = loop
+ self._executor = executor
+ self._handle: libtorrent.torrent_handle = handle
+- self.started = asyncio.Event(loop=loop)
+- self.finished = asyncio.Event(loop=loop)
+- self.metadata_completed = asyncio.Event(loop=loop)
++ self.started = asyncio.Event()
++ self.finished = asyncio.Event()
++ self.metadata_completed = asyncio.Event()
+ self.size = 0
+ self.total_wanted_done = 0
+ self.name = ''
+@@ -121,7 +121,7 @@
+ self._show_status()
+ if self.finished.is_set():
+ break
+- await asyncio.sleep(0.1, loop=self._loop)
++ await asyncio.sleep(0.1)
+
+ async def pause(self):
+ await self._loop.run_in_executor(
+@@ -186,7 +186,7 @@
+ await self._loop.run_in_executor(
+ self._executor, self._pop_alerts
+ )
+- await asyncio.sleep(1, loop=self._loop)
++ await asyncio.sleep(1)
+
+ async def pause(self):
+ await self._loop.run_in_executor(
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/torrent/torrent.py lbry-sdk-0.108.0/lbry/torrent/torrent.py
+--- lbry-sdk-0.108.0_orig/lbry/torrent/torrent.py 2022-05-06 21:35:23.306666576 +0200
++++ lbry-sdk-0.108.0/lbry/torrent/torrent.py 2022-05-06 20:25:13.123333331 +0200
+@@ -36,7 +36,7 @@
+ def __init__(self, loop, handle):
+ self._loop = loop
+ self._handle = handle
+- self.finished = asyncio.Event(loop=loop)
++ self.finished = asyncio.Event()
+
+ def _threaded_update_status(self):
+ status = self._handle.status()
+@@ -58,7 +58,7 @@
+ log.info("finished downloading torrent!")
+ await self.pause()
+ break
+- await asyncio.sleep(1, loop=self._loop)
++ await asyncio.sleep(1)
+
+ async def pause(self):
+ log.info("pause torrent")
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/lbry/utils.py lbry-sdk-0.108.0/lbry/utils.py
+--- lbry-sdk-0.108.0_orig/lbry/utils.py 2022-05-06 21:35:23.309999909 +0200
++++ lbry-sdk-0.108.0/lbry/utils.py 2022-05-06 19:55:12.329999921 +0200
+@@ -456,7 +456,7 @@
+
+ class LockWithMetrics(asyncio.Lock):
+ def __init__(self, acquire_metric, held_time_metric, loop=None):
+- super().__init__(loop=loop)
++ super().__init__()
+ self._acquire_metric = acquire_metric
+ self._lock_held_time_metric = held_time_metric
+ self._lock_acquired_time = None
+Only in lbry-sdk-0.108.0/lbry/wallet: __pycache__
+Only in lbry-sdk-0.108.0/lbry/wallet/rpc: __pycache__
+Only in lbry-sdk-0.108.0/lbry/wallet/words: __pycache__
+Only in lbry-sdk: lbry.egg-info
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/setup.py lbry-sdk-0.108.0/setup.py
+--- lbry-sdk-0.108.0_orig/setup.py 2022-05-06 21:35:23.316666576 +0200
++++ lbry-sdk-0.108.0/setup.py 2022-05-06 19:05:24.523333260 +0200
+@@ -10,7 +10,7 @@
+
+ ROCKSDB = []
+ if sys.platform.startswith('linux') or sys.platform.startswith('darwin'):
+- ROCKSDB.append('lbry-rocksdb==0.8.2')
++ ROCKSDB.append('lbry-rocksdb')
+
+
+ setup(
+@@ -34,39 +34,39 @@
+ ],
+ },
+ install_requires=[
+- 'aiohttp==3.7.4',
+- 'aioupnp==0.0.18',
+- 'appdirs==1.4.3',
+- 'certifi>=2021.10.08',
+- 'colorama==0.3.7',
+- 'distro==1.4.0',
+- 'base58==1.0.0',
+- 'cffi==1.13.2',
+- 'cryptography==2.5',
+- 'protobuf==3.17.2',
+- 'msgpack==0.6.1',
+- 'prometheus_client==0.7.1',
+- 'ecdsa==0.13.3',
+- 'pyyaml==5.3.1',
+- 'docopt==0.6.2',
+- 'hachoir==3.1.2',
+- 'multidict==4.6.1',
+- 'coincurve==15.0.0',
+- 'pbkdf2==1.3',
+- 'attrs==18.2.0',
+- 'pylru==1.1.0',
+- 'elasticsearch==7.10.1',
+- 'grpcio==1.38.0',
+- 'filetype==1.0.9',
++ 'aiohttp',
++ 'aioupnp',
++ 'appdirs',
++ 'certifi',
++ 'colorama',
++ 'distro',
++ 'base58',
++ 'cffi',
++ 'cryptography',
++ 'protobuf',
++ 'msgpack',
++ 'prometheus_client',
++ 'ecdsa',
++ 'pyyaml',
++ 'docopt',
++ 'hachoir',
++ 'multidict',
++ 'coincurve',
++ 'pbkdf2',
++ 'attrs',
++ 'pylru',
++ 'elasticsearch',
++ 'grpcio',
++ 'filetype',
+ ] + ROCKSDB,
+ extras_require={
+ 'torrent': ['lbry-libtorrent'],
+ 'lint': [
+- 'pylint==2.10.0'
++ 'pylint'
+ ],
+ 'test': [
+ 'coverage',
+- 'jsonschema==4.4.0',
++ 'jsonschema',
+ ],
+ 'scribe': [
+ 'scribe @ git+https://github.com/lbryio/scribe.git#311db529a03de7fce43ed8579f51ac23a1a884ea'
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/integration/datanetwork/test_file_commands.py lbry-sdk-0.108.0/tests/integration/datanetwork/test_file_commands.py
+--- lbry-sdk-0.108.0_orig/tests/integration/datanetwork/test_file_commands.py 2022-05-06 21:35:23.319999910 +0200
++++ lbry-sdk-0.108.0/tests/integration/datanetwork/test_file_commands.py 2022-05-06 20:20:50.919999997 +0200
+@@ -334,7 +334,7 @@
+ self.assertNotIn('error', resp)
+ self.assertTrue(os.path.isfile(path))
+ self.daemon.file_manager.stop()
+- await asyncio.sleep(0.01, loop=self.loop) # FIXME: this sleep should not be needed
++ await asyncio.sleep(0.01) # FIXME: this sleep should not be needed
+ self.assertFalse(os.path.isfile(path))
+
+ async def test_incomplete_downloads_retry(self):
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/integration/datanetwork/test_streaming.py lbry-sdk-0.108.0/tests/integration/datanetwork/test_streaming.py
+--- lbry-sdk-0.108.0_orig/tests/integration/datanetwork/test_streaming.py 2022-05-06 21:35:23.319999910 +0200
++++ lbry-sdk-0.108.0/tests/integration/datanetwork/test_streaming.py 2022-05-06 20:21:03.329999997 +0200
+@@ -414,6 +414,6 @@
+
+ # running with cache size 0 gets through without errors without
+ # this since the server doesn't stop immediately
+- await asyncio.sleep(1, loop=self.loop)
++ await asyncio.sleep(1)
+
+ await self._request_stream()
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/blob/test_blob_file.py lbry-sdk-0.108.0/tests/unit/blob/test_blob_file.py
+--- lbry-sdk-0.108.0_orig/tests/unit/blob/test_blob_file.py 2022-05-06 21:35:23.323333243 +0200
++++ lbry-sdk-0.108.0/tests/unit/blob/test_blob_file.py 2022-05-06 20:20:29.226666663 +0200
+@@ -36,7 +36,7 @@
+ writer.write(self.blob_bytes)
+ await blob.verified.wait()
+ self.assertTrue(blob.get_is_verified())
+- await asyncio.sleep(0, loop=self.loop) # wait for the db save task
++ await asyncio.sleep(0) # wait for the db save task
+ return blob
+
+ async def _test_close_writers_on_finished(self, blob_class=AbstractBlob, blob_directory=None):
+@@ -48,7 +48,7 @@
+ with self.assertRaises(InvalidDataError):
+ writers[1].write(self.blob_bytes * 2)
+ await writers[1].finished
+- await asyncio.sleep(0, loop=self.loop)
++ await asyncio.sleep(0)
+ self.assertEqual(4, len(blob.writers))
+
+ # write the blob
+@@ -208,7 +208,7 @@
+ async def read_blob_buffer():
+ with reader as read_handle:
+ self.assertEqual(1, len(blob.readers))
+- await asyncio.sleep(2, loop=self.loop)
++ await asyncio.sleep(2)
+ self.assertEqual(0, len(blob.readers))
+ return read_handle.read()
+
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/blob_exchange/test_transfer_blob.py lbry-sdk-0.108.0/tests/unit/blob_exchange/test_transfer_blob.py
+--- lbry-sdk-0.108.0_orig/tests/unit/blob_exchange/test_transfer_blob.py 2022-05-06 21:35:23.323333243 +0200
++++ lbry-sdk-0.108.0/tests/unit/blob_exchange/test_transfer_blob.py 2022-05-06 23:30:31.823332947 +0200
+@@ -182,7 +182,7 @@
+ writer.write(mock_blob_bytes)
+ return self.loop.create_task(_inner())
+
+- await asyncio.gather(write_task(writer1), write_task(writer2), loop=self.loop)
++ await asyncio.gather(write_task(writer1), write_task(writer2))
+
+ self.assertDictEqual({1: mock_blob_bytes, 2: mock_blob_bytes}, results)
+ self.assertEqual(1, write_called_count)
+@@ -268,7 +268,7 @@
+ client_blob.delete()
+
+ # wait for less than the idle timeout
+- await asyncio.sleep(0.5, loop=self.loop)
++ await asyncio.sleep(0.5)
+
+ # download the blob again
+ downloaded, protocol2 = await request_blob(self.loop, client_blob, self.server_from_client.address,
+@@ -282,10 +282,10 @@
+ client_blob.delete()
+
+ # check that the connection times out from the server side
+- await asyncio.sleep(0.9, loop=self.loop)
++ await asyncio.sleep(0.9)
+ self.assertFalse(protocol.transport.is_closing())
+ self.assertIsNotNone(protocol.transport._sock)
+- await asyncio.sleep(0.1, loop=self.loop)
++ await asyncio.sleep(0.1)
+ self.assertIsNone(protocol.transport)
+
+ def test_max_request_size(self):
+@@ -321,7 +321,7 @@
+ server_blob = self.server_blob_manager.get_blob(blob_hash)
+
+ async def sendfile(writer):
+- await asyncio.sleep(2, loop=self.loop)
++ await asyncio.sleep(2)
+ return 0
+
+ server_blob.sendfile = sendfile
+@@ -345,7 +345,7 @@
+ def _mock_accumulate_peers(q1, q2=None):
+ async def _task():
+ pass
+- q2 = q2 or asyncio.Queue(loop=self.loop)
++ q2 = q2 or asyncio.Queue()
+ return q2, self.loop.create_task(_task())
+
+ mock_node.accumulate_peers = _mock_accumulate_peers
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/core/test_utils.py lbry-sdk-0.108.0/tests/unit/core/test_utils.py
+--- lbry-sdk-0.108.0_orig/tests/unit/core/test_utils.py 2022-05-06 21:35:23.323333243 +0200
++++ lbry-sdk-0.108.0/tests/unit/core/test_utils.py 2022-05-06 23:30:02.809999615 +0200
+@@ -72,14 +72,14 @@
+ @utils.cache_concurrent
+ async def foo(self, arg1, arg2=None, delay=1):
+ self.called.append((arg1, arg2, delay))
+- await asyncio.sleep(delay, loop=self.loop)
++ await asyncio.sleep(delay)
+ self.counter += 1
+ self.finished.append((arg1, arg2, delay))
+ return object()
+
+ async def test_gather_duplicates(self):
+ result = await asyncio.gather(
+- self.loop.create_task(self.foo(1)), self.loop.create_task(self.foo(1)), loop=self.loop
++ self.loop.create_task(self.foo(1)), self.loop.create_task(self.foo(1))
+ )
+ self.assertEqual(1, len(self.called))
+ self.assertEqual(1, len(self.finished))
+@@ -93,7 +93,7 @@
+
+ with self.assertRaises(asyncio.CancelledError):
+ await asyncio.gather(
+- t1, self.loop.create_task(self.foo(1)), loop=self.loop
++ t1, self.loop.create_task(self.foo(1))
+ )
+ self.assertEqual(1, len(self.called))
+ self.assertEqual(0, len(self.finished))
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/dht/test_blob_announcer.py lbry-sdk-0.108.0/tests/unit/dht/test_blob_announcer.py
+--- lbry-sdk-0.108.0_orig/tests/unit/dht/test_blob_announcer.py 2022-05-06 21:35:23.323333243 +0200
++++ lbry-sdk-0.108.0/tests/unit/dht/test_blob_announcer.py 2022-05-06 23:31:06.076666282 +0200
+@@ -128,7 +128,7 @@
+ await self.chain_peer(constants.generate_id(current + 4), '1.2.3.13')
+ last = await self.chain_peer(constants.generate_id(current + 5), '1.2.3.14')
+
+- search_q, peer_q = asyncio.Queue(loop=self.loop), asyncio.Queue(loop=self.loop)
++ search_q, peer_q = asyncio.Queue(), asyncio.Queue()
+ search_q.put_nowait(blob1)
+
+ _, task = last.accumulate_peers(search_q, peer_q)
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/stream/test_managed_stream.py lbry-sdk-0.108.0/tests/unit/stream/test_managed_stream.py
+--- lbry-sdk-0.108.0_orig/tests/unit/stream/test_managed_stream.py 2022-05-06 21:35:23.326666576 +0200
++++ lbry-sdk-0.108.0/tests/unit/stream/test_managed_stream.py 2022-05-06 20:19:20.383333322 +0200
+@@ -96,9 +96,9 @@
+ await self._test_transfer_stream(10, stop_when_done=False)
+ self.assertEqual(self.stream.status, "finished")
+ self.assertTrue(self.stream._running.is_set())
+- await asyncio.sleep(0.5, loop=self.loop)
++ await asyncio.sleep(0.5)
+ self.assertTrue(self.stream._running.is_set())
+- await asyncio.sleep(2, loop=self.loop)
++ await asyncio.sleep(2)
+ self.assertEqual(self.stream.status, "finished")
+ self.assertFalse(self.stream._running.is_set())
+
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/stream/test_reflector.py lbry-sdk-0.108.0/tests/unit/stream/test_reflector.py
+--- lbry-sdk-0.108.0_orig/tests/unit/stream/test_reflector.py 2022-05-06 21:35:23.326666576 +0200
++++ lbry-sdk-0.108.0/tests/unit/stream/test_reflector.py 2022-05-06 23:29:24.469999627 +0200
+@@ -86,13 +86,13 @@
+ self.assertListEqual(sent, [])
+
+ async def test_reflect_stream(self):
+- return await asyncio.wait_for(self._test_reflect_stream(response_chunk_size=50), 3, loop=self.loop)
++ return await asyncio.wait_for(self._test_reflect_stream(response_chunk_size=50), 3)
+
+ async def test_reflect_stream_but_reflector_changes_its_mind(self):
+- return await asyncio.wait_for(self._test_reflect_stream(partial_needs=True), 3, loop=self.loop)
++ return await asyncio.wait_for(self._test_reflect_stream(partial_needs=True), 3)
+
+ async def test_reflect_stream_small_response_chunks(self):
+- return await asyncio.wait_for(self._test_reflect_stream(response_chunk_size=30), 3, loop=self.loop)
++ return await asyncio.wait_for(self._test_reflect_stream(response_chunk_size=30), 3)
+
+ async def test_announces(self):
+ to_announce = await self.storage.get_blobs_to_announce()
+diff --exclude .git --unified --recursive --text --color lbry-sdk-0.108.0_orig/tests/unit/stream/test_stream_manager.py lbry-sdk-0.108.0/tests/unit/stream/test_stream_manager.py
+--- lbry-sdk-0.108.0_orig/tests/unit/stream/test_stream_manager.py 2022-05-06 21:35:23.326666576 +0200
++++ lbry-sdk-0.108.0/tests/unit/stream/test_stream_manager.py 2022-05-06 20:18:33.609999987 +0200
+@@ -174,7 +174,7 @@
+ await self.file_manager.download_from_uri(self.uri, self.exchange_rate_manager)
+ else:
+ await self.file_manager.download_from_uri(self.uri, self.exchange_rate_manager)
+- await asyncio.sleep(0, loop=self.loop)
++ await asyncio.sleep(0)
+ self.assertTrue(checked_analytics_event)
+
+ async def test_time_to_first_bytes(self):
+@@ -317,7 +317,7 @@
+ stream.downloader.node = self.stream_manager.node
+ await stream.save_file()
+ await stream.finished_writing.wait()
+- await asyncio.sleep(0, loop=self.loop)
++ await asyncio.sleep(0)
+ self.assertTrue(stream.finished)
+ self.assertFalse(stream.running)
+ self.assertTrue(os.path.isfile(os.path.join(self.client_dir, "test_file")))
+@@ -355,7 +355,7 @@
+
+ self.stream_manager.analytics_manager._post = check_post
+ await self._test_download_error_on_start(expected_error, timeout)
+- await asyncio.sleep(0, loop=self.loop)
++ await asyncio.sleep(0)
+ self.assertListEqual([expected_error.__name__], received)
+
+ async def test_insufficient_funds(self):
+@@ -448,7 +448,7 @@
+ self.assertDictEqual(self.stream_manager.streams, {})
+ stream = await self.file_manager.download_from_uri(self.uri, self.exchange_rate_manager)
+ await stream.finished_writing.wait()
+- await asyncio.sleep(0, loop=self.loop)
++ await asyncio.sleep(0)
+ self.stream_manager.stop()
+ self.client_blob_manager.stop()
+ # partial removal, only sd blob is missing.
diff --git a/lbry-venv.patch b/lbry-venv.patch
new file mode 100644
index 000000000000..978080e9d76c
--- /dev/null
+++ b/lbry-venv.patch
@@ -0,0 +1,56 @@
+diff --exclude __pycache__ --unified --recursive --text --color lbry-venv/lib/python3.10/site-packages/aioupnp/gateway.py lbry-venv_new/lib/python3.10/site-packages/aioupnp/gateway.py
+--- lbry-venv/lib/python3.10/site-packages/aioupnp/gateway.py 2022-05-07 23:04:48.359999989 +0200
++++ lbry-venv_new/lib/python3.10/site-packages/aioupnp/gateway.py 2022-05-06 21:21:09.263333000 +0200
+@@ -224,7 +224,7 @@
+ try:
+ return await asyncio.wait_for(loop.create_task(
+ cls._discover_gateway(lan_address, gateway_address, timeout, loop)
+- ), timeout, loop=loop)
++ ), timeout)
+ except asyncio.TimeoutError:
+ raise UPnPError(f"M-SEARCH for {gateway_address}:1900 timed out")
+
+diff --exclude __pycache__ --unified --recursive --text --color lbry-venv/lib/python3.10/site-packages/aioupnp/protocols/scpd.py lbry-venv_new/lib/python3.10/site-packages/aioupnp/protocols/scpd.py
+--- lbry-venv/lib/python3.10/site-packages/aioupnp/protocols/scpd.py 2022-05-07 23:04:48.363333323 +0200
++++ lbry-venv_new/lib/python3.10/site-packages/aioupnp/protocols/scpd.py 2022-05-06 23:10:44.523333086 +0200
+@@ -141,7 +141,7 @@
+ assert isinstance(protocol, SCPDHTTPClientProtocol)
+
+ error = None
+- wait_task: typing.Awaitable[typing.Tuple[bytes, bytes, int, bytes]] = asyncio.wait_for(protocol.finished, 1.0, loop=loop)
++ wait_task: typing.Awaitable[typing.Tuple[bytes, bytes, int, bytes]] = asyncio.wait_for(protocol.finished, 1.0)
+ body = b''
+ raw_response = b''
+ try:
+@@ -182,7 +182,7 @@
+ assert isinstance(protocol, SCPDHTTPClientProtocol)
+
+ try:
+- wait_task: typing.Awaitable[typing.Tuple[bytes, bytes, int, bytes]] = asyncio.wait_for(finished, 1.0, loop=loop)
++ wait_task: typing.Awaitable[typing.Tuple[bytes, bytes, int, bytes]] = asyncio.wait_for(finished, 1.0)
+ raw_response, body, response_code, response_msg = await wait_task
+ except asyncio.TimeoutError:
+ return {}, b'', UPnPError("Timeout")
+diff --exclude __pycache__ --unified --recursive --text --color lbry-venv/lib/python3.10/site-packages/aioupnp/protocols/ssdp.py lbry-venv_new/lib/python3.10/site-packages/aioupnp/protocols/ssdp.py
+--- lbry-venv/lib/python3.10/site-packages/aioupnp/protocols/ssdp.py 2022-05-07 23:04:48.366666656 +0200
++++ lbry-venv_new/lib/python3.10/site-packages/aioupnp/protocols/ssdp.py 2022-05-06 23:26:52.969999640 +0200
+@@ -31,8 +31,8 @@
+ self.transport: Optional[DatagramTransport] = None
+ self._pending_searches: List[PendingSearch] = []
+ self.notifications: List[SSDPDatagram] = []
+- self.connected = asyncio.Event(loop=self.loop)
+- self.devices: 'asyncio.Queue[SSDPDatagram]' = asyncio.Queue(loop=self.loop)
++ self.connected = asyncio.Event()
++ self.devices: 'asyncio.Queue[SSDPDatagram]' = asyncio.Queue()
+
+ def connection_made(self, transport: asyncio.DatagramTransport) -> None: # type: ignore
+ super().connection_made(transport)
+@@ -98,7 +98,7 @@
+ async def m_search(self, address: str, timeout: float,
+ datagrams: List[Dict[str, typing.Union[str, int]]]) -> SSDPDatagram:
+ fut = self.send_m_searches(address, datagrams)
+- return await asyncio.wait_for(fut, timeout, loop=self.loop)
++ return await asyncio.wait_for(fut, timeout)
+
+ def datagram_received(self, data: bytes, addr: Tuple[str, int]) -> None: # type: ignore
+ if addr[0] == self.bind_address:
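Usage note: building and installing this package from an AUR snapshot of this repository follows the standard makepkg workflow. The clone URL below is an assumption based on the pkgbase (lbry-sdk) and the usual AUR layout, not something stated in this commit.

    # hedged sketch, assuming the package is published as 'lbry-sdk' on the AUR
    git clone https://aur.archlinux.org/lbry-sdk.git
    cd lbry-sdk
    makepkg -si   # -s resolves makedepends via pacman, -i installs the built lbrynet package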