run lint with pyupgrade at py39-plus

pacrob authored on 2025-01-25 15:31:51 -07:00; committed by Paul Robinson
parent 20580b9a4e
commit 8787613e91
44 changed files with 221 additions and 240 deletions
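
The hunks below are mechanical: the commit runs ``pyupgrade --py39-plus``, which rewrites the PEP 484-era aliases (``Tuple``, ``List``, ``Dict``, ``Set``, ``Type``) to the built-in generics allowed since Python 3.9 (PEP 585), and the ``typing`` imports of ABC-style generics such as ``Sequence`` and ``AsyncIterator`` move to ``collections.abc``. A minimal before/after sketch, with illustrative names not taken from the diff:

# Before (Python 3.8 style):
#   from typing import Dict, List, Optional, Sequence, Tuple
#   def addrs_by_peer(peers: Sequence[str]) -> Dict[str, List[str]]: ...
#   def head(pair: Tuple[int, int]) -> Optional[int]: ...

# After pyupgrade --py39-plus: built-in generics, collections.abc imports.
from collections.abc import Sequence
from typing import Optional

def addrs_by_peer(peers: Sequence[str]) -> dict[str, list[str]]:
    # PEP 585: built-in containers are subscriptable at runtime on 3.9+.
    return {peer: [] for peer in peers}

def head(pair: tuple[int, int]) -> Optional[int]:
    # Optional is left alone at this target; "int | None" needs Python 3.10.
    return pair[0] if pair else None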

View File

@ -2,9 +2,6 @@ from dataclasses import (
dataclass,
)
import hmac
from typing import (
Tuple,
)
from Crypto.Cipher import (
AES,
@ -66,7 +63,7 @@ class MacAndCipher:
def initialize_pair(
cipher_type: str, hash_type: str, secret: bytes
) -> Tuple[EncryptionParameters, EncryptionParameters]:
) -> tuple[EncryptionParameters, EncryptionParameters]:
"""
Return a pair of ``Keys`` for use in securing a communications channel
with authenticated encryption derived from the ``secret`` and using the

View File

@ -1,6 +1,5 @@
from typing import (
Callable,
Tuple,
cast,
)
@ -22,7 +21,7 @@ SharedKeyGenerator = Callable[[bytes], bytes]
int_bytelen = util.int_bytelen
def create_ephemeral_key_pair(curve_type: str) -> Tuple[PublicKey, SharedKeyGenerator]:
def create_ephemeral_key_pair(curve_type: str) -> tuple[PublicKey, SharedKeyGenerator]:
"""Facilitates ECDH key exchange."""
if curve_type != "P-256":
raise NotImplementedError()

View File

@ -1,12 +1,13 @@
from collections.abc import (
AsyncIterator,
Sequence,
)
from contextlib import (
asynccontextmanager,
)
import logging
from typing import (
TYPE_CHECKING,
AsyncIterator,
List,
Sequence,
)
import multiaddr
@ -132,20 +133,20 @@ class BasicHost(IHost):
"""
return self.multiselect
def get_addrs(self) -> List[multiaddr.Multiaddr]:
def get_addrs(self) -> list[multiaddr.Multiaddr]:
"""
:return: all the multiaddr addresses this host is listening to
"""
# TODO: We don't need "/p2p/{peer_id}" postfix actually.
p2p_part = multiaddr.Multiaddr(f"/p2p/{self.get_id()!s}")
addrs: List[multiaddr.Multiaddr] = []
addrs: list[multiaddr.Multiaddr] = []
for transport in self._network.listeners.values():
for addr in transport.get_addrs():
addrs.append(addr.encapsulate(p2p_part))
return addrs
def get_connected_peers(self) -> List[ID]:
def get_connected_peers(self) -> list[ID]:
"""
:return: all the ids of peers this host is currently connected to
"""

View File

@ -2,11 +2,12 @@ from abc import (
ABC,
abstractmethod,
)
from collections.abc import (
Sequence,
)
from typing import (
Any,
AsyncContextManager,
List,
Sequence,
)
import multiaddr
@ -66,13 +67,13 @@ class IHost(ABC):
"""
@abstractmethod
def get_addrs(self) -> List[multiaddr.Multiaddr]:
def get_addrs(self) -> list[multiaddr.Multiaddr]:
"""
:return: all the multiaddr addresses this host is listening to
"""
@abstractmethod
def get_connected_peers(self) -> List[ID]:
def get_connected_peers(self) -> list[ID]:
"""
:return: all the ids of peers this host is currently connected to
"""

View File

@ -1,9 +1,6 @@
import logging
import secrets
import time
from typing import (
List,
)
import trio
@ -102,7 +99,7 @@ class PingService:
def __init__(self, host: IHost):
self._host = host
async def ping(self, peer_id: PeerID, ping_amt: int = 1) -> List[int]:
async def ping(self, peer_id: PeerID, ping_amt: int = 1) -> list[int]:
stream = await self._host.new_stream(peer_id, [ID])
try:

View File

@ -1,9 +1,6 @@
from abc import (
abstractmethod,
)
from typing import (
Tuple,
)
import trio
@ -27,5 +24,5 @@ class INetConn(Closer):
...
@abstractmethod
def get_streams(self) -> Tuple[INetStream, ...]:
def get_streams(self) -> tuple[INetStream, ...]:
...

View File

@ -1,7 +1,5 @@
from typing import (
TYPE_CHECKING,
Set,
Tuple,
)
import trio
@ -32,7 +30,7 @@ Reference: https://github.com/libp2p/go-libp2p-swarm/blob/04c86bbdafd390651cb2ee
class SwarmConn(INetConn):
muxed_conn: IMuxedConn
swarm: "Swarm"
streams: Set[NetStream]
streams: set[NetStream]
event_closed: trio.Event
def __init__(self, muxed_conn: IMuxedConn, swarm: "Swarm") -> None:
@ -104,7 +102,7 @@ class SwarmConn(INetConn):
muxed_stream = await self.muxed_conn.open_stream()
return await self._add_stream(muxed_stream)
def get_streams(self) -> Tuple[NetStream, ...]:
def get_streams(self) -> tuple[NetStream, ...]:
return tuple(self.streams)
def remove_stream(self, stream: NetStream) -> None:

View File

@ -2,10 +2,11 @@ from abc import (
ABC,
abstractmethod,
)
from collections.abc import (
Sequence,
)
from typing import (
TYPE_CHECKING,
Dict,
Sequence,
)
from multiaddr import (
@ -41,8 +42,8 @@ if TYPE_CHECKING:
class INetwork(ABC):
peerstore: IPeerStore
connections: Dict[ID, INetConn]
listeners: Dict[str, IListener]
connections: dict[ID, INetConn]
listeners: dict[str, IListener]
@abstractmethod
def get_peer_id(self) -> ID:

View File

@ -1,7 +1,5 @@
import logging
from typing import (
Dict,
List,
Optional,
)
@ -88,13 +86,13 @@ class Swarm(Service, INetworkService):
transport: ITransport
# TODO: Connection and `peer_id` are 1-1 mapping in our implementation,
# whereas in Go one `peer_id` may point to multiple connections.
connections: Dict[ID, INetConn]
listeners: Dict[str, IListener]
connections: dict[ID, INetConn]
listeners: dict[str, IListener]
common_stream_handler: StreamHandlerFn
listener_nursery: Optional[trio.Nursery]
event_listener_nursery_created: trio.Event
notifees: List[INotifee]
notifees: list[INotifee]
def __init__(
self,
@ -161,7 +159,7 @@ class Swarm(Service, INetworkService):
if not addrs:
raise SwarmException(f"No known addresses to peer {peer_id}")
exceptions: List[SwarmException] = []
exceptions: list[SwarmException] = []
# Try all known addresses
for multiaddr in addrs:

View File

@ -2,8 +2,7 @@ from abc import (
ABC,
abstractmethod,
)
from typing import (
List,
from collections.abc import (
Sequence,
)
@ -41,7 +40,7 @@ class IAddrBook(ABC):
""" # noqa: E501
@abstractmethod
def addrs(self, peer_id: ID) -> List[Multiaddr]:
def addrs(self, peer_id: ID) -> list[Multiaddr]:
"""
:param peer_id: peer to get addresses of
:return: all known (and valid) addresses for the given peer
@ -56,7 +55,7 @@ class IAddrBook(ABC):
"""
@abstractmethod
def peers_with_addrs(self) -> List[ID]:
def peers_with_addrs(self) -> list[ID]:
"""
:return: all of the peer IDs stored with addresses
"""

View File

@ -1,8 +1,8 @@
from collections.abc import (
Sequence,
)
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import (
@ -22,9 +22,9 @@ from .peerdata_interface import (
class PeerData(IPeerData):
pubkey: PublicKey
privkey: PrivateKey
metadata: Dict[Any, Any]
protocols: List[str]
addrs: List[Multiaddr]
metadata: dict[Any, Any]
protocols: list[str]
addrs: list[Multiaddr]
def __init__(self) -> None:
self.pubkey = None
@ -33,7 +33,7 @@ class PeerData(IPeerData):
self.protocols = []
self.addrs = []
def get_protocols(self) -> List[str]:
def get_protocols(self) -> list[str]:
"""
:return: all protocols associated with given peer
"""
@ -59,7 +59,7 @@ class PeerData(IPeerData):
if addr not in self.addrs:
self.addrs.append(addr)
def get_addrs(self) -> List[Multiaddr]:
def get_addrs(self) -> list[Multiaddr]:
"""
:return: all multiaddresses
"""

View File

@ -2,10 +2,11 @@ from abc import (
ABC,
abstractmethod,
)
from collections.abc import (
Sequence,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import (
@ -24,7 +25,7 @@ from .peermetadata_interface import (
class IPeerData(ABC):
@abstractmethod
def get_protocols(self) -> List[str]:
def get_protocols(self) -> list[str]:
"""
:return: all protocols associated with given peer
"""
@ -48,7 +49,7 @@ class IPeerData(ABC):
"""
@abstractmethod
def get_addrs(self) -> List[Multiaddr]:
def get_addrs(self) -> list[Multiaddr]:
"""
:return: all multiaddresses
"""

View File

@ -1,7 +1,8 @@
from collections.abc import (
Sequence,
)
from typing import (
Any,
List,
Sequence,
)
import multiaddr
@ -13,7 +14,7 @@ from .id import (
class PeerInfo:
peer_id: ID
addrs: List[multiaddr.Multiaddr]
addrs: list[multiaddr.Multiaddr]
def __init__(self, peer_id: ID, addrs: Sequence[multiaddr.Multiaddr]) -> None:
self.peer_id = peer_id

View File

@ -1,11 +1,11 @@
from collections import (
defaultdict,
)
from collections.abc import (
Sequence,
)
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import (
@ -34,7 +34,7 @@ from .peerstore_interface import (
class PeerStore(IPeerStore):
peer_data_map: Dict[ID, PeerData]
peer_data_map: dict[ID, PeerData]
def __init__(self) -> None:
self.peer_data_map = defaultdict(PeerData)
@ -49,7 +49,7 @@ class PeerStore(IPeerStore):
return PeerInfo(peer_id, peer_data.get_addrs())
raise PeerStoreError("peer ID not found")
def get_protocols(self, peer_id: ID) -> List[str]:
def get_protocols(self, peer_id: ID) -> list[str]:
"""
:param peer_id: peer ID to get protocols for
:return: protocols (as list of strings)
@ -75,7 +75,7 @@ class PeerStore(IPeerStore):
peer_data = self.peer_data_map[peer_id]
peer_data.set_protocols(list(protocols))
def peer_ids(self) -> List[ID]:
def peer_ids(self) -> list[ID]:
"""
:return: all of the peer IDs stored in peer store
"""
@ -123,7 +123,7 @@ class PeerStore(IPeerStore):
peer_data = self.peer_data_map[peer_id]
peer_data.add_addrs(list(addrs))
def addrs(self, peer_id: ID) -> List[Multiaddr]:
def addrs(self, peer_id: ID) -> list[Multiaddr]:
"""
:param peer_id: peer ID to get addrs for
:return: list of addrs
@ -141,12 +141,12 @@ class PeerStore(IPeerStore):
if peer_id in self.peer_data_map:
self.peer_data_map[peer_id].clear_addrs()
def peers_with_addrs(self) -> List[ID]:
def peers_with_addrs(self) -> list[ID]:
"""
:return: all of the peer IDs which has addrs stored in peer store
"""
# Add all peers with addrs at least 1 to output
output: List[ID] = []
output: list[ID] = []
for peer_id in self.peer_data_map:
if len(self.peer_data_map[peer_id].get_addrs()) >= 1:

View File

@ -1,10 +1,11 @@
from abc import (
abstractmethod,
)
from collections.abc import (
Sequence,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import (
@ -40,7 +41,7 @@ class IPeerStore(IAddrBook, IPeerMetadata):
"""
@abstractmethod
def get_protocols(self, peer_id: ID) -> List[str]:
def get_protocols(self, peer_id: ID) -> list[str]:
"""
:param peer_id: peer ID to get protocols for
:return: protocols (as list of strings)
@ -62,7 +63,7 @@ class IPeerStore(IAddrBook, IPeerMetadata):
"""
@abstractmethod
def peer_ids(self) -> List[ID]:
def peer_ids(self) -> list[ID]:
"""
:return: all of the peer IDs stored in peer store
"""
@ -101,7 +102,7 @@ class IPeerStore(IAddrBook, IPeerMetadata):
"""
@abstractmethod
def addrs(self, peer_id: ID) -> List[Multiaddr]:
def addrs(self, peer_id: ID) -> list[Multiaddr]:
"""
:param peer_id: peer ID to get addrs for
:return: list of addrs
@ -114,7 +115,7 @@ class IPeerStore(IAddrBook, IPeerMetadata):
"""
@abstractmethod
def peers_with_addrs(self) -> List[ID]:
def peers_with_addrs(self) -> list[ID]:
"""
:return: all of the peer IDs which has addrs stored in peer store
"""

View File

@ -1,8 +1,3 @@
from typing import (
Dict,
Tuple,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
@ -30,10 +25,10 @@ class Multiselect(IMultiselectMuxer):
communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn]
handlers: dict[TProtocol, StreamHandlerFn]
def __init__(
self, default_handlers: Dict[TProtocol, StreamHandlerFn] = None
self, default_handlers: dict[TProtocol, StreamHandlerFn] = None
) -> None:
if not default_handlers:
default_handlers = {}
@ -50,7 +45,7 @@ class Multiselect(IMultiselectMuxer):
async def negotiate(
self, communicator: IMultiselectCommunicator
) -> Tuple[TProtocol, StreamHandlerFn]:
) -> tuple[TProtocol, StreamHandlerFn]:
"""
Negotiate performs protocol selection.

View File

@ -1,4 +1,4 @@
from typing import (
from collections.abc import (
Sequence,
)

View File

@ -2,7 +2,7 @@ from abc import (
ABC,
abstractmethod,
)
from typing import (
from collections.abc import (
Sequence,
)

View File

@ -2,10 +2,6 @@ from abc import (
ABC,
abstractmethod,
)
from typing import (
Dict,
Tuple,
)
from libp2p.typing import (
StreamHandlerFn,
@ -24,7 +20,7 @@ class IMultiselectMuxer(ABC):
communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn]
handlers: dict[TProtocol, StreamHandlerFn]
@abstractmethod
def add_handler(self, protocol: TProtocol, handler: StreamHandlerFn) -> None:
@ -35,13 +31,13 @@ class IMultiselectMuxer(ABC):
:param handler: handler function
"""
def get_protocols(self) -> Tuple[TProtocol, ...]:
def get_protocols(self) -> tuple[TProtocol, ...]:
return tuple(self.handlers.keys())
@abstractmethod
async def negotiate(
self, communicator: IMultiselectCommunicator
) -> Tuple[TProtocol, StreamHandlerFn]:
) -> tuple[TProtocol, StreamHandlerFn]:
"""
Negotiate performs protocol selection.

View File

@ -2,13 +2,13 @@ from abc import (
ABC,
abstractmethod,
)
from collections.abc import (
AsyncIterable,
KeysView,
)
from typing import (
TYPE_CHECKING,
AsyncContextManager,
AsyncIterable,
KeysView,
List,
Tuple,
)
from libp2p.peer.id import (
@ -46,7 +46,7 @@ class ISubscriptionAPI(
class IPubsubRouter(ABC):
@abstractmethod
def get_protocols(self) -> List[TProtocol]:
def get_protocols(self) -> list[TProtocol]:
"""
:return: the list of protocols supported by the router
"""
@ -123,7 +123,7 @@ class IPubsub(ServiceAPI):
@property
@abstractmethod
def protocols(self) -> Tuple[TProtocol, ...]:
def protocols(self) -> tuple[TProtocol, ...]:
...
@property

View File

@ -1,9 +1,8 @@
import logging
from typing import (
from collections.abc import (
Iterable,
List,
Sequence,
)
import logging
import trio
@ -36,7 +35,7 @@ logger = logging.getLogger("libp2p.pubsub.floodsub")
class FloodSub(IPubsubRouter):
protocols: List[TProtocol]
protocols: list[TProtocol]
pubsub: Pubsub
@ -44,7 +43,7 @@ class FloodSub(IPubsubRouter):
self.protocols = list(protocols)
self.pubsub = None
def get_protocols(self) -> List[TProtocol]:
def get_protocols(self) -> list[TProtocol]:
"""
:return: the list of protocols supported by the router
"""

View File

@ -4,17 +4,15 @@ from ast import (
from collections import (
defaultdict,
)
from collections.abc import (
Iterable,
Sequence,
)
import logging
import random
from typing import (
Any,
DefaultDict,
Dict,
Iterable,
List,
Sequence,
Set,
Tuple,
)
import trio
@ -60,7 +58,7 @@ logger = logging.getLogger("libp2p.pubsub.gossipsub")
class GossipSub(IPubsubRouter, Service):
protocols: List[TProtocol]
protocols: list[TProtocol]
pubsub: Pubsub
degree: int
@ -69,11 +67,11 @@ class GossipSub(IPubsubRouter, Service):
time_to_live: int
mesh: Dict[str, Set[ID]]
fanout: Dict[str, Set[ID]]
mesh: dict[str, set[ID]]
fanout: dict[str, set[ID]]
# The protocol peer supports
peer_protocol: Dict[ID, TProtocol]
peer_protocol: dict[ID, TProtocol]
# TODO: Add `time_since_last_publish`
# Create topic --> time since last publish map.
@ -128,7 +126,7 @@ class GossipSub(IPubsubRouter, Service):
# Interface functions
def get_protocols(self) -> List[TProtocol]:
def get_protocols(self) -> list[TProtocol]:
"""
:return: the list of protocols supported by the router
"""
@ -237,13 +235,13 @@ class GossipSub(IPubsubRouter, Service):
:param origin: peer id of the peer the message originate from.
:return: a generator of the peer ids who we send data to.
"""
send_to: Set[ID] = set()
send_to: set[ID] = set()
for topic in topic_ids:
if topic not in self.pubsub.peer_topics:
continue
# floodsub peers
floodsub_peers: Set[ID] = {
floodsub_peers: set[ID] = {
peer_id
for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == floodsub.PROTOCOL_ID
@ -251,7 +249,7 @@ class GossipSub(IPubsubRouter, Service):
send_to.update(floodsub_peers)
# gossipsub peers
gossipsub_peers: Set[ID] = set()
gossipsub_peers: set[ID] = set()
if topic in self.mesh:
gossipsub_peers = self.mesh[topic]
else:
@ -259,7 +257,7 @@ class GossipSub(IPubsubRouter, Service):
# pick `self.degree` number of peers who have subscribed to the topic
# and add them as our `fanout` peers.
topic_in_fanout: bool = topic in self.fanout
fanout_peers: Set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_peers: set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_size = len(fanout_peers)
if not topic_in_fanout or (
topic_in_fanout and fanout_size < self.degree
@ -292,7 +290,7 @@ class GossipSub(IPubsubRouter, Service):
self.mesh[topic] = set()
topic_in_fanout: bool = topic in self.fanout
fanout_peers: Set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_peers: set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_size = len(fanout_peers)
if not topic_in_fanout or (topic_in_fanout and fanout_size < self.degree):
# There are less than D peers (let this number be x)
@ -333,13 +331,13 @@ class GossipSub(IPubsubRouter, Service):
async def _emit_control_msgs(
self,
peers_to_graft: Dict[ID, List[str]],
peers_to_prune: Dict[ID, List[str]],
peers_to_gossip: Dict[ID, Dict[str, List[str]]],
peers_to_graft: dict[ID, list[str]],
peers_to_prune: dict[ID, list[str]],
peers_to_gossip: dict[ID, dict[str, list[str]]],
) -> None:
graft_msgs: List[rpc_pb2.ControlGraft] = []
prune_msgs: List[rpc_pb2.ControlPrune] = []
ihave_msgs: List[rpc_pb2.ControlIHave] = []
graft_msgs: list[rpc_pb2.ControlGraft] = []
prune_msgs: list[rpc_pb2.ControlPrune] = []
ihave_msgs: list[rpc_pb2.ControlIHave] = []
# Starting with GRAFT messages
for peer, topics in peers_to_graft.items():
for topic in topics:
@ -428,9 +426,9 @@ class GossipSub(IPubsubRouter, Service):
def mesh_heartbeat(
self,
) -> Tuple[DefaultDict[ID, List[str]], DefaultDict[ID, List[str]]]:
peers_to_graft: DefaultDict[ID, List[str]] = defaultdict(list)
peers_to_prune: DefaultDict[ID, List[str]] = defaultdict(list)
) -> tuple[DefaultDict[ID, list[str]], DefaultDict[ID, list[str]]]:
peers_to_graft: DefaultDict[ID, list[str]] = defaultdict(list)
peers_to_prune: DefaultDict[ID, list[str]] = defaultdict(list)
for topic in self.mesh:
# Skip if no peers have subscribed to the topic
if topic not in self.pubsub.peer_topics:
@ -493,8 +491,8 @@ class GossipSub(IPubsubRouter, Service):
# Add the peers to fanout[topic]
self.fanout[topic].update(selected_peers)
def gossip_heartbeat(self) -> DefaultDict[ID, Dict[str, List[str]]]:
peers_to_gossip: DefaultDict[ID, Dict[str, List[str]]] = defaultdict(dict)
def gossip_heartbeat(self) -> DefaultDict[ID, dict[str, list[str]]]:
peers_to_gossip: DefaultDict[ID, dict[str, list[str]]] = defaultdict(dict)
for topic in self.mesh:
msg_ids = self.mcache.window(topic)
if msg_ids:
@ -534,7 +532,7 @@ class GossipSub(IPubsubRouter, Service):
@staticmethod
def select_from_minus(
num_to_select: int, pool: Iterable[Any], minus: Iterable[Any]
) -> List[Any]:
) -> list[Any]:
"""
Select at most num_to_select subset of elements from the set
(pool - minus) randomly.
@ -546,7 +544,7 @@ class GossipSub(IPubsubRouter, Service):
# Create selection pool, which is selection_pool = pool - minus
if minus:
# Create a new selection pool by removing elements of minus
selection_pool: List[Any] = [x for x in pool if x not in minus]
selection_pool: list[Any] = [x for x in pool if x not in minus]
else:
# Don't create a new selection_pool if we are not subbing anything
selection_pool = list(pool)
@ -558,13 +556,13 @@ class GossipSub(IPubsubRouter, Service):
return selection_pool
# Random selection
selection: List[Any] = random.sample(selection_pool, num_to_select)
selection: list[Any] = random.sample(selection_pool, num_to_select)
return selection
def _get_in_topic_gossipsub_peers_from_minus(
self, topic: str, num_to_select: int, minus: Iterable[ID]
) -> List[ID]:
) -> list[ID]:
gossipsub_peers_in_topic = {
peer_id
for peer_id in self.pubsub.peer_topics[topic]
@ -587,7 +585,7 @@ class GossipSub(IPubsubRouter, Service):
# Add all unknown message ids (ids that appear in ihave_msg but not in
# seen_seqnos) to list of messages we want to request
# FIXME: Update type of message ID
msg_ids_wanted: List[Any] = [
msg_ids_wanted: list[Any] = [
msg_id
for msg_id in ihave_msg.messageIDs
if literal_eval(msg_id) not in seen_seqnos_and_peers
@ -606,8 +604,8 @@ class GossipSub(IPubsubRouter, Service):
"""
# FIXME: Update type of message ID
# FIXME: Find a better way to parse the msg ids
msg_ids: List[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs]
msgs_to_forward: List[rpc_pb2.Message] = []
msg_ids: list[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs]
msgs_to_forward: list[rpc_pb2.Message] = []
for msg_id_iwant in msg_ids:
# Check if the wanted message ID is present in mcache
msg: rpc_pb2.Message = self.mcache.get(msg_id_iwant)
@ -674,9 +672,9 @@ class GossipSub(IPubsubRouter, Service):
def pack_control_msgs(
self,
ihave_msgs: List[rpc_pb2.ControlIHave],
graft_msgs: List[rpc_pb2.ControlGraft],
prune_msgs: List[rpc_pb2.ControlPrune],
ihave_msgs: list[rpc_pb2.ControlIHave],
graft_msgs: list[rpc_pb2.ControlGraft],
prune_msgs: list[rpc_pb2.ControlPrune],
) -> rpc_pb2.ControlMessage:
control_msg: rpc_pb2.ControlMessage = rpc_pb2.ControlMessage()
if ihave_msgs:
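
Note that ``typing.DefaultDict`` survives this pass (in ``mesh_heartbeat`` and ``gossip_heartbeat``) while the value types inside it become built-ins. On 3.9+ ``collections.defaultdict`` is itself subscriptable under PEP 585, so the annotation could also target the concrete class; a short sketch of that alternative, which is not what the commit does:

from collections import defaultdict

# Valid at runtime on Python >= 3.9; key/value types are illustrative.
peers_to_graft: defaultdict[str, list[str]] = defaultdict(list)
peers_to_graft["peer-a"].append("topic-1")
print(peers_to_graft)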

View File

@ -1,9 +1,8 @@
from typing import (
Dict,
List,
Optional,
from collections.abc import (
Sequence,
Tuple,
)
from typing import (
Optional,
)
from .pb import (
@ -12,14 +11,14 @@ from .pb import (
class CacheEntry:
mid: Tuple[bytes, bytes]
topics: List[str]
mid: tuple[bytes, bytes]
topics: list[str]
"""
A logical representation of an entry in the mcache's _history_.
"""
def __init__(self, mid: Tuple[bytes, bytes], topics: Sequence[str]) -> None:
def __init__(self, mid: tuple[bytes, bytes], topics: Sequence[str]) -> None:
"""
Constructor.
@ -34,9 +33,9 @@ class MessageCache:
window_size: int
history_size: int
msgs: Dict[Tuple[bytes, bytes], rpc_pb2.Message]
msgs: dict[tuple[bytes, bytes], rpc_pb2.Message]
history: List[List[CacheEntry]]
history: list[list[CacheEntry]]
def __init__(self, window_size: int, history_size: int) -> None:
"""
@ -62,12 +61,12 @@ class MessageCache:
:param msg: The rpc message to put in. Should contain seqno and from_id
"""
mid: Tuple[bytes, bytes] = (msg.seqno, msg.from_id)
mid: tuple[bytes, bytes] = (msg.seqno, msg.from_id)
self.msgs[mid] = msg
self.history[0].append(CacheEntry(mid, msg.topicIDs))
def get(self, mid: Tuple[bytes, bytes]) -> Optional[rpc_pb2.Message]:
def get(self, mid: tuple[bytes, bytes]) -> Optional[rpc_pb2.Message]:
"""
Get a message from the mcache.
@ -79,14 +78,14 @@ class MessageCache:
return None
def window(self, topic: str) -> List[Tuple[bytes, bytes]]:
def window(self, topic: str) -> list[tuple[bytes, bytes]]:
"""
Get the window for this topic.
:param topic: Topic whose message ids we desire.
:return: List of mids in the current window.
"""
mids: List[Tuple[bytes, bytes]] = []
mids: list[tuple[bytes, bytes]] = []
for entries_list in self.history[: self.window_size]:
for entry in entries_list:
@ -100,7 +99,7 @@ class MessageCache:
"""
Shift the window over by 1 position, dropping the last element of the history.
"""
last_entries: List[CacheEntry] = self.history[len(self.history) - 1]
last_entries: list[CacheEntry] = self.history[len(self.history) - 1]
for entry in last_entries:
self.msgs.pop(entry.mid)
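
``Optional`` (and ``Union``) stay untouched throughout the commit: the PEP 604 ``X | None`` spelling is only a valid runtime annotation on Python 3.10+, so it is out of scope for a 3.9 target. A small illustrative sketch of the distinction, loosely modelled on ``MessageCache.get``:

from typing import Optional

def get(
    mid: tuple[bytes, bytes], cache: dict[tuple[bytes, bytes], str]
) -> Optional[str]:
    # A "str | None" return annotation would raise TypeError when this
    # function is defined on Python 3.9, unless evaluation is deferred
    # with `from __future__ import annotations`.
    return cache.get(mid)

print(get((b"seqno", b"from"), {}))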

View File

@ -3,6 +3,9 @@ from __future__ import (
)
import base64
from collections.abc import (
KeysView,
)
import functools
import hashlib
import logging
@ -10,7 +13,6 @@ import time
from typing import (
TYPE_CHECKING,
Callable,
KeysView,
NamedTuple,
cast,
)

View File

@ -1,8 +1,10 @@
from collections.abc import (
AsyncIterator,
)
from types import (
TracebackType,
)
from typing import (
AsyncIterator,
Optional,
Type,
)

View File

@ -1,5 +1,7 @@
from typing import (
from collections.abc import (
Awaitable,
)
from typing import (
Callable,
Union,
)
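
``Awaitable`` moves to ``collections.abc`` here while ``Callable`` and ``Union`` stay in ``typing``. ``collections.abc.Callable`` is also subscriptable on 3.9, but early 3.9 point releases mishandled its subscripted argument list, which is presumably why the tool keeps the ``typing`` spelling. Either way, aliases like the following sketch (alias name illustrative) work in the new style:

from collections.abc import Awaitable
from typing import Callable, Union

# typing.Callable for the callable itself, collections.abc.Awaitable for
# the coroutine it returns.
UnsubscribeFn = Callable[[], Awaitable[Union[int, None]]]

async def unsubscribe() -> Union[int, None]:
    return 0

handler: UnsubscribeFn = unsubscribe
print(handler)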

View File

@ -2,7 +2,7 @@ from abc import (
ABC,
abstractmethod,
)
from typing import (
from collections.abc import (
Iterable,
)

View File

@ -4,7 +4,6 @@ from dataclasses import (
import itertools
from typing import (
Optional,
Tuple,
)
import multihash
@ -243,7 +242,7 @@ def _select_parameter_from_order(
def _select_encryption_parameters(
local_proposal: Proposal, remote_proposal: Proposal
) -> Tuple[str, str, str, int]:
) -> tuple[str, str, str, int]:
first_score = _mk_score(remote_proposal.public_key, local_proposal.nonce)
second_score = _mk_score(local_proposal.public_key, remote_proposal.nonce)
@ -276,7 +275,7 @@ async def _establish_session_parameters(
remote_peer: Optional[PeerID],
conn: SecioPacketReadWriter,
nonce: bytes,
) -> Tuple[SessionParameters, bytes]:
) -> tuple[SessionParameters, bytes]:
# establish shared encryption parameters
session_parameters = SessionParameters()
session_parameters.local_peer = local_peer

View File

@ -1,8 +1,6 @@
import logging
from typing import (
Dict,
Optional,
Tuple,
)
import trio
@ -64,9 +62,9 @@ class Mplex(IMuxedConn):
secured_conn: ISecureConn
peer_id: ID
next_channel_id: int
streams: Dict[StreamID, MplexStream]
streams: dict[StreamID, MplexStream]
streams_lock: trio.Lock
streams_msg_channels: Dict[StreamID, "trio.MemorySendChannel[bytes]"]
streams_msg_channels: dict[StreamID, "trio.MemorySendChannel[bytes]"]
new_stream_send_channel: "trio.MemorySendChannel[IMuxedStream]"
new_stream_receive_channel: "trio.MemoryReceiveChannel[IMuxedStream]"
@ -222,7 +220,7 @@ class Mplex(IMuxedConn):
# We should clean things up.
await self._cleanup()
async def read_message(self) -> Tuple[int, int, bytes]:
async def read_message(self) -> tuple[int, int, bytes]:
"""
Read a single message off of the secured connection.

View File

@ -4,11 +4,12 @@ from abc import (
ABC,
abstractmethod,
)
from collections.abc import (
Hashable,
)
from typing import (
Any,
Hashable,
Optional,
Set,
)
import trio_typing
@ -45,7 +46,7 @@ class TaskAPI(Hashable):
class TaskWithChildrenAPI(TaskAPI):
children: Set[TaskAPI]
children: set[TaskAPI]
@abstractmethod
def add_child(self, child: TaskAPI) -> None:

View File

@ -7,18 +7,17 @@ import asyncio
from collections import (
Counter,
)
from collections.abc import (
Awaitable,
Iterable,
Sequence,
)
import logging
import sys
from typing import (
Any,
Awaitable,
Callable,
Iterable,
List,
Optional,
Sequence,
Set,
Type,
TypeVar,
cast,
)
@ -79,7 +78,7 @@ class Service(ServiceAPI):
LogicFnType = Callable[..., Awaitable[Any]]
def as_service(service_fn: LogicFnType) -> Type[ServiceAPI]:
def as_service(service_fn: LogicFnType) -> type[ServiceAPI]:
"""
Create a service out of a simple function
"""
@ -143,7 +142,7 @@ T = TypeVar("T", bound="BaseFunctionTask")
class BaseFunctionTask(BaseTaskWithChildren):
@classmethod
def iterate_tasks(cls: Type[T], *tasks: TaskAPI) -> Iterable[T]:
def iterate_tasks(cls: type[T], *tasks: TaskAPI) -> Iterable[T]:
for task in tasks:
if isinstance(task, cls):
yield task
@ -206,7 +205,7 @@ class BaseManager(InternalManagerAPI):
_service: ServiceAPI
_errors: List[EXC_INFO]
_errors: list[EXC_INFO]
def __init__(self, service: ServiceAPI) -> None:
if hasattr(service, "_manager"):
@ -220,7 +219,7 @@ class BaseManager(InternalManagerAPI):
self._errors = []
# tasks
self._root_tasks: Set[TaskAPI] = set()
self._root_tasks: set[TaskAPI] = set()
# stats
self._total_task_count = 0

View File

@ -3,6 +3,12 @@ from __future__ import (
annotations,
)
from collections.abc import (
AsyncIterator,
Awaitable,
Coroutine,
Sequence,
)
from contextlib import (
asynccontextmanager,
)
@ -10,13 +16,8 @@ import functools
import sys
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Coroutine,
Optional,
Sequence,
Tuple,
TypeVar,
cast,
)
@ -337,7 +338,7 @@ class TrioManager(BaseManager):
TFunc = TypeVar("TFunc", bound=Callable[..., Coroutine[Any, Any, Any]])
_ChannelPayload = Tuple[Optional[Any], Optional[BaseException]]
_ChannelPayload = tuple[Optional[Any], Optional[BaseException]]
async def _wait_finished(

View File

@ -1,16 +1,16 @@
# Copied from https://github.com/ethereum/async-service
from collections.abc import (
Awaitable,
)
from types import (
TracebackType,
)
from typing import (
Any,
Awaitable,
Callable,
Tuple,
Type,
)
EXC_INFO = Tuple[Type[BaseException], BaseException, TracebackType]
EXC_INFO = tuple[type[BaseException], BaseException, TracebackType]
AsyncFn = Callable[..., Awaitable[Any]]
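
``EXC_INFO`` is an ordinary module-level assignment, so its right-hand side is evaluated at import time; the subscripts on ``tuple`` and ``type`` only work because both became subscriptable in 3.9 (PEP 585). A small usage sketch, with the surrounding function invented for illustration:

import sys
from types import TracebackType

# Evaluated eagerly at import time; needs Python >= 3.9.
EXC_INFO = tuple[type[BaseException], BaseException, TracebackType]

def capture() -> EXC_INFO:
    try:
        raise ValueError("boom")
    except ValueError:
        exc_type, exc, tb = sys.exc_info()
        return exc_type, exc, tb

print(capture()[0])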

View File

@ -1,15 +1,14 @@
from collections.abc import (
AsyncIterator,
Sequence,
)
from contextlib import (
AsyncExitStack,
asynccontextmanager,
)
from typing import (
Any,
AsyncIterator,
Callable,
Dict,
List,
Sequence,
Tuple,
cast,
)
@ -222,7 +221,7 @@ def default_muxer_transport_factory() -> TMuxerOptions:
@asynccontextmanager
async def raw_conn_factory(
nursery: trio.Nursery,
) -> AsyncIterator[Tuple[IRawConnection, IRawConnection]]:
) -> AsyncIterator[tuple[IRawConnection, IRawConnection]]:
conn_0 = None
conn_1 = None
event = trio.Event()
@ -245,7 +244,7 @@ async def raw_conn_factory(
@asynccontextmanager
async def noise_conn_factory(
nursery: trio.Nursery,
) -> AsyncIterator[Tuple[ISecureConn, ISecureConn]]:
) -> AsyncIterator[tuple[ISecureConn, ISecureConn]]:
local_transport = cast(
NoiseTransport, noise_transport_factory(create_secp256k1_key_pair())
)
@ -312,7 +311,7 @@ class SwarmFactory(factory.Factory):
# `factory.Factory.__init__` does *not* prepare a *default value* if we pass
# an argument explicitly with `None`. If an argument is `None`, we don't pass it
# to `factory.Factory.__init__`, in order to let the function initialize it.
optional_kwargs: Dict[str, Any] = {}
optional_kwargs: dict[str, Any] = {}
if key_pair is not None:
optional_kwargs["key_pair"] = key_pair
if security_protocol is not None:
@ -331,7 +330,7 @@ class SwarmFactory(factory.Factory):
number: int,
security_protocol: TProtocol = None,
muxer_opt: TMuxerOptions = None,
) -> AsyncIterator[Tuple[Swarm, ...]]:
) -> AsyncIterator[tuple[Swarm, ...]]:
async with AsyncExitStack() as stack:
ctx_mgrs = [
await stack.enter_async_context(
@ -366,7 +365,7 @@ class HostFactory(factory.Factory):
number: int,
security_protocol: TProtocol = None,
muxer_opt: TMuxerOptions = None,
) -> AsyncIterator[Tuple[BasicHost, ...]]:
) -> AsyncIterator[tuple[BasicHost, ...]]:
async with SwarmFactory.create_batch_and_listen(
number, security_protocol=security_protocol, muxer_opt=muxer_opt
) as swarms:
@ -375,12 +374,12 @@ class HostFactory(factory.Factory):
class DummyRouter(IPeerRouting):
_routing_table: Dict[ID, PeerInfo]
_routing_table: dict[ID, PeerInfo]
def __init__(self) -> None:
self._routing_table = dict()
def _add_peer(self, peer_id: ID, addrs: List[Multiaddr]) -> None:
def _add_peer(self, peer_id: ID, addrs: list[Multiaddr]) -> None:
self._routing_table[peer_id] = PeerInfo(peer_id, addrs)
async def find_peer(self, peer_id: ID) -> PeerInfo:
@ -411,7 +410,7 @@ class RoutedHostFactory(factory.Factory):
number: int,
security_protocol: TProtocol = None,
muxer_opt: TMuxerOptions = None,
) -> AsyncIterator[Tuple[RoutedHost, ...]]:
) -> AsyncIterator[tuple[RoutedHost, ...]]:
routing_table = DummyRouter()
async with HostFactory.create_batch_and_listen(
number, security_protocol=security_protocol, muxer_opt=muxer_opt
@ -487,7 +486,7 @@ class PubsubFactory(factory.Factory):
security_protocol: TProtocol = None,
muxer_opt: TMuxerOptions = None,
msg_id_constructor: Callable[[rpc_pb2.Message], bytes] = None,
) -> AsyncIterator[Tuple[Pubsub, ...]]:
) -> AsyncIterator[tuple[Pubsub, ...]]:
async with HostFactory.create_batch_and_listen(
number, security_protocol=security_protocol, muxer_opt=muxer_opt
) as hosts:
@ -516,7 +515,7 @@ class PubsubFactory(factory.Factory):
msg_id_constructor: Callable[
[rpc_pb2.Message], bytes
] = get_peer_and_seqno_msg_id,
) -> AsyncIterator[Tuple[Pubsub, ...]]:
) -> AsyncIterator[tuple[Pubsub, ...]]:
if protocols is not None:
floodsubs = FloodsubFactory.create_batch(number, protocols=list(protocols))
else:
@ -554,7 +553,7 @@ class PubsubFactory(factory.Factory):
msg_id_constructor: Callable[
[rpc_pb2.Message], bytes
] = get_peer_and_seqno_msg_id,
) -> AsyncIterator[Tuple[Pubsub, ...]]:
) -> AsyncIterator[tuple[Pubsub, ...]]:
if protocols is not None:
gossipsubs = GossipsubFactory.create_batch(
number,
@ -595,7 +594,7 @@ class PubsubFactory(factory.Factory):
@asynccontextmanager
async def swarm_pair_factory(
security_protocol: TProtocol = None, muxer_opt: TMuxerOptions = None
) -> AsyncIterator[Tuple[Swarm, Swarm]]:
) -> AsyncIterator[tuple[Swarm, Swarm]]:
async with SwarmFactory.create_batch_and_listen(
2, security_protocol=security_protocol, muxer_opt=muxer_opt
) as swarms:
@ -606,7 +605,7 @@ async def swarm_pair_factory(
@asynccontextmanager
async def host_pair_factory(
security_protocol: TProtocol = None, muxer_opt: TMuxerOptions = None
) -> AsyncIterator[Tuple[BasicHost, BasicHost]]:
) -> AsyncIterator[tuple[BasicHost, BasicHost]]:
async with HostFactory.create_batch_and_listen(
2, security_protocol=security_protocol, muxer_opt=muxer_opt
) as hosts:
@ -617,7 +616,7 @@ async def host_pair_factory(
@asynccontextmanager
async def swarm_conn_pair_factory(
security_protocol: TProtocol = None, muxer_opt: TMuxerOptions = None
) -> AsyncIterator[Tuple[SwarmConn, SwarmConn]]:
) -> AsyncIterator[tuple[SwarmConn, SwarmConn]]:
async with swarm_pair_factory(
security_protocol=security_protocol, muxer_opt=muxer_opt
) as swarms:
@ -629,7 +628,7 @@ async def swarm_conn_pair_factory(
@asynccontextmanager
async def mplex_conn_pair_factory(
security_protocol: TProtocol = None,
) -> AsyncIterator[Tuple[Mplex, Mplex]]:
) -> AsyncIterator[tuple[Mplex, Mplex]]:
async with swarm_conn_pair_factory(
security_protocol=security_protocol, muxer_opt=default_muxer_transport_factory()
) as swarm_pair:
@ -642,7 +641,7 @@ async def mplex_conn_pair_factory(
@asynccontextmanager
async def mplex_stream_pair_factory(
security_protocol: TProtocol = None,
) -> AsyncIterator[Tuple[MplexStream, MplexStream]]:
) -> AsyncIterator[tuple[MplexStream, MplexStream]]:
async with mplex_conn_pair_factory(
security_protocol=security_protocol
) as mplex_conn_pair_info:
@ -660,7 +659,7 @@ async def mplex_stream_pair_factory(
@asynccontextmanager
async def net_stream_pair_factory(
security_protocol: TProtocol = None, muxer_opt: TMuxerOptions = None
) -> AsyncIterator[Tuple[INetStream, INetStream]]:
) -> AsyncIterator[tuple[INetStream, INetStream]]:
protocol_id = TProtocol("/example/id/1")
stream_1: INetStream
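
The factory helpers are ``asynccontextmanager`` generators whose return annotations now read ``AsyncIterator[tuple[...]]`` with ``AsyncIterator`` from ``collections.abc``. A stripped-down sketch of that shape, using asyncio only to keep it dependency-free (the real factories use trio, and the "connections" here are placeholders):

import asyncio
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager

@asynccontextmanager
async def conn_pair_factory() -> AsyncIterator[tuple[str, str]]:
    # Build two illustrative "connections", hand them to the caller,
    # then tear them down when the block exits.
    conn_a, conn_b = "conn-0", "conn-1"
    try:
        yield conn_a, conn_b
    finally:
        pass  # real factories close the connections here

async def main() -> None:
    async with conn_pair_factory() as (a, b):
        print(a, b)

asyncio.run(main())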

View File

@ -1,12 +1,10 @@
from collections.abc import (
AsyncIterator,
)
from contextlib import (
AsyncExitStack,
asynccontextmanager,
)
from typing import (
AsyncIterator,
Dict,
Tuple,
)
from libp2p.host.host_interface import (
IHost,
@ -45,7 +43,7 @@ class DummyAccountNode(Service):
def __init__(self, pubsub: Pubsub) -> None:
self.pubsub = pubsub
self.balances: Dict[str, int] = {}
self.balances: dict[str, int] = {}
@property
def host(self) -> IHost:
@ -58,7 +56,7 @@ class DummyAccountNode(Service):
@classmethod
@asynccontextmanager
async def create(cls, number: int) -> AsyncIterator[Tuple["DummyAccountNode", ...]]:
async def create(cls, number: int) -> AsyncIterator[tuple["DummyAccountNode", ...]]:
"""
Create a new DummyAccountNode and attach a libp2p node, a floodsub, and
a pubsub instance to this new node.

View File

@ -1,4 +1,4 @@
from typing import (
from collections.abc import (
Sequence,
)

View File

@ -1,5 +1,7 @@
from typing import (
from collections.abc import (
Awaitable,
)
from typing import (
Callable,
)

View File

@ -2,9 +2,6 @@ from abc import (
ABC,
abstractmethod,
)
from typing import (
Tuple,
)
from multiaddr import (
Multiaddr,
@ -23,7 +20,7 @@ class IListener(ABC):
"""
@abstractmethod
def get_addrs(self) -> Tuple[Multiaddr, ...]:
def get_addrs(self) -> tuple[Multiaddr, ...]:
"""
Retrieve list of addresses the listener is listening on.

View File

@ -1,10 +1,10 @@
from collections.abc import (
Awaitable,
Sequence,
)
import logging
from typing import (
Awaitable,
Callable,
List,
Sequence,
Tuple,
)
from multiaddr import (
@ -41,7 +41,7 @@ logger = logging.getLogger("libp2p.transport.tcp")
class TCPListener(IListener):
listeners: List[trio.SocketListener]
listeners: list[trio.SocketListener]
def __init__(self, handler_function: THandler) -> None:
self.listeners = []
@ -78,7 +78,7 @@ class TCPListener(IListener):
)
self.listeners.extend(listeners)
def get_addrs(self) -> Tuple[Multiaddr, ...]:
def get_addrs(self) -> tuple[Multiaddr, ...]:
"""
Retrieve list of addresses the listener is listening on.

View File

@ -1,8 +1,9 @@
from typing import (
from collections.abc import (
Awaitable,
Callable,
Mapping,
Type,
)
from typing import (
Callable,
)
from libp2p.io.abc import (
@ -20,5 +21,5 @@ from libp2p.typing import (
THandler = Callable[[ReadWriteCloser], Awaitable[None]]
TSecurityOptions = Mapping[TProtocol, ISecureTransport]
TMuxerClass = Type[IMuxedConn]
TMuxerClass = type[IMuxedConn]
TMuxerOptions = Mapping[TProtocol, TMuxerClass]

View File

@ -1,6 +1,8 @@
from collections.abc import (
Awaitable,
)
from typing import (
TYPE_CHECKING,
Awaitable,
Callable,
NewType,
)

View File

@ -0,0 +1 @@
Drop CI runs for python 3.8, run ``pyupgrade`` to bring code up to python 3.9.
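
The 3.9 floor is a runtime requirement, not just a linting preference: the subscripted built-ins and ``collections.abc`` classes now used in annotations and aliases are evaluated at import time and raise ``TypeError`` on Python 3.8. A quick illustrative check:

import sys
from collections.abc import Sequence

assert sys.version_info >= (3, 9), "subscripted builtins below need Python 3.9+"

# Valid from 3.9 onwards (PEP 585); on 3.8 these raise TypeError.
Addrs = list[str]
Window = Sequence[tuple[bytes, bytes]]
print(Addrs, Window)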

View File

@ -1,9 +1,9 @@
from collections.abc import (
AsyncIterator,
)
from contextlib import (
asynccontextmanager,
)
from typing import (
AsyncIterator,
)
import multiaddr
from multiaddr import (

View File

@ -2,11 +2,10 @@ from abc import (
ABC,
abstractmethod,
)
import subprocess
from typing import (
from collections.abc import (
Iterable,
List,
)
import subprocess
import trio
@ -26,7 +25,7 @@ class AbstractInterativeProcess(ABC):
class BaseInteractiveProcess(AbstractInterativeProcess):
proc: trio.Process = None
cmd: str
args: List[str]
args: list[str]
bytes_read: bytearray
patterns: Iterable[bytes] = None
event_ready: trio.Event