run lint and fix errors, except mypy

pacrob
2024-02-19 15:56:20 -07:00
parent 42605c0288
commit 94483714a3
171 changed files with 4809 additions and 2290 deletions

View File

@ -1,21 +1,58 @@
from libp2p.crypto.keys import KeyPair
from libp2p.crypto.rsa import create_new_key_pair
from libp2p.host.basic_host import BasicHost
from libp2p.host.host_interface import IHost
from libp2p.host.routed_host import RoutedHost
from libp2p.network.network_interface import INetworkService
from libp2p.network.swarm import Swarm
from libp2p.peer.id import ID
from libp2p.peer.peerstore import PeerStore
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.routing.interfaces import IPeerRouting
from libp2p.security.insecure.transport import PLAINTEXT_PROTOCOL_ID, InsecureTransport
from libp2p.crypto.keys import (
KeyPair,
)
from libp2p.crypto.rsa import (
create_new_key_pair,
)
from libp2p.host.basic_host import (
BasicHost,
)
from libp2p.host.host_interface import (
IHost,
)
from libp2p.host.routed_host import (
RoutedHost,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.network.swarm import (
Swarm,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore import (
PeerStore,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.routing.interfaces import (
IPeerRouting,
)
from libp2p.security.insecure.transport import (
PLAINTEXT_PROTOCOL_ID,
InsecureTransport,
)
import libp2p.security.secio.transport as secio
from libp2p.stream_muxer.mplex.mplex import MPLEX_PROTOCOL_ID, Mplex
from libp2p.transport.tcp.tcp import TCP
from libp2p.transport.typing import TMuxerOptions, TSecurityOptions
from libp2p.transport.upgrader import TransportUpgrader
from libp2p.typing import TProtocol
from libp2p.stream_muxer.mplex.mplex import (
MPLEX_PROTOCOL_ID,
Mplex,
)
from libp2p.transport.tcp.tcp import (
TCP,
)
from libp2p.transport.typing import (
TMuxerOptions,
TSecurityOptions,
)
from libp2p.transport.upgrader import (
TransportUpgrader,
)
from libp2p.typing import (
TProtocol,
)
def generate_new_rsa_identity() -> KeyPair:
@ -42,7 +79,6 @@ def new_swarm(
:param peerstore_opt: optional peerstore
:return: return a default swarm instance
"""
if key_pair is None:
key_pair = generate_new_rsa_identity()
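As a point of reference for the factory helpers diffed above (this sketch is not part of the commit; the keyword name follows the `if key_pair is None` check shown):

key_pair = generate_new_rsa_identity()   # fresh RSA KeyPair
swarm = new_swarm(key_pair=key_pair)     # or new_swarm(): a key pair is then generated internally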

View File

@ -1,8 +1,14 @@
from dataclasses import dataclass
from dataclasses import (
dataclass,
)
import hmac
from typing import Tuple
from typing import (
Tuple,
)
from Crypto.Cipher import AES
from Crypto.Cipher import (
AES,
)
import Crypto.Util.Counter as Counter
@ -61,9 +67,11 @@ class MacAndCipher:
def initialize_pair(
cipher_type: str, hash_type: str, secret: bytes
) -> Tuple[EncryptionParameters, EncryptionParameters]:
"""Return a pair of ``Keys`` for use in securing a communications channel
"""
Return a pair of ``Keys`` for use in securing a communications channel
with authenticated encryption derived from the ``secret`` and using the
requested ``cipher_type`` and ``hash_type``."""
requested ``cipher_type`` and ``hash_type``.
"""
if cipher_type != "AES-128":
raise NotImplementedError()
if hash_type != "SHA256":
@ -72,7 +80,7 @@ def initialize_pair(
iv_size = 16
cipher_key_size = 16
hmac_key_size = 20
seed = "key expansion".encode()
seed = b"key expansion"
params_size = iv_size + cipher_key_size + hmac_key_size
result = bytearray(2 * params_size)
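Illustrative only, not part of the diff: deriving the two directional parameter sets from a negotiated secret with the signature shown above (the secret value is a placeholder).

negotiated_secret = b"placeholder shared secret from the key exchange"
local_params, remote_params = initialize_pair("AES-128", "SHA256", negotiated_secret)
# other cipher or hash names raise NotImplementedError, per the checks above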

View File

@ -1,13 +1,25 @@
from fastecdsa import (
keys,
point,
)
from fastecdsa import curve as curve_types
from fastecdsa import keys, point
from fastecdsa.encoding.sec1 import SEC1Encoder
from fastecdsa.encoding.sec1 import (
SEC1Encoder,
)
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
def infer_local_type(curve: str) -> curve_types.Curve:
"""converts a ``str`` representation of some elliptic curve to a
representation understood by the backend of this module."""
"""
Convert a ``str`` representation of some elliptic curve to a
representation understood by the backend of this module.
"""
if curve == "P-256":
return curve_types.P256
else:
@ -61,8 +73,10 @@ class ECCPrivateKey(PrivateKey):
def create_new_key_pair(curve: str) -> KeyPair:
"""Return a new ECC keypair with the requested ``curve`` type, e.g.
"P-256"."""
"""
Return a new ECC keypair with the requested ``curve`` type, e.g.
"P-256".
"""
private_key = ECCPrivateKey.new(curve)
public_key = private_key.get_public_key()
return KeyPair(private_key, public_key)
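A hypothetical round trip over this helper, assuming `KeyPair` exposes the two fields it is constructed with above and that `PrivateKey` offers a matching `sign()`:

key_pair = create_new_key_pair("P-256")   # "P-256" maps to curve_types.P256 in infer_local_type
signature = key_pair.private_key.sign(b"some data")         # assumed PrivateKey.sign()
assert key_pair.public_key.verify(b"some data", signature)  # verify() per the PublicKey interface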

View File

@ -1,11 +1,23 @@
from Crypto.Hash import SHA256
from nacl.exceptions import BadSignatureError
from Crypto.Hash import (
SHA256,
)
from nacl.exceptions import (
BadSignatureError,
)
from nacl.public import PrivateKey as PrivateKeyImpl
from nacl.public import PublicKey as PublicKeyImpl
from nacl.signing import SigningKey, VerifyKey
from nacl.signing import (
SigningKey,
VerifyKey,
)
import nacl.utils as utils
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
class Ed25519PublicKey(PublicKey):

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class CryptographyError(BaseLibp2pError):
@ -6,7 +8,7 @@ class CryptographyError(BaseLibp2pError):
class MissingDeserializerError(CryptographyError):
"""Raise if the requested deserialization routine is missing for some type
of cryptographic key."""
pass
"""
Raise if the requested deserialization routine is missing for some type
of cryptographic key.
"""

View File

@ -1,9 +1,21 @@
from typing import Callable, Tuple, cast
from typing import (
Callable,
Tuple,
cast,
)
from fastecdsa.encoding import util
from fastecdsa.encoding import (
util,
)
from libp2p.crypto.ecc import ECCPrivateKey, ECCPublicKey, create_new_key_pair
from libp2p.crypto.keys import PublicKey
from libp2p.crypto.ecc import (
ECCPrivateKey,
ECCPublicKey,
create_new_key_pair,
)
from libp2p.crypto.keys import (
PublicKey,
)
SharedKeyGenerator = Callable[[bytes], bytes]

View File

@ -1,6 +1,14 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum, unique
from abc import (
ABC,
abstractmethod,
)
from dataclasses import (
dataclass,
)
from enum import (
Enum,
unique,
)
from .pb import crypto_pb2 as protobuf
@ -38,8 +46,10 @@ class PublicKey(Key):
@abstractmethod
def verify(self, data: bytes, signature: bytes) -> bool:
"""Verify that ``signature`` is the cryptographic signature of the hash
of ``data``."""
"""
Verify that ``signature`` is the cryptographic signature of the hash
of ``data``.
"""
...
def _serialize_to_protobuf(self) -> protobuf.PublicKey:

View File

@ -17,4 +17,4 @@ message PublicKey {
message PrivateKey {
required KeyType key_type = 1;
required bytes data = 2;
}

View File

@ -1,56 +1,53 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/crypto/pb/crypto.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/crypto/pb/crypto.proto',
package='crypto.pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb\"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c\"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03')
name="libp2p/crypto/pb/crypto.proto",
package="crypto.pb",
syntax="proto2",
serialized_options=None,
serialized_pb=_b(
'\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03'
),
)
_KEYTYPE = _descriptor.EnumDescriptor(
name='KeyType',
full_name='crypto.pb.KeyType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='RSA', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Ed25519', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Secp256k1', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ECDSA', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=175,
serialized_end=232,
name="KeyType",
full_name="crypto.pb.KeyType",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="RSA", index=0, number=0, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="Ed25519", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="Secp256k1", index=2, number=2, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ECDSA", index=3, number=3, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=175,
serialized_end=232,
)
_sym_db.RegisterEnumDescriptor(_KEYTYPE)
@ -61,101 +58,146 @@ Secp256k1 = 2
ECDSA = 3
_PUBLICKEY = _descriptor.Descriptor(
name='PublicKey',
full_name='crypto.pb.PublicKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key_type', full_name='crypto.pb.PublicKey.key_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='crypto.pb.PublicKey.data', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=44,
serialized_end=107,
name="PublicKey",
full_name="crypto.pb.PublicKey",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key_type",
full_name="crypto.pb.PublicKey.key_type",
index=0,
number=1,
type=14,
cpp_type=8,
label=2,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data",
full_name="crypto.pb.PublicKey.data",
index=1,
number=2,
type=12,
cpp_type=9,
label=2,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=44,
serialized_end=107,
)
_PRIVATEKEY = _descriptor.Descriptor(
name='PrivateKey',
full_name='crypto.pb.PrivateKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key_type', full_name='crypto.pb.PrivateKey.key_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='crypto.pb.PrivateKey.data', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=109,
serialized_end=173,
name="PrivateKey",
full_name="crypto.pb.PrivateKey",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key_type",
full_name="crypto.pb.PrivateKey.key_type",
index=0,
number=1,
type=14,
cpp_type=8,
label=2,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data",
full_name="crypto.pb.PrivateKey.data",
index=1,
number=2,
type=12,
cpp_type=9,
label=2,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=109,
serialized_end=173,
)
_PUBLICKEY.fields_by_name['key_type'].enum_type = _KEYTYPE
_PRIVATEKEY.fields_by_name['key_type'].enum_type = _KEYTYPE
DESCRIPTOR.message_types_by_name['PublicKey'] = _PUBLICKEY
DESCRIPTOR.message_types_by_name['PrivateKey'] = _PRIVATEKEY
DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE
_PUBLICKEY.fields_by_name["key_type"].enum_type = _KEYTYPE
_PRIVATEKEY.fields_by_name["key_type"].enum_type = _KEYTYPE
DESCRIPTOR.message_types_by_name["PublicKey"] = _PUBLICKEY
DESCRIPTOR.message_types_by_name["PrivateKey"] = _PRIVATEKEY
DESCRIPTOR.enum_types_by_name["KeyType"] = _KEYTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PublicKey = _reflection.GeneratedProtocolMessageType('PublicKey', (_message.Message,), {
'DESCRIPTOR' : _PUBLICKEY,
'__module__' : 'libp2p.crypto.pb.crypto_pb2'
# @@protoc_insertion_point(class_scope:crypto.pb.PublicKey)
})
PublicKey = _reflection.GeneratedProtocolMessageType(
"PublicKey",
(_message.Message,),
{
"DESCRIPTOR": _PUBLICKEY,
"__module__": "libp2p.crypto.pb.crypto_pb2"
# @@protoc_insertion_point(class_scope:crypto.pb.PublicKey)
},
)
_sym_db.RegisterMessage(PublicKey)
PrivateKey = _reflection.GeneratedProtocolMessageType('PrivateKey', (_message.Message,), {
'DESCRIPTOR' : _PRIVATEKEY,
'__module__' : 'libp2p.crypto.pb.crypto_pb2'
# @@protoc_insertion_point(class_scope:crypto.pb.PrivateKey)
})
PrivateKey = _reflection.GeneratedProtocolMessageType(
"PrivateKey",
(_message.Message,),
{
"DESCRIPTOR": _PRIVATEKEY,
"__module__": "libp2p.crypto.pb.crypto_pb2"
# @@protoc_insertion_point(class_scope:crypto.pb.PrivateKey)
},
)
_sym_db.RegisterMessage(PrivateKey)

View File

@ -19,7 +19,6 @@ from typing_extensions import (
Literal as typing_extensions___Literal,
)
class KeyType(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod
@ -36,6 +35,7 @@ class KeyType(int):
Ed25519 = typing___cast(KeyType, 1)
Secp256k1 = typing___cast(KeyType, 2)
ECDSA = typing___cast(KeyType, 3)
RSA = typing___cast(KeyType, 0)
Ed25519 = typing___cast(KeyType, 1)
Secp256k1 = typing___cast(KeyType, 2)
@ -43,42 +43,72 @@ ECDSA = typing___cast(KeyType, 3)
class PublicKey(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
key_type = ... # type: KeyType
data = ... # type: bytes
key_type = ... # type: KeyType
data = ... # type: bytes
def __init__(self,
def __init__(
self,
*,
key_type : KeyType,
data : bytes,
) -> None: ...
key_type: KeyType,
data: bytes,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> PublicKey: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["data", "key_type"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["data", "key_type"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> None: ...
class PrivateKey(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
key_type = ... # type: KeyType
data = ... # type: bytes
key_type = ... # type: KeyType
data = ... # type: bytes
def __init__(self,
def __init__(
self,
*,
key_type : KeyType,
data : bytes,
) -> None: ...
key_type: KeyType,
data: bytes,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> PrivateKey: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["data", "key_type"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["data", "key_type"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> None: ...

View File

@ -1,9 +1,20 @@
from Crypto.Hash import SHA256
from Crypto.Hash import (
SHA256,
)
import Crypto.PublicKey.RSA as RSA
from Crypto.PublicKey.RSA import RsaKey
from Crypto.Signature import pkcs1_15
from Crypto.PublicKey.RSA import (
RsaKey,
)
from Crypto.Signature import (
pkcs1_15,
)
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
class RSAPublicKey(PublicKey):

View File

@ -1,6 +1,11 @@
import coincurve
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
class Secp256k1PublicKey(PublicKey):

View File

@ -1,8 +1,22 @@
from libp2p.crypto.ed25519 import Ed25519PrivateKey, Ed25519PublicKey
from libp2p.crypto.exceptions import MissingDeserializerError
from libp2p.crypto.keys import KeyType, PrivateKey, PublicKey
from libp2p.crypto.rsa import RSAPublicKey
from libp2p.crypto.secp256k1 import Secp256k1PrivateKey, Secp256k1PublicKey
from libp2p.crypto.ed25519 import (
Ed25519PrivateKey,
Ed25519PublicKey,
)
from libp2p.crypto.exceptions import (
MissingDeserializerError,
)
from libp2p.crypto.keys import (
KeyType,
PrivateKey,
PublicKey,
)
from libp2p.crypto.rsa import (
RSAPublicKey,
)
from libp2p.crypto.secp256k1 import (
Secp256k1PrivateKey,
Secp256k1PublicKey,
)
key_type_to_public_key_deserializer = {
KeyType.Secp256k1.value: Secp256k1PublicKey.from_bytes,

View File

@ -1,28 +1,70 @@
import logging
from typing import TYPE_CHECKING, AsyncIterator, List, Sequence
from typing import (
TYPE_CHECKING,
AsyncIterator,
List,
Sequence,
)
from async_generator import asynccontextmanager
from async_service import background_trio_service
from async_generator import (
asynccontextmanager,
)
from async_service import (
background_trio_service,
)
import multiaddr
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.host.defaults import get_default_protocols
from libp2p.host.exceptions import StreamFailure
from libp2p.network.network_interface import INetworkService
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.protocol_muxer.exceptions import MultiselectClientError, MultiselectError
from libp2p.protocol_muxer.multiselect import Multiselect
from libp2p.protocol_muxer.multiselect_client import MultiselectClient
from libp2p.protocol_muxer.multiselect_communicator import MultiselectCommunicator
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.host.defaults import (
get_default_protocols,
)
from libp2p.host.exceptions import (
StreamFailure,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.protocol_muxer.exceptions import (
MultiselectClientError,
MultiselectError,
)
from libp2p.protocol_muxer.multiselect import (
Multiselect,
)
from libp2p.protocol_muxer.multiselect_client import (
MultiselectClient,
)
from libp2p.protocol_muxer.multiselect_communicator import (
MultiselectCommunicator,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .host_interface import IHost
from .host_interface import (
IHost,
)
if TYPE_CHECKING:
from collections import OrderedDict
from collections import (
OrderedDict,
)
# Upon host creation, host takes in options,
# including the list of addresses on which to listen.
@ -108,7 +150,7 @@ class BasicHost(IHost):
self, listen_addrs: Sequence[multiaddr.Multiaddr]
) -> AsyncIterator[None]:
"""
run the host instance and listen to ``listen_addrs``.
Run the host instance and listen to ``listen_addrs``.
:param listen_addrs: a sequence of multiaddrs that we want to listen to
"""
@ -121,7 +163,7 @@ class BasicHost(IHost):
self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
) -> None:
"""
set stream handler for given `protocol_id`
Set stream handler for given `protocol_id`
:param protocol_id: protocol id used on stream
:param stream_handler: a stream handler function
@ -136,7 +178,6 @@ class BasicHost(IHost):
:param protocol_ids: available protocol ids to use for stream
:return: stream: new stream created
"""
net_stream = await self._network.new_stream(peer_id)
# Perform protocol muxing to determine protocol to use
@ -154,7 +195,7 @@ class BasicHost(IHost):
async def connect(self, peer_info: PeerInfo) -> None:
"""
connect ensures there is a connection between this host and the peer
Ensure there is a connection between this host and the peer
with given `peer_info.peer_id`. connect will absorb the addresses in
peer_info into its internal peerstore. If there is not an active
connection, connect will issue a dial, and block until a connection is

View File

@ -1,14 +1,27 @@
from collections import OrderedDict
from typing import TYPE_CHECKING
from collections import (
OrderedDict,
)
from typing import (
TYPE_CHECKING,
)
from libp2p.host.host_interface import IHost
from libp2p.host.host_interface import (
IHost,
)
from libp2p.host.ping import (
handle_ping,
)
from libp2p.host.ping import ID as PingID
from libp2p.host.ping import handle_ping
from libp2p.identity.identify.protocol import (
identify_handler_for,
)
from libp2p.identity.identify.protocol import ID as IdentifyID
from libp2p.identity.identify.protocol import identify_handler_for
if TYPE_CHECKING:
from libp2p.typing import TProtocol, StreamHandlerFn
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
def get_default_protocols(host: IHost) -> "OrderedDict[TProtocol, StreamHandlerFn]":

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class HostException(BaseLibp2pError):

View File

@ -1,14 +1,36 @@
from abc import ABC, abstractmethod
from typing import Any, AsyncContextManager, List, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
Any,
AsyncContextManager,
List,
Sequence,
)
import multiaddr
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.network.network_interface import INetworkService
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
class IHost(ABC):
@ -54,7 +76,7 @@ class IHost(ABC):
self, listen_addrs: Sequence[multiaddr.Multiaddr]
) -> AsyncContextManager[None]:
"""
run the host instance and listen to ``listen_addrs``.
Run the host instance and listen to ``listen_addrs``.
:param listen_addrs: a sequence of multiaddrs that we want to listen to
"""
@ -64,7 +86,7 @@ class IHost(ABC):
self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
) -> None:
"""
set stream handler for host.
Set stream handler for host.
:param protocol_id: protocol id used on stream
:param stream_handler: a stream handler function
@ -85,7 +107,7 @@ class IHost(ABC):
@abstractmethod
async def connect(self, peer_info: PeerInfo) -> None:
"""
connect ensures there is a connection between this host and the peer
Ensure there is a connection between this host and the peer
with given peer_info.peer_id. connect will absorb the addresses in
peer_info into its internal peerstore. If there is not an active
connection, connect will issue a dial, and block until a connection is

View File

@ -2,10 +2,18 @@ import logging
import trio
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.stream.exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import ID as PeerID
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
ID = TProtocol("/ipfs/ping/1.0.0")
PING_LENGTH = 32
@ -15,8 +23,9 @@ logger = logging.getLogger("libp2p.host.ping")
async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
"""Return a boolean indicating if we expect more pings from the peer at
``peer_id``."""
"""
Return a boolean indicating if we expect more pings from the peer at ``peer_id``.
"""
try:
with trio.fail_after(RESP_TIMEOUT):
payload = await stream.read(PING_LENGTH)
@ -46,8 +55,10 @@ async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
async def handle_ping(stream: INetStream) -> None:
"""``handle_ping`` responds to incoming ping requests until one side errors
or closes the ``stream``."""
"""
Respond to incoming ping requests until one side errors
or closes the ``stream``.
"""
peer_id = stream.muxed_conn.peer_id
while True:

View File

@ -1,8 +1,18 @@
from libp2p.host.basic_host import BasicHost
from libp2p.host.exceptions import ConnectionFailure
from libp2p.network.network_interface import INetworkService
from libp2p.peer.peerinfo import PeerInfo
from libp2p.routing.interfaces import IPeerRouting
from libp2p.host.basic_host import (
BasicHost,
)
from libp2p.host.exceptions import (
ConnectionFailure,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.routing.interfaces import (
IPeerRouting,
)
# RoutedHost is a p2p Host that includes a routing system.
@ -16,7 +26,7 @@ class RoutedHost(BasicHost):
async def connect(self, peer_info: PeerInfo) -> None:
"""
connect ensures there is a connection between this host and the peer
Ensure there is a connection between this host and the peer
with given `peer_info.peer_id`. See (basic_host).connect for more
information.
@ -26,7 +36,8 @@ class RoutedHost(BasicHost):
:param peer_info: peer_info of the peer we want to connect to
:type peer_info: peer.peerinfo.PeerInfo
"""
# check if we were given some addresses, otherwise, find some with the routing system.
# check if we were given some addresses, otherwise, find some with the
# routing system.
if not peer_info.addrs:
found_peer_info = await self._router.find_peer(peer_info.peer_id)
if not found_peer_info:

View File

@ -9,4 +9,4 @@ message Identify {
repeated bytes listen_addrs = 2;
optional bytes observed_addr = 4;
repeated string protocols = 3;
}

View File

@ -1,104 +1,170 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/identity/identify/pb/identify.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/identity/identify/pb/identify.proto',
package='identify.pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb\"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t')
name="libp2p/identity/identify/pb/identify.proto",
package="identify.pb",
syntax="proto2",
serialized_options=None,
serialized_pb=_b(
'\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t'
),
)
_IDENTIFY = _descriptor.Descriptor(
name='Identify',
full_name='identify.pb.Identify',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='protocol_version', full_name='identify.pb.Identify.protocol_version', index=0,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='agent_version', full_name='identify.pb.Identify.agent_version', index=1,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='public_key', full_name='identify.pb.Identify.public_key', index=2,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='listen_addrs', full_name='identify.pb.Identify.listen_addrs', index=3,
number=2, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='observed_addr', full_name='identify.pb.Identify.observed_addr', index=4,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='protocols', full_name='identify.pb.Identify.protocols', index=5,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=60,
serialized_end=203,
name="Identify",
full_name="identify.pb.Identify",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="protocol_version",
full_name="identify.pb.Identify.protocol_version",
index=0,
number=5,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="agent_version",
full_name="identify.pb.Identify.agent_version",
index=1,
number=6,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="public_key",
full_name="identify.pb.Identify.public_key",
index=2,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="listen_addrs",
full_name="identify.pb.Identify.listen_addrs",
index=3,
number=2,
type=12,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="observed_addr",
full_name="identify.pb.Identify.observed_addr",
index=4,
number=4,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="protocols",
full_name="identify.pb.Identify.protocols",
index=5,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=60,
serialized_end=203,
)
DESCRIPTOR.message_types_by_name['Identify'] = _IDENTIFY
DESCRIPTOR.message_types_by_name["Identify"] = _IDENTIFY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Identify = _reflection.GeneratedProtocolMessageType('Identify', (_message.Message,), {
'DESCRIPTOR' : _IDENTIFY,
'__module__' : 'libp2p.identity.identify.pb.identify_pb2'
# @@protoc_insertion_point(class_scope:identify.pb.Identify)
})
Identify = _reflection.GeneratedProtocolMessageType(
"Identify",
(_message.Message,),
{
"DESCRIPTOR": _IDENTIFY,
"__module__": "libp2p.identity.identify.pb.identify_pb2"
# @@protoc_insertion_point(class_scope:identify.pb.Identify)
},
)
_sym_db.RegisterMessage(Identify)

View File

@ -22,32 +22,79 @@ from typing_extensions import (
Literal as typing_extensions___Literal,
)
class Identify(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
protocol_version = ... # type: typing___Text
agent_version = ... # type: typing___Text
public_key = ... # type: bytes
listen_addrs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
observed_addr = ... # type: bytes
protocols = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
protocol_version = ... # type: typing___Text
agent_version = ... # type: typing___Text
public_key = ... # type: bytes
listen_addrs = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
observed_addr = ... # type: bytes
protocols = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self,
def __init__(
self,
*,
protocol_version : typing___Optional[typing___Text] = None,
agent_version : typing___Optional[typing___Text] = None,
public_key : typing___Optional[bytes] = None,
listen_addrs : typing___Optional[typing___Iterable[bytes]] = None,
observed_addr : typing___Optional[bytes] = None,
protocols : typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
protocol_version: typing___Optional[typing___Text] = None,
agent_version: typing___Optional[typing___Text] = None,
public_key: typing___Optional[bytes] = None,
listen_addrs: typing___Optional[typing___Iterable[bytes]] = None,
observed_addr: typing___Optional[bytes] = None,
protocols: typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Identify: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"agent_version",u"observed_addr",u"protocol_version",u"public_key"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",u"listen_addrs",u"observed_addr",u"protocol_version",u"protocols",u"public_key"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"agent_version", "observed_addr", "protocol_version", "public_key"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"agent_version",
"listen_addrs",
"observed_addr",
"protocol_version",
"protocols",
"public_key",
],
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"public_key",b"public_key"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"listen_addrs",b"listen_addrs",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"protocols",b"protocols",u"public_key",b"public_key"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"agent_version",
b"agent_version",
"observed_addr",
b"observed_addr",
"protocol_version",
b"protocol_version",
"public_key",
b"public_key",
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"agent_version",
b"agent_version",
"listen_addrs",
b"listen_addrs",
"observed_addr",
b"observed_addr",
"protocol_version",
b"protocol_version",
"protocols",
b"protocols",
"public_key",
b"public_key",
],
) -> None: ...

View File

@ -1,13 +1,26 @@
import logging
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.host.host_interface import IHost
from libp2p.network.stream.exceptions import StreamClosed
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.host.host_interface import (
IHost,
)
from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .pb.identify_pb2 import Identify
from .pb.identify_pb2 import (
Identify,
)
ID = TProtocol("/ipfs/id/1.0.0")
PROTOCOL_VERSION = "ipfs/0.1.0"

View File

@ -1,4 +1,7 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
class Closer(ABC):

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class IOException(BaseLibp2pError):

View File

@ -5,13 +5,26 @@ from that repo: "a simple package to r/w length-delimited slices."
NOTE: currently missing the capability to indicate lengths by "varint" method.
"""
from abc import abstractmethod
from abc import (
abstractmethod,
)
from libp2p.io.abc import MsgReadWriteCloser, Reader, ReadWriteCloser
from libp2p.io.utils import read_exactly
from libp2p.utils import decode_uvarint_from_stream, encode_varint_prefixed
from libp2p.io.abc import (
MsgReadWriteCloser,
Reader,
ReadWriteCloser,
)
from libp2p.io.utils import (
read_exactly,
)
from libp2p.utils import (
decode_uvarint_from_stream,
encode_varint_prefixed,
)
from .exceptions import MessageTooLarge
from .exceptions import (
MessageTooLarge,
)
BYTE_ORDER = "big"
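For orientation only (not code from this module): the fixed-length-prefix scheme it implements amounts to writing a big-endian length before each payload; the 4-byte prefix size below is just an example.

payload = b"hello"
frame = len(payload).to_bytes(4, BYTE_ORDER) + payload     # writer: length prefix, then bytes
length = int.from_bytes(frame[:4], BYTE_ORDER)             # reader: recover the length first
assert frame[4:4 + length] == payload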

View File

@ -2,8 +2,12 @@ import logging
import trio
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.exceptions import IOException
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
logger = logging.getLogger("libp2p.io.trio")

View File

@ -1,5 +1,9 @@
from libp2p.io.abc import Reader
from libp2p.io.exceptions import IncompleteReadError
from libp2p.io.abc import (
Reader,
)
from libp2p.io.exceptions import (
IncompleteReadError,
)
DEFAULT_RETRY_READ_COUNT = 100

View File

@ -1,4 +1,6 @@
from libp2p.io.exceptions import IOException
from libp2p.io.exceptions import (
IOException,
)
class RawConnError(IOException):

View File

@ -1,11 +1,21 @@
from abc import abstractmethod
from typing import Tuple
from abc import (
abstractmethod,
)
from typing import (
Tuple,
)
import trio
from libp2p.io.abc import Closer
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.io.abc import (
Closer,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
class INetConn(Closer):

View File

@ -1,8 +1,16 @@
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.exceptions import IOException
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
from .exceptions import RawConnError
from .raw_connection_interface import IRawConnection
from .exceptions import (
RawConnError,
)
from .raw_connection_interface import (
IRawConnection,
)
class RawConnection(IRawConnection):

View File

@ -1,4 +1,6 @@
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.abc import (
ReadWriteCloser,
)
class IRawConnection(ReadWriteCloser):

View File

@ -1,11 +1,24 @@
from typing import TYPE_CHECKING, Set, Tuple
from typing import (
TYPE_CHECKING,
Set,
Tuple,
)
import trio
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.network.stream.net_stream import NetStream
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.stream_muxer.exceptions import MuxedConnUnavailable
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.network.stream.net_stream import (
NetStream,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import (
MuxedConnUnavailable,
)
if TYPE_CHECKING:
from libp2p.network.swarm import Swarm # noqa: F401
@ -48,8 +61,8 @@ class SwarmConn(INetConn):
# We *could* optimize this but it really isn't worth it.
for stream in self.streams.copy():
await stream.reset()
# Force context switch for stream handlers to process the stream reset event we just emit
# before we cancel the stream handler tasks.
# Force context switch for stream handlers to process the stream reset event we
# just emit before we cancel the stream handler tasks.
await trio.sleep(0.1)
await self._notify_disconnected()
@ -63,13 +76,15 @@ class SwarmConn(INetConn):
except MuxedConnUnavailable:
await self.close()
break
# Asynchronously handle the accepted stream, to avoid blocking the next stream.
# Asynchronously handle the accepted stream, to avoid blocking
# the next stream.
nursery.start_soon(self._handle_muxed_stream, stream)
async def _handle_muxed_stream(self, muxed_stream: IMuxedStream) -> None:
net_stream = await self._add_stream(muxed_stream)
try:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
await self.swarm.common_stream_handler(net_stream) # type: ignore
finally:
# As long as `common_stream_handler`, remove the stream.

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class SwarmException(BaseLibp2pError):

View File

@ -1,23 +1,45 @@
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Dict, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
Dict,
Sequence,
)
from async_service import ServiceAPI
from multiaddr import Multiaddr
from async_service import (
ServiceAPI,
)
from multiaddr import (
Multiaddr,
)
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.peer.id import ID
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.transport.listener_interface import IListener
from libp2p.typing import StreamHandlerFn
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.transport.listener_interface import (
IListener,
)
from libp2p.typing import (
StreamHandlerFn,
)
from .stream.net_stream_interface import INetStream
from .stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING:
from .notifee_interface import INotifee # noqa: F401
class INetwork(ABC):
peerstore: IPeerStore
connections: Dict[ID, INetConn]
listeners: Dict[str, IListener]

View File

@ -1,10 +1,21 @@
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING
from abc import (
ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING:
from .network_interface import INetwork # noqa: F401

View File

@ -1,4 +1,6 @@
from libp2p.io.exceptions import IOException
from libp2p.io.exceptions import (
IOException,
)
class StreamError(IOException):

View File

@ -1,22 +1,33 @@
from typing import Optional
from typing import (
Optional,
)
from libp2p.stream_muxer.abc import IMuxedStream
from libp2p.stream_muxer.abc import (
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import (
MuxedStreamClosed,
MuxedStreamEOF,
MuxedStreamReset,
)
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
from .exceptions import StreamClosed, StreamEOF, StreamReset
from .net_stream_interface import INetStream
from .exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from .net_stream_interface import (
INetStream,
)
# TODO: Handle exceptions from `muxed_stream`
# TODO: Add stream state
# - Reference: https://github.com/libp2p/go-libp2p-swarm/blob/99831444e78c8f23c9335c17d8f7c700ba25ca14/swarm_stream.go # noqa: E501
class NetStream(INetStream):
muxed_stream: IMuxedStream
protocol_id: Optional[TProtocol]
@ -39,7 +50,7 @@ class NetStream(INetStream):
async def read(self, n: int = None) -> bytes:
"""
reads from stream.
Read from stream.
:param n: number of bytes to read
:return: bytes of input
@ -53,7 +64,7 @@ class NetStream(INetStream):
async def write(self, data: bytes) -> None:
"""
write to stream.
Write to stream.
:return: number of bytes written
"""
@ -63,7 +74,7 @@ class NetStream(INetStream):
raise StreamClosed() from error
async def close(self) -> None:
"""close stream."""
"""Close stream."""
await self.muxed_stream.close()
async def reset(self) -> None:

View File

@ -1,12 +1,19 @@
from abc import abstractmethod
from abc import (
abstractmethod,
)
from libp2p.io.abc import ReadWriteCloser
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.typing import TProtocol
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
from libp2p.typing import (
TProtocol,
)
class INetStream(ReadWriteCloser):
muxed_conn: IMuxedConn
@abstractmethod

View File

@ -1,33 +1,75 @@
import logging
from typing import Dict, List, Optional
from typing import (
Dict,
List,
Optional,
)
from async_service import Service
from multiaddr import Multiaddr
from async_service import (
Service,
)
from multiaddr import (
Multiaddr,
)
import trio
from libp2p.io.abc import ReadWriteCloser
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.peer.id import ID
from libp2p.peer.peerstore import PeerStoreError
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore import (
PeerStoreError,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
from libp2p.transport.exceptions import (
MuxerUpgradeFailure,
OpenConnectionError,
SecurityUpgradeFailure,
)
from libp2p.transport.listener_interface import IListener
from libp2p.transport.transport_interface import ITransport
from libp2p.transport.upgrader import TransportUpgrader
from libp2p.typing import StreamHandlerFn
from libp2p.transport.listener_interface import (
IListener,
)
from libp2p.transport.transport_interface import (
ITransport,
)
from libp2p.transport.upgrader import (
TransportUpgrader,
)
from libp2p.typing import (
StreamHandlerFn,
)
from ..exceptions import MultiError
from .connection.raw_connection import RawConnection
from .connection.swarm_connection import SwarmConn
from .exceptions import SwarmException
from .network_interface import INetworkService
from .notifee_interface import INotifee
from .stream.net_stream_interface import INetStream
from ..exceptions import (
MultiError,
)
from .connection.raw_connection import (
RawConnection,
)
from .connection.swarm_connection import (
SwarmConn,
)
from .exceptions import (
SwarmException,
)
from .network_interface import (
INetworkService,
)
from .notifee_interface import (
INotifee,
)
from .stream.net_stream_interface import (
INetStream,
)
logger = logging.getLogger("libp2p.network.swarm")
@ -40,7 +82,6 @@ def create_default_stream_handler(network: INetworkService) -> StreamHandlerFn:
class Swarm(Service, INetworkService):
self_id: ID
peerstore: IPeerStore
upgrader: TransportUpgrader
@ -72,7 +113,8 @@ class Swarm(Service, INetworkService):
# Create Notifee array
self.notifees = []
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
self.common_stream_handler = create_default_stream_handler(self) # type: ignore
self.listener_nursery = None
@ -95,18 +137,18 @@ class Swarm(Service, INetworkService):
return self.self_id
def set_stream_handler(self, stream_handler: StreamHandlerFn) -> None:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
self.common_stream_handler = stream_handler # type: ignore
async def dial_peer(self, peer_id: ID) -> INetConn:
"""
dial_peer try to create a connection to peer_id.
Try to create a connection to peer_id.
:param peer_id: peer if we want to dial
:raises SwarmException: raised when an error occurs
:return: muxed connection
"""
if peer_id in self.connections:
# If muxed connection already exists for peer_id,
# set muxed connection equal to existing muxed connection
@ -140,20 +182,19 @@ class Swarm(Service, INetworkService):
# Tried all addresses, raising exception.
raise SwarmException(
f"unable to connect to {peer_id}, no addresses established a successful connection "
"(with exceptions)"
f"unable to connect to {peer_id}, no addresses established a successful "
"connection (with exceptions)"
) from MultiError(exceptions)
async def dial_addr(self, addr: Multiaddr, peer_id: ID) -> INetConn:
"""
dial_addr try to create a connection to peer_id with addr.
Try to create a connection to peer_id with addr.
:param addr: the address we want to connect with
:param peer_id: the peer we want to connect to
:raises SwarmException: raised when an error occurs
:return: network connection
"""
# Dial peer (connection to peer does not yet exist)
# Transport dials peer (gets back a raw conn)
try:
@ -231,11 +272,13 @@ class Swarm(Service, INetworkService):
if str(maddr) in self.listeners:
return True
async def conn_handler(read_write_closer: ReadWriteCloser) -> None:
async def conn_handler(
read_write_closer: ReadWriteCloser, maddr=maddr
) -> None:
raw_conn = RawConnection(read_write_closer, False)
# Per, https://discuss.libp2p.io/t/multistream-security/130, we first secure
# the conn and then mux the conn
# Per, https://discuss.libp2p.io/t/multistream-security/130, we first
# secure the conn and then mux the conn
try:
# FIXME: This dummy `ID(b"")` for the remote peer is useless.
secured_conn = await self.upgrader.upgrade_security(
@ -264,8 +307,8 @@ class Swarm(Service, INetworkService):
await self.add_conn(muxed_conn)
logger.debug("successfully opened connection to peer %s", peer_id)
# NOTE: This is an intentional barrier to prevent the handler from exiting and
# closing the connection.
# NOTE: This is an intentional barrier to prevent the handler from
# exiting and closing the connection.
await self.manager.wait_finished()
try:
@ -282,7 +325,7 @@ class Swarm(Service, INetworkService):
await self.notify_listen(maddr)
return True
except IOError:
except OSError:
# Failed. Continue looping.
logger.debug("fail to listen on: %s", maddr)
@ -304,9 +347,11 @@ class Swarm(Service, INetworkService):
logger.debug("successfully close the connection to peer %s", peer_id)
async def add_conn(self, muxed_conn: IMuxedConn) -> SwarmConn:
"""Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
"""
Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
and start to monitor the connection for its new streams and
disconnection."""
disconnection.
"""
swarm_conn = SwarmConn(muxed_conn, self)
self.manager.run_task(muxed_conn.start)
await muxed_conn.event_started.wait()
@ -319,8 +364,10 @@ class Swarm(Service, INetworkService):
return swarm_conn
def remove_conn(self, swarm_conn: SwarmConn) -> None:
"""Simply remove the connection from Swarm's records, without closing
the connection."""
"""
Simply remove the connection from Swarm's records, without closing
the connection.
"""
peer_id = swarm_conn.muxed_conn.peer_id
if peer_id not in self.connections:
return
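A hedged sketch of the dial paths documented above (placeholders throughout; both methods raise SwarmException when no address yields a connection):

async def dial_example(swarm: Swarm, peer_id: ID, addr: Optional[Multiaddr] = None) -> INetConn:
    if addr is not None:
        return await swarm.dial_addr(addr, peer_id)   # dial one known address
    return await swarm.dial_peer(peer_id)             # let the peerstore supply addresses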

View File

@ -1,5 +1,5 @@
# PeerStore
The PeerStore contains a mapping of peer IDs to PeerData objects. Each PeerData object represents a peer, and each PeerData contains a collection of protocols, addresses, and a mapping of metadata. PeerStore implements the IPeerStore (peer protocols), IAddrBook (address book), and IPeerMetadata (peer metadata) interfaces, which allows the peer store to effectively function as a dictionary for peer ID to protocol, address, and metadata.
The PeerStore contains a mapping of peer IDs to PeerData objects. Each PeerData object represents a peer, and each PeerData contains a collection of protocols, addresses, and a mapping of metadata. PeerStore implements the IPeerStore (peer protocols), IAddrBook (address book), and IPeerMetadata (peer metadata) interfaces, which allows the peer store to effectively function as a dictionary for peer ID to protocol, address, and metadata.
Note: PeerInfo represents a read-only summary of a PeerData object. Only the attributes assigned in PeerInfo are readable by references to PeerInfo objects.
Note: PeerInfo represents a read-only summary of a PeerData object. Only the attributes assigned in PeerInfo are readable by references to PeerInfo objects.
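To make that mapping concrete, here is a minimal, hypothetical sketch of a peer store shaped the way this README describes it (peer ID -> protocols, addresses, metadata). All names are invented for the example and are not the library's API.

```python
# Toy peer store mirroring the README's description; illustrative only.
from dataclasses import dataclass, field
from typing import Any, Dict, List


@dataclass
class ToyPeerRecord:
    protocols: List[str] = field(default_factory=list)
    addrs: List[str] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)


class ToyPeerStore:
    def __init__(self) -> None:
        self._records: Dict[str, ToyPeerRecord] = {}

    def _record(self, peer_id: str) -> ToyPeerRecord:
        # Create the per-peer record lazily on first use.
        return self._records.setdefault(peer_id, ToyPeerRecord())

    def add_protocols(self, peer_id: str, protocols: List[str]) -> None:
        self._record(peer_id).protocols.extend(protocols)

    def add_addr(self, peer_id: str, addr: str) -> None:
        self._record(peer_id).addrs.append(addr)

    def put(self, peer_id: str, key: str, value: Any) -> None:
        self._record(peer_id).metadata[key] = value


store = ToyPeerStore()
store.add_addr("QmPeer", "/ip4/127.0.0.1/tcp/8000")
store.add_protocols("QmPeer", ["/floodsub/1.0.0"])
store.put("QmPeer", "latency_ms", 42)
```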

View File

@ -1,9 +1,19 @@
from abc import ABC, abstractmethod
from typing import List, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from .id import ID
from .id import (
ID,
)
class IAddrBook(ABC):
@ -15,7 +25,7 @@ class IAddrBook(ABC):
:param peer_id: the peer to add address for
:param addr: multiaddress of the peer
:param ttl: time-to-live for the address (after this time, address is no longer valid)
"""
""" # noqa: E501
@abstractmethod
def add_addrs(self, peer_id: ID, addrs: Sequence[Multiaddr], ttl: int) -> None:
@ -28,7 +38,7 @@ class IAddrBook(ABC):
:param peer_id: the peer to add address for
:param addr: multiaddresses of the peer
:param ttl: time-to-live for the address (after this time, address is no longer valid)
"""
""" # noqa: E501
@abstractmethod
def addrs(self, peer_id: ID) -> List[Multiaddr]:

View File

@ -1,10 +1,14 @@
import hashlib
from typing import Union
from typing import (
Union,
)
import base58
import multihash
from libp2p.crypto.keys import PublicKey
from libp2p.crypto.keys import (
PublicKey,
)
# NOTE: On inlining...
# See: https://github.com/libp2p/specs/issues/138

View File

@ -1,14 +1,25 @@
from typing import Any, Dict, List, Sequence
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from .peerdata_interface import IPeerData
from .peerdata_interface import (
IPeerData,
)
class PeerData(IPeerData):
pubkey: PublicKey
privkey: PrivateKey
metadata: Dict[Any, Any]

View File

@ -1,11 +1,25 @@
from abc import ABC, abstractmethod
from typing import Any, List, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from .peermetadata_interface import IPeerMetadata
from .peermetadata_interface import (
IPeerMetadata,
)
class IPeerData(ABC):

View File

@ -1,8 +1,14 @@
from typing import Any, List, Sequence
from typing import (
Any,
List,
Sequence,
)
import multiaddr
from .id import ID
from .id import (
ID,
)
class PeerInfo:

View File

@ -1,7 +1,14 @@
from abc import ABC, abstractmethod
from typing import Any
from abc import (
ABC,
abstractmethod,
)
from typing import (
Any,
)
from .id import ID
from .id import (
ID,
)
class IPeerMetadata(ABC):

View File

@ -1,18 +1,39 @@
from collections import defaultdict
from typing import Any, Dict, List, Sequence
from collections import (
defaultdict,
)
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import KeyPair, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
PublicKey,
)
from .id import ID
from .peerdata import PeerData, PeerDataError
from .peerinfo import PeerInfo
from .peerstore_interface import IPeerStore
from .id import (
ID,
)
from .peerdata import (
PeerData,
PeerDataError,
)
from .peerinfo import (
PeerInfo,
)
from .peerstore_interface import (
IPeerStore,
)
class PeerStore(IPeerStore):
peer_data_map: Dict[ID, PeerData]
def __init__(self) -> None:

View File

@ -1,14 +1,34 @@
from abc import abstractmethod
from typing import Any, List, Sequence
from abc import (
abstractmethod,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import KeyPair, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
PublicKey,
)
from .addrbook_interface import IAddrBook
from .id import ID
from .peerinfo import PeerInfo
from .peermetadata_interface import IPeerMetadata
from .addrbook_interface import (
IAddrBook,
)
from .id import (
ID,
)
from .peerinfo import (
PeerInfo,
)
from .peermetadata_interface import (
IPeerMetadata,
)
class IPeerStore(IAddrBook, IPeerMetadata):

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class MultiselectCommunicatorError(BaseLibp2pError):

View File

@ -1,19 +1,34 @@
from typing import Dict, Tuple
from typing import (
Dict,
Tuple,
)
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .exceptions import MultiselectCommunicatorError, MultiselectError
from .multiselect_communicator_interface import IMultiselectCommunicator
from .multiselect_muxer_interface import IMultiselectMuxer
from .exceptions import (
MultiselectCommunicatorError,
MultiselectError,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
from .multiselect_muxer_interface import (
IMultiselectMuxer,
)
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na"
class Multiselect(IMultiselectMuxer):
"""Multiselect module that is responsible for responding to a multiselect
"""
Multiselect module that is responsible for responding to a multiselect
client and deciding on a specific protocol and handler pair to use for
communication."""
communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn]
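For context, here is a hedged sketch of the responder side of multistream-select as the docstring above describes it: echo the handshake id, echo a proposed protocol when a handler is registered, otherwise answer "na" (PROTOCOL_NOT_FOUND_MSG). The wire details (length prefixes, newline delimiters) are deliberately omitted.

```python
# Simplified multiselect responder logic; not the real wire protocol.
from typing import Callable, Dict

MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na"

Handler = Callable[[str], None]


def respond(handlers: Dict[str, Handler], proposed: str) -> str:
    if proposed == MULTISELECT_PROTOCOL_ID:
        return MULTISELECT_PROTOCOL_ID  # handshake echo
    # Echo a supported protocol, otherwise report "not available".
    return proposed if proposed in handlers else PROTOCOL_NOT_FOUND_MSG


handlers: Dict[str, Handler] = {"/echo/1.0.0": lambda msg: None}
assert respond(handlers, MULTISELECT_PROTOCOL_ID) == MULTISELECT_PROTOCOL_ID
assert respond(handlers, "/echo/1.0.0") == "/echo/1.0.0"
assert respond(handlers, "/chat/1.0.0") == PROTOCOL_NOT_FOUND_MSG
```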

View File

@ -1,18 +1,31 @@
from typing import Sequence
from typing import (
Sequence,
)
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
from .exceptions import MultiselectClientError, MultiselectCommunicatorError
from .multiselect_client_interface import IMultiselectClient
from .multiselect_communicator_interface import IMultiselectCommunicator
from .exceptions import (
MultiselectClientError,
MultiselectCommunicatorError,
)
from .multiselect_client_interface import (
IMultiselectClient,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na"
class MultiselectClient(IMultiselectClient):
"""Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over."""
"""
Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over.
"""
async def handshake(self, communicator: IMultiselectCommunicator) -> None:
"""

View File

@ -1,16 +1,26 @@
from abc import ABC, abstractmethod
from typing import Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
Sequence,
)
from libp2p.protocol_muxer.multiselect_communicator_interface import (
IMultiselectCommunicator,
)
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
class IMultiselectClient(ABC):
"""Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over."""
"""
Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over.
"""
@abstractmethod
async def handshake(self, communicator: IMultiselectCommunicator) -> None:
"""
Ensure that the client and multiselect are both using the same
@ -34,6 +44,7 @@ class IMultiselectClient(ABC):
:return: selected protocol
"""
@abstractmethod
async def try_select(
self, communicator: IMultiselectCommunicator, protocol: TProtocol
) -> TProtocol:

View File

@ -1,10 +1,23 @@
from libp2p.exceptions import ParseError
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.exceptions import IOException
from libp2p.utils import encode_delim, read_delim
from libp2p.exceptions import (
ParseError,
)
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
from libp2p.utils import (
encode_delim,
read_delim,
)
from .exceptions import MultiselectCommunicatorError
from .multiselect_communicator_interface import IMultiselectCommunicator
from .exceptions import (
MultiselectCommunicatorError,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
class MultiselectCommunicator(IMultiselectCommunicator):
@ -16,7 +29,7 @@ class MultiselectCommunicator(IMultiselectCommunicator):
async def write(self, msg_str: str) -> None:
"""
:raise MultiselectCommunicatorError: raised when failed to write to underlying reader
"""
""" # noqa: E501
msg_bytes = encode_delim(msg_str.encode())
try:
await self.read_writer.write(msg_bytes)
@ -28,7 +41,7 @@ class MultiselectCommunicator(IMultiselectCommunicator):
async def read(self) -> str:
"""
:raise MultiselectCommunicatorError: raised when failed to read from underlying reader
"""
""" # noqa: E501
try:
data = await read_delim(self.read_writer)
# `IOException` includes `IncompleteReadError` and `StreamError`

View File

@ -1,10 +1,15 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
class IMultiselectCommunicator(ABC):
"""Communicator helper class that ensures both the client and multistream
"""
Communicator helper class that ensures both the client and multistream
module will follow the same multistream protocol, which is necessary for
them to work."""
them to work.
"""
@abstractmethod
async def write(self, msg_str: str) -> None:

View File

@ -1,15 +1,28 @@
from abc import ABC, abstractmethod
from typing import Dict, Tuple
from abc import (
ABC,
abstractmethod,
)
from typing import (
Dict,
Tuple,
)
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .multiselect_communicator_interface import IMultiselectCommunicator
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
class IMultiselectMuxer(ABC):
"""Multiselect module that is responsible for responding to a multiselect
"""
Multiselect module that is responsible for responding to a multiselect
client and deciding on a specific protocol and handler pair to use for
communication."""
communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn]

View File

@ -1,4 +1,7 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
AsyncContextManager,
@ -8,13 +11,23 @@ from typing import (
Tuple,
)
from async_service import ServiceAPI
from async_service import (
ServiceAPI,
)
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from .pb import rpc_pb2
from .typing import ValidatorFn
from .pb import (
rpc_pb2,
)
from .typing import (
ValidatorFn,
)
if TYPE_CHECKING:
from .pubsub import Pubsub # noqa: F401
@ -69,9 +82,9 @@ class IPubsubRouter(ABC):
"""
Invoked to process control messages in the RPC envelope.
It is invoked after subscriptions and payload messages have been processed
TODO: Check if this interface is ok. It's not the exact same as the go code, but the go
code is really confusing with the msg origin, they specify `rpc.from` even when the rpc
shouldn't have a from
TODO: Check if this interface is ok. It's not exactly the same as the go code,
but the go code is really confusing with the msg origin: they specify `rpc.from`
even when the rpc shouldn't have a from
:param rpc: rpc message
"""

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class PubsubRouterError(BaseLibp2pError):

View File

@ -1,16 +1,34 @@
import logging
from typing import Iterable, List, Sequence
from typing import (
Iterable,
List,
Sequence,
)
import trio
from libp2p.network.stream.exceptions import StreamClosed
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed
from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
)
from .abc import IPubsubRouter
from .pb import rpc_pb2
from .pubsub import Pubsub
from .abc import (
IPubsubRouter,
)
from .pb import (
rpc_pb2,
)
from .pubsub import (
Pubsub,
)
PROTOCOL_ID = TProtocol("/floodsub/1.0.0")
@ -18,7 +36,6 @@ logger = logging.getLogger("libp2p.pubsub.floodsub")
class FloodSub(IPubsubRouter):
protocols: List[TProtocol]
pubsub: Pubsub
@ -80,7 +97,6 @@ class FloodSub(IPubsubRouter):
:param msg_forwarder: peer ID of the peer who forwards the message to us
:param pubsub_msg: pubsub message in protobuf.
"""
peers_gen = set(
self._get_peers_to_send(
pubsub_msg.topicIDs,

View File

@ -1,23 +1,58 @@
from ast import literal_eval
from collections import defaultdict
from ast import (
literal_eval,
)
from collections import (
defaultdict,
)
import logging
import random
from typing import Any, DefaultDict, Dict, Iterable, List, Sequence, Set, Tuple
from typing import (
Any,
DefaultDict,
Dict,
Iterable,
List,
Sequence,
Set,
Tuple,
)
from async_service import Service
from async_service import (
Service,
)
import trio
from libp2p.network.stream.exceptions import StreamClosed
from libp2p.peer.id import ID
from libp2p.pubsub import floodsub
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed
from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.peer.id import (
ID,
)
from libp2p.pubsub import (
floodsub,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
)
from .abc import IPubsubRouter
from .exceptions import NoPubsubAttached
from .mcache import MessageCache
from .pb import rpc_pb2
from .pubsub import Pubsub
from .abc import (
IPubsubRouter,
)
from .exceptions import (
NoPubsubAttached,
)
from .mcache import (
MessageCache,
)
from .pb import (
rpc_pb2,
)
from .pubsub import (
Pubsub,
)
PROTOCOL_ID = TProtocol("/meshsub/1.0.0")
@ -120,10 +155,10 @@ class GossipSub(IPubsubRouter, Service):
logger.debug("adding peer %s with protocol %s", peer_id, protocol_id)
if protocol_id not in (PROTOCOL_ID, floodsub.PROTOCOL_ID):
# We should never enter here. Becuase the `protocol_id` is registered by your pubsub
# instance in multistream-select, but it is not the protocol that gossipsub supports.
# In this case, probably we registered gossipsub to a wrong `protocol_id`
# in multistream-select, or wrong versions.
# We should never enter here, because the `protocol_id` is registered by
# your pubsub instance in multistream-select, but it is not the protocol
# that gossipsub supports. In this case, probably we registered gossipsub
# to a wrong `protocol_id` in multistream-select, or wrong versions.
raise ValueError(f"Protocol={protocol_id} is not supported.")
self.peer_protocol[peer_id] = protocol_id
@ -208,11 +243,11 @@ class GossipSub(IPubsubRouter, Service):
continue
# floodsub peers
floodsub_peers: Set[ID] = set(
floodsub_peers: Set[ID] = {
peer_id
for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == floodsub.PROTOCOL_ID
)
}
send_to.update(floodsub_peers)
# gossipsub peers
@ -220,9 +255,9 @@ class GossipSub(IPubsubRouter, Service):
if topic in self.mesh:
gossipsub_peers = self.mesh[topic]
else:
# When we publish to a topic that we have not subscribe to, we randomly pick
# `self.degree` number of peers who have subscribed to the topic and add them
# as our `fanout` peers.
# When we publish to a topic that we have not subscribed to, we randomly
# pick `self.degree` number of peers who have subscribed to the topic
# and add them as our `fanout` peers.
topic_in_fanout: bool = topic in self.fanout
fanout_peers: Set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_size = len(fanout_peers)
@ -270,7 +305,7 @@ class GossipSub(IPubsubRouter, Service):
# Combine fanout peers with selected peers
fanout_peers.update(selected_peers)
# Add fanout peers to mesh and notifies them with a GRAFT(topic) control message.
# Add fanout peers to mesh and notify them with a GRAFT(topic) control message
for peer in fanout_peers:
self.mesh[topic].add(peer)
await self.emit_graft(topic, peer)
@ -369,10 +404,10 @@ class GossipSub(IPubsubRouter, Service):
"""
Call individual heartbeats.
Note: the heartbeats are called with awaits because each heartbeat depends on the
state changes in the preceding heartbeat
Note: the heartbeats are called with awaits because each heartbeat depends on
the state changes in the preceding heartbeat
"""
# Start after a delay. Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L410 # Noqa: E501
# Start after a delay. Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L410 # noqa: E501
await trio.sleep(self.heartbeat_initial_delay)
while True:
# Maintain mesh and keep track of which peers to send GRAFT or PRUNE to
@ -381,7 +416,8 @@ class GossipSub(IPubsubRouter, Service):
self.fanout_heartbeat()
# Get the peers to send IHAVE to
peers_to_gossip = self.gossip_heartbeat()
# Pack GRAFT, PRUNE and IHAVE for the same peer into one control message and send it
# Pack GRAFT, PRUNE and IHAVE for the same peer into one control message and
# send it
await self._emit_control_msgs(
peers_to_graft, peers_to_prune, peers_to_gossip
)
@ -391,7 +427,7 @@ class GossipSub(IPubsubRouter, Service):
await trio.sleep(self.heartbeat_interval)
def mesh_heartbeat(
self
self,
) -> Tuple[DefaultDict[ID, List[str]], DefaultDict[ID, List[str]]]:
peers_to_graft: DefaultDict[ID, List[str]] = defaultdict(list)
peers_to_prune: DefaultDict[ID, List[str]] = defaultdict(list)
@ -402,7 +438,7 @@ class GossipSub(IPubsubRouter, Service):
num_mesh_peers_in_topic = len(self.mesh[topic])
if num_mesh_peers_in_topic < self.degree_low:
# Select D - |mesh[topic]| peers from peers.gossipsub[topic] - mesh[topic]
# Select D - |mesh[topic]| peers from peers.gossipsub[topic] - mesh[topic] # noqa: E501
selected_peers = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree - num_mesh_peers_in_topic, self.mesh[topic]
)
@ -436,7 +472,7 @@ class GossipSub(IPubsubRouter, Service):
# Remove topic from fanout
del self.fanout[topic]
else:
# Check if fanout peers are still in the topic and remove the ones that are not
# Check if fanout peers are still in the topic and remove the ones that are not # noqa: E501
# ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L498-L504 # noqa: E501
in_topic_fanout_peers = [
peer
@ -448,7 +484,7 @@ class GossipSub(IPubsubRouter, Service):
# If |fanout[topic]| < D
if num_fanout_peers_in_topic < self.degree:
# Select D - |fanout[topic]| peers from peers.gossipsub[topic] - fanout[topic]
# Select D - |fanout[topic]| peers from peers.gossipsub[topic] - fanout[topic] # noqa: E501
selected_peers = self._get_in_topic_gossipsub_peers_from_minus(
topic,
self.degree - num_fanout_peers_in_topic,
@ -462,11 +498,14 @@ class GossipSub(IPubsubRouter, Service):
for topic in self.mesh:
msg_ids = self.mcache.window(topic)
if msg_ids:
# Get all pubsub peers in a topic and only add them if they are gossipsub peers too
# Get all pubsub peers in a topic and only add them if they are
# gossipsub peers too
if topic in self.pubsub.peer_topics:
# Select D peers from peers.gossipsub[topic]
peers_to_emit_ihave_to = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.mesh[topic]
peers_to_emit_ihave_to = (
self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.mesh[topic]
)
)
msg_id_strs = [str(msg_id) for msg_id in msg_ids]
@ -478,11 +517,14 @@ class GossipSub(IPubsubRouter, Service):
for topic in self.fanout:
msg_ids = self.mcache.window(topic)
if msg_ids:
# Get all pubsub peers in topic and only add if they are gossipsub peers also
# Get all pubsub peers in topic and only add if they are
# gossipsub peers also
if topic in self.pubsub.peer_topics:
# Select D peers from peers.gossipsub[topic]
peers_to_emit_ihave_to = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.fanout[topic]
peers_to_emit_ihave_to = (
self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.fanout[topic]
)
)
msg_id_strs = [str(msg) for msg in msg_ids]
for peer in peers_to_emit_ihave_to:
@ -494,7 +536,8 @@ class GossipSub(IPubsubRouter, Service):
num_to_select: int, pool: Iterable[Any], minus: Iterable[Any]
) -> List[Any]:
"""
Select at most num_to_select subset of elements from the set (pool - minus) randomly.
Select at most num_to_select subset of elements from the set
(pool - minus) randomly.
:param num_to_select: number of elements to randomly select
:param pool: list of items to select from (excluding elements in minus)
:param minus: elements to be excluded from selection pool
@ -508,8 +551,9 @@ class GossipSub(IPubsubRouter, Service):
# Don't create a new selection_pool if we are not subbing anything
selection_pool = list(pool)
# If num_to_select > size(selection_pool), then return selection_pool (which has the most
# possible elements s.t. the number of elements is less than num_to_select)
# If num_to_select > size(selection_pool), then return selection_pool (which has
# the most possible elements s.t. the number of elements is less than
# num_to_select)
if num_to_select >= len(selection_pool):
return selection_pool
@ -521,11 +565,11 @@ class GossipSub(IPubsubRouter, Service):
def _get_in_topic_gossipsub_peers_from_minus(
self, topic: str, num_to_select: int, minus: Iterable[ID]
) -> List[ID]:
gossipsub_peers_in_topic = set(
gossipsub_peers_in_topic = {
peer_id
for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == PROTOCOL_ID
)
}
return self.select_from_minus(num_to_select, gossipsub_peers_in_topic, minus)
# RPC handlers
@ -533,15 +577,15 @@ class GossipSub(IPubsubRouter, Service):
async def handle_ihave(
self, ihave_msg: rpc_pb2.ControlIHave, sender_peer_id: ID
) -> None:
"""Checks the seen set and requests unknown messages with an IWANT
message."""
# Get list of all seen (seqnos, from) from the (seqno, from) tuples in seen_messages cache
"""Checks the seen set and requests unknown messages with an IWANT message."""
# Get list of all seen (seqnos, from) from the (seqno, from) tuples in
# seen_messages cache
seen_seqnos_and_peers = [
seqno_and_from for seqno_and_from in self.pubsub.seen_messages.keys()
]
# Add all unknown message ids (ids that appear in ihave_msg but not in seen_seqnos) to list
# of messages we want to request
# Add all unknown message ids (ids that appear in ihave_msg but not in
# seen_seqnos) to list of messages we want to request
# FIXME: Update type of message ID
msg_ids_wanted: List[Any] = [
msg_id
@ -556,8 +600,10 @@ class GossipSub(IPubsubRouter, Service):
async def handle_iwant(
self, iwant_msg: rpc_pb2.ControlIWant, sender_peer_id: ID
) -> None:
"""Forwards all request messages that are present in mcache to the
requesting peer."""
"""
Forwards all request messages that are present in mcache to the
requesting peer.
"""
# FIXME: Update type of message ID
# FIXME: Find a better way to parse the msg ids
msg_ids: List[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs]
@ -572,8 +618,8 @@ class GossipSub(IPubsubRouter, Service):
msgs_to_forward.append(msg)
# Forward messages to requesting peer
# Should this just be publishing? No
# because then the message will forwarded to peers in the topics contained in the messages.
# Should this just be publishing? No, because then the message would be
# forwarded to peers in the topics contained in the messages.
# We should
# 1) Package these messages into a single packet
packet: rpc_pb2.RPC = rpc_pb2.RPC()
@ -643,7 +689,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_ihave(self, topic: str, msg_ids: Any, to_peer: ID) -> None:
"""Emit ihave message, sent to to_peer, for topic and msg_ids."""
ihave_msg: rpc_pb2.ControlIHave = rpc_pb2.ControlIHave()
ihave_msg.messageIDs.extend(msg_ids)
ihave_msg.topicID = topic
@ -655,7 +700,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_iwant(self, msg_ids: Any, to_peer: ID) -> None:
"""Emit iwant message, sent to to_peer, for msg_ids."""
iwant_msg: rpc_pb2.ControlIWant = rpc_pb2.ControlIWant()
iwant_msg.messageIDs.extend(msg_ids)
@ -666,7 +710,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_graft(self, topic: str, to_peer: ID) -> None:
"""Emit graft message, sent to to_peer, for topic."""
graft_msg: rpc_pb2.ControlGraft = rpc_pb2.ControlGraft()
graft_msg.topicID = topic
@ -677,7 +720,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_prune(self, topic: str, to_peer: ID) -> None:
"""Emit graft message, sent to to_peer, for topic."""
prune_msg: rpc_pb2.ControlPrune = rpc_pb2.ControlPrune()
prune_msg.topicID = topic
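The `select_from_minus` docstring touched above describes a simple random selection from `pool - minus`. A self-contained sketch of that documented behaviour (not the exact library code):

```python
# Pick at most `num_to_select` elements, uniformly at random, from pool - minus.
import random
from typing import Any, Iterable, List


def select_from_minus(
    num_to_select: int, pool: Iterable[Any], minus: Iterable[Any]
) -> List[Any]:
    minus_set = set(minus)
    selection_pool = [x for x in pool if x not in minus_set]
    # If the pool (after exclusion) is too small, return everything we have.
    if num_to_select >= len(selection_pool):
        return selection_pool
    return random.sample(selection_pool, num_to_select)


peers = [f"peer-{i}" for i in range(10)]
chosen = select_from_minus(3, peers, ["peer-0", "peer-1"])
assert len(chosen) == 3 and not {"peer-0", "peer-1"} & set(chosen)
```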

View File

@ -1,10 +1,17 @@
from typing import Dict, List, Optional, Sequence, Tuple
from typing import (
Dict,
List,
Optional,
Sequence,
Tuple,
)
from .pb import rpc_pb2
from .pb import (
rpc_pb2,
)
class CacheEntry:
mid: Tuple[bytes, bytes]
topics: List[str]
@ -24,7 +31,6 @@ class CacheEntry:
class MessageCache:
window_size: int
history_size: int
@ -91,8 +97,9 @@ class MessageCache:
return mids
def shift(self) -> None:
"""Shift the window over by 1 position, dropping the last element of
the history."""
"""
Shift the window over by 1 position, dropping the last element of the history.
"""
last_entries: List[CacheEntry] = self.history[len(self.history) - 1]
for entry in last_entries:
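As a rough illustration of the window/shift behaviour documented above, here is a toy message cache: new ids land in the newest history slot, `shift` rotates the history by one slot and drops the oldest, and `window` returns ids from the most recent `window_size` slots. Details are simplified relative to the real `MessageCache`.

```python
# Toy sliding-window message cache; simplified, illustrative only.
from typing import List, Tuple


class ToyMessageCache:
    def __init__(self, window_size: int, history_size: int) -> None:
        self.window_size = window_size
        self.history: List[List[Tuple[str, str]]] = [[] for _ in range(history_size)]

    def put(self, msg_id: Tuple[str, str]) -> None:
        self.history[0].append(msg_id)

    def window(self) -> List[Tuple[str, str]]:
        out: List[Tuple[str, str]] = []
        for entries in self.history[: self.window_size]:
            out.extend(entries)
        return out

    def shift(self) -> None:
        # Drop the oldest slot and open a fresh slot for new messages.
        self.history = [[]] + self.history[:-1]


cache = ToyMessageCache(window_size=2, history_size=3)
cache.put(("seqno-1", "peer-a"))
cache.shift()
cache.put(("seqno-2", "peer-b"))
assert set(cache.window()) == {("seqno-1", "peer-a"), ("seqno-2", "peer-b")}
```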

File diff suppressed because it is too large

View File

@ -27,197 +27,352 @@ from typing_extensions import (
Literal as typing_extensions___Literal,
)
class RPC(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class SubOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
subscribe = ... # type: bool
topicid = ... # type: typing___Text
subscribe = ... # type: bool
topicid = ... # type: typing___Text
def __init__(self,
def __init__(
self,
*,
subscribe : typing___Optional[bool] = None,
topicid : typing___Optional[typing___Text] = None,
) -> None: ...
subscribe: typing___Optional[bool] = None,
topicid: typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> RPC.SubOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def MergeFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
def CopyFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"subscribe",u"topicid"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"subscribe",u"topicid"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["subscribe", "topicid"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["subscribe", "topicid"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"subscribe",b"subscribe",u"topicid",b"topicid"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"subscribe",b"subscribe",u"topicid",b"topicid"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"subscribe", b"subscribe", "topicid", b"topicid"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"subscribe", b"subscribe", "topicid", b"topicid"
],
) -> None: ...
@property
def subscriptions(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[RPC.SubOpts]: ...
def subscriptions(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
RPC.SubOpts
]: ...
@property
def publish(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Message]: ...
def publish(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
Message
]: ...
@property
def control(self) -> ControlMessage: ...
def __init__(self,
def __init__(
self,
*,
subscriptions : typing___Optional[typing___Iterable[RPC.SubOpts]] = None,
publish : typing___Optional[typing___Iterable[Message]] = None,
control : typing___Optional[ControlMessage] = None,
) -> None: ...
subscriptions: typing___Optional[typing___Iterable[RPC.SubOpts]] = None,
publish: typing___Optional[typing___Iterable[Message]] = None,
control: typing___Optional[ControlMessage] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> RPC: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"control"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"control",u"publish",u"subscriptions"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["control"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"control", "publish", "subscriptions"
],
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"control",b"control"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"control",b"control",u"publish",b"publish",u"subscriptions",b"subscriptions"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["control", b"control"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"control",
b"control",
"publish",
b"publish",
"subscriptions",
b"subscriptions",
],
) -> None: ...
class Message(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
from_id = ... # type: bytes
data = ... # type: bytes
seqno = ... # type: bytes
topicIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
signature = ... # type: bytes
key = ... # type: bytes
from_id = ... # type: bytes
data = ... # type: bytes
seqno = ... # type: bytes
topicIDs = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
signature = ... # type: bytes
key = ... # type: bytes
def __init__(self,
def __init__(
self,
*,
from_id : typing___Optional[bytes] = None,
data : typing___Optional[bytes] = None,
seqno : typing___Optional[bytes] = None,
topicIDs : typing___Optional[typing___Iterable[typing___Text]] = None,
signature : typing___Optional[bytes] = None,
key : typing___Optional[bytes] = None,
) -> None: ...
from_id: typing___Optional[bytes] = None,
data: typing___Optional[bytes] = None,
seqno: typing___Optional[bytes] = None,
topicIDs: typing___Optional[typing___Iterable[typing___Text]] = None,
signature: typing___Optional[bytes] = None,
key: typing___Optional[bytes] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Message: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"from_id",u"key",u"seqno",u"signature"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"from_id",u"key",u"seqno",u"signature",u"topicIDs"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"data", "from_id", "key", "seqno", "signature"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", "from_id", "key", "seqno", "signature", "topicIDs"
],
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"from_id",b"from_id",u"key",b"key",u"seqno",b"seqno",u"signature",b"signature"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"from_id",b"from_id",u"key",b"key",u"seqno",b"seqno",u"signature",b"signature",u"topicIDs",b"topicIDs"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"data",
b"data",
"from_id",
b"from_id",
"key",
b"key",
"seqno",
b"seqno",
"signature",
b"signature",
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data",
b"data",
"from_id",
b"from_id",
"key",
b"key",
"seqno",
b"seqno",
"signature",
b"signature",
"topicIDs",
b"topicIDs",
],
) -> None: ...
class ControlMessage(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property
def ihave(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlIHave]: ...
def ihave(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlIHave
]: ...
@property
def iwant(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlIWant]: ...
def iwant(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlIWant
]: ...
@property
def graft(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlGraft]: ...
def graft(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlGraft
]: ...
@property
def prune(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlPrune]: ...
def __init__(self,
def prune(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlPrune
]: ...
def __init__(
self,
*,
ihave : typing___Optional[typing___Iterable[ControlIHave]] = None,
iwant : typing___Optional[typing___Iterable[ControlIWant]] = None,
graft : typing___Optional[typing___Iterable[ControlGraft]] = None,
prune : typing___Optional[typing___Iterable[ControlPrune]] = None,
) -> None: ...
ihave: typing___Optional[typing___Iterable[ControlIHave]] = None,
iwant: typing___Optional[typing___Iterable[ControlIWant]] = None,
graft: typing___Optional[typing___Iterable[ControlGraft]] = None,
prune: typing___Optional[typing___Iterable[ControlPrune]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlMessage: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"graft",u"ihave",u"iwant",u"prune"]) -> None: ...
def ClearField(
self,
field_name: typing_extensions___Literal["graft", "ihave", "iwant", "prune"],
) -> None: ...
else:
def ClearField(self, field_name: typing_extensions___Literal[u"graft",b"graft",u"ihave",b"ihave",u"iwant",b"iwant",u"prune",b"prune"]) -> None: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"graft",
b"graft",
"ihave",
b"ihave",
"iwant",
b"iwant",
"prune",
b"prune",
],
) -> None: ...
class ControlIHave(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text
messageIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
topicID = ... # type: typing___Text
messageIDs = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self,
def __init__(
self,
*,
topicID : typing___Optional[typing___Text] = None,
messageIDs : typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
topicID: typing___Optional[typing___Text] = None,
messageIDs: typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlIHave: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",u"topicID"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["messageIDs", "topicID"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",b"messageIDs",u"topicID",b"topicID"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"messageIDs", b"messageIDs", "topicID", b"topicID"
],
) -> None: ...
class ControlIWant(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
messageIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
messageIDs = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self,
def __init__(
self,
*,
messageIDs : typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
messageIDs: typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlIWant: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs"]) -> None: ...
def ClearField(
self, field_name: typing_extensions___Literal["messageIDs"]
) -> None: ...
else:
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",b"messageIDs"]) -> None: ...
def ClearField(
self, field_name: typing_extensions___Literal["messageIDs", b"messageIDs"]
) -> None: ...
class ControlGraft(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text
topicID = ... # type: typing___Text
def __init__(self,
def __init__(
self,
*,
topicID : typing___Optional[typing___Text] = None,
) -> None: ...
topicID: typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlGraft: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> None: ...
class ControlPrune(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text
topicID = ... # type: typing___Text
def __init__(self,
def __init__(
self,
*,
topicID : typing___Optional[typing___Text] = None,
) -> None: ...
topicID: typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlPrune: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> None: ...
class TopicDescriptor(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AuthOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AuthMode(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod
@ -229,7 +384,11 @@ class TopicDescriptor(google___protobuf___message___Message):
@classmethod
def values(cls) -> typing___List[TopicDescriptor.AuthOpts.AuthMode]: ...
@classmethod
def items(cls) -> typing___List[typing___Tuple[str, TopicDescriptor.AuthOpts.AuthMode]]: ...
def items(
cls,
) -> typing___List[
typing___Tuple[str, TopicDescriptor.AuthOpts.AuthMode]
]: ...
NONE = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 0)
KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1)
WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2)
@ -237,27 +396,46 @@ class TopicDescriptor(google___protobuf___message___Message):
KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1)
WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2)
mode = ... # type: TopicDescriptor.AuthOpts.AuthMode
keys = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
mode = ... # type: TopicDescriptor.AuthOpts.AuthMode
keys = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
def __init__(self,
def __init__(
self,
*,
mode : typing___Optional[TopicDescriptor.AuthOpts.AuthMode] = None,
keys : typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ...
mode: typing___Optional[TopicDescriptor.AuthOpts.AuthMode] = None,
keys: typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> TopicDescriptor.AuthOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def MergeFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
def CopyFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keys",u"mode"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["mode"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["keys", "mode"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"mode",b"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keys",b"keys",u"mode",b"mode"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["mode", b"mode"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"keys", b"keys", "mode", b"mode"
],
) -> None: ...
class EncOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class EncMode(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod
@ -269,7 +447,11 @@ class TopicDescriptor(google___protobuf___message___Message):
@classmethod
def values(cls) -> typing___List[TopicDescriptor.EncOpts.EncMode]: ...
@classmethod
def items(cls) -> typing___List[typing___Tuple[str, TopicDescriptor.EncOpts.EncMode]]: ...
def items(
cls,
) -> typing___List[
typing___Tuple[str, TopicDescriptor.EncOpts.EncMode]
]: ...
NONE = typing___cast(TopicDescriptor.EncOpts.EncMode, 0)
SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1)
WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2)
@ -277,46 +459,77 @@ class TopicDescriptor(google___protobuf___message___Message):
SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1)
WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2)
mode = ... # type: TopicDescriptor.EncOpts.EncMode
keyHashes = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
mode = ... # type: TopicDescriptor.EncOpts.EncMode
keyHashes = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
def __init__(self,
def __init__(
self,
*,
mode : typing___Optional[TopicDescriptor.EncOpts.EncMode] = None,
keyHashes : typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ...
mode: typing___Optional[TopicDescriptor.EncOpts.EncMode] = None,
keyHashes: typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> TopicDescriptor.EncOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def MergeFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
def CopyFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keyHashes",u"mode"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["mode"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["keyHashes", "mode"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"mode",b"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keyHashes",b"keyHashes",u"mode",b"mode"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["mode", b"mode"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"keyHashes", b"keyHashes", "mode", b"mode"
],
) -> None: ...
name = ... # type: typing___Text
name = ... # type: typing___Text
@property
def auth(self) -> TopicDescriptor.AuthOpts: ...
@property
def enc(self) -> TopicDescriptor.EncOpts: ...
def __init__(self,
def __init__(
self,
*,
name : typing___Optional[typing___Text] = None,
auth : typing___Optional[TopicDescriptor.AuthOpts] = None,
enc : typing___Optional[TopicDescriptor.EncOpts] = None,
) -> None: ...
name: typing___Optional[typing___Text] = None,
auth: typing___Optional[TopicDescriptor.AuthOpts] = None,
enc: typing___Optional[TopicDescriptor.EncOpts] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> TopicDescriptor: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"auth",u"enc",u"name"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"auth",u"enc",u"name"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["auth", "enc", "name"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["auth", "enc", "name"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"auth",b"auth",u"enc",b"enc",u"name",b"name"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"auth",b"auth",u"enc",b"enc",u"name",b"name"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"auth", b"auth", "enc", b"enc", "name", b"name"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"auth", b"auth", "enc", b"enc", "name", b"name"
],
) -> None: ...

View File

@ -15,33 +15,78 @@ from typing import (
cast,
)
from async_service import Service
from async_service import (
Service,
)
import base58
from lru import LRU
from lru import (
LRU,
)
import trio
from libp2p.crypto.keys import PrivateKey
from libp2p.exceptions import ParseError, ValidationError
from libp2p.host.host_interface import IHost
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.exceptions import SwarmException
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed, read_varint_prefixed_bytes
from libp2p.crypto.keys import (
PrivateKey,
)
from libp2p.exceptions import (
ParseError,
ValidationError,
)
from libp2p.host.host_interface import (
IHost,
)
from libp2p.io.exceptions import (
IncompleteReadError,
)
from libp2p.network.exceptions import (
SwarmException,
)
from libp2p.network.stream.exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
read_varint_prefixed_bytes,
)
from .abc import IPubsub, ISubscriptionAPI
from .pb import rpc_pb2
from .pubsub_notifee import PubsubNotifee
from .subscription import TrioSubscriptionAPI
from .typing import AsyncValidatorFn, SyncValidatorFn, ValidatorFn
from .validators import PUBSUB_SIGNING_PREFIX, signature_validator
from .abc import (
IPubsub,
ISubscriptionAPI,
)
from .pb import (
rpc_pb2,
)
from .pubsub_notifee import (
PubsubNotifee,
)
from .subscription import (
TrioSubscriptionAPI,
)
from .typing import (
AsyncValidatorFn,
SyncValidatorFn,
ValidatorFn,
)
from .validators import (
PUBSUB_SIGNING_PREFIX,
signature_validator,
)
if TYPE_CHECKING:
from .abc import IPubsubRouter # noqa: F401
from typing import Any # noqa: F401
from .abc import IPubsubRouter # noqa: F401
# Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/40e1c94708658b155f30cf99e4574f384756d83c/topic.go#L97 # noqa: E501
SUBSCRIPTION_CHANNEL_SIZE = 32
@ -64,7 +109,6 @@ class TopicValidator(NamedTuple):
class Pubsub(Service, IPubsub):
host: IHost
router: "IPubsubRouter"
@ -186,8 +230,10 @@ class Pubsub(Service, IPubsub):
return self.subscribed_topics_receive.keys()
def get_hello_packet(self) -> rpc_pb2.RPC:
"""Generate subscription message with all topics we are subscribed to
only send hello packet if we have subscribed topics."""
"""
Generate a subscription message with all topics we are subscribed to;
only send the hello packet if we have subscribed topics.
"""
packet = rpc_pb2.RPC()
for topic_id in self.topic_ids:
packet.subscriptions.extend(
@ -254,7 +300,7 @@ class Pubsub(Service, IPubsub):
:param topic: the topic to register validator under
:param validator: the validator used to validate messages published to the topic
:param is_async_validator: indicate if the validator is an asynchronous validator
"""
""" # noqa: E501
self.topic_validators[topic] = TopicValidator(validator, is_async_validator)
def remove_topic_validator(self, topic: str) -> None:
@ -341,9 +387,11 @@ class Pubsub(Service, IPubsub):
logger.debug("removed dead peer %s", peer_id)
async def handle_peer_queue(self) -> None:
"""Continuously read from peer queue and each time a new peer is found,
"""
Continuously read from peer queue and each time a new peer is found,
open a stream to the peer using one of the pubsub
protocols we support."""
protocols we support.
"""
async with self.peer_receive_channel:
self.event_handle_peer_queue_started.set()
async for peer_id in self.peer_receive_channel:
@ -351,9 +399,10 @@ class Pubsub(Service, IPubsub):
self.manager.run_task(self._handle_new_peer, peer_id)
async def handle_dead_peer_queue(self) -> None:
"""Continuously read from dead peer channel and close the stream
between that peer and remove peer info from pubsub and pubsub
router."""
"""
Continuously read from the dead peer channel, close the stream to that
peer, and remove the peer's info from pubsub and the pubsub router.
"""
async with self.dead_peer_receive_channel:
self.event_handle_dead_peer_queue_started.set()
async for peer_id in self.dead_peer_receive_channel:
@ -373,7 +422,7 @@ class Pubsub(Service, IPubsub):
"""
if sub_message.subscribe:
if sub_message.topicid not in self.peer_topics:
self.peer_topics[sub_message.topicid] = set([origin_id])
self.peer_topics[sub_message.topicid] = {origin_id}
elif origin_id not in self.peer_topics[sub_message.topicid]:
# Add peer to topic
self.peer_topics[sub_message.topicid].add(origin_id)
@ -388,7 +437,6 @@ class Pubsub(Service, IPubsub):
:param publish_message: RPC.Message format
"""
# Check if this message has any topics that we are subscribed to
for topic in publish_message.topicIDs:
if topic in self.topic_ids:
@ -409,7 +457,6 @@ class Pubsub(Service, IPubsub):
:param topic_id: topic_id to subscribe to
"""
logger.debug("subscribing to topic %s", topic_id)
# Already subscribed
@ -448,7 +495,6 @@ class Pubsub(Service, IPubsub):
:param topic_id: topic_id to unsubscribe from
"""
logger.debug("unsubscribing from topic %s", topic_id)
# Return if we already unsubscribed from the topic
@ -479,7 +525,6 @@ class Pubsub(Service, IPubsub):
:param raw_msg: raw contents of the message to broadcast
"""
# Broadcast message
for stream in self.peers.values():
# Write message to stream
@ -571,7 +616,7 @@ class Pubsub(Service, IPubsub):
# TODO: Check if the `from` is in the blacklist. If yes, reject.
# If the message is processed before, return(i.e., don't further process the message).
# If the message is processed before, return(i.e., don't further process the message) # noqa: E501
if self._is_msg_seen(msg):
return
@ -588,7 +633,7 @@ class Pubsub(Service, IPubsub):
await self.validate_msg(msg_forwarder, msg)
except ValidationError:
logger.debug(
"Topic validation failed: sender %s sent data %s under topic IDs: %s %s:%s",
"Topic validation failed: sender %s sent data %s under topic IDs: %s %s:%s", # noqa: E501
msg_forwarder,
msg.data.hex(),
msg.topicIDs,
@ -612,8 +657,8 @@ class Pubsub(Service, IPubsub):
def _mark_msg_seen(self, msg: rpc_pb2.Message) -> None:
msg_id = self._msg_id_constructor(msg)
# FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate if there is a
# more appropriate way.
# FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate if there
# is a more appropriate way.
self.seen_messages[msg_id] = 1
def _is_subscribed_to_msg(self, msg: rpc_pb2.Message) -> bool:
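One of the comments reflowed above concerns the seen-message bookkeeping (`_is_msg_seen` / `_mark_msg_seen`, keyed by `(seqno, from)`). Here is a minimal sketch of that de-duplication step, with a plain dict standing in for the bounded LRU the real code uses.

```python
# De-duplicate pubsub messages by (seqno, from_id) before processing/forwarding.
from typing import Dict, Tuple

MsgID = Tuple[bytes, bytes]  # (seqno, from_id)


class SeenCache:
    def __init__(self) -> None:
        self._seen: Dict[MsgID, int] = {}

    def is_seen(self, msg_id: MsgID) -> bool:
        return msg_id in self._seen

    def mark_seen(self, msg_id: MsgID) -> None:
        # Mapping to 1 mirrors the awkward mapping noted in the FIXME above.
        self._seen[msg_id] = 1


def should_process(cache: SeenCache, msg_id: MsgID) -> bool:
    if cache.is_seen(msg_id):
        return False
    cache.mark_seen(msg_id)
    return True


cache = SeenCache()
mid = (b"\x00\x01", b"QmPeer")
assert should_process(cache, mid) is True
assert should_process(cache, mid) is False
```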

View File

@ -1,19 +1,30 @@
from typing import TYPE_CHECKING
from typing import (
TYPE_CHECKING,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
import trio
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.network.network_interface import INetwork
from libp2p.network.notifee_interface import INotifee
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.network.network_interface import (
INetwork,
)
from libp2p.network.notifee_interface import (
INotifee,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING:
from libp2p.peer.id import ID # noqa: F401
class PubsubNotifee(INotifee):
initiator_peers_queue: "trio.MemorySendChannel[ID]"
dead_peers_queue: "trio.MemorySendChannel[ID]"

View File

@ -1,11 +1,23 @@
from types import TracebackType
from typing import AsyncIterator, Optional, Type
from types import (
TracebackType,
)
from typing import (
AsyncIterator,
Optional,
Type,
)
import trio
from .abc import ISubscriptionAPI
from .pb import rpc_pb2
from .typing import UnsubscribeFn
from .abc import (
ISubscriptionAPI,
)
from .pb import (
rpc_pb2,
)
from .typing import (
UnsubscribeFn,
)
class BaseSubscriptionAPI(ISubscriptionAPI):
@ -32,11 +44,11 @@ class TrioSubscriptionAPI(BaseSubscriptionAPI):
unsubscribe_fn: UnsubscribeFn,
) -> None:
self.receive_channel = receive_channel
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # noqa: E501
self.unsubscribe_fn = unsubscribe_fn # type: ignore
async def unsubscribe(self) -> None:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # noqa: E501
await self.unsubscribe_fn() # type: ignore
def __aiter__(self) -> AsyncIterator[rpc_pb2.Message]:
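For context on the pattern `TrioSubscriptionAPI` wraps: an async iterator backed by a trio memory receive channel, which ends cleanly once the send side is closed. This standalone sketch uses made-up names and is not the library's implementation.

import trio


class ChannelSubscription:
    """Async-iterate over items arriving on a trio memory receive channel."""

    def __init__(self, receive_channel: trio.MemoryReceiveChannel) -> None:
        self.receive_channel = receive_channel

    def __aiter__(self) -> "ChannelSubscription":
        return self

    async def __anext__(self) -> str:
        try:
            return await self.receive_channel.receive()
        except trio.EndOfChannel:
            # The sender closed its side: end the iteration.
            raise StopAsyncIteration


async def main() -> None:
    send_channel, receive_channel = trio.open_memory_channel(8)
    subscription = ChannelSubscription(receive_channel)
    await send_channel.send("hello")
    await send_channel.aclose()
    async for item in subscription:
        print(item)


trio.run(main)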

View File

@ -1,8 +1,16 @@
from typing import Awaitable, Callable, Union
from typing import (
Awaitable,
Callable,
Union,
)
from libp2p.peer.id import ID
from libp2p.peer.id import (
ID,
)
from .pb import rpc_pb2
from .pb import (
rpc_pb2,
)
SyncValidatorFn = Callable[[ID, rpc_pb2.Message], bool]
AsyncValidatorFn = Callable[[ID, rpc_pb2.Message], Awaitable[bool]]
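Since a topic validator may have either of the two shapes above, the caller has to detect which one it got before awaiting. A minimal sketch with placeholder types (string peer IDs and raw bytes instead of the real `ID` and `rpc_pb2.Message`):

import inspect
from typing import Awaitable, Callable, Union

import trio

SyncValidator = Callable[[str, bytes], bool]
AsyncValidator = Callable[[str, bytes], Awaitable[bool]]
Validator = Union[SyncValidator, AsyncValidator]


async def run_validator(validator: Validator, peer: str, msg: bytes) -> bool:
    """Call a validator that may be synchronous or asynchronous."""
    result = validator(peer, msg)
    if inspect.isawaitable(result):
        result = await result
    return bool(result)


def sync_non_empty(peer: str, msg: bytes) -> bool:
    return len(msg) > 0


async def async_non_empty(peer: str, msg: bytes) -> bool:
    return len(msg) > 0


async def main() -> None:
    assert await run_validator(sync_non_empty, "peer-1", b"data")
    assert await run_validator(async_non_empty, "peer-1", b"data")


trio.run(main)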

View File

@ -1,9 +1,15 @@
import logging
from libp2p.crypto.serialization import deserialize_public_key
from libp2p.peer.id import ID
from libp2p.crypto.serialization import (
deserialize_public_key,
)
from libp2p.peer.id import (
ID,
)
from .pb import rpc_pb2
from .pb import (
rpc_pb2,
)
logger = logging.getLogger("libp2p.pubsub")

View File

@ -1,8 +1,17 @@
from abc import ABC, abstractmethod
from typing import Iterable
from abc import (
ABC,
abstractmethod,
)
from typing import (
Iterable,
)
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
class IContentRouting(ABC):
@ -18,12 +27,16 @@ class IContentRouting(ABC):
@abstractmethod
def find_provider_iter(self, cid: bytes, count: int) -> Iterable[PeerInfo]:
"""Search for peers who are able to provide a given key returns an
iterator of peer.PeerInfo."""
"""
Search for peers who are able to provide a given key; returns an
iterator of peer.PeerInfo.
"""
class IPeerRouting(ABC):
@abstractmethod
async def find_peer(self, peer_id: ID) -> PeerInfo:
"""Find specific Peer FindPeer searches for a peer with given peer_id,
returns a peer.PeerInfo with relevant addresses."""
"""
Find a specific peer: search for a peer with the given peer_id and
return a peer.PeerInfo with relevant addresses.
"""

View File

@ -1,13 +1,24 @@
from typing import Optional
from typing import (
Optional,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
class BaseSession(ISecureConn):
"""``BaseSession`` is not fully instantiated from its abstract classes as
it is only meant to be used in clases that derive from it."""
"""
``BaseSession`` is not fully instantiated from its abstract classes as
it is only meant to be used in classes that derive from it.
"""
local_peer: ID
local_private_key: PrivateKey

View File

@ -1,9 +1,17 @@
import secrets
from typing import Callable
from typing import (
Callable,
)
from libp2p.crypto.keys import KeyPair
from libp2p.peer.id import ID
from libp2p.security.secure_transport_interface import ISecureTransport
from libp2p.crypto.keys import (
KeyPair,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_transport_interface import (
ISecureTransport,
)
def default_secure_bytes_provider(n: int) -> bytes:

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class HandshakeFailure(BaseLibp2pError):

View File

@ -2,12 +2,13 @@
# source: libp2p/security/insecure/pb/plaintext.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@ -17,62 +18,89 @@ from libp2p.crypto.pb import crypto_pb2 as libp2p_dot_crypto_dot_pb_dot_crypto__
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/security/insecure/pb/plaintext.proto',
package='plaintext.pb',
syntax='proto2',
serialized_pb=_b('\n+libp2p/security/insecure/pb/plaintext.proto\x12\x0cplaintext.pb\x1a\x1dlibp2p/crypto/pb/crypto.proto\"<\n\x08\x45xchange\x12\n\n\x02id\x18\x01 \x01(\x0c\x12$\n\x06pubkey\x18\x02 \x01(\x0b\x32\x14.crypto.pb.PublicKey')
,
dependencies=[libp2p_dot_crypto_dot_pb_dot_crypto__pb2.DESCRIPTOR,])
name="libp2p/security/insecure/pb/plaintext.proto",
package="plaintext.pb",
syntax="proto2",
serialized_pb=_b(
'\n+libp2p/security/insecure/pb/plaintext.proto\x12\x0cplaintext.pb\x1a\x1dlibp2p/crypto/pb/crypto.proto"<\n\x08\x45xchange\x12\n\n\x02id\x18\x01 \x01(\x0c\x12$\n\x06pubkey\x18\x02 \x01(\x0b\x32\x14.crypto.pb.PublicKey'
),
dependencies=[
libp2p_dot_crypto_dot_pb_dot_crypto__pb2.DESCRIPTOR,
],
)
_EXCHANGE = _descriptor.Descriptor(
name='Exchange',
full_name='plaintext.pb.Exchange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='plaintext.pb.Exchange.id', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pubkey', full_name='plaintext.pb.Exchange.pubkey', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=92,
serialized_end=152,
name="Exchange",
full_name="plaintext.pb.Exchange",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="id",
full_name="plaintext.pb.Exchange.id",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="pubkey",
full_name="plaintext.pb.Exchange.pubkey",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=92,
serialized_end=152,
)
_EXCHANGE.fields_by_name['pubkey'].message_type = libp2p_dot_crypto_dot_pb_dot_crypto__pb2._PUBLICKEY
DESCRIPTOR.message_types_by_name['Exchange'] = _EXCHANGE
_EXCHANGE.fields_by_name[
"pubkey"
].message_type = libp2p_dot_crypto_dot_pb_dot_crypto__pb2._PUBLICKEY
DESCRIPTOR.message_types_by_name["Exchange"] = _EXCHANGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Exchange = _reflection.GeneratedProtocolMessageType('Exchange', (_message.Message,), dict(
DESCRIPTOR = _EXCHANGE,
__module__ = 'libp2p.security.insecure.pb.plaintext_pb2'
# @@protoc_insertion_point(class_scope:plaintext.pb.Exchange)
))
Exchange = _reflection.GeneratedProtocolMessageType(
"Exchange",
(_message.Message,),
dict(
DESCRIPTOR=_EXCHANGE,
__module__="libp2p.security.insecure.pb.plaintext_pb2"
# @@protoc_insertion_point(class_scope:plaintext.pb.Exchange)
),
)
_sym_db.RegisterMessage(Exchange)

View File

@ -20,26 +20,35 @@ from typing_extensions import (
Literal as typing_extensions___Literal,
)
class Exchange(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
id = ... # type: bytes
id = ... # type: bytes
@property
def pubkey(self) -> libp2p___crypto___pb___crypto_pb2___PublicKey: ...
def __init__(self,
def __init__(
self,
*,
id : typing___Optional[bytes] = None,
pubkey : typing___Optional[libp2p___crypto___pb___crypto_pb2___PublicKey] = None,
) -> None: ...
id: typing___Optional[bytes] = None,
pubkey: typing___Optional[libp2p___crypto___pb___crypto_pb2___PublicKey] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Exchange: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"id",u"pubkey"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"id",u"pubkey"]) -> None: ...
def HasField(
self, field_name: typing_extensions___Literal["id", "pubkey"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["id", "pubkey"]
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"id",b"id",u"pubkey",b"pubkey"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"id",b"id",u"pubkey",b"pubkey"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal["id", b"id", "pubkey", b"pubkey"],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal["id", b"id", "pubkey", b"pubkey"],
) -> None: ...

View File

@ -1,21 +1,52 @@
from libp2p.crypto.exceptions import MissingDeserializerError
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.pb import crypto_pb2
from libp2p.crypto.serialization import deserialize_public_key
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.msgio import VarIntLengthMsgReadWriter
from libp2p.network.connection.exceptions import RawConnError
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.security.base_session import BaseSession
from libp2p.security.base_transport import BaseSecureTransport
from libp2p.security.exceptions import HandshakeFailure
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.typing import TProtocol
from libp2p.crypto.exceptions import (
MissingDeserializerError,
)
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.crypto.pb import (
crypto_pb2,
)
from libp2p.crypto.serialization import (
deserialize_public_key,
)
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.msgio import (
VarIntLengthMsgReadWriter,
)
from libp2p.network.connection.exceptions import (
RawConnError,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.base_session import (
BaseSession,
)
from libp2p.security.base_transport import (
BaseSecureTransport,
)
from libp2p.security.exceptions import (
HandshakeFailure,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.typing import (
TProtocol,
)
from .pb import plaintext_pb2
from .pb import (
plaintext_pb2,
)
# Reference: https://github.com/libp2p/go-libp2p-core/blob/master/sec/insecure/insecure.go
# Reference: https://github.com/libp2p/go-libp2p-core/blob/master/sec/insecure/insecure.go # noqa: E501
PLAINTEXT_PROTOCOL_ID = TProtocol("/plaintext/2.0.0")
@ -120,9 +151,10 @@ async def run_handshake(
class InsecureTransport(BaseSecureTransport):
"""``InsecureTransport`` provides the "identity" upgrader for a
``IRawConnection``, i.e. the upgraded transport does not add any additional
security."""
"""
Provides the "identity" upgrader for a ``IRawConnection``, i.e. the upgraded
transport does not add any additional security.
"""
async def secure_inbound(self, conn: IRawConnection) -> ISecureConn:
"""

View File

@ -1,4 +1,6 @@
from libp2p.security.exceptions import HandshakeFailure
from libp2p.security.exceptions import (
HandshakeFailure,
)
class NoiseFailure(HandshakeFailure):
@ -14,8 +16,10 @@ class InvalidSignature(NoiseFailure):
class NoiseStateError(NoiseFailure):
"""Raised when anything goes wrong in the noise state in `noiseprotocol`
package."""
"""
Raised when anything goes wrong in the noise state in `noiseprotocol`
package.
"""
class PeerIDMismatchesPubkey(NoiseFailure):

View File

@ -1,10 +1,20 @@
from typing import cast
from typing import (
cast,
)
from noise.connection import NoiseConnection as NoiseState
from libp2p.io.abc import EncryptedMsgReadWriter, MsgReadWriteCloser, ReadWriteCloser
from libp2p.io.msgio import FixedSizeLenMsgReadWriter
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.io.abc import (
EncryptedMsgReadWriter,
MsgReadWriteCloser,
ReadWriteCloser,
)
from libp2p.io.msgio import (
FixedSizeLenMsgReadWriter,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
SIZE_NOISE_MESSAGE_LEN = 2
MAX_NOISE_MESSAGE_LEN = 2 ** (8 * SIZE_NOISE_MESSAGE_LEN) - 1

View File

@ -1,7 +1,14 @@
from dataclasses import dataclass
from dataclasses import (
dataclass,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.serialization import deserialize_public_key
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.crypto.serialization import (
deserialize_public_key,
)
from .pb import noise_pb2 as noise_pb

View File

@ -1,16 +1,34 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives import (
serialization,
)
from noise.backends.default.keypairs import KeyPair as NoiseKeyPair
from noise.connection import Keypair as NoiseKeypairEnum
from noise.connection import NoiseConnection as NoiseState
from libp2p.crypto.ed25519 import Ed25519PublicKey
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.security.secure_session import SecureSession
from libp2p.crypto.ed25519 import (
Ed25519PublicKey,
)
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.security.secure_session import (
SecureSession,
)
from .exceptions import (
HandshakeHasNotFinished,
@ -18,7 +36,10 @@ from .exceptions import (
NoiseStateError,
PeerIDMismatchesPubkey,
)
from .io import NoiseHandshakeReadWriter, NoiseTransportReadWriter
from .io import (
NoiseHandshakeReadWriter,
NoiseTransportReadWriter,
)
from .messages import (
NoiseHandshakePayload,
make_handshake_payload_sig,
@ -95,8 +116,8 @@ class PatternXX(BasePattern):
if handshake_state.rs is None:
raise NoiseStateError(
"something is wrong in the underlying noise `handshake_state`: "
"we received and consumed msg#3, which should have included the"
" remote static public key, but it is not present in the handshake_state"
"we received and consumed msg#3, which should have included the "
"remote static public key, but it is not present in the handshake_state"
)
remote_pubkey = self._get_pubkey_from_noise_keypair(handshake_state.rs)
@ -139,8 +160,8 @@ class PatternXX(BasePattern):
if handshake_state.rs is None:
raise NoiseStateError(
"something is wrong in the underlying noise `handshake_state`: "
"we received and consumed msg#3, which should have included the"
" remote static public key, but it is not present in the handshake_state"
"we received and consumed msg#3, which should have included the "
"remote static public key, but it is not present in the handshake_state"
)
remote_pubkey = self._get_pubkey_from_noise_keypair(handshake_state.rs)

View File

@ -2,81 +2,114 @@
# source: libp2p/security/noise/pb/noise.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/security/noise/pb/noise.proto',
package='pb',
syntax='proto3',
serialized_pb=_b('\n$libp2p/security/noise/pb/noise.proto\x12\x02pb\"Q\n\x15NoiseHandshakePayload\x12\x14\n\x0cidentity_key\x18\x01 \x01(\x0c\x12\x14\n\x0cidentity_sig\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x62\x06proto3')
name="libp2p/security/noise/pb/noise.proto",
package="pb",
syntax="proto3",
serialized_pb=_b(
'\n$libp2p/security/noise/pb/noise.proto\x12\x02pb"Q\n\x15NoiseHandshakePayload\x12\x14\n\x0cidentity_key\x18\x01 \x01(\x0c\x12\x14\n\x0cidentity_sig\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x62\x06proto3'
),
)
_NOISEHANDSHAKEPAYLOAD = _descriptor.Descriptor(
name='NoiseHandshakePayload',
full_name='pb.NoiseHandshakePayload',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='identity_key', full_name='pb.NoiseHandshakePayload.identity_key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='identity_sig', full_name='pb.NoiseHandshakePayload.identity_sig', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pb.NoiseHandshakePayload.data', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=44,
serialized_end=125,
name="NoiseHandshakePayload",
full_name="pb.NoiseHandshakePayload",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="identity_key",
full_name="pb.NoiseHandshakePayload.identity_key",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="identity_sig",
full_name="pb.NoiseHandshakePayload.identity_sig",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data",
full_name="pb.NoiseHandshakePayload.data",
index=2,
number=3,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=44,
serialized_end=125,
)
DESCRIPTOR.message_types_by_name['NoiseHandshakePayload'] = _NOISEHANDSHAKEPAYLOAD
DESCRIPTOR.message_types_by_name["NoiseHandshakePayload"] = _NOISEHANDSHAKEPAYLOAD
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
NoiseHandshakePayload = _reflection.GeneratedProtocolMessageType('NoiseHandshakePayload', (_message.Message,), dict(
DESCRIPTOR = _NOISEHANDSHAKEPAYLOAD,
__module__ = 'libp2p.security.noise.pb.noise_pb2'
# @@protoc_insertion_point(class_scope:pb.NoiseHandshakePayload)
))
NoiseHandshakePayload = _reflection.GeneratedProtocolMessageType(
"NoiseHandshakePayload",
(_message.Message,),
dict(
DESCRIPTOR=_NOISEHANDSHAKEPAYLOAD,
__module__="libp2p.security.noise.pb.noise_pb2"
# @@protoc_insertion_point(class_scope:pb.NoiseHandshakePayload)
),
)
_sym_db.RegisterMessage(NoiseHandshakePayload)

View File

@ -16,24 +16,39 @@ from typing_extensions import (
Literal as typing_extensions___Literal,
)
class NoiseHandshakePayload(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
identity_key = ... # type: bytes
identity_sig = ... # type: bytes
data = ... # type: bytes
identity_key = ... # type: bytes
identity_sig = ... # type: bytes
data = ... # type: bytes
def __init__(self,
def __init__(
self,
*,
identity_key : typing___Optional[bytes] = None,
identity_sig : typing___Optional[bytes] = None,
data : typing___Optional[bytes] = None,
) -> None: ...
identity_key: typing___Optional[bytes] = None,
identity_sig: typing___Optional[bytes] = None,
data: typing___Optional[bytes] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> NoiseHandshakePayload: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"identity_key",u"identity_sig"]) -> None: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", "identity_key", "identity_sig"
],
) -> None: ...
else:
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"identity_key",b"identity_key",u"identity_sig",b"identity_sig"]) -> None: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data",
b"data",
"identity_key",
b"identity_key",
"identity_sig",
b"identity_sig",
],
) -> None: ...

View File

@ -1,11 +1,27 @@
from libp2p.crypto.keys import KeyPair, PrivateKey
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.security.secure_transport_interface import ISecureTransport
from libp2p.typing import TProtocol
from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.security.secure_transport_interface import (
ISecureTransport,
)
from libp2p.typing import (
TProtocol,
)
from .patterns import IPattern, PatternXX
from .patterns import (
IPattern,
PatternXX,
)
PROTOCOL_ID = TProtocol("/noise")

View File

@ -1,4 +1,6 @@
from libp2p.security.exceptions import HandshakeFailure
from libp2p.security.exceptions import (
HandshakeFailure,
)
class SecioException(HandshakeFailure):
@ -6,10 +8,10 @@ class SecioException(HandshakeFailure):
class SelfEncryption(SecioException):
"""Raised to indicate that a host is attempting to encrypt communications
with itself."""
pass
"""
Raised to indicate that a host is attempting to encrypt communications
with itself.
"""
class PeerMismatchException(SecioException):

View File

@ -13,4 +13,4 @@ message Propose {
message Exchange {
optional bytes ephemeral_public_key = 1;
optional bytes signature = 2;
}
}

View File

@ -1,143 +1,221 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/security/secio/pb/spipe.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/security/secio/pb/spipe.proto',
package='spipe.pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n$libp2p/security/secio/pb/spipe.proto\x12\x08spipe.pb\"_\n\x07Propose\x12\x0c\n\x04rand\x18\x01 \x01(\x0c\x12\x12\n\npublic_key\x18\x02 \x01(\x0c\x12\x11\n\texchanges\x18\x03 \x01(\t\x12\x0f\n\x07\x63iphers\x18\x04 \x01(\t\x12\x0e\n\x06hashes\x18\x05 \x01(\t\";\n\x08\x45xchange\x12\x1c\n\x14\x65phemeral_public_key\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c')
name="libp2p/security/secio/pb/spipe.proto",
package="spipe.pb",
syntax="proto2",
serialized_options=None,
serialized_pb=_b(
'\n$libp2p/security/secio/pb/spipe.proto\x12\x08spipe.pb"_\n\x07Propose\x12\x0c\n\x04rand\x18\x01 \x01(\x0c\x12\x12\n\npublic_key\x18\x02 \x01(\x0c\x12\x11\n\texchanges\x18\x03 \x01(\t\x12\x0f\n\x07\x63iphers\x18\x04 \x01(\t\x12\x0e\n\x06hashes\x18\x05 \x01(\t";\n\x08\x45xchange\x12\x1c\n\x14\x65phemeral_public_key\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c'
),
)
_PROPOSE = _descriptor.Descriptor(
name='Propose',
full_name='spipe.pb.Propose',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rand', full_name='spipe.pb.Propose.rand', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='public_key', full_name='spipe.pb.Propose.public_key', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchanges', full_name='spipe.pb.Propose.exchanges', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ciphers', full_name='spipe.pb.Propose.ciphers', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hashes', full_name='spipe.pb.Propose.hashes', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=50,
serialized_end=145,
name="Propose",
full_name="spipe.pb.Propose",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="rand",
full_name="spipe.pb.Propose.rand",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="public_key",
full_name="spipe.pb.Propose.public_key",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="exchanges",
full_name="spipe.pb.Propose.exchanges",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="ciphers",
full_name="spipe.pb.Propose.ciphers",
index=3,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="hashes",
full_name="spipe.pb.Propose.hashes",
index=4,
number=5,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=50,
serialized_end=145,
)
_EXCHANGE = _descriptor.Descriptor(
name='Exchange',
full_name='spipe.pb.Exchange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ephemeral_public_key', full_name='spipe.pb.Exchange.ephemeral_public_key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='signature', full_name='spipe.pb.Exchange.signature', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=147,
serialized_end=206,
name="Exchange",
full_name="spipe.pb.Exchange",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="ephemeral_public_key",
full_name="spipe.pb.Exchange.ephemeral_public_key",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="signature",
full_name="spipe.pb.Exchange.signature",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=147,
serialized_end=206,
)
DESCRIPTOR.message_types_by_name['Propose'] = _PROPOSE
DESCRIPTOR.message_types_by_name['Exchange'] = _EXCHANGE
DESCRIPTOR.message_types_by_name["Propose"] = _PROPOSE
DESCRIPTOR.message_types_by_name["Exchange"] = _EXCHANGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Propose = _reflection.GeneratedProtocolMessageType('Propose', (_message.Message,), {
'DESCRIPTOR' : _PROPOSE,
'__module__' : 'libp2p.security.secio.pb.spipe_pb2'
# @@protoc_insertion_point(class_scope:spipe.pb.Propose)
})
Propose = _reflection.GeneratedProtocolMessageType(
"Propose",
(_message.Message,),
{
"DESCRIPTOR": _PROPOSE,
"__module__": "libp2p.security.secio.pb.spipe_pb2"
# @@protoc_insertion_point(class_scope:spipe.pb.Propose)
},
)
_sym_db.RegisterMessage(Propose)
Exchange = _reflection.GeneratedProtocolMessageType('Exchange', (_message.Message,), {
'DESCRIPTOR' : _EXCHANGE,
'__module__' : 'libp2p.security.secio.pb.spipe_pb2'
# @@protoc_insertion_point(class_scope:spipe.pb.Exchange)
})
Exchange = _reflection.GeneratedProtocolMessageType(
"Exchange",
(_message.Message,),
{
"DESCRIPTOR": _EXCHANGE,
"__module__": "libp2p.security.secio.pb.spipe_pb2"
# @@protoc_insertion_point(class_scope:spipe.pb.Exchange)
},
)
_sym_db.RegisterMessage(Exchange)

View File

@ -17,51 +17,116 @@ from typing_extensions import (
Literal as typing_extensions___Literal,
)
class Propose(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
rand = ... # type: bytes
public_key = ... # type: bytes
exchanges = ... # type: typing___Text
ciphers = ... # type: typing___Text
hashes = ... # type: typing___Text
rand = ... # type: bytes
public_key = ... # type: bytes
exchanges = ... # type: typing___Text
ciphers = ... # type: typing___Text
hashes = ... # type: typing___Text
def __init__(self,
def __init__(
self,
*,
rand : typing___Optional[bytes] = None,
public_key : typing___Optional[bytes] = None,
exchanges : typing___Optional[typing___Text] = None,
ciphers : typing___Optional[typing___Text] = None,
hashes : typing___Optional[typing___Text] = None,
) -> None: ...
rand: typing___Optional[bytes] = None,
public_key: typing___Optional[bytes] = None,
exchanges: typing___Optional[typing___Text] = None,
ciphers: typing___Optional[typing___Text] = None,
hashes: typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Propose: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"ciphers",u"exchanges",u"hashes",u"public_key",u"rand"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"ciphers",u"exchanges",u"hashes",u"public_key",u"rand"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"ciphers", "exchanges", "hashes", "public_key", "rand"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"ciphers", "exchanges", "hashes", "public_key", "rand"
],
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"ciphers",b"ciphers",u"exchanges",b"exchanges",u"hashes",b"hashes",u"public_key",b"public_key",u"rand",b"rand"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"ciphers",b"ciphers",u"exchanges",b"exchanges",u"hashes",b"hashes",u"public_key",b"public_key",u"rand",b"rand"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"ciphers",
b"ciphers",
"exchanges",
b"exchanges",
"hashes",
b"hashes",
"public_key",
b"public_key",
"rand",
b"rand",
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"ciphers",
b"ciphers",
"exchanges",
b"exchanges",
"hashes",
b"hashes",
"public_key",
b"public_key",
"rand",
b"rand",
],
) -> None: ...
class Exchange(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
ephemeral_public_key = ... # type: bytes
signature = ... # type: bytes
ephemeral_public_key = ... # type: bytes
signature = ... # type: bytes
def __init__(self,
def __init__(
self,
*,
ephemeral_public_key : typing___Optional[bytes] = None,
signature : typing___Optional[bytes] = None,
) -> None: ...
ephemeral_public_key: typing___Optional[bytes] = None,
signature: typing___Optional[bytes] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Exchange: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"ephemeral_public_key",u"signature"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"ephemeral_public_key",u"signature"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"ephemeral_public_key", "signature"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"ephemeral_public_key", "signature"
],
) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"ephemeral_public_key",b"ephemeral_public_key",u"signature",b"signature"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"ephemeral_public_key",b"ephemeral_public_key",u"signature",b"signature"]) -> None: ...
def HasField(
self,
field_name: typing_extensions___Literal[
"ephemeral_public_key",
b"ephemeral_public_key",
"signature",
b"signature",
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"ephemeral_public_key",
b"ephemeral_public_key",
"signature",
b"signature",
],
) -> None: ...

View File

@ -1,31 +1,66 @@
from dataclasses import dataclass
from dataclasses import (
dataclass,
)
import itertools
from typing import Optional, Tuple
from typing import (
Optional,
Tuple,
)
import multihash
from libp2p.crypto.authenticated_encryption import (
EncryptionParameters as AuthenticatedEncryptionParameters,
)
from libp2p.crypto.authenticated_encryption import (
InvalidMACException,
)
from libp2p.crypto.authenticated_encryption import (
initialize_pair as initialize_pair_for_encryption,
)
from libp2p.crypto.authenticated_encryption import InvalidMACException
from libp2p.crypto.authenticated_encryption import MacAndCipher as Encrypter
from libp2p.crypto.ecc import ECCPublicKey
from libp2p.crypto.exceptions import MissingDeserializerError
from libp2p.crypto.key_exchange import create_ephemeral_key_pair
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.serialization import deserialize_public_key
from libp2p.io.abc import EncryptedMsgReadWriter
from libp2p.io.exceptions import DecryptionFailedException, IOException
from libp2p.io.msgio import FixedSizeLenMsgReadWriter
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.crypto.ecc import (
ECCPublicKey,
)
from libp2p.crypto.exceptions import (
MissingDeserializerError,
)
from libp2p.crypto.key_exchange import (
create_ephemeral_key_pair,
)
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.crypto.serialization import (
deserialize_public_key,
)
from libp2p.io.abc import (
EncryptedMsgReadWriter,
)
from libp2p.io.exceptions import (
DecryptionFailedException,
IOException,
)
from libp2p.io.msgio import (
FixedSizeLenMsgReadWriter,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import ID as PeerID
from libp2p.security.base_transport import BaseSecureTransport
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.security.secure_session import SecureSession
from libp2p.typing import TProtocol
from libp2p.security.base_transport import (
BaseSecureTransport,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.security.secure_session import (
SecureSession,
)
from libp2p.typing import (
TProtocol,
)
from .exceptions import (
IncompatibleChoices,
@ -36,7 +71,10 @@ from .exceptions import (
SedesException,
SelfEncryption,
)
from .pb.spipe_pb2 import Exchange, Propose
from .pb.spipe_pb2 import (
Exchange,
Propose,
)
ID = TProtocol("/secio/1.0.0")
@ -101,8 +139,10 @@ class SecioMsgReadWriter(EncryptedMsgReadWriter):
@dataclass(frozen=True)
class Proposal:
"""A ``Proposal`` represents the set of session parameters one peer in a
pair of peers attempting to negotiate a `secio` channel prefers."""
"""
Represents the set of session parameters that one peer in a
pair prefers when negotiating a `secio` channel.
"""
nonce: bytes
public_key: PublicKey
@ -401,8 +441,10 @@ async def create_secure_session(
class Transport(BaseSecureTransport):
"""``Transport`` provides a security upgrader for a ``IRawConnection``,
following the `secio` protocol defined in the libp2p specs."""
"""
Provide a security upgrader for an ``IRawConnection``,
following the `secio` protocol defined in the libp2p specs.
"""
def get_nonce(self) -> bytes:
return self.secure_bytes_provider(NONCE_SIZE)
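The `Proposal` above carries comma-separated preference lists for exchanges, ciphers, and hashes, and the negotiation step roughly amounts to taking the first option from the winning side's list that the other side also supports. A standalone sketch of that selection step (how the winning side is decided is left out here, and the helper name is made up):

def select_first_common(preferred: str, supported: str) -> str:
    """Pick the first entry in `preferred` that also appears in `supported`.

    Both arguments are comma-separated preference lists, mirroring the
    `exchanges`, `ciphers`, and `hashes` fields of the Propose message.
    """
    supported_set = set(supported.split(","))
    for candidate in preferred.split(","):
        if candidate in supported_set:
            return candidate
    raise ValueError("no mutually supported option")


assert select_first_common("P-256,P-384", "P-384,P-256") == "P-256"
assert select_first_common("AES-256,AES-128", "AES-128") == "AES-128"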

View File

@ -1,13 +1,22 @@
from abc import ABC, abstractmethod
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from abc import (
ABC,
abstractmethod,
)
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
"""
Represents a secured connection object, which includes a connection and details about the security
involved in the secured connection
Represents a secured connection object, which includes a connection and details about
the security involved in the secured connection
Relevant go repo: https://github.com/libp2p/go-conn-security/blob/master/interface.go
"""

View File

@ -1,9 +1,18 @@
import io
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.io.abc import EncryptedMsgReadWriter
from libp2p.peer.id import ID
from libp2p.security.base_session import BaseSession
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.io.abc import (
EncryptedMsgReadWriter,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.base_session import (
BaseSession,
)
class SecureSession(BaseSession):
@ -39,7 +48,7 @@ class SecureSession(BaseSession):
def _drain(self, n: int) -> bytes:
if self.low_watermark == self.high_watermark:
return bytes()
return b""
data = self.buf.getbuffer()[self.low_watermark : self.high_watermark]
@ -61,7 +70,7 @@ class SecureSession(BaseSession):
async def read(self, n: int = None) -> bytes:
if n == 0:
return bytes()
return b""
data_from_buffer = self._drain(n)
if len(data_from_buffer) > 0:
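The watermark bookkeeping in `SecureSession` follows a common buffered-read pattern: serve reads from leftover plaintext first, and only then pull (and decrypt) the next message off the wire. A simplified, purely local sketch of that pattern, with no encryption and made-up names:

import io
from typing import List


class BufferedReader:
    """Serve reads from a local buffer before pulling the next chunk."""

    def __init__(self, chunks: List[bytes]) -> None:
        self._chunks = list(chunks)  # stand-in for decrypted wire messages
        self._buf = io.BytesIO()
        self._low = 0
        self._high = 0

    def _drain(self, n: int) -> bytes:
        if self._low == self._high:
            return b""
        data = self._buf.getbuffer()[self._low : self._high].tobytes()
        if n < len(data):
            data = data[:n]
        self._low += len(data)
        return data

    def _fill(self, data: bytes) -> None:
        self._buf = io.BytesIO(data)
        self._low, self._high = 0, len(data)

    def read(self, n: int) -> bytes:
        if n == 0:
            return b""
        data = self._drain(n)
        if data:
            return data
        if not self._chunks:
            return b""  # nothing left to read
        self._fill(self._chunks.pop(0))
        return self._drain(n)


reader = BufferedReader([b"hello world"])
assert reader.read(5) == b"hello"
assert reader.read(100) == b" world"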

View File

@ -1,9 +1,17 @@
from abc import ABC, abstractmethod
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from abc import (
ABC,
abstractmethod,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
"""
Transport that is used to secure a connection. This transport is

View File

@ -1,20 +1,41 @@
from abc import ABC
from collections import OrderedDict
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.protocol_muxer.multiselect import Multiselect
from libp2p.protocol_muxer.multiselect_client import MultiselectClient
from libp2p.protocol_muxer.multiselect_communicator import MultiselectCommunicator
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.security.secure_transport_interface import ISecureTransport
from libp2p.transport.typing import TSecurityOptions
from libp2p.typing import TProtocol
from abc import (
ABC,
)
from collections import (
OrderedDict,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.protocol_muxer.multiselect import (
Multiselect,
)
from libp2p.protocol_muxer.multiselect_client import (
MultiselectClient,
)
from libp2p.protocol_muxer.multiselect_communicator import (
MultiselectCommunicator,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.security.secure_transport_interface import (
ISecureTransport,
)
from libp2p.transport.typing import (
TSecurityOptions,
)
from libp2p.typing import (
TProtocol,
)
"""
Represents a secured connection object, which includes a connection and details about the security
involved in the secured connection
Represents a secured connection object, which includes a connection and details about
the security involved in the secured connection
Relevant go repo: https://github.com/libp2p/go-conn-security/blob/master/interface.go
"""

View File

@ -1,10 +1,19 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
import trio
from libp2p.io.abc import ReadWriteCloser
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
class IMuxedConn(ABC):
@ -18,7 +27,7 @@ class IMuxedConn(ABC):
@abstractmethod
def __init__(self, conn: ISecureConn, peer_id: ID) -> None:
"""
create a new muxed connection.
Create a new muxed connection.
:param conn: an instance of secured connection
for new muxed streams
@ -28,21 +37,21 @@ class IMuxedConn(ABC):
@property
@abstractmethod
def is_initiator(self) -> bool:
"""if this connection is the initiator."""
"""If this connection is the initiator."""
@abstractmethod
async def start(self) -> None:
"""start the multiplexer."""
"""Start the multiplexer."""
@abstractmethod
async def close(self) -> None:
"""close connection."""
"""Close connection."""
@property
@abstractmethod
def is_closed(self) -> bool:
"""
check connection is fully closed.
Check connection is fully closed.
:return: true if successful
"""
@ -50,28 +59,27 @@ class IMuxedConn(ABC):
@abstractmethod
async def open_stream(self) -> "IMuxedStream":
"""
creates a new muxed_stream.
Create a new muxed_stream.
:return: a new ``IMuxedStream`` stream
"""
@abstractmethod
async def accept_stream(self) -> "IMuxedStream":
"""accepts a muxed stream opened by the other end."""
"""Accept a muxed stream opened by the other end."""
class IMuxedStream(ReadWriteCloser):
muxed_conn: IMuxedConn
@abstractmethod
async def reset(self) -> None:
"""closes both ends of the stream tells this remote side to hang up."""
"""Close both ends of the stream tells this remote side to hang up."""
@abstractmethod
def set_deadline(self, ttl: int) -> bool:
"""
set deadline for muxed stream.
Set deadline for muxed stream.
:return: a new stream
"""

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class MuxedConnError(BaseLibp2pError):

View File

@ -1,4 +1,6 @@
from enum import Enum
from enum import (
Enum,
)
class HeaderTags(Enum):

View File

@ -1,4 +1,6 @@
from typing import NamedTuple
from typing import (
NamedTuple,
)
class StreamID(NamedTuple):

View File

@ -1,15 +1,34 @@
import logging
from typing import Dict, Optional, Tuple
from typing import (
Dict,
Optional,
Tuple,
)
import trio
from libp2p.exceptions import ParseError
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.connection.exceptions import RawConnError
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.typing import TProtocol
from libp2p.exceptions import (
ParseError,
)
from libp2p.io.exceptions import (
IncompleteReadError,
)
from libp2p.network.connection.exceptions import (
RawConnError,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
IMuxedStream,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
decode_uvarint_from_stream,
encode_uvarint,
@ -17,10 +36,18 @@ from libp2p.utils import (
read_varint_prefixed_bytes,
)
from .constants import HeaderTags
from .datastructures import StreamID
from .exceptions import MplexUnavailable
from .mplex_stream import MplexStream
from .constants import (
HeaderTags,
)
from .datastructures import (
StreamID,
)
from .exceptions import (
MplexUnavailable,
)
from .mplex_stream import (
MplexStream,
)
MPLEX_PROTOCOL_ID = TProtocol("/mplex/6.7.0")
# Ref: https://github.com/libp2p/go-mplex/blob/414db61813d9ad3e6f4a7db5c1b1612de343ace9/multiplex.go#L115 # noqa: E501
@ -49,7 +76,7 @@ class Mplex(IMuxedConn):
def __init__(self, secured_conn: ISecureConn, peer_id: ID) -> None:
"""
create a new muxed connection.
Create a new muxed connection.
:param secured_conn: an instance of ``ISecureConn``
:param generic_protocol_handler: generic protocol handler
@ -81,7 +108,9 @@ class Mplex(IMuxedConn):
return self.secured_conn.is_initiator
async def close(self) -> None:
"""close the stream muxer and underlying secured connection."""
"""
Close the stream muxer and underlying secured connection.
"""
if self.event_shutting_down.is_set():
return
# Set the `event_shutting_down`, to allow graceful shutdown.
@ -93,7 +122,7 @@ class Mplex(IMuxedConn):
@property
def is_closed(self) -> bool:
"""
check connection is fully closed.
Check connection is fully closed.
:return: true if successful
"""
@ -121,7 +150,7 @@ class Mplex(IMuxedConn):
async def open_stream(self) -> IMuxedStream:
"""
creates a new muxed_stream.
Create a new muxed_stream.
:return: a new ``MplexStream``
"""
@ -134,7 +163,9 @@ class Mplex(IMuxedConn):
return stream
async def accept_stream(self) -> IMuxedStream:
"""accepts a muxed stream opened by the other end."""
"""
Accept a muxed stream opened by the other end.
"""
try:
return await self.new_stream_receive_channel.receive()
except trio.EndOfChannel:
@ -144,7 +175,7 @@ class Mplex(IMuxedConn):
self, flag: HeaderTags, data: Optional[bytes], stream_id: StreamID
) -> int:
"""
sends a message over the connection.
Send a message over the connection.
:param flag: header to use
:param data: data to send in the message
@ -162,7 +193,7 @@ class Mplex(IMuxedConn):
async def write_to_stream(self, _bytes: bytes) -> None:
"""
writes a byte array to a secured connection.
Write a byte array to a secured connection.
:param _bytes: byte array to write
:return: length written
@ -175,8 +206,10 @@ class Mplex(IMuxedConn):
) from e
async def handle_incoming(self) -> None:
"""Read a message off of the secured connection and add it to the
corresponding message buffer."""
"""
Read a message off of the secured connection and add it to the
corresponding message buffer.
"""
self.event_started.set()
while True:
try:
@ -194,19 +227,19 @@ class Mplex(IMuxedConn):
:return: stream_id, flag, message contents
"""
try:
header = await decode_uvarint_from_stream(self.secured_conn)
except (ParseError, RawConnError, IncompleteReadError) as error:
raise MplexUnavailable(
f"failed to read the header correctly from the underlying connection: {error}"
"failed to read the header correctly from the underlying connection: "
f"{error}"
)
try:
message = await read_varint_prefixed_bytes(self.secured_conn)
except (ParseError, RawConnError, IncompleteReadError) as error:
raise MplexUnavailable(
"failed to read the message body correctly from the underlying connection: "
f"{error}"
"failed to read the message body correctly from the underlying "
f"connection: {error}"
)
flag = header & 0x07
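The `flag = header & 0x07` line relies on the mplex header layout: a single unsigned varint whose low three bits carry the flag (the `HeaderTags` values) and whose remaining bits carry the stream ID. A self-contained sketch of encoding and decoding such a header; the helpers below are simplified in-memory stand-ins, not the library's stream-based `encode_uvarint`/`decode_uvarint_from_stream`.

def uvarint_encode(value: int) -> bytes:
    """Encode an unsigned integer as a varint (7 bits per byte, LSB first)."""
    out = bytearray()
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            out.append(byte | 0x80)
        else:
            out.append(byte)
            return bytes(out)


def uvarint_decode(data: bytes) -> int:
    """Decode a varint; assumes `data` holds exactly one encoded integer."""
    result = 0
    for shift, byte in enumerate(data):
        result |= (byte & 0x7F) << (7 * shift)
        if not byte & 0x80:
            break
    return result


# Header layout: (stream_id << 3) | flag, with the flag in the low three bits.
stream_id, flag = 42, 2
header = uvarint_encode((stream_id << 3) | flag)
decoded = uvarint_decode(header)
assert decoded & 0x07 == flag
assert decoded >> 3 == stream_id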

View File

@ -1,16 +1,32 @@
from typing import TYPE_CHECKING
from typing import (
TYPE_CHECKING,
)
import trio
from libp2p.stream_muxer.abc import IMuxedStream
from libp2p.stream_muxer.exceptions import MuxedConnUnavailable
from libp2p.stream_muxer.abc import (
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import (
MuxedConnUnavailable,
)
from .constants import HeaderTags
from .datastructures import StreamID
from .exceptions import MplexStreamClosed, MplexStreamEOF, MplexStreamReset
from .constants import (
HeaderTags,
)
from .datastructures import (
StreamID,
)
from .exceptions import (
MplexStreamClosed,
MplexStreamEOF,
MplexStreamReset,
)
if TYPE_CHECKING:
from libp2p.stream_muxer.mplex.mplex import Mplex
from libp2p.stream_muxer.mplex.mplex import (
Mplex,
)
class MplexStream(IMuxedStream):
@ -44,7 +60,7 @@ class MplexStream(IMuxedStream):
incoming_data_channel: "trio.MemoryReceiveChannel[bytes]",
) -> None:
"""
create new MuxedStream in muxer.
Create new MuxedStream in muxer.
:param stream_id: stream id of this stream
:param muxed_conn: muxed connection of this muxed_stream
@ -93,8 +109,8 @@ class MplexStream(IMuxedStream):
"""
if n is not None and n < 0:
raise ValueError(
f"the number of bytes to read `n` must be non-negative or "
"`None` to indicate read until EOF"
"the number of bytes to read `n` must be non-negative or "
f"`None` to indicate read until EOF, got n={n}"
)
if self.event_reset.is_set():
raise MplexStreamReset
@ -102,16 +118,16 @@ class MplexStream(IMuxedStream):
return await self._read_until_eof()
if len(self._buf) == 0:
data: bytes
# Peek whether there is data available. If yes, we just read until there is no data,
# and then return.
# Peek whether there is data available. If yes, we just read until there is
# no data, then return.
try:
data = self.incoming_data_channel.receive_nowait()
self._buf.extend(data)
except trio.EndOfChannel:
raise MplexStreamEOF
except trio.WouldBlock:
# We know `receive` will be blocked here. Wait for data here with `receive` and
# catch all kinds of errors here.
# We know `receive` will be blocked here. Wait for data here with
# `receive` and catch all kinds of errors here.
try:
data = await self.incoming_data_channel.receive()
self._buf.extend(data)
@ -121,8 +137,8 @@ class MplexStream(IMuxedStream):
if self.event_remote_closed.is_set():
raise MplexStreamEOF
except trio.ClosedResourceError as error:
# Probably `incoming_data_channel` is closed in `reset` when we are waiting
# for `receive`.
# Probably `incoming_data_channel` is closed in `reset` when we are
# waiting for `receive`.
if self.event_reset.is_set():
raise MplexStreamReset
raise Exception(
@ -136,7 +152,7 @@ class MplexStream(IMuxedStream):
async def write(self, data: bytes) -> None:
"""
write to stream.
Write to stream.
:return: number of bytes written
"""
@ -150,8 +166,10 @@ class MplexStream(IMuxedStream):
await self.muxed_conn.send_message(flag, data, self.stream_id)
async def close(self) -> None:
"""Closing a stream closes it for writing and closes the remote end for
reading but allows writing in the other direction."""
"""
Closing a stream closes it for writing and closes the remote end for
reading but allows writing in the other direction.
"""
# TODO error handling with timeout
async with self.close_lock:
@ -175,7 +193,7 @@ class MplexStream(IMuxedStream):
self.muxed_conn.streams.pop(self.stream_id, None)
async def reset(self) -> None:
"""closes both ends of the stream tells this remote side to hang up."""
"""Close both ends of the stream tells this remote side to hang up."""
async with self.close_lock:
# Both sides have been closed. No need to event_reset.
if self.event_remote_closed.is_set() and self.event_local_closed.is_set():
@ -190,7 +208,8 @@ class MplexStream(IMuxedStream):
if self.is_initiator
else HeaderTags.ResetReceiver
)
# Try to send reset message to the other side. Ignore if there is anything wrong.
# Try to send reset message to the other side.
# Ignore if there is anything wrong.
try:
await self.muxed_conn.send_message(flag, None, self.stream_id)
except MuxedConnUnavailable:
@ -208,7 +227,7 @@ class MplexStream(IMuxedStream):
# TODO deadline not in use
def set_deadline(self, ttl: int) -> bool:
"""
set deadline for muxed stream.
Set deadline for muxed stream.
:return: True if successful
"""
@ -218,7 +237,7 @@ class MplexStream(IMuxedStream):
def set_read_deadline(self, ttl: int) -> bool:
"""
set read deadline for muxed stream.
Set read deadline for muxed stream.
:return: True if successful
"""
@ -227,7 +246,7 @@ class MplexStream(IMuxedStream):
def set_write_deadline(self, ttl: int) -> bool:
"""
set write deadline for muxed stream.
Set write deadline for muxed stream.
:return: True if successful
"""

View File

@ -1,15 +1,36 @@
from collections import OrderedDict
from collections import (
OrderedDict,
)
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.protocol_muxer.multiselect import Multiselect
from libp2p.protocol_muxer.multiselect_client import MultiselectClient
from libp2p.protocol_muxer.multiselect_communicator import MultiselectCommunicator
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.transport.typing import TMuxerClass, TMuxerOptions
from libp2p.typing import TProtocol
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.protocol_muxer.multiselect import (
Multiselect,
)
from libp2p.protocol_muxer.multiselect_client import (
MultiselectClient,
)
from libp2p.protocol_muxer.multiselect_communicator import (
MultiselectCommunicator,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.transport.typing import (
TMuxerClass,
TMuxerOptions,
)
from libp2p.typing import (
TProtocol,
)
from .abc import IMuxedConn
from .abc import (
IMuxedConn,
)
# FIXME: add negotiate timeout to `MuxerMultistream`
DEFAULT_NEGOTIATE_TIMEOUT = 60
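The FIXME above says a negotiate timeout still needs to be wired into `MuxerMultistream`. With trio, the usual tool is a cancellation scope such as `trio.fail_after`; a standalone sketch follows (the `negotiate` coroutine is a stand-in, not the library's multiselect client):

import trio

DEFAULT_NEGOTIATE_TIMEOUT = 60


async def negotiate(delay: float) -> str:
    await trio.sleep(delay)  # stand-in for the real multiselect round trips
    return "/mplex/6.7.0"


async def main() -> None:
    try:
        with trio.fail_after(DEFAULT_NEGOTIATE_TIMEOUT):
            protocol = await negotiate(delay=0.01)
        print("negotiated", protocol)
    except trio.TooSlowError:
        print("negotiation timed out")


trio.run(main)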

View File

@ -1,8 +1,13 @@
from typing import NamedTuple
from typing import (
NamedTuple,
)
import multiaddr
from libp2p.pubsub import floodsub, gossipsub
from libp2p.pubsub import (
floodsub,
gossipsub,
)
# Just an arbitrary large number.
# It is used when calling `MplexStream.read(MAX_READ_LEN)`,

View File

@ -1,35 +1,96 @@
from typing import Any, AsyncIterator, Callable, Dict, List, Sequence, Tuple, cast
from typing import (
Any,
AsyncIterator,
Callable,
Dict,
List,
Sequence,
Tuple,
cast,
)
from async_exit_stack import AsyncExitStack
from async_generator import asynccontextmanager
from async_service import background_trio_service
from async_exit_stack import (
AsyncExitStack,
)
from async_generator import (
asynccontextmanager,
)
from async_service import (
background_trio_service,
)
import factory
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
import trio
from libp2p import generate_new_rsa_identity, generate_peer_id_from
from libp2p import (
generate_new_rsa_identity,
generate_peer_id_from,
)
from libp2p.crypto.ed25519 import create_new_key_pair as create_ed25519_key_pair
from libp2p.crypto.keys import KeyPair, PrivateKey
from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
)
from libp2p.crypto.secp256k1 import create_new_key_pair as create_secp256k1_key_pair
from libp2p.host.basic_host import BasicHost
from libp2p.host.host_interface import IHost
from libp2p.host.routed_host import RoutedHost
from libp2p.io.abc import ReadWriteCloser
from libp2p.network.connection.raw_connection import RawConnection
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.network.connection.swarm_connection import SwarmConn
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.swarm import Swarm
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.peer.peerstore import PeerStore
from libp2p.pubsub.abc import IPubsubRouter
from libp2p.pubsub.floodsub import FloodSub
from libp2p.pubsub.gossipsub import GossipSub
from libp2p.host.basic_host import (
BasicHost,
)
from libp2p.host.host_interface import (
IHost,
)
from libp2p.host.routed_host import (
RoutedHost,
)
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.network.connection.raw_connection import (
RawConnection,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.network.connection.swarm_connection import (
SwarmConn,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.network.swarm import (
Swarm,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.peer.peerstore import (
PeerStore,
)
from libp2p.pubsub.abc import (
IPubsubRouter,
)
from libp2p.pubsub.floodsub import (
FloodSub,
)
from libp2p.pubsub.gossipsub import (
GossipSub,
)
import libp2p.pubsub.pb.rpc_pb2 as rpc_pb2
from libp2p.pubsub.pubsub import Pubsub, get_peer_and_seqno_msg_id
from libp2p.routing.interfaces import IPeerRouting
from libp2p.security.insecure.transport import PLAINTEXT_PROTOCOL_ID, InsecureTransport
from libp2p.pubsub.pubsub import (
Pubsub,
get_peer_and_seqno_msg_id,
)
from libp2p.routing.interfaces import (
IPeerRouting,
)
from libp2p.security.insecure.transport import (
PLAINTEXT_PROTOCOL_ID,
InsecureTransport,
)
from libp2p.security.noise.messages import (
NoiseHandshakePayload,
make_handshake_payload_sig,
@ -37,18 +98,45 @@ from libp2p.security.noise.messages import (
from libp2p.security.noise.transport import PROTOCOL_ID as NOISE_PROTOCOL_ID
from libp2p.security.noise.transport import Transport as NoiseTransport
import libp2p.security.secio.transport as secio
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.security.secure_transport_interface import ISecureTransport
from libp2p.stream_muxer.mplex.mplex import MPLEX_PROTOCOL_ID, Mplex
from libp2p.stream_muxer.mplex.mplex_stream import MplexStream
from libp2p.tools.constants import GOSSIPSUB_PARAMS
from libp2p.transport.tcp.tcp import TCP
from libp2p.transport.typing import TMuxerOptions, TSecurityOptions
from libp2p.transport.upgrader import TransportUpgrader
from libp2p.typing import TProtocol
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.security.secure_transport_interface import (
ISecureTransport,
)
from libp2p.stream_muxer.mplex.mplex import (
MPLEX_PROTOCOL_ID,
Mplex,
)
from libp2p.stream_muxer.mplex.mplex_stream import (
MplexStream,
)
from libp2p.tools.constants import (
GOSSIPSUB_PARAMS,
)
from libp2p.transport.tcp.tcp import (
TCP,
)
from libp2p.transport.typing import (
TMuxerOptions,
TSecurityOptions,
)
from libp2p.transport.upgrader import (
TransportUpgrader,
)
from libp2p.typing import (
TProtocol,
)
from .constants import FLOODSUB_PROTOCOL_ID, GOSSIPSUB_PROTOCOL_ID, LISTEN_MADDR
from .utils import connect, connect_swarm
from .constants import (
FLOODSUB_PROTOCOL_ID,
GOSSIPSUB_PROTOCOL_ID,
LISTEN_MADDR,
)
from .utils import (
connect,
connect_swarm,
)
DEFAULT_SECURITY_PROTOCOL_ID = PLAINTEXT_PROTOCOL_ID
@ -105,7 +193,7 @@ def noise_transport_factory(key_pair: KeyPair) -> ISecureTransport:
def security_options_factory_factory(
protocol_id: TProtocol = None
protocol_id: TProtocol = None,
) -> Callable[[KeyPair], TSecurityOptions]:
if protocol_id is None:
protocol_id = DEFAULT_SECURITY_PROTOCOL_ID
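
For context, this factory-of-factories returns a callable that maps a `KeyPair` to a `TSecurityOptions` mapping keyed by protocol ID. A hedged usage sketch, assuming the helper is importable from `libp2p.tools.factories`:

```python
# Assumed import path; the helper is defined in the test factories module
# touched by this diff.
from libp2p import generate_new_rsa_identity
from libp2p.tools.factories import security_options_factory_factory

# With no argument the factory falls back to DEFAULT_SECURITY_PROTOCOL_ID
# (plaintext in this module).
security_options_factory = security_options_factory_factory()
key_pair = generate_new_rsa_identity()
security_options = security_options_factory(key_pair)
# TSecurityOptions maps protocol IDs to configured secure transport instances.
print(list(security_options))
```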
@ -135,7 +223,7 @@ def default_muxer_transport_factory() -> TMuxerOptions:
@asynccontextmanager
async def raw_conn_factory(
nursery: trio.Nursery
nursery: trio.Nursery,
) -> AsyncIterator[Tuple[IRawConnection, IRawConnection]]:
conn_0 = None
conn_1 = None
@ -158,7 +246,7 @@ async def raw_conn_factory(
@asynccontextmanager
async def noise_conn_factory(
nursery: trio.Nursery
nursery: trio.Nursery,
) -> AsyncIterator[Tuple[ISecureConn, ISecureConn]]:
local_transport = cast(
NoiseTransport, noise_transport_factory(create_secp256k1_key_pair())
@ -188,7 +276,8 @@ async def noise_conn_factory(
if local_secure_conn is None or remote_secure_conn is None:
raise Exception(
"local or remote secure conn has not been successfully upgraded"
f"local_secure_conn={local_secure_conn}, remote_secure_conn={remote_secure_conn}"
f"local_secure_conn={local_secure_conn}, "
f"remote_secure_conn={remote_secure_conn}"
)
yield local_secure_conn, remote_secure_conn
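
As a rough illustration, a test might drive `noise_conn_factory` like the sketch below; only the `@asynccontextmanager` decorator and the signature come from the diff, while the import path and the cleanup step are assumptions:

```python
import trio

# Assumed import path for the helper defined above.
from libp2p.tools.factories import noise_conn_factory


async def main() -> None:
    async with trio.open_nursery() as nursery:
        async with noise_conn_factory(nursery) as (local_conn, remote_conn):
            # Both ends are ISecureConn instances behind a Noise handshake;
            # bytes written on one side come out decrypted on the other.
            await local_conn.write(b"hello over noise")
            print(await remote_conn.read(1024))
        # The factory parks its listener/handler tasks in the nursery, so
        # cancel them once we are done, as a test fixture normally would.
        nursery.cancel_scope.cancel()


trio.run(main)
```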
@ -223,8 +312,8 @@ class SwarmFactory(factory.Factory):
muxer_opt: TMuxerOptions = None,
) -> AsyncIterator[Swarm]:
# `factory.Factory.__init__` does *not* prepare a *default value* if we pass
# an argument explicitly with `None`. If an argument is `None`, we don't pass it to
# `factory.Factory.__init__`, in order to let the function initialize it.
# an argument explicitly with `None`. If an argument is `None`, we don't pass it
# to `factory.Factory.__init__`, in order to let the function initialize it.
optional_kwargs: Dict[str, Any] = {}
if key_pair is not None:
optional_kwargs["key_pair"] = key_pair
@ -541,7 +630,7 @@ async def swarm_conn_pair_factory(
@asynccontextmanager
async def mplex_conn_pair_factory(
security_protocol: TProtocol = None
security_protocol: TProtocol = None,
) -> AsyncIterator[Tuple[Mplex, Mplex]]:
async with swarm_conn_pair_factory(
security_protocol=security_protocol, muxer_opt=default_muxer_transport_factory()
@ -554,7 +643,7 @@ async def mplex_conn_pair_factory(
@asynccontextmanager
async def mplex_stream_pair_factory(
security_protocol: TProtocol = None
security_protocol: TProtocol = None,
) -> AsyncIterator[Tuple[MplexStream, MplexStream]]:
async with mplex_conn_pair_factory(
security_protocol=security_protocol

Some files were not shown because too many files have changed in this diff.