run lint and fix errors, except mypy

This commit is contained in:
pacrob
2024-02-19 15:56:20 -07:00
parent 42605c0288
commit 94483714a3
171 changed files with 4809 additions and 2290 deletions


@@ -3,14 +3,14 @@ current_version = 0.1.5
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<stage>[^.]*)\.(?P<devnum>\d+))?
serialize =
    {major}.{minor}.{patch}-{stage}.{devnum}
    {major}.{minor}.{patch}
[bumpversion:part:stage]
optional_value = stable
first_value = stable
values =
    alpha
    beta
    stable


@@ -84,7 +84,7 @@ windows-wheel-steps:
paths:
- .tox
key: cache-v1-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
docs: &docs
docker:
- image: common


@@ -15,13 +15,13 @@ body:
- type: textarea
attributes:
label: Expected behavior
description: Describe what you expect to happen.
validations:
required: true
- type: textarea
attributes:
label: Actual behavior
description: Describe what actually happens.
validations:
required: true
- type: textarea
@@ -34,13 +34,13 @@ body:
- type: textarea
attributes:
label: Possible Solution
description: Suggest a fix/reason for the bug, or ideas how to implement the addition or change.
validations:
required: false
- type: textarea
attributes:
label: Environment
-description: Run this: `$ python -m eth_utils` and put the results here.
+description: Run `$ python -m eth_utils` and put the results here.
render: shell
validations:
required: false


@@ -4,7 +4,7 @@ body:
- type: textarea
attributes:
label: Description
description: Describe the enhancement that you are proposing.
validations:
required: true
- type: textarea


@@ -21,7 +21,7 @@ body:
attributes:
label: Requirements
description: Write a list of what you want this feature to do.
placeholder: "1."
validations:
required: true
- type: textarea


@@ -8,12 +8,10 @@ Summary of approach.
### To-Do
-[//]: # (Stay ahead of things, add list items here!)
- [ ] Clean up commit history
-[//]: # (See: https://py-libp2p.readthedocs.io/en/latest/contributing.html#pull-requests)
-- [ ] Add entry to the [release notes](https://github.com/libp2p/py-libp2p/blob/main/newsfragments/README.md)
+* [ ] Add entry to the [release notes](https://github.com/libp2p/py-libp2p/blob/main/newsfragments/README.md)
#### Cute Animal Picture
-![put a cute animal picture link inside the parentheses]()
+![put a cute animal picture link inside the parentheses](<>)

.gitignore

@@ -194,4 +194,3 @@ env.bak/
# mkdocs documentation
/site


@@ -13,11 +13,11 @@ repos:
- id: pyupgrade
  args: [--py38-plus]
- repo: https://github.com/psf/black
-  rev: 19.3b0
+  rev: 23.9.1
  hooks:
  - id: black
- repo: https://github.com/PyCQA/flake8
-  rev: 3.7.9
+  rev: 6.1.0
  hooks:
  - id: flake8
    additional_dependencies:
@@ -28,7 +28,7 @@ repos:
  hooks:
  - id: autoflake
- repo: https://github.com/pycqa/isort
-  rev: 4.3.21
+  rev: 5.12.0
  hooks:
  - id: isort
- repo: https://github.com/pycqa/pydocstyle
@@ -43,10 +43,10 @@ repos:
- id: mdformat
  additional_dependencies:
  - mdformat-gfm
-- repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.780
-  hooks:
-  - id: mypy
-    additional_dependencies:
-    - mypy-protobuf
-    exclude: tests/
+# - repo: https://github.com/pre-commit/mirrors-mypy
+#   rev: v1.5.1
+#   hooks:
+#   - id: mypy
+#     additional_dependencies:
+#     - mypy-protobuf
+#     exclude: 'tests/|tests_interop/|crypto/|identity/|pubsub/|insecure/|noise/|security/'


@@ -5,4 +5,3 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use
http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.


@@ -59,7 +59,7 @@ build-docs:
	$(MAKE) -C docs clean
	$(MAKE) -C docs html
	$(MAKE) -C docs doctest

build-docs-ci:
	$(MAKE) -C docs latexpdf
	$(MAKE) -C docs epub

README.md

@@ -9,12 +9,12 @@
[![Matrix](https://img.shields.io/badge/matrix-%23libp2p%3Apermaweb.io-blue.svg)](https://riot.permaweb.io/#/room/#libp2p:permaweb.io)
[![Discord](https://img.shields.io/discord/475789330380488707?color=blueviolet&label=discord)](https://discord.gg/66KBrm2)

<h1 align="center">
  <img width="250" align="center" src="https://github.com/libp2p/py-libp2p/blob/master/assets/py-libp2p-logo.png?raw=true" alt="py-libp2p hex logo" />
</h1>

## WARNING
py-libp2p is an experimental and work-in-progress repo under heavy development. We do not yet recommend using py-libp2p in production environments.

The Python implementation of the libp2p networking stack
@@ -22,9 +22,11 @@ The Python implementation of the libp2p networking stack
Read more in the [documentation on ReadTheDocs](https://py-libp2p.readthedocs.io/). [View the release notes](https://py-libp2p.readthedocs.io/en/latest/release_notes.html).

## Sponsorship
This project is graciously sponsored by the Ethereum Foundation through [Wave 5 of their Grants Program](https://blog.ethereum.org/2019/02/21/ethereum-foundation-grants-program-wave-5/).

## Maintainers
The py-libp2p team consists of:

[@zixuanzh](https://github.com/zixuanzh) [@alexh](https://github.com/alexh) [@stuckinaboot](https://github.com/stuckinaboot) [@robzajac](https://github.com/robzajac) [@carver](https://github.com/carver)
@@ -70,125 +72,114 @@ ptw --onfail "notify-send -t 5000 'Test failure ⚠⚠⚠⚠⚠' 'python 3 test
Note that tests/libp2p/test_libp2p.py contains an end-to-end messaging test between two libp2p hosts, which is the bulk of our proof of concept.
### Release setup

-<<<<<<< HEAD
+\<\<\<\<\<\<\< HEAD
Releases follow the same basic pattern as releases of some tangentially-related projects,
-like Trinity. See [Trinity's release instructions](
-https://trinity-client.readthedocs.io/en/latest/contributing.html#releasing).
+like Trinity. See [Trinity's release instructions](https://trinity-client.readthedocs.io/en/latest/contributing.html#releasing).

## Requirements
-=======
+\=======
To release a new version:
->>>>>>> template
+> > > > > > > template

The protobuf description in this repository was generated by `protoc` at version `3.7.1`.

## Feature Breakdown
py-libp2p aims for conformity with [the standard libp2p modules](https://github.com/libp2p/libp2p/blob/master/REQUIREMENTS.md#libp2p-modules-implementations). Below is a breakdown of the modules we have developed, are developing, and may develop in the future.

-> Legend: :green_apple: Done &nbsp; :lemon: In Progress &nbsp; :tomato: Missing &nbsp; :chestnut: Not planned
+> Legend: :green_apple: Done   :lemon: In Progress   :tomato: Missing   :chestnut: Not planned
| libp2p Node  | Status        |
| ------------ | :-----------: |
| **`libp2p`** | :green_apple: |

| Identify Protocol | Status  |
| ----------------- | :-----: |
| **`Identify`**    | :lemon: |

| Transport Protocols | Status        |
| ------------------- | :-----------: |
| **`TCP`**           | :green_apple: |
| **`UDP`**           | :tomato:      |
| **`WebSockets`**    | :chestnut:    |
| **`UTP`**           | :chestnut:    |
| **`WebRTC`**        | :chestnut:    |
| **`SCTP`**          | :chestnut:    |
| **`Tor`**           | :chestnut:    |
| **`i2p`**           | :chestnut:    |
| **`cjdns`**         | :chestnut:    |
| **`Bluetooth LE`**  | :chestnut:    |
| **`Audio TP`**      | :chestnut:    |
| **`Zerotier`**      | :chestnut:    |
| **`QUIC`**          | :chestnut:    |

| Stream Muxers    | Status        |
| ---------------- | :-----------: |
| **`multiplex`**  | :green_apple: |
| **`yamux`**      | :tomato:      |
| **`benchmarks`** | :chestnut:    |
| **`muxado`**     | :chestnut:    |
| **`spdystream`** | :chestnut:    |
| **`spdy`**       | :chestnut:    |
| **`http2`**      | :chestnut:    |
| **`QUIC`**       | :chestnut:    |

| Protocol Muxers   | Status        |
| ----------------- | :-----------: |
| **`multiselect`** | :green_apple: |

| Switch (Swarm)     | Status        |
| ------------------ | :-----------: |
| **`Switch`**       | :green_apple: |
| **`Dialer stack`** | :green_apple: |

| Peer Discovery       | Status     |
| -------------------- | :--------: |
| **`bootstrap list`** | :tomato:   |
| **`Kademlia DHT`**   | :chestnut: |
| **`mDNS`**           | :chestnut: |
| **`PEX`**            | :chestnut: |
| **`DNS`**            | :chestnut: |

| Content Routing    | Status        |
| ------------------ | :-----------: |
| **`Kademlia DHT`** | :chestnut:    |
| **`floodsub`**     | :green_apple: |
| **`gossipsub`**    | :green_apple: |
| **`PHT`**          | :chestnut:    |

| Peer Routing       | Status        |
| ------------------ | :-----------: |
| **`Kademlia DHT`** | :chestnut:    |
| **`floodsub`**     | :green_apple: |
| **`gossipsub`**    | :green_apple: |
| **`PHT`**          | :chestnut:    |

| NAT Traversal            | Status     |
| ------------------------ | :--------: |
| **`nat-pmp`**            | :chestnut: |
| **`upnp`**               | :chestnut: |
| **`ext addr discovery`** | :chestnut: |
| **`STUN-like`**          | :chestnut: |
| **`line-switch relay`**  | :chestnut: |
| **`pkt-switch relay`**   | :chestnut: |

| Exchange         | Status     |
| ---------------- | :--------: |
| **`HTTP`**       | :chestnut: |
| **`Bitswap`**    | :chestnut: |
| **`Bittorrent`** | :chestnut: |

| Consensus      | Status     |
| -------------- | :--------: |
| **`Paxos`**    | :chestnut: |
| **`Raft`**     | :chestnut: |
| **`PBTF`**     | :chestnut: |
| **`Nakamoto`** | :chestnut: |
## Explanation of Basic Two Node Communication
@@ -199,12 +190,12 @@ _(non-normative, useful for team notes, not a reference)_
Several components of the libp2p stack take part when establishing a connection between two nodes (a minimal sketch follows this list):

1. **Host**: a node in the libp2p network.
1. **Connection**: the layer 3 connection between two nodes in a libp2p network.
1. **Transport**: the component that creates a _Connection_, e.g. TCP, UDP, QUIC, etc.
1. **Streams**: an abstraction on top of a _Connection_ representing parallel conversations about different matters, each of which is identified by a protocol ID. Multiple streams are layered on top of a _Connection_ via the _Multiplexer_.
1. **Multiplexer**: a component that is responsible for wrapping messages sent on a stream with an envelope that identifies the stream they pertain to, normally via an ID. The multiplexer on the other end unwraps the message and routes it internally based on the stream identification.
1. **Secure channel**: optionally establishes a secure, encrypted, and authenticated channel over the _Connection_.
1. **Upgrader**: a component that takes a raw layer 3 connection returned by the _Transport_, and performs the security and multiplexing negotiation to set up a secure, multiplexed channel on top of which _Streams_ can be opened.
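The minimal sketch below ties these components together, using the same public API as the chat and echo examples touched by this commit; the listen address, destination multiaddr, and `/demo/1.0.0` protocol ID are illustrative placeholders.

```python
# A minimal sketch, not a complete program: dial a peer and open a stream.
# The Transport dials, the Upgrader secures and multiplexes the connection,
# and new_stream negotiates the protocol ID over a fresh Stream.
import multiaddr
import trio

from libp2p import new_host
from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.typing import TProtocol

PROTOCOL_ID = TProtocol("/demo/1.0.0")  # illustrative protocol ID


async def dial(destination: str) -> None:
    listen_addr = multiaddr.Multiaddr("/ip4/0.0.0.0/tcp/8001")
    host = new_host()
    async with host.run(listen_addrs=[listen_addr]):
        info = info_from_p2p_addr(multiaddr.Multiaddr(destination))
        await host.connect(info)  # absorbs addresses into the peerstore, then dials
        stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
        await stream.write(b"hello\n")
        await stream.close()


# trio.run(dial, "/ip4/127.0.0.1/tcp/8000/p2p/<destination peer id>")
```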
### Communication between two hosts X and Y


@@ -4,13 +4,21 @@ import sys
import multiaddr
import trio

from libp2p import (
    new_host,
)
from libp2p.network.stream.net_stream_interface import (
    INetStream,
)
from libp2p.peer.peerinfo import (
    info_from_p2p_addr,
)
from libp2p.typing import (
    TProtocol,
)

PROTOCOL_ID = TProtocol("/chat/1.0.0")
MAX_READ_LEN = 2**32 - 1


async def read_data(stream: INetStream) -> None:
@@ -58,7 +66,8 @@ async def run(port: int, destination: str) -> None:
        # Associate the peer with local ip address
        await host.connect(info)

        # Start a stream with the destination.
        # Multiaddress of the destination peer is fetched from the peerstore
        # using 'peerId'.
        stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
        nursery.start_soon(read_data, stream)


@@ -3,11 +3,21 @@ import argparse
import multiaddr
import trio

from libp2p import (
    new_host,
)
from libp2p.crypto.secp256k1 import (
    create_new_key_pair,
)
from libp2p.network.stream.net_stream_interface import (
    INetStream,
)
from libp2p.peer.peerinfo import (
    info_from_p2p_addr,
)
from libp2p.typing import (
    TProtocol,
)

PROTOCOL_ID = TProtocol("/echo/1.0.0")
@@ -36,11 +46,9 @@ async def run(port: int, destination: str, seed: int = None) -> None:
    host = new_host(key_pair=create_new_key_pair(secret))

    async with host.run(listen_addrs=[listen_addr]):
        print(f"I am {host.get_id().to_string()}")

        if not destination:  # its the server
            host.set_stream_handler(PROTOCOL_ID, _echo_stream_handler)

            print(
@@ -59,7 +67,8 @@ async def run(port: int, destination: str, seed: int = None) -> None:
        await host.connect(info)

        # Start a stream with the destination.
        # Multiaddress of the destination peer is fetched from the peerstore
        # using 'peerId'.
        stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])

        msg = b"hi, there!\n"
@@ -99,7 +108,7 @@ def main() -> None:
        "-s",
        "--seed",
        type=int,
        help="provide a seed to the random number generator (e.g. to fix peer IDs across runs)",  # noqa: E501
    )
    args = parser.parse_args()


@@ -1,21 +1,58 @@
from libp2p.crypto.keys import (
    KeyPair,
)
from libp2p.crypto.rsa import (
    create_new_key_pair,
)
from libp2p.host.basic_host import (
    BasicHost,
)
from libp2p.host.host_interface import (
    IHost,
)
from libp2p.host.routed_host import (
    RoutedHost,
)
from libp2p.network.network_interface import (
    INetworkService,
)
from libp2p.network.swarm import (
    Swarm,
)
from libp2p.peer.id import (
    ID,
)
from libp2p.peer.peerstore import (
    PeerStore,
)
from libp2p.peer.peerstore_interface import (
    IPeerStore,
)
from libp2p.routing.interfaces import (
    IPeerRouting,
)
from libp2p.security.insecure.transport import (
    PLAINTEXT_PROTOCOL_ID,
    InsecureTransport,
)
import libp2p.security.secio.transport as secio
from libp2p.stream_muxer.mplex.mplex import (
    MPLEX_PROTOCOL_ID,
    Mplex,
)
from libp2p.transport.tcp.tcp import (
    TCP,
)
from libp2p.transport.typing import (
    TMuxerOptions,
    TSecurityOptions,
)
from libp2p.transport.upgrader import (
    TransportUpgrader,
)
from libp2p.typing import (
    TProtocol,
)


def generate_new_rsa_identity() -> KeyPair:
@@ -42,7 +79,6 @@ def new_swarm(
    :param peerstore_opt: optional peerstore
    :return: return a default swarm instance
    """
    if key_pair is None:
        key_pair = generate_new_rsa_identity()
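A short, hedged usage sketch of the defaults documented above; it assumes `new_swarm` and `generate_new_rsa_identity` are importable from the top-level `libp2p` package, as this `__init__.py` suggests.

```python
# Sketch only: build a swarm from an explicit identity, or rely on the default.
from libp2p import generate_new_rsa_identity, new_swarm

key_pair = generate_new_rsa_identity()
swarm = new_swarm(key_pair=key_pair)  # explicit RSA identity
default_swarm = new_swarm()           # key_pair=None falls back to a fresh RSA identity
```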


@@ -1,8 +1,14 @@
from dataclasses import (
    dataclass,
)
import hmac
from typing import (
    Tuple,
)

from Crypto.Cipher import (
    AES,
)
import Crypto.Util.Counter as Counter
@@ -61,9 +67,11 @@ class MacAndCipher:
def initialize_pair(
    cipher_type: str, hash_type: str, secret: bytes
) -> Tuple[EncryptionParameters, EncryptionParameters]:
    """
    Return a pair of ``Keys`` for use in securing a communications channel
    with authenticated encryption derived from the ``secret`` and using the
    requested ``cipher_type`` and ``hash_type``.
    """
    if cipher_type != "AES-128":
        raise NotImplementedError()
    if hash_type != "SHA256":
@@ -72,7 +80,7 @@ def initialize_pair(
    iv_size = 16
    cipher_key_size = 16
    hmac_key_size = 20
    seed = b"key expansion"
    params_size = iv_size + cipher_key_size + hmac_key_size
    result = bytearray(2 * params_size)
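For orientation, a hedged illustration of calling the function above; the module path is not shown in this hunk, so no import line is given, and the secret value is a placeholder.

```python
# Illustration only: derive the two directions' encryption parameters from a
# shared secret. initialize_pair is the function defined above; "AES-128" and
# "SHA256" are the only options it currently accepts.
import os

shared_secret = os.urandom(32)  # placeholder secret
local_params, remote_params = initialize_pair("AES-128", "SHA256", shared_secret)
```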


@@ -1,13 +1,25 @@
from fastecdsa import (
    keys,
    point,
)
from fastecdsa import curve as curve_types
from fastecdsa.encoding.sec1 import (
    SEC1Encoder,
)

from libp2p.crypto.keys import (
    KeyPair,
    KeyType,
    PrivateKey,
    PublicKey,
)


def infer_local_type(curve: str) -> curve_types.Curve:
    """
    Convert a ``str`` representation of some elliptic curve to a
    representation understood by the backend of this module.
    """
    if curve == "P-256":
        return curve_types.P256
    else:
@@ -61,8 +73,10 @@ class ECCPrivateKey(PrivateKey):
def create_new_key_pair(curve: str) -> KeyPair:
    """
    Return a new ECC keypair with the requested ``curve`` type, e.g.
    "P-256".
    """
    private_key = ECCPrivateKey.new(curve)
    public_key = private_key.get_public_key()
    return KeyPair(private_key, public_key)
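A small usage sketch, assuming `KeyPair` exposes `private_key` and `public_key` attributes in the order its constructor is called above; "P-256" is the only curve `infer_local_type` accepts.

```python
# Sketch: generate an ECC keypair on the supported curve.
from libp2p.crypto.ecc import create_new_key_pair

key_pair = create_new_key_pair("P-256")
private_key, public_key = key_pair.private_key, key_pair.public_key  # assumed attributes
```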


@@ -1,11 +1,23 @@
from Crypto.Hash import (
    SHA256,
)
from nacl.exceptions import (
    BadSignatureError,
)
from nacl.public import PrivateKey as PrivateKeyImpl
from nacl.public import PublicKey as PublicKeyImpl
from nacl.signing import (
    SigningKey,
    VerifyKey,
)
import nacl.utils as utils

from libp2p.crypto.keys import (
    KeyPair,
    KeyType,
    PrivateKey,
    PublicKey,
)


class Ed25519PublicKey(PublicKey):


@@ -1,4 +1,6 @@
from libp2p.exceptions import (
    BaseLibp2pError,
)


class CryptographyError(BaseLibp2pError):
@@ -6,7 +8,7 @@ class CryptographyError(BaseLibp2pError):
class MissingDeserializerError(CryptographyError):
    """
    Raise if the requested deserialization routine is missing for some type
    of cryptographic key.
    """


@@ -1,9 +1,21 @@
from typing import (
    Callable,
    Tuple,
    cast,
)

from fastecdsa.encoding import (
    util,
)

from libp2p.crypto.ecc import (
    ECCPrivateKey,
    ECCPublicKey,
    create_new_key_pair,
)
from libp2p.crypto.keys import (
    PublicKey,
)

SharedKeyGenerator = Callable[[bytes], bytes]


@@ -1,6 +1,14 @@
from abc import (
    ABC,
    abstractmethod,
)
from dataclasses import (
    dataclass,
)
from enum import (
    Enum,
    unique,
)

from .pb import crypto_pb2 as protobuf
@@ -38,8 +46,10 @@ class PublicKey(Key):
    @abstractmethod
    def verify(self, data: bytes, signature: bytes) -> bool:
        """
        Verify that ``signature`` is the cryptographic signature of the hash
        of ``data``.
        """
        ...

    def _serialize_to_protobuf(self) -> protobuf.PublicKey:
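A hypothetical round trip built on this interface; `PrivateKey.sign` is assumed to be the counterpart of `verify` and is not shown in this hunk, so treat the snippet as a sketch rather than the library's documented API.

```python
# Sketch: sign with a concrete key implementation and check it with verify.
from libp2p.crypto.secp256k1 import create_new_key_pair

key_pair = create_new_key_pair(b"\x01" * 32)  # deterministic secret, illustration only
data = b"payload"
signature = key_pair.private_key.sign(data)         # assumed signing API
assert key_pair.public_key.verify(data, signature)  # abstract method defined above
```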


@@ -17,4 +17,4 @@ message PublicKey {
message PrivateKey {
  required KeyType key_type = 1;
  required bytes data = 2;
}


@ -1,56 +1,53 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT! # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/crypto/pb/crypto.proto # source: libp2p/crypto/pb/crypto.proto
import sys import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf.internal import enum_type_wrapper from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message from google.protobuf import message as _message
from google.protobuf import reflection as _reflection from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports) # @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default() _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor( DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/crypto/pb/crypto.proto', name="libp2p/crypto/pb/crypto.proto",
package='crypto.pb', package="crypto.pb",
syntax='proto2', syntax="proto2",
serialized_options=None, serialized_options=None,
serialized_pb=_b('\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb\"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c\"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03') serialized_pb=_b(
'\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03'
),
) )
_KEYTYPE = _descriptor.EnumDescriptor( _KEYTYPE = _descriptor.EnumDescriptor(
name='KeyType', name="KeyType",
full_name='crypto.pb.KeyType', full_name="crypto.pb.KeyType",
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
values=[ values=[
_descriptor.EnumValueDescriptor( _descriptor.EnumValueDescriptor(
name='RSA', index=0, number=0, name="RSA", index=0, number=0, serialized_options=None, type=None
serialized_options=None, ),
type=None), _descriptor.EnumValueDescriptor(
_descriptor.EnumValueDescriptor( name="Ed25519", index=1, number=1, serialized_options=None, type=None
name='Ed25519', index=1, number=1, ),
serialized_options=None, _descriptor.EnumValueDescriptor(
type=None), name="Secp256k1", index=2, number=2, serialized_options=None, type=None
_descriptor.EnumValueDescriptor( ),
name='Secp256k1', index=2, number=2, _descriptor.EnumValueDescriptor(
serialized_options=None, name="ECDSA", index=3, number=3, serialized_options=None, type=None
type=None), ),
_descriptor.EnumValueDescriptor( ],
name='ECDSA', index=3, number=3, containing_type=None,
serialized_options=None, serialized_options=None,
type=None), serialized_start=175,
], serialized_end=232,
containing_type=None,
serialized_options=None,
serialized_start=175,
serialized_end=232,
) )
_sym_db.RegisterEnumDescriptor(_KEYTYPE) _sym_db.RegisterEnumDescriptor(_KEYTYPE)
@ -61,101 +58,146 @@ Secp256k1 = 2
ECDSA = 3 ECDSA = 3
_PUBLICKEY = _descriptor.Descriptor( _PUBLICKEY = _descriptor.Descriptor(
name='PublicKey', name="PublicKey",
full_name='crypto.pb.PublicKey', full_name="crypto.pb.PublicKey",
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='key_type', full_name='crypto.pb.PublicKey.key_type', index=0, name="key_type",
number=1, type=14, cpp_type=8, label=2, full_name="crypto.pb.PublicKey.key_type",
has_default_value=False, default_value=0, index=0,
message_type=None, enum_type=None, containing_type=None, number=1,
is_extension=False, extension_scope=None, type=14,
serialized_options=None, file=DESCRIPTOR), cpp_type=8,
_descriptor.FieldDescriptor( label=2,
name='data', full_name='crypto.pb.PublicKey.data', index=1, has_default_value=False,
number=2, type=12, cpp_type=9, label=2, default_value=0,
has_default_value=False, default_value=_b(""), message_type=None,
message_type=None, enum_type=None, containing_type=None, enum_type=None,
is_extension=False, extension_scope=None, containing_type=None,
serialized_options=None, file=DESCRIPTOR), is_extension=False,
], extension_scope=None,
extensions=[ serialized_options=None,
], file=DESCRIPTOR,
nested_types=[], ),
enum_types=[ _descriptor.FieldDescriptor(
], name="data",
serialized_options=None, full_name="crypto.pb.PublicKey.data",
is_extendable=False, index=1,
syntax='proto2', number=2,
extension_ranges=[], type=12,
oneofs=[ cpp_type=9,
], label=2,
serialized_start=44, has_default_value=False,
serialized_end=107, default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=44,
serialized_end=107,
) )
_PRIVATEKEY = _descriptor.Descriptor( _PRIVATEKEY = _descriptor.Descriptor(
name='PrivateKey', name="PrivateKey",
full_name='crypto.pb.PrivateKey', full_name="crypto.pb.PrivateKey",
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='key_type', full_name='crypto.pb.PrivateKey.key_type', index=0, name="key_type",
number=1, type=14, cpp_type=8, label=2, full_name="crypto.pb.PrivateKey.key_type",
has_default_value=False, default_value=0, index=0,
message_type=None, enum_type=None, containing_type=None, number=1,
is_extension=False, extension_scope=None, type=14,
serialized_options=None, file=DESCRIPTOR), cpp_type=8,
_descriptor.FieldDescriptor( label=2,
name='data', full_name='crypto.pb.PrivateKey.data', index=1, has_default_value=False,
number=2, type=12, cpp_type=9, label=2, default_value=0,
has_default_value=False, default_value=_b(""), message_type=None,
message_type=None, enum_type=None, containing_type=None, enum_type=None,
is_extension=False, extension_scope=None, containing_type=None,
serialized_options=None, file=DESCRIPTOR), is_extension=False,
], extension_scope=None,
extensions=[ serialized_options=None,
], file=DESCRIPTOR,
nested_types=[], ),
enum_types=[ _descriptor.FieldDescriptor(
], name="data",
serialized_options=None, full_name="crypto.pb.PrivateKey.data",
is_extendable=False, index=1,
syntax='proto2', number=2,
extension_ranges=[], type=12,
oneofs=[ cpp_type=9,
], label=2,
serialized_start=109, has_default_value=False,
serialized_end=173, default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=109,
serialized_end=173,
) )
_PUBLICKEY.fields_by_name['key_type'].enum_type = _KEYTYPE _PUBLICKEY.fields_by_name["key_type"].enum_type = _KEYTYPE
_PRIVATEKEY.fields_by_name['key_type'].enum_type = _KEYTYPE _PRIVATEKEY.fields_by_name["key_type"].enum_type = _KEYTYPE
DESCRIPTOR.message_types_by_name['PublicKey'] = _PUBLICKEY DESCRIPTOR.message_types_by_name["PublicKey"] = _PUBLICKEY
DESCRIPTOR.message_types_by_name['PrivateKey'] = _PRIVATEKEY DESCRIPTOR.message_types_by_name["PrivateKey"] = _PRIVATEKEY
DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE DESCRIPTOR.enum_types_by_name["KeyType"] = _KEYTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR) _sym_db.RegisterFileDescriptor(DESCRIPTOR)
PublicKey = _reflection.GeneratedProtocolMessageType('PublicKey', (_message.Message,), { PublicKey = _reflection.GeneratedProtocolMessageType(
'DESCRIPTOR' : _PUBLICKEY, "PublicKey",
'__module__' : 'libp2p.crypto.pb.crypto_pb2' (_message.Message,),
# @@protoc_insertion_point(class_scope:crypto.pb.PublicKey) {
}) "DESCRIPTOR": _PUBLICKEY,
"__module__": "libp2p.crypto.pb.crypto_pb2"
# @@protoc_insertion_point(class_scope:crypto.pb.PublicKey)
},
)
_sym_db.RegisterMessage(PublicKey) _sym_db.RegisterMessage(PublicKey)
PrivateKey = _reflection.GeneratedProtocolMessageType('PrivateKey', (_message.Message,), { PrivateKey = _reflection.GeneratedProtocolMessageType(
'DESCRIPTOR' : _PRIVATEKEY, "PrivateKey",
'__module__' : 'libp2p.crypto.pb.crypto_pb2' (_message.Message,),
# @@protoc_insertion_point(class_scope:crypto.pb.PrivateKey) {
}) "DESCRIPTOR": _PRIVATEKEY,
"__module__": "libp2p.crypto.pb.crypto_pb2"
# @@protoc_insertion_point(class_scope:crypto.pb.PrivateKey)
},
)
_sym_db.RegisterMessage(PrivateKey) _sym_db.RegisterMessage(PrivateKey)


@ -19,7 +19,6 @@ from typing_extensions import (
Literal as typing_extensions___Literal, Literal as typing_extensions___Literal,
) )
class KeyType(int): class KeyType(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod @classmethod
@ -36,6 +35,7 @@ class KeyType(int):
Ed25519 = typing___cast(KeyType, 1) Ed25519 = typing___cast(KeyType, 1)
Secp256k1 = typing___cast(KeyType, 2) Secp256k1 = typing___cast(KeyType, 2)
ECDSA = typing___cast(KeyType, 3) ECDSA = typing___cast(KeyType, 3)
RSA = typing___cast(KeyType, 0) RSA = typing___cast(KeyType, 0)
Ed25519 = typing___cast(KeyType, 1) Ed25519 = typing___cast(KeyType, 1)
Secp256k1 = typing___cast(KeyType, 2) Secp256k1 = typing___cast(KeyType, 2)
@ -43,42 +43,72 @@ ECDSA = typing___cast(KeyType, 3)
class PublicKey(google___protobuf___message___Message): class PublicKey(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
key_type = ... # type: KeyType key_type = ... # type: KeyType
data = ... # type: bytes data = ... # type: bytes
def __init__(self, def __init__(
self,
*, *,
key_type : KeyType, key_type: KeyType,
data : bytes, data: bytes,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> PublicKey: ... def FromString(cls, s: bytes) -> PublicKey: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ... self, field_name: typing_extensions___Literal["data", "key_type"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["data", "key_type"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ... self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> None: ...
class PrivateKey(google___protobuf___message___Message): class PrivateKey(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
key_type = ... # type: KeyType key_type = ... # type: KeyType
data = ... # type: bytes data = ... # type: bytes
def __init__(self, def __init__(
self,
*, *,
key_type : KeyType, key_type: KeyType,
data : bytes, data: bytes,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> PrivateKey: ... def FromString(cls, s: bytes) -> PrivateKey: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ... self, field_name: typing_extensions___Literal["data", "key_type"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["data", "key_type"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ... self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", b"data", "key_type", b"key_type"
],
) -> None: ...


@@ -1,9 +1,20 @@
from Crypto.Hash import (
    SHA256,
)
import Crypto.PublicKey.RSA as RSA
from Crypto.PublicKey.RSA import (
    RsaKey,
)
from Crypto.Signature import (
    pkcs1_15,
)

from libp2p.crypto.keys import (
    KeyPair,
    KeyType,
    PrivateKey,
    PublicKey,
)


class RSAPublicKey(PublicKey):


@@ -1,6 +1,11 @@
import coincurve

from libp2p.crypto.keys import (
    KeyPair,
    KeyType,
    PrivateKey,
    PublicKey,
)


class Secp256k1PublicKey(PublicKey):


@@ -1,8 +1,22 @@
from libp2p.crypto.ed25519 import (
    Ed25519PrivateKey,
    Ed25519PublicKey,
)
from libp2p.crypto.exceptions import (
    MissingDeserializerError,
)
from libp2p.crypto.keys import (
    KeyType,
    PrivateKey,
    PublicKey,
)
from libp2p.crypto.rsa import (
    RSAPublicKey,
)
from libp2p.crypto.secp256k1 import (
    Secp256k1PrivateKey,
    Secp256k1PublicKey,
)

key_type_to_public_key_deserializer = {
    KeyType.Secp256k1.value: Secp256k1PublicKey.from_bytes,


@@ -1,28 +1,70 @@
import logging
from typing import (
    TYPE_CHECKING,
    AsyncIterator,
    List,
    Sequence,
)

from async_generator import (
    asynccontextmanager,
)
from async_service import (
    background_trio_service,
)
import multiaddr

from libp2p.crypto.keys import (
    PrivateKey,
    PublicKey,
)
from libp2p.host.defaults import (
    get_default_protocols,
)
from libp2p.host.exceptions import (
    StreamFailure,
)
from libp2p.network.network_interface import (
    INetworkService,
)
from libp2p.network.stream.net_stream_interface import (
    INetStream,
)
from libp2p.peer.id import (
    ID,
)
from libp2p.peer.peerinfo import (
    PeerInfo,
)
from libp2p.peer.peerstore_interface import (
    IPeerStore,
)
from libp2p.protocol_muxer.exceptions import (
    MultiselectClientError,
    MultiselectError,
)
from libp2p.protocol_muxer.multiselect import (
    Multiselect,
)
from libp2p.protocol_muxer.multiselect_client import (
    MultiselectClient,
)
from libp2p.protocol_muxer.multiselect_communicator import (
    MultiselectCommunicator,
)
from libp2p.typing import (
    StreamHandlerFn,
    TProtocol,
)

from .host_interface import (
    IHost,
)

if TYPE_CHECKING:
    from collections import (
        OrderedDict,
    )

# Upon host creation, host takes in options,
# including the list of addresses on which to listen.
@@ -108,7 +150,7 @@ class BasicHost(IHost):
        self, listen_addrs: Sequence[multiaddr.Multiaddr]
    ) -> AsyncIterator[None]:
        """
        Run the host instance and listen to ``listen_addrs``.

        :param listen_addrs: a sequence of multiaddrs that we want to listen to
        """
@@ -121,7 +163,7 @@ class BasicHost(IHost):
        self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
    ) -> None:
        """
        Set stream handler for given `protocol_id`

        :param protocol_id: protocol id used on stream
        :param stream_handler: a stream handler function
@@ -136,7 +178,6 @@ class BasicHost(IHost):
        :param protocol_ids: available protocol ids to use for stream
        :return: stream: new stream created
        """
        net_stream = await self._network.new_stream(peer_id)

        # Perform protocol muxing to determine protocol to use
@@ -154,7 +195,7 @@ class BasicHost(IHost):
    async def connect(self, peer_info: PeerInfo) -> None:
        """
        Ensure there is a connection between this host and the peer
        with given `peer_info.peer_id`. connect will absorb the addresses in
        peer_info into its internal peerstore. If there is not an active
        connection, connect will issue a dial, and block until a connection is


@@ -1,14 +1,27 @@
from collections import (
    OrderedDict,
)
from typing import (
    TYPE_CHECKING,
)

from libp2p.host.host_interface import (
    IHost,
)
from libp2p.host.ping import (
    handle_ping,
)
from libp2p.host.ping import ID as PingID
from libp2p.identity.identify.protocol import (
    identify_handler_for,
)
from libp2p.identity.identify.protocol import ID as IdentifyID

if TYPE_CHECKING:
    from libp2p.typing import (
        StreamHandlerFn,
        TProtocol,
    )


def get_default_protocols(host: IHost) -> "OrderedDict[TProtocol, StreamHandlerFn]":
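An illustrative check of what the function above returns, using only the imports shown in this file; it assumes a default host registers exactly these handlers.

```python
# Sketch: a new host's default handler table is keyed by the ping and identify IDs.
from libp2p import new_host
from libp2p.host.defaults import get_default_protocols
from libp2p.host.ping import ID as PingID
from libp2p.identity.identify.protocol import ID as IdentifyID

host = new_host()
defaults = get_default_protocols(host)
assert PingID in defaults and IdentifyID in defaults
```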


@@ -1,4 +1,6 @@
from libp2p.exceptions import (
    BaseLibp2pError,
)


class HostException(BaseLibp2pError):


@@ -1,14 +1,36 @@
from abc import (
    ABC,
    abstractmethod,
)
from typing import (
    Any,
    AsyncContextManager,
    List,
    Sequence,
)

import multiaddr

from libp2p.crypto.keys import (
    PrivateKey,
    PublicKey,
)
from libp2p.network.network_interface import (
    INetworkService,
)
from libp2p.network.stream.net_stream_interface import (
    INetStream,
)
from libp2p.peer.id import (
    ID,
)
from libp2p.peer.peerinfo import (
    PeerInfo,
)
from libp2p.typing import (
    StreamHandlerFn,
    TProtocol,
)


class IHost(ABC):
@@ -54,7 +76,7 @@ class IHost(ABC):
        self, listen_addrs: Sequence[multiaddr.Multiaddr]
    ) -> AsyncContextManager[None]:
        """
        Run the host instance and listen to ``listen_addrs``.

        :param listen_addrs: a sequence of multiaddrs that we want to listen to
        """
@@ -64,7 +86,7 @@ class IHost(ABC):
        self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
    ) -> None:
        """
        Set stream handler for host.

        :param protocol_id: protocol id used on stream
        :param stream_handler: a stream handler function
@@ -85,7 +107,7 @@ class IHost(ABC):
    @abstractmethod
    async def connect(self, peer_info: PeerInfo) -> None:
        """
        Ensure there is a connection between this host and the peer
        with given peer_info.peer_id. connect will absorb the addresses in
        peer_info into its internal peerstore. If there is not an active
        connection, connect will issue a dial, and block until a connection is
connection, connect will issue a dial, and block until a connection is connection, connect will issue a dial, and block until a connection is


@@ -2,10 +2,18 @@ import logging
import trio

from libp2p.network.stream.exceptions import (
    StreamClosed,
    StreamEOF,
    StreamReset,
)
from libp2p.network.stream.net_stream_interface import (
    INetStream,
)
from libp2p.peer.id import ID as PeerID
from libp2p.typing import (
    TProtocol,
)

ID = TProtocol("/ipfs/ping/1.0.0")
PING_LENGTH = 32
@@ -15,8 +23,9 @@ logger = logging.getLogger("libp2p.host.ping")
async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
    """
    Return a boolean indicating if we expect more pings from the peer at ``peer_id``.
    """
    try:
        with trio.fail_after(RESP_TIMEOUT):
            payload = await stream.read(PING_LENGTH)
@@ -46,8 +55,10 @@ async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
async def handle_ping(stream: INetStream) -> None:
    """
    Respond to incoming ping requests until one side errors
    or closes the ``stream``.
    """
    peer_id = stream.muxed_conn.peer_id

    while True:
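A hedged client-side counterpart to `handle_ping`, assuming `host` is an already-running host connected to `peer_id` and that `INetStream` supports `write`, `read`, and `close` as used in the examples above.

```python
# Sketch: send PING_LENGTH random bytes on the ping protocol and expect an echo.
import secrets


async def ping_once(host, peer_id) -> bool:
    stream = await host.new_stream(peer_id, [ID])  # ID == "/ipfs/ping/1.0.0"
    payload = secrets.token_bytes(PING_LENGTH)
    await stream.write(payload)
    echoed = await stream.read(PING_LENGTH)
    await stream.close()
    return echoed == payload
```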


@@ -1,8 +1,18 @@
from libp2p.host.basic_host import (
    BasicHost,
)
from libp2p.host.exceptions import (
    ConnectionFailure,
)
from libp2p.network.network_interface import (
    INetworkService,
)
from libp2p.peer.peerinfo import (
    PeerInfo,
)
from libp2p.routing.interfaces import (
    IPeerRouting,
)


# RoutedHost is a p2p Host that includes a routing system.
@@ -16,7 +26,7 @@ class RoutedHost(BasicHost):
    async def connect(self, peer_info: PeerInfo) -> None:
        """
        Ensure there is a connection between this host and the peer
        with given `peer_info.peer_id`. See (basic_host).connect for more
        information.
@@ -26,7 +36,8 @@ class RoutedHost(BasicHost):
        :param peer_info: peer_info of the peer we want to connect to
        :type peer_info: peer.peerinfo.PeerInfo
        """
        # check if we were given some addresses, otherwise, find some with the
        # routing system.
        if not peer_info.addrs:
            found_peer_info = await self._router.find_peer(peer_info.peer_id)
            if not found_peer_info:
if not found_peer_info: if not found_peer_info:
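The hunk above captures the `RoutedHost.connect` fallback: when the caller supplies no addresses, ask the routing system for some before dialing. A compressed sketch of that control flow, where `basic_connect` stands in for the inherited `BasicHost.connect` and `router` for an `IPeerRouting` implementation (both names are illustrative):

```python
async def routed_connect(basic_connect, router, peer_info):
    """Sketch of the address-lookup fallback shown in the hunk above.

    ``basic_connect`` and ``router`` are placeholders for the inherited
    connect coroutine and a peer-routing implementation; the real method
    presumably raises ``ConnectionFailure`` (imported above) on lookup failure.
    """
    if not peer_info.addrs:
        # No addresses given: ask the routing system for this peer.
        found_peer_info = await router.find_peer(peer_info.peer_id)
        if not found_peer_info:
            raise ConnectionError(f"unable to find addresses for {peer_info.peer_id}")
        peer_info = found_peer_info
    await basic_connect(peer_info)
```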

@ -9,4 +9,4 @@ message Identify {
repeated bytes listen_addrs = 2; repeated bytes listen_addrs = 2;
optional bytes observed_addr = 4; optional bytes observed_addr = 4;
repeated string protocols = 3; repeated string protocols = 3;
} }

@ -1,104 +1,170 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT! # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/identity/identify/pb/identify.proto # source: libp2p/identity/identify/pb/identify.proto
import sys import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message from google.protobuf import message as _message
from google.protobuf import reflection as _reflection from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports) # @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default() _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor( DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/identity/identify/pb/identify.proto', name="libp2p/identity/identify/pb/identify.proto",
package='identify.pb', package="identify.pb",
syntax='proto2', syntax="proto2",
serialized_options=None, serialized_options=None,
serialized_pb=_b('\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb\"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t') serialized_pb=_b(
'\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t'
),
) )
_IDENTIFY = _descriptor.Descriptor( _IDENTIFY = _descriptor.Descriptor(
name='Identify', name="Identify",
full_name='identify.pb.Identify', full_name="identify.pb.Identify",
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='protocol_version', full_name='identify.pb.Identify.protocol_version', index=0, name="protocol_version",
number=5, type=9, cpp_type=9, label=1, full_name="identify.pb.Identify.protocol_version",
has_default_value=False, default_value=_b("").decode('utf-8'), index=0,
message_type=None, enum_type=None, containing_type=None, number=5,
is_extension=False, extension_scope=None, type=9,
serialized_options=None, file=DESCRIPTOR), cpp_type=9,
_descriptor.FieldDescriptor( label=1,
name='agent_version', full_name='identify.pb.Identify.agent_version', index=1, has_default_value=False,
number=6, type=9, cpp_type=9, label=1, default_value=_b("").decode("utf-8"),
has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None,
message_type=None, enum_type=None, containing_type=None, enum_type=None,
is_extension=False, extension_scope=None, containing_type=None,
serialized_options=None, file=DESCRIPTOR), is_extension=False,
_descriptor.FieldDescriptor( extension_scope=None,
name='public_key', full_name='identify.pb.Identify.public_key', index=2, serialized_options=None,
number=1, type=12, cpp_type=9, label=1, file=DESCRIPTOR,
has_default_value=False, default_value=_b(""), ),
message_type=None, enum_type=None, containing_type=None, _descriptor.FieldDescriptor(
is_extension=False, extension_scope=None, name="agent_version",
serialized_options=None, file=DESCRIPTOR), full_name="identify.pb.Identify.agent_version",
_descriptor.FieldDescriptor( index=1,
name='listen_addrs', full_name='identify.pb.Identify.listen_addrs', index=3, number=6,
number=2, type=12, cpp_type=9, label=3, type=9,
has_default_value=False, default_value=[], cpp_type=9,
message_type=None, enum_type=None, containing_type=None, label=1,
is_extension=False, extension_scope=None, has_default_value=False,
serialized_options=None, file=DESCRIPTOR), default_value=_b("").decode("utf-8"),
_descriptor.FieldDescriptor( message_type=None,
name='observed_addr', full_name='identify.pb.Identify.observed_addr', index=4, enum_type=None,
number=4, type=12, cpp_type=9, label=1, containing_type=None,
has_default_value=False, default_value=_b(""), is_extension=False,
message_type=None, enum_type=None, containing_type=None, extension_scope=None,
is_extension=False, extension_scope=None, serialized_options=None,
serialized_options=None, file=DESCRIPTOR), file=DESCRIPTOR,
_descriptor.FieldDescriptor( ),
name='protocols', full_name='identify.pb.Identify.protocols', index=5, _descriptor.FieldDescriptor(
number=3, type=9, cpp_type=9, label=3, name="public_key",
has_default_value=False, default_value=[], full_name="identify.pb.Identify.public_key",
message_type=None, enum_type=None, containing_type=None, index=2,
is_extension=False, extension_scope=None, number=1,
serialized_options=None, file=DESCRIPTOR), type=12,
], cpp_type=9,
extensions=[ label=1,
], has_default_value=False,
nested_types=[], default_value=_b(""),
enum_types=[ message_type=None,
], enum_type=None,
serialized_options=None, containing_type=None,
is_extendable=False, is_extension=False,
syntax='proto2', extension_scope=None,
extension_ranges=[], serialized_options=None,
oneofs=[ file=DESCRIPTOR,
], ),
serialized_start=60, _descriptor.FieldDescriptor(
serialized_end=203, name="listen_addrs",
full_name="identify.pb.Identify.listen_addrs",
index=3,
number=2,
type=12,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="observed_addr",
full_name="identify.pb.Identify.observed_addr",
index=4,
number=4,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b(""),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="protocols",
full_name="identify.pb.Identify.protocols",
index=5,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=60,
serialized_end=203,
) )
DESCRIPTOR.message_types_by_name['Identify'] = _IDENTIFY DESCRIPTOR.message_types_by_name["Identify"] = _IDENTIFY
_sym_db.RegisterFileDescriptor(DESCRIPTOR) _sym_db.RegisterFileDescriptor(DESCRIPTOR)
Identify = _reflection.GeneratedProtocolMessageType('Identify', (_message.Message,), { Identify = _reflection.GeneratedProtocolMessageType(
'DESCRIPTOR' : _IDENTIFY, "Identify",
'__module__' : 'libp2p.identity.identify.pb.identify_pb2' (_message.Message,),
# @@protoc_insertion_point(class_scope:identify.pb.Identify) {
}) "DESCRIPTOR": _IDENTIFY,
"__module__": "libp2p.identity.identify.pb.identify_pb2"
# @@protoc_insertion_point(class_scope:identify.pb.Identify)
},
)
_sym_db.RegisterMessage(Identify) _sym_db.RegisterMessage(Identify)

@ -22,32 +22,79 @@ from typing_extensions import (
Literal as typing_extensions___Literal, Literal as typing_extensions___Literal,
) )
class Identify(google___protobuf___message___Message): class Identify(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
protocol_version = ... # type: typing___Text protocol_version = ... # type: typing___Text
agent_version = ... # type: typing___Text agent_version = ... # type: typing___Text
public_key = ... # type: bytes public_key = ... # type: bytes
listen_addrs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] listen_addrs = (
observed_addr = ... # type: bytes ...
protocols = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] ) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
observed_addr = ... # type: bytes
protocols = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self, def __init__(
self,
*, *,
protocol_version : typing___Optional[typing___Text] = None, protocol_version: typing___Optional[typing___Text] = None,
agent_version : typing___Optional[typing___Text] = None, agent_version: typing___Optional[typing___Text] = None,
public_key : typing___Optional[bytes] = None, public_key: typing___Optional[bytes] = None,
listen_addrs : typing___Optional[typing___Iterable[bytes]] = None, listen_addrs: typing___Optional[typing___Iterable[bytes]] = None,
observed_addr : typing___Optional[bytes] = None, observed_addr: typing___Optional[bytes] = None,
protocols : typing___Optional[typing___Iterable[typing___Text]] = None, protocols: typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> Identify: ... def FromString(cls, s: bytes) -> Identify: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"agent_version",u"observed_addr",u"protocol_version",u"public_key"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",u"listen_addrs",u"observed_addr",u"protocol_version",u"protocols",u"public_key"]) -> None: ... self,
field_name: typing_extensions___Literal[
"agent_version", "observed_addr", "protocol_version", "public_key"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"agent_version",
"listen_addrs",
"observed_addr",
"protocol_version",
"protocols",
"public_key",
],
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"public_key",b"public_key"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"listen_addrs",b"listen_addrs",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"protocols",b"protocols",u"public_key",b"public_key"]) -> None: ... self,
field_name: typing_extensions___Literal[
"agent_version",
b"agent_version",
"observed_addr",
b"observed_addr",
"protocol_version",
b"protocol_version",
"public_key",
b"public_key",
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"agent_version",
b"agent_version",
"listen_addrs",
b"listen_addrs",
"observed_addr",
b"observed_addr",
"protocol_version",
b"protocol_version",
"protocols",
b"protocols",
"public_key",
b"public_key",
],
) -> None: ...

@ -1,13 +1,26 @@
import logging import logging
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from libp2p.host.host_interface import IHost from libp2p.host.host_interface import (
from libp2p.network.stream.exceptions import StreamClosed IHost,
from libp2p.network.stream.net_stream_interface import INetStream )
from libp2p.typing import StreamHandlerFn, TProtocol from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .pb.identify_pb2 import Identify from .pb.identify_pb2 import (
Identify,
)
ID = TProtocol("/ipfs/id/1.0.0") ID = TProtocol("/ipfs/id/1.0.0")
PROTOCOL_VERSION = "ipfs/0.1.0" PROTOCOL_VERSION = "ipfs/0.1.0"

@ -1,4 +1,7 @@
from abc import ABC, abstractmethod from abc import (
ABC,
abstractmethod,
)
class Closer(ABC): class Closer(ABC):

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError from libp2p.exceptions import (
BaseLibp2pError,
)
class IOException(BaseLibp2pError): class IOException(BaseLibp2pError):

@ -5,13 +5,26 @@ from that repo: "a simple package to r/w length-delimited slices."
NOTE: currently missing the capability to indicate lengths by the "varint" method. NOTE: currently missing the capability to indicate lengths by the "varint" method.
""" """
from abc import abstractmethod from abc import (
abstractmethod,
)
from libp2p.io.abc import MsgReadWriteCloser, Reader, ReadWriteCloser from libp2p.io.abc import (
from libp2p.io.utils import read_exactly MsgReadWriteCloser,
from libp2p.utils import decode_uvarint_from_stream, encode_varint_prefixed Reader,
ReadWriteCloser,
)
from libp2p.io.utils import (
read_exactly,
)
from libp2p.utils import (
decode_uvarint_from_stream,
encode_varint_prefixed,
)
from .exceptions import MessageTooLarge from .exceptions import (
MessageTooLarge,
)
BYTE_ORDER = "big" BYTE_ORDER = "big"
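This module is described above as reading and writing length-delimited slices (with varint prefixes noted as not yet supported), using big-endian byte order. A minimal sketch of that framing for a fully buffered message, assuming a 4-byte fixed prefix width since the module's actual width is not visible in this hunk:

```python
BYTE_ORDER = "big"
LENGTH_PREFIX_BYTES = 4  # assumed prefix width, for illustration only


def encode_msg(data: bytes) -> bytes:
    """Prefix ``data`` with its big-endian length."""
    return len(data).to_bytes(LENGTH_PREFIX_BYTES, BYTE_ORDER) + data


def decode_msg(framed: bytes) -> bytes:
    """Recover a single message from bytes produced by ``encode_msg``."""
    length = int.from_bytes(framed[:LENGTH_PREFIX_BYTES], BYTE_ORDER)
    return framed[LENGTH_PREFIX_BYTES : LENGTH_PREFIX_BYTES + length]
```

The module itself works against stream readers (`read_exactly` on a `Reader`) and enforces a `MessageTooLarge` bound, both of which are omitted here.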

@ -2,8 +2,12 @@ import logging
import trio import trio
from libp2p.io.abc import ReadWriteCloser from libp2p.io.abc import (
from libp2p.io.exceptions import IOException ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
logger = logging.getLogger("libp2p.io.trio") logger = logging.getLogger("libp2p.io.trio")

@ -1,5 +1,9 @@
from libp2p.io.abc import Reader from libp2p.io.abc import (
from libp2p.io.exceptions import IncompleteReadError Reader,
)
from libp2p.io.exceptions import (
IncompleteReadError,
)
DEFAULT_RETRY_READ_COUNT = 100 DEFAULT_RETRY_READ_COUNT = 100

@ -1,4 +1,6 @@
from libp2p.io.exceptions import IOException from libp2p.io.exceptions import (
IOException,
)
class RawConnError(IOException): class RawConnError(IOException):

@ -1,11 +1,21 @@
from abc import abstractmethod from abc import (
from typing import Tuple abstractmethod,
)
from typing import (
Tuple,
)
import trio import trio
from libp2p.io.abc import Closer from libp2p.io.abc import (
from libp2p.network.stream.net_stream_interface import INetStream Closer,
from libp2p.stream_muxer.abc import IMuxedConn )
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
class INetConn(Closer): class INetConn(Closer):

@ -1,8 +1,16 @@
from libp2p.io.abc import ReadWriteCloser from libp2p.io.abc import (
from libp2p.io.exceptions import IOException ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
from .exceptions import RawConnError from .exceptions import (
from .raw_connection_interface import IRawConnection RawConnError,
)
from .raw_connection_interface import (
IRawConnection,
)
class RawConnection(IRawConnection): class RawConnection(IRawConnection):

@ -1,4 +1,6 @@
from libp2p.io.abc import ReadWriteCloser from libp2p.io.abc import (
ReadWriteCloser,
)
class IRawConnection(ReadWriteCloser): class IRawConnection(ReadWriteCloser):

@ -1,11 +1,24 @@
from typing import TYPE_CHECKING, Set, Tuple from typing import (
TYPE_CHECKING,
Set,
Tuple,
)
import trio import trio
from libp2p.network.connection.net_connection_interface import INetConn from libp2p.network.connection.net_connection_interface import (
from libp2p.network.stream.net_stream import NetStream INetConn,
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream )
from libp2p.stream_muxer.exceptions import MuxedConnUnavailable from libp2p.network.stream.net_stream import (
NetStream,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import (
MuxedConnUnavailable,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from libp2p.network.swarm import Swarm # noqa: F401 from libp2p.network.swarm import Swarm # noqa: F401
@ -48,8 +61,8 @@ class SwarmConn(INetConn):
# We *could* optimize this but it really isn't worth it. # We *could* optimize this but it really isn't worth it.
for stream in self.streams.copy(): for stream in self.streams.copy():
await stream.reset() await stream.reset()
# Force context switch for stream handlers to process the stream reset event we just emit # Force context switch for stream handlers to process the stream reset event we
# before we cancel the stream handler tasks. # just emit before we cancel the stream handler tasks.
await trio.sleep(0.1) await trio.sleep(0.1)
await self._notify_disconnected() await self._notify_disconnected()
@ -63,13 +76,15 @@ class SwarmConn(INetConn):
except MuxedConnUnavailable: except MuxedConnUnavailable:
await self.close() await self.close()
break break
# Asynchronously handle the accepted stream, to avoid blocking the next stream. # Asynchronously handle the accepted stream, to avoid blocking
# the next stream.
nursery.start_soon(self._handle_muxed_stream, stream) nursery.start_soon(self._handle_muxed_stream, stream)
async def _handle_muxed_stream(self, muxed_stream: IMuxedStream) -> None: async def _handle_muxed_stream(self, muxed_stream: IMuxedStream) -> None:
net_stream = await self._add_stream(muxed_stream) net_stream = await self._add_stream(muxed_stream)
try: try:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
await self.swarm.common_stream_handler(net_stream) # type: ignore await self.swarm.common_stream_handler(net_stream) # type: ignore
finally: finally:
# Whether or not `common_stream_handler` succeeds, remove the stream. # Whether or not `common_stream_handler` succeeds, remove the stream.

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError from libp2p.exceptions import (
BaseLibp2pError,
)
class SwarmException(BaseLibp2pError): class SwarmException(BaseLibp2pError):

@ -1,23 +1,45 @@
from abc import ABC, abstractmethod from abc import (
from typing import TYPE_CHECKING, Dict, Sequence ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
Dict,
Sequence,
)
from async_service import ServiceAPI from async_service import (
from multiaddr import Multiaddr ServiceAPI,
)
from multiaddr import (
Multiaddr,
)
from libp2p.network.connection.net_connection_interface import INetConn from libp2p.network.connection.net_connection_interface import (
from libp2p.peer.id import ID INetConn,
from libp2p.peer.peerstore_interface import IPeerStore )
from libp2p.transport.listener_interface import IListener from libp2p.peer.id import (
from libp2p.typing import StreamHandlerFn ID,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.transport.listener_interface import (
IListener,
)
from libp2p.typing import (
StreamHandlerFn,
)
from .stream.net_stream_interface import INetStream from .stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from .notifee_interface import INotifee # noqa: F401 from .notifee_interface import INotifee # noqa: F401
class INetwork(ABC): class INetwork(ABC):
peerstore: IPeerStore peerstore: IPeerStore
connections: Dict[ID, INetConn] connections: Dict[ID, INetConn]
listeners: Dict[str, IListener] listeners: Dict[str, IListener]

@ -1,10 +1,21 @@
from abc import ABC, abstractmethod from abc import (
from typing import TYPE_CHECKING ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from libp2p.network.connection.net_connection_interface import INetConn from libp2p.network.connection.net_connection_interface import (
from libp2p.network.stream.net_stream_interface import INetStream INetConn,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from .network_interface import INetwork # noqa: F401 from .network_interface import INetwork # noqa: F401

@ -1,4 +1,6 @@
from libp2p.io.exceptions import IOException from libp2p.io.exceptions import (
IOException,
)
class StreamError(IOException): class StreamError(IOException):

@ -1,22 +1,33 @@
from typing import Optional from typing import (
Optional,
)
from libp2p.stream_muxer.abc import IMuxedStream from libp2p.stream_muxer.abc import (
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import ( from libp2p.stream_muxer.exceptions import (
MuxedStreamClosed, MuxedStreamClosed,
MuxedStreamEOF, MuxedStreamEOF,
MuxedStreamReset, MuxedStreamReset,
) )
from libp2p.typing import TProtocol from libp2p.typing import (
TProtocol,
)
from .exceptions import StreamClosed, StreamEOF, StreamReset from .exceptions import (
from .net_stream_interface import INetStream StreamClosed,
StreamEOF,
StreamReset,
)
from .net_stream_interface import (
INetStream,
)
# TODO: Handle exceptions from `muxed_stream` # TODO: Handle exceptions from `muxed_stream`
# TODO: Add stream state # TODO: Add stream state
# - Reference: https://github.com/libp2p/go-libp2p-swarm/blob/99831444e78c8f23c9335c17d8f7c700ba25ca14/swarm_stream.go # noqa: E501 # - Reference: https://github.com/libp2p/go-libp2p-swarm/blob/99831444e78c8f23c9335c17d8f7c700ba25ca14/swarm_stream.go # noqa: E501
class NetStream(INetStream): class NetStream(INetStream):
muxed_stream: IMuxedStream muxed_stream: IMuxedStream
protocol_id: Optional[TProtocol] protocol_id: Optional[TProtocol]
@ -39,7 +50,7 @@ class NetStream(INetStream):
async def read(self, n: int = None) -> bytes: async def read(self, n: int = None) -> bytes:
""" """
reads from stream. Read from stream.
:param n: number of bytes to read :param n: number of bytes to read
:return: bytes of input :return: bytes of input
@ -53,7 +64,7 @@ class NetStream(INetStream):
async def write(self, data: bytes) -> None: async def write(self, data: bytes) -> None:
""" """
write to stream. Write to stream.
:return: number of bytes written :return: number of bytes written
""" """
@ -63,7 +74,7 @@ class NetStream(INetStream):
raise StreamClosed() from error raise StreamClosed() from error
async def close(self) -> None: async def close(self) -> None:
"""close stream.""" """Close stream."""
await self.muxed_stream.close() await self.muxed_stream.close()
async def reset(self) -> None: async def reset(self) -> None:

@ -1,12 +1,19 @@
from abc import abstractmethod from abc import (
abstractmethod,
)
from libp2p.io.abc import ReadWriteCloser from libp2p.io.abc import (
from libp2p.stream_muxer.abc import IMuxedConn ReadWriteCloser,
from libp2p.typing import TProtocol )
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
from libp2p.typing import (
TProtocol,
)
class INetStream(ReadWriteCloser): class INetStream(ReadWriteCloser):
muxed_conn: IMuxedConn muxed_conn: IMuxedConn
@abstractmethod @abstractmethod

@ -1,33 +1,75 @@
import logging import logging
from typing import Dict, List, Optional from typing import (
Dict,
List,
Optional,
)
from async_service import Service from async_service import (
from multiaddr import Multiaddr Service,
)
from multiaddr import (
Multiaddr,
)
import trio import trio
from libp2p.io.abc import ReadWriteCloser from libp2p.io.abc import (
from libp2p.network.connection.net_connection_interface import INetConn ReadWriteCloser,
from libp2p.peer.id import ID )
from libp2p.peer.peerstore import PeerStoreError from libp2p.network.connection.net_connection_interface import (
from libp2p.peer.peerstore_interface import IPeerStore INetConn,
from libp2p.stream_muxer.abc import IMuxedConn )
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore import (
PeerStoreError,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
from libp2p.transport.exceptions import ( from libp2p.transport.exceptions import (
MuxerUpgradeFailure, MuxerUpgradeFailure,
OpenConnectionError, OpenConnectionError,
SecurityUpgradeFailure, SecurityUpgradeFailure,
) )
from libp2p.transport.listener_interface import IListener from libp2p.transport.listener_interface import (
from libp2p.transport.transport_interface import ITransport IListener,
from libp2p.transport.upgrader import TransportUpgrader )
from libp2p.typing import StreamHandlerFn from libp2p.transport.transport_interface import (
ITransport,
)
from libp2p.transport.upgrader import (
TransportUpgrader,
)
from libp2p.typing import (
StreamHandlerFn,
)
from ..exceptions import MultiError from ..exceptions import (
from .connection.raw_connection import RawConnection MultiError,
from .connection.swarm_connection import SwarmConn )
from .exceptions import SwarmException from .connection.raw_connection import (
from .network_interface import INetworkService RawConnection,
from .notifee_interface import INotifee )
from .stream.net_stream_interface import INetStream from .connection.swarm_connection import (
SwarmConn,
)
from .exceptions import (
SwarmException,
)
from .network_interface import (
INetworkService,
)
from .notifee_interface import (
INotifee,
)
from .stream.net_stream_interface import (
INetStream,
)
logger = logging.getLogger("libp2p.network.swarm") logger = logging.getLogger("libp2p.network.swarm")
@ -40,7 +82,6 @@ def create_default_stream_handler(network: INetworkService) -> StreamHandlerFn:
class Swarm(Service, INetworkService): class Swarm(Service, INetworkService):
self_id: ID self_id: ID
peerstore: IPeerStore peerstore: IPeerStore
upgrader: TransportUpgrader upgrader: TransportUpgrader
@ -72,7 +113,8 @@ class Swarm(Service, INetworkService):
# Create Notifee array # Create Notifee array
self.notifees = [] self.notifees = []
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
self.common_stream_handler = create_default_stream_handler(self) # type: ignore self.common_stream_handler = create_default_stream_handler(self) # type: ignore
self.listener_nursery = None self.listener_nursery = None
@ -95,18 +137,18 @@ class Swarm(Service, INetworkService):
return self.self_id return self.self_id
def set_stream_handler(self, stream_handler: StreamHandlerFn) -> None: def set_stream_handler(self, stream_handler: StreamHandlerFn) -> None:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
self.common_stream_handler = stream_handler # type: ignore self.common_stream_handler = stream_handler # type: ignore
async def dial_peer(self, peer_id: ID) -> INetConn: async def dial_peer(self, peer_id: ID) -> INetConn:
""" """
dial_peer try to create a connection to peer_id. Try to create a connection to peer_id.
:param peer_id: peer id we want to dial :param peer_id: peer id we want to dial
:raises SwarmException: raised when an error occurs :raises SwarmException: raised when an error occurs
:return: muxed connection :return: muxed connection
""" """
if peer_id in self.connections: if peer_id in self.connections:
# If muxed connection already exists for peer_id, # If muxed connection already exists for peer_id,
# set muxed connection equal to existing muxed connection # set muxed connection equal to existing muxed connection
@ -140,20 +182,19 @@ class Swarm(Service, INetworkService):
# Tried all addresses, raising exception. # Tried all addresses, raising exception.
raise SwarmException( raise SwarmException(
f"unable to connect to {peer_id}, no addresses established a successful connection " f"unable to connect to {peer_id}, no addresses established a successful "
"(with exceptions)" "connection (with exceptions)"
) from MultiError(exceptions) ) from MultiError(exceptions)
async def dial_addr(self, addr: Multiaddr, peer_id: ID) -> INetConn: async def dial_addr(self, addr: Multiaddr, peer_id: ID) -> INetConn:
""" """
dial_addr try to create a connection to peer_id with addr. Try to create a connection to peer_id with addr.
:param addr: the address we want to connect with :param addr: the address we want to connect with
:param peer_id: the peer we want to connect to :param peer_id: the peer we want to connect to
:raises SwarmException: raised when an error occurs :raises SwarmException: raised when an error occurs
:return: network connection :return: network connection
""" """
# Dial peer (connection to peer does not yet exist) # Dial peer (connection to peer does not yet exist)
# Transport dials peer (gets back a raw conn) # Transport dials peer (gets back a raw conn)
try: try:
@ -231,11 +272,13 @@ class Swarm(Service, INetworkService):
if str(maddr) in self.listeners: if str(maddr) in self.listeners:
return True return True
async def conn_handler(read_write_closer: ReadWriteCloser) -> None: async def conn_handler(
read_write_closer: ReadWriteCloser, maddr=maddr
) -> None:
raw_conn = RawConnection(read_write_closer, False) raw_conn = RawConnection(read_write_closer, False)
# Per, https://discuss.libp2p.io/t/multistream-security/130, we first secure # Per, https://discuss.libp2p.io/t/multistream-security/130, we first
# the conn and then mux the conn # secure the conn and then mux the conn
try: try:
# FIXME: This dummy `ID(b"")` for the remote peer is useless. # FIXME: This dummy `ID(b"")` for the remote peer is useless.
secured_conn = await self.upgrader.upgrade_security( secured_conn = await self.upgrader.upgrade_security(
@ -264,8 +307,8 @@ class Swarm(Service, INetworkService):
await self.add_conn(muxed_conn) await self.add_conn(muxed_conn)
logger.debug("successfully opened connection to peer %s", peer_id) logger.debug("successfully opened connection to peer %s", peer_id)
# NOTE: This is an intentional barrier to prevent the handler from exiting and # NOTE: This is an intentional barrier to prevent the handler
# closing the connection. # from exiting and closing the connection.
await self.manager.wait_finished() await self.manager.wait_finished()
try: try:
@ -282,7 +325,7 @@ class Swarm(Service, INetworkService):
await self.notify_listen(maddr) await self.notify_listen(maddr)
return True return True
except IOError: except OSError:
# Failed. Continue looping. # Failed. Continue looping.
logger.debug("fail to listen on: %s", maddr) logger.debug("fail to listen on: %s", maddr)
@ -304,9 +347,11 @@ class Swarm(Service, INetworkService):
logger.debug("successfully close the connection to peer %s", peer_id) logger.debug("successfully close the connection to peer %s", peer_id)
async def add_conn(self, muxed_conn: IMuxedConn) -> SwarmConn: async def add_conn(self, muxed_conn: IMuxedConn) -> SwarmConn:
"""Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected", """
Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
and start to monitor the connection for its new streams and and start to monitor the connection for its new streams and
disconnection.""" disconnection.
"""
swarm_conn = SwarmConn(muxed_conn, self) swarm_conn = SwarmConn(muxed_conn, self)
self.manager.run_task(muxed_conn.start) self.manager.run_task(muxed_conn.start)
await muxed_conn.event_started.wait() await muxed_conn.event_started.wait()
@ -319,8 +364,10 @@ class Swarm(Service, INetworkService):
return swarm_conn return swarm_conn
def remove_conn(self, swarm_conn: SwarmConn) -> None: def remove_conn(self, swarm_conn: SwarmConn) -> None:
"""Simply remove the connection from Swarm's records, without closing """
the connection.""" Simply remove the connection from Swarm's records, without closing
the connection.
"""
peer_id = swarm_conn.muxed_conn.peer_id peer_id = swarm_conn.muxed_conn.peer_id
if peer_id not in self.connections: if peer_id not in self.connections:
return return
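The `conn_handler` hunks above spell out the inbound upgrade order: wrap the accepted `ReadWriteCloser` in a `RawConnection`, secure the connection first, then negotiate a muxer (per the linked multistream-security discussion). A condensed sketch of that order; only `upgrade_security` is visible in this excerpt, so the second call and all parameter names are assumptions:

```python
async def upgrade_inbound(upgrader, raw_conn, dummy_peer_id):
    """Sketch of the secure-then-mux order described in the hunk above.

    ``upgrader``, ``raw_conn`` and ``dummy_peer_id`` are placeholders; the
    ``upgrade_connection`` name and both signatures are illustrative, not
    the library's exact API.
    """
    # 1. Secure the raw connection first (inbound side, so not the initiator).
    secured_conn = await upgrader.upgrade_security(raw_conn, dummy_peer_id, False)
    # 2. Only then mux the secured connection.
    muxed_conn = await upgrader.upgrade_connection(secured_conn, dummy_peer_id)
    return muxed_conn
```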

@ -1,5 +1,5 @@
# PeerStore # PeerStore
The PeerStore contains a mapping of peer IDs to PeerData objects. Each PeerData object represents a peer, and each PeerData contains a collection of protocols, addresses, and a mapping of metadata. PeerStore implements the IPeerStore (peer protocols), IAddrBook (address book), and IPeerMetadata (peer metadata) interfaces, which allows the peer store to effectively function as a dictionary for peer ID to protocol, address, and metadata. The PeerStore contains a mapping of peer IDs to PeerData objects. Each PeerData object represents a peer, and each PeerData contains a collection of protocols, addresses, and a mapping of metadata. PeerStore implements the IPeerStore (peer protocols), IAddrBook (address book), and IPeerMetadata (peer metadata) interfaces, which allows the peer store to effectively function as a dictionary for peer ID to protocol, address, and metadata.
Note: PeerInfo represents a read-only summary of a PeerData object. Only the attributes assigned in PeerInfo are readable by references to PeerInfo objects. Note: PeerInfo represents a read-only summary of a PeerData object. Only the attributes assigned in PeerInfo are readable by references to PeerInfo objects.
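As a rough picture of the structure described here (peer ID mapped to a PeerData holding protocols, addresses, and metadata), a toy version might look like the following; class and method names are illustrative and deliberately simplified, not the library's API:

```python
from collections import defaultdict
from typing import Any, Dict, List


class ToyPeerData:
    """Stand-in for PeerData: protocols, addresses, and metadata for one peer."""

    def __init__(self) -> None:
        self.protocols: List[str] = []
        self.addrs: List[str] = []      # the real store keeps Multiaddr objects
        self.metadata: Dict[Any, Any] = {}


class ToyPeerStore:
    """Stand-in for PeerStore: effectively a dict of peer ID -> ToyPeerData."""

    def __init__(self) -> None:
        self.peer_data_map: Dict[str, ToyPeerData] = defaultdict(ToyPeerData)

    def add_protocols(self, peer_id: str, protocols: List[str]) -> None:
        self.peer_data_map[peer_id].protocols.extend(protocols)

    def add_addr(self, peer_id: str, addr: str) -> None:
        self.peer_data_map[peer_id].addrs.append(addr)

    def put(self, peer_id: str, key: Any, val: Any) -> None:
        self.peer_data_map[peer_id].metadata[key] = val
```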

@ -1,9 +1,19 @@
from abc import ABC, abstractmethod from abc import (
from typing import List, Sequence ABC,
abstractmethod,
)
from typing import (
List,
Sequence,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from .id import ID from .id import (
ID,
)
class IAddrBook(ABC): class IAddrBook(ABC):
@ -15,7 +25,7 @@ class IAddrBook(ABC):
:param peer_id: the peer to add address for :param peer_id: the peer to add address for
:param addr: multiaddress of the peer :param addr: multiaddress of the peer
:param ttl: time-to-live for the address (after this time, address is no longer valid) :param ttl: time-to-live for the address (after this time, address is no longer valid)
""" """ # noqa: E501
@abstractmethod @abstractmethod
def add_addrs(self, peer_id: ID, addrs: Sequence[Multiaddr], ttl: int) -> None: def add_addrs(self, peer_id: ID, addrs: Sequence[Multiaddr], ttl: int) -> None:
@ -28,7 +38,7 @@ class IAddrBook(ABC):
:param peer_id: the peer to add address for :param peer_id: the peer to add address for
:param addr: multiaddresses of the peer :param addr: multiaddresses of the peer
:param ttl: time-to-live for the address (after this time, address is no longer valid) :param ttl: time-to-live for the address (after this time, address is no longer valid)
""" """ # noqa: E501
@abstractmethod @abstractmethod
def addrs(self, peer_id: ID) -> List[Multiaddr]: def addrs(self, peer_id: ID) -> List[Multiaddr]:

@ -1,10 +1,14 @@
import hashlib import hashlib
from typing import Union from typing import (
Union,
)
import base58 import base58
import multihash import multihash
from libp2p.crypto.keys import PublicKey from libp2p.crypto.keys import (
PublicKey,
)
# NOTE: On inlining... # NOTE: On inlining...
# See: https://github.com/libp2p/specs/issues/138 # See: https://github.com/libp2p/specs/issues/138
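Given this file's imports (`hashlib`, `base58`, `multihash`, `PublicKey`), it helps to recall that a peer ID is essentially a multihash of the peer's serialized public key rendered in base58. A simplified sketch, assuming an sha2-256 multihash (code `0x12`) and ignoring the small-key inlining discussed in the linked spec issue:

```python
import hashlib

import base58


def peer_id_from_pubkey_bytes(serialized_pubkey: bytes) -> str:
    """Derive a base58 peer ID string from serialized public-key bytes.

    Simplified: always wraps an sha2-256 digest as <0x12><length><digest>
    and skips the identity-multihash inlining the real code may apply.
    """
    digest = hashlib.sha256(serialized_pubkey).digest()
    multihash_bytes = bytes([0x12, len(digest)]) + digest
    return base58.b58encode(multihash_bytes).decode()
```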

@ -1,14 +1,25 @@
from typing import Any, Dict, List, Sequence from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import PrivateKey, PublicKey from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from .peerdata_interface import IPeerData from .peerdata_interface import (
IPeerData,
)
class PeerData(IPeerData): class PeerData(IPeerData):
pubkey: PublicKey pubkey: PublicKey
privkey: PrivateKey privkey: PrivateKey
metadata: Dict[Any, Any] metadata: Dict[Any, Any]

@ -1,11 +1,25 @@
from abc import ABC, abstractmethod from abc import (
from typing import Any, List, Sequence ABC,
abstractmethod,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import PrivateKey, PublicKey from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from .peermetadata_interface import IPeerMetadata from .peermetadata_interface import (
IPeerMetadata,
)
class IPeerData(ABC): class IPeerData(ABC):

@ -1,8 +1,14 @@
from typing import Any, List, Sequence from typing import (
Any,
List,
Sequence,
)
import multiaddr import multiaddr
from .id import ID from .id import (
ID,
)
class PeerInfo: class PeerInfo:

@ -1,7 +1,14 @@
from abc import ABC, abstractmethod from abc import (
from typing import Any ABC,
abstractmethod,
)
from typing import (
Any,
)
from .id import ID from .id import (
ID,
)
class IPeerMetadata(ABC): class IPeerMetadata(ABC):

@ -1,18 +1,39 @@
from collections import defaultdict from collections import (
from typing import Any, Dict, List, Sequence defaultdict,
)
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import KeyPair, PrivateKey, PublicKey from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
PublicKey,
)
from .id import ID from .id import (
from .peerdata import PeerData, PeerDataError ID,
from .peerinfo import PeerInfo )
from .peerstore_interface import IPeerStore from .peerdata import (
PeerData,
PeerDataError,
)
from .peerinfo import (
PeerInfo,
)
from .peerstore_interface import (
IPeerStore,
)
class PeerStore(IPeerStore): class PeerStore(IPeerStore):
peer_data_map: Dict[ID, PeerData] peer_data_map: Dict[ID, PeerData]
def __init__(self) -> None: def __init__(self) -> None:

@ -1,14 +1,34 @@
from abc import abstractmethod from abc import (
from typing import Any, List, Sequence abstractmethod,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import KeyPair, PrivateKey, PublicKey from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
PublicKey,
)
from .addrbook_interface import IAddrBook from .addrbook_interface import (
from .id import ID IAddrBook,
from .peerinfo import PeerInfo )
from .peermetadata_interface import IPeerMetadata from .id import (
ID,
)
from .peerinfo import (
PeerInfo,
)
from .peermetadata_interface import (
IPeerMetadata,
)
class IPeerStore(IAddrBook, IPeerMetadata): class IPeerStore(IAddrBook, IPeerMetadata):

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError from libp2p.exceptions import (
BaseLibp2pError,
)
class MultiselectCommunicatorError(BaseLibp2pError): class MultiselectCommunicatorError(BaseLibp2pError):

@ -1,19 +1,34 @@
from typing import Dict, Tuple from typing import (
Dict,
Tuple,
)
from libp2p.typing import StreamHandlerFn, TProtocol from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .exceptions import MultiselectCommunicatorError, MultiselectError from .exceptions import (
from .multiselect_communicator_interface import IMultiselectCommunicator MultiselectCommunicatorError,
from .multiselect_muxer_interface import IMultiselectMuxer MultiselectError,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
from .multiselect_muxer_interface import (
IMultiselectMuxer,
)
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0" MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na" PROTOCOL_NOT_FOUND_MSG = "na"
class Multiselect(IMultiselectMuxer): class Multiselect(IMultiselectMuxer):
"""Multiselect module that is responsible for responding to a multiselect """
Multiselect module that is responsible for responding to a multiselect
client and deciding on a specific protocol and handler pair to use for client and deciding on a specific protocol and handler pair to use for
communication.""" communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn] handlers: Dict[TProtocol, StreamHandlerFn]
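The docstring reworked here describes the listener half of multistream-select: after a handshake on `/multistream/1.0.0`, each protocol the client proposes is either echoed back (accepted, and its handler chosen) or answered with `na`. A stripped-down sketch of that loop, assuming a communicator with async `read()`/`write(str)` as in this module and omitting error handling and the `ls` command:

```python
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na"


async def negotiate_as_listener(communicator, handlers):
    """Sketch of listener-side protocol negotiation.

    ``communicator`` is assumed to expose async ``read()`` and ``write(str)``;
    ``handlers`` maps protocol IDs to handler callables.
    """
    # Handshake: both sides announce the multistream protocol itself.
    await communicator.write(MULTISELECT_PROTOCOL_ID)
    if await communicator.read() != MULTISELECT_PROTOCOL_ID:
        raise ValueError("multiselect handshake failed")

    while True:
        proposed = await communicator.read()
        if proposed in handlers:
            # Accept by echoing the protocol ID back, then hand off.
            await communicator.write(proposed)
            return proposed, handlers[proposed]
        # Not supported: tell the client and wait for its next proposal.
        await communicator.write(PROTOCOL_NOT_FOUND_MSG)
```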

@ -1,18 +1,31 @@
from typing import Sequence from typing import (
Sequence,
)
from libp2p.typing import TProtocol from libp2p.typing import (
TProtocol,
)
from .exceptions import MultiselectClientError, MultiselectCommunicatorError from .exceptions import (
from .multiselect_client_interface import IMultiselectClient MultiselectClientError,
from .multiselect_communicator_interface import IMultiselectCommunicator MultiselectCommunicatorError,
)
from .multiselect_client_interface import (
IMultiselectClient,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0" MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na" PROTOCOL_NOT_FOUND_MSG = "na"
class MultiselectClient(IMultiselectClient): class MultiselectClient(IMultiselectClient):
"""Client for communicating with receiver's multiselect module in order to """
select a protocol id to communicate over.""" Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over.
"""
async def handshake(self, communicator: IMultiselectCommunicator) -> None: async def handshake(self, communicator: IMultiselectCommunicator) -> None:
""" """

@ -1,16 +1,26 @@
from abc import ABC, abstractmethod from abc import (
from typing import Sequence ABC,
abstractmethod,
)
from typing import (
Sequence,
)
from libp2p.protocol_muxer.multiselect_communicator_interface import ( from libp2p.protocol_muxer.multiselect_communicator_interface import (
IMultiselectCommunicator, IMultiselectCommunicator,
) )
from libp2p.typing import TProtocol from libp2p.typing import (
TProtocol,
)
class IMultiselectClient(ABC): class IMultiselectClient(ABC):
"""Client for communicating with receiver's multiselect module in order to """
select a protocol id to communicate over.""" Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over.
"""
@abstractmethod
async def handshake(self, communicator: IMultiselectCommunicator) -> None: async def handshake(self, communicator: IMultiselectCommunicator) -> None:
""" """
Ensure that the client and multiselect are both using the same Ensure that the client and multiselect are both using the same
@ -34,6 +44,7 @@ class IMultiselectClient(ABC):
:return: selected protocol :return: selected protocol
""" """
@abstractmethod
async def try_select( async def try_select(
self, communicator: IMultiselectCommunicator, protocol: TProtocol self, communicator: IMultiselectCommunicator, protocol: TProtocol
) -> TProtocol: ) -> TProtocol:

@ -1,10 +1,23 @@
from libp2p.exceptions import ParseError from libp2p.exceptions import (
from libp2p.io.abc import ReadWriteCloser ParseError,
from libp2p.io.exceptions import IOException )
from libp2p.utils import encode_delim, read_delim from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
from libp2p.utils import (
encode_delim,
read_delim,
)
from .exceptions import MultiselectCommunicatorError from .exceptions import (
from .multiselect_communicator_interface import IMultiselectCommunicator MultiselectCommunicatorError,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
class MultiselectCommunicator(IMultiselectCommunicator): class MultiselectCommunicator(IMultiselectCommunicator):
@ -16,7 +29,7 @@ class MultiselectCommunicator(IMultiselectCommunicator):
async def write(self, msg_str: str) -> None: async def write(self, msg_str: str) -> None:
""" """
:raise MultiselectCommunicatorError: raised when failed to write to underlying reader :raise MultiselectCommunicatorError: raised when failed to write to underlying reader
""" """ # noqa: E501
msg_bytes = encode_delim(msg_str.encode()) msg_bytes = encode_delim(msg_str.encode())
try: try:
await self.read_writer.write(msg_bytes) await self.read_writer.write(msg_bytes)
@ -28,7 +41,7 @@ class MultiselectCommunicator(IMultiselectCommunicator):
async def read(self) -> str: async def read(self) -> str:
""" """
:raise MultiselectCommunicatorError: raised when failed to read from underlying reader :raise MultiselectCommunicatorError: raised when failed to read from underlying reader
""" """ # noqa: E501
try: try:
data = await read_delim(self.read_writer) data = await read_delim(self.read_writer)
# `IOException` includes `IncompleteReadError` and `StreamError` # `IOException` includes `IncompleteReadError` and `StreamError`

@ -1,10 +1,15 @@
from abc import ABC, abstractmethod from abc import (
ABC,
abstractmethod,
)
class IMultiselectCommunicator(ABC): class IMultiselectCommunicator(ABC):
"""Communicator helper class that ensures both the client and multistream """
Communicator helper class that ensures both the client and multistream
module will follow the same multistream protocol, which is necessary for module will follow the same multistream protocol, which is necessary for
them to work.""" them to work.
"""
@abstractmethod @abstractmethod
async def write(self, msg_str: str) -> None: async def write(self, msg_str: str) -> None:

@ -1,15 +1,28 @@
from abc import ABC, abstractmethod from abc import (
from typing import Dict, Tuple ABC,
abstractmethod,
)
from typing import (
Dict,
Tuple,
)
from libp2p.typing import StreamHandlerFn, TProtocol from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .multiselect_communicator_interface import IMultiselectCommunicator from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
class IMultiselectMuxer(ABC): class IMultiselectMuxer(ABC):
"""Multiselect module that is responsible for responding to a multiselect """
Multiselect module that is responsible for responding to a multiselect
client and deciding on a specific protocol and handler pair to use for client and deciding on a specific protocol and handler pair to use for
communication.""" communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn] handlers: Dict[TProtocol, StreamHandlerFn]

@ -1,4 +1,7 @@
from abc import ABC, abstractmethod from abc import (
ABC,
abstractmethod,
)
from typing import ( from typing import (
TYPE_CHECKING, TYPE_CHECKING,
AsyncContextManager, AsyncContextManager,
@ -8,13 +11,23 @@ from typing import (
Tuple, Tuple,
) )
from async_service import ServiceAPI from async_service import (
ServiceAPI,
)
from libp2p.peer.id import ID from libp2p.peer.id import (
from libp2p.typing import TProtocol ID,
)
from libp2p.typing import (
TProtocol,
)
from .pb import rpc_pb2 from .pb import (
from .typing import ValidatorFn rpc_pb2,
)
from .typing import (
ValidatorFn,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from .pubsub import Pubsub # noqa: F401 from .pubsub import Pubsub # noqa: F401
@ -69,9 +82,9 @@ class IPubsubRouter(ABC):
""" """
Invoked to process control messages in the RPC envelope. Invoked to process control messages in the RPC envelope.
It is invoked after subscriptions and payload messages have been processed It is invoked after subscriptions and payload messages have been processed
TODO: Check if this interface is ok. It's not the exact same as the go code, but the go TODO: Check if this interface is ok. It's not the exact same as the go code, but
code is really confusing with the msg origin, they specify `rpc.from` even when the rpc the go code is really confusing with the msg origin, they specify `rpc.from`
shouldn't have a from even when the rpc shouldn't have a from
:param rpc: rpc message :param rpc: rpc message
""" """

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError from libp2p.exceptions import (
BaseLibp2pError,
)
class PubsubRouterError(BaseLibp2pError): class PubsubRouterError(BaseLibp2pError):

@ -1,16 +1,34 @@
import logging import logging
from typing import Iterable, List, Sequence from typing import (
Iterable,
List,
Sequence,
)
import trio import trio
from libp2p.network.stream.exceptions import StreamClosed from libp2p.network.stream.exceptions import (
from libp2p.peer.id import ID StreamClosed,
from libp2p.typing import TProtocol )
from libp2p.utils import encode_varint_prefixed from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
)
from .abc import IPubsubRouter from .abc import (
from .pb import rpc_pb2 IPubsubRouter,
from .pubsub import Pubsub )
from .pb import (
rpc_pb2,
)
from .pubsub import (
Pubsub,
)
PROTOCOL_ID = TProtocol("/floodsub/1.0.0") PROTOCOL_ID = TProtocol("/floodsub/1.0.0")
@ -18,7 +36,6 @@ logger = logging.getLogger("libp2p.pubsub.floodsub")
class FloodSub(IPubsubRouter): class FloodSub(IPubsubRouter):
protocols: List[TProtocol] protocols: List[TProtocol]
pubsub: Pubsub pubsub: Pubsub
@ -80,7 +97,6 @@ class FloodSub(IPubsubRouter):
:param msg_forwarder: peer ID of the peer who forwards the message to us :param msg_forwarder: peer ID of the peer who forwards the message to us
:param pubsub_msg: pubsub message in protobuf. :param pubsub_msg: pubsub message in protobuf.
""" """
peers_gen = set( peers_gen = set(
self._get_peers_to_send( self._get_peers_to_send(
pubsub_msg.topicIDs, pubsub_msg.topicIDs,

@ -1,23 +1,58 @@
from ast import literal_eval from ast import (
from collections import defaultdict literal_eval,
)
from collections import (
defaultdict,
)
import logging import logging
import random import random
from typing import Any, DefaultDict, Dict, Iterable, List, Sequence, Set, Tuple from typing import (
Any,
DefaultDict,
Dict,
Iterable,
List,
Sequence,
Set,
Tuple,
)
from async_service import Service from async_service import (
Service,
)
import trio import trio
from libp2p.network.stream.exceptions import StreamClosed from libp2p.network.stream.exceptions import (
from libp2p.peer.id import ID StreamClosed,
from libp2p.pubsub import floodsub )
from libp2p.typing import TProtocol from libp2p.peer.id import (
from libp2p.utils import encode_varint_prefixed ID,
)
from libp2p.pubsub import (
floodsub,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
)
from .abc import IPubsubRouter from .abc import (
from .exceptions import NoPubsubAttached IPubsubRouter,
from .mcache import MessageCache )
from .pb import rpc_pb2 from .exceptions import (
from .pubsub import Pubsub NoPubsubAttached,
)
from .mcache import (
MessageCache,
)
from .pb import (
rpc_pb2,
)
from .pubsub import (
Pubsub,
)
PROTOCOL_ID = TProtocol("/meshsub/1.0.0") PROTOCOL_ID = TProtocol("/meshsub/1.0.0")
@ -120,10 +155,10 @@ class GossipSub(IPubsubRouter, Service):
logger.debug("adding peer %s with protocol %s", peer_id, protocol_id) logger.debug("adding peer %s with protocol %s", peer_id, protocol_id)
if protocol_id not in (PROTOCOL_ID, floodsub.PROTOCOL_ID): if protocol_id not in (PROTOCOL_ID, floodsub.PROTOCOL_ID):
# We should never enter here. Because the `protocol_id` is registered by your pubsub # We should never enter here. Because the `protocol_id` is registered by
# instance in multistream-select, but it is not the protocol that gossipsub supports. # your pubsub instance in multistream-select, but it is not the protocol
# In this case, probably we registered gossipsub to a wrong `protocol_id` # that gossipsub supports. In this case, probably we registered gossipsub
# in multistream-select, or wrong versions. # to a wrong `protocol_id` in multistream-select, or wrong versions.
raise ValueError(f"Protocol={protocol_id} is not supported.") raise ValueError(f"Protocol={protocol_id} is not supported.")
self.peer_protocol[peer_id] = protocol_id self.peer_protocol[peer_id] = protocol_id
@ -208,11 +243,11 @@ class GossipSub(IPubsubRouter, Service):
continue continue
# floodsub peers # floodsub peers
floodsub_peers: Set[ID] = set( floodsub_peers: Set[ID] = {
peer_id peer_id
for peer_id in self.pubsub.peer_topics[topic] for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == floodsub.PROTOCOL_ID if self.peer_protocol[peer_id] == floodsub.PROTOCOL_ID
) }
send_to.update(floodsub_peers) send_to.update(floodsub_peers)
# gossipsub peers # gossipsub peers
@ -220,9 +255,9 @@ class GossipSub(IPubsubRouter, Service):
if topic in self.mesh: if topic in self.mesh:
gossipsub_peers = self.mesh[topic] gossipsub_peers = self.mesh[topic]
else: else:
# When we publish to a topic that we have not subscribed to, we randomly pick # When we publish to a topic that we have not subscribed to, we randomly
# `self.degree` number of peers who have subscribed to the topic and add them # pick `self.degree` number of peers who have subscribed to the topic
# as our `fanout` peers. # and add them as our `fanout` peers.
topic_in_fanout: bool = topic in self.fanout topic_in_fanout: bool = topic in self.fanout
fanout_peers: Set[ID] = self.fanout[topic] if topic_in_fanout else set() fanout_peers: Set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_size = len(fanout_peers) fanout_size = len(fanout_peers)
@ -270,7 +305,7 @@ class GossipSub(IPubsubRouter, Service):
# Combine fanout peers with selected peers # Combine fanout peers with selected peers
fanout_peers.update(selected_peers) fanout_peers.update(selected_peers)
# Add fanout peers to mesh and notify them with a GRAFT(topic) control message. # Add fanout peers to mesh and notify them with a GRAFT(topic) control message
for peer in fanout_peers: for peer in fanout_peers:
self.mesh[topic].add(peer) self.mesh[topic].add(peer)
await self.emit_graft(topic, peer) await self.emit_graft(topic, peer)
@ -369,10 +404,10 @@ class GossipSub(IPubsubRouter, Service):
""" """
Call individual heartbeats. Call individual heartbeats.
Note: the heartbeats are called with awaits because each heartbeat depends on the Note: the heartbeats are called with awaits because each heartbeat depends on
state changes in the preceding heartbeat the state changes in the preceding heartbeat
""" """
# Start after a delay. Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L410 # Noqa: E501 # Start after a delay. Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L410 # noqa: E501
await trio.sleep(self.heartbeat_initial_delay) await trio.sleep(self.heartbeat_initial_delay)
while True: while True:
# Maintain mesh and keep track of which peers to send GRAFT or PRUNE to # Maintain mesh and keep track of which peers to send GRAFT or PRUNE to
@ -381,7 +416,8 @@ class GossipSub(IPubsubRouter, Service):
self.fanout_heartbeat() self.fanout_heartbeat()
# Get the peers to send IHAVE to # Get the peers to send IHAVE to
peers_to_gossip = self.gossip_heartbeat() peers_to_gossip = self.gossip_heartbeat()
# Pack GRAFT, PRUNE and IHAVE for the same peer into one control message and send it # Pack GRAFT, PRUNE and IHAVE for the same peer into one control message and
# send it
await self._emit_control_msgs( await self._emit_control_msgs(
peers_to_graft, peers_to_prune, peers_to_gossip peers_to_graft, peers_to_prune, peers_to_gossip
) )
@ -391,7 +427,7 @@ class GossipSub(IPubsubRouter, Service):
await trio.sleep(self.heartbeat_interval) await trio.sleep(self.heartbeat_interval)
def mesh_heartbeat( def mesh_heartbeat(
self self,
) -> Tuple[DefaultDict[ID, List[str]], DefaultDict[ID, List[str]]]: ) -> Tuple[DefaultDict[ID, List[str]], DefaultDict[ID, List[str]]]:
peers_to_graft: DefaultDict[ID, List[str]] = defaultdict(list) peers_to_graft: DefaultDict[ID, List[str]] = defaultdict(list)
peers_to_prune: DefaultDict[ID, List[str]] = defaultdict(list) peers_to_prune: DefaultDict[ID, List[str]] = defaultdict(list)
@ -402,7 +438,7 @@ class GossipSub(IPubsubRouter, Service):
num_mesh_peers_in_topic = len(self.mesh[topic]) num_mesh_peers_in_topic = len(self.mesh[topic])
if num_mesh_peers_in_topic < self.degree_low: if num_mesh_peers_in_topic < self.degree_low:
# Select D - |mesh[topic]| peers from peers.gossipsub[topic] - mesh[topic] # Select D - |mesh[topic]| peers from peers.gossipsub[topic] - mesh[topic] # noqa: E501
selected_peers = self._get_in_topic_gossipsub_peers_from_minus( selected_peers = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree - num_mesh_peers_in_topic, self.mesh[topic] topic, self.degree - num_mesh_peers_in_topic, self.mesh[topic]
) )
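A simplified sketch of the mesh maintenance step in this hunk: when a topic's mesh drops below degree_low, select replacement peers and record a GRAFT for each. The selection helper is assumed to be supplied by the caller, and the PRUNE side (an over-full mesh) is omitted for brevity.

from collections import defaultdict
from typing import Callable, DefaultDict, Dict, List, Set

def mesh_heartbeat_sketch(
    mesh: Dict[str, Set[str]],
    degree: int,
    degree_low: int,
    select_peers: Callable[[str, int, Set[str]], List[str]],
) -> DefaultDict[str, List[str]]:
    peers_to_graft: DefaultDict[str, List[str]] = defaultdict(list)
    for topic, peers in mesh.items():
        if len(peers) < degree_low:
            # Select D - |mesh[topic]| peers that are not already in the mesh.
            for peer in select_peers(topic, degree - len(peers), peers):
                peers.add(peer)
                peers_to_graft[peer].append(topic)
    return peers_to_graft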
@ -436,7 +472,7 @@ class GossipSub(IPubsubRouter, Service):
# Remove topic from fanout # Remove topic from fanout
del self.fanout[topic] del self.fanout[topic]
else: else:
# Check if fanout peers are still in the topic and remove the ones that are not # Check if fanout peers are still in the topic and remove the ones that are not # noqa: E501
# ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L498-L504 # noqa: E501 # ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L498-L504 # noqa: E501
in_topic_fanout_peers = [ in_topic_fanout_peers = [
peer peer
@ -448,7 +484,7 @@ class GossipSub(IPubsubRouter, Service):
# If |fanout[topic]| < D # If |fanout[topic]| < D
if num_fanout_peers_in_topic < self.degree: if num_fanout_peers_in_topic < self.degree:
# Select D - |fanout[topic]| peers from peers.gossipsub[topic] - fanout[topic] # Select D - |fanout[topic]| peers from peers.gossipsub[topic] - fanout[topic] # noqa: E501
selected_peers = self._get_in_topic_gossipsub_peers_from_minus( selected_peers = self._get_in_topic_gossipsub_peers_from_minus(
topic, topic,
self.degree - num_fanout_peers_in_topic, self.degree - num_fanout_peers_in_topic,
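Similarly, a condensed sketch of the fanout maintenance above: drop fanout peers that left the topic, then top the set back up to degree. The expiry of idle fanout topics is left out, and select_peers is again assumed to come from the caller.

from typing import Callable, Dict, List, Set

def fanout_heartbeat_sketch(
    fanout: Dict[str, Set[str]],
    topic_peers: Dict[str, Set[str]],
    degree: int,
    select_peers: Callable[[str, int, Set[str]], List[str]],
) -> None:
    for topic, peers in fanout.items():
        # Keep only fanout peers that are still subscribed to the topic.
        peers.intersection_update(topic_peers.get(topic, set()))
        if len(peers) < degree:
            peers.update(select_peers(topic, degree - len(peers), peers))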
@ -462,11 +498,14 @@ class GossipSub(IPubsubRouter, Service):
for topic in self.mesh: for topic in self.mesh:
msg_ids = self.mcache.window(topic) msg_ids = self.mcache.window(topic)
if msg_ids: if msg_ids:
# Get all pubsub peers in a topic and only add them if they are gossipsub peers too # Get all pubsub peers in a topic and only add them if they are
# gossipsub peers too
if topic in self.pubsub.peer_topics: if topic in self.pubsub.peer_topics:
# Select D peers from peers.gossipsub[topic] # Select D peers from peers.gossipsub[topic]
peers_to_emit_ihave_to = self._get_in_topic_gossipsub_peers_from_minus( peers_to_emit_ihave_to = (
topic, self.degree, self.mesh[topic] self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.mesh[topic]
)
) )
msg_id_strs = [str(msg_id) for msg_id in msg_ids] msg_id_strs = [str(msg_id) for msg_id in msg_ids]
@ -478,11 +517,14 @@ class GossipSub(IPubsubRouter, Service):
for topic in self.fanout: for topic in self.fanout:
msg_ids = self.mcache.window(topic) msg_ids = self.mcache.window(topic)
if msg_ids: if msg_ids:
# Get all pubsub peers in topic and only add if they are gossipsub peers also # Get all pubsub peers in topic and only add if they are
# gossipsub peers also
if topic in self.pubsub.peer_topics: if topic in self.pubsub.peer_topics:
# Select D peers from peers.gossipsub[topic] # Select D peers from peers.gossipsub[topic]
peers_to_emit_ihave_to = self._get_in_topic_gossipsub_peers_from_minus( peers_to_emit_ihave_to = (
topic, self.degree, self.fanout[topic] self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.fanout[topic]
)
) )
msg_id_strs = [str(msg) for msg in msg_ids] msg_id_strs = [str(msg) for msg in msg_ids]
for peer in peers_to_emit_ihave_to: for peer in peers_to_emit_ihave_to:
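A sketch of the gossip step shown in these two hunks: for each topic with recent message IDs in the cache window, pick up to degree peers outside the mesh (or fanout) and queue an IHAVE advertisement for them. The cache window and peer selection are placeholder callables.

from collections import defaultdict
from typing import Callable, DefaultDict, Dict, List, Set

def gossip_heartbeat_sketch(
    mesh: Dict[str, Set[str]],
    window: Callable[[str], List[bytes]],
    select_peers: Callable[[str, int, Set[str]], List[str]],
    degree: int,
) -> DefaultDict[str, Dict[str, List[str]]]:
    peers_to_gossip: DefaultDict[str, Dict[str, List[str]]] = defaultdict(dict)
    for topic, mesh_peers in mesh.items():
        msg_ids = window(topic)
        if not msg_ids:
            continue
        # Advertise the recent message IDs to peers that are not in our mesh.
        msg_id_strs = [str(msg_id) for msg_id in msg_ids]
        for peer in select_peers(topic, degree, mesh_peers):
            peers_to_gossip[peer][topic] = msg_id_strs
    return peers_to_gossip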
@ -494,7 +536,8 @@ class GossipSub(IPubsubRouter, Service):
num_to_select: int, pool: Iterable[Any], minus: Iterable[Any] num_to_select: int, pool: Iterable[Any], minus: Iterable[Any]
) -> List[Any]: ) -> List[Any]:
""" """
Select at most num_to_select subset of elements from the set (pool - minus) randomly. Select at most num_to_select subset of elements from the set
(pool - minus) randomly.
:param num_to_select: number of elements to randomly select :param num_to_select: number of elements to randomly select
:param pool: list of items to select from (excluding elements in minus) :param pool: list of items to select from (excluding elements in minus)
:param minus: elements to be excluded from selection pool :param minus: elements to be excluded from selection pool
@ -508,8 +551,9 @@ class GossipSub(IPubsubRouter, Service):
# Don't create a new selection_pool if we are not subbing anything # Don't create a new selection_pool if we are not subbing anything
selection_pool = list(pool) selection_pool = list(pool)
# If num_to_select > size(selection_pool), then return selection_pool (which has the most # If num_to_select > size(selection_pool), then return selection_pool (which has
# possible elements s.t. the number of elements is less than num_to_select) # the most possible elements s.t. the number of elements is less than
# num_to_select)
if num_to_select >= len(selection_pool): if num_to_select >= len(selection_pool):
return selection_pool return selection_pool
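The docstring above translates almost directly into a standalone helper. This version mirrors the described behaviour (return the whole pool when it is small enough, otherwise sample randomly) without claiming to match the class method line for line.

import random
from typing import Any, Iterable, List

def select_from_minus(
    num_to_select: int, pool: Iterable[Any], minus: Iterable[Any]
) -> List[Any]:
    minus_set = set(minus)
    # Build the selection pool, excluding anything in `minus`.
    selection_pool = [item for item in pool if item not in minus_set]
    # If we cannot fill the request, return the (smaller) pool as-is.
    if num_to_select >= len(selection_pool):
        return selection_pool
    return random.sample(selection_pool, num_to_select)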
@ -521,11 +565,11 @@ class GossipSub(IPubsubRouter, Service):
def _get_in_topic_gossipsub_peers_from_minus( def _get_in_topic_gossipsub_peers_from_minus(
self, topic: str, num_to_select: int, minus: Iterable[ID] self, topic: str, num_to_select: int, minus: Iterable[ID]
) -> List[ID]: ) -> List[ID]:
gossipsub_peers_in_topic = set( gossipsub_peers_in_topic = {
peer_id peer_id
for peer_id in self.pubsub.peer_topics[topic] for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == PROTOCOL_ID if self.peer_protocol[peer_id] == PROTOCOL_ID
) }
return self.select_from_minus(num_to_select, gossipsub_peers_in_topic, minus) return self.select_from_minus(num_to_select, gossipsub_peers_in_topic, minus)
# RPC handlers # RPC handlers
@ -533,15 +577,15 @@ class GossipSub(IPubsubRouter, Service):
async def handle_ihave( async def handle_ihave(
self, ihave_msg: rpc_pb2.ControlIHave, sender_peer_id: ID self, ihave_msg: rpc_pb2.ControlIHave, sender_peer_id: ID
) -> None: ) -> None:
"""Checks the seen set and requests unknown messages with an IWANT """Checks the seen set and requests unknown messages with an IWANT message."""
message.""" # Get list of all seen (seqnos, from) from the (seqno, from) tuples in
# Get list of all seen (seqnos, from) from the (seqno, from) tuples in seen_messages cache # seen_messages cache
seen_seqnos_and_peers = [ seen_seqnos_and_peers = [
seqno_and_from for seqno_and_from in self.pubsub.seen_messages.keys() seqno_and_from for seqno_and_from in self.pubsub.seen_messages.keys()
] ]
# Add all unknown message ids (ids that appear in ihave_msg but not in seen_seqnos) to list # Add all unknown message ids (ids that appear in ihave_msg but not in
# of messages we want to request # seen_seqnos) to list of messages we want to request
# FIXME: Update type of message ID # FIXME: Update type of message ID
msg_ids_wanted: List[Any] = [ msg_ids_wanted: List[Any] = [
msg_id msg_id
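A minimal sketch of the IHAVE handling described here: compare the advertised IDs against the seen-messages cache and keep only the unknown ones, which are then requested with an IWANT. The (seqno, from) key shape follows the surrounding comments; everything else is illustrative.

from typing import Dict, Iterable, List, Tuple

def unknown_message_ids(
    advertised: Iterable[Tuple[bytes, bytes]],
    seen_messages: Dict[Tuple[bytes, bytes], int],
) -> List[Tuple[bytes, bytes]]:
    # Keep only the (seqno, from) IDs we have not seen yet; these are the
    # ones worth asking for via IWANT.
    return [msg_id for msg_id in advertised if msg_id not in seen_messages]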
@ -556,8 +600,10 @@ class GossipSub(IPubsubRouter, Service):
async def handle_iwant( async def handle_iwant(
self, iwant_msg: rpc_pb2.ControlIWant, sender_peer_id: ID self, iwant_msg: rpc_pb2.ControlIWant, sender_peer_id: ID
) -> None: ) -> None:
"""Forwards all request messages that are present in mcache to the """
requesting peer.""" Forwards all request messages that are present in mcache to the
requesting peer.
"""
# FIXME: Update type of message ID # FIXME: Update type of message ID
# FIXME: Find a better way to parse the msg ids # FIXME: Find a better way to parse the msg ids
msg_ids: List[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs] msg_ids: List[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs]
@ -572,8 +618,8 @@ class GossipSub(IPubsubRouter, Service):
msgs_to_forward.append(msg) msgs_to_forward.append(msg)
# Forward messages to requesting peer # Forward messages to requesting peer
# Should this just be publishing? No # Should this just be publishing? No, because then the message will be forwarded
# because then the message will be forwarded to peers in the topics contained in the messages. # to peers in the topics contained in the messages.
# We should # We should
# 1) Package these messages into a single packet # 1) Package these messages into a single packet
packet: rpc_pb2.RPC = rpc_pb2.RPC() packet: rpc_pb2.RPC = rpc_pb2.RPC()
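The comment above notes that the requested messages are packaged into a single packet rather than re-published. Assuming the generated rpc_pb2 module is importable under the absolute path below (the diff itself only uses the relative `.pb` import), the packaging step looks roughly like this; SerializeToString is the standard protobuf API.

from typing import Iterable

from libp2p.pubsub.pb import rpc_pb2  # assumed absolute path for the `.pb` package

def package_messages(msgs: Iterable[rpc_pb2.Message]) -> bytes:
    # 1) Package the messages into a single RPC packet ...
    packet = rpc_pb2.RPC()
    packet.publish.extend(msgs)
    # 2) ... and serialize it so it can be written to the requesting peer's stream.
    return packet.SerializeToString()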
@ -643,7 +689,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_ihave(self, topic: str, msg_ids: Any, to_peer: ID) -> None: async def emit_ihave(self, topic: str, msg_ids: Any, to_peer: ID) -> None:
"""Emit ihave message, sent to to_peer, for topic and msg_ids.""" """Emit ihave message, sent to to_peer, for topic and msg_ids."""
ihave_msg: rpc_pb2.ControlIHave = rpc_pb2.ControlIHave() ihave_msg: rpc_pb2.ControlIHave = rpc_pb2.ControlIHave()
ihave_msg.messageIDs.extend(msg_ids) ihave_msg.messageIDs.extend(msg_ids)
ihave_msg.topicID = topic ihave_msg.topicID = topic
@ -655,7 +700,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_iwant(self, msg_ids: Any, to_peer: ID) -> None: async def emit_iwant(self, msg_ids: Any, to_peer: ID) -> None:
"""Emit iwant message, sent to to_peer, for msg_ids.""" """Emit iwant message, sent to to_peer, for msg_ids."""
iwant_msg: rpc_pb2.ControlIWant = rpc_pb2.ControlIWant() iwant_msg: rpc_pb2.ControlIWant = rpc_pb2.ControlIWant()
iwant_msg.messageIDs.extend(msg_ids) iwant_msg.messageIDs.extend(msg_ids)
@ -666,7 +710,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_graft(self, topic: str, to_peer: ID) -> None: async def emit_graft(self, topic: str, to_peer: ID) -> None:
"""Emit graft message, sent to to_peer, for topic.""" """Emit graft message, sent to to_peer, for topic."""
graft_msg: rpc_pb2.ControlGraft = rpc_pb2.ControlGraft() graft_msg: rpc_pb2.ControlGraft = rpc_pb2.ControlGraft()
graft_msg.topicID = topic graft_msg.topicID = topic
@ -677,7 +720,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_prune(self, topic: str, to_peer: ID) -> None: async def emit_prune(self, topic: str, to_peer: ID) -> None:
"""Emit graft message, sent to to_peer, for topic.""" """Emit graft message, sent to to_peer, for topic."""
prune_msg: rpc_pb2.ControlPrune = rpc_pb2.ControlPrune() prune_msg: rpc_pb2.ControlPrune = rpc_pb2.ControlPrune()
prune_msg.topicID = topic prune_msg.topicID = topic

View File

@ -1,10 +1,17 @@
from typing import Dict, List, Optional, Sequence, Tuple from typing import (
Dict,
List,
Optional,
Sequence,
Tuple,
)
from .pb import rpc_pb2 from .pb import (
rpc_pb2,
)
class CacheEntry: class CacheEntry:
mid: Tuple[bytes, bytes] mid: Tuple[bytes, bytes]
topics: List[str] topics: List[str]
@ -24,7 +31,6 @@ class CacheEntry:
class MessageCache: class MessageCache:
window_size: int window_size: int
history_size: int history_size: int
@ -91,8 +97,9 @@ class MessageCache:
return mids return mids
def shift(self) -> None: def shift(self) -> None:
"""Shift the window over by 1 position, dropping the last element of """
the history.""" Shift the window over by 1 position, dropping the last element of the history.
"""
last_entries: List[CacheEntry] = self.history[len(self.history) - 1] last_entries: List[CacheEntry] = self.history[len(self.history) - 1]
for entry in last_entries: for entry in last_entries:
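To make the shift() docstring concrete, a tiny standalone cache with the same windowed-history idea: new entries go into the newest slot, and shift() rotates the history, forgetting the entries in the oldest slot. This is an illustration only, not the project's MessageCache.

from typing import Dict, List, Tuple

class TinyMessageCache:
    def __init__(self, history_size: int) -> None:
        # history[0] is the newest slot; history[-1] is the one about to expire.
        self.history: List[List[Tuple[bytes, bytes]]] = [[] for _ in range(history_size)]
        self.msgs: Dict[Tuple[bytes, bytes], bytes] = {}

    def put(self, mid: Tuple[bytes, bytes], payload: bytes) -> None:
        self.msgs[mid] = payload
        self.history[0].append(mid)

    def shift(self) -> None:
        # Drop the oldest slot and forget the messages recorded in it.
        for mid in self.history[-1]:
            self.msgs.pop(mid, None)
        self.history = [[]] + self.history[:-1]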

 File diff suppressed because it is too large 

View File

@ -27,197 +27,352 @@ from typing_extensions import (
Literal as typing_extensions___Literal, Literal as typing_extensions___Literal,
) )
class RPC(google___protobuf___message___Message): class RPC(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class SubOpts(google___protobuf___message___Message): class SubOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
subscribe = ... # type: bool subscribe = ... # type: bool
topicid = ... # type: typing___Text topicid = ... # type: typing___Text
def __init__(self, def __init__(
self,
*, *,
subscribe : typing___Optional[bool] = None, subscribe: typing___Optional[bool] = None,
topicid : typing___Optional[typing___Text] = None, topicid: typing___Optional[typing___Text] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> RPC.SubOpts: ... def FromString(cls, s: bytes) -> RPC.SubOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... self, other_msg: google___protobuf___message___Message
) -> None: ...
def CopyFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"subscribe",u"topicid"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"subscribe",u"topicid"]) -> None: ... self, field_name: typing_extensions___Literal["subscribe", "topicid"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["subscribe", "topicid"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"subscribe",b"subscribe",u"topicid",b"topicid"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"subscribe",b"subscribe",u"topicid",b"topicid"]) -> None: ... self,
field_name: typing_extensions___Literal[
"subscribe", b"subscribe", "topicid", b"topicid"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"subscribe", b"subscribe", "topicid", b"topicid"
],
) -> None: ...
@property @property
def subscriptions(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[RPC.SubOpts]: ... def subscriptions(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
RPC.SubOpts
]: ...
@property @property
def publish(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Message]: ... def publish(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
Message
]: ...
@property @property
def control(self) -> ControlMessage: ... def control(self) -> ControlMessage: ...
def __init__(
def __init__(self, self,
*, *,
subscriptions : typing___Optional[typing___Iterable[RPC.SubOpts]] = None, subscriptions: typing___Optional[typing___Iterable[RPC.SubOpts]] = None,
publish : typing___Optional[typing___Iterable[Message]] = None, publish: typing___Optional[typing___Iterable[Message]] = None,
control : typing___Optional[ControlMessage] = None, control: typing___Optional[ControlMessage] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> RPC: ... def FromString(cls, s: bytes) -> RPC: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"control"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"control",u"publish",u"subscriptions"]) -> None: ... self, field_name: typing_extensions___Literal["control"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"control", "publish", "subscriptions"
],
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"control",b"control"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"control",b"control",u"publish",b"publish",u"subscriptions",b"subscriptions"]) -> None: ... self, field_name: typing_extensions___Literal["control", b"control"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"control",
b"control",
"publish",
b"publish",
"subscriptions",
b"subscriptions",
],
) -> None: ...
class Message(google___protobuf___message___Message): class Message(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
from_id = ... # type: bytes from_id = ... # type: bytes
data = ... # type: bytes data = ... # type: bytes
seqno = ... # type: bytes seqno = ... # type: bytes
topicIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] topicIDs = (
signature = ... # type: bytes ...
key = ... # type: bytes ) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
signature = ... # type: bytes
key = ... # type: bytes
def __init__(self, def __init__(
self,
*, *,
from_id : typing___Optional[bytes] = None, from_id: typing___Optional[bytes] = None,
data : typing___Optional[bytes] = None, data: typing___Optional[bytes] = None,
seqno : typing___Optional[bytes] = None, seqno: typing___Optional[bytes] = None,
topicIDs : typing___Optional[typing___Iterable[typing___Text]] = None, topicIDs: typing___Optional[typing___Iterable[typing___Text]] = None,
signature : typing___Optional[bytes] = None, signature: typing___Optional[bytes] = None,
key : typing___Optional[bytes] = None, key: typing___Optional[bytes] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> Message: ... def FromString(cls, s: bytes) -> Message: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"from_id",u"key",u"seqno",u"signature"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"from_id",u"key",u"seqno",u"signature",u"topicIDs"]) -> None: ... self,
field_name: typing_extensions___Literal[
"data", "from_id", "key", "seqno", "signature"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data", "from_id", "key", "seqno", "signature", "topicIDs"
],
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"from_id",b"from_id",u"key",b"key",u"seqno",b"seqno",u"signature",b"signature"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"from_id",b"from_id",u"key",b"key",u"seqno",b"seqno",u"signature",b"signature",u"topicIDs",b"topicIDs"]) -> None: ... self,
field_name: typing_extensions___Literal[
"data",
b"data",
"from_id",
b"from_id",
"key",
b"key",
"seqno",
b"seqno",
"signature",
b"signature",
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"data",
b"data",
"from_id",
b"from_id",
"key",
b"key",
"seqno",
b"seqno",
"signature",
b"signature",
"topicIDs",
b"topicIDs",
],
) -> None: ...
class ControlMessage(google___protobuf___message___Message): class ControlMessage(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property @property
def ihave(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlIHave]: ... def ihave(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlIHave
]: ...
@property @property
def iwant(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlIWant]: ... def iwant(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlIWant
]: ...
@property @property
def graft(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlGraft]: ... def graft(
self,
) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlGraft
]: ...
@property @property
def prune(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlPrune]: ... def prune(
self,
def __init__(self, ) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[
ControlPrune
]: ...
def __init__(
self,
*, *,
ihave : typing___Optional[typing___Iterable[ControlIHave]] = None, ihave: typing___Optional[typing___Iterable[ControlIHave]] = None,
iwant : typing___Optional[typing___Iterable[ControlIWant]] = None, iwant: typing___Optional[typing___Iterable[ControlIWant]] = None,
graft : typing___Optional[typing___Iterable[ControlGraft]] = None, graft: typing___Optional[typing___Iterable[ControlGraft]] = None,
prune : typing___Optional[typing___Iterable[ControlPrune]] = None, prune: typing___Optional[typing___Iterable[ControlPrune]] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> ControlMessage: ... def FromString(cls, s: bytes) -> ControlMessage: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"graft",u"ihave",u"iwant",u"prune"]) -> None: ... def ClearField(
self,
field_name: typing_extensions___Literal["graft", "ihave", "iwant", "prune"],
) -> None: ...
else: else:
def ClearField(self, field_name: typing_extensions___Literal[u"graft",b"graft",u"ihave",b"ihave",u"iwant",b"iwant",u"prune",b"prune"]) -> None: ... def ClearField(
self,
field_name: typing_extensions___Literal[
"graft",
b"graft",
"ihave",
b"ihave",
"iwant",
b"iwant",
"prune",
b"prune",
],
) -> None: ...
class ControlIHave(google___protobuf___message___Message): class ControlIHave(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text topicID = ... # type: typing___Text
messageIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] messageIDs = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self, def __init__(
self,
*, *,
topicID : typing___Optional[typing___Text] = None, topicID: typing___Optional[typing___Text] = None,
messageIDs : typing___Optional[typing___Iterable[typing___Text]] = None, messageIDs: typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> ControlIHave: ... def FromString(cls, s: bytes) -> ControlIHave: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",u"topicID"]) -> None: ... self, field_name: typing_extensions___Literal["topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["messageIDs", "topicID"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",b"messageIDs",u"topicID",b"topicID"]) -> None: ... self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"messageIDs", b"messageIDs", "topicID", b"topicID"
],
) -> None: ...
class ControlIWant(google___protobuf___message___Message): class ControlIWant(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
messageIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] messageIDs = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self, def __init__(
self,
*, *,
messageIDs : typing___Optional[typing___Iterable[typing___Text]] = None, messageIDs: typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> ControlIWant: ... def FromString(cls, s: bytes) -> ControlIWant: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs"]) -> None: ... def ClearField(
self, field_name: typing_extensions___Literal["messageIDs"]
) -> None: ...
else: else:
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",b"messageIDs"]) -> None: ... def ClearField(
self, field_name: typing_extensions___Literal["messageIDs", b"messageIDs"]
) -> None: ...
class ControlGraft(google___protobuf___message___Message): class ControlGraft(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text topicID = ... # type: typing___Text
def __init__(self, def __init__(
self,
*, *,
topicID : typing___Optional[typing___Text] = None, topicID: typing___Optional[typing___Text] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> ControlGraft: ... def FromString(cls, s: bytes) -> ControlGraft: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"topicID"]) -> None: ... self, field_name: typing_extensions___Literal["topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> None: ... self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> None: ...
class ControlPrune(google___protobuf___message___Message): class ControlPrune(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text topicID = ... # type: typing___Text
def __init__(self, def __init__(
self,
*, *,
topicID : typing___Optional[typing___Text] = None, topicID: typing___Optional[typing___Text] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> ControlPrune: ... def FromString(cls, s: bytes) -> ControlPrune: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"topicID"]) -> None: ... self, field_name: typing_extensions___Literal["topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> None: ... self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["topicID", b"topicID"]
) -> None: ...
class TopicDescriptor(google___protobuf___message___Message): class TopicDescriptor(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AuthOpts(google___protobuf___message___Message): class AuthOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AuthMode(int): class AuthMode(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod @classmethod
@ -229,7 +384,11 @@ class TopicDescriptor(google___protobuf___message___Message):
@classmethod @classmethod
def values(cls) -> typing___List[TopicDescriptor.AuthOpts.AuthMode]: ... def values(cls) -> typing___List[TopicDescriptor.AuthOpts.AuthMode]: ...
@classmethod @classmethod
def items(cls) -> typing___List[typing___Tuple[str, TopicDescriptor.AuthOpts.AuthMode]]: ... def items(
cls,
) -> typing___List[
typing___Tuple[str, TopicDescriptor.AuthOpts.AuthMode]
]: ...
NONE = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 0) NONE = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 0)
KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1) KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1)
WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2) WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2)
@ -237,27 +396,46 @@ class TopicDescriptor(google___protobuf___message___Message):
KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1) KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1)
WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2) WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2)
mode = ... # type: TopicDescriptor.AuthOpts.AuthMode mode = ... # type: TopicDescriptor.AuthOpts.AuthMode
keys = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] keys = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
def __init__(self, def __init__(
self,
*, *,
mode : typing___Optional[TopicDescriptor.AuthOpts.AuthMode] = None, mode: typing___Optional[TopicDescriptor.AuthOpts.AuthMode] = None,
keys : typing___Optional[typing___Iterable[bytes]] = None, keys: typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> TopicDescriptor.AuthOpts: ... def FromString(cls, s: bytes) -> TopicDescriptor.AuthOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... self, other_msg: google___protobuf___message___Message
) -> None: ...
def CopyFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"mode"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"keys",u"mode"]) -> None: ... self, field_name: typing_extensions___Literal["mode"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["keys", "mode"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"mode",b"mode"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"keys",b"keys",u"mode",b"mode"]) -> None: ... self, field_name: typing_extensions___Literal["mode", b"mode"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"keys", b"keys", "mode", b"mode"
],
) -> None: ...
class EncOpts(google___protobuf___message___Message): class EncOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class EncMode(int): class EncMode(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod @classmethod
@ -269,7 +447,11 @@ class TopicDescriptor(google___protobuf___message___Message):
@classmethod @classmethod
def values(cls) -> typing___List[TopicDescriptor.EncOpts.EncMode]: ... def values(cls) -> typing___List[TopicDescriptor.EncOpts.EncMode]: ...
@classmethod @classmethod
def items(cls) -> typing___List[typing___Tuple[str, TopicDescriptor.EncOpts.EncMode]]: ... def items(
cls,
) -> typing___List[
typing___Tuple[str, TopicDescriptor.EncOpts.EncMode]
]: ...
NONE = typing___cast(TopicDescriptor.EncOpts.EncMode, 0) NONE = typing___cast(TopicDescriptor.EncOpts.EncMode, 0)
SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1) SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1)
WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2) WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2)
@ -277,46 +459,77 @@ class TopicDescriptor(google___protobuf___message___Message):
SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1) SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1)
WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2) WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2)
mode = ... # type: TopicDescriptor.EncOpts.EncMode mode = ... # type: TopicDescriptor.EncOpts.EncMode
keyHashes = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] keyHashes = (
...
) # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
def __init__(self, def __init__(
self,
*, *,
mode : typing___Optional[TopicDescriptor.EncOpts.EncMode] = None, mode: typing___Optional[TopicDescriptor.EncOpts.EncMode] = None,
keyHashes : typing___Optional[typing___Iterable[bytes]] = None, keyHashes: typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> TopicDescriptor.EncOpts: ... def FromString(cls, s: bytes) -> TopicDescriptor.EncOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... self, other_msg: google___protobuf___message___Message
) -> None: ...
def CopyFrom(
self, other_msg: google___protobuf___message___Message
) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"mode"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"keyHashes",u"mode"]) -> None: ... self, field_name: typing_extensions___Literal["mode"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["keyHashes", "mode"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"mode",b"mode"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"keyHashes",b"keyHashes",u"mode",b"mode"]) -> None: ... self, field_name: typing_extensions___Literal["mode", b"mode"]
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"keyHashes", b"keyHashes", "mode", b"mode"
],
) -> None: ...
name = ... # type: typing___Text name = ... # type: typing___Text
@property @property
def auth(self) -> TopicDescriptor.AuthOpts: ... def auth(self) -> TopicDescriptor.AuthOpts: ...
@property @property
def enc(self) -> TopicDescriptor.EncOpts: ... def enc(self) -> TopicDescriptor.EncOpts: ...
def __init__(
def __init__(self, self,
*, *,
name : typing___Optional[typing___Text] = None, name: typing___Optional[typing___Text] = None,
auth : typing___Optional[TopicDescriptor.AuthOpts] = None, auth: typing___Optional[TopicDescriptor.AuthOpts] = None,
enc : typing___Optional[TopicDescriptor.EncOpts] = None, enc: typing___Optional[TopicDescriptor.EncOpts] = None,
) -> None: ... ) -> None: ...
@classmethod @classmethod
def FromString(cls, s: bytes) -> TopicDescriptor: ... def FromString(cls, s: bytes) -> TopicDescriptor: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,): if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"auth",u"enc",u"name"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"auth",u"enc",u"name"]) -> None: ... self, field_name: typing_extensions___Literal["auth", "enc", "name"]
) -> bool: ...
def ClearField(
self, field_name: typing_extensions___Literal["auth", "enc", "name"]
) -> None: ...
else: else:
def HasField(self, field_name: typing_extensions___Literal[u"auth",b"auth",u"enc",b"enc",u"name",b"name"]) -> bool: ... def HasField(
def ClearField(self, field_name: typing_extensions___Literal[u"auth",b"auth",u"enc",b"enc",u"name",b"name"]) -> None: ... self,
field_name: typing_extensions___Literal[
"auth", b"auth", "enc", b"enc", "name", b"name"
],
) -> bool: ...
def ClearField(
self,
field_name: typing_extensions___Literal[
"auth", b"auth", "enc", b"enc", "name", b"name"
],
) -> None: ...
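These stub changes are purely cosmetic, but the stubs double as documentation of the message shapes. As a usage sketch (assuming the generated module is importable under the path below), building and round-tripping an RPC with a subscription and an IHAVE entry looks roughly like this; add(), extend() and SerializeToString() are the standard protobuf Python API rather than anything added in this diff.

from libp2p.pubsub.pb import rpc_pb2  # assumed absolute path for the `.pb` package

# One RPC carrying a subscription announcement and an IHAVE advertisement.
rpc = rpc_pb2.RPC()
sub = rpc.subscriptions.add()
sub.subscribe = True
sub.topicid = "example-topic"

ihave = rpc.control.ihave.add()
ihave.topicID = "example-topic"
ihave.messageIDs.extend(["msg-1", "msg-2"])

data = rpc.SerializeToString()
decoded = rpc_pb2.RPC.FromString(data)
assert decoded.subscriptions[0].topicid == "example-topic"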

View File

@ -15,33 +15,78 @@ from typing import (
cast, cast,
) )
from async_service import Service from async_service import (
Service,
)
import base58 import base58
from lru import LRU from lru import (
LRU,
)
import trio import trio
from libp2p.crypto.keys import PrivateKey from libp2p.crypto.keys import (
from libp2p.exceptions import ParseError, ValidationError PrivateKey,
from libp2p.host.host_interface import IHost )
from libp2p.io.exceptions import IncompleteReadError from libp2p.exceptions import (
from libp2p.network.exceptions import SwarmException ParseError,
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset ValidationError,
from libp2p.network.stream.net_stream_interface import INetStream )
from libp2p.peer.id import ID from libp2p.host.host_interface import (
from libp2p.typing import TProtocol IHost,
from libp2p.utils import encode_varint_prefixed, read_varint_prefixed_bytes )
from libp2p.io.exceptions import (
IncompleteReadError,
)
from libp2p.network.exceptions import (
SwarmException,
)
from libp2p.network.stream.exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
read_varint_prefixed_bytes,
)
from .abc import IPubsub, ISubscriptionAPI from .abc import (
from .pb import rpc_pb2 IPubsub,
from .pubsub_notifee import PubsubNotifee ISubscriptionAPI,
from .subscription import TrioSubscriptionAPI )
from .typing import AsyncValidatorFn, SyncValidatorFn, ValidatorFn from .pb import (
from .validators import PUBSUB_SIGNING_PREFIX, signature_validator rpc_pb2,
)
from .pubsub_notifee import (
PubsubNotifee,
)
from .subscription import (
TrioSubscriptionAPI,
)
from .typing import (
AsyncValidatorFn,
SyncValidatorFn,
ValidatorFn,
)
from .validators import (
PUBSUB_SIGNING_PREFIX,
signature_validator,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from .abc import IPubsubRouter # noqa: F401
from typing import Any # noqa: F401 from typing import Any # noqa: F401
from .abc import IPubsubRouter # noqa: F401
# Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/40e1c94708658b155f30cf99e4574f384756d83c/topic.go#L97 # noqa: E501 # Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/40e1c94708658b155f30cf99e4574f384756d83c/topic.go#L97 # noqa: E501
SUBSCRIPTION_CHANNEL_SIZE = 32 SUBSCRIPTION_CHANNEL_SIZE = 32
@ -64,7 +109,6 @@ class TopicValidator(NamedTuple):
class Pubsub(Service, IPubsub): class Pubsub(Service, IPubsub):
host: IHost host: IHost
router: "IPubsubRouter" router: "IPubsubRouter"
@ -186,8 +230,10 @@ class Pubsub(Service, IPubsub):
return self.subscribed_topics_receive.keys() return self.subscribed_topics_receive.keys()
def get_hello_packet(self) -> rpc_pb2.RPC: def get_hello_packet(self) -> rpc_pb2.RPC:
"""Generate subscription message with all topics we are subscribed to """
only send hello packet if we have subscribed topics.""" Generate subscription message with all topics we are subscribed to
only send hello packet if we have subscribed topics.
"""
packet = rpc_pb2.RPC() packet = rpc_pb2.RPC()
for topic_id in self.topic_ids: for topic_id in self.topic_ids:
packet.subscriptions.extend( packet.subscriptions.extend(
@ -254,7 +300,7 @@ class Pubsub(Service, IPubsub):
:param topic: the topic to register validator under :param topic: the topic to register validator under
:param validator: the validator used to validate messages published to the topic :param validator: the validator used to validate messages published to the topic
:param is_async_validator: indicate if the validator is an asynchronous validator :param is_async_validator: indicate if the validator is an asynchronous validator
""" """ # noqa: E501
self.topic_validators[topic] = TopicValidator(validator, is_async_validator) self.topic_validators[topic] = TopicValidator(validator, is_async_validator)
def remove_topic_validator(self, topic: str) -> None: def remove_topic_validator(self, topic: str) -> None:
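To illustrate the registration described in this hunk, a minimal sketch built only around the (validator, is_async) pairing that the TopicValidator NamedTuple earlier in this file suggests; the Pubsub class itself is not modelled.

from typing import Awaitable, Callable, Dict, NamedTuple, Union

class TopicValidator(NamedTuple):
    validator: Callable[..., Union[bool, Awaitable[bool]]]
    is_async: bool

topic_validators: Dict[str, TopicValidator] = {}

def set_topic_validator(
    topic: str,
    validator: Callable[..., Union[bool, Awaitable[bool]]],
    is_async_validator: bool,
) -> None:
    # One validator per topic; registering again overwrites the previous one.
    topic_validators[topic] = TopicValidator(validator, is_async_validator)

def remove_topic_validator(topic: str) -> None:
    topic_validators.pop(topic, None)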
@ -341,9 +387,11 @@ class Pubsub(Service, IPubsub):
logger.debug("removed dead peer %s", peer_id) logger.debug("removed dead peer %s", peer_id)
async def handle_peer_queue(self) -> None: async def handle_peer_queue(self) -> None:
"""Continuously read from peer queue and each time a new peer is found, """
Continuously read from peer queue and each time a new peer is found,
open a stream to the peer using a supported pubsub protocol pubsub open a stream to the peer using a supported pubsub protocol pubsub
protocols we support.""" protocols we support.
"""
async with self.peer_receive_channel: async with self.peer_receive_channel:
self.event_handle_peer_queue_started.set() self.event_handle_peer_queue_started.set()
async for peer_id in self.peer_receive_channel: async for peer_id in self.peer_receive_channel:
@ -351,9 +399,10 @@ class Pubsub(Service, IPubsub):
self.manager.run_task(self._handle_new_peer, peer_id) self.manager.run_task(self._handle_new_peer, peer_id)
async def handle_dead_peer_queue(self) -> None: async def handle_dead_peer_queue(self) -> None:
"""Continuously read from dead peer channel and close the stream """
between that peer and remove peer info from pubsub and pubsub Continuously read from dead peer channel and close the stream
router.""" between that peer and remove peer info from pubsub and pubsub router.
"""
async with self.dead_peer_receive_channel: async with self.dead_peer_receive_channel:
self.event_handle_dead_peer_queue_started.set() self.event_handle_dead_peer_queue_started.set()
async for peer_id in self.dead_peer_receive_channel: async for peer_id in self.dead_peer_receive_channel:
@ -373,7 +422,7 @@ class Pubsub(Service, IPubsub):
""" """
if sub_message.subscribe: if sub_message.subscribe:
if sub_message.topicid not in self.peer_topics: if sub_message.topicid not in self.peer_topics:
self.peer_topics[sub_message.topicid] = set([origin_id]) self.peer_topics[sub_message.topicid] = {origin_id}
elif origin_id not in self.peer_topics[sub_message.topicid]: elif origin_id not in self.peer_topics[sub_message.topicid]:
# Add peer to topic # Add peer to topic
self.peer_topics[sub_message.topicid].add(origin_id) self.peer_topics[sub_message.topicid].add(origin_id)
@ -388,7 +437,6 @@ class Pubsub(Service, IPubsub):
:param publish_message: RPC.Message format :param publish_message: RPC.Message format
""" """
# Check if this message has any topics that we are subscribed to # Check if this message has any topics that we are subscribed to
for topic in publish_message.topicIDs: for topic in publish_message.topicIDs:
if topic in self.topic_ids: if topic in self.topic_ids:
@ -409,7 +457,6 @@ class Pubsub(Service, IPubsub):
:param topic_id: topic_id to subscribe to :param topic_id: topic_id to subscribe to
""" """
logger.debug("subscribing to topic %s", topic_id) logger.debug("subscribing to topic %s", topic_id)
# Already subscribed # Already subscribed
@ -448,7 +495,6 @@ class Pubsub(Service, IPubsub):
:param topic_id: topic_id to unsubscribe from :param topic_id: topic_id to unsubscribe from
""" """
logger.debug("unsubscribing from topic %s", topic_id) logger.debug("unsubscribing from topic %s", topic_id)
# Return if we already unsubscribed from the topic # Return if we already unsubscribed from the topic
@ -479,7 +525,6 @@ class Pubsub(Service, IPubsub):
:param raw_msg: raw contents of the message to broadcast :param raw_msg: raw contents of the message to broadcast
""" """
# Broadcast message # Broadcast message
for stream in self.peers.values(): for stream in self.peers.values():
# Write message to stream # Write message to stream
@ -571,7 +616,7 @@ class Pubsub(Service, IPubsub):
# TODO: Check if the `from` is in the blacklist. If yes, reject. # TODO: Check if the `from` is in the blacklist. If yes, reject.
# If the message was processed before, return (i.e., don't process the message further). # If the message was processed before, return (i.e., don't process the message further) # noqa: E501
if self._is_msg_seen(msg): if self._is_msg_seen(msg):
return return
@ -588,7 +633,7 @@ class Pubsub(Service, IPubsub):
await self.validate_msg(msg_forwarder, msg) await self.validate_msg(msg_forwarder, msg)
except ValidationError: except ValidationError:
logger.debug( logger.debug(
"Topic validation failed: sender %s sent data %s under topic IDs: %s %s:%s", "Topic validation failed: sender %s sent data %s under topic IDs: %s %s:%s", # noqa: E501
msg_forwarder, msg_forwarder,
msg.data.hex(), msg.data.hex(),
msg.topicIDs, msg.topicIDs,
@ -612,8 +657,8 @@ class Pubsub(Service, IPubsub):
def _mark_msg_seen(self, msg: rpc_pb2.Message) -> None: def _mark_msg_seen(self, msg: rpc_pb2.Message) -> None:
msg_id = self._msg_id_constructor(msg) msg_id = self._msg_id_constructor(msg)
# FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate if there is a # FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate if there
# more appropriate way. # is a more appropriate way.
self.seen_messages[msg_id] = 1 self.seen_messages[msg_id] = 1
def _is_subscribed_to_msg(self, msg: rpc_pb2.Message) -> bool: def _is_subscribed_to_msg(self, msg: rpc_pb2.Message) -> bool:
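A small sketch of the seen-message bookkeeping touched here, keyed by (seqno, from) as the surrounding comments describe; a plain dict stands in for the LRU mapping the real class uses.

from typing import Dict, Tuple

seen_messages: Dict[Tuple[bytes, bytes], int] = {}

def is_msg_seen(seqno: bytes, from_id: bytes) -> bool:
    return (seqno, from_id) in seen_messages

def mark_msg_seen(seqno: bytes, from_id: bytes) -> None:
    # The stored value is unused; the mapping acts as a set (with eviction,
    # in the real implementation).
    seen_messages[(seqno, from_id)] = 1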

View File

@ -1,19 +1,30 @@
from typing import TYPE_CHECKING from typing import (
TYPE_CHECKING,
)
from multiaddr import Multiaddr from multiaddr import (
Multiaddr,
)
import trio import trio
from libp2p.network.connection.net_connection_interface import INetConn from libp2p.network.connection.net_connection_interface import (
from libp2p.network.network_interface import INetwork INetConn,
from libp2p.network.notifee_interface import INotifee )
from libp2p.network.stream.net_stream_interface import INetStream from libp2p.network.network_interface import (
INetwork,
)
from libp2p.network.notifee_interface import (
INotifee,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from libp2p.peer.id import ID # noqa: F401 from libp2p.peer.id import ID # noqa: F401
class PubsubNotifee(INotifee): class PubsubNotifee(INotifee):
initiator_peers_queue: "trio.MemorySendChannel[ID]" initiator_peers_queue: "trio.MemorySendChannel[ID]"
dead_peers_queue: "trio.MemorySendChannel[ID]" dead_peers_queue: "trio.MemorySendChannel[ID]"

View File

@ -1,11 +1,23 @@
from types import TracebackType from types import (
from typing import AsyncIterator, Optional, Type TracebackType,
)
from typing import (
AsyncIterator,
Optional,
Type,
)
import trio import trio
from .abc import ISubscriptionAPI from .abc import (
from .pb import rpc_pb2 ISubscriptionAPI,
from .typing import UnsubscribeFn )
from .pb import (
rpc_pb2,
)
from .typing import (
UnsubscribeFn,
)
class BaseSubscriptionAPI(ISubscriptionAPI): class BaseSubscriptionAPI(ISubscriptionAPI):
@ -32,11 +44,11 @@ class TrioSubscriptionAPI(BaseSubscriptionAPI):
unsubscribe_fn: UnsubscribeFn, unsubscribe_fn: UnsubscribeFn,
) -> None: ) -> None:
self.receive_channel = receive_channel self.receive_channel = receive_channel
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # noqa: E501
self.unsubscribe_fn = unsubscribe_fn # type: ignore self.unsubscribe_fn = unsubscribe_fn # type: ignore
async def unsubscribe(self) -> None: async def unsubscribe(self) -> None:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # noqa: E501
await self.unsubscribe_fn() # type: ignore await self.unsubscribe_fn() # type: ignore
def __aiter__(self) -> AsyncIterator[rpc_pb2.Message]: def __aiter__(self) -> AsyncIterator[rpc_pb2.Message]:
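A usage-level sketch of the pattern this subscription API wraps, using only trio memory channels and the async-iteration behaviour visible in this hunk; the payloads are plain strings rather than rpc_pb2 messages.

import trio

async def main() -> None:
    send, receive = trio.open_memory_channel(32)

    async def producer() -> None:
        async with send:
            for i in range(3):
                await send.send(f"message-{i}")

    async with trio.open_nursery() as nursery:
        nursery.start_soon(producer)
        # A TrioSubscriptionAPI wraps a receive channel like this one and
        # exposes the same `async for` interface to consumers.
        async for msg in receive:
            print(msg)

trio.run(main)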

View File

@ -1,8 +1,16 @@
from typing import Awaitable, Callable, Union from typing import (
Awaitable,
Callable,
Union,
)
from libp2p.peer.id import ID from libp2p.peer.id import (
ID,
)
from .pb import rpc_pb2 from .pb import (
rpc_pb2,
)
SyncValidatorFn = Callable[[ID, rpc_pb2.Message], bool] SyncValidatorFn = Callable[[ID, rpc_pb2.Message], bool]
AsyncValidatorFn = Callable[[ID, rpc_pb2.Message], Awaitable[bool]] AsyncValidatorFn = Callable[[ID, rpc_pb2.Message], Awaitable[bool]]
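For reference, concrete functions matching these two aliases; the rpc_pb2 import path is assumed (the module itself only uses the relative `.pb` import), and the validation logic is a placeholder.

from libp2p.peer.id import ID
from libp2p.pubsub.pb import rpc_pb2  # assumed absolute path for the `.pb` package

def reject_empty(sender: ID, msg: rpc_pb2.Message) -> bool:
    # Matches SyncValidatorFn: a plain function returning a bool.
    return len(msg.data) > 0

async def reject_empty_async(sender: ID, msg: rpc_pb2.Message) -> bool:
    # Matches AsyncValidatorFn: a coroutine function returning a bool.
    return len(msg.data) > 0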

View File

@ -1,9 +1,15 @@
import logging import logging
from libp2p.crypto.serialization import deserialize_public_key from libp2p.crypto.serialization import (
from libp2p.peer.id import ID deserialize_public_key,
)
from libp2p.peer.id import (
ID,
)
from .pb import rpc_pb2 from .pb import (
rpc_pb2,
)
logger = logging.getLogger("libp2p.pubsub") logger = logging.getLogger("libp2p.pubsub")

View File

@ -1,8 +1,17 @@
from abc import ABC, abstractmethod from abc import (
from typing import Iterable ABC,
abstractmethod,
)
from typing import (
Iterable,
)
from libp2p.peer.id import ID from libp2p.peer.id import (
from libp2p.peer.peerinfo import PeerInfo ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
class IContentRouting(ABC): class IContentRouting(ABC):
@ -18,12 +27,16 @@ class IContentRouting(ABC):
@abstractmethod @abstractmethod
def find_provider_iter(self, cid: bytes, count: int) -> Iterable[PeerInfo]: def find_provider_iter(self, cid: bytes, count: int) -> Iterable[PeerInfo]:
"""Search for peers who are able to provide a given key returns an """
iterator of peer.PeerInfo.""" Search for peers who are able to provide a given key returns an
iterator of peer.PeerInfo.
"""
class IPeerRouting(ABC): class IPeerRouting(ABC):
@abstractmethod @abstractmethod
async def find_peer(self, peer_id: ID) -> PeerInfo: async def find_peer(self, peer_id: ID) -> PeerInfo:
"""Find specific Peer FindPeer searches for a peer with given peer_id, """
returns a peer.PeerInfo with relevant addresses.""" Find specific Peer FindPeer searches for a peer with given peer_id,
returns a peer.PeerInfo with relevant addresses.
"""

View File

@ -1,13 +1,24 @@
from typing import Optional from typing import (
Optional,
)
from libp2p.crypto.keys import PrivateKey, PublicKey from libp2p.crypto.keys import (
from libp2p.peer.id import ID PrivateKey,
from libp2p.security.secure_conn_interface import ISecureConn PublicKey,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
class BaseSession(ISecureConn): class BaseSession(ISecureConn):
"""``BaseSession`` is not fully instantiated from its abstract classes as """
it is only meant to be used in clases that derive from it.""" ``BaseSession`` is not fully instantiated from its abstract classes as
it is only meant to be used in classes that derive from it.""" ``BaseSession`` is not fully instantiated from its abstract classes as
it is only meant to be used in classes that derive from it.
local_peer: ID local_peer: ID
local_private_key: PrivateKey local_private_key: PrivateKey

View File

@ -1,9 +1,17 @@
import secrets import secrets
from typing import Callable from typing import (
Callable,
)
from libp2p.crypto.keys import KeyPair from libp2p.crypto.keys import (
from libp2p.peer.id import ID KeyPair,
from libp2p.security.secure_transport_interface import ISecureTransport )
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_transport_interface import (
ISecureTransport,
)
def default_secure_bytes_provider(n: int) -> bytes: def default_secure_bytes_provider(n: int) -> bytes:

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError from libp2p.exceptions import (
BaseLibp2pError,
)
class HandshakeFailure(BaseLibp2pError): class HandshakeFailure(BaseLibp2pError):

View File

@@ -2,12 +2,13 @@
 # source: libp2p/security/insecure/pb/plaintext.proto
[libp2p/security/insecure/pb/plaintext_pb2.py, generated code reformatted only:
the Python 2/3 byte helper is reflowed to
`_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))`
and the unused `from google.protobuf import descriptor_pb2` import is dropped.]

@@ -17,62 +18,89 @@ from libp2p.crypto.pb import crypto_pb2 as libp2p_dot_crypto_dot_pb_dot_crypto__pb2
[String literals are normalized to double quotes, the FileDescriptor for
plaintext.proto and the Exchange message descriptor (fields `id`: bytes and
`pubkey`: crypto.pb.PublicKey) are rewrapped with one keyword argument per
line, and the `GeneratedProtocolMessageType("Exchange", ...)` call is split
across lines; the registrations `_sym_db.RegisterFileDescriptor(DESCRIPTOR)`
and `_sym_db.RegisterMessage(Exchange)` are unchanged. No functional change.]
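Aside: the `_b` helper quoted above is the generated modules' Python 2/3 shim; on Python 3 it latin-1-encodes the serialized descriptor string so every code point 0-255 maps to the same byte value. A standalone check, runnable as-is:

import sys

_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))

blob = _b("\n+libp2p/security/insecure/pb/plaintext.proto")
print(type(blob), blob[:4])  # <class 'bytes'> b'\n+li'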

@@ -20,26 +20,35 @@ from typing_extensions import (
     Literal as typing_extensions___Literal,
 )

[Generated type stub for the Exchange message, reformatted only: the field
declarations (`id: bytes`, `pubkey: PublicKey`) and the
FromString/MergeFrom/CopyFrom signatures are unchanged, while `__init__`,
`HasField`, and `ClearField` are rewrapped with one parameter per line and the
`u"..."` entries in the Literal[...] overloads become plain `"..."` strings
(the `b"id"`/`b"pubkey"` variants stay in the pre-Python-3 branch).]

@@ -1,21 +1,52 @@
[Import block reflowed only: the fourteen single-line imports
(MissingDeserializerError; PrivateKey and PublicKey; crypto_pb2;
deserialize_public_key; ReadWriteCloser; VarIntLengthMsgReadWriter;
RawConnError; IRawConnection; ID; BaseSession; BaseSecureTransport;
HandshakeFailure; ISecureConn; TProtocol) and `from .pb import plaintext_pb2`
become parenthesized, one-name-per-line imports.]

-# Reference: https://github.com/libp2p/go-libp2p-core/blob/master/sec/insecure/insecure.go
+# Reference: https://github.com/libp2p/go-libp2p-core/blob/master/sec/insecure/insecure.go  # noqa: E501

 PLAINTEXT_PROTOCOL_ID = TProtocol("/plaintext/2.0.0")
@@ -120,9 +151,10 @@ async def run_handshake(

 class InsecureTransport(BaseSecureTransport):
-    """``InsecureTransport`` provides the "identity" upgrader for a
-    ``IRawConnection``, i.e. the upgraded transport does not add any additional
-    security."""
+    """
+    Provides the "identity" upgrader for a ``IRawConnection``, i.e. the upgraded
+    transport does not add any additional security.
+    """

     async def secure_inbound(self, conn: IRawConnection) -> ISecureConn:
         """

@@ -1,4 +1,6 @@
-from libp2p.security.exceptions import HandshakeFailure
+from libp2p.security.exceptions import (
+    HandshakeFailure,
+)


 class NoiseFailure(HandshakeFailure):

@@ -14,8 +16,10 @@ class InvalidSignature(NoiseFailure):

 class NoiseStateError(NoiseFailure):
-    """Raised when anything goes wrong in the noise state in `noiseprotocol`
-    package."""
+    """
+    Raised when anything goes wrong in the noise state in `noiseprotocol`
+    package.
+    """


 class PeerIDMismatchesPubkey(NoiseFailure):
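Aside: the exception layering shown here and in libp2p/security/exceptions.py roots both Noise- and secio-specific failures under a common HandshakeFailure (itself a BaseLibp2pError), so callers that only care that some security handshake failed can catch the base class. A self-contained sketch of that shape; the class bodies are stand-ins, not the real definitions:

class BaseLibp2pError(Exception):
    pass


class HandshakeFailure(BaseLibp2pError):
    pass


class NoiseFailure(HandshakeFailure):
    pass


class SecioException(HandshakeFailure):
    pass


def negotiate(kind: str) -> None:
    if kind == "noise":
        raise NoiseFailure("bad msg#3")
    raise SecioException("bad propose")


for kind in ("noise", "secio"):
    try:
        negotiate(kind)
    except HandshakeFailure as err:
        print(f"{kind} handshake failed: {err}")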

@@ -1,10 +1,20 @@
-from typing import cast
+from typing import (
+    cast,
+)

 from noise.connection import NoiseConnection as NoiseState

-from libp2p.io.abc import EncryptedMsgReadWriter, MsgReadWriteCloser, ReadWriteCloser
-from libp2p.io.msgio import FixedSizeLenMsgReadWriter
-from libp2p.network.connection.raw_connection_interface import IRawConnection
+from libp2p.io.abc import (
+    EncryptedMsgReadWriter,
+    MsgReadWriteCloser,
+    ReadWriteCloser,
+)
+from libp2p.io.msgio import (
+    FixedSizeLenMsgReadWriter,
+)
+from libp2p.network.connection.raw_connection_interface import (
+    IRawConnection,
+)

 SIZE_NOISE_MESSAGE_LEN = 2
 MAX_NOISE_MESSAGE_LEN = 2 ** (8 * SIZE_NOISE_MESSAGE_LEN) - 1
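Aside: with a 2-byte length prefix the largest noise message is 2 ** (8 * 2) - 1 = 65535 bytes. The sketch below works that arithmetic and shows a fixed-size length prefix of the kind FixedSizeLenMsgReadWriter implements, assuming the usual big-endian framing; it is an illustration, not the module's code.

import struct

SIZE_NOISE_MESSAGE_LEN = 2
MAX_NOISE_MESSAGE_LEN = 2 ** (8 * SIZE_NOISE_MESSAGE_LEN) - 1
assert MAX_NOISE_MESSAGE_LEN == 65535


def encode_msg(payload: bytes) -> bytes:
    if len(payload) > MAX_NOISE_MESSAGE_LEN:
        raise ValueError("noise message too large for a 2-byte length prefix")
    return struct.pack(">H", len(payload)) + payload


def decode_msg(frame: bytes) -> bytes:
    (length,) = struct.unpack(">H", frame[:SIZE_NOISE_MESSAGE_LEN])
    return frame[SIZE_NOISE_MESSAGE_LEN:SIZE_NOISE_MESSAGE_LEN + length]


print(decode_msg(encode_msg(b"hello noise")))  # b'hello noise'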

@@ -1,7 +1,14 @@
-from dataclasses import dataclass
+from dataclasses import (
+    dataclass,
+)

-from libp2p.crypto.keys import PrivateKey, PublicKey
-from libp2p.crypto.serialization import deserialize_public_key
+from libp2p.crypto.keys import (
+    PrivateKey,
+    PublicKey,
+)
+from libp2p.crypto.serialization import (
+    deserialize_public_key,
+)

 from .pb import noise_pb2 as noise_pb

@@ -1,16 +1,34 @@
[Import block reflowed only: ABC/abstractmethod, the cryptography
`serialization` module, the noise-package imports (NoiseKeyPair,
NoiseKeypairEnum, NoiseState) and the libp2p imports (Ed25519PublicKey;
PrivateKey and PublicKey; IRawConnection; ID; ISecureConn; SecureSession)
become parenthesized, one-name-per-line imports; the `as` aliases are
unchanged.]

 from .exceptions import (
     HandshakeHasNotFinished,
@@ -18,7 +36,10 @@ from .exceptions import (
     NoiseStateError,
     PeerIDMismatchesPubkey,
 )
-from .io import NoiseHandshakeReadWriter, NoiseTransportReadWriter
+from .io import (
+    NoiseHandshakeReadWriter,
+    NoiseTransportReadWriter,
+)
 from .messages import (
     NoiseHandshakePayload,
     make_handshake_payload_sig,
@@ -95,8 +116,8 @@ class PatternXX(BasePattern):
         if handshake_state.rs is None:
             raise NoiseStateError(
                 "something is wrong in the underlying noise `handshake_state`: "
-                "we received and consumed msg#3, which should have included the"
-                " remote static public key, but it is not present in the handshake_state"
+                "we received and consumed msg#3, which should have included the "
+                "remote static public key, but it is not present in the handshake_state"
             )
         remote_pubkey = self._get_pubkey_from_noise_keypair(handshake_state.rs)

@@ -139,8 +160,8 @@ class PatternXX(BasePattern):
         if handshake_state.rs is None:
             raise NoiseStateError(
                 "something is wrong in the underlying noise `handshake_state`: "
-                "we received and consumed msg#3, which should have included the"
-                " remote static public key, but it is not present in the handshake_state"
+                "we received and consumed msg#3, which should have included the "
+                "remote static public key, but it is not present in the handshake_state"
            )
         remote_pubkey = self._get_pubkey_from_noise_keypair(handshake_state.rs)
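Aside: the change above only moves a space between two adjacent string literals; Python's implicit string concatenation joins them into the same error message either way, it just reads more naturally when the trailing space sits at the end of the first literal. A quick check:

before = (
    "we received and consumed msg#3, which should have included the"
    " remote static public key, but it is not present in the handshake_state"
)
after = (
    "we received and consumed msg#3, which should have included the "
    "remote static public key, but it is not present in the handshake_state"
)
assert before == after
print(after)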

@@ -2,81 +2,114 @@
 # source: libp2p/security/noise/pb/noise.proto
[libp2p/security/noise/pb/noise_pb2.py, generated code reformatted only: the
`_b` byte helper is reflowed, the unused `from google.protobuf import
descriptor_pb2` import is dropped, string literals are normalized to double
quotes, and the FileDescriptor plus the NoiseHandshakePayload message
descriptor (fields `identity_key`, `identity_sig`, `data`, all bytes) are
rewrapped with one keyword argument per line; the
`GeneratedProtocolMessageType("NoiseHandshakePayload", ...)` call is split
across lines and the registration calls are unchanged. No functional change.]

@@ -16,24 +16,39 @@ from typing_extensions import (
     Literal as typing_extensions___Literal,
 )

[Generated type stub for NoiseHandshakePayload, reformatted only: the bytes
fields `identity_key`, `identity_sig`, and `data` and the
FromString/MergeFrom/CopyFrom signatures are unchanged, while `__init__` and
`ClearField` are rewrapped with one parameter per line and the `u"..."`
literals in the Literal[...] overloads become plain `"..."` strings.]
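Aside: assuming py-libp2p is installed, the message described by this stub can be built and round-tripped like any protobuf message; the field names below are the three declared above, and the byte values are placeholders.

from libp2p.security.noise.pb import noise_pb2

payload = noise_pb2.NoiseHandshakePayload(
    identity_key=b"\x01" * 32,   # serialized libp2p public key (placeholder bytes)
    identity_sig=b"\x02" * 64,   # signature over the noise static key (placeholder)
)
wire = payload.SerializeToString()
decoded = noise_pb2.NoiseHandshakePayload.FromString(wire)
assert decoded.identity_key == payload.identity_key
print(len(wire), "bytes on the wire")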

@@ -1,11 +1,27 @@
-from libp2p.crypto.keys import KeyPair, PrivateKey
-from libp2p.network.connection.raw_connection_interface import IRawConnection
-from libp2p.peer.id import ID
-from libp2p.security.secure_conn_interface import ISecureConn
-from libp2p.security.secure_transport_interface import ISecureTransport
-from libp2p.typing import TProtocol
+from libp2p.crypto.keys import (
+    KeyPair,
+    PrivateKey,
+)
+from libp2p.network.connection.raw_connection_interface import (
+    IRawConnection,
+)
+from libp2p.peer.id import (
+    ID,
+)
+from libp2p.security.secure_conn_interface import (
+    ISecureConn,
+)
+from libp2p.security.secure_transport_interface import (
+    ISecureTransport,
+)
+from libp2p.typing import (
+    TProtocol,
+)

-from .patterns import IPattern, PatternXX
+from .patterns import (
+    IPattern,
+    PatternXX,
+)

 PROTOCOL_ID = TProtocol("/noise")

@@ -1,4 +1,6 @@
-from libp2p.security.exceptions import HandshakeFailure
+from libp2p.security.exceptions import (
+    HandshakeFailure,
+)


 class SecioException(HandshakeFailure):

@@ -6,10 +8,10 @@ class SecioException(HandshakeFailure):

 class SelfEncryption(SecioException):
-    """Raised to indicate that a host is attempting to encrypt communications
-    with itself."""
-
-    pass
+    """
+    Raised to indicate that a host is attempting to encrypt communications
+    with itself.
+    """


 class PeerMismatchException(SecioException):

@@ -13,4 +13,4 @@ message Propose {
 message Exchange {
   optional bytes ephemeral_public_key = 1;
   optional bytes signature = 2;
-}
\ No newline at end of file
+}
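Aside: assuming py-libp2p is installed, the two secio wire messages defined in this .proto can be constructed through the generated module referenced below (libp2p.security.secio.pb.spipe_pb2); the field names match the message definitions, and all values here are placeholders.

import secrets

from libp2p.security.secio.pb import spipe_pb2

propose = spipe_pb2.Propose(
    rand=secrets.token_bytes(16),
    public_key=b"\x01" * 32,
    exchanges="P-256,P-384",
    ciphers="AES-128,AES-256",
    hashes="SHA256,SHA512",
)
exchange = spipe_pb2.Exchange(
    ephemeral_public_key=b"\x02" * 65,
    signature=b"\x03" * 64,
)
print(len(propose.SerializeToString()), len(exchange.SerializeToString()))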

@@ -1,143 +1,221 @@
-# -*- coding: utf-8 -*-
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: libp2p/security/secio/pb/spipe.proto
[libp2p/security/secio/pb/spipe_pb2.py, generated code reformatted only: the
coding cookie above is dropped, the `_b` byte helper is reflowed, string
literals are normalized to double quotes, and the FileDescriptor plus the
Propose descriptor (fields `rand`, `public_key`, `exchanges`, `ciphers`,
`hashes`) and the Exchange descriptor (fields `ephemeral_public_key`,
`signature`) are rewrapped with one keyword argument per line; the
`GeneratedProtocolMessageType` calls for Propose and Exchange keep their dict
arguments but are split across lines with double-quoted keys. The
registration calls are unchanged. No functional change.]

@@ -17,51 +17,116 @@ from typing_extensions import (
     Literal as typing_extensions___Literal,
 )

[Generated type stubs for Propose (fields `rand`, `public_key`: bytes;
`exchanges`, `ciphers`, `hashes`: Text) and Exchange (fields
`ephemeral_public_key`, `signature`: bytes), reformatted only: `__init__`,
`HasField`, and `ClearField` are rewrapped with one parameter per line and the
`u"..."` literals in the Literal[...] overloads become plain `"..."` strings;
FromString/MergeFrom/CopyFrom are unchanged.]

@@ -1,31 +1,66 @@
[Import block reflowed only: dataclass, itertools, Optional/Tuple, multihash,
the authenticated-encryption imports (with `InvalidMACException` regrouped next
to the other `libp2p.crypto.authenticated_encryption` imports), ECCPublicKey,
MissingDeserializerError, create_ephemeral_key_pair, PrivateKey and PublicKey,
deserialize_public_key, EncryptedMsgReadWriter, DecryptionFailedException and
IOException, FixedSizeLenMsgReadWriter, IRawConnection, PeerID,
BaseSecureTransport, ISecureConn, SecureSession, and TProtocol become
parenthesized, one-name-per-line imports; the `as` aliases
(AuthenticatedEncryptionParameters, initialize_pair_for_encryption, Encrypter,
PeerID) are unchanged.]

 from .exceptions import (
     IncompatibleChoices,
@@ -36,7 +71,10 @@ from .exceptions import (
     SedesException,
     SelfEncryption,
 )
-from .pb.spipe_pb2 import Exchange, Propose
+from .pb.spipe_pb2 import (
+    Exchange,
+    Propose,
+)

 ID = TProtocol("/secio/1.0.0")
@@ -101,8 +139,10 @@ class SecioMsgReadWriter(EncryptedMsgReadWriter):

 @dataclass(frozen=True)
 class Proposal:
-    """A ``Proposal`` represents the set of session parameters one peer in a
-    pair of peers attempting to negotiate a `secio` channel prefers."""
+    """
+    Represents the set of session parameters one peer in a
+    pair of peers attempting to negotiate a `secio` channel prefers.
+    """

     nonce: bytes
     public_key: PublicKey
@@ -401,8 +441,10 @@ async def create_secure_session(

 class Transport(BaseSecureTransport):
-    """``Transport`` provides a security upgrader for a ``IRawConnection``,
-    following the `secio` protocol defined in the libp2p specs."""
+    """
+    Provide a security upgrader for a ``IRawConnection``,
+    following the `secio` protocol defined in the libp2p specs.
+    """

     def get_nonce(self) -> bytes:
         return self.secure_bytes_provider(NONCE_SIZE)
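Aside: a secio ``Proposal`` pairs a fresh nonce with comma-separated preference lists, and the two peers settle on a common entry from each list. The self-contained sketch below shows only that role; it simplifies the real negotiation (secio decides whose ordering wins by hashing public keys and nonces), and DemoProposal/select_common are illustrative names, not py-libp2p code.

from dataclasses import dataclass


@dataclass(frozen=True)
class DemoProposal:                      # mirrors the Propose fields used here
    nonce: bytes
    ciphers: str
    hashes: str


def select_common(preferred: str, other: str) -> str:
    theirs = other.split(",")
    for choice in preferred.split(","):
        if choice in theirs:
            return choice
    raise ValueError("no mutually supported option")


local = DemoProposal(b"\x01" * 16, "AES-256,AES-128", "SHA256,SHA512")
remote = DemoProposal(b"\x02" * 16, "AES-128,Blowfish", "SHA512")
print(select_common(local.ciphers, remote.ciphers))  # AES-128
print(select_common(local.hashes, remote.hashes))    # SHA512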
