Merge pull request #447 from pacrob/upgrade-template

Upgrade template
This commit is contained in:
Paul Robinson
2024-03-02 12:14:52 -07:00
committed by GitHub
194 changed files with 4109 additions and 3268 deletions

View File

@ -20,4 +20,3 @@ values =
[bumpversion:file:setup.py]
search = version="{current_version}",
replace = version="{new_version}",

View File

@ -1,4 +1,4 @@
version: 2.0
version: 2.1
# heavily inspired by https://raw.githubusercontent.com/pinax/pinax-wiki/6bd2a99ab6f702e300d708532a6d1d9aa638b9f8/.circleci/config.yml
@ -14,64 +14,285 @@ common: &common
command: ./.circleci/merge_pr.sh
when: on_fail
- run:
name: merge pull request base (3nd try)
name: merge pull request base (3rd try)
command: ./.circleci/merge_pr.sh
when: on_fail
- restore_cache:
keys:
- cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
- cache-v1-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
- run:
name: install dependencies
command: pip install --user tox
command: |
python -m pip install --upgrade pip
python -m pip install tox
- run:
name: run tox
command: ~/.local/bin/tox -r
command: python -m tox run -r
- save_cache:
paths:
- .hypothesis
- .tox
- ~/.cache/pip
- ~/.local
- ./eggs
key: cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
key: cache-v1-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
orbs:
win: circleci/windows@5.0.0
windows-wheel-steps:
windows-wheel-setup: &windows-wheel-setup
executor:
name: win/default
shell: bash.exe
working_directory: C:\Users\circleci\project\py-libp2p
environment:
TOXENV: windows-wheel
restore-cache-step: &restore-cache-step
restore_cache:
keys:
- cache-v1-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
install-pyenv-step: &install-pyenv-step
run:
name: install pyenv
command: |
pip install pyenv-win --target $HOME/.pyenv
echo 'export PYENV="$HOME/.pyenv/pyenv-win/"' >> $BASH_ENV
echo 'export PYENV_ROOT="$HOME/.pyenv/pyenv-win/"' >> $BASH_ENV
echo 'export PYENV_USERPROFILE="$HOME/.pyenv/pyenv-win/"' >> $BASH_ENV
echo 'export PATH="$PATH:$HOME/.pyenv/pyenv-win/bin"' >> $BASH_ENV
echo 'export PATH="$PATH:$HOME/.pyenv/pyenv-win/shims"' >> $BASH_ENV
source $BASH_ENV
pyenv update
install-latest-python-step: &install-latest-python-step
run:
name: install latest python version and tox
command: |
LATEST_VERSION=$(pyenv install --list | grep -E "${MINOR_VERSION}\.[0-9]+$" | tail -1)
echo "installing python version $LATEST_VERSION"
pyenv install $LATEST_VERSION
pyenv global $LATEST_VERSION
python3 -m pip install --upgrade pip
python3 -m pip install tox
run-tox-step: &run-tox-step
run:
name: run tox
command: |
echo 'running tox with' $(python3 --version)
python3 -m tox run -r
save-cache-step: &save-cache-step
save_cache:
paths:
- .tox
key: cache-v1-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
docs: &docs
docker:
- image: common
steps:
- run:
name: install latexpdf dependencies
command: |
sudo apt-get update
sudo apt-get install latexmk tex-gyre texlive-fonts-extra
interop: &interop
docker:
- image: common
steps:
- run:
name: install interop test dependencies
command: |
sudo apt-get update
sudo apt-get install latexmk tex-gyre texlive-fonts-extra
wget https://dl.google.com/go/$GOBINPKG
sudo tar -C /usr/local -xzf $GOBINPKG
export GOPATH=$HOME/go
export GOROOT=/usr/local/go
export PATH=$GOROOT/bin:$GOPATH/bin:$PATH
./tests_interop/go_pkgs/install_interop_go_pkgs.sh
jobs:
docs:
<<: *common
<<: *docs
docker:
- image: circleci/python:3.6
- image: cimg/python:3.8
environment:
TOXENV: docs
lint:
py38-core:
<<: *common
docker:
- image: circleci/python:3.6
- image: cimg/python:3.8
environment:
TOXENV: lint
py36-core:
TOXENV: py38-core
py39-core:
<<: *common
docker:
- image: circleci/python:3.6
- image: cimg/python:3.9
environment:
TOXENV: py36-core
py37-core:
TOXENV: py39-core
py310-core:
<<: *common
docker:
- image: circleci/python:3.7
- image: cimg/python:3.10
environment:
TOXENV: py37-core
pypy3-core:
TOXENV: py310-core
py311-core:
<<: *common
docker:
- image: pypy
- image: cimg/python:3.11
environment:
TOXENV: pypy3-core
TOXENV: py311-core
py312-core:
<<: *common
docker:
- image: cimg/python:3.12
environment:
TOXENV: py312-core
py38-lint:
<<: *common
docker:
- image: cimg/python:3.8
environment:
TOXENV: py38-lint
py39-lint:
<<: *common
docker:
- image: cimg/python:3.9
environment:
TOXENV: py39-lint
py310-lint:
<<: *common
docker:
- image: cimg/python:3.10
environment:
TOXENV: py310-lint
py311-lint:
<<: *common
docker:
- image: cimg/python:3.11
environment:
TOXENV: py311-lint
py312-lint:
<<: *common
docker:
- image: cimg/python:3.12
environment:
TOXENV: py312-lint
py38-wheel:
<<: *common
docker:
- image: cimg/python:3.8
environment:
TOXENV: py38-wheel
py39-wheel:
<<: *common
docker:
- image: cimg/python:3.9
environment:
TOXENV: py39-wheel
py310-wheel:
<<: *common
docker:
- image: cimg/python:3.10
environment:
TOXENV: py310-wheel
py311-wheel:
<<: *common
docker:
- image: cimg/python:3.11
environment:
TOXENV: py311-wheel
py312-wheel:
<<: *common
docker:
- image: cimg/python:3.12
environment:
TOXENV: py312-wheel
py311-windows-wheel:
<<: *windows-wheel-setup
steps:
- checkout
- <<: *restore-cache-step
- <<: *install-pyenv-step
- run:
name: set minor version
command: echo "export MINOR_VERSION='3.11'" >> $BASH_ENV
- <<: *install-latest-python-step
- <<: *run-tox-step
- <<: *save-cache-step
py312-windows-wheel:
<<: *windows-wheel-setup
steps:
- checkout
- <<: *restore-cache-step
- <<: *install-pyenv-step
- run:
name: set minor version
command: echo "export MINOR_VERSION='3.12'" >> $BASH_ENV
- <<: *install-latest-python-step
- <<: *run-tox-step
- <<: *save-cache-step
py38-interop:
<<: *interop
docker:
- image: cimg/python:3.8
environment:
TOXENV: py38-interop
py39-interop:
<<: *interop
docker:
- image: cimg/python:3.9
environment:
TOXENV: py39-interop
py310-interop:
<<: *interop
docker:
- image: cimg/python:3.10
environment:
TOXENV: py310-interop
py311-interop:
<<: *interop
docker:
- image: cimg/python:3.11
environment:
TOXENV: py311-interop
py312-interop:
<<: *interop
docker:
- image: cimg/python:3.12
environment:
TOXENV: py312-interop
workflows:
version: 2
test:
jobs:
- docs
- lint
- py36-core
- py37-core
- pypy3-core
- py38-core
- py39-core
- py310-core
- py311-core
- py312-core
- py38-lint
- py39-lint
- py310-lint
- py311-lint
- py312-lint
- py38-wheel
- py39-wheel
- py310-wheel
- py311-wheel
- py312-wheel
# - py311-windows-wheel
# - py312-windows-wheel
# - py38-interop
# - py39-interop
# - py310-interop
# - py311-interop
# - py312-interop

View File

@ -15,13 +15,13 @@ body:
- type: textarea
attributes:
label: Expected behavior
description: Describe what you expect to happen.
description: Describe what you expect to happen.
validations:
required: true
- type: textarea
attributes:
label: Actual behavior
description: Describe what actually happens.
description: Describe what actually happens.
validations:
required: true
- type: textarea
@ -34,13 +34,13 @@ body:
- type: textarea
attributes:
label: Possible Solution
description: Suggest a fix/reason for the bug, or ideas how to implement the addition or change.
description: Suggest a fix/reason for the bug, or ideas how to implement the addition or change.
validations:
required: false
- type: textarea
attributes:
label: Environment
description: Run this: `$ python -m eth_utils` and put the results here.
description: Run `$ python -m eth_utils` and put the results here.
render: shell
validations:
required: false

View File

@ -4,7 +4,7 @@ body:
- type: textarea
attributes:
label: Description
description: Describe the enhancement that you are proposing.
description: Describe the enhancement that you are proposing.
validations:
required: true
- type: textarea

View File

@ -21,7 +21,7 @@ body:
attributes:
label: Requirements
description: Write a list of what you want this feature to do.
placeholder: "1."
placeholder: "1."
validations:
required: true
- type: textarea

View File

@ -8,14 +8,10 @@ Summary of approach.
### To-Do
[//]: # (Stay ahead of things, add list items here!)
- [ ] Clean up commit history
[//]: # (For important changes that should go into the release notes please add a newsfragment file as explained here: https://github.com/libp2p/py-libp2p/blob/master/newsfragments/README.md)
[//]: # (See: https://py-libp2p.readthedocs.io/en/latest/contributing.html#pull-requests)
- [ ] Add entry to the [release notes](https://github.com/libp2p/py-libp2p/blob/master/newsfragments/README.md)
* [ ] Add entry to the [release notes](https://github.com/libp2p/py-libp2p/blob/main/newsfragments/README.md)
#### Cute Animal Picture
![put a cute animal picture link inside the parentheses]()
![put a cute animal picture link inside the parentheses](<>)

58
.gitignore vendored
View File

@ -11,6 +11,7 @@ __pycache__/
*.egg-info
dist
build
.build
eggs
.eggs
parts
@ -21,7 +22,9 @@ develop-eggs
.installed.cfg
lib
lib64
pip-wheel-metadata
venv*
.venv*
.Python
downloads/
wheels/
@ -61,15 +64,37 @@ docs/modules.rst
docs/*.internal.rst
docs/*._utils.*
# Hypothese Property base testing
# Blockchain
chains
# Hypothesis Property base testing
.hypothesis
# tox/pytest cache
.cache
.pytest_cache
# pycache
__pycache__/
# Test output logs
logs
# VIM temp files
*.sw[op]
# mypy
.mypy_cache
# macOS
.DS_Store
# pyenv
.python-version
# vs-code
.vscode
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
@ -96,32 +121,33 @@ logs
# Mongo Explorer plugin:
.idea/mongoSettings.xml
# VIM temp files
*.sw[op]
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# For a more precise, explicit template, see:
# https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# mypy
.mypy_cache
## General
.idea/*
.idea_modules/*
## File-based project format:
*.iws
## IntelliJ
out/
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
### JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
### Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# END JetBrains section
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
@ -146,9 +172,6 @@ target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
@ -171,6 +194,3 @@ env.bak/
# mkdocs documentation
/site
# vscode
.vscode/

52
.pre-commit-config.yaml Normal file
View File

@ -0,0 +1,52 @@
exclude: '.project-template|docs/conf.py|.bumpversion.cfg|.*pb2\..*'
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-yaml
- id: check-toml
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
hooks:
- id: pyupgrade
args: [--py38-plus]
- repo: https://github.com/psf/black
rev: 23.9.1
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
hooks:
- id: flake8
additional_dependencies:
- flake8-bugbear==23.9.16
exclude: setup.py
- repo: https://github.com/PyCQA/autoflake
rev: v2.2.1
hooks:
- id: autoflake
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/pycqa/pydocstyle
rev: 6.3.0
hooks:
- id: pydocstyle
additional_dependencies:
- tomli # required until >= python311
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.17
hooks:
- id: mdformat
additional_dependencies:
- mdformat-gfm
# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v1.5.1
# hooks:
# - id: mypy
# additional_dependencies:
# - mypy-protobuf
# exclude: 'tests/|tests_interop/|crypto/|identity/|pubsub/|insecure/|noise/|security/'

View File

@ -0,0 +1,71 @@
#!/usr/bin/env python3
import os
import sys
import re
from pathlib import Path
def _find_files(project_root):
path_exclude_pattern = r"\.git($|\/)|venv|_build"
file_exclude_pattern = r"fill_template_vars\.py|\.swp$"
filepaths = []
for dir_path, _dir_names, file_names in os.walk(project_root):
if not re.search(path_exclude_pattern, dir_path):
for file in file_names:
if not re.search(file_exclude_pattern, file):
filepaths.append(str(Path(dir_path, file)))
return filepaths
def _replace(pattern, replacement, project_root):
    """Substitute every regex match of ``pattern`` with ``replacement`` in
    each eligible file under ``project_root``.

    Files that cannot be decoded as text (binaries) are skipped silently.
    """
    print(f"Replacing values: {pattern}")
    for path in _find_files(project_root):
        try:
            with open(path) as src:
                original_text = src.read()
            updated_text = re.sub(pattern, replacement, original_text)
            with open(path, "w") as dst:
                dst.write(updated_text)
        except UnicodeDecodeError:
            # Binary file — nothing to substitute; move on.
            pass
def main():
    """Interactively collect project naming values and fill in the template.

    Prompts for the module, PyPI, GitHub, ReadTheDocs, and display names
    (each later prompt defaulting to the previous answer) plus a one-line
    description, rewrites every ``<PLACEHOLDER>`` token across the checkout,
    then scaffolds the module package directory.
    """
    # The script lives one directory below the repo root, so parent.parent
    # of its resolved path is the project root.
    project_root = Path(os.path.realpath(sys.argv[0])).parent.parent

    module_name = input("What is your python module name? ")

    # Each subsequent name falls back to the previous answer when the user
    # just presses Enter (empty string is falsy).
    pypi_input = input(f"What is your pypi package name? (default: {module_name}) ")
    pypi_name = pypi_input or module_name

    repo_input = input(f"What is your github project name? (default: {pypi_name}) ")
    repo_name = repo_input or pypi_name

    rtd_input = input(
        f"What is your readthedocs.org project name? (default: {pypi_name}) "
    )
    rtd_name = rtd_input or pypi_name

    project_input = input(
        f"What is your project name (ex: at the top of the README)? (default: {repo_name}) "
    )
    project_name = project_input or repo_name

    short_description = input("What is a one-liner describing the project? ")

    # Substitute each template placeholder throughout the checkout.
    _replace("<MODULE_NAME>", module_name, project_root)
    _replace("<PYPI_NAME>", pypi_name, project_root)
    _replace("<REPO_NAME>", repo_name, project_root)
    _replace("<RTD_NAME>", rtd_name, project_root)
    _replace("<PROJECT_NAME>", project_name, project_root)
    _replace("<SHORT_DESCRIPTION>", short_description, project_root)

    # Create the package skeleton: the module directory, an empty
    # __init__.py, and a py.typed marker (PEP 561).
    os.makedirs(project_root / module_name, exist_ok=True)
    Path(project_root / module_name / "__init__.py").touch()
    Path(project_root / module_name / "py.typed").touch()


if __name__ == "__main__":
    main()

View File

@ -1,48 +0,0 @@
#!/bin/bash
# Interactive template-variable filler (shell version): prompts for project
# naming values and rewrites <PLACEHOLDER> tokens across the checkout.

# Abort on any error, on use of an unset variable, and on pipeline failures.
set -o errexit
set -o nounset
set -o pipefail

# Repository root: grandparent of this script's fully-resolved path
# (python is used for a portable realpath).
PROJECT_ROOT=$(dirname $(dirname $(python -c 'import os, sys; sys.stdout.write(os.path.realpath(sys.argv[1]))' "$0")))

echo "What is your python module name?"
read MODULE_NAME

# Each later value defaults to the previous answer when left blank.
echo "What is your pypi package name? (default: $MODULE_NAME)"
read PYPI_INPUT
PYPI_NAME=${PYPI_INPUT:-$MODULE_NAME}

echo "What is your github project name? (default: $PYPI_NAME)"
read REPO_INPUT
REPO_NAME=${REPO_INPUT:-$PYPI_NAME}

echo "What is your readthedocs.org project name? (default: $PYPI_NAME)"
read RTD_INPUT
RTD_NAME=${RTD_INPUT:-$PYPI_NAME}

echo "What is your project name (ex: at the top of the README)? (default: $REPO_NAME)"
read PROJECT_INPUT
PROJECT_NAME=${PROJECT_INPUT:-$REPO_NAME}

echo "What is a one-liner describing the project?"
read SHORT_DESCRIPTION

# Run sed over every non-executable, non-.git file in the project.
# macOS (BSD) sed needs an explicit empty suffix after -i; GNU sed does not.
_replace() {
    local find_cmd=(find "$PROJECT_ROOT" ! -perm -u=x ! -path '*/.git/*' -type f)
    if [[ $(uname) == Darwin ]]; then
        "${find_cmd[@]}" -exec sed -i '' "$1" {} +
    else
        "${find_cmd[@]}" -exec sed -i "$1" {} +
    fi
}

# Substitute each template placeholder throughout the checkout.
_replace "s/<MODULE_NAME>/$MODULE_NAME/g"
_replace "s/<PYPI_NAME>/$PYPI_NAME/g"
_replace "s/<REPO_NAME>/$REPO_NAME/g"
_replace "s/<RTD_NAME>/$RTD_NAME/g"
_replace "s/<PROJECT_NAME>/$PROJECT_NAME/g"
_replace "s/<SHORT_DESCRIPTION>/$SHORT_DESCRIPTION/g"

# Scaffold the module package directory with an empty __init__.py.
mkdir -p "$PROJECT_ROOT/$MODULE_NAME"
touch "$PROJECT_ROOT/$MODULE_NAME/__init__.py"

View File

@ -0,0 +1,39 @@
#!/usr/bin/env python3
import os
import sys
from pathlib import Path
import subprocess
def main():
    """Run fill_template_vars.py non-interactively, feeding it the answers
    stored in template_vars.txt (one line per prompt).

    Prints the child's stdout on success; on a non-zero exit code, prints
    the child's stderr and exits with status 1.
    """
    # Both the answers file and the filler script live next to this script.
    template_dir = Path(os.path.dirname(sys.argv[0]))
    template_vars_file = template_dir / "template_vars.txt"
    fill_template_vars_script = template_dir / "fill_template_vars.py"

    with open(template_vars_file, "r") as input_file:
        answers = input_file.read()

    process = subprocess.Popen(
        [sys.executable, str(fill_template_vars_script)],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    # Feed all input via communicate() instead of writing to process.stdin
    # in a loop: manual writes can deadlock if the child blocks on a full
    # stdout/stderr pipe, whereas communicate() services all three pipes
    # concurrently and closes stdin when the input is exhausted.
    stdout, stderr = process.communicate(input=answers)

    if process.returncode != 0:
        print(f"Error occurred: {stderr}")
        sys.exit(1)

    print(stdout)


if __name__ == "__main__":
    main()

View File

@ -1,2 +0,0 @@
# Resolve the directory containing this script, then run the interactive
# filler with its stdin redirected from the stored answers file.
TEMPLATE_DIR=$(dirname $(readlink -f "$0"))
<"$TEMPLATE_DIR/template_vars.txt" "$TEMPLATE_DIR/fill_template_vars.sh"

View File

@ -1,30 +0,0 @@
[pydocstyle]
; All error codes found here:
; http://www.pydocstyle.org/en/3.0.0/error_codes.html
;
; Ignored:
; D1 - Missing docstring error codes
;
; Selected:
; D2 - Whitespace error codes
; D3 - Quote error codes
; D4 - Content related error codes
select=D2,D3,D4
; Extra ignores:
; D200 - One-line docstring should fit on one line with quotes
; D203 - 1 blank line required before class docstring
; D204 - 1 blank line required after class docstring
; D205 - 1 blank line required between summary line and description
; D212 - Multi-line docstring summary should start at the first line
; D302 - Use u""" for Unicode docstrings
; D400 - First line should end with a period
; D401 - First line should be in imperative mood
; D412 - No blank lines allowed between a section header and its content
add-ignore=D200,D203,D204,D205,D212,D302,D400,D401,D412
; Explanation:
; D400 - Enabling this error code seems to make it a requirement that the first
; sentence in a docstring is not split across two lines. It also makes it a
; requirement that no docstring can have a multi-sentence description without a
; summary line. Neither one of those requirements seem appropriate.

20
.readthedocs.yaml Normal file
View File

@ -0,0 +1,20 @@
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.8"
sphinx:
configuration: docs/conf.py
fail_on_warning: true
python:
install:
- method: pip
path: .
extra_requirements:
- docs
# Build all formats for RTD Downloads - htmlzip, pdf, epub
formats: all

View File

@ -1,37 +0,0 @@
language: python
matrix:
include:
- python: 3.6-dev
dist: xenial
env: TOXENV=py36-test
- python: 3.7
dist: xenial
env: TOXENV=py37-test
- python: 3.7
dist: xenial
env: TOXENV=lint
- python: 3.7
dist: xenial
env: TOXENV=docs
- python: 3.7
dist: xenial
env: TOXENV=py37-interop GOBINPKG=go1.13.8.linux-amd64.tar.gz
sudo: true
before_install:
- wget https://dl.google.com/go/$GOBINPKG
- sudo tar -C /usr/local -xzf $GOBINPKG
- export GOPATH=$HOME/go
- export GOROOT=/usr/local/go
- export PATH=$GOROOT/bin:$GOPATH/bin:$PATH
- ./tests_interop/go_pkgs/install_interop_go_pkgs.sh
install:
- pip install --upgrade pip
- pip install tox
script:
- tox
notifications:
slack: py-libp2p:RK0WVoQZhQXLgIKfHNPL1TR2

21
LICENSE
View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2019 The Ethereum Foundation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -5,4 +5,3 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

12
MANIFEST.in Normal file
View File

@ -0,0 +1,12 @@
include LICENSE-APACHE
include LICENSE-MIT
include README.md
recursive-include tests *
global-include *.pyi
recursive-exclude * __pycache__
recursive-exclude * *.py[co]
prune .tox
prune venv*

View File

@ -5,11 +5,11 @@ CURRENT_SIGN_SETTING := $(shell git config commit.gpgSign)
help:
@echo "clean-build - remove build artifacts"
@echo "clean-pyc - remove Python file artifacts"
@echo "lint - check style with flake8, etc"
@echo "lint-roll - auto-correct styles with isort, black, docformatter, etc"
@echo "lint - fix linting issues with pre-commit"
@echo "test - run tests quickly with the default Python"
@echo "testall - run tests on every Python version with tox"
@echo "release - package and upload a release"
@echo "docs - generate docs and open in browser (linux-docs for version on linux)"
@echo "notes - consume towncrier newsfragments/ and update release notes in docs/"
@echo "release - package and upload a release (does not run notes target)"
@echo "dist - package"
FILES_TO_LINT = libp2p tests tests_interop examples setup.py
@ -38,7 +38,6 @@ clean: clean-build clean-pyc
clean-build:
rm -fr build/
rm -fr dist/
rm -fr *.egg-info
clean-pyc:
find . -name '*.pyc' -exec rm -f {} +
@ -47,37 +46,36 @@ clean-pyc:
find . -name '__pycache__' -exec rm -rf {} +
lint:
mypy -p libp2p -p examples --config-file mypy.ini
flake8 $(FILES_TO_LINT)
black --check $(FILES_TO_LINT)
isort --recursive --check-only --diff $(FILES_TO_LINT)
docformatter --pre-summary-newline --check --recursive $(FILES_TO_LINT)
tox -e lint # This is probably redundant, but just in case...
lint-roll:
isort --recursive $(FILES_TO_LINT)
black $(FILES_TO_LINT)
docformatter -ir --pre-summary-newline $(FILES_TO_LINT)
$(MAKE) lint
@pre-commit run --all-files --show-diff-on-failure || ( \
echo "\n\n\n * pre-commit should have fixed the errors above. Running again to make sure everything is good..." \
&& pre-commit run --all-files --show-diff-on-failure \
)
test:
pytest tests
test-all:
tox
build-docs:
sphinx-apidoc -o docs/ . setup.py "*conftest*" "libp2p/tools/interop*"
$(MAKE) -C docs clean
$(MAKE) -C docs html
$(MAKE) -C docs doctest
./newsfragments/validate_files.py
towncrier --draft --version preview
docs: build-docs
build-docs-ci:
$(MAKE) -C docs latexpdf
$(MAKE) -C docs epub
validate-newsfragments:
python ./newsfragments/validate_files.py
towncrier build --draft --version preview
check-docs: build-docs validate-newsfragments
check-docs-ci: build-docs build-docs-ci validate-newsfragments
docs: check-docs
open docs/_build/html/index.html
linux-docs: build-docs
linux-docs: check-docs
xdg-open docs/_build/html/index.html
package: clean
@ -85,17 +83,23 @@ package: clean
python scripts/release/test_package.py
notes:
check-bump:
ifndef bump
$(error bump must be set, typically: major, minor, patch, or devnum)
endif
notes: check-bump
# Let UPCOMING_VERSION be the version that is used for the current bump
$(eval UPCOMING_VERSION=$(shell bumpversion $(bump) --dry-run --list | grep new_version= | sed 's/new_version=//g'))
# Now generate the release notes to have them included in the release commit
towncrier --yes --version $(UPCOMING_VERSION)
towncrier build --yes --version $(UPCOMING_VERSION)
# Before we bump the version, make sure that the towncrier-generated docs will build
make build-docs
git commit -m "Compile release notes"
git commit -m "Compile release notes for v$(UPCOMING_VERSION)"
release: clean
# require that you be on a branch that's linked to upstream/master
git status -s -b | head -1 | grep "\.\.upstream/master"
release: check-bump clean
# require that upstream is configured for ethereum/py-libp2p
@git remote -v | grep -E "upstream\tgit@github.com:ethereum/py-libp2p.git \(push\)|upstream\thttps://(www.)?github.com/ethereum/py-libp2p \(push\)"
# verify that docs build correctly
./newsfragments/validate_files.py is-empty
make build-docs
@ -103,11 +107,11 @@ release: clean
git config commit.gpgSign true
bumpversion $(bump)
git push upstream && git push upstream --tags
python setup.py sdist bdist_wheel
python -m build
twine upload dist/*
git config commit.gpgSign "$(CURRENT_SIGN_SETTING)"
dist: clean
python setup.py sdist bdist_wheel
python -m build
ls -l dist

237
README.md
View File

@ -9,182 +9,153 @@
[![Matrix](https://img.shields.io/badge/matrix-%23libp2p%3Apermaweb.io-blue.svg)](https://riot.permaweb.io/#/room/#libp2p:permaweb.io)
[![Discord](https://img.shields.io/discord/475789330380488707?color=blueviolet&label=discord)](https://discord.gg/66KBrm2)
<h1 align="center">
<img width="250" align="center" src="https://github.com/libp2p/py-libp2p/blob/master/assets/py-libp2p-logo.png?raw=true" alt="py-libp2p hex logo" />
</h1>
## WARNING
py-libp2p is an experimental and work-in-progress repo under heavy development. We do not yet recommend using py-libp2p in production environments.
py-libp2p is an experimental and work-in-progress repo under development. We do not yet recommend using py-libp2p in production environments.
Right now, `tests_interop` are turned off for CI, and a number of `tests` are failing.
The Python implementation of the libp2p networking stack
Read more in the [documentation on ReadTheDocs](https://py-libp2p.readthedocs.io/). [View the release notes](https://py-libp2p.readthedocs.io/en/latest/release_notes.html).
## Sponsorship
This project is graciously sponsored by the Ethereum Foundation through [Wave 5 of their Grants Program](https://blog.ethereum.org/2019/02/21/ethereum-foundation-grants-program-wave-5/).
## Maintainers
The py-libp2p team consists of:
Currently maintained by [@pacrob](https://github.com/pacrob), looking for assistance!
The py-libp2p team previously consisted of:
[@zixuanzh](https://github.com/zixuanzh) [@alexh](https://github.com/alexh) [@stuckinaboot](https://github.com/stuckinaboot) [@robzajac](https://github.com/robzajac) [@carver](https://github.com/carver)
## Development
py-libp2p requires Python 3.7 and the best way to guarantee a clean Python 3.7 environment is with [`virtualenv`](https://virtualenv.pypa.io/en/stable/)
py-libp2p requires Python 3.8+ and the best way to guarantee a clean Python environment is with [`virtualenv`](https://virtualenv.pypa.io/en/stable/)
```sh
git clone git@github.com:libp2p/py-libp2p.git
cd py-libp2p
virtualenv -p python3.7 venv
virtualenv -p python venv
. venv/bin/activate
pip install -e .[dev]
python -m pip install -e .[dev]
```
### Testing Setup
During development, you might like to have tests run on every file save.
Show flake8 errors on file change:
```sh
# Test flake8
when-changed -v -s -r -1 libp2p/ tests/ -c "clear; flake8 libp2p tests && echo 'flake8 success' || echo 'error'"
```
Run multi-process tests in one command, but without color:
```sh
# in the project root:
pytest --numprocesses=4 --looponfail --maxfail=1
# the same thing, succinctly:
pytest -n 4 -f --maxfail=1
```
Run in one thread, with color and desktop notifications:
```sh
cd venv
ptw --onfail "notify-send -t 5000 'Test failure ⚠⚠⚠⚠⚠' 'python 3 test on py-libp2p failed'" ../tests ../libp2p
```
Note that tests/libp2p/test_libp2p.py contains an end-to-end messaging test between two libp2p hosts, which is the bulk of our proof of concept.
Note that tests/test_libp2p/test_libp2p.py contains an end-to-end messaging test between two libp2p hosts, which is the bulk of our proof of concept.
### Release setup
Releases follow the same basic pattern as releases of some tangentially-related projects,
like Trinity. See [Trinity's release instructions](
https://trinity-client.readthedocs.io/en/latest/contributing.html#releasing).
like Trinity. See [Trinity's release instructions](https://trinity-client.readthedocs.io/en/latest/contributing.html#releasing).
## Requirements
The protobuf description in this repository was generated by `protoc` at version `3.7.1`.
To release a new version:
The protobuf description in this repository was generated by `protoc` at version `25.3`.
## Feature Breakdown
py-libp2p aims for conformity with [the standard libp2p modules](https://github.com/libp2p/libp2p/blob/master/REQUIREMENTS.md#libp2p-modules-implementations). Below is a breakdown of the modules we have developed, are developing, and may develop in the future.
> Legend: :green_apple: Done &nbsp; :lemon: In Progress &nbsp; :tomato: Missing &nbsp; :chestnut: Not planned
> Legend: :green_apple: Done   :lemon: In Progress   :tomato: Missing   :chestnut: Not planned
| libp2p Node | Status |
| -------------------------------------------- | :-----------: |
| **`libp2p`** | :green_apple: |
| libp2p Node | Status |
| ------------ | :-----------: |
| **`libp2p`** | :green_apple: |
| Identify Protocol | Status |
| ----------------- | :-----: |
| **`Identify`** | :lemon: |
| Identify Protocol | Status |
| -------------------------------------------- | :-----------: |
| **`Identify`** | :lemon: |
| Transport Protocols | Status |
| ------------------- | :-----------: |
| **`TCP`** | :green_apple: |
| **`UDP`** | :tomato: |
| **`WebSockets`** | :chestnut: |
| **`UTP`** | :chestnut: |
| **`WebRTC`** | :chestnut: |
| **`SCTP`** | :chestnut: |
| **`Tor`** | :chestnut: |
| **`i2p`** | :chestnut: |
| **`cjdns`** | :chestnut: |
| **`Bluetooth LE`** | :chestnut: |
| **`Audio TP`** | :chestnut: |
| **`Zerotier`** | :chestnut: |
| **`QUIC`** | :chestnut: |
| Stream Muxers | Status |
| ---------------- | :-----------: |
| **`multiplex`** | :green_apple: |
| **`yamux`** | :tomato: |
| **`benchmarks`** | :chestnut: |
| **`muxado`** | :chestnut: |
| **`spdystream`** | :chestnut: |
| **`spdy`** | :chestnut: |
| **`http2`** | :chestnut: |
| **`QUIC`** | :chestnut: |
| Transport Protocols | Status |
| -------------------------------------------- | :-----------: |
| **`TCP`** | :green_apple: |
| **`UDP`** | :tomato: |
| **`WebSockets`** | :chestnut: |
| **`UTP`** | :chestnut: |
| **`WebRTC`** | :chestnut: |
| **`SCTP`** | :chestnut: |
| **`Tor`** | :chestnut: |
| **`i2p`** | :chestnut: |
| **`cjdns`** | :chestnut: |
| **`Bluetooth LE`** | :chestnut: |
| **`Audio TP`** | :chestnut: |
| **`Zerotier`** | :chestnut: |
| **`QUIC`** | :chestnut: |
| Protocol Muxers | Status |
| ----------------- | :-----------: |
| **`multiselect`** | :green_apple: |
| Switch (Swarm) | Status |
| ------------------ | :-----------: |
| **`Switch`** | :green_apple: |
| **`Dialer stack`** | :green_apple: |
| Stream Muxers | Status |
| -------------------------------------------- | :-----------: |
| **`multiplex`** | :green_apple: |
| **`yamux`** | :tomato: |
| **`benchmarks`** | :chestnut: |
| **`muxado`** | :chestnut: |
| **`spdystream`** | :chestnut: |
| **`spdy`** | :chestnut: |
| **`http2`** | :chestnut: |
| **`QUIC`** | :chestnut: |
| Peer Discovery | Status |
| -------------------- | :--------: |
| **`bootstrap list`** | :tomato: |
| **`Kademlia DHT`** | :chestnut: |
| **`mDNS`** | :chestnut: |
| **`PEX`** | :chestnut: |
| **`DNS`** | :chestnut: |
| Content Routing | Status |
| ------------------ | :-----------: |
| **`Kademlia DHT`** | :chestnut: |
| **`floodsub`** | :green_apple: |
| **`gossipsub`** | :green_apple: |
| **`PHT`** | :chestnut: |
| Protocol Muxers | Status |
| -------------------------------------------- | :-----------: |
| **`multiselect`** | :green_apple: |
| Peer Routing | Status |
| ------------------ | :-----------: |
| **`Kademlia DHT`** | :chestnut: |
| **`floodsub`** | :green_apple: |
| **`gossipsub`** | :green_apple: |
| **`PHT`** | :chestnut: |
| NAT Traversal | Status |
| ------------------------ | :--------: |
| **`nat-pmp`** | :chestnut: |
| **`upnp`** | :chestnut: |
| **`ext addr discovery`** | :chestnut: |
| **`STUN-like`** | :chestnut: |
| **`line-switch relay`** | :chestnut: |
| **`pkt-switch relay`** | :chestnut: |
| Switch (Swarm) | Status |
| -------------------------------------------- | :-----------: |
| **`Switch`** | :green_apple: |
| **`Dialer stack`** | :green_apple: |
| Peer Discovery | Status |
| -------------------------------------------- | :-----------: |
| **`bootstrap list`** | :tomato: |
| **`Kademlia DHT`** | :chestnut: |
| **`mDNS`** | :chestnut: |
| **`PEX`** | :chestnut: |
| **`DNS`** | :chestnut: |
| Content Routing | Status |
| -------------------------------------------- | :-----------: |
| **`Kademlia DHT`** | :chestnut: |
| **`floodsub`** | :green_apple: |
| **`gossipsub`** | :green_apple: |
| **`PHT`** | :chestnut: |
| Peer Routing | Status |
| -------------------------------------------- | :-----------: |
| **`Kademlia DHT`** | :chestnut: |
| **`floodsub`** | :green_apple: |
| **`gossipsub`** | :green_apple: |
| **`PHT`** | :chestnut: |
| NAT Traversal | Status |
| -------------------------------------------- | :-----------: |
| **`nat-pmp`** | :chestnut: |
| **`upnp`** | :chestnut: |
| **`ext addr discovery`** | :chestnut: |
| **`STUN-like`** | :chestnut: |
| **`line-switch relay`** | :chestnut: |
| **`pkt-switch relay`** | :chestnut: |
| Exchange | Status |
| -------------------------------------------- | :-----------: |
| **`HTTP`** | :chestnut: |
| **`Bitswap`** | :chestnut: |
| **`Bittorrent`** | :chestnut: |
| Consensus | Status |
| -------------------------------------------- | :-----------: |
| **`Paxos`** | :chestnut: |
| **`Raft`** | :chestnut: |
| **`PBTF`** | :chestnut: |
| **`Nakamoto`** | :chestnut: |
| Exchange | Status |
| ---------------- | :--------: |
| **`HTTP`** | :chestnut: |
| **`Bitswap`** | :chestnut: |
| **`Bittorrent`** | :chestnut: |
| Consensus | Status |
| -------------- | :--------: |
| **`Paxos`** | :chestnut: |
| **`Raft`** | :chestnut: |
| **`PBFT`** | :chestnut: |
| **`Nakamoto`** | :chestnut: |
## Explanation of Basic Two Node Communication
@ -195,12 +166,12 @@ _(non-normative, useful for team notes, not a reference)_
Several components of the libp2p stack take part when establishing a connection between two nodes:
1. **Host**: a node in the libp2p network.
2. **Connection**: the layer 3 connection between two nodes in a libp2p network.
3. **Transport**: the component that creates a _Connection_, e.g. TCP, UDP, QUIC, etc.
3. **Streams**: an abstraction on top of a _Connection_ representing parallel conversations about different matters, each of which is identified by a protocol ID. Multiple streams are layered on top of a _Connection_ via the _Multiplexer_.
4. **Multiplexer**: a component that is responsible for wrapping messages sent on a stream with an envelope that identifies the stream they pertain to, normally via an ID. The multiplexer on the other end unwraps the message and routes it internally based on the stream identification.
5. **Secure channel**: optionally establishes a secure, encrypted, and authenticated channel over the _Connection_.
5. **Upgrader**: a component that takes a raw layer 3 connection returned by the _Transport_, and performs the security and multiplexing negotiation to set up a secure, multiplexed channel on top of which _Streams_ can be opened.
1. **Connection**: the layer 3 connection between two nodes in a libp2p network.
1. **Transport**: the component that creates a _Connection_, e.g. TCP, UDP, QUIC, etc.
1. **Streams**: an abstraction on top of a _Connection_ representing parallel conversations about different matters, each of which is identified by a protocol ID. Multiple streams are layered on top of a _Connection_ via the _Multiplexer_.
1. **Multiplexer**: a component that is responsible for wrapping messages sent on a stream with an envelope that identifies the stream they pertain to, normally via an ID. The multiplexer on the other end unwraps the message and routes it internally based on the stream identification.
1. **Secure channel**: optionally establishes a secure, encrypted, and authenticated channel over the _Connection_.
1. **Upgrader**: a component that takes a raw layer 3 connection returned by the _Transport_, and performs the security and multiplexing negotiation to set up a secure, multiplexed channel on top of which _Streams_ can be opened.
### Communication between two hosts X and Y

View File

@ -85,17 +85,17 @@ qthelp:
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/web3.qhcp"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/libp2p.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/web3.qhc"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/libp2p.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/web3"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/web3"
@echo "# mkdir -p $$HOME/.local/share/devhelp/libp2p"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/libp2p"
@echo "# devhelp"
epub:

View File

@ -15,45 +15,46 @@
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
import os
DIR = os.path.dirname('__file__')
with open (os.path.join(DIR, '../setup.py'), 'r') as f:
DIR = os.path.dirname("__file__")
with open(os.path.join(DIR, "../setup.py"), "r") as f:
for line in f:
if 'version=' in line:
if "version=" in line:
setup_version = line.split('"')[1]
break
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx_rtd_theme",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = 'py-libp2p'
project = "py-libp2p"
copyright = '2019, The Ethereum Foundation'
__version__ = setup_version
@ -62,176 +63,179 @@ __version__ = setup_version
# built documents.
#
# The short X.Y version.
version = '.'.join(__version__.split('.')[:2])
version = ".".join(__version__.split(".")[:2])
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
'_build',
'modules.rst',
"_build",
"modules.rst",
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'libp2pdoc'
htmlhelp_basename = "libp2pdocs"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'libp2p.tex', 'py-libp2p Documentation',
'The Ethereum Foundation', 'manual'),
(
"index",
"libp2p.tex",
"py-libp2p Documentation",
"The Ethereum Foundation",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
@ -239,12 +243,17 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'libp2p', 'py-libp2p Documentation',
['The Ethereum Foundation'], 1)
(
"index",
"libp2p",
"py-libp2p Documentation",
["The Ethereum Foundation"],
1,
)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
@ -253,34 +262,41 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'py-libp2p', 'py-libp2p Documentation',
'The Ethereum Foundation', 'py-libp2p', 'The Python implementation of the libp2p networking stack',
'Miscellaneous'),
(
"index",
"py-libp2p",
"py-libp2p Documentation",
"The Ethereum Foundation",
"py-libp2p",
"The Python implementation of the libp2p networking stack",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# texinfo_no_detailmenu = False
# -- Intersphinx configuration ------------------------------------------------
intersphinx_mapping = {
'python': ('https://docs.python.org/3.6', None),
"python": ("https://docs.python.org/3.10", None),
}
# -- Doctest configuration ----------------------------------------
import doctest
doctest_default_flags = (0
doctest_default_flags = (
0
| doctest.DONT_ACCEPT_TRUE_FOR_1
| doctest.ELLIPSIS
| doctest.IGNORE_EXCEPTION_DETAIL

View File

@ -4,13 +4,21 @@ import sys
import multiaddr
import trio
from libp2p import new_host
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.typing import TProtocol
from libp2p import (
new_host,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.peerinfo import (
info_from_p2p_addr,
)
from libp2p.typing import (
TProtocol,
)
PROTOCOL_ID = TProtocol("/chat/1.0.0")
MAX_READ_LEN = 2 ** 32 - 1
MAX_READ_LEN = 2**32 - 1
async def read_data(stream: INetStream) -> None:
@ -58,7 +66,8 @@ async def run(port: int, destination: str) -> None:
# Associate the peer with local ip address
await host.connect(info)
# Start a stream with the destination.
# Multiaddress of the destination peer is fetched from the peerstore using 'peerId'.
# Multiaddress of the destination peer is fetched from the peerstore
# using 'peerId'.
stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
nursery.start_soon(read_data, stream)

View File

@ -3,11 +3,21 @@ import argparse
import multiaddr
import trio
from libp2p import new_host
from libp2p.crypto.secp256k1 import create_new_key_pair
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.typing import TProtocol
from libp2p import (
new_host,
)
from libp2p.crypto.secp256k1 import (
create_new_key_pair,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.peerinfo import (
info_from_p2p_addr,
)
from libp2p.typing import (
TProtocol,
)
PROTOCOL_ID = TProtocol("/echo/1.0.0")
@ -36,11 +46,9 @@ async def run(port: int, destination: str, seed: int = None) -> None:
host = new_host(key_pair=create_new_key_pair(secret))
async with host.run(listen_addrs=[listen_addr]):
print(f"I am {host.get_id().to_string()}")
if not destination: # its the server
host.set_stream_handler(PROTOCOL_ID, _echo_stream_handler)
print(
@ -59,7 +67,8 @@ async def run(port: int, destination: str, seed: int = None) -> None:
await host.connect(info)
# Start a stream with the destination.
# Multiaddress of the destination peer is fetched from the peerstore using 'peerId'.
# Multiaddress of the destination peer is fetched from the peerstore
# using 'peerId'.
stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
msg = b"hi, there!\n"
@ -99,7 +108,7 @@ def main() -> None:
"-s",
"--seed",
type=int,
help="provide a seed to the random number generator (e.g. to fix peer IDs across runs)",
help="provide a seed to the random number generator (e.g. to fix peer IDs across runs)", # noqa: E501
)
args = parser.parse_args()

View File

@ -1,21 +1,60 @@
from libp2p.crypto.keys import KeyPair
from libp2p.crypto.rsa import create_new_key_pair
from libp2p.host.basic_host import BasicHost
from libp2p.host.host_interface import IHost
from libp2p.host.routed_host import RoutedHost
from libp2p.network.network_interface import INetworkService
from libp2p.network.swarm import Swarm
from libp2p.peer.id import ID
from libp2p.peer.peerstore import PeerStore
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.routing.interfaces import IPeerRouting
from libp2p.security.insecure.transport import PLAINTEXT_PROTOCOL_ID, InsecureTransport
from importlib.metadata import version as __version
from libp2p.crypto.keys import (
KeyPair,
)
from libp2p.crypto.rsa import (
create_new_key_pair,
)
from libp2p.host.basic_host import (
BasicHost,
)
from libp2p.host.host_interface import (
IHost,
)
from libp2p.host.routed_host import (
RoutedHost,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.network.swarm import (
Swarm,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore import (
PeerStore,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.routing.interfaces import (
IPeerRouting,
)
from libp2p.security.insecure.transport import (
PLAINTEXT_PROTOCOL_ID,
InsecureTransport,
)
import libp2p.security.secio.transport as secio
from libp2p.stream_muxer.mplex.mplex import MPLEX_PROTOCOL_ID, Mplex
from libp2p.transport.tcp.tcp import TCP
from libp2p.transport.typing import TMuxerOptions, TSecurityOptions
from libp2p.transport.upgrader import TransportUpgrader
from libp2p.typing import TProtocol
from libp2p.stream_muxer.mplex.mplex import (
MPLEX_PROTOCOL_ID,
Mplex,
)
from libp2p.transport.tcp.tcp import (
TCP,
)
from libp2p.transport.typing import (
TMuxerOptions,
TSecurityOptions,
)
from libp2p.transport.upgrader import (
TransportUpgrader,
)
from libp2p.typing import (
TProtocol,
)
def generate_new_rsa_identity() -> KeyPair:
@ -42,7 +81,6 @@ def new_swarm(
:param peerstore_opt: optional peerstore
:return: return a default swarm instance
"""
if key_pair is None:
key_pair = generate_new_rsa_identity()
@ -96,3 +134,6 @@ def new_host(
else:
host = BasicHost(swarm)
return host
__version__ = __version("libp2p")

View File

@ -1,8 +1,14 @@
from dataclasses import dataclass
from dataclasses import (
dataclass,
)
import hmac
from typing import Tuple
from typing import (
Tuple,
)
from Crypto.Cipher import AES
from Crypto.Cipher import (
AES,
)
import Crypto.Util.Counter as Counter
@ -61,9 +67,11 @@ class MacAndCipher:
def initialize_pair(
cipher_type: str, hash_type: str, secret: bytes
) -> Tuple[EncryptionParameters, EncryptionParameters]:
"""Return a pair of ``Keys`` for use in securing a communications channel
"""
Return a pair of ``Keys`` for use in securing a communications channel
with authenticated encryption derived from the ``secret`` and using the
requested ``cipher_type`` and ``hash_type``."""
requested ``cipher_type`` and ``hash_type``.
"""
if cipher_type != "AES-128":
raise NotImplementedError()
if hash_type != "SHA256":
@ -72,7 +80,7 @@ def initialize_pair(
iv_size = 16
cipher_key_size = 16
hmac_key_size = 20
seed = "key expansion".encode()
seed = b"key expansion"
params_size = iv_size + cipher_key_size + hmac_key_size
result = bytearray(2 * params_size)

View File

@ -1,13 +1,25 @@
from fastecdsa import (
keys,
point,
)
from fastecdsa import curve as curve_types
from fastecdsa import keys, point
from fastecdsa.encoding.sec1 import SEC1Encoder
from fastecdsa.encoding.sec1 import (
SEC1Encoder,
)
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
def infer_local_type(curve: str) -> curve_types.Curve:
"""converts a ``str`` representation of some elliptic curve to a
representation understood by the backend of this module."""
"""
Convert a ``str`` representation of some elliptic curve to a
representation understood by the backend of this module.
"""
if curve == "P-256":
return curve_types.P256
else:
@ -61,8 +73,10 @@ class ECCPrivateKey(PrivateKey):
def create_new_key_pair(curve: str) -> KeyPair:
"""Return a new ECC keypair with the requested ``curve`` type, e.g.
"P-256"."""
"""
Return a new ECC keypair with the requested ``curve`` type, e.g.
"P-256".
"""
private_key = ECCPrivateKey.new(curve)
public_key = private_key.get_public_key()
return KeyPair(private_key, public_key)

View File

@ -1,11 +1,23 @@
from Crypto.Hash import SHA256
from nacl.exceptions import BadSignatureError
from Crypto.Hash import (
SHA256,
)
from nacl.exceptions import (
BadSignatureError,
)
from nacl.public import PrivateKey as PrivateKeyImpl
from nacl.public import PublicKey as PublicKeyImpl
from nacl.signing import SigningKey, VerifyKey
from nacl.signing import (
SigningKey,
VerifyKey,
)
import nacl.utils as utils
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
class Ed25519PublicKey(PublicKey):

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class CryptographyError(BaseLibp2pError):
@ -6,7 +8,7 @@ class CryptographyError(BaseLibp2pError):
class MissingDeserializerError(CryptographyError):
"""Raise if the requested deserialization routine is missing for some type
of cryptographic key."""
pass
"""
Raise if the requested deserialization routine is missing for some type
of cryptographic key.
"""

View File

@ -1,9 +1,21 @@
from typing import Callable, Tuple, cast
from typing import (
Callable,
Tuple,
cast,
)
from fastecdsa.encoding import util
from fastecdsa.encoding import (
util,
)
from libp2p.crypto.ecc import ECCPrivateKey, ECCPublicKey, create_new_key_pair
from libp2p.crypto.keys import PublicKey
from libp2p.crypto.ecc import (
ECCPrivateKey,
ECCPublicKey,
create_new_key_pair,
)
from libp2p.crypto.keys import (
PublicKey,
)
SharedKeyGenerator = Callable[[bytes], bytes]

View File

@ -1,6 +1,14 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum, unique
from abc import (
ABC,
abstractmethod,
)
from dataclasses import (
dataclass,
)
from enum import (
Enum,
unique,
)
from .pb import crypto_pb2 as protobuf
@ -38,8 +46,10 @@ class PublicKey(Key):
@abstractmethod
def verify(self, data: bytes, signature: bytes) -> bool:
"""Verify that ``signature`` is the cryptographic signature of the hash
of ``data``."""
"""
Verify that ``signature`` is the cryptographic signature of the hash
of ``data``.
"""
...
def _serialize_to_protobuf(self) -> protobuf.PublicKey:

View File

@ -17,4 +17,4 @@ message PublicKey {
message PrivateKey {
required KeyType key_type = 1;
required bytes data = 2;
}
}

View File

@ -1,14 +1,12 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/crypto/pb/crypto.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@ -16,147 +14,17 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/crypto/pb/crypto.proto',
package='crypto.pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb\"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c\"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03')
)
_KEYTYPE = _descriptor.EnumDescriptor(
name='KeyType',
full_name='crypto.pb.KeyType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='RSA', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Ed25519', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Secp256k1', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ECDSA', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=175,
serialized_end=232,
)
_sym_db.RegisterEnumDescriptor(_KEYTYPE)
KeyType = enum_type_wrapper.EnumTypeWrapper(_KEYTYPE)
RSA = 0
Ed25519 = 1
Secp256k1 = 2
ECDSA = 3
_PUBLICKEY = _descriptor.Descriptor(
name='PublicKey',
full_name='crypto.pb.PublicKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key_type', full_name='crypto.pb.PublicKey.key_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='crypto.pb.PublicKey.data', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=44,
serialized_end=107,
)
_PRIVATEKEY = _descriptor.Descriptor(
name='PrivateKey',
full_name='crypto.pb.PrivateKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key_type', full_name='crypto.pb.PrivateKey.key_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='crypto.pb.PrivateKey.data', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=109,
serialized_end=173,
)
_PUBLICKEY.fields_by_name['key_type'].enum_type = _KEYTYPE
_PRIVATEKEY.fields_by_name['key_type'].enum_type = _KEYTYPE
DESCRIPTOR.message_types_by_name['PublicKey'] = _PUBLICKEY
DESCRIPTOR.message_types_by_name['PrivateKey'] = _PRIVATEKEY
DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PublicKey = _reflection.GeneratedProtocolMessageType('PublicKey', (_message.Message,), {
'DESCRIPTOR' : _PUBLICKEY,
'__module__' : 'libp2p.crypto.pb.crypto_pb2'
# @@protoc_insertion_point(class_scope:crypto.pb.PublicKey)
})
_sym_db.RegisterMessage(PublicKey)
PrivateKey = _reflection.GeneratedProtocolMessageType('PrivateKey', (_message.Message,), {
'DESCRIPTOR' : _PRIVATEKEY,
'__module__' : 'libp2p.crypto.pb.crypto_pb2'
# @@protoc_insertion_point(class_scope:crypto.pb.PrivateKey)
})
_sym_db.RegisterMessage(PrivateKey)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb\"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c\"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.crypto.pb.crypto_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals['_KEYTYPE']._serialized_start=175
_globals['_KEYTYPE']._serialized_end=232
_globals['_PUBLICKEY']._serialized_start=44
_globals['_PUBLICKEY']._serialized_end=107
_globals['_PRIVATEKEY']._serialized_start=109
_globals['_PRIVATEKEY']._serialized_end=173
# @@protoc_insertion_point(module_scope)

View File

@ -1,84 +1,74 @@
# @generated by generate_proto_mypy_stubs.py. Do not edit!
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
EnumDescriptor as google___protobuf___descriptor___EnumDescriptor,
)
import typing
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
from typing import (
List as typing___List,
Tuple as typing___Tuple,
cast as typing___cast,
)
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
from typing_extensions import (
Literal as typing_extensions___Literal,
)
class _KeyType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _KeyTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_KeyType.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
RSA: _KeyType.ValueType # 0
Ed25519: _KeyType.ValueType # 1
Secp256k1: _KeyType.ValueType # 2
ECDSA: _KeyType.ValueType # 3
class KeyType(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod
def Name(cls, number: int) -> str: ...
@classmethod
def Value(cls, name: str) -> KeyType: ...
@classmethod
def keys(cls) -> typing___List[str]: ...
@classmethod
def values(cls) -> typing___List[KeyType]: ...
@classmethod
def items(cls) -> typing___List[typing___Tuple[str, KeyType]]: ...
RSA = typing___cast(KeyType, 0)
Ed25519 = typing___cast(KeyType, 1)
Secp256k1 = typing___cast(KeyType, 2)
ECDSA = typing___cast(KeyType, 3)
RSA = typing___cast(KeyType, 0)
Ed25519 = typing___cast(KeyType, 1)
Secp256k1 = typing___cast(KeyType, 2)
ECDSA = typing___cast(KeyType, 3)
class KeyType(_KeyType, metaclass=_KeyTypeEnumTypeWrapper): ...
class PublicKey(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
key_type = ... # type: KeyType
data = ... # type: bytes
RSA: KeyType.ValueType # 0
Ed25519: KeyType.ValueType # 1
Secp256k1: KeyType.ValueType # 2
ECDSA: KeyType.ValueType # 3
global___KeyType = KeyType
def __init__(self,
@typing_extensions.final
class PublicKey(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_TYPE_FIELD_NUMBER: builtins.int
DATA_FIELD_NUMBER: builtins.int
key_type: global___KeyType.ValueType
data: builtins.bytes
def __init__(
self,
*,
key_type : KeyType,
data : bytes,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> PublicKey: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ...
key_type: global___KeyType.ValueType | None = ...,
data: builtins.bytes | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["data", b"data", "key_type", b"key_type"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "key_type", b"key_type"]) -> None: ...
class PrivateKey(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
key_type = ... # type: KeyType
data = ... # type: bytes
global___PublicKey = PublicKey
def __init__(self,
@typing_extensions.final
class PrivateKey(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_TYPE_FIELD_NUMBER: builtins.int
DATA_FIELD_NUMBER: builtins.int
key_type: global___KeyType.ValueType
data: builtins.bytes
def __init__(
self,
*,
key_type : KeyType,
data : bytes,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> PrivateKey: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ...
key_type: global___KeyType.ValueType | None = ...,
data: builtins.bytes | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["data", b"data", "key_type", b"key_type"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "key_type", b"key_type"]) -> None: ...
global___PrivateKey = PrivateKey

View File

@ -1,9 +1,20 @@
from Crypto.Hash import SHA256
from Crypto.Hash import (
SHA256,
)
import Crypto.PublicKey.RSA as RSA
from Crypto.PublicKey.RSA import RsaKey
from Crypto.Signature import pkcs1_15
from Crypto.PublicKey.RSA import (
RsaKey,
)
from Crypto.Signature import (
pkcs1_15,
)
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
class RSAPublicKey(PublicKey):

View File

@ -1,6 +1,11 @@
import coincurve
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
KeyType,
PrivateKey,
PublicKey,
)
class Secp256k1PublicKey(PublicKey):

View File

@ -1,8 +1,22 @@
from libp2p.crypto.ed25519 import Ed25519PrivateKey, Ed25519PublicKey
from libp2p.crypto.exceptions import MissingDeserializerError
from libp2p.crypto.keys import KeyType, PrivateKey, PublicKey
from libp2p.crypto.rsa import RSAPublicKey
from libp2p.crypto.secp256k1 import Secp256k1PrivateKey, Secp256k1PublicKey
from libp2p.crypto.ed25519 import (
Ed25519PrivateKey,
Ed25519PublicKey,
)
from libp2p.crypto.exceptions import (
MissingDeserializerError,
)
from libp2p.crypto.keys import (
KeyType,
PrivateKey,
PublicKey,
)
from libp2p.crypto.rsa import (
RSAPublicKey,
)
from libp2p.crypto.secp256k1 import (
Secp256k1PrivateKey,
Secp256k1PublicKey,
)
key_type_to_public_key_deserializer = {
KeyType.Secp256k1.value: Secp256k1PublicKey.from_bytes,

View File

@ -1,28 +1,70 @@
import logging
from typing import TYPE_CHECKING, AsyncIterator, List, Sequence
from typing import (
TYPE_CHECKING,
AsyncIterator,
List,
Sequence,
)
from async_generator import asynccontextmanager
from async_service import background_trio_service
from async_generator import (
asynccontextmanager,
)
from async_service import (
background_trio_service,
)
import multiaddr
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.host.defaults import get_default_protocols
from libp2p.host.exceptions import StreamFailure
from libp2p.network.network_interface import INetworkService
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.protocol_muxer.exceptions import MultiselectClientError, MultiselectError
from libp2p.protocol_muxer.multiselect import Multiselect
from libp2p.protocol_muxer.multiselect_client import MultiselectClient
from libp2p.protocol_muxer.multiselect_communicator import MultiselectCommunicator
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.host.defaults import (
get_default_protocols,
)
from libp2p.host.exceptions import (
StreamFailure,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.protocol_muxer.exceptions import (
MultiselectClientError,
MultiselectError,
)
from libp2p.protocol_muxer.multiselect import (
Multiselect,
)
from libp2p.protocol_muxer.multiselect_client import (
MultiselectClient,
)
from libp2p.protocol_muxer.multiselect_communicator import (
MultiselectCommunicator,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .host_interface import IHost
from .host_interface import (
IHost,
)
if TYPE_CHECKING:
from collections import OrderedDict
from collections import (
OrderedDict,
)
# Upon host creation, host takes in options,
# including the list of addresses on which to listen.
@ -108,7 +150,7 @@ class BasicHost(IHost):
self, listen_addrs: Sequence[multiaddr.Multiaddr]
) -> AsyncIterator[None]:
"""
run the host instance and listen to ``listen_addrs``.
Run the host instance and listen to ``listen_addrs``.
:param listen_addrs: a sequence of multiaddrs that we want to listen to
"""
@ -121,7 +163,7 @@ class BasicHost(IHost):
self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
) -> None:
"""
set stream handler for given `protocol_id`
Set stream handler for given `protocol_id`
:param protocol_id: protocol id used on stream
:param stream_handler: a stream handler function
@ -136,7 +178,6 @@ class BasicHost(IHost):
:param protocol_ids: available protocol ids to use for stream
:return: stream: new stream created
"""
net_stream = await self._network.new_stream(peer_id)
# Perform protocol muxing to determine protocol to use
@ -154,7 +195,7 @@ class BasicHost(IHost):
async def connect(self, peer_info: PeerInfo) -> None:
"""
connect ensures there is a connection between this host and the peer
Ensure there is a connection between this host and the peer
with given `peer_info.peer_id`. connect will absorb the addresses in
peer_info into its internal peerstore. If there is not an active
connection, connect will issue a dial, and block until a connection is

View File

@ -1,14 +1,27 @@
from collections import OrderedDict
from typing import TYPE_CHECKING
from collections import (
OrderedDict,
)
from typing import (
TYPE_CHECKING,
)
from libp2p.host.host_interface import IHost
from libp2p.host.host_interface import (
IHost,
)
from libp2p.host.ping import (
handle_ping,
)
from libp2p.host.ping import ID as PingID
from libp2p.host.ping import handle_ping
from libp2p.identity.identify.protocol import (
identify_handler_for,
)
from libp2p.identity.identify.protocol import ID as IdentifyID
from libp2p.identity.identify.protocol import identify_handler_for
if TYPE_CHECKING:
from libp2p.typing import TProtocol, StreamHandlerFn
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
def get_default_protocols(host: IHost) -> "OrderedDict[TProtocol, StreamHandlerFn]":

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class HostException(BaseLibp2pError):

View File

@ -1,14 +1,36 @@
from abc import ABC, abstractmethod
from typing import Any, AsyncContextManager, List, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
Any,
AsyncContextManager,
List,
Sequence,
)
import multiaddr
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.network.network_interface import INetworkService
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
class IHost(ABC):
@ -54,7 +76,7 @@ class IHost(ABC):
self, listen_addrs: Sequence[multiaddr.Multiaddr]
) -> AsyncContextManager[None]:
"""
run the host instance and listen to ``listen_addrs``.
Run the host instance and listen to ``listen_addrs``.
:param listen_addrs: a sequence of multiaddrs that we want to listen to
"""
@ -64,7 +86,7 @@ class IHost(ABC):
self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
) -> None:
"""
set stream handler for host.
Set stream handler for host.
:param protocol_id: protocol id used on stream
:param stream_handler: a stream handler function
@ -85,7 +107,7 @@ class IHost(ABC):
@abstractmethod
async def connect(self, peer_info: PeerInfo) -> None:
"""
connect ensures there is a connection between this host and the peer
Ensure there is a connection between this host and the peer
with given peer_info.peer_id. connect will absorb the addresses in
peer_info into its internal peerstore. If there is not an active
connection, connect will issue a dial, and block until a connection is

View File

@ -2,10 +2,18 @@ import logging
import trio
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.stream.exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import ID as PeerID
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
ID = TProtocol("/ipfs/ping/1.0.0")
PING_LENGTH = 32
@ -15,8 +23,9 @@ logger = logging.getLogger("libp2p.host.ping")
async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
"""Return a boolean indicating if we expect more pings from the peer at
``peer_id``."""
"""
Return a boolean indicating if we expect more pings from the peer at ``peer_id``.
"""
try:
with trio.fail_after(RESP_TIMEOUT):
payload = await stream.read(PING_LENGTH)
@ -46,8 +55,10 @@ async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
async def handle_ping(stream: INetStream) -> None:
"""``handle_ping`` responds to incoming ping requests until one side errors
or closes the ``stream``."""
"""
Respond to incoming ping requests until one side errors
or closes the ``stream``.
"""
peer_id = stream.muxed_conn.peer_id
while True:

View File

@ -1,8 +1,18 @@
from libp2p.host.basic_host import BasicHost
from libp2p.host.exceptions import ConnectionFailure
from libp2p.network.network_interface import INetworkService
from libp2p.peer.peerinfo import PeerInfo
from libp2p.routing.interfaces import IPeerRouting
from libp2p.host.basic_host import (
BasicHost,
)
from libp2p.host.exceptions import (
ConnectionFailure,
)
from libp2p.network.network_interface import (
INetworkService,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
from libp2p.routing.interfaces import (
IPeerRouting,
)
# RoutedHost is a p2p Host that includes a routing system.
@ -16,7 +26,7 @@ class RoutedHost(BasicHost):
async def connect(self, peer_info: PeerInfo) -> None:
"""
connect ensures there is a connection between this host and the peer
Ensure there is a connection between this host and the peer
with given `peer_info.peer_id`. See (basic_host).connect for more
information.
@ -26,7 +36,8 @@ class RoutedHost(BasicHost):
:param peer_info: peer_info of the peer we want to connect to
:type peer_info: peer.peerinfo.PeerInfo
"""
# check if we were given some addresses, otherwise, find some with the routing system.
# check if we were given some addresses, otherwise, find some with the
# routing system.
if not peer_info.addrs:
found_peer_info = await self._router.find_peer(peer_info.peer_id)
if not found_peer_info:

View File

@ -9,4 +9,4 @@ message Identify {
repeated bytes listen_addrs = 2;
optional bytes observed_addr = 4;
repeated string protocols = 3;
}
}

View File

@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/identity/identify/pb/identify.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@ -15,91 +14,13 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/identity/identify/pb/identify.proto',
package='identify.pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb\"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t')
)
_IDENTIFY = _descriptor.Descriptor(
name='Identify',
full_name='identify.pb.Identify',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='protocol_version', full_name='identify.pb.Identify.protocol_version', index=0,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='agent_version', full_name='identify.pb.Identify.agent_version', index=1,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='public_key', full_name='identify.pb.Identify.public_key', index=2,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='listen_addrs', full_name='identify.pb.Identify.listen_addrs', index=3,
number=2, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='observed_addr', full_name='identify.pb.Identify.observed_addr', index=4,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='protocols', full_name='identify.pb.Identify.protocols', index=5,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=60,
serialized_end=203,
)
DESCRIPTOR.message_types_by_name['Identify'] = _IDENTIFY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Identify = _reflection.GeneratedProtocolMessageType('Identify', (_message.Message,), {
'DESCRIPTOR' : _IDENTIFY,
'__module__' : 'libp2p.identity.identify.pb.identify_pb2'
# @@protoc_insertion_point(class_scope:identify.pb.Identify)
})
_sym_db.RegisterMessage(Identify)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb\"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.identity.identify.pb.identify_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals['_IDENTIFY']._serialized_start=60
_globals['_IDENTIFY']._serialized_end=203
# @@protoc_insertion_point(module_scope)

View File

@ -1,53 +1,50 @@
# @generated by generate_proto_mypy_stubs.py. Do not edit!
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer,
)
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
)
@typing_extensions.final
class Identify(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
from typing_extensions import (
Literal as typing_extensions___Literal,
)
class Identify(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
protocol_version = ... # type: typing___Text
agent_version = ... # type: typing___Text
public_key = ... # type: bytes
listen_addrs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
observed_addr = ... # type: bytes
protocols = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
def __init__(self,
PROTOCOL_VERSION_FIELD_NUMBER: builtins.int
AGENT_VERSION_FIELD_NUMBER: builtins.int
PUBLIC_KEY_FIELD_NUMBER: builtins.int
LISTEN_ADDRS_FIELD_NUMBER: builtins.int
OBSERVED_ADDR_FIELD_NUMBER: builtins.int
PROTOCOLS_FIELD_NUMBER: builtins.int
protocol_version: builtins.str
agent_version: builtins.str
public_key: builtins.bytes
@property
def listen_addrs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ...
observed_addr: builtins.bytes
@property
def protocols(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
def __init__(
self,
*,
protocol_version : typing___Optional[typing___Text] = None,
agent_version : typing___Optional[typing___Text] = None,
public_key : typing___Optional[bytes] = None,
listen_addrs : typing___Optional[typing___Iterable[bytes]] = None,
observed_addr : typing___Optional[bytes] = None,
protocols : typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Identify: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"agent_version",u"observed_addr",u"protocol_version",u"public_key"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",u"listen_addrs",u"observed_addr",u"protocol_version",u"protocols",u"public_key"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"public_key",b"public_key"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"listen_addrs",b"listen_addrs",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"protocols",b"protocols",u"public_key",b"public_key"]) -> None: ...
protocol_version: builtins.str | None = ...,
agent_version: builtins.str | None = ...,
public_key: builtins.bytes | None = ...,
listen_addrs: collections.abc.Iterable[builtins.bytes] | None = ...,
observed_addr: builtins.bytes | None = ...,
protocols: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["agent_version", b"agent_version", "observed_addr", b"observed_addr", "protocol_version", b"protocol_version", "public_key", b"public_key"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["agent_version", b"agent_version", "listen_addrs", b"listen_addrs", "observed_addr", b"observed_addr", "protocol_version", b"protocol_version", "protocols", b"protocols", "public_key", b"public_key"]) -> None: ...
global___Identify = Identify

View File

@ -1,13 +1,26 @@
import logging
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.host.host_interface import IHost
from libp2p.network.stream.exceptions import StreamClosed
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.host.host_interface import (
IHost,
)
from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .pb.identify_pb2 import Identify
from .pb.identify_pb2 import (
Identify,
)
ID = TProtocol("/ipfs/id/1.0.0")
PROTOCOL_VERSION = "ipfs/0.1.0"

View File

@ -1,4 +1,7 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
class Closer(ABC):

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class IOException(BaseLibp2pError):

View File

@ -5,13 +5,26 @@ from that repo: "a simple package to r/w length-delimited slices."
NOTE: currently missing the capability to indicate lengths by "varint" method.
"""
from abc import abstractmethod
from abc import (
abstractmethod,
)
from libp2p.io.abc import MsgReadWriteCloser, Reader, ReadWriteCloser
from libp2p.io.utils import read_exactly
from libp2p.utils import decode_uvarint_from_stream, encode_varint_prefixed
from libp2p.io.abc import (
MsgReadWriteCloser,
Reader,
ReadWriteCloser,
)
from libp2p.io.utils import (
read_exactly,
)
from libp2p.utils import (
decode_uvarint_from_stream,
encode_varint_prefixed,
)
from .exceptions import MessageTooLarge
from .exceptions import (
MessageTooLarge,
)
BYTE_ORDER = "big"

View File

@ -2,8 +2,12 @@ import logging
import trio
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.exceptions import IOException
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
logger = logging.getLogger("libp2p.io.trio")

View File

@ -1,5 +1,9 @@
from libp2p.io.abc import Reader
from libp2p.io.exceptions import IncompleteReadError
from libp2p.io.abc import (
Reader,
)
from libp2p.io.exceptions import (
IncompleteReadError,
)
DEFAULT_RETRY_READ_COUNT = 100

View File

@ -1,4 +1,6 @@
from libp2p.io.exceptions import IOException
from libp2p.io.exceptions import (
IOException,
)
class RawConnError(IOException):

View File

@ -1,11 +1,21 @@
from abc import abstractmethod
from typing import Tuple
from abc import (
abstractmethod,
)
from typing import (
Tuple,
)
import trio
from libp2p.io.abc import Closer
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.io.abc import (
Closer,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
class INetConn(Closer):

View File

@ -1,8 +1,16 @@
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.exceptions import IOException
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
from .exceptions import RawConnError
from .raw_connection_interface import IRawConnection
from .exceptions import (
RawConnError,
)
from .raw_connection_interface import (
IRawConnection,
)
class RawConnection(IRawConnection):

View File

@ -1,4 +1,6 @@
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.abc import (
ReadWriteCloser,
)
class IRawConnection(ReadWriteCloser):

View File

@ -1,11 +1,24 @@
from typing import TYPE_CHECKING, Set, Tuple
from typing import (
TYPE_CHECKING,
Set,
Tuple,
)
import trio
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.network.stream.net_stream import NetStream
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.stream_muxer.exceptions import MuxedConnUnavailable
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.network.stream.net_stream import (
NetStream,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import (
MuxedConnUnavailable,
)
if TYPE_CHECKING:
from libp2p.network.swarm import Swarm # noqa: F401
@ -48,8 +61,8 @@ class SwarmConn(INetConn):
# We *could* optimize this but it really isn't worth it.
for stream in self.streams.copy():
await stream.reset()
# Force context switch for stream handlers to process the stream reset event we just emit
# before we cancel the stream handler tasks.
# Force context switch for stream handlers to process the stream reset event we
# just emit before we cancel the stream handler tasks.
await trio.sleep(0.1)
await self._notify_disconnected()
@ -63,13 +76,15 @@ class SwarmConn(INetConn):
except MuxedConnUnavailable:
await self.close()
break
# Asynchronously handle the accepted stream, to avoid blocking the next stream.
# Asynchronously handle the accepted stream, to avoid blocking
# the next stream.
nursery.start_soon(self._handle_muxed_stream, stream)
async def _handle_muxed_stream(self, muxed_stream: IMuxedStream) -> None:
net_stream = await self._add_stream(muxed_stream)
try:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
await self.swarm.common_stream_handler(net_stream) # type: ignore
finally:
# As long as `common_stream_handler`, remove the stream.

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class SwarmException(BaseLibp2pError):

View File

@ -1,23 +1,45 @@
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Dict, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
Dict,
Sequence,
)
from async_service import ServiceAPI
from multiaddr import Multiaddr
from async_service import (
ServiceAPI,
)
from multiaddr import (
Multiaddr,
)
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.peer.id import ID
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.transport.listener_interface import IListener
from libp2p.typing import StreamHandlerFn
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.transport.listener_interface import (
IListener,
)
from libp2p.typing import (
StreamHandlerFn,
)
from .stream.net_stream_interface import INetStream
from .stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING:
from .notifee_interface import INotifee # noqa: F401
class INetwork(ABC):
peerstore: IPeerStore
connections: Dict[ID, INetConn]
listeners: Dict[str, IListener]

View File

@ -1,10 +1,21 @@
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING
from abc import (
ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING:
from .network_interface import INetwork # noqa: F401

View File

@ -1,4 +1,6 @@
from libp2p.io.exceptions import IOException
from libp2p.io.exceptions import (
IOException,
)
class StreamError(IOException):

View File

@ -1,22 +1,33 @@
from typing import Optional
from typing import (
Optional,
)
from libp2p.stream_muxer.abc import IMuxedStream
from libp2p.stream_muxer.abc import (
IMuxedStream,
)
from libp2p.stream_muxer.exceptions import (
MuxedStreamClosed,
MuxedStreamEOF,
MuxedStreamReset,
)
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
from .exceptions import StreamClosed, StreamEOF, StreamReset
from .net_stream_interface import INetStream
from .exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from .net_stream_interface import (
INetStream,
)
# TODO: Handle exceptions from `muxed_stream`
# TODO: Add stream state
# - Reference: https://github.com/libp2p/go-libp2p-swarm/blob/99831444e78c8f23c9335c17d8f7c700ba25ca14/swarm_stream.go # noqa: E501
class NetStream(INetStream):
muxed_stream: IMuxedStream
protocol_id: Optional[TProtocol]
@ -39,7 +50,7 @@ class NetStream(INetStream):
async def read(self, n: int = None) -> bytes:
"""
reads from stream.
Read from stream.
:param n: number of bytes to read
:return: bytes of input
@ -53,7 +64,7 @@ class NetStream(INetStream):
async def write(self, data: bytes) -> None:
"""
write to stream.
Write to stream.
:return: number of bytes written
"""
@ -63,7 +74,7 @@ class NetStream(INetStream):
raise StreamClosed() from error
async def close(self) -> None:
"""close stream."""
"""Close stream."""
await self.muxed_stream.close()
async def reset(self) -> None:

View File

@ -1,12 +1,19 @@
from abc import abstractmethod
from abc import (
abstractmethod,
)
from libp2p.io.abc import ReadWriteCloser
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.typing import TProtocol
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
from libp2p.typing import (
TProtocol,
)
class INetStream(ReadWriteCloser):
muxed_conn: IMuxedConn
@abstractmethod

View File

@ -1,33 +1,75 @@
import logging
from typing import Dict, List, Optional
from typing import (
Dict,
List,
Optional,
)
from async_service import Service
from multiaddr import Multiaddr
from async_service import (
Service,
)
from multiaddr import (
Multiaddr,
)
import trio
from libp2p.io.abc import ReadWriteCloser
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.peer.id import ID
from libp2p.peer.peerstore import PeerStoreError
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerstore import (
PeerStoreError,
)
from libp2p.peer.peerstore_interface import (
IPeerStore,
)
from libp2p.stream_muxer.abc import (
IMuxedConn,
)
from libp2p.transport.exceptions import (
MuxerUpgradeFailure,
OpenConnectionError,
SecurityUpgradeFailure,
)
from libp2p.transport.listener_interface import IListener
from libp2p.transport.transport_interface import ITransport
from libp2p.transport.upgrader import TransportUpgrader
from libp2p.typing import StreamHandlerFn
from libp2p.transport.listener_interface import (
IListener,
)
from libp2p.transport.transport_interface import (
ITransport,
)
from libp2p.transport.upgrader import (
TransportUpgrader,
)
from libp2p.typing import (
StreamHandlerFn,
)
from ..exceptions import MultiError
from .connection.raw_connection import RawConnection
from .connection.swarm_connection import SwarmConn
from .exceptions import SwarmException
from .network_interface import INetworkService
from .notifee_interface import INotifee
from .stream.net_stream_interface import INetStream
from ..exceptions import (
MultiError,
)
from .connection.raw_connection import (
RawConnection,
)
from .connection.swarm_connection import (
SwarmConn,
)
from .exceptions import (
SwarmException,
)
from .network_interface import (
INetworkService,
)
from .notifee_interface import (
INotifee,
)
from .stream.net_stream_interface import (
INetStream,
)
logger = logging.getLogger("libp2p.network.swarm")
@ -40,7 +82,6 @@ def create_default_stream_handler(network: INetworkService) -> StreamHandlerFn:
class Swarm(Service, INetworkService):
self_id: ID
peerstore: IPeerStore
upgrader: TransportUpgrader
@ -72,7 +113,8 @@ class Swarm(Service, INetworkService):
# Create Notifee array
self.notifees = []
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
self.common_stream_handler = create_default_stream_handler(self) # type: ignore
self.listener_nursery = None
@ -95,18 +137,18 @@ class Swarm(Service, INetworkService):
return self.self_id
def set_stream_handler(self, stream_handler: StreamHandlerFn) -> None:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains:
# https://github.com/python/mypy/issues/2427
self.common_stream_handler = stream_handler # type: ignore
async def dial_peer(self, peer_id: ID) -> INetConn:
"""
dial_peer try to create a connection to peer_id.
Try to create a connection to peer_id.
:param peer_id: peer if we want to dial
:raises SwarmException: raised when an error occurs
:return: muxed connection
"""
if peer_id in self.connections:
# If muxed connection already exists for peer_id,
# set muxed connection equal to existing muxed connection
@ -140,20 +182,19 @@ class Swarm(Service, INetworkService):
# Tried all addresses, raising exception.
raise SwarmException(
f"unable to connect to {peer_id}, no addresses established a successful connection "
"(with exceptions)"
f"unable to connect to {peer_id}, no addresses established a successful "
"connection (with exceptions)"
) from MultiError(exceptions)
async def dial_addr(self, addr: Multiaddr, peer_id: ID) -> INetConn:
"""
dial_addr try to create a connection to peer_id with addr.
Try to create a connection to peer_id with addr.
:param addr: the address we want to connect with
:param peer_id: the peer we want to connect to
:raises SwarmException: raised when an error occurs
:return: network connection
"""
# Dial peer (connection to peer does not yet exist)
# Transport dials peer (gets back a raw conn)
try:
@ -231,11 +272,13 @@ class Swarm(Service, INetworkService):
if str(maddr) in self.listeners:
return True
async def conn_handler(read_write_closer: ReadWriteCloser) -> None:
async def conn_handler(
read_write_closer: ReadWriteCloser, maddr=maddr
) -> None:
raw_conn = RawConnection(read_write_closer, False)
# Per, https://discuss.libp2p.io/t/multistream-security/130, we first secure
# the conn and then mux the conn
# Per, https://discuss.libp2p.io/t/multistream-security/130, we first
# secure the conn and then mux the conn
try:
# FIXME: This dummy `ID(b"")` for the remote peer is useless.
secured_conn = await self.upgrader.upgrade_security(
@ -264,8 +307,8 @@ class Swarm(Service, INetworkService):
await self.add_conn(muxed_conn)
logger.debug("successfully opened connection to peer %s", peer_id)
# NOTE: This is a intentional barrier to prevent from the handler exiting and
# closing the connection.
# NOTE: This is a intentional barrier to prevent from the handler
# exiting and closing the connection.
await self.manager.wait_finished()
try:
@ -282,7 +325,7 @@ class Swarm(Service, INetworkService):
await self.notify_listen(maddr)
return True
except IOError:
except OSError:
# Failed. Continue looping.
logger.debug("fail to listen on: %s", maddr)
@ -304,9 +347,11 @@ class Swarm(Service, INetworkService):
logger.debug("successfully close the connection to peer %s", peer_id)
async def add_conn(self, muxed_conn: IMuxedConn) -> SwarmConn:
"""Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
"""
Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
and start to monitor the connection for its new streams and
disconnection."""
disconnection.
"""
swarm_conn = SwarmConn(muxed_conn, self)
self.manager.run_task(muxed_conn.start)
await muxed_conn.event_started.wait()
@ -319,8 +364,10 @@ class Swarm(Service, INetworkService):
return swarm_conn
def remove_conn(self, swarm_conn: SwarmConn) -> None:
"""Simply remove the connection from Swarm's records, without closing
the connection."""
"""
Simply remove the connection from Swarm's records, without closing
the connection.
"""
peer_id = swarm_conn.muxed_conn.peer_id
if peer_id not in self.connections:
return

View File

@ -1,5 +1,5 @@
# PeerStore
The PeerStore contains a mapping of peer IDs to PeerData objects. Each PeerData object represents a peer, and each PeerData contains a collection of protocols, addresses, and a mapping of metadata. PeerStore implements the IPeerStore (peer protocols), IAddrBook (address book), and IPeerMetadata (peer metadata) interfaces, which allows the peer store to effectively function as a dictionary for peer ID to protocol, address, and metadata.
The PeerStore contains a mapping of peer IDs to PeerData objects. Each PeerData object represents a peer, and each PeerData contains a collection of protocols, addresses, and a mapping of metadata. PeerStore implements the IPeerStore (peer protocols), IAddrBook (address book), and IPeerMetadata (peer metadata) interfaces, which allows the peer store to effectively function as a dictionary for peer ID to protocol, address, and metadata.
Note: PeerInfo represents a read-only summary of a PeerData object. Only the attributes assigned in PeerInfo are readable by references to PeerInfo objects.
Note: PeerInfo represents a read-only summary of a PeerData object. Only the attributes assigned in PeerInfo are readable by references to PeerInfo objects.

View File

@ -1,9 +1,19 @@
from abc import ABC, abstractmethod
from typing import List, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from .id import ID
from .id import (
ID,
)
class IAddrBook(ABC):
@ -15,7 +25,7 @@ class IAddrBook(ABC):
:param peer_id: the peer to add address for
:param addr: multiaddress of the peer
:param ttl: time-to-live for the address (after this time, address is no longer valid)
"""
""" # noqa: E501
@abstractmethod
def add_addrs(self, peer_id: ID, addrs: Sequence[Multiaddr], ttl: int) -> None:
@ -28,7 +38,7 @@ class IAddrBook(ABC):
:param peer_id: the peer to add address for
:param addr: multiaddresses of the peer
:param ttl: time-to-live for the address (after this time, address is no longer valid
"""
""" # noqa: E501
@abstractmethod
def addrs(self, peer_id: ID) -> List[Multiaddr]:

View File

@ -1,10 +1,14 @@
import hashlib
from typing import Union
from typing import (
Union,
)
import base58
import multihash
from libp2p.crypto.keys import PublicKey
from libp2p.crypto.keys import (
PublicKey,
)
# NOTE: On inlining...
# See: https://github.com/libp2p/specs/issues/138

View File

@ -1,14 +1,25 @@
from typing import Any, Dict, List, Sequence
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from .peerdata_interface import IPeerData
from .peerdata_interface import (
IPeerData,
)
class PeerData(IPeerData):
pubkey: PublicKey
privkey: PrivateKey
metadata: Dict[Any, Any]

View File

@ -1,11 +1,25 @@
from abc import ABC, abstractmethod
from typing import Any, List, Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from .peermetadata_interface import IPeerMetadata
from .peermetadata_interface import (
IPeerMetadata,
)
class IPeerData(ABC):

View File

@ -1,8 +1,14 @@
from typing import Any, List, Sequence
from typing import (
Any,
List,
Sequence,
)
import multiaddr
from .id import ID
from .id import (
ID,
)
class PeerInfo:

View File

@ -1,7 +1,14 @@
from abc import ABC, abstractmethod
from typing import Any
from abc import (
ABC,
abstractmethod,
)
from typing import (
Any,
)
from .id import ID
from .id import (
ID,
)
class IPeerMetadata(ABC):

View File

@ -1,18 +1,39 @@
from collections import defaultdict
from typing import Any, Dict, List, Sequence
from collections import (
defaultdict,
)
from typing import (
Any,
Dict,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import KeyPair, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
PublicKey,
)
from .id import ID
from .peerdata import PeerData, PeerDataError
from .peerinfo import PeerInfo
from .peerstore_interface import IPeerStore
from .id import (
ID,
)
from .peerdata import (
PeerData,
PeerDataError,
)
from .peerinfo import (
PeerInfo,
)
from .peerstore_interface import (
IPeerStore,
)
class PeerStore(IPeerStore):
peer_data_map: Dict[ID, PeerData]
def __init__(self) -> None:

View File

@ -1,14 +1,34 @@
from abc import abstractmethod
from typing import Any, List, Sequence
from abc import (
abstractmethod,
)
from typing import (
Any,
List,
Sequence,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
from libp2p.crypto.keys import KeyPair, PrivateKey, PublicKey
from libp2p.crypto.keys import (
KeyPair,
PrivateKey,
PublicKey,
)
from .addrbook_interface import IAddrBook
from .id import ID
from .peerinfo import PeerInfo
from .peermetadata_interface import IPeerMetadata
from .addrbook_interface import (
IAddrBook,
)
from .id import (
ID,
)
from .peerinfo import (
PeerInfo,
)
from .peermetadata_interface import (
IPeerMetadata,
)
class IPeerStore(IAddrBook, IPeerMetadata):

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class MultiselectCommunicatorError(BaseLibp2pError):

View File

@ -1,19 +1,34 @@
from typing import Dict, Tuple
from typing import (
Dict,
Tuple,
)
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .exceptions import MultiselectCommunicatorError, MultiselectError
from .multiselect_communicator_interface import IMultiselectCommunicator
from .multiselect_muxer_interface import IMultiselectMuxer
from .exceptions import (
MultiselectCommunicatorError,
MultiselectError,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
from .multiselect_muxer_interface import (
IMultiselectMuxer,
)
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na"
class Multiselect(IMultiselectMuxer):
"""Multiselect module that is responsible for responding to a multiselect
"""
Multiselect module that is responsible for responding to a multiselect
client and deciding on a specific protocol and handler pair to use for
communication."""
communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn]

View File

@ -1,18 +1,31 @@
from typing import Sequence
from typing import (
Sequence,
)
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
from .exceptions import MultiselectClientError, MultiselectCommunicatorError
from .multiselect_client_interface import IMultiselectClient
from .multiselect_communicator_interface import IMultiselectCommunicator
from .exceptions import (
MultiselectClientError,
MultiselectCommunicatorError,
)
from .multiselect_client_interface import (
IMultiselectClient,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
PROTOCOL_NOT_FOUND_MSG = "na"
class MultiselectClient(IMultiselectClient):
"""Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over."""
"""
Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over.
"""
async def handshake(self, communicator: IMultiselectCommunicator) -> None:
"""

View File

@ -1,16 +1,26 @@
from abc import ABC, abstractmethod
from typing import Sequence
from abc import (
ABC,
abstractmethod,
)
from typing import (
Sequence,
)
from libp2p.protocol_muxer.multiselect_communicator_interface import (
IMultiselectCommunicator,
)
from libp2p.typing import TProtocol
from libp2p.typing import (
TProtocol,
)
class IMultiselectClient(ABC):
"""Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over."""
"""
Client for communicating with receiver's multiselect module in order to
select a protocol id to communicate over.
"""
@abstractmethod
async def handshake(self, communicator: IMultiselectCommunicator) -> None:
"""
Ensure that the client and multiselect are both using the same
@ -34,6 +44,7 @@ class IMultiselectClient(ABC):
:return: selected protocol
"""
@abstractmethod
async def try_select(
self, communicator: IMultiselectCommunicator, protocol: TProtocol
) -> TProtocol:

View File

@ -1,10 +1,23 @@
from libp2p.exceptions import ParseError
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.exceptions import IOException
from libp2p.utils import encode_delim, read_delim
from libp2p.exceptions import (
ParseError,
)
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.exceptions import (
IOException,
)
from libp2p.utils import (
encode_delim,
read_delim,
)
from .exceptions import MultiselectCommunicatorError
from .multiselect_communicator_interface import IMultiselectCommunicator
from .exceptions import (
MultiselectCommunicatorError,
)
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
class MultiselectCommunicator(IMultiselectCommunicator):
@ -16,7 +29,7 @@ class MultiselectCommunicator(IMultiselectCommunicator):
async def write(self, msg_str: str) -> None:
"""
:raise MultiselectCommunicatorError: raised when failed to write to underlying reader
"""
""" # noqa: E501
msg_bytes = encode_delim(msg_str.encode())
try:
await self.read_writer.write(msg_bytes)
@ -28,7 +41,7 @@ class MultiselectCommunicator(IMultiselectCommunicator):
async def read(self) -> str:
"""
:raise MultiselectCommunicatorError: raised when failed to read from underlying reader
"""
""" # noqa: E501
try:
data = await read_delim(self.read_writer)
# `IOException` includes `IncompleteReadError` and `StreamError`

View File

@ -1,10 +1,15 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
class IMultiselectCommunicator(ABC):
"""Communicator helper class that ensures both the client and multistream
"""
Communicator helper class that ensures both the client and multistream
module will follow the same multistream protocol, which is necessary for
them to work."""
them to work.
"""
@abstractmethod
async def write(self, msg_str: str) -> None:

View File

@ -1,15 +1,28 @@
from abc import ABC, abstractmethod
from typing import Dict, Tuple
from abc import (
ABC,
abstractmethod,
)
from typing import (
Dict,
Tuple,
)
from libp2p.typing import StreamHandlerFn, TProtocol
from libp2p.typing import (
StreamHandlerFn,
TProtocol,
)
from .multiselect_communicator_interface import IMultiselectCommunicator
from .multiselect_communicator_interface import (
IMultiselectCommunicator,
)
class IMultiselectMuxer(ABC):
"""Multiselect module that is responsible for responding to a multiselect
"""
Multiselect module that is responsible for responding to a multiselect
client and deciding on a specific protocol and handler pair to use for
communication."""
communication.
"""
handlers: Dict[TProtocol, StreamHandlerFn]

View File

@ -1,4 +1,7 @@
from abc import ABC, abstractmethod
from abc import (
ABC,
abstractmethod,
)
from typing import (
TYPE_CHECKING,
AsyncContextManager,
@ -8,13 +11,23 @@ from typing import (
Tuple,
)
from async_service import ServiceAPI
from async_service import (
ServiceAPI,
)
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from .pb import rpc_pb2
from .typing import ValidatorFn
from .pb import (
rpc_pb2,
)
from .typing import (
ValidatorFn,
)
if TYPE_CHECKING:
from .pubsub import Pubsub # noqa: F401
@ -69,9 +82,9 @@ class IPubsubRouter(ABC):
"""
Invoked to process control messages in the RPC envelope.
It is invoked after subscriptions and payload messages have been processed
TODO: Check if this interface is ok. It's not the exact same as the go code, but the go
code is really confusing with the msg origin, they specify `rpc.from` even when the rpc
shouldn't have a from
TODO: Check if this interface is ok. It's not the exact same as the go code, but
the go code is really confusing with the msg origin, they specify `rpc.from`
even when the rpc shouldn't have a from
:param rpc: rpc message
"""

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class PubsubRouterError(BaseLibp2pError):

View File

@ -1,16 +1,34 @@
import logging
from typing import Iterable, List, Sequence
from typing import (
Iterable,
List,
Sequence,
)
import trio
from libp2p.network.stream.exceptions import StreamClosed
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed
from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
)
from .abc import IPubsubRouter
from .pb import rpc_pb2
from .pubsub import Pubsub
from .abc import (
IPubsubRouter,
)
from .pb import (
rpc_pb2,
)
from .pubsub import (
Pubsub,
)
PROTOCOL_ID = TProtocol("/floodsub/1.0.0")
@ -18,7 +36,6 @@ logger = logging.getLogger("libp2p.pubsub.floodsub")
class FloodSub(IPubsubRouter):
protocols: List[TProtocol]
pubsub: Pubsub
@ -80,7 +97,6 @@ class FloodSub(IPubsubRouter):
:param msg_forwarder: peer ID of the peer who forwards the message to us
:param pubsub_msg: pubsub message in protobuf.
"""
peers_gen = set(
self._get_peers_to_send(
pubsub_msg.topicIDs,

View File

@ -1,23 +1,58 @@
from ast import literal_eval
from collections import defaultdict
from ast import (
literal_eval,
)
from collections import (
defaultdict,
)
import logging
import random
from typing import Any, DefaultDict, Dict, Iterable, List, Sequence, Set, Tuple
from typing import (
Any,
DefaultDict,
Dict,
Iterable,
List,
Sequence,
Set,
Tuple,
)
from async_service import Service
from async_service import (
Service,
)
import trio
from libp2p.network.stream.exceptions import StreamClosed
from libp2p.peer.id import ID
from libp2p.pubsub import floodsub
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed
from libp2p.network.stream.exceptions import (
StreamClosed,
)
from libp2p.peer.id import (
ID,
)
from libp2p.pubsub import (
floodsub,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
)
from .abc import IPubsubRouter
from .exceptions import NoPubsubAttached
from .mcache import MessageCache
from .pb import rpc_pb2
from .pubsub import Pubsub
from .abc import (
IPubsubRouter,
)
from .exceptions import (
NoPubsubAttached,
)
from .mcache import (
MessageCache,
)
from .pb import (
rpc_pb2,
)
from .pubsub import (
Pubsub,
)
PROTOCOL_ID = TProtocol("/meshsub/1.0.0")
@ -120,10 +155,10 @@ class GossipSub(IPubsubRouter, Service):
logger.debug("adding peer %s with protocol %s", peer_id, protocol_id)
if protocol_id not in (PROTOCOL_ID, floodsub.PROTOCOL_ID):
# We should never enter here. Becuase the `protocol_id` is registered by your pubsub
# instance in multistream-select, but it is not the protocol that gossipsub supports.
# In this case, probably we registered gossipsub to a wrong `protocol_id`
# in multistream-select, or wrong versions.
# We should never enter here. Becuase the `protocol_id` is registered by
# your pubsub instance in multistream-select, but it is not the protocol
# that gossipsub supports. In this case, probably we registered gossipsub
# to a wrong `protocol_id` in multistream-select, or wrong versions.
raise ValueError(f"Protocol={protocol_id} is not supported.")
self.peer_protocol[peer_id] = protocol_id
@ -208,11 +243,11 @@ class GossipSub(IPubsubRouter, Service):
continue
# floodsub peers
floodsub_peers: Set[ID] = set(
floodsub_peers: Set[ID] = {
peer_id
for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == floodsub.PROTOCOL_ID
)
}
send_to.update(floodsub_peers)
# gossipsub peers
@ -220,9 +255,9 @@ class GossipSub(IPubsubRouter, Service):
if topic in self.mesh:
gossipsub_peers = self.mesh[topic]
else:
# When we publish to a topic that we have not subscribe to, we randomly pick
# `self.degree` number of peers who have subscribed to the topic and add them
# as our `fanout` peers.
# When we publish to a topic that we have not subscribe to, we randomly
# pick `self.degree` number of peers who have subscribed to the topic
# and add them as our `fanout` peers.
topic_in_fanout: bool = topic in self.fanout
fanout_peers: Set[ID] = self.fanout[topic] if topic_in_fanout else set()
fanout_size = len(fanout_peers)
@ -270,7 +305,7 @@ class GossipSub(IPubsubRouter, Service):
# Combine fanout peers with selected peers
fanout_peers.update(selected_peers)
# Add fanout peers to mesh and notifies them with a GRAFT(topic) control message.
# Add fanout peers to mesh and notifies them with a GRAFT(topic) control message
for peer in fanout_peers:
self.mesh[topic].add(peer)
await self.emit_graft(topic, peer)
@ -369,10 +404,10 @@ class GossipSub(IPubsubRouter, Service):
"""
Call individual heartbeats.
Note: the heartbeats are called with awaits because each heartbeat depends on the
state changes in the preceding heartbeat
Note: the heartbeats are called with awaits because each heartbeat depends on
the state changes in the preceding heartbeat
"""
# Start after a delay. Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L410 # Noqa: E501
# Start after a delay. Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L410 # noqa: E501
await trio.sleep(self.heartbeat_initial_delay)
while True:
# Maintain mesh and keep track of which peers to send GRAFT or PRUNE to
@ -381,7 +416,8 @@ class GossipSub(IPubsubRouter, Service):
self.fanout_heartbeat()
# Get the peers to send IHAVE to
peers_to_gossip = self.gossip_heartbeat()
# Pack GRAFT, PRUNE and IHAVE for the same peer into one control message and send it
# Pack GRAFT, PRUNE and IHAVE for the same peer into one control message and
# send it
await self._emit_control_msgs(
peers_to_graft, peers_to_prune, peers_to_gossip
)
@ -391,7 +427,7 @@ class GossipSub(IPubsubRouter, Service):
await trio.sleep(self.heartbeat_interval)
def mesh_heartbeat(
self
self,
) -> Tuple[DefaultDict[ID, List[str]], DefaultDict[ID, List[str]]]:
peers_to_graft: DefaultDict[ID, List[str]] = defaultdict(list)
peers_to_prune: DefaultDict[ID, List[str]] = defaultdict(list)
@ -402,7 +438,7 @@ class GossipSub(IPubsubRouter, Service):
num_mesh_peers_in_topic = len(self.mesh[topic])
if num_mesh_peers_in_topic < self.degree_low:
# Select D - |mesh[topic]| peers from peers.gossipsub[topic] - mesh[topic]
# Select D - |mesh[topic]| peers from peers.gossipsub[topic] - mesh[topic] # noqa: E501
selected_peers = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree - num_mesh_peers_in_topic, self.mesh[topic]
)
@ -436,7 +472,7 @@ class GossipSub(IPubsubRouter, Service):
# Remove topic from fanout
del self.fanout[topic]
else:
# Check if fanout peers are still in the topic and remove the ones that are not
# Check if fanout peers are still in the topic and remove the ones that are not # noqa: E501
# ref: https://github.com/libp2p/go-libp2p-pubsub/blob/01b9825fbee1848751d90a8469e3f5f43bac8466/gossipsub.go#L498-L504 # noqa: E501
in_topic_fanout_peers = [
peer
@ -448,7 +484,7 @@ class GossipSub(IPubsubRouter, Service):
# If |fanout[topic]| < D
if num_fanout_peers_in_topic < self.degree:
# Select D - |fanout[topic]| peers from peers.gossipsub[topic] - fanout[topic]
# Select D - |fanout[topic]| peers from peers.gossipsub[topic] - fanout[topic] # noqa: E501
selected_peers = self._get_in_topic_gossipsub_peers_from_minus(
topic,
self.degree - num_fanout_peers_in_topic,
@ -462,11 +498,14 @@ class GossipSub(IPubsubRouter, Service):
for topic in self.mesh:
msg_ids = self.mcache.window(topic)
if msg_ids:
# Get all pubsub peers in a topic and only add them if they are gossipsub peers too
# Get all pubsub peers in a topic and only add them if they are
# gossipsub peers too
if topic in self.pubsub.peer_topics:
# Select D peers from peers.gossipsub[topic]
peers_to_emit_ihave_to = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.mesh[topic]
peers_to_emit_ihave_to = (
self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.mesh[topic]
)
)
msg_id_strs = [str(msg_id) for msg_id in msg_ids]
@ -478,11 +517,14 @@ class GossipSub(IPubsubRouter, Service):
for topic in self.fanout:
msg_ids = self.mcache.window(topic)
if msg_ids:
# Get all pubsub peers in topic and only add if they are gossipsub peers also
# Get all pubsub peers in topic and only add if they are
# gossipsub peers also
if topic in self.pubsub.peer_topics:
# Select D peers from peers.gossipsub[topic]
peers_to_emit_ihave_to = self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.fanout[topic]
peers_to_emit_ihave_to = (
self._get_in_topic_gossipsub_peers_from_minus(
topic, self.degree, self.fanout[topic]
)
)
msg_id_strs = [str(msg) for msg in msg_ids]
for peer in peers_to_emit_ihave_to:
@ -494,7 +536,8 @@ class GossipSub(IPubsubRouter, Service):
num_to_select: int, pool: Iterable[Any], minus: Iterable[Any]
) -> List[Any]:
"""
Select at most num_to_select subset of elements from the set (pool - minus) randomly.
Select at most num_to_select subset of elements from the set
(pool - minus) randomly.
:param num_to_select: number of elements to randomly select
:param pool: list of items to select from (excluding elements in minus)
:param minus: elements to be excluded from selection pool
@ -508,8 +551,9 @@ class GossipSub(IPubsubRouter, Service):
# Don't create a new selection_pool if we are not subbing anything
selection_pool = list(pool)
# If num_to_select > size(selection_pool), then return selection_pool (which has the most
# possible elements s.t. the number of elements is less than num_to_select)
# If num_to_select > size(selection_pool), then return selection_pool (which has
# the most possible elements s.t. the number of elements is less than
# num_to_select)
if num_to_select >= len(selection_pool):
return selection_pool
@ -521,11 +565,11 @@ class GossipSub(IPubsubRouter, Service):
def _get_in_topic_gossipsub_peers_from_minus(
self, topic: str, num_to_select: int, minus: Iterable[ID]
) -> List[ID]:
gossipsub_peers_in_topic = set(
gossipsub_peers_in_topic = {
peer_id
for peer_id in self.pubsub.peer_topics[topic]
if self.peer_protocol[peer_id] == PROTOCOL_ID
)
}
return self.select_from_minus(num_to_select, gossipsub_peers_in_topic, minus)
# RPC handlers
@ -533,15 +577,15 @@ class GossipSub(IPubsubRouter, Service):
async def handle_ihave(
self, ihave_msg: rpc_pb2.ControlIHave, sender_peer_id: ID
) -> None:
"""Checks the seen set and requests unknown messages with an IWANT
message."""
# Get list of all seen (seqnos, from) from the (seqno, from) tuples in seen_messages cache
"""Checks the seen set and requests unknown messages with an IWANT message."""
# Get list of all seen (seqnos, from) from the (seqno, from) tuples in
# seen_messages cache
seen_seqnos_and_peers = [
seqno_and_from for seqno_and_from in self.pubsub.seen_messages.keys()
]
# Add all unknown message ids (ids that appear in ihave_msg but not in seen_seqnos) to list
# of messages we want to request
# Add all unknown message ids (ids that appear in ihave_msg but not in
# seen_seqnos) to list of messages we want to request
# FIXME: Update type of message ID
msg_ids_wanted: List[Any] = [
msg_id
@ -556,8 +600,10 @@ class GossipSub(IPubsubRouter, Service):
async def handle_iwant(
self, iwant_msg: rpc_pb2.ControlIWant, sender_peer_id: ID
) -> None:
"""Forwards all request messages that are present in mcache to the
requesting peer."""
"""
Forwards all request messages that are present in mcache to the
requesting peer.
"""
# FIXME: Update type of message ID
# FIXME: Find a better way to parse the msg ids
msg_ids: List[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs]
@ -572,8 +618,8 @@ class GossipSub(IPubsubRouter, Service):
msgs_to_forward.append(msg)
# Forward messages to requesting peer
# Should this just be publishing? No
# because then the message will forwarded to peers in the topics contained in the messages.
# Should this just be publishing? No, because then the message will forwarded to
# peers in the topics contained in the messages.
# We should
# 1) Package these messages into a single packet
packet: rpc_pb2.RPC = rpc_pb2.RPC()
@ -643,7 +689,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_ihave(self, topic: str, msg_ids: Any, to_peer: ID) -> None:
"""Emit ihave message, sent to to_peer, for topic and msg_ids."""
ihave_msg: rpc_pb2.ControlIHave = rpc_pb2.ControlIHave()
ihave_msg.messageIDs.extend(msg_ids)
ihave_msg.topicID = topic
@ -655,7 +700,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_iwant(self, msg_ids: Any, to_peer: ID) -> None:
"""Emit iwant message, sent to to_peer, for msg_ids."""
iwant_msg: rpc_pb2.ControlIWant = rpc_pb2.ControlIWant()
iwant_msg.messageIDs.extend(msg_ids)
@ -666,7 +710,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_graft(self, topic: str, to_peer: ID) -> None:
"""Emit graft message, sent to to_peer, for topic."""
graft_msg: rpc_pb2.ControlGraft = rpc_pb2.ControlGraft()
graft_msg.topicID = topic
@ -677,7 +720,6 @@ class GossipSub(IPubsubRouter, Service):
async def emit_prune(self, topic: str, to_peer: ID) -> None:
"""Emit graft message, sent to to_peer, for topic."""
prune_msg: rpc_pb2.ControlPrune = rpc_pb2.ControlPrune()
prune_msg.topicID = topic

View File

@ -1,10 +1,17 @@
from typing import Dict, List, Optional, Sequence, Tuple
from typing import (
Dict,
List,
Optional,
Sequence,
Tuple,
)
from .pb import rpc_pb2
from .pb import (
rpc_pb2,
)
class CacheEntry:
mid: Tuple[bytes, bytes]
topics: List[str]
@ -24,7 +31,6 @@ class CacheEntry:
class MessageCache:
window_size: int
history_size: int
@ -91,8 +97,9 @@ class MessageCache:
return mids
def shift(self) -> None:
"""Shift the window over by 1 position, dropping the last element of
the history."""
"""
Shift the window over by 1 position, dropping the last element of the history.
"""
last_entries: List[CacheEntry] = self.history[len(self.history) - 1]
for entry in last_entries:

View File

@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/pubsub/pb/rpc.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@ -15,625 +14,37 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/pubsub/pb/rpc.proto',
package='pubsub.pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x1alibp2p/pubsub/pb/rpc.proto\x12\tpubsub.pb\"\xb4\x01\n\x03RPC\x12-\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x16.pubsub.pb.RPC.SubOpts\x12#\n\x07publish\x18\x02 \x03(\x0b\x32\x12.pubsub.pb.Message\x12*\n\x07\x63ontrol\x18\x03 \x01(\x0b\x32\x19.pubsub.pb.ControlMessage\x1a-\n\x07SubOpts\x12\x11\n\tsubscribe\x18\x01 \x01(\x08\x12\x0f\n\x07topicid\x18\x02 \x01(\t\"i\n\x07Message\x12\x0f\n\x07\x66rom_id\x18\x01 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\r\n\x05seqno\x18\x03 \x01(\x0c\x12\x10\n\x08topicIDs\x18\x04 \x03(\t\x12\x11\n\tsignature\x18\x05 \x01(\x0c\x12\x0b\n\x03key\x18\x06 \x01(\x0c\"\xb0\x01\n\x0e\x43ontrolMessage\x12&\n\x05ihave\x18\x01 \x03(\x0b\x32\x17.pubsub.pb.ControlIHave\x12&\n\x05iwant\x18\x02 \x03(\x0b\x32\x17.pubsub.pb.ControlIWant\x12&\n\x05graft\x18\x03 \x03(\x0b\x32\x17.pubsub.pb.ControlGraft\x12&\n\x05prune\x18\x04 \x03(\x0b\x32\x17.pubsub.pb.ControlPrune\"3\n\x0c\x43ontrolIHave\x12\x0f\n\x07topicID\x18\x01 \x01(\t\x12\x12\n\nmessageIDs\x18\x02 \x03(\t\"\"\n\x0c\x43ontrolIWant\x12\x12\n\nmessageIDs\x18\x01 \x03(\t\"\x1f\n\x0c\x43ontrolGraft\x12\x0f\n\x07topicID\x18\x01 \x01(\t\"\x1f\n\x0c\x43ontrolPrune\x12\x0f\n\x07topicID\x18\x01 \x01(\t\"\x87\x03\n\x0fTopicDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\x04\x61uth\x18\x02 \x01(\x0b\x32#.pubsub.pb.TopicDescriptor.AuthOpts\x12/\n\x03\x65nc\x18\x03 \x01(\x0b\x32\".pubsub.pb.TopicDescriptor.EncOpts\x1a|\n\x08\x41uthOpts\x12:\n\x04mode\x18\x01 \x01(\x0e\x32,.pubsub.pb.TopicDescriptor.AuthOpts.AuthMode\x12\x0c\n\x04keys\x18\x02 \x03(\x0c\"&\n\x08\x41uthMode\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03KEY\x10\x01\x12\x07\n\x03WOT\x10\x02\x1a\x83\x01\n\x07\x45ncOpts\x12\x38\n\x04mode\x18\x01 \x01(\x0e\x32*.pubsub.pb.TopicDescriptor.EncOpts.EncMode\x12\x11\n\tkeyHashes\x18\x02 \x03(\x0c\"+\n\x07\x45ncMode\x12\x08\n\x04NONE\x10\x00\x12\r\n\tSHAREDKEY\x10\x01\x12\x07\n\x03WOT\x10\x02')
)
_TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE = _descriptor.EnumDescriptor(
name='AuthMode',
full_name='pubsub.pb.TopicDescriptor.AuthOpts.AuthMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KEY', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WOT', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=885,
serialized_end=923,
)
_sym_db.RegisterEnumDescriptor(_TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE)
_TOPICDESCRIPTOR_ENCOPTS_ENCMODE = _descriptor.EnumDescriptor(
name='EncMode',
full_name='pubsub.pb.TopicDescriptor.EncOpts.EncMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SHAREDKEY', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WOT', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1014,
serialized_end=1057,
)
_sym_db.RegisterEnumDescriptor(_TOPICDESCRIPTOR_ENCOPTS_ENCMODE)
_RPC_SUBOPTS = _descriptor.Descriptor(
name='SubOpts',
full_name='pubsub.pb.RPC.SubOpts',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='subscribe', full_name='pubsub.pb.RPC.SubOpts.subscribe', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='topicid', full_name='pubsub.pb.RPC.SubOpts.topicid', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=177,
serialized_end=222,
)
_RPC = _descriptor.Descriptor(
name='RPC',
full_name='pubsub.pb.RPC',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='subscriptions', full_name='pubsub.pb.RPC.subscriptions', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='publish', full_name='pubsub.pb.RPC.publish', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='control', full_name='pubsub.pb.RPC.control', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_RPC_SUBOPTS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=42,
serialized_end=222,
)
_MESSAGE = _descriptor.Descriptor(
name='Message',
full_name='pubsub.pb.Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='from_id', full_name='pubsub.pb.Message.from_id', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pubsub.pb.Message.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='seqno', full_name='pubsub.pb.Message.seqno', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='topicIDs', full_name='pubsub.pb.Message.topicIDs', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='signature', full_name='pubsub.pb.Message.signature', index=4,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='key', full_name='pubsub.pb.Message.key', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=224,
serialized_end=329,
)
_CONTROLMESSAGE = _descriptor.Descriptor(
name='ControlMessage',
full_name='pubsub.pb.ControlMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ihave', full_name='pubsub.pb.ControlMessage.ihave', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='iwant', full_name='pubsub.pb.ControlMessage.iwant', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='graft', full_name='pubsub.pb.ControlMessage.graft', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='prune', full_name='pubsub.pb.ControlMessage.prune', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=332,
serialized_end=508,
)
_CONTROLIHAVE = _descriptor.Descriptor(
name='ControlIHave',
full_name='pubsub.pb.ControlIHave',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='topicID', full_name='pubsub.pb.ControlIHave.topicID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='messageIDs', full_name='pubsub.pb.ControlIHave.messageIDs', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=510,
serialized_end=561,
)
_CONTROLIWANT = _descriptor.Descriptor(
name='ControlIWant',
full_name='pubsub.pb.ControlIWant',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='messageIDs', full_name='pubsub.pb.ControlIWant.messageIDs', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=563,
serialized_end=597,
)
_CONTROLGRAFT = _descriptor.Descriptor(
name='ControlGraft',
full_name='pubsub.pb.ControlGraft',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='topicID', full_name='pubsub.pb.ControlGraft.topicID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=599,
serialized_end=630,
)
_CONTROLPRUNE = _descriptor.Descriptor(
name='ControlPrune',
full_name='pubsub.pb.ControlPrune',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='topicID', full_name='pubsub.pb.ControlPrune.topicID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=632,
serialized_end=663,
)
_TOPICDESCRIPTOR_AUTHOPTS = _descriptor.Descriptor(
name='AuthOpts',
full_name='pubsub.pb.TopicDescriptor.AuthOpts',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='mode', full_name='pubsub.pb.TopicDescriptor.AuthOpts.mode', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keys', full_name='pubsub.pb.TopicDescriptor.AuthOpts.keys', index=1,
number=2, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=799,
serialized_end=923,
)
_TOPICDESCRIPTOR_ENCOPTS = _descriptor.Descriptor(
name='EncOpts',
full_name='pubsub.pb.TopicDescriptor.EncOpts',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='mode', full_name='pubsub.pb.TopicDescriptor.EncOpts.mode', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyHashes', full_name='pubsub.pb.TopicDescriptor.EncOpts.keyHashes', index=1,
number=2, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_TOPICDESCRIPTOR_ENCOPTS_ENCMODE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=926,
serialized_end=1057,
)
_TOPICDESCRIPTOR = _descriptor.Descriptor(
name='TopicDescriptor',
full_name='pubsub.pb.TopicDescriptor',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pubsub.pb.TopicDescriptor.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='auth', full_name='pubsub.pb.TopicDescriptor.auth', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='enc', full_name='pubsub.pb.TopicDescriptor.enc', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TOPICDESCRIPTOR_AUTHOPTS, _TOPICDESCRIPTOR_ENCOPTS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=666,
serialized_end=1057,
)
_RPC_SUBOPTS.containing_type = _RPC
_RPC.fields_by_name['subscriptions'].message_type = _RPC_SUBOPTS
_RPC.fields_by_name['publish'].message_type = _MESSAGE
_RPC.fields_by_name['control'].message_type = _CONTROLMESSAGE
_CONTROLMESSAGE.fields_by_name['ihave'].message_type = _CONTROLIHAVE
_CONTROLMESSAGE.fields_by_name['iwant'].message_type = _CONTROLIWANT
_CONTROLMESSAGE.fields_by_name['graft'].message_type = _CONTROLGRAFT
_CONTROLMESSAGE.fields_by_name['prune'].message_type = _CONTROLPRUNE
_TOPICDESCRIPTOR_AUTHOPTS.fields_by_name['mode'].enum_type = _TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE
_TOPICDESCRIPTOR_AUTHOPTS.containing_type = _TOPICDESCRIPTOR
_TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE.containing_type = _TOPICDESCRIPTOR_AUTHOPTS
_TOPICDESCRIPTOR_ENCOPTS.fields_by_name['mode'].enum_type = _TOPICDESCRIPTOR_ENCOPTS_ENCMODE
_TOPICDESCRIPTOR_ENCOPTS.containing_type = _TOPICDESCRIPTOR
_TOPICDESCRIPTOR_ENCOPTS_ENCMODE.containing_type = _TOPICDESCRIPTOR_ENCOPTS
_TOPICDESCRIPTOR.fields_by_name['auth'].message_type = _TOPICDESCRIPTOR_AUTHOPTS
_TOPICDESCRIPTOR.fields_by_name['enc'].message_type = _TOPICDESCRIPTOR_ENCOPTS
DESCRIPTOR.message_types_by_name['RPC'] = _RPC
DESCRIPTOR.message_types_by_name['Message'] = _MESSAGE
DESCRIPTOR.message_types_by_name['ControlMessage'] = _CONTROLMESSAGE
DESCRIPTOR.message_types_by_name['ControlIHave'] = _CONTROLIHAVE
DESCRIPTOR.message_types_by_name['ControlIWant'] = _CONTROLIWANT
DESCRIPTOR.message_types_by_name['ControlGraft'] = _CONTROLGRAFT
DESCRIPTOR.message_types_by_name['ControlPrune'] = _CONTROLPRUNE
DESCRIPTOR.message_types_by_name['TopicDescriptor'] = _TOPICDESCRIPTOR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
RPC = _reflection.GeneratedProtocolMessageType('RPC', (_message.Message,), {
'SubOpts' : _reflection.GeneratedProtocolMessageType('SubOpts', (_message.Message,), {
'DESCRIPTOR' : _RPC_SUBOPTS,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.RPC.SubOpts)
})
,
'DESCRIPTOR' : _RPC,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.RPC)
})
_sym_db.RegisterMessage(RPC)
_sym_db.RegisterMessage(RPC.SubOpts)
Message = _reflection.GeneratedProtocolMessageType('Message', (_message.Message,), {
'DESCRIPTOR' : _MESSAGE,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.Message)
})
_sym_db.RegisterMessage(Message)
ControlMessage = _reflection.GeneratedProtocolMessageType('ControlMessage', (_message.Message,), {
'DESCRIPTOR' : _CONTROLMESSAGE,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.ControlMessage)
})
_sym_db.RegisterMessage(ControlMessage)
ControlIHave = _reflection.GeneratedProtocolMessageType('ControlIHave', (_message.Message,), {
'DESCRIPTOR' : _CONTROLIHAVE,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.ControlIHave)
})
_sym_db.RegisterMessage(ControlIHave)
ControlIWant = _reflection.GeneratedProtocolMessageType('ControlIWant', (_message.Message,), {
'DESCRIPTOR' : _CONTROLIWANT,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.ControlIWant)
})
_sym_db.RegisterMessage(ControlIWant)
ControlGraft = _reflection.GeneratedProtocolMessageType('ControlGraft', (_message.Message,), {
'DESCRIPTOR' : _CONTROLGRAFT,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.ControlGraft)
})
_sym_db.RegisterMessage(ControlGraft)
ControlPrune = _reflection.GeneratedProtocolMessageType('ControlPrune', (_message.Message,), {
'DESCRIPTOR' : _CONTROLPRUNE,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.ControlPrune)
})
_sym_db.RegisterMessage(ControlPrune)
TopicDescriptor = _reflection.GeneratedProtocolMessageType('TopicDescriptor', (_message.Message,), {
'AuthOpts' : _reflection.GeneratedProtocolMessageType('AuthOpts', (_message.Message,), {
'DESCRIPTOR' : _TOPICDESCRIPTOR_AUTHOPTS,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.TopicDescriptor.AuthOpts)
})
,
'EncOpts' : _reflection.GeneratedProtocolMessageType('EncOpts', (_message.Message,), {
'DESCRIPTOR' : _TOPICDESCRIPTOR_ENCOPTS,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.TopicDescriptor.EncOpts)
})
,
'DESCRIPTOR' : _TOPICDESCRIPTOR,
'__module__' : 'libp2p.pubsub.pb.rpc_pb2'
# @@protoc_insertion_point(class_scope:pubsub.pb.TopicDescriptor)
})
_sym_db.RegisterMessage(TopicDescriptor)
_sym_db.RegisterMessage(TopicDescriptor.AuthOpts)
_sym_db.RegisterMessage(TopicDescriptor.EncOpts)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1alibp2p/pubsub/pb/rpc.proto\x12\tpubsub.pb\"\xb4\x01\n\x03RPC\x12-\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x16.pubsub.pb.RPC.SubOpts\x12#\n\x07publish\x18\x02 \x03(\x0b\x32\x12.pubsub.pb.Message\x12*\n\x07\x63ontrol\x18\x03 \x01(\x0b\x32\x19.pubsub.pb.ControlMessage\x1a-\n\x07SubOpts\x12\x11\n\tsubscribe\x18\x01 \x01(\x08\x12\x0f\n\x07topicid\x18\x02 \x01(\t\"i\n\x07Message\x12\x0f\n\x07\x66rom_id\x18\x01 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\r\n\x05seqno\x18\x03 \x01(\x0c\x12\x10\n\x08topicIDs\x18\x04 \x03(\t\x12\x11\n\tsignature\x18\x05 \x01(\x0c\x12\x0b\n\x03key\x18\x06 \x01(\x0c\"\xb0\x01\n\x0e\x43ontrolMessage\x12&\n\x05ihave\x18\x01 \x03(\x0b\x32\x17.pubsub.pb.ControlIHave\x12&\n\x05iwant\x18\x02 \x03(\x0b\x32\x17.pubsub.pb.ControlIWant\x12&\n\x05graft\x18\x03 \x03(\x0b\x32\x17.pubsub.pb.ControlGraft\x12&\n\x05prune\x18\x04 \x03(\x0b\x32\x17.pubsub.pb.ControlPrune\"3\n\x0c\x43ontrolIHave\x12\x0f\n\x07topicID\x18\x01 \x01(\t\x12\x12\n\nmessageIDs\x18\x02 \x03(\t\"\"\n\x0c\x43ontrolIWant\x12\x12\n\nmessageIDs\x18\x01 \x03(\t\"\x1f\n\x0c\x43ontrolGraft\x12\x0f\n\x07topicID\x18\x01 \x01(\t\"\x1f\n\x0c\x43ontrolPrune\x12\x0f\n\x07topicID\x18\x01 \x01(\t\"\x87\x03\n\x0fTopicDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\x04\x61uth\x18\x02 \x01(\x0b\x32#.pubsub.pb.TopicDescriptor.AuthOpts\x12/\n\x03\x65nc\x18\x03 \x01(\x0b\x32\".pubsub.pb.TopicDescriptor.EncOpts\x1a|\n\x08\x41uthOpts\x12:\n\x04mode\x18\x01 \x01(\x0e\x32,.pubsub.pb.TopicDescriptor.AuthOpts.AuthMode\x12\x0c\n\x04keys\x18\x02 \x03(\x0c\"&\n\x08\x41uthMode\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03KEY\x10\x01\x12\x07\n\x03WOT\x10\x02\x1a\x83\x01\n\x07\x45ncOpts\x12\x38\n\x04mode\x18\x01 \x01(\x0e\x32*.pubsub.pb.TopicDescriptor.EncOpts.EncMode\x12\x11\n\tkeyHashes\x18\x02 \x03(\x0c\"+\n\x07\x45ncMode\x12\x08\n\x04NONE\x10\x00\x12\r\n\tSHAREDKEY\x10\x01\x12\x07\n\x03WOT\x10\x02')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.pubsub.pb.rpc_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals['_RPC']._serialized_start=42
_globals['_RPC']._serialized_end=222
_globals['_RPC_SUBOPTS']._serialized_start=177
_globals['_RPC_SUBOPTS']._serialized_end=222
_globals['_MESSAGE']._serialized_start=224
_globals['_MESSAGE']._serialized_end=329
_globals['_CONTROLMESSAGE']._serialized_start=332
_globals['_CONTROLMESSAGE']._serialized_end=508
_globals['_CONTROLIHAVE']._serialized_start=510
_globals['_CONTROLIHAVE']._serialized_end=561
_globals['_CONTROLIWANT']._serialized_start=563
_globals['_CONTROLIWANT']._serialized_end=597
_globals['_CONTROLGRAFT']._serialized_start=599
_globals['_CONTROLGRAFT']._serialized_end=630
_globals['_CONTROLPRUNE']._serialized_start=632
_globals['_CONTROLPRUNE']._serialized_end=663
_globals['_TOPICDESCRIPTOR']._serialized_start=666
_globals['_TOPICDESCRIPTOR']._serialized_end=1057
_globals['_TOPICDESCRIPTOR_AUTHOPTS']._serialized_start=799
_globals['_TOPICDESCRIPTOR_AUTHOPTS']._serialized_end=923
_globals['_TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE']._serialized_start=885
_globals['_TOPICDESCRIPTOR_AUTHOPTS_AUTHMODE']._serialized_end=923
_globals['_TOPICDESCRIPTOR_ENCOPTS']._serialized_start=926
_globals['_TOPICDESCRIPTOR_ENCOPTS']._serialized_end=1057
_globals['_TOPICDESCRIPTOR_ENCOPTS_ENCMODE']._serialized_start=1014
_globals['_TOPICDESCRIPTOR_ENCOPTS_ENCMODE']._serialized_end=1057
# @@protoc_insertion_point(module_scope)

View File

@ -1,322 +1,294 @@
# @generated by generate_proto_mypy_stubs.py. Do not edit!
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Modified from https://github.com/libp2p/go-libp2p-pubsub/blob/master/pb/rpc.proto"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
EnumDescriptor as google___protobuf___descriptor___EnumDescriptor,
)
import typing
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer,
)
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
from typing import (
Iterable as typing___Iterable,
List as typing___List,
Optional as typing___Optional,
Text as typing___Text,
Tuple as typing___Tuple,
cast as typing___cast,
)
@typing_extensions.final
class RPC(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
from typing_extensions import (
Literal as typing_extensions___Literal,
)
@typing_extensions.final
class SubOpts(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class RPC(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class SubOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
subscribe = ... # type: bool
topicid = ... # type: typing___Text
def __init__(self,
SUBSCRIBE_FIELD_NUMBER: builtins.int
TOPICID_FIELD_NUMBER: builtins.int
subscribe: builtins.bool
"""subscribe or unsubscribe"""
topicid: builtins.str
def __init__(
self,
*,
subscribe : typing___Optional[bool] = None,
topicid : typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> RPC.SubOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"subscribe",u"topicid"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"subscribe",u"topicid"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"subscribe",b"subscribe",u"topicid",b"topicid"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"subscribe",b"subscribe",u"topicid",b"topicid"]) -> None: ...
@property
def subscriptions(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[RPC.SubOpts]: ...
@property
def publish(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Message]: ...
@property
def control(self) -> ControlMessage: ...
def __init__(self,
*,
subscriptions : typing___Optional[typing___Iterable[RPC.SubOpts]] = None,
publish : typing___Optional[typing___Iterable[Message]] = None,
control : typing___Optional[ControlMessage] = None,
subscribe: builtins.bool | None = ...,
topicid: builtins.str | None = ...,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> RPC: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"control"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"control",u"publish",u"subscriptions"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"control",b"control"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"control",b"control",u"publish",b"publish",u"subscriptions",b"subscriptions"]) -> None: ...
class Message(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
from_id = ... # type: bytes
data = ... # type: bytes
seqno = ... # type: bytes
topicIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
signature = ... # type: bytes
key = ... # type: bytes
def __init__(self,
*,
from_id : typing___Optional[bytes] = None,
data : typing___Optional[bytes] = None,
seqno : typing___Optional[bytes] = None,
topicIDs : typing___Optional[typing___Iterable[typing___Text]] = None,
signature : typing___Optional[bytes] = None,
key : typing___Optional[bytes] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Message: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"data",u"from_id",u"key",u"seqno",u"signature"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"from_id",u"key",u"seqno",u"signature",u"topicIDs"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"from_id",b"from_id",u"key",b"key",u"seqno",b"seqno",u"signature",b"signature"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"from_id",b"from_id",u"key",b"key",u"seqno",b"seqno",u"signature",b"signature",u"topicIDs",b"topicIDs"]) -> None: ...
class ControlMessage(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
def HasField(self, field_name: typing_extensions.Literal["subscribe", b"subscribe", "topicid", b"topicid"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["subscribe", b"subscribe", "topicid", b"topicid"]) -> None: ...
SUBSCRIPTIONS_FIELD_NUMBER: builtins.int
PUBLISH_FIELD_NUMBER: builtins.int
CONTROL_FIELD_NUMBER: builtins.int
@property
def ihave(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlIHave]: ...
def subscriptions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RPC.SubOpts]: ...
@property
def iwant(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlIWant]: ...
def publish(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Message]: ...
@property
def graft(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlGraft]: ...
def control(self) -> global___ControlMessage: ...
def __init__(
self,
*,
subscriptions: collections.abc.Iterable[global___RPC.SubOpts] | None = ...,
publish: collections.abc.Iterable[global___Message] | None = ...,
control: global___ControlMessage | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["control", b"control"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["control", b"control", "publish", b"publish", "subscriptions", b"subscriptions"]) -> None: ...
global___RPC = RPC
@typing_extensions.final
class Message(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FROM_ID_FIELD_NUMBER: builtins.int
DATA_FIELD_NUMBER: builtins.int
SEQNO_FIELD_NUMBER: builtins.int
TOPICIDS_FIELD_NUMBER: builtins.int
SIGNATURE_FIELD_NUMBER: builtins.int
KEY_FIELD_NUMBER: builtins.int
from_id: builtins.bytes
data: builtins.bytes
seqno: builtins.bytes
@property
def prune(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ControlPrune]: ...
def __init__(self,
def topicIDs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
signature: builtins.bytes
key: builtins.bytes
def __init__(
self,
*,
ihave : typing___Optional[typing___Iterable[ControlIHave]] = None,
iwant : typing___Optional[typing___Iterable[ControlIWant]] = None,
graft : typing___Optional[typing___Iterable[ControlGraft]] = None,
prune : typing___Optional[typing___Iterable[ControlPrune]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlMessage: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"graft",u"ihave",u"iwant",u"prune"]) -> None: ...
else:
def ClearField(self, field_name: typing_extensions___Literal[u"graft",b"graft",u"ihave",b"ihave",u"iwant",b"iwant",u"prune",b"prune"]) -> None: ...
from_id: builtins.bytes | None = ...,
data: builtins.bytes | None = ...,
seqno: builtins.bytes | None = ...,
topicIDs: collections.abc.Iterable[builtins.str] | None = ...,
signature: builtins.bytes | None = ...,
key: builtins.bytes | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["data", b"data", "from_id", b"from_id", "key", b"key", "seqno", b"seqno", "signature", b"signature"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "from_id", b"from_id", "key", b"key", "seqno", b"seqno", "signature", b"signature", "topicIDs", b"topicIDs"]) -> None: ...
class ControlIHave(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text
messageIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
global___Message = Message
def __init__(self,
@typing_extensions.final
class ControlMessage(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
IHAVE_FIELD_NUMBER: builtins.int
IWANT_FIELD_NUMBER: builtins.int
GRAFT_FIELD_NUMBER: builtins.int
PRUNE_FIELD_NUMBER: builtins.int
@property
def ihave(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlIHave]: ...
@property
def iwant(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlIWant]: ...
@property
def graft(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlGraft]: ...
@property
def prune(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlPrune]: ...
def __init__(
self,
*,
topicID : typing___Optional[typing___Text] = None,
messageIDs : typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlIHave: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",u"topicID"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",b"messageIDs",u"topicID",b"topicID"]) -> None: ...
ihave: collections.abc.Iterable[global___ControlIHave] | None = ...,
iwant: collections.abc.Iterable[global___ControlIWant] | None = ...,
graft: collections.abc.Iterable[global___ControlGraft] | None = ...,
prune: collections.abc.Iterable[global___ControlPrune] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["graft", b"graft", "ihave", b"ihave", "iwant", b"iwant", "prune", b"prune"]) -> None: ...
class ControlIWant(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
messageIDs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
global___ControlMessage = ControlMessage
def __init__(self,
@typing_extensions.final
class ControlIHave(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TOPICID_FIELD_NUMBER: builtins.int
MESSAGEIDS_FIELD_NUMBER: builtins.int
topicID: builtins.str
@property
def messageIDs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
def __init__(
self,
*,
messageIDs : typing___Optional[typing___Iterable[typing___Text]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlIWant: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs"]) -> None: ...
else:
def ClearField(self, field_name: typing_extensions___Literal[u"messageIDs",b"messageIDs"]) -> None: ...
topicID: builtins.str | None = ...,
messageIDs: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["topicID", b"topicID"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["messageIDs", b"messageIDs", "topicID", b"topicID"]) -> None: ...
class ControlGraft(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text
global___ControlIHave = ControlIHave
def __init__(self,
@typing_extensions.final
class ControlIWant(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MESSAGEIDS_FIELD_NUMBER: builtins.int
@property
def messageIDs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
def __init__(
self,
*,
topicID : typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlGraft: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> None: ...
messageIDs: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["messageIDs", b"messageIDs"]) -> None: ...
class ControlPrune(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
topicID = ... # type: typing___Text
global___ControlIWant = ControlIWant
def __init__(self,
@typing_extensions.final
class ControlGraft(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TOPICID_FIELD_NUMBER: builtins.int
topicID: builtins.str
def __init__(
self,
*,
topicID : typing___Optional[typing___Text] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> ControlPrune: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"topicID",b"topicID"]) -> None: ...
topicID: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["topicID", b"topicID"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["topicID", b"topicID"]) -> None: ...
class TopicDescriptor(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AuthOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class AuthMode(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod
def Name(cls, number: int) -> str: ...
@classmethod
def Value(cls, name: str) -> TopicDescriptor.AuthOpts.AuthMode: ...
@classmethod
def keys(cls) -> typing___List[str]: ...
@classmethod
def values(cls) -> typing___List[TopicDescriptor.AuthOpts.AuthMode]: ...
@classmethod
def items(cls) -> typing___List[typing___Tuple[str, TopicDescriptor.AuthOpts.AuthMode]]: ...
NONE = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 0)
KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1)
WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2)
NONE = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 0)
KEY = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 1)
WOT = typing___cast(TopicDescriptor.AuthOpts.AuthMode, 2)
global___ControlGraft = ControlGraft
mode = ... # type: TopicDescriptor.AuthOpts.AuthMode
keys = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
@typing_extensions.final
class ControlPrune(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
def __init__(self,
TOPICID_FIELD_NUMBER: builtins.int
topicID: builtins.str
def __init__(
self,
*,
topicID: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["topicID", b"topicID"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["topicID", b"topicID"]) -> None: ...
global___ControlPrune = ControlPrune
@typing_extensions.final
class TopicDescriptor(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class AuthOpts(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _AuthMode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _AuthModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TopicDescriptor.AuthOpts._AuthMode.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
NONE: TopicDescriptor.AuthOpts._AuthMode.ValueType # 0
"""no authentication, anyone can publish"""
KEY: TopicDescriptor.AuthOpts._AuthMode.ValueType # 1
"""only messages signed by keys in the topic descriptor are accepted"""
WOT: TopicDescriptor.AuthOpts._AuthMode.ValueType # 2
"""web of trust, certificates can allow publisher set to grow"""
class AuthMode(_AuthMode, metaclass=_AuthModeEnumTypeWrapper): ...
NONE: TopicDescriptor.AuthOpts.AuthMode.ValueType # 0
"""no authentication, anyone can publish"""
KEY: TopicDescriptor.AuthOpts.AuthMode.ValueType # 1
"""only messages signed by keys in the topic descriptor are accepted"""
WOT: TopicDescriptor.AuthOpts.AuthMode.ValueType # 2
"""web of trust, certificates can allow publisher set to grow"""
MODE_FIELD_NUMBER: builtins.int
KEYS_FIELD_NUMBER: builtins.int
mode: global___TopicDescriptor.AuthOpts.AuthMode.ValueType
@property
def keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
"""root keys to trust"""
def __init__(
self,
*,
mode : typing___Optional[TopicDescriptor.AuthOpts.AuthMode] = None,
keys : typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> TopicDescriptor.AuthOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keys",u"mode"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"mode",b"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keys",b"keys",u"mode",b"mode"]) -> None: ...
class EncOpts(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class EncMode(int):
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
@classmethod
def Name(cls, number: int) -> str: ...
@classmethod
def Value(cls, name: str) -> TopicDescriptor.EncOpts.EncMode: ...
@classmethod
def keys(cls) -> typing___List[str]: ...
@classmethod
def values(cls) -> typing___List[TopicDescriptor.EncOpts.EncMode]: ...
@classmethod
def items(cls) -> typing___List[typing___Tuple[str, TopicDescriptor.EncOpts.EncMode]]: ...
NONE = typing___cast(TopicDescriptor.EncOpts.EncMode, 0)
SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1)
WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2)
NONE = typing___cast(TopicDescriptor.EncOpts.EncMode, 0)
SHAREDKEY = typing___cast(TopicDescriptor.EncOpts.EncMode, 1)
WOT = typing___cast(TopicDescriptor.EncOpts.EncMode, 2)
mode = ... # type: TopicDescriptor.EncOpts.EncMode
keyHashes = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
def __init__(self,
*,
mode : typing___Optional[TopicDescriptor.EncOpts.EncMode] = None,
keyHashes : typing___Optional[typing___Iterable[bytes]] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> TopicDescriptor.EncOpts: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keyHashes",u"mode"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"mode",b"mode"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"keyHashes",b"keyHashes",u"mode",b"mode"]) -> None: ...
name = ... # type: typing___Text
@property
def auth(self) -> TopicDescriptor.AuthOpts: ...
@property
def enc(self) -> TopicDescriptor.EncOpts: ...
def __init__(self,
*,
name : typing___Optional[typing___Text] = None,
auth : typing___Optional[TopicDescriptor.AuthOpts] = None,
enc : typing___Optional[TopicDescriptor.EncOpts] = None,
mode: global___TopicDescriptor.AuthOpts.AuthMode.ValueType | None = ...,
keys: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> TopicDescriptor: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"auth",u"enc",u"name"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"auth",u"enc",u"name"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"auth",b"auth",u"enc",b"enc",u"name",b"name"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"auth",b"auth",u"enc",b"enc",u"name",b"name"]) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["mode", b"mode"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys", "mode", b"mode"]) -> None: ...
@typing_extensions.final
class EncOpts(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _EncMode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _EncModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TopicDescriptor.EncOpts._EncMode.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
NONE: TopicDescriptor.EncOpts._EncMode.ValueType # 0
"""no encryption, anyone can read"""
SHAREDKEY: TopicDescriptor.EncOpts._EncMode.ValueType # 1
"""messages are encrypted with shared key"""
WOT: TopicDescriptor.EncOpts._EncMode.ValueType # 2
"""web of trust, certificates can allow publisher set to grow"""
class EncMode(_EncMode, metaclass=_EncModeEnumTypeWrapper): ...
NONE: TopicDescriptor.EncOpts.EncMode.ValueType # 0
"""no encryption, anyone can read"""
SHAREDKEY: TopicDescriptor.EncOpts.EncMode.ValueType # 1
"""messages are encrypted with shared key"""
WOT: TopicDescriptor.EncOpts.EncMode.ValueType # 2
"""web of trust, certificates can allow publisher set to grow"""
MODE_FIELD_NUMBER: builtins.int
KEYHASHES_FIELD_NUMBER: builtins.int
mode: global___TopicDescriptor.EncOpts.EncMode.ValueType
@property
def keyHashes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
"""the hashes of the shared keys used (salted)"""
def __init__(
self,
*,
mode: global___TopicDescriptor.EncOpts.EncMode.ValueType | None = ...,
keyHashes: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["mode", b"mode"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["keyHashes", b"keyHashes", "mode", b"mode"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
AUTH_FIELD_NUMBER: builtins.int
ENC_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def auth(self) -> global___TopicDescriptor.AuthOpts: ...
@property
def enc(self) -> global___TopicDescriptor.EncOpts: ...
def __init__(
self,
*,
name: builtins.str | None = ...,
auth: global___TopicDescriptor.AuthOpts | None = ...,
enc: global___TopicDescriptor.EncOpts | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["auth", b"auth", "enc", b"enc", "name", b"name"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["auth", b"auth", "enc", b"enc", "name", b"name"]) -> None: ...
global___TopicDescriptor = TopicDescriptor

View File

@ -15,33 +15,78 @@ from typing import (
cast,
)
from async_service import Service
from async_service import (
Service,
)
import base58
from lru import LRU
from lru import (
LRU,
)
import trio
from libp2p.crypto.keys import PrivateKey
from libp2p.exceptions import ParseError, ValidationError
from libp2p.host.host_interface import IHost
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.exceptions import SwarmException
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed, read_varint_prefixed_bytes
from libp2p.crypto.keys import (
PrivateKey,
)
from libp2p.exceptions import (
ParseError,
ValidationError,
)
from libp2p.host.host_interface import (
IHost,
)
from libp2p.io.exceptions import (
IncompleteReadError,
)
from libp2p.network.exceptions import (
SwarmException,
)
from libp2p.network.stream.exceptions import (
StreamClosed,
StreamEOF,
StreamReset,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
from libp2p.peer.id import (
ID,
)
from libp2p.typing import (
TProtocol,
)
from libp2p.utils import (
encode_varint_prefixed,
read_varint_prefixed_bytes,
)
from .abc import IPubsub, ISubscriptionAPI
from .pb import rpc_pb2
from .pubsub_notifee import PubsubNotifee
from .subscription import TrioSubscriptionAPI
from .typing import AsyncValidatorFn, SyncValidatorFn, ValidatorFn
from .validators import PUBSUB_SIGNING_PREFIX, signature_validator
from .abc import (
IPubsub,
ISubscriptionAPI,
)
from .pb import (
rpc_pb2,
)
from .pubsub_notifee import (
PubsubNotifee,
)
from .subscription import (
TrioSubscriptionAPI,
)
from .typing import (
AsyncValidatorFn,
SyncValidatorFn,
ValidatorFn,
)
from .validators import (
PUBSUB_SIGNING_PREFIX,
signature_validator,
)
if TYPE_CHECKING:
from .abc import IPubsubRouter # noqa: F401
from typing import Any # noqa: F401
from .abc import IPubsubRouter # noqa: F401
# Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/40e1c94708658b155f30cf99e4574f384756d83c/topic.go#L97 # noqa: E501
SUBSCRIPTION_CHANNEL_SIZE = 32
@ -64,7 +109,6 @@ class TopicValidator(NamedTuple):
class Pubsub(Service, IPubsub):
host: IHost
router: "IPubsubRouter"
@ -186,8 +230,10 @@ class Pubsub(Service, IPubsub):
return self.subscribed_topics_receive.keys()
def get_hello_packet(self) -> rpc_pb2.RPC:
"""Generate subscription message with all topics we are subscribed to
only send hello packet if we have subscribed topics."""
"""
Generate subscription message with all topics we are subscribed to
only send hello packet if we have subscribed topics.
"""
packet = rpc_pb2.RPC()
for topic_id in self.topic_ids:
packet.subscriptions.extend(
@ -254,7 +300,7 @@ class Pubsub(Service, IPubsub):
:param topic: the topic to register validator under
:param validator: the validator used to validate messages published to the topic
:param is_async_validator: indicate if the validator is an asynchronous validator
"""
""" # noqa: E501
self.topic_validators[topic] = TopicValidator(validator, is_async_validator)
def remove_topic_validator(self, topic: str) -> None:
@ -341,9 +387,11 @@ class Pubsub(Service, IPubsub):
logger.debug("removed dead peer %s", peer_id)
async def handle_peer_queue(self) -> None:
"""Continuously read from peer queue and each time a new peer is found,
"""
Continuously read from peer queue and each time a new peer is found,
open a stream to the peer using a supported pubsub protocol pubsub
protocols we support."""
protocols we support.
"""
async with self.peer_receive_channel:
self.event_handle_peer_queue_started.set()
async for peer_id in self.peer_receive_channel:
@ -351,9 +399,10 @@ class Pubsub(Service, IPubsub):
self.manager.run_task(self._handle_new_peer, peer_id)
async def handle_dead_peer_queue(self) -> None:
"""Continuously read from dead peer channel and close the stream
between that peer and remove peer info from pubsub and pubsub
router."""
"""
Continuously read from dead peer channel and close the stream
between that peer and remove peer info from pubsub and pubsub router.
"""
async with self.dead_peer_receive_channel:
self.event_handle_dead_peer_queue_started.set()
async for peer_id in self.dead_peer_receive_channel:
@ -373,7 +422,7 @@ class Pubsub(Service, IPubsub):
"""
if sub_message.subscribe:
if sub_message.topicid not in self.peer_topics:
self.peer_topics[sub_message.topicid] = set([origin_id])
self.peer_topics[sub_message.topicid] = {origin_id}
elif origin_id not in self.peer_topics[sub_message.topicid]:
# Add peer to topic
self.peer_topics[sub_message.topicid].add(origin_id)
@ -388,7 +437,6 @@ class Pubsub(Service, IPubsub):
:param publish_message: RPC.Message format
"""
# Check if this message has any topics that we are subscribed to
for topic in publish_message.topicIDs:
if topic in self.topic_ids:
@ -409,7 +457,6 @@ class Pubsub(Service, IPubsub):
:param topic_id: topic_id to subscribe to
"""
logger.debug("subscribing to topic %s", topic_id)
# Already subscribed
@ -448,7 +495,6 @@ class Pubsub(Service, IPubsub):
:param topic_id: topic_id to unsubscribe from
"""
logger.debug("unsubscribing from topic %s", topic_id)
# Return if we already unsubscribed from the topic
@ -479,7 +525,6 @@ class Pubsub(Service, IPubsub):
:param raw_msg: raw contents of the message to broadcast
"""
# Broadcast message
for stream in self.peers.values():
# Write message to stream
@ -571,7 +616,7 @@ class Pubsub(Service, IPubsub):
# TODO: Check if the `from` is in the blacklist. If yes, reject.
# If the message is processed before, return(i.e., don't further process the message).
# If the message is processed before, return(i.e., don't further process the message) # noqa: E501
if self._is_msg_seen(msg):
return
@ -588,7 +633,7 @@ class Pubsub(Service, IPubsub):
await self.validate_msg(msg_forwarder, msg)
except ValidationError:
logger.debug(
"Topic validation failed: sender %s sent data %s under topic IDs: %s %s:%s",
"Topic validation failed: sender %s sent data %s under topic IDs: %s %s:%s", # noqa: E501
msg_forwarder,
msg.data.hex(),
msg.topicIDs,
@ -612,8 +657,8 @@ class Pubsub(Service, IPubsub):
def _mark_msg_seen(self, msg: rpc_pb2.Message) -> None:
msg_id = self._msg_id_constructor(msg)
# FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate if there is a
# more appropriate way.
# FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate if there
# is a more appropriate way.
self.seen_messages[msg_id] = 1
def _is_subscribed_to_msg(self, msg: rpc_pb2.Message) -> bool:

View File

@ -1,19 +1,30 @@
from typing import TYPE_CHECKING
from typing import (
TYPE_CHECKING,
)
from multiaddr import Multiaddr
from multiaddr import (
Multiaddr,
)
import trio
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.network.network_interface import INetwork
from libp2p.network.notifee_interface import INotifee
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.network.connection.net_connection_interface import (
INetConn,
)
from libp2p.network.network_interface import (
INetwork,
)
from libp2p.network.notifee_interface import (
INotifee,
)
from libp2p.network.stream.net_stream_interface import (
INetStream,
)
if TYPE_CHECKING:
from libp2p.peer.id import ID # noqa: F401
class PubsubNotifee(INotifee):
initiator_peers_queue: "trio.MemorySendChannel[ID]"
dead_peers_queue: "trio.MemorySendChannel[ID]"

View File

@ -1,11 +1,23 @@
from types import TracebackType
from typing import AsyncIterator, Optional, Type
from types import (
TracebackType,
)
from typing import (
AsyncIterator,
Optional,
Type,
)
import trio
from .abc import ISubscriptionAPI
from .pb import rpc_pb2
from .typing import UnsubscribeFn
from .abc import (
ISubscriptionAPI,
)
from .pb import (
rpc_pb2,
)
from .typing import (
UnsubscribeFn,
)
class BaseSubscriptionAPI(ISubscriptionAPI):
@ -32,11 +44,11 @@ class TrioSubscriptionAPI(BaseSubscriptionAPI):
unsubscribe_fn: UnsubscribeFn,
) -> None:
self.receive_channel = receive_channel
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # noqa: E501
self.unsubscribe_fn = unsubscribe_fn # type: ignore
async def unsubscribe(self) -> None:
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
# Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427 # noqa: E501
await self.unsubscribe_fn() # type: ignore
def __aiter__(self) -> AsyncIterator[rpc_pb2.Message]:

View File

@ -1,8 +1,16 @@
from typing import Awaitable, Callable, Union
from typing import (
Awaitable,
Callable,
Union,
)
from libp2p.peer.id import ID
from libp2p.peer.id import (
ID,
)
from .pb import rpc_pb2
from .pb import (
rpc_pb2,
)
SyncValidatorFn = Callable[[ID, rpc_pb2.Message], bool]
AsyncValidatorFn = Callable[[ID, rpc_pb2.Message], Awaitable[bool]]

View File

@ -1,9 +1,15 @@
import logging
from libp2p.crypto.serialization import deserialize_public_key
from libp2p.peer.id import ID
from libp2p.crypto.serialization import (
deserialize_public_key,
)
from libp2p.peer.id import (
ID,
)
from .pb import rpc_pb2
from .pb import (
rpc_pb2,
)
logger = logging.getLogger("libp2p.pubsub")

View File

@ -1,8 +1,17 @@
from abc import ABC, abstractmethod
from typing import Iterable
from abc import (
ABC,
abstractmethod,
)
from typing import (
Iterable,
)
from libp2p.peer.id import ID
from libp2p.peer.peerinfo import PeerInfo
from libp2p.peer.id import (
ID,
)
from libp2p.peer.peerinfo import (
PeerInfo,
)
class IContentRouting(ABC):
@ -18,12 +27,16 @@ class IContentRouting(ABC):
@abstractmethod
def find_provider_iter(self, cid: bytes, count: int) -> Iterable[PeerInfo]:
"""Search for peers who are able to provide a given key returns an
iterator of peer.PeerInfo."""
"""
Search for peers who are able to provide a given key returns an
iterator of peer.PeerInfo.
"""
class IPeerRouting(ABC):
@abstractmethod
async def find_peer(self, peer_id: ID) -> PeerInfo:
"""Find specific Peer FindPeer searches for a peer with given peer_id,
returns a peer.PeerInfo with relevant addresses."""
"""
Find specific Peer FindPeer searches for a peer with given peer_id,
returns a peer.PeerInfo with relevant addresses.
"""

View File

@ -1,13 +1,24 @@
from typing import Optional
from typing import (
Optional,
)
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
class BaseSession(ISecureConn):
"""``BaseSession`` is not fully instantiated from its abstract classes as
it is only meant to be used in clases that derive from it."""
"""
``BaseSession`` is not fully instantiated from its abstract classes as
it is only meant to be used in clases that derive from it.
"""
local_peer: ID
local_private_key: PrivateKey

View File

@ -1,9 +1,17 @@
import secrets
from typing import Callable
from typing import (
Callable,
)
from libp2p.crypto.keys import KeyPair
from libp2p.peer.id import ID
from libp2p.security.secure_transport_interface import ISecureTransport
from libp2p.crypto.keys import (
KeyPair,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.secure_transport_interface import (
ISecureTransport,
)
def default_secure_bytes_provider(n: int) -> bytes:

View File

@ -1,4 +1,6 @@
from libp2p.exceptions import BaseLibp2pError
from libp2p.exceptions import (
BaseLibp2pError,
)
class HandshakeFailure(BaseLibp2pError):

View File

@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/security/insecure/pb/plaintext.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@ -16,64 +15,13 @@ _sym_db = _symbol_database.Default()
from libp2p.crypto.pb import crypto_pb2 as libp2p_dot_crypto_dot_pb_dot_crypto__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='libp2p/security/insecure/pb/plaintext.proto',
package='plaintext.pb',
syntax='proto2',
serialized_pb=_b('\n+libp2p/security/insecure/pb/plaintext.proto\x12\x0cplaintext.pb\x1a\x1dlibp2p/crypto/pb/crypto.proto\"<\n\x08\x45xchange\x12\n\n\x02id\x18\x01 \x01(\x0c\x12$\n\x06pubkey\x18\x02 \x01(\x0b\x32\x14.crypto.pb.PublicKey')
,
dependencies=[libp2p_dot_crypto_dot_pb_dot_crypto__pb2.DESCRIPTOR,])
_EXCHANGE = _descriptor.Descriptor(
name='Exchange',
full_name='plaintext.pb.Exchange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='plaintext.pb.Exchange.id', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pubkey', full_name='plaintext.pb.Exchange.pubkey', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=92,
serialized_end=152,
)
_EXCHANGE.fields_by_name['pubkey'].message_type = libp2p_dot_crypto_dot_pb_dot_crypto__pb2._PUBLICKEY
DESCRIPTOR.message_types_by_name['Exchange'] = _EXCHANGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Exchange = _reflection.GeneratedProtocolMessageType('Exchange', (_message.Message,), dict(
DESCRIPTOR = _EXCHANGE,
__module__ = 'libp2p.security.insecure.pb.plaintext_pb2'
# @@protoc_insertion_point(class_scope:plaintext.pb.Exchange)
))
_sym_db.RegisterMessage(Exchange)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+libp2p/security/insecure/pb/plaintext.proto\x12\x0cplaintext.pb\x1a\x1dlibp2p/crypto/pb/crypto.proto\"<\n\x08\x45xchange\x12\n\n\x02id\x18\x01 \x01(\x0c\x12$\n\x06pubkey\x18\x02 \x01(\x0b\x32\x14.crypto.pb.PublicKey')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.insecure.pb.plaintext_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals['_EXCHANGE']._serialized_start=92
_globals['_EXCHANGE']._serialized_end=152
# @@protoc_insertion_point(module_scope)

View File

@ -1,45 +1,36 @@
# @generated by generate_proto_mypy_stubs.py. Do not edit!
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import libp2p.crypto.pb.crypto_pb2
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
from libp2p.crypto.pb.crypto_pb2 import (
PublicKey as libp2p___crypto___pb___crypto_pb2___PublicKey,
)
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
from typing import (
Optional as typing___Optional,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
class Exchange(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
id = ... # type: bytes
@typing_extensions.final
class Exchange(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ID_FIELD_NUMBER: builtins.int
PUBKEY_FIELD_NUMBER: builtins.int
id: builtins.bytes
@property
def pubkey(self) -> libp2p___crypto___pb___crypto_pb2___PublicKey: ...
def __init__(self,
def pubkey(self) -> libp2p.crypto.pb.crypto_pb2.PublicKey: ...
def __init__(
self,
*,
id : typing___Optional[bytes] = None,
pubkey : typing___Optional[libp2p___crypto___pb___crypto_pb2___PublicKey] = None,
) -> None: ...
@classmethod
def FromString(cls, s: bytes) -> Exchange: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
if sys.version_info >= (3,):
def HasField(self, field_name: typing_extensions___Literal[u"id",u"pubkey"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"id",u"pubkey"]) -> None: ...
else:
def HasField(self, field_name: typing_extensions___Literal[u"id",b"id",u"pubkey",b"pubkey"]) -> bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"id",b"id",u"pubkey",b"pubkey"]) -> None: ...
id: builtins.bytes | None = ...,
pubkey: libp2p.crypto.pb.crypto_pb2.PublicKey | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["id", b"id", "pubkey", b"pubkey"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "pubkey", b"pubkey"]) -> None: ...
global___Exchange = Exchange

View File

@ -1,21 +1,52 @@
from libp2p.crypto.exceptions import MissingDeserializerError
from libp2p.crypto.keys import PrivateKey, PublicKey
from libp2p.crypto.pb import crypto_pb2
from libp2p.crypto.serialization import deserialize_public_key
from libp2p.io.abc import ReadWriteCloser
from libp2p.io.msgio import VarIntLengthMsgReadWriter
from libp2p.network.connection.exceptions import RawConnError
from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.peer.id import ID
from libp2p.security.base_session import BaseSession
from libp2p.security.base_transport import BaseSecureTransport
from libp2p.security.exceptions import HandshakeFailure
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.typing import TProtocol
from libp2p.crypto.exceptions import (
MissingDeserializerError,
)
from libp2p.crypto.keys import (
PrivateKey,
PublicKey,
)
from libp2p.crypto.pb import (
crypto_pb2,
)
from libp2p.crypto.serialization import (
deserialize_public_key,
)
from libp2p.io.abc import (
ReadWriteCloser,
)
from libp2p.io.msgio import (
VarIntLengthMsgReadWriter,
)
from libp2p.network.connection.exceptions import (
RawConnError,
)
from libp2p.network.connection.raw_connection_interface import (
IRawConnection,
)
from libp2p.peer.id import (
ID,
)
from libp2p.security.base_session import (
BaseSession,
)
from libp2p.security.base_transport import (
BaseSecureTransport,
)
from libp2p.security.exceptions import (
HandshakeFailure,
)
from libp2p.security.secure_conn_interface import (
ISecureConn,
)
from libp2p.typing import (
TProtocol,
)
from .pb import plaintext_pb2
from .pb import (
plaintext_pb2,
)
# Reference: https://github.com/libp2p/go-libp2p-core/blob/master/sec/insecure/insecure.go
# Reference: https://github.com/libp2p/go-libp2p-core/blob/master/sec/insecure/insecure.go # noqa: E501
PLAINTEXT_PROTOCOL_ID = TProtocol("/plaintext/2.0.0")
@ -120,9 +151,10 @@ async def run_handshake(
class InsecureTransport(BaseSecureTransport):
"""``InsecureTransport`` provides the "identity" upgrader for a
``IRawConnection``, i.e. the upgraded transport does not add any additional
security."""
"""
Provides the "identity" upgrader for a ``IRawConnection``, i.e. the upgraded
transport does not add any additional security.
"""
async def secure_inbound(self, conn: IRawConnection) -> ISecureConn:
"""

Some files were not shown because too many files have changed in this diff Show More