mirror of https://github.com/varun-r-mallya/py-libp2p.git (synced 2025-12-31 20:36:24 +00:00)
move pubsub testing tools into tests/utils

tests/utils/pubsub/__init__.py  (new file, 0 lines)

tests/utils/pubsub/dummy_account_node.py  (new file, 147 lines)
@@ -0,0 +1,147 @@
from collections.abc import (
    AsyncIterator,
)
from contextlib import (
    AsyncExitStack,
    asynccontextmanager,
)

from libp2p.abc import (
    IHost,
)
from libp2p.pubsub.pubsub import (
    Pubsub,
)
from libp2p.tools.async_service import (
    Service,
    background_trio_service,
)
from tests.utils.factories import (
    PubsubFactory,
)

CRYPTO_TOPIC = "ethereum"

# Message format:
# Sending crypto: send,<source>,<dest>,<amount as integer>
# Ex. send,aspyn,alex,5
# Set crypto: set,<dest>,<amount as integer>
# Ex. set,rob,5
# Determine message type by looking at the first item before the first comma


class DummyAccountNode(Service):
    """
    Node which has an internal balance mapping, meant to serve as a dummy
    crypto blockchain.

    There is no actual blockchain, just a simple map indicating how much
    crypto each user in the mapping holds.
    """

    pubsub: Pubsub

    def __init__(self, pubsub: Pubsub) -> None:
        self.pubsub = pubsub
        self.balances: dict[str, int] = {}

    @property
    def host(self) -> IHost:
        return self.pubsub.host

    async def run(self) -> None:
        self.subscription = await self.pubsub.subscribe(CRYPTO_TOPIC)
        self.manager.run_daemon_task(self.handle_incoming_msgs)
        await self.manager.wait_finished()

    @classmethod
    @asynccontextmanager
    async def create(cls, number: int) -> AsyncIterator[tuple["DummyAccountNode", ...]]:
        """
        Create new DummyAccountNodes and attach a libp2p node, a floodsub, and
        a pubsub instance to each new node.

        We use ``create`` because it serves as a factory function and allows us
        to use async/await, unlike ``__init__``.
        """
        async with PubsubFactory.create_batch_with_floodsub(number) as pubsubs:
            async with AsyncExitStack() as stack:
                dummy_account_nodes = tuple(cls(pubsub) for pubsub in pubsubs)
                for node in dummy_account_nodes:
                    await stack.enter_async_context(background_trio_service(node))
                yield dummy_account_nodes

    async def handle_incoming_msgs(self) -> None:
        """Handle all incoming messages on the CRYPTO_TOPIC from peers."""
        while True:
            incoming = await self.subscription.get()
            msg_comps = incoming.data.decode("utf-8").split(",")

            if msg_comps[0] == "send":
                self.handle_send_crypto(msg_comps[1], msg_comps[2], int(msg_comps[3]))
            elif msg_comps[0] == "set":
                self.handle_set_crypto(msg_comps[1], int(msg_comps[2]))

    async def publish_send_crypto(
        self, source_user: str, dest_user: str, amount: int
    ) -> None:
        """
        Create a send crypto message and publish that message to all other
        nodes.

        :param source_user: user to send crypto from
        :param dest_user: user to send crypto to
        :param amount: amount of crypto to send
        """
        msg_contents = f"send,{source_user},{dest_user},{amount!s}"
        await self.pubsub.publish(CRYPTO_TOPIC, msg_contents.encode())

    async def publish_set_crypto(self, user: str, amount: int) -> None:
        """
        Create a set crypto message and publish that message to all other
        nodes.

        :param user: user to set crypto for
        :param amount: amount of crypto
        """
        msg_contents = f"set,{user},{amount!s}"
        await self.pubsub.publish(CRYPTO_TOPIC, msg_contents.encode())

    def handle_send_crypto(self, source_user: str, dest_user: str, amount: int) -> None:
        """
        Handle incoming send_crypto message.

        :param source_user: user to send crypto from
        :param dest_user: user to send crypto to
        :param amount: amount of crypto to send
        """
        if source_user in self.balances:
            self.balances[source_user] -= amount
        else:
            self.balances[source_user] = -amount

        if dest_user in self.balances:
            self.balances[dest_user] += amount
        else:
            self.balances[dest_user] = amount

    def handle_set_crypto(self, dest_user: str, amount: int) -> None:
        """
        Handle incoming set_crypto message.

        :param dest_user: user to set crypto for
        :param amount: amount of crypto
        """
        self.balances[dest_user] = amount

    def get_balance(self, user: str) -> int:
        """
        Get balance in crypto for a particular user.

        :param user: user to get balance for
        :return: balance of user, or -1 if the user is unknown
        """
        if user in self.balances:
            return self.balances[user]
        else:
            return -1
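
For reference, a minimal usage sketch (not part of this commit) of how a pytest-trio test could drive DummyAccountNode. The test name, topic values, and sleep durations are illustrative assumptions, and the timing may need tuning for message delivery:

import pytest
import trio

from libp2p.tools.utils import connect
from tests.utils.pubsub.dummy_account_node import DummyAccountNode


@pytest.mark.trio
async def test_dummy_account_nodes_sketch():
    # Spin up two DummyAccountNodes, each backed by its own floodsub/pubsub stack.
    async with DummyAccountNode.create(2) as nodes:
        # Wire the underlying hosts together so floodsub can propagate messages.
        await connect(nodes[0].host, nodes[1].host)
        await trio.sleep(0.25)

        # "set,alice,10" gives alice a balance of 10 on every node.
        await nodes[0].publish_set_crypto("alice", 10)
        await trio.sleep(0.25)
        # "send,alice,bob,3" moves 3 from alice to bob on every node.
        await nodes[0].publish_send_crypto("alice", "bob", 3)
        await trio.sleep(0.25)

        for node in nodes:
            assert node.get_balance("alice") == 7
            assert node.get_balance("bob") == 3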

tests/utils/pubsub/floodsub_integration_test_settings.py  (new file, 264 lines)
@@ -0,0 +1,264 @@
# type: ignore
# To add typing to this module, it's better to do it after refactoring test cases
# into classes

import pytest
import trio

from libp2p.tools.constants import (
    FLOODSUB_PROTOCOL_ID,
)
from libp2p.tools.utils import (
    connect,
)

SUPPORTED_PROTOCOLS = [FLOODSUB_PROTOCOL_ID]

FLOODSUB_PROTOCOL_TEST_CASES = [
    {
        "name": "simple_two_nodes",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["A", "B"],
        "adj_list": {"A": ["B"]},
        "topic_map": {"topic1": ["B"]},
        "messages": [{"topics": ["topic1"], "data": b"foo", "node_id": "A"}],
    },
    {
        "name": "three_nodes_two_topics",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["A", "B", "C"],
        "adj_list": {"A": ["B"], "B": ["C"]},
        "topic_map": {"topic1": ["B", "C"], "topic2": ["B", "C"]},
        "messages": [
            {"topics": ["topic1"], "data": b"foo", "node_id": "A"},
            {"topics": ["topic2"], "data": b"Alex is tall", "node_id": "A"},
        ],
    },
    {
        "name": "two_nodes_one_topic_single_subscriber_is_sender",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["A", "B"],
        "adj_list": {"A": ["B"]},
        "topic_map": {"topic1": ["B"]},
        "messages": [{"topics": ["topic1"], "data": b"Alex is tall", "node_id": "B"}],
    },
    {
        "name": "two_nodes_one_topic_two_msgs",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["A", "B"],
        "adj_list": {"A": ["B"]},
        "topic_map": {"topic1": ["B"]},
        "messages": [
            {"topics": ["topic1"], "data": b"Alex is tall", "node_id": "B"},
            {"topics": ["topic1"], "data": b"foo", "node_id": "A"},
        ],
    },
    {
        "name": "seven_nodes_tree_one_topics",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["1", "2", "3", "4", "5", "6", "7"],
        "adj_list": {"1": ["2", "3"], "2": ["4", "5"], "3": ["6", "7"]},
        "topic_map": {"astrophysics": ["2", "3", "4", "5", "6", "7"]},
        "messages": [{"topics": ["astrophysics"], "data": b"e=mc^2", "node_id": "1"}],
    },
    {
        "name": "seven_nodes_tree_three_topics",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["1", "2", "3", "4", "5", "6", "7"],
        "adj_list": {"1": ["2", "3"], "2": ["4", "5"], "3": ["6", "7"]},
        "topic_map": {
            "astrophysics": ["2", "3", "4", "5", "6", "7"],
            "space": ["2", "3", "4", "5", "6", "7"],
            "onions": ["2", "3", "4", "5", "6", "7"],
        },
        "messages": [
            {"topics": ["astrophysics"], "data": b"e=mc^2", "node_id": "1"},
            {"topics": ["space"], "data": b"foobar", "node_id": "1"},
            {"topics": ["onions"], "data": b"I am allergic", "node_id": "1"},
        ],
    },
    {
        "name": "seven_nodes_tree_three_topics_diff_origin",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["1", "2", "3", "4", "5", "6", "7"],
        "adj_list": {"1": ["2", "3"], "2": ["4", "5"], "3": ["6", "7"]},
        "topic_map": {
            "astrophysics": ["1", "2", "3", "4", "5", "6", "7"],
            "space": ["1", "2", "3", "4", "5", "6", "7"],
            "onions": ["1", "2", "3", "4", "5", "6", "7"],
        },
        "messages": [
            {"topics": ["astrophysics"], "data": b"e=mc^2", "node_id": "1"},
            {"topics": ["space"], "data": b"foobar", "node_id": "4"},
            {"topics": ["onions"], "data": b"I am allergic", "node_id": "7"},
        ],
    },
    {
        "name": "three_nodes_clique_two_topic_diff_origin",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["1", "2", "3"],
        "adj_list": {"1": ["2", "3"], "2": ["3"]},
        "topic_map": {"astrophysics": ["1", "2", "3"], "school": ["1", "2", "3"]},
        "messages": [
            {"topics": ["astrophysics"], "data": b"e=mc^2", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic", "node_id": "1"},
        ],
    },
    {
        "name": "four_nodes_clique_two_topic_diff_origin_many_msgs",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["1", "2", "3", "4"],
        "adj_list": {
            "1": ["2", "3", "4"],
            "2": ["1", "3", "4"],
            "3": ["1", "2", "4"],
            "4": ["1", "2", "3"],
        },
        "topic_map": {
            "astrophysics": ["1", "2", "3", "4"],
            "school": ["1", "2", "3", "4"],
        },
        "messages": [
            {"topics": ["astrophysics"], "data": b"e=mc^2", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar2", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic2", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar3", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic3", "node_id": "1"},
        ],
    },
    {
        "name": "five_nodes_ring_two_topic_diff_origin_many_msgs",
        "supported_protocols": SUPPORTED_PROTOCOLS,
        "nodes": ["1", "2", "3", "4", "5"],
        "adj_list": {"1": ["2"], "2": ["3"], "3": ["4"], "4": ["5"], "5": ["1"]},
        "topic_map": {
            "astrophysics": ["1", "2", "3", "4", "5"],
            "school": ["1", "2", "3", "4", "5"],
        },
        "messages": [
            {"topics": ["astrophysics"], "data": b"e=mc^2", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar2", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic2", "node_id": "1"},
            {"topics": ["school"], "data": b"foobar3", "node_id": "2"},
            {"topics": ["astrophysics"], "data": b"I am allergic3", "node_id": "1"},
        ],
    },
]

floodsub_protocol_pytest_params = [
    pytest.param(test_case, id=test_case["name"])
    for test_case in FLOODSUB_PROTOCOL_TEST_CASES
]


async def perform_test_from_obj(obj, pubsub_factory) -> None:
    """
    Perform pubsub tests from a test object, which is composed as follows:

    .. code-block:: python

        {
            "supported_protocols": ["supported/protocol/1.0.0",...],
            "adj_list": {
                "node1": ["neighbor1_of_node1", "neighbor2_of_node1", ...],
                "node2": ["neighbor1_of_node2", "neighbor2_of_node2", ...],
                ...
            },
            "topic_map": {
                "topic1": ["node1_subscribed_to_topic1", "node2_subscribed_to_topic1", ...]
            },
            "messages": [
                {
                    "topics": ["topic1_for_message", "topic2_for_message", ...],
                    "data": b"some contents of the message (newlines are not supported)",
                    "node_id": "message sender node id"
                },
                ...
            ]
        }

    .. note::
        In adj_list, list each edge only once: for any neighbors A and B, include
        either A: ["B"] or B: ["A"], but NOT both, as the behavior is then
        undefined (even if it may work)
    """  # noqa: E501
    # Step 1) Create graph
    adj_list = obj["adj_list"]
    node_list = obj["nodes"]
    node_map = {}
    pubsub_map = {}

    async with pubsub_factory(
        number=len(node_list), protocols=obj["supported_protocols"]
    ) as pubsubs:
        for node_id_str, pubsub in zip(node_list, pubsubs):
            node_map[node_id_str] = pubsub.host
            pubsub_map[node_id_str] = pubsub

        # Connect the nodes and wait at least 2 seconds
        async with trio.open_nursery() as nursery:
            for start_node_id in adj_list:
                # Connect start_node to each neighbor listed in its adjacency list
                for neighbor_id in adj_list[start_node_id]:
                    nursery.start_soon(
                        connect, node_map[start_node_id], node_map[neighbor_id]
                    )
            nursery.start_soon(trio.sleep, 2)

        # Step 2) Subscribe to topics
        queues_map = {}
        topic_map = obj["topic_map"]

        async def subscribe_node(node_id, topic):
            if node_id not in queues_map:
                queues_map[node_id] = {}
            # Avoid repeated work
            if topic in queues_map[node_id]:
                # Checkpoint
                await trio.lowlevel.checkpoint()
                return
            sub = await pubsub_map[node_id].subscribe(topic)
            queues_map[node_id][topic] = sub

        async with trio.open_nursery() as nursery:
            for topic, node_ids in topic_map.items():
                for node_id in node_ids:
                    nursery.start_soon(subscribe_node, node_id, topic)
            nursery.start_soon(trio.sleep, 2)

        # Step 3) Publish messages
        topics_in_msgs_ordered = []
        messages = obj["messages"]

        for msg in messages:
            topics = msg["topics"]
            data = msg["data"]
            node_id = msg["node_id"]

            # Publish message
            # TODO: Should be single RPC package with several topics
            for topic in topics:
                await pubsub_map[node_id].publish(topic, data)

            # For each topic in topics, add (topic, node_id, data) tuple to
            # ordered test list
            for topic in topics:
                topics_in_msgs_ordered.append((topic, node_id, data))
            # Allow time for publishing before continuing
            await trio.sleep(1)

        # Step 4) Check that all messages were received correctly.
        for topic, origin_node_id, data in topics_in_msgs_ordered:
            # Look at each node in each topic
            for node_id in topic_map[topic]:
                # Get message from subscription queue
                msg = await queues_map[node_id][topic].get()
                assert data == msg.data
                # Check the message origin
                assert node_map[origin_node_id].get_id().to_bytes() == msg.from_id
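
For reference, a sketch (not part of this commit) of how a test module might replay these table-driven cases against floodsub; it assumes PubsubFactory.create_batch_with_floodsub accepts the number and protocols keyword arguments that perform_test_from_obj passes to its pubsub_factory:

import pytest

from tests.utils.factories import PubsubFactory
from tests.utils.pubsub.floodsub_integration_test_settings import (
    floodsub_protocol_pytest_params,
    perform_test_from_obj,
)


@pytest.mark.parametrize("test_case_obj", floodsub_protocol_pytest_params)
@pytest.mark.trio
async def test_floodsub_from_obj(test_case_obj):
    # Each parametrized case carries its own topology, subscriptions, and messages.
    await perform_test_from_obj(
        test_case_obj,
        PubsubFactory.create_batch_with_floodsub,
    )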

tests/utils/pubsub/utils.py  (new file, 42 lines)
@@ -0,0 +1,42 @@
from collections.abc import (
    Sequence,
)

from libp2p.abc import (
    IHost,
)
from libp2p.peer.id import (
    ID,
)
from libp2p.pubsub.pb import (
    rpc_pb2,
)
from libp2p.tools.utils import (
    connect,
)


def make_pubsub_msg(
    origin_id: ID, topic_ids: Sequence[str], data: bytes, seqno: bytes
) -> rpc_pb2.Message:
    return rpc_pb2.Message(
        from_id=origin_id.to_bytes(), seqno=seqno, data=data, topicIDs=list(topic_ids)
    )


# TODO: Implement sparse connect
async def dense_connect(hosts: Sequence[IHost]) -> None:
    await connect_some(hosts, 10)


# FIXME: `degree` is not used at all
async def connect_some(hosts: Sequence[IHost], degree: int) -> None:
    for i, host in enumerate(hosts):
        for host2 in hosts[i + 1 :]:
            await connect(host, host2)


async def one_to_all_connect(hosts: Sequence[IHost], central_host_index: int) -> None:
    for i, host in enumerate(hosts):
        if i != central_host_index:
            await connect(hosts[central_host_index], host)
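
For reference, a small sketch (not part of this commit) showing how these helpers compose: build the hosts' connection graph with dense_connect, then craft a raw RPC message with make_pubsub_msg. The topic name and seqno value are illustrative placeholders:

import trio

from tests.utils.factories import PubsubFactory
from tests.utils.pubsub.utils import dense_connect, make_pubsub_msg


async def sketch() -> None:
    async with PubsubFactory.create_batch_with_floodsub(3) as pubsubs:
        hosts = [pubsub.host for pubsub in pubsubs]
        # Fully connect the three hosts (connect_some currently ignores `degree`).
        await dense_connect(hosts)
        await trio.sleep(0.25)

        # Build a protobuf pubsub message originating from the first host.
        msg = make_pubsub_msg(
            origin_id=hosts[0].get_id(),
            topic_ids=["test-topic"],
            data=b"hello",
            seqno=b"\x00" * 8,
        )
        print(msg)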