Refactor floodsub.publish

Passed the first test of floodsub
This commit is contained in:
mhchia
2019-07-25 14:08:16 +08:00
parent 93cf5a2c32
commit cae4f34034
5 changed files with 87 additions and 72 deletions

View File

@ -8,7 +8,12 @@ from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.pubsub.pb import rpc_pb2
from libp2p.pubsub.pubsub import Pubsub
from libp2p.pubsub.floodsub import FloodSub
from utils import message_id_generator, generate_RPC_packet
from .utils import (
make_pubsub_msg,
message_id_generator,
generate_RPC_packet,
)
# pylint: disable=too-many-locals
@ -20,6 +25,7 @@ async def connect(node1, node2):
info = info_from_p2p_addr(addr)
await node1.connect(info)
@pytest.mark.asyncio
async def test_simple_two_nodes():
node_a = await new_node(transport_opt=["/ip4/127.0.0.1/tcp/0"])
@ -29,6 +35,8 @@ async def test_simple_two_nodes():
await node_b.get_network().listen(multiaddr.Multiaddr("/ip4/127.0.0.1/tcp/0"))
supported_protocols = ["/floodsub/1.0.0"]
topic = "my_topic"
data = b"some data"
floodsub_a = FloodSub(supported_protocols)
pubsub_a = Pubsub(node_a, floodsub_a, "a")
@ -38,26 +46,30 @@ async def test_simple_two_nodes():
await connect(node_a, node_b)
await asyncio.sleep(0.25)
qb = await pubsub_b.subscribe("my_topic")
sub_b = await pubsub_b.subscribe(topic)
await asyncio.sleep(0.25)
node_a_id = str(node_a.get_id())
next_msg_id_func = message_id_generator(0)
msg = generate_RPC_packet(node_a_id, ["my_topic"], "some data", next_msg_id_func())
await floodsub_a.publish(node_a_id, msg.SerializeToString())
msg = make_pubsub_msg(
origin_id=node_a.get_id(),
topic_ids=[topic],
data=data,
seqno=next_msg_id_func(),
)
await floodsub_a.publish(node_a.get_id(), msg)
await asyncio.sleep(0.25)
res_b = await qb.get()
res_b = await sub_b.get()
# Check that the msg received by node_b is the same
# as the message sent by node_a
assert res_b.SerializeToString() == msg.publish[0].SerializeToString()
assert res_b.SerializeToString() == msg.SerializeToString()
# Success, terminate pending tasks.
await cleanup()
@pytest.mark.asyncio
async def test_lru_cache_two_nodes():
# two nodes with cache_size of 4
@ -100,7 +112,7 @@ async def test_lru_cache_two_nodes():
messages = [first_message]
# for the next 5 messages
for i in range(2, 6):
# write first message
# write first message
await floodsub_a.publish(node_a_id, first_message.SerializeToString())
await asyncio.sleep(0.25)
@ -127,7 +139,7 @@ async def test_lru_cache_two_nodes():
res_b = await qb.get()
assert res_b.SerializeToString() == first_message.publish[0].SerializeToString()
assert qb.empty()
# Success, terminate pending tasks.
await cleanup()
@ -136,7 +148,7 @@ async def perform_test_from_obj(obj):
"""
Perform a floodsub test from a test obj.
test obj are composed as follows:
{
"supported_protocols": ["supported/protocol/1.0.0",...],
"adj_list": {
@ -190,7 +202,7 @@ async def perform_test_from_obj(obj):
if neighbor_id not in node_map:
neighbor_node = await new_node(transport_opt=["/ip4/127.0.0.1/tcp/0"])
await neighbor_node.get_network().listen(multiaddr.Multiaddr("/ip4/127.0.0.1/tcp/0"))
node_map[neighbor_id] = neighbor_node
floodsub = FloodSub(supported_protocols)

View File

@ -3,9 +3,14 @@ import multiaddr
import uuid
import random
import struct
from typing import (
Sequence,
)
from libp2p import new_node
from libp2p.pubsub.pb import rpc_pb2
from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.peer.id import ID
from libp2p.pubsub.pubsub import Pubsub
from libp2p.pubsub.gossipsub import GossipSub
@ -29,6 +34,20 @@ def message_id_generator(start_val):
return generator
def make_pubsub_msg(
        origin_id: ID,
        topic_ids: Sequence[str],
        data: bytes,
        seqno: bytes) -> rpc_pb2.Message:
    """Build a pubsub ``rpc_pb2.Message`` from its constituent fields.

    :param origin_id: peer ID of the message originator; serialized via
        ``to_bytes()`` into the protobuf ``from_id`` field
    :param topic_ids: topics this message is published to
    :param data: raw message payload
    :param seqno: sequence number bytes identifying this message
    :return: the populated protobuf message
    """
    msg = rpc_pb2.Message()
    msg.from_id = origin_id.to_bytes()
    msg.seqno = seqno
    msg.data = data
    # repeated field: extend accepts any sequence of topic strings
    msg.topicIDs.extend(topic_ids)
    return msg
def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
"""
Generate RPC packet to send over wire
@ -42,7 +61,7 @@ def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
from_id=origin_id.encode('utf-8'),
seqno=msg_id,
data=msg_content.encode('utf-8'),
)
)
for topic in topics:
message.topicIDs.extend([topic.encode('utf-8')])
@ -50,6 +69,7 @@ def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
packet.publish.extend([message])
return packet
async def connect(node1, node2):
"""
Connect node1 to node2