Refactor floodsub.publish

Passed the first test of floodsub
mhchia
2019-07-25 14:08:16 +08:00
parent 93cf5a2c32
commit cae4f34034
5 changed files with 87 additions and 72 deletions

View File

@@ -1,10 +1,10 @@
 from typing import (
-    Generator,
-    Sequence,
+    Iterable,
 )
 from libp2p.peer.id import (
     ID,
+    id_b58_decode,
 )
 from .pb import rpc_pb2
@@ -51,7 +51,7 @@ class FloodSub(IPubsubRouter):
         :param rpc: rpc message
         """

-    async def publish(self, from_peer: ID, pubsub_message: rpc_pb2.Message) -> None:
+    async def publish(self, src: ID, pubsub_msg: rpc_pb2.Message) -> None:
         """
         Invoked to forward a new message that has been validated.
         This is where the "flooding" part of floodsub happens
@@ -62,68 +62,36 @@ class FloodSub(IPubsubRouter):
         so that seen messages are not further forwarded.
         It also never forwards a message back to the source
         or the peer that forwarded the message.
-        :param sender_peer_id: peer_id of message sender
-        :param rpc_message: pubsub message in RPC string format
+        :param src: the peer id of the peer who forwards the message to me.
+        :param pubsub_msg: pubsub message in protobuf.
         """
         peers_gen = self._get_peers_to_send(
-            pubsub_message.topicIDs,
-            from_peer_id=from_peer,
-            src_peer_id=ID(pubsub_message.from_id),
+            pubsub_msg.topicIDs,
+            src=src,
+            origin=ID(pubsub_msg.from_id),
         )
         rpc_msg = rpc_pb2.RPC(
-            publish=[pubsub_message],
+            publish=[pubsub_msg],
         )
         for peer_id in peers_gen:
             stream = self.pubsub.peers[str(peer_id)]
             await stream.write(rpc_msg.SerializeToString())
-        # packet = rpc_pb2.RPC()
-        # packet.ParseFromString(rpc_message)
-        # from_peer_str = str(from_peer)
-        # for topic in pubsub_message.topicIDs:
-        #     if topic not in self.pubsub.topics:
-        #         continue
-        #     peers = self.pubsub.peer_topics[topic]
-        # # Deliver to self if self was origin
-        # # Note: handle_talk checks if self is subscribed to topics in message
-        # for message in packet.publish:
-        #     decoded_from_id = message.from_id.decode('utf-8')
-        #     if msg_sender == decoded_from_id and msg_sender == str(self.pubsub.host.get_id()):
-        #         id_in_seen_msgs = (message.seqno, message.from_id)
-        #         if id_in_seen_msgs not in self.pubsub.seen_messages:
-        #             self.pubsub.seen_messages[id_in_seen_msgs] = 1
-        #             await self.pubsub.handle_talk(message)
-        #     # Deliver to self and peers
-        #     for topic in message.topicIDs:
-        #         if topic in self.pubsub.peer_topics:
-        #             for peer_id_in_topic in self.pubsub.peer_topics[topic]:
-        #                 # Forward to all known peers in the topic that are not the
-        #                 # message sender and are not the message origin
-        #                 if peer_id_in_topic not in (msg_sender, decoded_from_id):
-        #                     stream = self.pubsub.peers[peer_id_in_topic]
-        #                     # Create new packet with just publish message
-        #                     new_packet = rpc_pb2.RPC()
-        #                     new_packet.publish.extend([message])
-        #                     # Publish the packet
-        #                     await stream.write(new_packet.SerializeToString())

     def _get_peers_to_send(
             self,
-            topic_ids: Sequence[str],
-            from_peer_id: ID,
-            src_peer_id: ID) -> Generator[ID]:
-        # TODO: should send to self if `src_peer_id` is ourself?
+            topic_ids: Iterable[str],
+            src: ID,
+            origin: ID) -> Iterable[ID]:
+        """
+        :return: a generator of the peer ids that we send the message to.
+        """
         for topic in topic_ids:
-            if topic not in self.pubsub.topics:
+            if topic not in self.pubsub.peer_topics:
                 continue
-            for peer_id in self.pubsub.peer_topics[topic]:
-                if peer_id in (from_peer_id, src_peer_id):
+            for peer_id_str in self.pubsub.peer_topics[topic]:
+                peer_id = id_b58_decode(peer_id_str)
+                if peer_id in (src, origin):
                     continue
+                # FIXME: Should change `self.pubsub.peers` to Dict[PeerID, ...]
+                if str(peer_id) not in self.pubsub.peers:
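The refactored flooding rule above is compact but easy to misread in diff form: for every topic the message carries, send to every peer known to be subscribed to that topic, except the peer that forwarded it to us (`src`) and the peer that originally published it (`origin`), and only if we still hold a stream to that peer. A stand-alone sketch of the same selection rule, with simplified types (plain strings instead of `ID`/`INetStream`; the `peers_to_send` name and the example data are hypothetical, not part of the commit):

from typing import Dict, Iterable, List, Set


def peers_to_send(
        peer_topics: Dict[str, List[str]],  # topic -> base58 peer ids subscribed to it
        connected: Set[str],                # peer ids we currently hold a stream to
        topic_ids: Iterable[str],
        src: str,                           # peer that forwarded the message to us
        origin: str) -> Iterable[str]:
    # Mirrors the flooding rule: everyone in the topic except forwarder and origin.
    for topic in topic_ids:
        if topic not in peer_topics:
            continue
        for peer_id in peer_topics[topic]:
            if peer_id in (src, origin):
                continue
            if peer_id not in connected:
                continue
            yield peer_id


# B forwarded a message that A originally published; only C and D receive a copy.
topics = {"blocks": ["A", "B", "C", "D"]}
print(list(peers_to_send(topics, {"A", "B", "C", "D"}, ["blocks"], src="B", origin="A")))
# -> ['C', 'D']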

View File

@@ -45,7 +45,8 @@ class Pubsub:
     outgoing_messages: asyncio.Queue()
     seen_messages: LRU
     my_topics: Dict[str, asyncio.Queue]
-    peer_topics: Dict[str, List[ID]]
+    # FIXME: Should be changed to `Dict[str, List[ID]]`
+    peer_topics: Dict[str, List[str]]
     # FIXME: Should be changed to `Dict[ID, INetStream]`
     peers: Dict[str, INetStream]
     # NOTE: Be sure it is increased atomically every time.
@@ -320,23 +321,34 @@ class Pubsub:
         # Write message to stream
         await stream.write(rpc_msg)

-    def list_peers(self, topic_id: str) -> Tuple[ID]:
+    def list_peers(self, topic_id: str) -> Tuple[ID, ...]:
         return

     async def publish(self, topic_id: str, data: bytes) -> None:
+        """
+        Publish data to a topic
+        :param topic_id: topic which we are going to publish the data to
+        :param data: data which we are publishing
+        """
         msg = rpc_pb2.Message(
             data=data,
             topicIDs=[topic_id],
             # Origin is myself.
             from_id=self.host.get_id().to_bytes(),
             seqno=self._next_seqno(),
         )
         # TODO: Sign with our signing key
-        self.push_msg(self.host.get_id(), msg)
+        await self.push_msg(self.host.get_id(), msg)

-    async def push_msg(self, src: ID, msg: rpc_pb2.Message):
-        # TODO: - Check if the source is in the blacklist. If yes, reject.
+    async def push_msg(self, src: ID, msg: rpc_pb2.Message) -> None:
+        """
+        Push a pubsub message to others.
+        :param src: the peer who forwards us the message.
+        :param msg: the message we are going to push out.
+        """
+        # TODO: - Check if the `source` is in the blacklist. If yes, reject.
+        # TODO: - Check if the `from` is in the blacklist. If yes, reject.
@@ -352,6 +364,9 @@ class Pubsub:
         await self.router.publish(src, msg)

     def _next_seqno(self) -> bytes:
+        """
+        Make the next message sequence id.
+        """
         self.counter += 1
         return self.counter.to_bytes(8, 'big')
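For context on the sequence numbers used by `publish` above: `_next_seqno` serializes a per-node counter as 8 big-endian bytes, and (as in the commented-out code removed from floodsub) the pair `(message.seqno, message.from_id)` is what the seen-messages cache keys on. A minimal stand-alone sketch of that encoding; the `next_seqno` and `seen_key` helpers and the `b'QmPeerA'` id are hypothetical:

import itertools
from typing import Tuple

_counter = itertools.count(1)  # stands in for Pubsub.counter

def next_seqno() -> bytes:
    # Same encoding as _next_seqno above: an 8-byte, big-endian unsigned counter.
    return next(_counter).to_bytes(8, 'big')

def seen_key(from_id: bytes, seqno: bytes) -> Tuple[bytes, bytes]:
    # The (seqno, from_id) pair identifies a message in the seen-messages cache.
    return (seqno, from_id)

s1, s2 = next_seqno(), next_seqno()
assert s1 == b'\x00' * 7 + b'\x01'
assert s2 > s1  # big-endian bytes compare in numeric order
assert seen_key(b'QmPeerA', s1) != seen_key(b'QmPeerA', s2)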

View File

@@ -42,11 +42,11 @@ class IPubsubRouter(ABC):
         """

     @abstractmethod
-    def publish(self, sender_peer_id, rpc_message):
+    async def publish(self, src, pubsub_msg) -> None:
         """
         Invoked to forward a new message that has been validated
-        :param sender_peer_id: peer_id of message sender
-        :param rpc_message: message to forward
+        :param src: peer_id of message sender
+        :param pubsub_msg: pubsub message to forward
         """

     @abstractmethod
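With this change, `IPubsubRouter.publish` is a coroutine that receives the forwarding peer's id and a protobuf message rather than a raw RPC string, so concrete routers must now be awaited. A minimal sketch of a conforming subclass, assuming only the abstract method shown above (the trimmed interface copy and the `DropRouter` class are illustrative, not part of the commit):

import asyncio
from abc import ABC, abstractmethod


class IPubsubRouter(ABC):
    # Trimmed copy of the interface, just enough to make the example runnable.
    @abstractmethod
    async def publish(self, src, pubsub_msg) -> None:
        """
        Invoked to forward a new message that has been validated
        :param src: peer_id of message sender
        :param pubsub_msg: pubsub message to forward
        """


class DropRouter(IPubsubRouter):
    # Illustrative router that logs and drops every message instead of flooding it.
    async def publish(self, src, pubsub_msg) -> None:
        print(f"dropping message forwarded by {src}")


asyncio.run(DropRouter().publish("QmPeerA", object()))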