Add automatic docstring formatter and apply

This commit is contained in:
Dominik Muhs
2019-10-24 08:41:10 +02:00
parent 30aeb35122
commit eef505f2d9
74 changed files with 565 additions and 760 deletions

View File

@@ -8,13 +8,10 @@ log = logging.getLogger(__name__)
class SpiderCrawl:
"""
Crawl the network and look for given 160-bit keys.
"""
"""Crawl the network and look for given 160-bit keys."""
def __init__(self, protocol, node, peers, ksize, alpha):
"""
Create a new C{SpiderCrawl}er.
"""Create a new C{SpiderCrawl}er.
Args:
protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
@@ -35,8 +32,7 @@ class SpiderCrawl:
self.nearest.push(peers)
async def _find(self, rpcmethod):
"""
Get either a value or list of nodes.
"""Get either a value or list of nodes.
Args:
rpcmethod: The protocol's callfindValue or call_find_node.
@@ -75,15 +71,11 @@ class ValueSpiderCrawl(SpiderCrawl):
self.nearest_without_value = KadPeerHeap(self.node, 1)
async def find(self):
"""
Find either the closest nodes or the value requested.
"""
"""Find either the closest nodes or the value requested."""
return await self._find(self.protocol.call_find_value)
async def _nodes_found(self, responses):
"""
Handle the result of an iteration in _find.
"""
"""Handle the result of an iteration in _find."""
toremove = []
found_values = []
for peerid, response in responses.items():
@@ -106,11 +98,11 @@ class ValueSpiderCrawl(SpiderCrawl):
return await self.find()
async def _handle_found_values(self, values):
"""
We got some values! Exciting. But let's make sure
they're all the same or freak out a little bit. Also,
make sure we tell the nearest node that *didn't* have
the value to store it.
"""We got some values!
Exciting. But let's make sure they're all the same or freak out
a little bit. Also, make sure we tell the nearest node that
*didn't* have the value to store it.
"""
value_counts = Counter(values)
if len(value_counts) != 1:
@@ -127,15 +119,11 @@ class ValueSpiderCrawl(SpiderCrawl):
class NodeSpiderCrawl(SpiderCrawl):
async def find(self):
"""
Find the closest nodes.
"""
"""Find the closest nodes."""
return await self._find(self.protocol.call_find_node)
async def _nodes_found(self, responses):
"""
Handle the result of an iteration in _find.
"""
"""Handle the result of an iteration in _find."""
toremove = []
for peerid, response in responses.items():
response = RPCFindResponse(response)
@@ -152,8 +140,7 @@ class NodeSpiderCrawl(SpiderCrawl):
class RPCFindResponse:
def __init__(self, response):
"""
A wrapper for the result of a RPC find.
"""A wrapper for the result of a RPC find.
Args:
response: This will be a tuple of (<response received>, <value>)
@@ -163,9 +150,7 @@ class RPCFindResponse:
self.response = response
def happened(self):
"""
Did the other host actually respond?
"""
"""Did the other host actually respond?"""
return self.response[0]
def has_value(self):
@@ -175,9 +160,9 @@ class RPCFindResponse:
return self.response[1]["value"]
def get_node_list(self):
"""
Get the node list in the response. If there's no value, this should
be set.
"""Get the node list in the response.
If there's no value, this should be set.
"""
nodelist = self.response[1] or []
return [create_kad_peerinfo(*nodeple) for nodeple in nodelist]