update kademlia lib

This commit is contained in:
Christophe de Carvalho Pereira Martins
2019-01-15 18:41:41 +01:00
committed by Christophe de Carvalho
parent 57077cd3b4
commit c24a279d2d
8 changed files with 233 additions and 235 deletions

View File

@ -1,16 +1,19 @@
from collections import Counter
import logging
from .kademlia.node import Node, NodeHeap
from .kademlia.utils import gather_dict
log = logging.getLogger(__name__)
from .node import Node, NodeHeap
from .utils import gather_dict
log = logging.getLogger(__name__) # pylint: disable=invalid-name
# pylint: disable=too-few-public-methods
class SpiderCrawl:
"""
Crawl the network and look for given 160-bit keys.
"""
def __init__(self, protocol, node, peers, ksize, alpha):
"""
Create a new C{SpiderCrawl}er.
@ -29,7 +32,7 @@ class SpiderCrawl:
self.alpha = alpha
self.node = node
self.nearest = NodeHeap(self.node, self.ksize)
self.lastIDsCrawled = []
self.last_ids_crawled = []
log.info("creating spider with peers: %s", peers)
self.nearest.push(peers)
@ -38,7 +41,7 @@ class SpiderCrawl:
Get either a value or list of nodes.
Args:
rpcmethod: The protocol's callfindValue or callFindNode.
rpcmethod: The protocol's call_find_value or call_find_node.
The process:
1. calls find_* to current ALPHA nearest not already queried nodes,
@ -51,18 +54,18 @@ class SpiderCrawl:
"""
log.info("crawling network with nearest: %s", str(tuple(self.nearest)))
count = self.alpha
if self.nearest.getIDs() == self.lastIDsCrawled:
if self.nearest.get_ids() == self.last_ids_crawled:
count = len(self.nearest)
self.lastIDsCrawled = self.nearest.getIDs()
self.last_ids_crawled = self.nearest.get_ids()
ds = {}
for peer in self.nearest.getUncontacted()[:count]:
ds[peer.id] = rpcmethod(peer, self.node)
self.nearest.markContacted(peer)
found = await gather_dict(ds)
return await self._nodesFound(found)
dicts = {}
for peer in self.nearest.get_uncontacted()[:count]:
dicts[peer.id] = rpcmethod(peer, self.node)
self.nearest.mark_contacted(peer)
found = await gather_dict(dicts)
return await self._nodes_found(found)
async def _nodesFound(self, responses):
async def _nodes_found(self, responses):
raise NotImplementedError
@ -71,55 +74,55 @@ class ValueSpiderCrawl(SpiderCrawl):
SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha)
# keep track of the single nearest node without value - per
# section 2.3 so we can set the key there if found
self.nearestWithoutValue = NodeHeap(self.node, 1)
self.nearest_without_value = NodeHeap(self.node, 1)
async def find(self):
"""
Find either the closest nodes or the value requested.
"""
return await self._find(self.protocol.callFindValue)
return await self._find(self.protocol.call_find_value)
async def _nodesFound(self, responses):
async def _nodes_found(self, responses):
"""
Handle the result of an iteration in _find.
"""
toremove = []
foundValues = []
found_values = []
for peerid, response in responses.items():
response = RPCFindResponse(response)
if not response.happened():
toremove.append(peerid)
elif response.hasValue():
foundValues.append(response.getValue())
elif response.has_value():
found_values.append(response.get_value())
else:
peer = self.nearest.getNodeById(peerid)
self.nearestWithoutValue.push(peer)
self.nearest.push(response.getNodeList())
peer = self.nearest.get_node(peerid)
self.nearest_without_value.push(peer)
self.nearest.push(response.get_node_list())
self.nearest.remove(toremove)
if len(foundValues) > 0:
return await self._handleFoundValues(foundValues)
if self.nearest.allBeenContacted():
if found_values:
return await self._handle_found_values(found_values)
if self.nearest.have_contacted_all():
# not found!
return None
return await self.find()
async def _handleFoundValues(self, values):
async def _handle_found_values(self, values):
"""
We got some values! Exciting. But let's make sure
they're all the same or freak out a little bit. Also,
make sure we tell the nearest node that *didn't* have
the value to store it.
"""
valueCounts = Counter(values)
if len(valueCounts) != 1:
value_counts = Counter(values)
if len(value_counts) != 1:
log.warning("Got multiple values for key %i: %s",
self.node.long_id, str(values))
value = valueCounts.most_common(1)[0][0]
value = value_counts.most_common(1)[0][0]
peerToSaveTo = self.nearestWithoutValue.popleft()
if peerToSaveTo is not None:
await self.protocol.callStore(peerToSaveTo, self.node.id, value)
peer = self.nearest_without_value.popleft()
if peer:
await self.protocol.call_store(peer, self.node.id, value)
return value
@ -128,9 +131,9 @@ class NodeSpiderCrawl(SpiderCrawl):
"""
Find the closest nodes.
"""
return await self._find(self.protocol.callFindNode)
return await self._find(self.protocol.call_find_node)
async def _nodesFound(self, responses):
async def _nodes_found(self, responses):
"""
Handle the result of an iteration in _find.
"""
@ -140,10 +143,10 @@ class NodeSpiderCrawl(SpiderCrawl):
if not response.happened():
toremove.append(peerid)
else:
self.nearest.push(response.getNodeList())
self.nearest.push(response.get_node_list())
self.nearest.remove(toremove)
if self.nearest.allBeenContacted():
if self.nearest.have_contacted_all():
return list(self.nearest)
return await self.find()
@ -166,13 +169,13 @@ class RPCFindResponse:
"""
return self.response[0]
def hasValue(self):
def has_value(self):
return isinstance(self.response[1], dict)
def getValue(self):
def get_value(self):
return self.response[1]['value']
def getNodeList(self):
def get_node_list(self):
"""
Get the node list in the response. If there's no value, this should
be set.