all tests pass

parent ab8291eada
commit 20dd256608

.pylintrc  (new file, 534 lines)
@@ -0,0 +1,534 @@
[MASTER]

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=

# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS

# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=

# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=

# Use multiple processes to speed up Pylint.
jobs=1

# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=

# Pickle collected data for later comparisons.
persistent=yes

# Specify a configuration file.
rcfile=.pylintrc

# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages
suggestion-mode=yes

# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no


[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=print-statement,
        parameter-unpacking,
        unpacking-in-except,
        old-raise-syntax,
        backtick,
        long-suffix,
        old-ne-operator,
        old-octal-literal,
        import-star-module-level,
        non-ascii-bytes-literal,
        raw-checker-failed,
        bad-inline-option,
        locally-disabled,
        locally-enabled,
        file-ignored,
        suppressed-message,
        useless-suppression,
        deprecated-pragma,
        apply-builtin,
        basestring-builtin,
        buffer-builtin,
        cmp-builtin,
        coerce-builtin,
        execfile-builtin,
        file-builtin,
        long-builtin,
        raw_input-builtin,
        reduce-builtin,
        standarderror-builtin,
        unicode-builtin,
        xrange-builtin,
        coerce-method,
        delslice-method,
        getslice-method,
        setslice-method,
        no-absolute-import,
        old-division,
        dict-iter-method,
        dict-view-method,
        next-method-called,
        metaclass-assignment,
        indexing-exception,
        raising-string,
        reload-builtin,
        oct-method,
        hex-method,
        nonzero-method,
        cmp-method,
        input-builtin,
        round-builtin,
        intern-builtin,
        unichr-builtin,
        map-builtin-not-iterating,
        zip-builtin-not-iterating,
        range-builtin-not-iterating,
        filter-builtin-not-iterating,
        using-cmp-argument,
        eq-without-hash,
        div-method,
        idiv-method,
        rdiv-method,
        exception-message-attribute,
        invalid-str-codec,
        sys-max-int,
        bad-python3-import,
        deprecated-string-function,
        deprecated-str-translate-call,
        deprecated-itertools-function,
        deprecated-types-field,
        next-method-defined,
        dict-items-not-iterating,
        dict-keys-not-iterating,
        dict-values-not-iterating,
        missing-docstring

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member


[REPORTS]

# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=

# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text

# Tells whether to display a full report or only the messages
reports=no

# Activate the evaluation score.
score=no


[REFACTORING]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5


[LOGGING]

# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging


[SPELLING]

# Limits count of emitted suggestions for spelling mistakes
max-spelling-suggestions=4

# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=

# List of comma separated words that should not be checked.
spelling-ignore-words=

# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=

# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO


[TYPECHECK]

# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=

# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes

# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local

# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=

# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes

# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1

# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1


[VARIABLES]

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=

# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes

# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb

# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_

# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*|^ignored_|^unused_

# Tells whether we should check for unused import in __init__ files.
init-import=no

# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins


[FORMAT]

# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=

# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4

# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '

# Maximum number of characters on a single line.
max-line-length=100

# Maximum number of lines in a module
max-module-lines=1000

# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
               dict-separator

# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no

# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no


[SIMILARITIES]

# Ignore comments when computing similarities.
ignore-comments=yes

# Ignore docstrings when computing similarities.
ignore-docstrings=yes

# Ignore imports when computing similarities.
ignore-imports=no

# Minimum lines number of a similarity.
min-similarity-lines=4


[BASIC]

# Naming style matching correct argument names
argument-naming-style=snake_case

# Regular expression matching correct argument names. Overrides argument-
# naming-style
#argument-rgx=

# Naming style matching correct attribute names
attr-naming-style=snake_case

# Regular expression matching correct attribute names. Overrides attr-naming-
# style
#attr-rgx=

# Bad variable names which should always be refused, separated by a comma
bad-names=foo,
          bar,
          baz,
          toto,
          tutu,
          tata

# Naming style matching correct class attribute names
class-attribute-naming-style=any

# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style
#class-attribute-rgx=

# Naming style matching correct class names
class-naming-style=PascalCase

# Regular expression matching correct class names. Overrides class-naming-style
#class-rgx=

# Naming style matching correct constant names
const-naming-style=UPPER_CASE

# Regular expression matching correct constant names. Overrides const-naming-
# style
#const-rgx=

# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1

# Naming style matching correct function names
function-naming-style=snake_case

# Regular expression matching correct function names. Overrides function-
# naming-style
#function-rgx=

# Good variable names which should always be accepted, separated by a comma
good-names=i,
           j,
           k,
           ex,
           Run,
           _

# Include a hint for the correct naming format with invalid-name
include-naming-hint=no

# Naming style matching correct inline iteration names
inlinevar-naming-style=any

# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style
#inlinevar-rgx=

# Naming style matching correct method names
method-naming-style=snake_case

# Regular expression matching correct method names. Overrides method-naming-
# style
#method-rgx=

# Naming style matching correct module names
module-naming-style=snake_case

# Regular expression matching correct module names. Overrides module-naming-
# style
#module-rgx=

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=

# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_

# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty

# Naming style matching correct variable names
variable-naming-style=snake_case

# Regular expression matching correct variable names. Overrides variable-
# naming-style
#variable-rgx=


[IMPORTS]

# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no

# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no

# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse,tkinter.tix

# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=

# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=

# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=

# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=

# Force import order to recognize a module as part of a third party library.
known-third-party=enchant


[CLASSES]

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp

# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
                  _fields,
                  _replace,
                  _source,
                  _make

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs


[DESIGN]

# Maximum number of arguments for function / method
max-args=5

# Maximum number of attributes for a class (see R0902).
max-attributes=7

# Maximum number of boolean expressions in a if statement
max-bool-expr=5

# Maximum number of branch for function / method body
max-branches=12

# Maximum number of locals for function / method body
max-locals=15

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of public methods for a class (see R0904).
max-public-methods=20

# Maximum number of return / yield for function / method body
max-returns=6

# Maximum number of statements in function / method body
max-statements=50

# Minimum number of public methods for a class (see R0903).
min-public-methods=2


[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
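With this .pylintrc at the repository root, pylint reads it automatically when run from there; it can also be passed explicitly with the rcfile option, for example pylint --rcfile=.pylintrc kademlia (assuming pylint is installed and the package directory is named kademlia, as the imports in the diffs below suggest).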
@@ -1,4 +1,5 @@
 """
-Kademlia is a Python implementation of the Kademlia protocol which utilizes the asyncio library.
+Kademlia is a Python implementation of the Kademlia protocol which
+utilizes the asyncio library.
 """
 __version__ = "1.0"
@@ -17,8 +17,10 @@ class SpiderCrawl(object):

         Args:
             protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
-            node: A :class:`~kademlia.node.Node` representing the key we're looking for
-            peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network
+            node: A :class:`~kademlia.node.Node` representing the key we're
+                  looking for
+            peers: A list of :class:`~kademlia.node.Node` instances that
+                   provide the entry point for the network
             ksize: The value for k based on the paper
             alpha: The value for alpha based on the paper
         """
@@ -28,10 +30,9 @@ class SpiderCrawl(object):
         self.node = node
         self.nearest = NodeHeap(self.node, self.ksize)
         self.lastIDsCrawled = []
-        log.info("creating spider with peers: %s" % peers)
+        log.info("creating spider with peers: %s", peers)
         self.nearest.push(peers)
-

     async def _find(self, rpcmethod):
         """
         Get either a value or list of nodes.
@@ -42,16 +43,15 @@ class SpiderCrawl(object):
         The process:
          1. calls find_* to current ALPHA nearest not already queried nodes,
             adding results to current nearest list of k nodes.
-         2. current nearest list needs to keep track of who has been queried already
-            sort by nearest, keep KSIZE
+         2. current nearest list needs to keep track of who has been queried
+            already sort by nearest, keep KSIZE
          3. if list is same as last time, next call should be to everyone not
            yet queried
          4. repeat, unless nearest list has all been queried, then ur done
         """
-        log.info("crawling with nearest: %s" % str(tuple(self.nearest)))
+        log.info("crawling network with nearest: %s", str(tuple(self.nearest)))
         count = self.alpha
         if self.nearest.getIDs() == self.lastIDsCrawled:
-            log.info("last iteration same as current - checking all in list now")
             count = len(self.nearest)
         self.lastIDsCrawled = self.nearest.getIDs()

@@ -62,6 +62,9 @@ class SpiderCrawl(object):
         found = await gather_dict(ds)
         return await self._nodesFound(found)

+    async def _nodesFound(self, responses):
+        raise NotImplementedError
+

 class ValueSpiderCrawl(SpiderCrawl):
     def __init__(self, protocol, node, peers, ksize, alpha):
@@ -110,8 +113,8 @@ class ValueSpiderCrawl(SpiderCrawl):
         """
         valueCounts = Counter(values)
         if len(valueCounts) != 1:
-            args = (self.node.long_id, str(values))
-            log.warning("Got multiple values for key %i: %s" % args)
+            log.warning("Got multiple values for key %i: %s",
+                        self.node.long_id, str(values))
         value = valueCounts.most_common(1)[0][0]

         peerToSaveTo = self.nearestWithoutValue.popleft()
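Most of the logging changes in this commit replace eager %-formatting with logging's deferred-argument form, which is what pylint's logging checker (configured above with logging-modules=logging) expects. A minimal illustrative sketch, with a made-up peers value:

    import logging
    log = logging.getLogger(__name__)

    peers = ["1.2.3.4:8468"]                             # hypothetical value, for illustration only
    log.info("creating spider with peers: %s" % peers)   # eager: the string is built even if INFO is disabled
    log.info("creating spider with peers: %s", peers)    # lazy: formatting is deferred until a handler emits the record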
@@ -18,26 +18,27 @@ log = logging.getLogger(__name__)

 class Server(object):
     """
-    High level view of a node instance. This is the object that should be created
-    to start listening as an active node on the network.
+    High level view of a node instance. This is the object that should be
+    created to start listening as an active node on the network.
     """

     protocol_class = KademliaProtocol

-    def __init__(self, ksize=20, alpha=3, id=None, storage=None):
+    def __init__(self, ksize=20, alpha=3, node_id=None, storage=None):
         """
         Create a server instance. This will start listening on the given port.

         Args:
             ksize (int): The k parameter from the paper
             alpha (int): The alpha parameter from the paper
-            id: The id for this node on the network.
-            storage: An instance that implements :interface:`~kademlia.storage.IStorage`
+            node_id: The id for this node on the network.
+            storage: An instance that implements
+                     :interface:`~kademlia.storage.IStorage`
         """
         self.ksize = ksize
         self.alpha = alpha
         self.storage = storage or ForgetfulStorage()
-        self.node = Node(id or digest(random.getrandbits(255)))
+        self.node = Node(node_id or digest(random.getrandbits(255)))
         self.transport = None
         self.protocol = None
         self.refresh_loop = None
@@ -53,16 +54,20 @@ class Server(object):
         if self.save_state_loop:
             self.save_state_loop.cancel()

+    def _create_protocol(self):
+        return self.protocol_class(self.node, self.storage, self.ksize)
+
     def listen(self, port, interface='0.0.0.0'):
         """
         Start listening on the given port.

         Provide interface="::" to accept ipv6 address
         """
-        proto_factory = lambda: self.protocol_class(self.node, self.storage, self.ksize)
         loop = asyncio.get_event_loop()
-        listen = loop.create_datagram_endpoint(proto_factory, local_addr=(interface, port))
-        log.info("Node %i listening on %s:%i", self.node.long_id, interface, port)
+        listen = loop.create_datagram_endpoint(self._create_protocol,
+                                               local_addr=(interface, port))
+        log.info("Node %i listening on %s:%i",
+                 self.node.long_id, interface, port)
         self.transport, self.protocol = loop.run_until_complete(listen)
         # finally, schedule refreshing table
         self.refresh_table()
@@ -79,10 +84,11 @@ class Server(object):
         (per section 2.3 of the paper).
         """
         ds = []
-        for id in self.protocol.getRefreshIDs():
-            node = Node(id)
+        for node_id in self.protocol.getRefreshIDs():
+            node = Node(node_id)
             nearest = self.protocol.router.findNeighbors(node, self.alpha)
-            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
+            spider = NodeSpiderCrawl(self.protocol, node, nearest,
+                                     self.ksize, self.alpha)
             ds.append(spider.find())

         # do our crawling
@@ -90,12 +96,12 @@ class Server(object):

         # now republish keys older than one hour
         for dkey, value in self.storage.iteritemsOlderThan(3600):
-            await self.digest_set(dkey, value)
+            await self.set_digest(dkey, value)

     def bootstrappableNeighbors(self):
         """
-        Get a :class:`list` of (ip, port) :class:`tuple` pairs suitable for use as an argument
-        to the bootstrap method.
+        Get a :class:`list` of (ip, port) :class:`tuple` pairs suitable for
+        use as an argument to the bootstrap method.

         The server should have been bootstrapped
         already - this is just a utility for getting some neighbors and then
@@ -103,43 +109,29 @@ class Server(object):
         back up, the list of nodes can be used to bootstrap.
         """
         neighbors = self.protocol.router.findNeighbors(self.node)
-        return [ tuple(n)[-2:] for n in neighbors ]
+        return [tuple(n)[-2:] for n in neighbors]

     async def bootstrap(self, addrs):
         """
         Bootstrap the server by connecting to other known nodes in the network.

         Args:
-            addrs: A `list` of (ip, port) `tuple` pairs. Note that only IP addresses
-                   are acceptable - hostnames will cause an error.
+            addrs: A `list` of (ip, port) `tuple` pairs. Note that only IP
+                   addresses are acceptable - hostnames will cause an error.
         """
-        log.debug("Attempting to bootstrap node with %i initial contacts", len(addrs))
+        log.debug("Attempting to bootstrap node with %i initial contacts",
+                  len(addrs))
         cos = list(map(self.bootstrap_node, addrs))
-        nodes = [node for node in await asyncio.gather(*cos) if not node is None]
-        spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
+        gathered = await asyncio.gather(*cos)
+        nodes = [node for node in gathered if node is not None]
+        spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
+                                 self.ksize, self.alpha)
         return await spider.find()

     async def bootstrap_node(self, addr):
         result = await self.protocol.ping(addr, self.node.id)
         return Node(result[1], addr[0], addr[1]) if result[0] else None

-    def inetVisibleIP(self):
-        """
-        Get the internet visible IP's of this node as other nodes see it.
-
-        Returns:
-            A `list` of IP's. If no one can be contacted, then the `list` will be empty.
-        """
-        def handle(results):
-            ips = [ result[1][0] for result in results if result[0] ]
-            log.debug("other nodes think our ip is %s" % str(ips))
-            return ips
-
-        ds = []
-        for neighbor in self.bootstrappableNeighbors():
-            ds.append(self.protocol.stun(neighbor))
-        return defer.gatherResults(ds).addCallback(handle)
-
     async def get(self, key):
         """
         Get a key if the network has it.
@@ -155,36 +147,41 @@ class Server(object):
         node = Node(dkey)
         nearest = self.protocol.router.findNeighbors(node)
         if len(nearest) == 0:
-            log.warning("There are no known neighbors to get key %s" % key)
+            log.warning("There are no known neighbors to get key %s", key)
             return None
-        spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
+        spider = ValueSpiderCrawl(self.protocol, node, nearest,
+                                  self.ksize, self.alpha)
         return await spider.find()

     async def set(self, key, value):
         """
         Set the given string key to the given value in the network.
         """
-        log.info("setting '%s' = '%s' on network" % (key, value))
+        log.info("setting '%s' = '%s' on network", key, value)
         dkey = digest(key)
         return await self.set_digest(dkey, value)

     async def set_digest(self, dkey, value):
         """
-        Set the given SHA1 digest key (bytes) to the given value in the network.
+        Set the given SHA1 digest key (bytes) to the given value in the
+        network.
         """
         node = Node(dkey)

         nearest = self.protocol.router.findNeighbors(node)
         if len(nearest) == 0:
-            log.warning("There are no known neighbors to set key %s" % dkey.hex())
+            log.warning("There are no known neighbors to set key %s",
+                        dkey.hex())
             return False

-        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
+        spider = NodeSpiderCrawl(self.protocol, node, nearest,
+                                 self.ksize, self.alpha)
         nodes = await spider.find()
-        log.info("setting '%s' on %s" % (dkey.hex(), list(map(str, nodes))))
+        log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

         # if this node is close too, then store here as well
-        if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
+        biggest = max([n.distanceTo(node) for n in nodes])
+        if self.node.distanceTo(node) < biggest:
             self.storage[dkey] = value
         ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
         # return true only if at least one store call succeeded
@@ -196,10 +193,12 @@ class Server(object):
         to a cache file with the given fname.
         """
         log.info("Saving state to %s", fname)
-        data = { 'ksize': self.ksize,
-                 'alpha': self.alpha,
-                 'id': self.node.id,
-                 'neighbors': self.bootstrappableNeighbors() }
+        data = {
+            'ksize': self.ksize,
+            'alpha': self.alpha,
+            'id': self.node.id,
+            'neighbors': self.bootstrappableNeighbors()
+        }
         if len(data['neighbors']) == 0:
             log.warning("No known neighbors, so not writing to cache.")
             return
@@ -232,4 +231,7 @@ class Server(object):
         """
         self.saveState(fname)
         loop = asyncio.get_event_loop()
-        self.save_state_loop = loop.call_later(frequency, self.saveStateRegularly, fname, frequency)
+        self.save_state_loop = loop.call_later(frequency,
+                                               self.saveStateRegularly,
+                                               fname,
+                                               frequency)
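For context, a minimal usage sketch of the Server API touched above; the import path, port number and peer address are assumptions for illustration, following the upstream project layout:

    import asyncio
    from kademlia.network import Server   # assumed module path

    loop = asyncio.get_event_loop()
    server = Server()                      # defaults: ksize=20, alpha=3, random node_id
    server.listen(8468)                    # builds the UDP endpoint via _create_protocol
    loop.run_until_complete(server.bootstrap([("1.2.3.4", 8468)]))   # hypothetical peer
    loop.run_until_complete(server.set("my-key", "my-value"))
    value = loop.run_until_complete(server.get("my-key"))
    server.stop()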
@@ -3,11 +3,11 @@ import heapq


 class Node:
-    def __init__(self, id, ip=None, port=None):
-        self.id = id
+    def __init__(self, node_id, ip=None, port=None):
+        self.id = node_id
         self.ip = ip
         self.port = port
-        self.long_id = int(id.hex(), 16)
+        self.long_id = int(node_id.hex(), 16)

     def sameHomeAs(self, node):
         return self.ip == node.ip and self.port == node.port
@@ -64,9 +64,9 @@ class NodeHeap(object):
             heapq.heappush(nheap, (distance, node))
         self.heap = nheap

-    def getNodeById(self, id):
+    def getNodeById(self, node_id):
         for _, node in self.heap:
-            if node.id == id:
+            if node.id == node_id:
                 return node
         return None

@@ -106,7 +106,7 @@ class NodeHeap(object):
         return iter(map(itemgetter(1), nodes))

     def __contains__(self, node):
-        for distance, n in self.heap:
+        for _, n in self.heap:
             if node.id == n.id:
                 return True
         return False
@@ -24,7 +24,8 @@ class KademliaProtocol(RPCProtocol):
         """
         ids = []
         for bucket in self.router.getLonelyBuckets():
-            ids.append(random.randint(*bucket.range).to_bytes(20, byteorder='big'))
+            rid = random.randint(*bucket.range).to_bytes(20, byteorder='big')
+            ids.append(rid)
         return ids

     def rpc_stun(self, sender):
@@ -38,16 +39,19 @@ class KademliaProtocol(RPCProtocol):
     def rpc_store(self, sender, nodeid, key, value):
         source = Node(nodeid, sender[0], sender[1])
         self.welcomeIfNewNode(source)
-        log.debug("got a store request from %s, storing '%s'='%s'", sender, key.hex(), value)
+        log.debug("got a store request from %s, storing '%s'='%s'",
+                  sender, key.hex(), value)
         self.storage[key] = value
         return True

     def rpc_find_node(self, sender, nodeid, key):
-        log.info("finding neighbors of %i in local table", int(nodeid.hex(), 16))
+        log.info("finding neighbors of %i in local table",
+                 int(nodeid.hex(), 16))
         source = Node(nodeid, sender[0], sender[1])
         self.welcomeIfNewNode(source)
         node = Node(key)
-        return list(map(tuple, self.router.findNeighbors(node, exclude=source)))
+        neighbors = self.router.findNeighbors(node, exclude=source)
+        return list(map(tuple, neighbors))

     def rpc_find_value(self, sender, nodeid, key):
         source = Node(nodeid, sender[0], sender[1])
@@ -55,16 +59,18 @@ class KademliaProtocol(RPCProtocol):
         value = self.storage.get(key, None)
         if value is None:
             return self.rpc_find_node(sender, nodeid, key)
-        return { 'value': value }
+        return {'value': value}

     async def callFindNode(self, nodeToAsk, nodeToFind):
         address = (nodeToAsk.ip, nodeToAsk.port)
-        result = await self.find_node(address, self.sourceNode.id, nodeToFind.id)
+        result = await self.find_node(address, self.sourceNode.id,
+                                      nodeToFind.id)
         return self.handleCallResponse(result, nodeToAsk)

     async def callFindValue(self, nodeToAsk, nodeToFind):
         address = (nodeToAsk.ip, nodeToAsk.port)
-        result = await self.find_value(address, self.sourceNode.id, nodeToFind.id)
+        result = await self.find_value(address, self.sourceNode.id,
+                                       nodeToFind.id)
         return self.handleCallResponse(result, nodeToAsk)

     async def callPing(self, nodeToAsk):
@@ -99,8 +105,10 @@ class KademliaProtocol(RPCProtocol):
         keynode = Node(digest(key))
         neighbors = self.router.findNeighbors(keynode)
         if len(neighbors) > 0:
-            newNodeClose = node.distanceTo(keynode) < neighbors[-1].distanceTo(keynode)
-            thisNodeClosest = self.sourceNode.distanceTo(keynode) < neighbors[0].distanceTo(keynode)
+            last = neighbors[-1].distanceTo(keynode)
+            newNodeClose = node.distanceTo(keynode) < last
+            first = neighbors[0].distanceTo(keynode)
+            thisNodeClosest = self.sourceNode.distanceTo(keynode) < first
         if len(neighbors) == 0 or (newNodeClose and thisNodeClosest):
             asyncio.ensure_future(self.callStore(node, key, value))
         self.router.addContact(node)
@@ -65,14 +65,15 @@ class KBucket(object):
         return True

     def depth(self):
-        sp = sharedPrefix([bytesToBitString(n.id) for n in self.nodes.values()])
+        vals = self.nodes.values()
+        sp = sharedPrefix([bytesToBitString(n.id) for n in vals])
         return len(sp)

     def head(self):
         return list(self.nodes.values())[0]

-    def __getitem__(self, id):
-        return self.nodes.get(id, None)
+    def __getitem__(self, node_id):
+        return self.nodes.get(node_id, None)

     def __len__(self):
         return len(self.nodes)
@@ -135,7 +136,8 @@ class RoutingTable(object):
         Get all of the buckets that haven't been updated in over
         an hour.
         """
-        return [b for b in self.buckets if b.lastUpdated < (time.time() - 3600)]
+        hrago = time.time() - 3600
+        return [b for b in self.buckets if b.lastUpdated < hrago]

     def removeContact(self, node):
         index = self.getBucketFor(node)
@@ -153,8 +155,8 @@ class RoutingTable(object):
         if bucket.addNode(node):
             return

-        # Per section 4.2 of paper, split if the bucket has the node in its range
-        # or if the depth is not congruent to 0 mod 5
+        # Per section 4.2 of paper, split if the bucket has the node
+        # in its range or if the depth is not congruent to 0 mod 5
         if bucket.hasInRange(self.node) or bucket.depth() % 5 != 0:
             self.splitBucket(index)
             self.addContact(node)
@@ -173,7 +175,8 @@ class RoutingTable(object):
         k = k or self.ksize
         nodes = []
         for neighbor in TableTraverser(self, node):
-            if neighbor.id != node.id and (exclude is None or not neighbor.sameHomeAs(exclude)):
+            notexcluded = exclude is None or not neighbor.sameHomeAs(exclude)
+            if neighbor.id != node.id and notexcluded:
                 heapq.heappush(nodes, (node.distanceTo(neighbor), neighbor))
             if len(nodes) == k:
                 break
@@ -9,31 +9,32 @@ class IStorage:
     Local storage for this node.
     """

-    def __setitem__(key, value):
+    def __setitem__(self, key, value):
         """
         Set a key to the given value.
         """
         raise NotImplementedError

-    def __getitem__(key):
+    def __getitem__(self, key):
         """
         Get the given key. If item doesn't exist, raises C{KeyError}
         """
         raise NotImplementedError

-    def get(key, default=None):
+    def get(self, key, default=None):
         """
         Get given key. If not found, return default.
         """
         raise NotImplementedError

-    def iteritemsOlderThan(secondsOld):
+    def iteritemsOlderThan(self, secondsOld):
         """
-        Return the an iterator over (key, value) tuples for items older than the given secondsOld.
+        Return the an iterator over (key, value) tuples for items older
+        than the given secondsOld.
         """
         raise NotImplementedError

-    def iteritems():
+    def __iter__(self):
         """
         Get the iterator for this storage, should yield tuple of (key, value)
         """
@@ -55,7 +56,7 @@ class ForgetfulStorage(IStorage):
         self.cull()

     def cull(self):
-        for k, v in self.iteritemsOlderThan(self.ttl):
+        for _, _ in self.iteritemsOlderThan(self.ttl):
             self.data.popitem(last=False)

     def get(self, key, default=None):
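The IStorage changes above add the missing self parameter so the interface methods are real instance methods, and rename iteritems to __iter__. A hypothetical implementation only has to fill those methods in; a minimal in-memory sketch (illustrative only, ForgetfulStorage remains the project's implementation):

    import time
    from kademlia.storage import IStorage

    class DictStorage(IStorage):
        def __init__(self):
            self.data = {}  # key -> (timestamp, value)

        def __setitem__(self, key, value):
            self.data[key] = (time.time(), value)

        def __getitem__(self, key):
            return self.data[key][1]

        def get(self, key, default=None):
            return self.data[key][1] if key in self.data else default

        def iteritemsOlderThan(self, secondsOld):
            cutoff = time.time() - secondsOld
            return [(k, v) for k, (t, v) in self.data.items() if t <= cutoff]

        def __iter__(self):
            return iter((k, v) for k, (_, v) in self.data.items())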
@@ -8,8 +8,9 @@ class SwappableProtocolTests(unittest.TestCase):

     def test_default_protocol(self):
         """
-        An ordinary Server object will initially not have a protocol, but will have a KademliaProtocol
-        object as its protocol after its listen() method is called.
+        An ordinary Server object will initially not have a protocol, but will
+        have a KademliaProtocol object as its protocol after its listen()
+        method is called.
         """
         server = Server()
         self.assertIsNone(server.protocol)
@@ -19,8 +20,9 @@ class SwappableProtocolTests(unittest.TestCase):

     def test_custom_protocol(self):
         """
-        A subclass of Server which overrides the protocol_class attribute will have an instance
-        of that class as its protocol after its listen() method is called.
+        A subclass of Server which overrides the protocol_class attribute will
+        have an instance of that class as its protocol after its listen()
+        method is called.
         """

         # Make a custom Protocol and Server to go with hit.
@@ -40,4 +42,4 @@ class SwappableProtocolTests(unittest.TestCase):
         husk_server = HuskServer()
         husk_server.listen(8469)
         self.assertIsInstance(husk_server.protocol, CoconutProtocol)
-        server.stop()
+        husk_server.stop()
@@ -9,86 +9,20 @@ from kademlia.node import Node
 from kademlia.routing import RoutingTable


-def mknode(id=None, ip=None, port=None, intid=None):
+def mknode(node_id=None, ip=None, port=None, intid=None):
     """
     Make a node. Created a random id if not specified.
     """
     if intid is not None:
-        id = pack('>l', intid)
-    id = id or hashlib.sha1(str(random.getrandbits(255)).encode()).digest()
-    return Node(id, ip, port)
+        node_id = pack('>l', intid)
+    if not node_id:
+        randbits = str(random.getrandbits(255))
+        node_id = hashlib.sha1(randbits.encode()).digest()
+    return Node(node_id, ip, port)


-class FakeProtocol(object):
+class FakeProtocol:
     def __init__(self, sourceID, ksize=20):
         self.router = RoutingTable(self, ksize, Node(sourceID))
         self.storage = {}
         self.sourceID = sourceID
-
-    def getRefreshIDs(self):
-        """
-        Get ids to search for to keep old buckets up to date.
-        """
-        ids = []
-        for bucket in self.router.getLonelyBuckets():
-            ids.append(random.randint(*bucket.range))
-        return ids
-
-    def rpc_ping(self, sender, nodeid):
-        source = Node(nodeid, sender[0], sender[1])
-        self.router.addContact(source)
-        return self.sourceID
-
-    def rpc_store(self, sender, nodeid, key, value):
-        source = Node(nodeid, sender[0], sender[1])
-        self.router.addContact(source)
-        self.log.debug("got a store request from %s, storing value" % str(sender))
-        self.storage[key] = value
-
-    def rpc_find_node(self, sender, nodeid, key):
-        self.log.info("finding neighbors of %i in local table" % long(nodeid.encode('hex'), 16))
-        source = Node(nodeid, sender[0], sender[1])
-        self.router.addContact(source)
-        node = Node(key)
-        return map(tuple, self.router.findNeighbors(node, exclude=source))
-
-    def rpc_find_value(self, sender, nodeid, key):
-        source = Node(nodeid, sender[0], sender[1])
-        self.router.addContact(source)
-        value = self.storage.get(key, None)
-        if value is None:
-            return self.rpc_find_node(sender, nodeid, key)
-        return { 'value': value }
-
-    def callFindNode(self, nodeToAsk, nodeToFind):
-        address = (nodeToAsk.ip, nodeToAsk.port)
-        d = self.find_node(address, self.sourceID, nodeToFind.id)
-        return d.addCallback(self.handleCallResponse, nodeToAsk)
-
-    def callFindValue(self, nodeToAsk, nodeToFind):
-        address = (nodeToAsk.ip, nodeToAsk.port)
-        d = self.find_value(address, self.sourceID, nodeToFind.id)
-        return d.addCallback(self.handleCallResponse, nodeToAsk)
-
-    def callPing(self, nodeToAsk):
-        address = (nodeToAsk.ip, nodeToAsk.port)
-        d = self.ping(address, self.sourceID)
-        return d.addCallback(self.handleCallResponse, nodeToAsk)
-
-    def callStore(self, nodeToAsk, key, value):
-        address = (nodeToAsk.ip, nodeToAsk.port)
-        d = self.store(address, self.sourceID, key, value)
-        return d.addCallback(self.handleCallResponse, nodeToAsk)
-
-    def handleCallResponse(self, result, node):
-        """
-        If we get a response, add the node to the routing table. If
-        we get no response, make sure it's removed from the routing table.
-        """
-        if result[0]:
-            self.log.info("got response from %s, adding to router" % node)
-            self.router.addContact(node)
-        else:
-            self.log.debug("no response from %s, removing from router" % node)
-            self.router.removeContact(node)
-        return result
@@ -20,7 +20,8 @@ def digest(s):

 class OrderedSet(list):
     """
-    Acts like a list in all ways, except in the behavior of the :meth:`push` method.
+    Acts like a list in all ways, except in the behavior of the
+    :meth:`push` method.
     """

     def push(self, thing):
@@ -51,6 +52,6 @@ def sharedPrefix(args):
     return args[0][:i]


-def bytesToBitString(bytes):
-    bits = [bin(byte)[2:].rjust(8, '0') for byte in bytes]
+def bytesToBitString(bites):
+    bits = [bin(bite)[2:].rjust(8, '0') for bite in bites]
     return "".join(bits)
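The final hunk only renames the parameter so it no longer shadows the built-in bytes, the same motivation as the id -> node_id renames elsewhere in the commit; behaviour is unchanged, e.g.:

    >>> bytesToBitString(b'\x01\x80')
    '0000000110000000'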