PKvN}ipfshttpclient/__init__.py"""Python IPFS HTTP CLIENT library""" from __future__ import absolute_import from .version import __version__ ################################### # Import stable HTTP CLIENT parts # ################################### from . import exceptions from .client import DEFAULT_ADDR, DEFAULT_BASE from .client import VERSION_MINIMUM, VERSION_MAXIMUM from .client import Client, assert_version, connect PKvNk@  ipfshttpclient/encoding.py# -*- encoding: utf-8 -*- """Defines encoding related classes. .. note:: The XML and ProtoBuf encoders are currently not functional. """ from __future__ import absolute_import import abc import codecs import json import six from . import exceptions class Encoding(object): """Abstract base for a data parser/encoder interface. """ __metaclass__ = abc.ABCMeta is_stream = False @abc.abstractmethod def parse_partial(self, raw): """Parses the given data and yields all complete data sets that can be built from this. Raises ------ ~ipfshttpclient.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- generator """ def parse_finalize(self): """Finalizes parsing based on remaining buffered data and yields the remaining data sets. Raises ------ ~ipfshttpclient.exceptions.DecodingError Returns ------- generator """ return () def parse(self, raw): """Returns a Python object decoded from the bytes of this encoding. Raises ------ ~ipfshttpclient.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- object """ results = list(self.parse_partial(raw)) results.extend(self.parse_finalize()) return results[0] if len(results) == 1 else results @abc.abstractmethod def encode(self, obj): """Serialize a raw object into corresponding encoding. Raises ------ ~ipfshttpclient.exceptions.EncodingError Parameters ---------- obj : object Object to be encoded """ class Dummy(Encoding): """Dummy parser/encoder that does nothing. """ name = "none" is_stream = True def parse_partial(self, raw): """Yields the data passed into this method. Parameters ---------- raw : bytes Any kind of data Returns ------- generator """ yield raw def encode(self, obj): """Returns the bytes representation of the data passed into this function. Parameters ---------- obj : object Any Python object Returns ------- bytes """ return six.b(str(obj)) class Json(Encoding): """JSON parser/encoder that handles concatenated JSON. """ name = 'json' def __init__(self): self._buffer = [] self._decoder1 = codecs.getincrementaldecoder('utf-8')() self._decoder2 = json.JSONDecoder() self._lasterror = None def parse_partial(self, data): """Incrementally decodes JSON data sets into Python objects. Raises ------ ~ipfshttpclient.exceptions.DecodingError Returns ------- generator """ try: # Python 3 requires all JSON data to be a text string lines = self._decoder1.decode(data, False).split("\n") # Add first input line to last buffer line, if applicable, to # handle cases where the JSON string has been chopped in half # at the network level due to streaming if len(self._buffer) > 0 and self._buffer[-1] is not None: self._buffer[-1] += lines[0] self._buffer.extend(lines[1:]) else: self._buffer.extend(lines) except UnicodeDecodeError as error: six.raise_from(exceptions.DecodingError('json', error), error) # Process data buffer index = 0 try: # Process each line as separate buffer #PERF: This way the `.lstrip()` call becomes almost always a NOP # even if it does return a different string it will only # have to allocate a new buffer for the currently processed # line. while index < len(self._buffer): while self._buffer[index]: # Make sure buffer does not start with whitespace #PERF: `.lstrip()` does not reallocate if the string does # not actually start with whitespace. self._buffer[index] = self._buffer[index].lstrip() # Handle case where the remainder of the line contained # only whitespace if not self._buffer[index]: self._buffer[index] = None continue # Try decoding the partial data buffer and return results # from this data = self._buffer[index] for index2 in range(index, len(self._buffer)): # If decoding doesn't succeed with the currently # selected buffer (very unlikely with our current # class of input data) then retry with appending # any other pending pieces of input data # This will happen with JSON data that contains # arbitrary new-lines: "{1:\n2,\n3:4}" if index2 > index: data += "\n" + self._buffer[index2] try: (obj, offset) = self._decoder2.raw_decode(data) except ValueError: # Treat error as fatal if we have already added # the final buffer to the input if (index2 + 1) == len(self._buffer): raise else: index = index2 break # Decoding succeeded – yield result and shorten buffer yield obj if offset < len(self._buffer[index]): self._buffer[index] = self._buffer[index][offset:] else: self._buffer[index] = None index += 1 except ValueError as error: # It is unfortunately not possible to reliably detect whether # parsing ended because of an error *within* the JSON string, or # an unexpected *end* of the JSON string. # We therefor have to assume that any error that occurs here # *might* be related to the JSON parser hitting EOF and therefor # have to postpone error reporting until `parse_finalize` is # called. self._lasterror = error finally: # Remove all processed buffers del self._buffer[0:index] def parse_finalize(self): """Raises errors for incomplete buffered data that could not be parsed because the end of the input data has been reached. Raises ------ ~ipfshttpclient.exceptions.DecodingError Returns ------- tuple : Always empty """ try: try: # Raise exception for remaining bytes in bytes decoder self._decoder1.decode(b'', True) except UnicodeDecodeError as error: six.raise_from(exceptions.DecodingError('json', error), error) # Late raise errors that looked like they could have been fixed if # the caller had provided more data if self._buffer: six.raise_from(exceptions.DecodingError('json', self._lasterror), self._lasterror) finally: # Reset state self._buffer = [] self._lasterror = None self._decoder1.reset() return () def encode(self, obj): """Returns ``obj`` serialized as JSON formatted bytes. Raises ------ ~ipfshttpclient.exceptions.EncodingError Parameters ---------- obj : Union[str, list, dict, int] JSON serializable Python object Returns ------- bytes """ try: result = json.dumps(obj, sort_keys=True, indent=None, separators=(',', ':'), ensure_ascii=False) if isinstance(result, six.text_type): #PY3 return result.encode("utf-8") else: #PY2 return result except (UnicodeEncodeError, TypeError) as error: six.raise_from(exceptions.EncodingError('json', error), error) class Protobuf(Encoding): """Protobuf parser/encoder that handles protobuf.""" name = 'protobuf' class Xml(Encoding): """XML parser/encoder that handles XML.""" name = 'xml' # encodings supported by the IPFS api (default is JSON) __encodings = { Dummy.name: Dummy, Json.name: Json, Protobuf.name: Protobuf, Xml.name: Xml } def get_encoding(name): """ Returns an Encoder object for the named encoding Raises ------ ~ipfshttpclient.exceptions.EncoderMissingError Parameters ---------- name : str Encoding name. Supported options: * ``"none"`` * ``"json"`` * ``"protobuf"`` * ``"xml"`` """ try: return __encodings[name.lower()]() except KeyError: six.raise_from(exceptions.EncoderMissingError(name), None) PKvN4ipfshttpclient/exceptions.py# -*- coding: utf-8 -*- """ The class hierachy for exceptions is:: Error +-- VersionMismatch +-- AddressError +-- EncoderError | +-- EncoderMissingError | +-- EncodingError | +-- DecodingError +-- CommunicationError +-- ProtocolError +-- StatusError +-- ErrorResponse +-- PartialErrorResponse +-- ConnectionError +-- TimeoutError """ import multiaddr.exceptions class Error(Exception): """Base class for all exceptions in this module.""" pass class AddressError(Error, multiaddr.exceptions.Error): """Raised when the provided daemon location Multiaddr does not match any of the supported patterns.""" def __init__(self, addr): self.addr = addr Error.__init__(self, "Unsupported MultiAddr pattern: {0}".format(addr)) class VersionMismatch(Error): """Raised when daemon version is not supported by this client version.""" def __init__(self, current, minimum, maximum): self.current = current self.minimum = minimum self.maximum = maximum msg = "Unsupported daemon version '{}' (not in range: {} – {})".format( current, minimum, maximum ) Error.__init__(self, msg) ############### # encoding.py # ############### class EncoderError(Error): """Base class for all encoding and decoding related errors.""" def __init__(self, message, encoder_name): self.encoder_name = encoder_name Error.__init__(self, message) class EncoderMissingError(EncoderError): """Raised when a requested encoder class does not actually exist.""" def __init__(self, encoder_name): msg = "Unknown encoder: '{}'".format(encoder_name) EncoderError.__init__(self, msg, encoder_name) class EncodingError(EncoderError): """Raised when encoding a Python object into a byte string has failed due to some problem with the input data.""" def __init__(self, encoder_name, original): self.original = original msg = "Object encoding error: {}".format(original) EncoderError.__init__(self, msg, encoder_name) class DecodingError(EncoderError): """Raised when decoding a byte string to a Python object has failed due to some problem with the input data.""" def __init__(self, encoder_name, original): self.original = original msg = "Object decoding error: {}".format(original) EncoderError.__init__(self, msg, encoder_name) ########### # http.py # ########### class CommunicationError(Error): """Base class for all network communication related errors.""" def __init__(self, original, _message=None): self.original = original if _message: msg = _message else: msg = "{}: {}".format(original.__class__.__name__, str(original)) Error.__init__(self, msg) class ProtocolError(CommunicationError): """Raised when parsing the response from the daemon has failed. This can most likely occur if the service on the remote end isn't in fact an IPFS daemon.""" class StatusError(CommunicationError): """Raised when the daemon responds with an error to our request.""" class ErrorResponse(StatusError): """Raised when the daemon has responded with an error message because the requested operation could not be carried out.""" def __init__(self, message, original): StatusError.__init__(self, original, message) class PartialErrorResponse(ErrorResponse): """Raised when the daemon has responded with an error message after having already returned some data. The incomplete data returned may be accessed using the ``partial`` attribute.""" def __init__(self, message, original, partial): self.partial = partial ErrorResponse.__init__(self, message, original) class ConnectionError(CommunicationError): """Raised when connecting to the service has failed on the socket layer.""" class TimeoutError(CommunicationError): """Raised when the daemon didn't respond in time.""" PKN`822ipfshttpclient/http.py# -*- encoding: utf-8 -*- """HTTP client for api requests. This is pluggable into the IPFS Api client and will hopefully be supplemented by an asynchronous version. """ from __future__ import absolute_import import abc import functools import tarfile from six.moves import http_client import os import socket try: #PY3 import urllib.parse except ImportError: #PY2 class urllib: import urlparse as parse import multiaddr from multiaddr.protocols import (P_DNS, P_DNS4, P_DNS6, P_HTTP, P_HTTPS, P_IP4, P_IP6, P_TCP) import six from . import encoding from . import exceptions PATCH_REQUESTS = (os.environ.get("PY_IPFS_HTTP_CLIENT_PATCH_REQUESTS", "yes").lower() not in ("false", "no")) if PATCH_REQUESTS: from . import requests_wrapper as requests else: # pragma: no cover (always enabled in production) import requests def pass_defaults(func): """Decorator that returns a function named wrapper. When invoked, wrapper invokes func with default kwargs appended. Parameters ---------- func : callable The function to append the default kwargs to """ @functools.wraps(func) def wrapper(self, *args, **kwargs): merged = {} merged.update(self.defaults) merged.update(kwargs) return func(self, *args, **merged) return wrapper def _notify_stream_iter_closed(): pass # Mocked by unit tests to determine check for proper closing class StreamDecodeIterator(object): """ Wrapper around `Iterable` that allows the iterable to be used in a context manager (`with`-statement) allowing for easy cleanup. """ def __init__(self, response, parser): self._response = response self._parser = parser self._response_iter = response.iter_content(chunk_size=None) self._parser_iter = None def __iter__(self): return self def __next__(self): while True: # Try reading for current parser iterator if self._parser_iter is not None: try: result = next(self._parser_iter) # Detect late error messages that occured after some data # has already been sent if isinstance(result, dict) and result.get("Type") == "error": msg = result["Message"] raise exceptions.PartialErrorResponse(msg, None, []) return result except StopIteration: self._parser_iter = None # Forward exception to caller if we do not expect any # further data if self._response_iter is None: raise try: data = next(self._response_iter) # Create new parser iterator using the newly recieved data self._parser_iter = iter(self._parser.parse_partial(data)) except StopIteration: # No more data to receive – destroy response iterator and # iterate over the final fragments returned by the parser self._response_iter = None self._parser_iter = iter(self._parser.parse_finalize()) #PY2: Old iterator syntax next = __next__ def __enter__(self): return self def __exit__(self, *a): self.close() def close(self): # Clean up any open iterators first if self._response_iter is not None: self._response_iter.close() if self._parser_iter is not None: self._parser_iter.close() self._response_iter = None self._parser_iter = None # Clean up response object and parser if self._response is not None: self._response.close() self._response = None self._parser = None _notify_stream_iter_closed() def stream_decode_full(response, parser): with StreamDecodeIterator(response, parser) as response_iter: # Collect all responses result = list(response_iter) # Return byte streams concatenated into one message, instead of split # at arbitrary boundaries if parser.is_stream: return b"".join(result) return result class HTTPClient(object): """An HTTP client for interacting with the IPFS daemon. Parameters ---------- addr : Union[str, multiaddr.Multiaddr] The address where the IPFS daemon may be reached base : str The path prefix for API calls timeout : Union[numbers.Real, Tuple[numbers.Real, numbers.Real], NoneType] The default number of seconds to wait when establishing a connection to the daemon and waiting for returned data before throwing :exc:`~ipfshttpclient.exceptions.TimeoutError`; if the value is a tuple its contents will be interpreted as the values for the connection and receiving phases respectively, otherwise the value will apply to both phases; if the value is ``None`` then all timeouts will be disabled defaults : dict The default parameters to be passed to :meth:`~ipfshttpclient.http.HTTPClient.request` """ __metaclass__ = abc.ABCMeta def __init__(self, addr, base, **defaults): addr = multiaddr.Multiaddr(addr) addr_iter = iter(addr.items()) # Parse the `host`, `family`, `port` & `secure` values from the given # multiaddr, raising on unsupported `addr` values try: # Read host value proto, host = next(addr_iter) family = socket.AF_UNSPEC if proto.code in (P_IP4, P_DNS4): family = socket.AF_INET elif proto.code in (P_IP6, P_DNS6): family = socket.AF_INET6 elif proto.code != P_DNS: raise exceptions.AddressError(addr) # Read port value proto, port = next(addr_iter) if proto.code != P_TCP: raise exceptions.AddressError(addr) # Read application-level protocol name secure = False try: proto, value = next(addr_iter) except StopIteration: pass else: if proto.code == P_HTTPS: secure = True elif proto.code != P_HTTP: raise exceptions.AddressError(addr) # No further values may follow; this also exhausts the iterator was_final = all(True for _ in addr_iter) if not was_final: raise exceptions.AddressError(addr) except StopIteration: six.raise_from(exceptions.AddressError(addr), None) # Convert the parsed `addr` values to a URL base and parameters # for `requests` if ":" in host and not host.startswith("["): host = "[{0}]".format(host) self.base = urllib.parse.SplitResult( scheme = "http" if not secure else "https", netloc = "{0}:{1}".format(host, port), path = base, query = "", fragment = "" ).geturl() self._kwargs = {} if PATCH_REQUESTS: # pragma: no branch (always enabled in production) self._kwargs["family"] = family self.defaults = defaults self._session = None def open_session(self): """Open a persistent backend session that allows reusing HTTP connections between individual HTTP requests. It is an error to call this function if a session is already open.""" assert self._session is None self._session = requests.Session() def close_session(self): """Close a session opened by :meth:`~ipfshttpclient.http.HTTPClient.open_session`. If there is no session currently open (ie: it was already closed), then this method does nothing.""" if self._session is not None: self._session.close() self._session = None def _do_request(self, *args, **kwargs): for name, value in self._kwargs.items(): kwargs.setdefault(name, value) try: if self._session: return self._session.request(*args, **kwargs) else: return requests.request(*args, **kwargs) except (requests.ConnectTimeout, requests.Timeout) as error: six.raise_from(exceptions.TimeoutError(error), error) except requests.ConnectionError as error: six.raise_from(exceptions.ConnectionError(error), error) except http_client.HTTPException as error: six.raise_from(exceptions.ProtocolError(error), error) def _do_raise_for_status(self, response): try: response.raise_for_status() except requests.exceptions.HTTPError as error: content = [] try: decoder = encoding.get_encoding("json") for chunk in response.iter_content(chunk_size=None): content += list(decoder.parse_partial(chunk)) content += list(decoder.parse_finalize()) except exceptions.DecodingError: pass # If we have decoded an error response from the server, # use that as the exception message; otherwise, just pass # the exception on to the caller. if len(content) == 1 \ and isinstance(content[0], dict) \ and "Message" in content[0]: msg = content[0]["Message"] six.raise_from(exceptions.ErrorResponse(msg, error), error) else: six.raise_from(exceptions.StatusError(error), error) def _request(self, method, url, params, parser, stream=False, files=None, headers={}, data=None, timeout=120): # Do HTTP request (synchronously) res = self._do_request(method, url, params=params, stream=stream, files=files, headers=headers, data=data, timeout=timeout) # Raise exception for response status # (optionally incorpating the response message, if applicable) self._do_raise_for_status(res) if stream: # Decode each item as it is read return StreamDecodeIterator(res, parser) else: # Decode received item immediately return stream_decode_full(res, parser) @pass_defaults def request(self, path, args=[], files=[], opts={}, stream=False, decoder=None, headers={}, data=None, timeout=120, offline=False, return_result=True): """Makes an HTTP request to the IPFS daemon. This function returns the contents of the HTTP response from the IPFS daemon. Raises ------ ~ipfshttpclient.exceptions.ErrorResponse ~ipfshttpclient.exceptions.ConnectionError ~ipfshttpclient.exceptions.ProtocolError ~ipfshttpclient.exceptions.StatusError ~ipfshttpclient.exceptions.TimeoutError Parameters ---------- path : str The REST command path to send args : list Positional parameters to be sent along with the HTTP request files : Union[str, io.RawIOBase, collections.abc.Iterable] The file object(s) or path(s) to stream to the daemon opts : dict Query string paramters to be sent along with the HTTP request decoder : str The encoder to use to parse the HTTP response timeout : float How many seconds to wait for the server to send data before giving up Defaults to 120 offline : bool Execute request in offline mode, i.e. locally without accessing the network. return_result : bool Defaults to True. If the return is not relevant, such as in gc(), passing False will return None and avoid downloading results. """ url = self.base + path params = [] params.append(('stream-channels', 'true')) if offline: params.append(('offline', 'true')) for opt in opts.items(): params.append(opt) for arg in args: params.append(('arg', arg)) if (files or data): method = 'post' elif not return_result: method = 'head' else: method = 'get' # Don't attempt to decode response or stream # (which would keep an iterator open that will then never be waited for) if not return_result: decoder = None stream = False parser = encoding.get_encoding(decoder if decoder else "none") ret = self._request(method, url, params, parser, stream, files, headers, data, timeout=timeout) return ret if return_result else None @pass_defaults def download(self, path, args=[], filepath=None, opts={}, compress=True, timeout=120, offline=False): """Makes a request to the IPFS daemon to download a file. Downloads a file or files from IPFS into the current working directory, or the directory given by ``filepath``. Raises ------ ~ipfshttpclient.exceptions.ErrorResponse ~ipfshttpclient.exceptions.ConnectionError ~ipfshttpclient.exceptions.ProtocolError ~ipfshttpclient.exceptions.StatusError ~ipfshttpclient.exceptions.TimeoutError Parameters ---------- path : str The REST command path to send filepath : str The local path where IPFS will store downloaded files Defaults to the current working directory. args : list Positional parameters to be sent along with the HTTP request opts : dict Query string paramters to be sent along with the HTTP request compress : bool Whether the downloaded file should be GZip compressed by the daemon before being sent to the client timeout : float How many seconds to wait for the server to send data before giving up Defaults to 120 offline : bool Execute request in offline mode, i.e. locally without accessing the network. """ url = self.base + path wd = filepath or '.' params = [] params.append(('stream-channels', 'true')) if offline: params.append(('offline', 'true')) params.append(('archive', 'true')) if compress: params.append(('compress', 'true')) for opt in opts.items(): params.append(opt) for arg in args: params.append(('arg', arg)) method = 'get' res = self._do_request(method, url, params=params, stream=True, timeout=timeout) self._do_raise_for_status(res) # try to stream download as a tar file stream mode = 'r|gz' if compress else 'r|' with tarfile.open(fileobj=res.raw, mode=mode) as tf: tf.extractall(path=wd) PKvN)]]ipfshttpclient/multipart.py"""HTTP :mimetype:`multipart/*`-encoded file streaming. """ from __future__ import absolute_import import abc import inspect import os import re from six.moves import urllib import uuid import six from . import utils default_chunk_size = 4096 #PY34: String formattings for binary types not supported if hasattr(six.binary_type, "__mod__"): #PY35+ def bytes_fmt(b, *a): return b % a else: #PY34 def bytes_fmt(base, *args): # Decode each argument as ISO-8859-1 which causes each by to be # reinterpreted as character base = base.decode("iso-8859-1") args = tuple(map(lambda b: bytes(b).decode("iso-8859-1"), args)) # Apply format and convert back return (base % args).encode("iso-8859-1") def content_disposition_headers(filename, disptype="form-data"): """Returns a dict containing the MIME content-disposition header for a file. .. code-block:: python >>> content_disposition_headers('example.txt') {'Content-Disposition': 'form-data; filename="example.txt"'} >>> content_disposition_headers('example.txt', 'attachment') {'Content-Disposition': 'attachment; filename="example.txt"'} Parameters ---------- filename : str Filename to retrieve the MIME content-disposition for disptype : str Rhe disposition type to use for the file """ disp = '%s; filename="%s"' % ( disptype, urllib.parse.quote(filename, safe='') ) return {'Content-Disposition': disp} def content_type_headers(filename, content_type=None): """Returns a dict with the content-type header for a file. Guesses the mimetype for a filename and returns a dict containing the content-type header. .. code-block:: python >>> content_type_headers('example.txt') {'Content-Type': 'text/plain'} >>> content_type_headers('example.jpeg') {'Content-Type': 'image/jpeg'} >>> content_type_headers('example') {'Content-Type': 'application/octet-stream'} Parameters ---------- filename : str Filename to guess the content-type for content_type : str The Content-Type to use; if not set a content type will be guessed """ return {'Content-Type': content_type if content_type else utils.guess_mimetype(filename)} def multipart_content_type_headers(boundary, subtype='mixed'): """Creates a MIME multipart header with the given configuration. Returns a dict containing a MIME multipart header with the given boundary. .. code-block:: python >>> multipart_content_type_headers('8K5rNKlLQVyreRNncxOTeg') {'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'} >>> multipart_content_type_headers('8K5rNKlLQVyreRNncxOTeg', 'alt') {'Content-Type': 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'} Parameters ---------- boundary : str The content delimiter to put into the header subtype : str The subtype in :mimetype:`multipart/*`-domain to put into the header """ ctype = 'multipart/%s; boundary="%s"' % ( subtype, boundary ) return {'Content-Type': ctype} class StreamBase(object): """Generator that encodes multipart/form-data. An abstract buffered generator class which encodes :mimetype:`multipart/form-data`. Parameters ---------- name : str The name of the file to encode chunk_size : int The maximum size that any single file chunk may have in bytes """ __metaclass__ = abc.ABCMeta def __init__(self, name, chunk_size=default_chunk_size): self.chunk_size = chunk_size self.name = name self._boundary = uuid.uuid4().hex self._headers = content_disposition_headers(name, disptype='form-data') self._headers.update(multipart_content_type_headers(self._boundary, subtype='form-data')) #WORKAROUND: Go-IPFS randomly fucks up streaming requests if they are not # `Connection: close` (https://github.com/ipfs/go-ipfs/issues/5168) self._headers["Connection"] = "close" super(StreamBase, self).__init__() def headers(self): return self._headers.copy() @abc.abstractmethod def _body(self, *args, **kwargs): """Yields the body of this stream with chunks of undefined size. """ def body(self, *args, **kwargs): """Yields the body of this stream. """ # Cap all returned body chunks to the given chunk size #PY2: Use `yield from` instead for chunk in self._gen_chunks(self._body()): yield chunk def _gen_headers(self, headers): """Yields the HTTP header text for some content. Parameters ---------- headers : dict The headers to yield """ for name, value in sorted(headers.items(), key=lambda i: i[0]): yield bytes_fmt(b"%s: %s\r\n", name.encode("ascii"), value.encode("utf-8")) yield b"\r\n" def _gen_chunks(self, gen): """Generates byte chunks of a given size. Takes a bytes generator and yields chunks of a maximum of ``chunk_size`` bytes. Parameters ---------- gen : generator The bytes generator that produces the bytes """ for data in gen: #PERF: This is zero-copy if `len(data) <= self.chunk_size` for offset in range(0, len(data), self.chunk_size): yield data[offset:self.chunk_size] def _gen_item_start(self): """Yields the body section for the content. """ yield bytes_fmt(b"--%s\r\n", self._boundary.encode("ascii")) def _gen_item_end(self): """Yields the body section for the content. """ yield b"\r\n" def _gen_end(self): """Yields the closing text of a multipart envelope.""" yield bytes_fmt(b'--%s--\r\n', self._boundary.encode("ascii")) class StreamFileMixin(object): def _gen_file(self, filename, file_location=None, file=None, content_type=None): """Yields the entire contents of a file. Parameters ---------- filename : str Filename of the file being opened and added to the HTTP body file_location : str Full path to the file being added, including the filename file : io.RawIOBase The binary file-like object whose contents should be streamed No contents will be streamed if this is ``None``. content_type : str The Content-Type of the file; if not set a value will be guessed """ #PY2: Use `yield from` instead for chunk in self._gen_file_start(filename, file_location, content_type): yield chunk if file: for chunk in self._gen_file_chunks(file): yield chunk for chunk in self._gen_file_end(): yield chunk def _gen_file_start(self, filename, file_location=None, content_type=None): """Yields the opening text of a file section in multipart HTTP. Parameters ---------- filename : str Filename of the file being opened and added to the HTTP body file_location : str Full path to the file being added, including the filename content_type : str The Content-Type of the file; if not set a value will be guessed """ #PY2: Use `yield from` instead for chunk in self._gen_item_start(): yield chunk headers = content_disposition_headers(filename.replace(os.sep, "/"), disptype="file") headers.update(content_type_headers(filename, content_type)) if file_location and os.path.isabs(file_location): headers.update({"Abspath": file_location}) #PY2: Use `yield from` instead for chunk in self._gen_headers(headers): yield chunk def _gen_file_chunks(self, file): """Yields chunks of a file. Parameters ---------- fp : io.RawIOBase The file to break into chunks (must be an open file or have the ``readinto`` method) """ while True: buf = file.read(self.chunk_size) if len(buf) < 1: break yield buf def _gen_file_end(self): """Yields the end text of a file section in HTTP multipart encoding.""" return self._gen_item_end() class FilesStream(StreamBase, StreamFileMixin): """Generator that encodes multiples files into HTTP multipart. A buffered generator that encodes an array of files as :mimetype:`multipart/form-data`. This is a concrete implementation of :class:`~ipfsapi.multipart.StreamBase`. Parameters ---------- files : Union[str, bytes, os.PathLike, io.IOBase, int, collections.abc.Iterable] The name, file object or file descriptor of the file to encode; may also be a list of several items to allow for more efficient batch processing chunk_size : int The maximum size that any single file chunk may have in bytes """ def __init__(self, files, name="files", chunk_size=default_chunk_size): self.files = utils.clean_files(files) super(FilesStream, self).__init__(name, chunk_size=chunk_size) def _body(self): """Yields the body of the buffered file.""" for file, need_close in self.files: try: try: file_location = file.name filename = os.path.basename(file_location) except AttributeError: file_location = None filename = '' #PY2: Use `yield from` instead for chunk in self._gen_file(filename, file_location, file): yield chunk finally: if need_close: file.close() #PY2: Use `yield from` instead for chunk in self._gen_end(): yield chunk def glob_compile(pat): """Translate a shell glob PATTERN to a regular expression. Source code taken from the `fnmatch.translate` function of the python 3.7 standard-library with the glob-style modification of making `*` non-recursive and the adding `**` as recursive matching operator. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i + 1 if c == '/' and len(pat) > (i + 2) and pat[i:(i + 3)] == '**/': # Special-case for "any number of sub-directories" operator since # may also expand to no entries: # Otherwise `a/**/b` would expand to `a[/].*[/]b` which wouldn't # match the immediate sub-directories of `a`, like `a/b`. i = i + 3 res = res + '[/]([^/]*[/])*' elif c == '*': if len(pat) > i and pat[i] == '*': i = i + 1 res = res + '.*' else: res = res + '[^/]*' elif c == '?': res = res + '[^/]' elif c == '[': j = i if j < n and pat[j] == '!': j = j + 1 if j < n and pat[j] == ']': j = j + 1 while j < n and pat[j] != ']': j = j + 1 if j >= n: res = res + '\\[' else: stuff = pat[i:j] if '--' not in stuff: stuff = stuff.replace('\\', r'\\') else: chunks = [] k = i + 2 if pat[i] == '!' else i + 1 while True: k = pat.find('-', k, j) if k < 0: break chunks.append(pat[i:k]) i = k + 1 k = k + 3 chunks.append(pat[i:j]) # Escape backslashes and hyphens for set difference (--). # Hyphens that create ranges shouldn't be escaped. stuff = '-'.join(s.replace('\\', r'\\').replace('-', r'\-') for s in chunks) # Escape set operations (&&, ~~ and ||). stuff = re.sub(r'([&~|])', r'\\\1', stuff) i = j + 1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] in ('^', '['): stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) return re.compile(r'^' + res + r'\Z$', flags=re.M | re.S) class DirectoryStream(StreamBase, StreamFileMixin): """Generator that encodes a directory into HTTP multipart. A buffered generator that encodes an array of files as :mimetype:`multipart/form-data`. This is a concrete implementation of :class:`~ipfshttpclient.multipart.StreamBase`. Parameters ---------- directory : Union[str, os.PathLike, int] The filepath or file descriptor of the directory to encode File descriptors are only supported on Unix and Python 3. dirname : Union[str, None] The name of the base directroy to upload, use ``None`` for the default of ``os.path.basename(directory) or '.'`` patterns : Union[str, re.compile, collections.abc.Iterable] A single glob pattern or a list of several glob patterns and compiled regular expressions used to determine which filepaths to match chunk_size : int The maximum size that any single file chunk may have in bytes """ def __init__(self, directory, recursive=False, patterns='**', dirname=None, chunk_size=default_chunk_size): self.patterns = [] patterns = [patterns] if isinstance(patterns, str) else patterns for pattern in patterns: self.patterns.append(glob_compile(pattern) if isinstance(pattern, str) else pattern) self.directory = utils.convert_path(directory) if not isinstance(self.directory, int): self.directory = os.path.normpath(self.directory) self.recursive = recursive self.dirname = dirname name = os.path.basename(self.directory) if not isinstance(self.directory, int) else "" super(DirectoryStream, self).__init__(name, chunk_size=chunk_size) def _body_directory(self, short_path, visited_directories): # Do not continue if this directory has already been added if short_path in visited_directories: return # Scan for first super-directory that has already been added dir_base = short_path dir_parts = [] while dir_base: dir_base, dir_name = os.path.split(dir_base) dir_parts.append(dir_name) if dir_base in visited_directories: break # Add missing intermediate directory nodes in the right order while dir_parts: dir_base = os.path.join(dir_base, dir_parts.pop()) # Generate directory as special empty file #PY2: Use `yield from` instead for chunk in self._gen_file(dir_base, content_type="application/x-directory"): yield chunk # Remember that this directory has already been sent visited_directories.add(dir_base) def _body_file(self, short_path, file_location, dir_fd=-1): try: if dir_fd >= 0: f_path_or_desc = os.open(file_location, os.O_RDONLY | os.O_CLOEXEC, dir_fd=dir_fd) else: f_path_or_desc = file_location # Stream file to client with open(f_path_or_desc, "rb") as file: #PY2: Use `yield from` for chunk in self._gen_file(short_path, file_location, file): yield chunk except OSError as e: print(e) # File might have disappeared between `os.walk()` and `open()` pass def _body(self): """Streams the contents of the selected directory as binary chunks.""" def match_short_path(short_path): # Remove initial path component so that all files are based in # the target directory itself (not one level above) if os.path.sep in short_path: path = short_path.split(os.path.sep, 1)[1] else: return False # Convert all path seperators to POSIX style path = path.replace(os.path.sep, '/') # Do the matching and the simplified path for pattern in self.patterns: if pattern.match(path): return True return False visited_directories = set() # Normalize directory path without destroying symlinks sep = os.path.sep directory = self.directory if not isinstance(self.directory, int): directory = os.fspath(directory) if hasattr(os, "fspath") else directory if isinstance(directory, six.text_type) and not isinstance(sep, six.text_type): #PY2 import sys sep = sep.decode(sys.getfilesystemencoding()) elif isinstance(directory, six.binary_type) and not isinstance(sep, six.binary_type): #PY3 noqa sep = os.fsencode(sep) while sep * 2 in directory: directory.replace(sep * 2, sep) if directory.endswith(sep): directory = directory[:-len(sep)] # Determine base directory name to send to IPFS (required and also used # as part of the wrap_with_directory feature) if self.dirname: dirname = self.dirname elif not isinstance(directory, int): dirname = os.path.basename(directory) dirname = dirname if isinstance(dirname, str) else os.fsdecode(dirname) else: dirname = "_" assert type(directory) == type(dirname) or isinstance(directory, int) # Identify the unnecessary portion of the relative path truncate = (directory if not isinstance(directory, int) else ".") + sep # Traverse the filesystem downward from the target directory's uri # Errors: `os.walk()` will simply return an empty generator if the # target directory does not exist. wildcard_directories = set() if not isinstance(self.directory, int): walk_iter = os.walk(self.directory) else: walk_iter = os.fwalk(dir_fd=self.directory) for result in walk_iter: cur_dir, filenames = result[0], result[2] dir_fd = -1 if not isinstance(self.directory, int) else result[3] # find the path relative to the directory being added if len(truncate) > 0: _, _, short_path = cur_dir.partition(truncate) else: short_path = cur_dir # remove leading / or \ if it is present if short_path.startswith(os.path.sep): short_path = short_path[len(os.path.sep):] short_path = os.path.join(dirname, short_path) if short_path else dirname wildcard_directory = False if os.path.split(short_path)[0] in wildcard_directories: # Parent directory has matched a pattern, all sub-nodes should # be added too wildcard_directories.add(short_path) wildcard_directory = True else: # Check if directory path matches one of the patterns if match_short_path(short_path): # Directory matched pattern and it should therefor # be added along with all of its contents wildcard_directories.add(short_path) wildcard_directory = True # Always add directories within wildcard directories - even if they # are empty if wildcard_directory: #PY2: Use `yield from` instead for chunk in self._body_directory(short_path, visited_directories): yield chunk # Iterate across the files in the current directory for filename in filenames: # Find the filename relative to the directory being added short_file_path = os.path.join(short_path, filename) if dir_fd < 0: file_location = os.path.join(cur_dir, filename) else: file_location = filename if wildcard_directory: # Always add files in wildcard directories #PY2: Use `yield from` instead for chunk in self._body_file(short_file_path, file_location, dir_fd=dir_fd): yield chunk else: # Add file (and all missing intermediary directories) # if it matches one of the patterns if match_short_path(short_file_path): #PY2: Use `yield from` instead for chunk in self._body_directory(short_path, visited_directories): yield chunk for chunk in self._body_file(short_file_path, file_location, dir_fd=dir_fd): yield chunk #PY2: Use `yield from` instead for chunk in self._gen_end(): yield chunk class BytesFileStream(FilesStream): """A buffered generator that encodes bytes as file in :mimetype:`multipart/form-data`. Parameters ---------- data : bytes The binary data to stream to the daemon chunk_size : int The maximum size of a single data chunk """ def __init__(self, data, name="bytes", chunk_size=default_chunk_size): super(BytesFileStream, self).__init__([], name=name, chunk_size=chunk_size) self.data = data if inspect.isgenerator(data) else (data,) def body(self): """Yields the encoded body.""" #PY2: Use `yield from` instead for chunk in self._gen_file_start(self.name): yield chunk for chunk in self._gen_chunks(self.data): yield chunk for chunk in self._gen_file_end(): yield chunk for chunk in self._gen_end(): yield chunk def stream_files(files, chunk_size=default_chunk_size): """Gets a buffered generator for streaming files. Returns a buffered generator which encodes a file or list of files as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- files : Union[str, bytes, os.PathLike, io.IOBase, int, collections.abc.Iterable] The file(s) to stream chunk_size : int Maximum size of each stream chunk """ stream = FilesStream(files, chunk_size=chunk_size) return stream.body(), stream.headers() def stream_directory(directory, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming directories. Returns a buffered generator which encodes a directory as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- directory : Union[str, bytes, os.PathLike, int] The filepath of the directory to stream recursive : bool Stream all content within the directory recursively? patterns : Union[str, re.compile, collections.abc.Iterable] Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ def stream_directory_impl(directory, dirname=None): stream = DirectoryStream(directory, recursive=recursive, patterns=patterns, dirname=dirname, chunk_size=chunk_size) return stream.body(), stream.headers() # Note that `os.fwalk` is never available on Windows and Python 2 if hasattr(os, "fwalk") and not isinstance(directory, int): #PY3 def auto_close_iter_fd(fd, iter): try: #PY2: Use `yield from` instead for item in iter: yield item finally: os.close(fd) directory_str = utils.convert_path(directory) dirname = os.path.basename(os.path.normpath(directory_str)) fd = os.open(directory_str, os.O_CLOEXEC | os.O_DIRECTORY) body, headers = stream_directory_impl(fd, dirname) return auto_close_iter_fd(fd, body), headers else: return stream_directory_impl(directory) def stream_filesystem_node(filepaths, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming either files or directories. Returns a buffered generator which encodes the file or directory at the given path as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- filepaths : Union[str, bytes, os.PathLike, int, io.IOBase, collections.abc.Iterable] The filepath of a single directory or one or more files to stream recursive : bool Stream all content within the directory recursively? patterns : Union[str, re.compile, collections.abc.Iterable] Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the paths of files and directories to be added to IPFS (directories only) chunk_size : int Maximum size of each stream chunk """ is_dir = False if isinstance(filepaths, utils.path_types): is_dir = os.path.isdir(utils.convert_path(filepaths)) elif isinstance(filepaths, int): import stat is_dir = stat.S_ISDIR(os.fstat(filepaths).st_mode) if is_dir: return stream_directory(filepaths, recursive, patterns, chunk_size) + (True,) else: return stream_files(filepaths, chunk_size) + (False,) def stream_bytes(data, chunk_size=default_chunk_size): """Gets a buffered generator for streaming binary data. Returns a buffered generator which encodes binary data as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- data : bytes The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ stream = BytesFileStream(data, chunk_size=chunk_size) return stream.body(), stream.headers() def stream_text(text, chunk_size=default_chunk_size): """Gets a buffered generator for streaming text. Returns a buffered generator which encodes a string as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- text : str The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ if inspect.isgenerator(text): def binary_stream(): for item in text: if six.PY2 and isinstance(item, six.binary_type): #PY2 # Allow binary strings under Python 2 since # Python 2 code is not expected to always get the # distinction between text and binary strings right. yield item else: #PY3 yield item.encode("utf-8") data = binary_stream() elif six.PY2 and isinstance(text, six.binary_type): #PY2: See above. data = text else: #PY3 data = text.encode("utf-8") return stream_bytes(data, chunk_size) PK Nc-##"ipfshttpclient/requests_wrapper.py# -*- encoding: utf-8 -*- """Exposes the full ``requests`` HTTP library API, while adding an extra ``family`` parameter to all HTTP request operations that may be used to restrict the address family used when resolving a domain-name to an IP address. """ import socket try: import urllib.parse except ImportError: #PY2 class urllib: import urlparse as parse import requests import requests.adapters import urllib3 import urllib3.connection import urllib3.exceptions import urllib3.poolmanager import urllib3.util.connection AF2NAME = { int(socket.AF_INET): "ip4", int(socket.AF_INET6): "ip6", } NAME2AF = dict((name, af) for af, name in AF2NAME.items()) # This function is copied from urllib3/util/connection.py (that in turn copied # it from socket.py in the Python 2.7 standard library test suite) and accepts # an extra `family` parameter that specifies the allowed address families for # name resolution. # # The entire remainder of this file after this only exists to ensure that this # `family` parameter is exposed all the way up to request's `Session` interface, # storing it as part of the URL scheme while traversing most of the layers. def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, socket_options=None, family=socket.AF_UNSPEC): host, port = address if host.startswith('['): host = host.strip('[]') err = None if not family or family == socket.AF_UNSPEC: family = urllib3.util.connection.allowed_gai_family() for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res sock = None try: sock = socket.socket(af, socktype, proto) # If provided, set socket level options before connecting. if socket_options is not None: for opt in socket_options: sock.setsockopt(*opt) if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: sock.settimeout(timeout) if source_address: sock.bind(source_address) sock.connect(sa) return sock except socket.error as e: err = e if sock is not None: sock.close() sock = None if err is not None: raise err raise socket.error("getaddrinfo returns an empty list") # Override the `urllib3` low-level Connection objects that do the actual work # of speaking HTTP def _kw_scheme_to_family(kw, base_scheme): family = socket.AF_UNSPEC scheme = kw.pop("scheme", None) if isinstance(scheme, str): parts = scheme.rsplit("+", 1) if len(parts) == 2 and parts[0] == base_scheme: family = NAME2AF.get(parts[1], family) return family class ConnectionOverrideMixin: def _new_conn(self): extra_kw = { "family": self.family } if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: dns_host = getattr(self, "_dns_host", self.host) conn = create_connection( (dns_host, self.port), self.timeout, **extra_kw) except socket.timeout: raise urllib3.exceptions.ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) except socket.error as e: raise urllib3.exceptions.NewConnectionError( self, "Failed to establish a new connection: %s" % e) return conn class HTTPConnection(ConnectionOverrideMixin, urllib3.connection.HTTPConnection): def __init__(self, *args, **kw): self.family = _kw_scheme_to_family(kw, "http") super(HTTPConnection, self).__init__(*args, **kw) class HTTPSConnection(ConnectionOverrideMixin, urllib3.connection.HTTPSConnection): def __init__(self, *args, **kw): self.family = _kw_scheme_to_family(kw, "https") super(HTTPSConnection, self).__init__(*args, **kw) # Override the higher-level `urllib3` ConnectionPool objects that instantiate # one or more Connection objects and dispatch work between them class HTTPConnectionPool(urllib3.HTTPConnectionPool): ConnectionCls = HTTPConnection class HTTPSConnectionPool(urllib3.HTTPSConnectionPool): ConnectionCls = HTTPSConnection # Override the highest-level `urllib3` PoolManager to also properly support the # address family extended scheme values in URLs and pass these scheme values on # to the individual ConnectionPool objects class PoolManager(urllib3.PoolManager): def __init__(self, *args, **kwargs): super(PoolManager, self).__init__(*args, **kwargs) # Additionally to adding our variant of the usual HTTP and HTTPS # pool classes, also add these for some variants of the default schemes # that are limited to some specific address family only self.pool_classes_by_scheme = {} for scheme, ConnectionPool in (("http", HTTPConnectionPool), ("https", HTTPSConnectionPool)): self.pool_classes_by_scheme[scheme] = ConnectionPool for name in AF2NAME.values(): self.pool_classes_by_scheme["{0}+{1}".format(scheme, name)] = ConnectionPool self.key_fn_by_scheme["{0}+{1}".format(scheme, name)] = self.key_fn_by_scheme[scheme] # These next two are only required to ensure that our custom `scheme` values # will be passed down to the `*ConnectionPool`s and finally to the actual # `*Connection`s as parameter def _new_pool(self, scheme, host, port, request_context=None): # Copied from `urllib3` to *not* surpress the `scheme` parameter pool_cls = self.pool_classes_by_scheme[scheme] if request_context is None: request_context = self.connection_pool_kw.copy() for key in ('host', 'port'): request_context.pop(key, None) if scheme == "http" or scheme.startswith("http+"): for kw in urllib3.poolmanager.SSL_KEYWORDS: request_context.pop(kw, None) return pool_cls(host, port, **request_context) def connection_from_pool_key(self, pool_key, request_context=None): # Copied from `urllib3` so that we continue to ensure that this will # call `_new_pool` with self.pools.lock: pool = self.pools.get(pool_key) if pool: return pool scheme = request_context['scheme'] host = request_context['host'] port = request_context['port'] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool return pool # Override the lower-level `requests` adapter that invokes the `urllib3` # PoolManager objects class HTTPAdapter(requests.adapters.HTTPAdapter): def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # save these values for pickling (copied from `requests`) self._pool_connections = connections self._pool_maxsize = maxsize self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs) # Override the highest-level `requests` Session object to accept the `family` # parameter for any request and encode its value as part of the URL scheme # when passing it down to the adapter class Session(requests.Session): def __init__(self, *args, **kwargs): super(Session, self).__init__(*args, **kwargs) # Additionally to mounting our variant of the usual HTTP and HTTPS # adapter, also mount it for some variants of the default schemes that # are limited to some specific address family only adapter = HTTPAdapter() for scheme in ("http", "https"): self.mount("{0}://".format(scheme), adapter) for name in AF2NAME.values(): self.mount("{0}+{1}://".format(scheme, name), adapter) def request(self, method, url, *args, **kwargs): family = kwargs.pop("family", socket.AF_UNSPEC) if family != socket.AF_UNSPEC: # Inject provided address family value as extension to scheme url = urllib.parse.urlparse(url) url = url._replace(scheme="{0}+{1}".format(url.scheme, AF2NAME[int(family)])) url = url.geturl() return super(Session, self).request(method, url, *args, **kwargs) session = Session # Import other `requests` stuff to make the top-level API of this more compatible from requests import ( __title__, __description__, __url__, __version__, __build__, __author__, __author_email__, __license__, __copyright__, __cake__, exceptions, utils, packages, codes, Request, Response, PreparedRequest, RequestException, Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError, FileModeWarning, ConnectTimeout, ReadTimeout ) # Re-implement the top-level “session-less” API def request(method, url, **kwargs): with Session() as session: return session.request(method=method, url=url, **kwargs) def get(url, params=None, **kwargs): kwargs.setdefault('allow_redirects', True) return request('get', url, params=params, **kwargs) def options(url, **kwargs): kwargs.setdefault('allow_redirects', True) return request('options', url, **kwargs) def head(url, **kwargs): kwargs.setdefault('allow_redirects', False) return request('head', url, **kwargs) def post(url, data=None, json=None, **kwargs): return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): return request('put', url, data=data, **kwargs) def patch(url, data=None, **kwargs): return request('patch', url, data=data, **kwargs) def delete(url, **kwargs): return request('delete', url, **kwargs) PKvN Ny{{ipfshttpclient/utils.py"""A module to handle generic operations. """ from __future__ import absolute_import try: #PY3 import collections.abc except ImportError: #PY2: The relevant classes used to be somewhere else class collections: import collections as abc import mimetypes import os import six from functools import wraps path_str_types = (six.text_type, six.binary_type) path_obj_types = () if hasattr(os, "PathLike"): #PY36+ path_obj_types += (os.PathLike,) def convert_path(path): # Not needed since all system APIs also accept an `os.PathLike` return path else: #PY35 try: #PY2: doesn't have `pathlib` import pathlib path_obj_types += (pathlib.PurePath,) except ImportError: pass # Independently maintained forward-port of `pathlib` for Py27 and others try: import pathlib2 path_obj_types += (pathlib2.PurePath,) except ImportError: pass def convert_path(path): # `pathlib`'s PathLike objects need to be treated specially and # converted to strings when interacting with system APIs return str(path) if isinstance(path, path_obj_types) else path path_types = path_str_types + path_obj_types def guess_mimetype(filename): """Guesses the mimetype of a file based on the given ``filename``. .. code-block:: python >>> guess_mimetype('example.txt') 'text/plain' >>> guess_mimetype('/foo/bar/example') 'application/octet-stream' Parameters ---------- filename : str The file name or path for which the mimetype is to be guessed """ fn = os.path.basename(filename) return mimetypes.guess_type(fn)[0] or 'application/octet-stream' def clean_file(file): """Returns a tuple containing a ``file``-like object and a close indicator. This ensures the given file is opened and keeps track of files that should be closed after use (files that were not open prior to this function call). Raises ------ OSError : Accessing the given file path failed Parameters ---------- file : Union[str, bytes, os.PathLike, io.IOBase, int] A filepath or ``file``-like object that may or may not need to be opened """ if isinstance(file, int): return os.fdopen(file, 'rb', closefd=False), True elif not hasattr(file, 'read'): return open(convert_path(file), 'rb'), True else: return file, False def clean_files(files): """Generates tuples with a ``file``-like object and a close indicator. This is a generator of tuples, where the first element is the file object and the second element is a boolean which is True if this module opened the file (and thus should close it). Raises ------ OSError : Accessing the given file path failed Parameters ---------- files : Union[str, bytes, os.PathLike, io.IOBase, int, collections.abc.Iterable] Collection or single instance of a filepath and file-like object """ if not isinstance(files, path_types) and not hasattr(files, "read"): for f in files: yield clean_file(f) else: yield clean_file(files) class return_field(object): """Decorator that returns the given field of a json response. Parameters ---------- field : object The response field to be returned for all invocations """ def __init__(self, field): self.field = field def __call__(self, cmd): """Wraps a command so that only a specified field is returned. Parameters ---------- cmd : callable A command that is intended to be wrapped """ @wraps(cmd) def wrapper(*args, **kwargs): """Returns the specified field of the command invocation. Parameters ---------- args : list Positional parameters to pass to the wrapped callable kwargs : dict Named parameter to pass to the wrapped callable """ res = cmd(*args, **kwargs) return res[self.field] return wrapper PKNO?DJ&&ipfshttpclient/version.py# _Versioning scheme:_ # The major and minor version of each release correspond to the supported # IPFS daemon version. The revision number will be updated whenever we make # a new release for the `py-ipfs-http-client` for that daemon version. # # Example: The first client version to support the `0.4.x`-series of the IPFS # HTTP API will have version `0.4.0`, the second version will have version # `0.4.1` and so on. When IPFS `0.5.0` is released, the first client version # to support it will also be released as `0.5.0`. __version__ = "0.4.12" PKvN%g !ipfshttpclient/client/__init__.py# -*- coding: utf-8 -*- """IPFS API Bindings for Python. Classes: * Client – a TCP client for interacting with an IPFS daemon """ from __future__ import absolute_import import os import warnings import multiaddr DEFAULT_ADDR = multiaddr.Multiaddr(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_ADDR", '/dns/localhost/tcp/5001/http')) DEFAULT_BASE = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_BASE", 'api/v0')) VERSION_MINIMUM = "0.4.3" VERSION_BLACKLIST = ["0.4.20"] VERSION_MAXIMUM = "0.5.0" from . import bitswap from . import block from . import bootstrap from . import config #TODO: `from . import dag` from . import dht from . import files from . import key from . import miscellaneous from . import name from . import object from . import pin from . import pubsub from . import repo #TODO: `from . import stats` from . import swarm from . import unstable from .. import encoding, exceptions, multipart, utils def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM, blacklist=VERSION_BLACKLIST): """Make sure that the given daemon version is supported by this client version. Raises ------ ~ipfshttpclient.exceptions.VersionMismatch Parameters ---------- version : str The actual version of an IPFS daemon minimum : str The minimal IPFS daemon version to allowed maximum : str The maximum IPFS daemon version to allowed """ # Convert version strings to integer tuples version = list(map(int, version.split('-', 1)[0].split('.'))) minimum = list(map(int, minimum.split('-', 1)[0].split('.'))) maximum = list(map(int, maximum.split('-', 1)[0].split('.'))) if minimum > version or version >= maximum: raise exceptions.VersionMismatch(version, minimum, maximum) for blacklisted in blacklist: blacklisted = list(map(int, blacklisted.split('-', 1)[0].split('.'))) if version == blacklisted: raise exceptions.VersionMismatch(version, minimum, maximum) def connect(addr=DEFAULT_ADDR, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, session=False, **defaults): """Create a new :class:`~ipfshttpclient.Client` instance and connect to the daemon to validate that its version is supported. Raises ------ ~ipfshttpclient.exceptions.VersionMismatch ~ipfshttpclient.exceptions.ErrorResponse ~ipfshttpclient.exceptions.ConnectionError ~ipfshttpclient.exceptions.ProtocolError ~ipfshttpclient.exceptions.StatusError ~ipfshttpclient.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the :class:`~ipfshttpclient.Client` class. Returns ------- :class:`~ipfshttpclient.Client` """ # Create client instance client = Client(addr, base, chunk_size, session, **defaults) # Query version number from daemon and validate it assert_version(client.version()['Version']) return client class Client(files.Base, miscellaneous.Base): """The main IPFS HTTP client class Allows access to an IPFS daemon instance using its HTTP API by exposing an `IPFS Interface Core `__ compatible set of methods. It is possible to instantiate this class directly, using the same parameters as :func:`connect`, to prevent the client from checking for an active and compatible version of the daemon. In general however, calling :func:`connect` should be preferred. In order to reduce latency between individual API calls, this class may keep a pool of TCP connections between this client and the API daemon open between requests. The only caveat of this is that the client object should be closed when it is not used anymore to prevent resource leaks. The easiest way of using this “session management” facility is using a context manager:: with ipfshttpclient.connect() as client: print(client.version()) # These calls… print(client.version()) # …will reuse their TCP connection A client object may be re-opened several times:: client = ipfshttpclient.connect() print(client.version()) # Perform API call on separate TCP connection with client: print(client.version()) # These calls… print(client.version()) # …will share a TCP connection with client: print(client.version()) # These calls… print(client.version()) # …will share a different TCP connection When storing a long-running :class:`Client` object use it like this:: class Consumer: def __init__(self): self._client = ipfshttpclient.connect(session=True) # … other code … def close(self): # Call this when you're done self._client.close() """ __doc__ += base.ClientBase.__doc__ bitswap = base.SectionProperty(bitswap.Section) block = base.SectionProperty(block.Section) bootstrap = base.SectionProperty(bootstrap.Section) config = base.SectionProperty(config.Section) dht = base.SectionProperty(dht.Section) key = base.SectionProperty(key.Section) name = base.SectionProperty(name.Section) object = base.SectionProperty(object.Section) pin = base.SectionProperty(pin.Section) pubsub = base.SectionProperty(pubsub.Section) repo = base.SectionProperty(repo.Section) swarm = base.SectionProperty(swarm.Section) unstable = base.SectionProperty(unstable.Section) ###################### # SESSION MANAGEMENT # ###################### def __enter__(self): self._client.open_session() return self def __exit__(self, exc_type, exc_value, traceback): self.close() def close(self): """Close any currently open client session and free any associated resources. If there was no session currently open this method does nothing. An open session is not a requirement for using a :class:`~ipfshttpclient.Client` object and as such all method defined on it will continue to work, but a new TCP connection will be established for each and every API call invoked. Such a usage should therefor be avoided and may cause a warning in the future. See the class's description for details. """ self._client.close_session() ########### # HELPERS # ########### @utils.return_field('Hash') @base.returns_single_item def add_bytes(self, data, **kwargs): """Adds a set of bytes as a file to IPFS. .. code-block:: python >>> client.add_bytes(b"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- data : bytes Content to be added as a file Returns ------- str Hash of the added IPFS object """ body, headers = multipart.stream_bytes(data, self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) @utils.return_field('Hash') @base.returns_single_item def add_str(self, string, **kwargs): """Adds a Python string as a file to IPFS. .. code-block:: python >>> client.add_str(u"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- string : str Content to be added as a file Returns ------- str Hash of the added IPFS object """ body, headers = multipart.stream_text(string, self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) def add_json(self, json_obj, **kwargs): """Adds a json-serializable Python dict as a json file to IPFS. .. code-block:: python >>> client.add_json({'one': 1, 'two': 2, 'three': 3}) 'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob' Parameters ---------- json_obj : dict A json-serializable Python dictionary Returns ------- str Hash of the added IPFS object """ return self.add_bytes(encoding.Json().encode(json_obj), **kwargs) @base.returns_single_item def get_json(self, cid, **kwargs): """Loads a json object from IPFS. .. code-block:: python >>> client.get_json('QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob') {'one': 1, 'two': 2, 'three': 3} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] CID of the IPFS object to load Returns ------- object Deserialized IPFS JSON object value """ return self.cat(cid, decoder='json', **kwargs)PKvN.ipfshttpclient/client/base.py# -*- coding: utf-8 -*- from __future__ import absolute_import import functools import six from . import DEFAULT_ADDR, DEFAULT_BASE from .. import multipart, http def returns_single_item(func): @functools.wraps(func) def wrapper(*args, **kwargs): result = func(*args, **kwargs) if isinstance(result, list): if len(result) != 1: print(result) assert len(result) == 1, ("Called IPFS HTTP-Client function should " "only ever return one item") return result[0] assert kwargs.get("stream", False), ("Called IPFS HTTP-Client function " "should only ever return a list, " "when not streaming a response") return result return wrapper def returns_no_item(func): @functools.wraps(func) def wrapper(*args, **kwargs): result = func(*args, **kwargs) if isinstance(result, (list, six.binary_type)): assert len(result) == 0, ("Called IPFS HTTP-Client function should " "never return an item") return assert kwargs.get("stream", False), ("Called IPFS HTTP-Client function " "should only ever return a list " "or bytes, when not streaming a " "response") return result return wrapper class SectionProperty(object): def __init__(self, cls): self.__prop_cls__ = cls def __get__(self, client_object, type=None): if client_object is not None: # We are invoked on object try: return client_object.__prop_objs__[self] except AttributeError: client_object.__prop_objs__ = { self: self.__prop_cls__(client_object) } return client_object.__prop_objs__[self] except KeyError: client_object.__prop_objs__[self] = self.__prop_cls__(client_object) return client_object.__prop_objs__[self] else: # We are invoked on class return self.__prop_cls__ class SectionBase(object): # Accept parent object from property descriptor def __init__(self, parent): self.__parent = parent # Proxy the parent's properties @property def _client(self): return self.__parent._client @property def chunk_size(self): return self.__parent.chunk_size @chunk_size.setter def chunk_size(self, value): self.__parent.chunk_size = value class ClientBase(object): """ Parameters ---------- addr : Union[bytes, str, multiaddr.Multiaddr] The `MultiAddr `_ describing the API daemon location, as used in the *API* key of `go-ipfs Addresses section `_ Supported addressing patterns are currently: * ``/{dns,dns4,dns6,ip4,ip6}//tcp/`` (HTTP) * ``/{dns,dns4,dns6,ip4,ip6}//tcp//http`` (HTTP) * ``/{dns,dns4,dns6,ip4,ip6}//tcp//https`` (HTTPS) Additional forms (proxying) may be supported in the future. base : str The HTTP URL path prefix (or “base”) at which the API is exposed on the API daemon chunk_size : int The size of the chunks to break uploaded files and text content into session : bool Create this :class:`~ipfshttpclient.Client` instance with a session already open? (Useful for long-running client objects.) """ _clientfactory = http.HTTPClient def __init__(self, addr=DEFAULT_ADDR, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, session=False, **defaults): """Connects to the API port of an IPFS node.""" self.chunk_size = chunk_size self._client = self._clientfactory(addr, base, **defaults) if session: self._client.open_session()PKvNRN@ ipfshttpclient/client/bitswap.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): @base.returns_single_item def wantlist(self, peer=None, **kwargs): """Returns blocks currently on the bitswap wantlist. .. code-block:: python >>> client.bitswap.wantlist() {'Keys': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ]} Parameters ---------- peer : str Peer to show wantlist for. Returns ------- dict +------+----------------------------------------------------+ | Keys | List of blocks the connected daemon is looking for | +------+----------------------------------------------------+ """ args = (peer,) return self._client.request('/bitswap/wantlist', args, decoder='json', **kwargs) @base.returns_single_item def stat(self, **kwargs): """Returns some diagnostic information from the bitswap agent. .. code-block:: python >>> client.bitswap.stat() {'BlocksReceived': 96, 'DupBlksReceived': 73, 'DupDataReceived': 2560601, 'ProviderBufLen': 0, 'Peers': [ 'QmNZFQRxt9RMNm2VVtuV2Qx7q69bcMWRVXmr5CEkJEgJJP', 'QmNfCubGpwYZAQxX8LQDsYgB48C4GbfZHuYdexpX9mbNyT', 'QmNfnZ8SCs3jAtNPc8kf3WJqJqSoX7wsX7VqkLdEYMao4u', … ], 'Wantlist': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ] } Returns ------- dict Statistics, peers and wanted blocks """ return self._client.request('/bitswap/stat', decoder='json', **kwargs)PKvNQ=ipfshttpclient/client/block.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base from .. import multipart class Section(base.SectionBase): """ Functions for interacting with raw IPFS blocks. """ def get(self, cid, **kwargs): r"""Returns the raw contents of a block. .. code-block:: python >>> client.block.get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The CID of an existing block to get Returns ------- bytes Contents of the requested block """ args = (str(cid),) return self._client.request('/block/get', args, **kwargs) @base.returns_single_item def put(self, file, **kwargs): """Stores the contents of the given file object as an IPFS block. .. code-block:: python >>> client.block.put(io.BytesIO(b'Mary had a little lamb')) {'Key': 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'Size': 22} Parameters ---------- file : Union[str, bytes, os.PathLike, io.IOBase, int] The data to be stored as an IPFS block Returns ------- dict Information about the new block See :meth:`~ipfshttpclient.Client.block.stat` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/block/put', decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item def stat(self, cid, **kwargs): """Returns a dict with the size of the block with the given hash. .. code-block:: python >>> client.block.stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 258} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The CID of an existing block to stat Returns ------- dict Information about the requested block """ args = (str(cid),) return self._client.request('/block/stat', args, decoder='json', **kwargs)PKvNDD"ipfshttpclient/client/bootstrap.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): @base.returns_single_item def add(self, peer, *peers, **kwargs): """Adds peers to the bootstrap list. Parameters ---------- peer : str IPFS MultiAddr of a peer to add to the list Returns ------- dict """ args = (peer,) + peers return self._client.request('/bootstrap/add', args, decoder='json', **kwargs) @base.returns_single_item def list(self, **kwargs): """Returns the addresses of peers used during initial discovery of the IPFS network. Peers are output in the format ``/``. .. code-block:: python >>> client.bootstrap.list() {'Peers': [ '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRa … ca9z', '/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKD … KrGM', … '/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3p … QBU3' ]} Returns ------- dict +-------+-------------------------------+ | Peers | List of known bootstrap peers | +-------+-------------------------------+ """ return self._client.request('/bootstrap', decoder='json', **kwargs) @base.returns_single_item def rm(self, peer, *peers, **kwargs): """Removes peers from the bootstrap list. Parameters ---------- peer : str IPFS MultiAddr of a peer to remove from the list Returns ------- dict """ args = (peer,) + peers return self._client.request('/bootstrap/rm', args, decoder='json', **kwargs)PKvNԍipfshttpclient/client/config.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): @base.returns_single_item def get(self, **kwargs): #TODO: Support the optional `key` parameter """Returns the current used server configuration. .. code-block:: python >>> config = client.config.get() >>> config['Addresses'] {'API': '/ip4/127.0.0.1/tcp/5001', 'Gateway': '/ip4/127.0.0.1/tcp/8080', 'Swarm': ['/ip4/0.0.0.0/tcp/4001', '/ip6/::/tcp/4001']}, >>> config['Discovery'] {'MDNS': {'Enabled': True, 'Interval': 10}} Returns ------- dict The entire IPFS daemon configuration """ return self._client.request('/config/show', decoder='json', **kwargs) @base.returns_single_item def replace(self, config, **kwargs): """Replaces the existing configuration with a new configuration tree. Make sure to back up the config file first if neccessary, as this operation can not be undone. """ return self._client.request('/config/replace', (config,), decoder='json', **kwargs) @base.returns_single_item def set(self, key, value=None, **kwargs): """Add or replace a single configuration value. .. code-block:: python >>> client.config.set("Addresses.Gateway") {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'} >>> client.config.set("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081") {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'} Parameters ---------- key : str The key of the configuration entry (e.g. "Addresses.API") value : dict The value to set the configuration entry to Returns ------- dict +-------+---------------------------------------------+ | Key | The requested configuration key | +-------+---------------------------------------------+ | Value | The new value of the this configuration key | +-------+---------------------------------------------+ """ args = (key, value) return self._client.request('/config', args, decoder='json', **kwargs)PKvN.!ipfshttpclient/client/dht.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base from .. import exceptions class Section(base.SectionBase): @base.returns_single_item def findpeer(self, peer_id, *peer_ids, **kwargs): """Queries the DHT for all of the associated multiaddresses. .. code-block:: python >>> client.dht.findpeer("QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZN … MTLZ") [{'ID': 'QmfVGMFrwW6AV6fTWmD6eocaTybffqAvkVLXQEFrYdk6yc', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmTKiUdjbRjeN9yPhNhG1X38YNuBdjeiV9JXYWzCAJ4mj5', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmTGkgHSsULk8p3AKTAqKixxidZQXFyF7mCURcutPqrwjQ', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': '', 'Extra': '', 'Type': 2, 'Responses': [ {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Addrs': [ '/ip4/10.9.8.1/tcp/4001', '/ip6/::1/tcp/4001', '/ip4/164.132.197.107/tcp/4001', '/ip4/127.0.0.1/tcp/4001']} ]}] Parameters ---------- peer_id : str The ID of the peer to search for Returns ------- dict List of multiaddrs """ args = (peer_id,) + peer_ids return self._client.request('/dht/findpeer', args, decoder='json', **kwargs) def findprovs(self, cid, *cids, **kwargs): """Finds peers in the DHT that can provide a specific value. .. code-block:: python >>> client.dht.findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2") [{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': '', 'Extra': '', 'Type': 4, 'Responses': [ {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None} ]}, {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Extra': '', 'Type': 1, 'Responses': [ {'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [ '/ip4/127.0.0.1/tcp/4001', '/ip4/172.17.0.8/tcp/4001', '/ip6/::1/tcp/4001', '/ip4/52.32.109.74/tcp/1028' ]} ]}] Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The DHT key to find providers for Returns ------- dict List of provider Peer IDs """ args = (str(cid),) + tuple(str(c) for c in cids) return self._client.request('/dht/findprovs', args, decoder='json', **kwargs) @base.returns_single_item def get(self, key, *keys, **kwargs): """Queries the DHT for its best value related to given key. There may be several different values for a given key stored in the DHT; in this context *best* means the record that is most desirable. There is no one metric for *best*: it depends entirely on the key type. For IPNS, *best* is the record that is both valid and has the highest sequence number (freshest). Different key types may specify other rules for what they consider to be the *best*. Parameters ---------- key : str One or more keys whose values should be looked up Returns ------- str """ args = (key,) + keys res = self._client.request('/dht/get', args, decoder='json', **kwargs) if isinstance(res, dict) and "Extra" in res: return res["Extra"] else: for r in res: if "Extra" in r and len(r["Extra"]) > 0: return r["Extra"] raise exceptions.Error("empty response from DHT") #TODO: Implement `provide(cid)` def put(self, key, value, **kwargs): """Writes a key/value pair to the DHT. Given a key of the form ``/foo/bar`` and a value of any form, this will write that value to the DHT with that key. Keys have two parts: a keytype (foo) and the key name (bar). IPNS uses the ``/ipns/`` keytype, and expects the key name to be a Peer ID. IPNS entries are formatted with a special strucutre. You may only use keytypes that are supported in your ``ipfs`` binary: ``go-ipfs`` currently only supports the ``/ipns/`` keytype. Unless you have a relatively deep understanding of the key's internal structure, you likely want to be using the :meth:`~ipfshttpclient.Client.name_publish` instead. Value is arbitrary text. .. code-block:: python >>> client.dht.put("QmVgNoP89mzpgEAAqK8owYoDEyB97Mkc … E9Uc", "test123") [{'ID': 'QmfLy2aqbhU1RqZnGQyqHSovV8tDufLUaPfN1LNtg5CvDZ', 'Extra': '', 'Type': 5, 'Responses': None}, {'ID': 'QmZ5qTkNvvZ5eFq9T4dcCEK7kX8L7iysYEpvQmij9vokGE', 'Extra': '', 'Type': 5, 'Responses': None}, {'ID': 'QmYqa6QHCbe6eKiiW6YoThU5yBy8c3eQzpiuW22SgVWSB8', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': 'QmP6TAKVDCziLmx9NV8QGekwtf7ZMuJnmbeHMjcfoZbRMd', 'Extra': '', 'Type': 1, 'Responses': []}] Parameters ---------- key : str A unique identifier value : str Abitrary text to associate with the input (2048 bytes or less) Returns ------- list """ args = (key, value) return self._client.request('/dht/put', args, decoder='json', **kwargs) def query(self, peer_id, *peer_ids, **kwargs): """Finds the closest Peer IDs to a given Peer ID by querying the DHT. .. code-block:: python >>> client.dht.query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ") [{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF', 'Extra': '', 'Type': 2, 'Responses': None}, {'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f', 'Extra': '', 'Type': 2, 'Responses': None}, {'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs', 'Extra': '', 'Type': 2, 'Responses': None}, … {'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv', 'Extra': '', 'Type': 1, 'Responses': []}] Parameters ---------- peer_id : str The peerID to run the query against Returns ------- dict List of peers IDs """ args = (peer_id,) + peer_ids return self._client.request('/dht/query', args, decoder='json', **kwargs)PKvNC))ipfshttpclient/client/files.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base from .. import multipart class Section(base.SectionBase): """ Functions used to manage files in IPFS's virtual “Mutable File System” (MFS) file storage space. """ @base.returns_no_item def cp(self, source, dest, **kwargs): """Copies files within the MFS. Due to the nature of IPFS this will not actually involve any of the file's content being copied. .. code-block:: python >>> client.files.ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0} ]} >>> client.files.cp("/test", "/bla") >>> client.files.ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0} ]} Parameters ---------- source : str Filepath within the MFS to copy from dest : str Destination filepath within the MFS to which the file will be copied to """ args = (source, dest) return self._client.request('/files/cp', args, **kwargs) #TODO: Add `flush(path="/")` @base.returns_single_item def ls(self, path, **kwargs): """Lists contents of a directory in the MFS. .. code-block:: python >>> client.files.ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0} ]} Parameters ---------- path : str Filepath within the MFS Returns ------- dict +---------+------------------------------------------+ | Entries | List of files in the given MFS directory | +---------+------------------------------------------+ """ args = (path,) return self._client.request('/files/ls', args, decoder='json', **kwargs) @base.returns_no_item def mkdir(self, path, parents=False, **kwargs): """Creates a directory within the MFS. .. code-block:: python >>> client.files.mkdir("/test") Parameters ---------- path : str Filepath within the MFS parents : bool Create parent directories as needed and do not raise an exception if the requested directory already exists """ kwargs.setdefault("opts", {})["parents"] = parents args = (path,) return self._client.request('/files/mkdir', args, **kwargs) @base.returns_no_item def mv(self, source, dest, **kwargs): """Moves files and directories within the MFS. .. code-block:: python >>> client.files.mv("/test/file", "/bla/file") Parameters ---------- source : str Existing filepath within the MFS dest : str Destination to which the file will be moved in the MFS """ args = (source, dest) return self._client.request('/files/mv', args, **kwargs) def read(self, path, offset=0, count=None, **kwargs): """Reads a file stored in the MFS. .. code-block:: python >>> client.files.read("/bla/file") b'hi' Parameters ---------- path : str Filepath within the MFS offset : int Byte offset at which to begin reading at count : int Maximum number of bytes to read Returns ------- bytes : MFS file contents """ opts = {"offset": offset} if count is not None: opts["count"] = count kwargs.setdefault("opts", {}).update(opts) args = (path,) return self._client.request('/files/read', args, **kwargs) @base.returns_no_item def rm(self, path, recursive=False, **kwargs): """Removes a file from the MFS. .. code-block:: python >>> client.files.rm("/bla/file") Parameters ---------- path : str Filepath within the MFS recursive : bool Recursively remove directories? """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (path,) return self._client.request('/files/rm', args, **kwargs) @base.returns_single_item def stat(self, path, **kwargs): """Returns basic ``stat`` information for an MFS file (including its hash). .. code-block:: python >>> client.files.stat("/test") {'Hash': 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', 'Size': 0, 'CumulativeSize': 4, 'Type': 'directory', 'Blocks': 0} Parameters ---------- path : str Filepath within the MFS Returns ------- dict : MFS file information """ args = (path,) return self._client.request('/files/stat', args, decoder='json', **kwargs) @base.returns_no_item def write(self, path, file, offset=0, create=False, truncate=False, count=None, **kwargs): """Writes to a mutable file in the MFS. .. code-block:: python >>> client.files.write("/test/file", io.BytesIO(b"hi"), create=True) Parameters ---------- path : str Filepath within the MFS file : Union[str, bytes, os.PathLike, io.RawIOBase, int] IO stream object with data that should be written offset : int Byte offset at which to begin writing at create : bool Create the file if it does not exist truncate : bool Truncate the file to size zero before writing count : int Maximum number of bytes to read from the source ``file`` """ opts = {"offset": offset, "create": create, "truncate": truncate} if count is not None: opts["count"] = count kwargs.setdefault("opts", {}).update(opts) args = (path,) body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/files/write', args, data=body, headers=headers, **kwargs) class Base(base.ClientBase): files = base.SectionProperty(Section) def add(self, file, *files, **kwargs): """Add a file, or directory of files to IPFS. .. code-block:: python >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f: ... numbytes = f.write('Mary had a little lamb') >>> client.add('nurseryrhyme.txt') {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab', 'Name': 'nurseryrhyme.txt'} Parameters ---------- file : Union[str, bytes, os.PathLike, int, io.IOBase] A filepath, path-object, file descriptor or open file object the file or directory to add recursive : bool If ``file`` is some kind of directory, controls whether files in subdirectories should also be added or not (Default: ``False``) pattern : Union[str, list] Single `*glob* `_ pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep trickle : bool Use trickle-dag format (optimized for streaming) when generating the dag; see `the FAQ ` for more information (Default: ``False``) only_hash : bool Only chunk and hash, but do not write to disk (Default: ``False``) wrap_with_directory : bool Wrap files with a directory object to preserve their filename (Default: ``False``) chunker : str The chunking algorithm to use pin : bool Pin this object when adding (Default: ``True``) raw_leaves : bool Use raw blocks for leaf nodes. (experimental). (Default: ``True`` when ``nocopy`` is True, or ``False`` otherwise) nocopy : bool Add the file using filestore. Implies raw-leaves. (experimental). (Default: ``False``) Returns ------- Union[dict, list] File name and hash of the added file node, will return a list of one or more items unless only a single file was given """ #PY2: No support for kw-only parameters after glob parameters recursive = kwargs.pop("recursive", False) pattern = kwargs.pop("pattern", "**") nocopy = kwargs.pop("nocopy", False) opts = { "trickle": kwargs.pop("trickle", False), "only-hash": kwargs.pop("only_hash", False), "wrap-with-directory": kwargs.pop("wrap_with_directory", False), "pin": kwargs.pop("pin", True), "raw-leaves": kwargs.pop("raw_leaves", nocopy), 'nocopy': nocopy } if "chunker" in kwargs: opts["chunker"] = kwargs.pop("chunker") kwargs.setdefault("opts", {}).update(opts) assert not isinstance(file, (tuple, list)), \ "Use `client.add(name1, name2, …)` to add several items" multiple = (len(files) > 0) to_send = ((file,) + files) if multiple else file body, headers, is_dir = multipart.stream_filesystem_node( to_send, recursive, pattern, self.chunk_size ) resp = self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) if not multiple and not is_dir and not kwargs["opts"]["wrap-with-directory"]: assert len(resp) == 1 return resp[0] return resp def get(self, cid, **kwargs): """Downloads a file, or directory of files from IPFS. Files are placed in the current working directory. Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The path to the IPFS object(s) to be outputted """ args = (str(cid),) return self._client.download('/get', args, **kwargs) def cat(self, cid, offset=0, length=-1, **kwargs): r"""Retrieves the contents of a file identified by hash. .. code-block:: python >>> client.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> client.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'\n\n\n\nipfs example viewer</…' Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The name or path of the IPFS object(s) to be retrieved offset : int Byte offset to begin reading from length : int Maximum number of bytes to read(-1 for all) Returns ------- bytes The file's contents """ args = (str(cid),) opts = {} if offset != 0: opts['offset'] = offset if length != -1: opts['length'] = length kwargs.setdefault('opts', opts) return self._client.request('/cat', args, **kwargs) @base.returns_single_item def ls(self, cid, **kwargs): """Returns a list of objects linked to by the given hash. .. code-block:: python >>> client.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Objects': [ {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174, 'Type': 2}, … {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55, 'Type': 2} ] } ]} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The path to the IPFS object(s) to list links from Returns ------- dict Directory information and contents """ args = (str(cid),) return self._client.request('/ls', args, decoder='json', **kwargs) PK�����vN>j  �� �����ipfshttpclient/client/key.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): #TODO: Add `export(name, password)` @base.returns_single_item def gen(self, key_name, type, size=2048, **kwargs): """Adds a new public key that can be used for :meth:`~ipfshttpclient.Client.name.publish`. .. code-block:: python >>> client.key.gen('example_key_name') {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} Parameters ---------- key_name : str Name of the new Key to be generated. Used to reference the Keys. type : str Type of key to generate. The current possible keys types are: * ``"rsa"`` * ``"ed25519"`` size : int Bitsize of key to generate Returns ------- dict +------+---------------------------------------------------+ | Name | The name of the newly generated key | +------+---------------------------------------------------+ | Id | The key ID/fingerprint of the newly generated key | +------+---------------------------------------------------+ """ opts = {"type": type, "size": size} kwargs.setdefault("opts", {}).update(opts) args = (key_name,) return self._client.request('/key/gen', args, decoder='json', **kwargs) #TODO: Add `import(name, pam, password)` @base.returns_single_item def list(self, **kwargs): """Returns a list of generated public keys that can be used with :meth:`~ipfshttpclient.Client.name.publish`. .. code-block:: python >>> client.key.list() {'Keys': [ {'Name': 'self', 'Id': 'QmQf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'}, {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} ]} Returns ------- dict +------+--------------------------------------------------------+ | Keys | List of dictionaries with Names and Ids of public keys | +------+--------------------------------------------------------+ """ return self._client.request('/key/list', decoder='json', **kwargs) @base.returns_single_item def rename(self, key_name, new_key_name, **kwargs): """Rename a keypair .. code-block:: python >>> client.key.rename("bla", "personal") {"Was": "bla", "Now": "personal", "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", "Overwrite": False} Parameters ---------- key_name : str Current name of the key to rename new_key_name : str New name of the key Returns ------- dict Information about the key renameal """ args = (key_name, new_key_name) return self._client.request( '/key/rename', args, decoder='json', **kwargs ) @base.returns_single_item def rm(self, key_name, *key_names, **kwargs): """Remove a keypair .. code-block:: python >>> client.key.rm("bla") {"Keys": [ {"Name": "bla", "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} ]} Parameters ---------- key_name : str Name of the key(s) to remove. Returns ------- dict +------+--------------------------------------------------+ | Keys | List of key names and IDs that have been removed | +------+--------------------------------------------------+ """ args = (key_name,) + key_names return self._client.request('/key/rm', args, decoder='json', **kwargs) PK�����vNL����&���ipfshttpclient/client/miscellaneous.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base from .. import exceptions class Base(base.ClientBase): @base.returns_single_item def dns(self, domain_name, recursive=False, **kwargs): """Resolves DNS links to the referenced object. CIDs are hard to remember, but domain names are usually easy to remember. To create memorable aliases for CIDs, DNS TXT records can point to other DNS links, IPFS objects, IPNS keys, etc. This command resolves those links to the referenced object. For example, with this DNS TXT record:: >>> import dns.resolver >>> a = dns.resolver.query("ipfs.io", "TXT") >>> a.response.answer[0].items[0].to_text() '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' The resolver will give:: >>> client.dns("ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- domain_name : str The domain-name name to resolve recursive : bool Resolve until the name is not a DNS link Returns ------- dict +------+-------------------------------------+ | Path | Resource were a DNS entry points to | +------+-------------------------------------+ """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (domain_name,) return self._client.request('/dns', args, decoder='json', **kwargs) @base.returns_single_item def id(self, peer=None, **kwargs): """Shows IPFS Node ID info. Returns the PublicKey, ProtocolVersion, ID, AgentVersion and Addresses of the connected daemon or some other node. .. code-block:: python >>> client.id() {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc', 'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=', 'AgentVersion': 'go-libp2p/3.3.4', 'ProtocolVersion': 'ipfs/0.1.0', 'Addresses': [ '/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc', '/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', '/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', '/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc', '/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc', '/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc', '/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc', '/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', '/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', '/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']} Parameters ---------- peer : str Peer.ID of the node to look up (local node if ``None``) Returns ------- dict Information about the IPFS node """ args = (peer,) if peer is not None else () return self._client.request('/id', args, decoder='json', **kwargs) #TODO: isOnline() def ping(self, peer, *peers, **kwargs): """Provides round-trip latency information for the routing system. Finds nodes via the routing system, sends pings, waits for pongs, and prints out round-trip latency information. .. code-block:: python >>> client.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n") [{'Success': True, 'Time': 0, 'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'}, {'Success': False, 'Time': 0, 'Text': 'Peer lookup error: routing: not found'}] .. hint:: Pass ``stream=True`` to receive ping progress reports as they arrive. Parameters ---------- peer : str ID of peer to be pinged count : int Number of ping messages to send (Default: ``10``) Returns ------- list Progress reports from the ping """ #PY2: No support for kw-only parameters after glob parameters if "count" in kwargs: kwargs.setdefault("opts", {})["count"] = kwargs["count"] del kwargs["count"] args = (peer,) + peers return self._client.request('/ping', args, decoder='json', **kwargs) @base.returns_single_item def resolve(self, name, recursive=False, **kwargs): """Accepts an identifier and resolves it to the referenced item. There are a number of mutable name protocols that can link among themselves and into IPNS. For example IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers. .. code-block:: python >>> client.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} >>> client.resolve("/ipns/ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- name : str The name to resolve recursive : bool Resolve until the result is an IPFS name Returns ------- dict +------+-------------------------------------+ | Path | IPFS path of the requested resource | +------+-------------------------------------+ """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (name,) return self._client.request('/resolve', args, decoder='json', **kwargs) @base.returns_no_item def stop(self): """Stop the connected IPFS daemon instance. Sending any further requests after this will fail with :class:`~ipfshttpclient.exceptions.ConnectionError`, until you start another IPFS daemon instance. """ try: return self._client.request('/shutdown') except exceptions.ConnectionError: # Sometimes the daemon kills the connection before sending a # response causing an incorrect `ConnectionError` to bubble pass @base.returns_single_item def version(self, **kwargs): """Returns the software version of the currently connected node. .. code-block:: python >>> client.version() {'Version': '0.4.3-rc2', 'Repo': '4', 'Commit': '', 'System': 'amd64/linux', 'Golang': 'go1.6.2'} Returns ------- dict Daemon and system version information """ return self._client.request('/version', decoder='json', **kwargs)PK�����vNL�������ipfshttpclient/client/name.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): @base.returns_single_item def publish(self, ipfs_path, resolve=True, lifetime="24h", ttl=None, key=None, allow_offline=False, **kwargs): """Publishes an object to IPNS. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In publish, the default value of *name* is your own identity public key. .. code-block:: python >>> client.name.publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} Parameters ---------- ipfs_path : str IPFS path of the object to be published resolve : bool Resolve given path before publishing allow_offline : bool When offline, save the IPNS record to the the local datastore without broadcasting to the network instead of simply failing. lifetime : str Time duration that the record will be valid for Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. Valid units are: * ``"ns"`` * ``"us"`` (or ``"µs"``) * ``"ms"`` * ``"s"`` * ``"m"`` * ``"h"`` ttl : string Time duration this record should be cached for. Same syntax like 'lifetime' option. (experimental feature) key : string Name of the key to be used, as listed by 'ipfs key list'. Returns ------- dict +-------+----------------------------------------------------------+ | Name | Key ID of the key to which the given value was published | +-------+----------------------------------------------------------+ | Value | Value that was published | +-------+----------------------------------------------------------+ """ opts = {"lifetime": lifetime, "resolve": resolve, "allow-offline": allow_offline} if ttl: opts["ttl"] = ttl if key: opts["key"] = key kwargs.setdefault("opts", {}).update(opts) args = (ipfs_path,) return self._client.request('/name/publish', args, decoder='json', **kwargs) @base.returns_single_item def resolve(self, name=None, recursive=False, nocache=False, dht_record_count=None, dht_timeout=None, **kwargs): """Gets the value currently published at an IPNS name. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In resolve, the default value of ``name`` is your own identity public key. .. code-block:: python >>> client.name.resolve() {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} Parameters ---------- name : str The IPNS name to resolve (defaults to the connected node) recursive : bool Resolve until the result is not an IPFS name (default: false) nocache : bool Do not use cached entries (default: false) dht_record_count: int Number of records to request for DHT resolution. dht_timeout: string Max time to collect values during DHT resolution eg "30s". Pass 0 for no timeout Returns ------- dict +------+--------------------------------------+ | Path | The resolved value of the given name | +------+--------------------------------------+ """ opts = {"recursive": recursive, "nocache": nocache} if dht_record_count is not None: opts["dht-record-count"] = dht_record_count if dht_timeout is not None: opts["dht-timeout"] = dht_timeout kwargs.setdefault("opts", {}).update(opts) args = (name,) if name is not None else () return self._client.request('/name/resolve', args, decoder='json', **kwargs)PK�����vN1i'��i'�����ipfshttpclient/client/object.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base from .. import multipart class PatchSection(base.SectionBase): @base.returns_single_item def add_link(self, root, name, ref, create=False, **kwargs): """Creates a new merkledag object based on an existing one. The new object will have a link to the provided object. .. code-block:: python >>> client.object.patch.add_link( ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2', ... 'Johnny', ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2' ... ) {'Hash': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'} Parameters ---------- root : str IPFS hash for the object being modified name : str name for the new link ref : str IPFS hash for the object being linked to create : bool Create intermediary nodes Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ kwargs.setdefault("opts", {})["create"] = create args = ((root, name, ref),) return self._client.request('/object/patch/add-link', args, decoder='json', **kwargs) @base.returns_single_item def append_data(self, cid, new_data, **kwargs): """Creates a new merkledag object based on an existing one. The new object will have the provided data appended to it, and will thus have a new Hash. .. code-block:: python >>> client.object.patch.append_data("QmZZmY … fTqm", io.BytesIO(b"bla")) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] The hash of an ipfs object to modify new_data : Union[str, bytes, os.PathLike, io.IOBase, int] The data to append to the object's data section Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ args = (str(cid),) body, headers = multipart.stream_files(new_data, self.chunk_size) return self._client.request('/object/patch/append-data', args, decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item def rm_link(self, root, link, **kwargs): """Creates a new merkledag object based on an existing one. The new object will lack a link to the specified object. .. code-block:: python >>> client.object.patch.rm_link( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... 'Johnny' ... ) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- root : str IPFS hash of the object to modify link : str name of the link to remove Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ args = ((root, link),) return self._client.request('/object/patch/rm-link', args, decoder='json', **kwargs) @base.returns_single_item def set_data(self, root, data, **kwargs): """Creates a new merkledag object based on an existing one. The new object will have the same links as the old object but with the provided data instead of the old object's data contents. .. code-block:: python >>> client.object.patch.set_data( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... io.BytesIO(b'bla') ... ) {'Hash': 'QmSw3k2qkv4ZPsbu9DVEJaTMszAQWNgM1FTFYpfZeNQWrd'} Parameters ---------- root : str IPFS hash of the object to modify data : Union[str, bytes, os.PathLike, io.IOBase, int] The new data to store in root Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ args = (root,) body, headers = multipart.stream_files(data, self.chunk_size) return self._client.request('/object/patch/set-data', args, decoder='json', data=body, headers=headers, **kwargs) class Section(base.SectionBase): patch = base.SectionProperty(PatchSection) def data(self, cid, **kwargs): r"""Returns the raw bytes in an IPFS object. .. code-block:: python >>> client.object.data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] Key of the object to retrieve, in CID format Returns ------- bytes Raw object data """ args = (str(cid),) return self._client.request('/object/data', args, **kwargs) @base.returns_single_item def get(self, cid, **kwargs): """Get and serialize the DAG node named by CID. .. code-block:: python >>> client.object.get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55} ]} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] Key of the object to retrieve, in CID format Returns ------- dict +-------+------------------------------------------------+ | Data | Raw object data (ISO-8859-1 decoded) | +-------+------------------------------------------------+ | Links | List of links associated with the given object | +-------+------------------------------------------------+ """ args = (str(cid),) return self._client.request('/object/get', args, decoder='json', **kwargs) @base.returns_single_item def links(self, cid, **kwargs): """Returns the links pointed to by the specified object. .. code-block:: python >>> client.object.links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] Key of the object to retrieve, in CID format Returns ------- dict +-------+------------------------------------------------+ | Hash | The requested object CID | +-------+------------------------------------------------+ | Links | List of links associated with the given object | +-------+------------------------------------------------+ """ args = (str(cid),) return self._client.request('/object/links', args, decoder='json', **kwargs) @base.returns_single_item def new(self, template=None, **kwargs): """Creates a new object from an IPFS template. By default this creates and returns a new empty merkledag node, but you may pass an optional template argument to create a preformatted node. .. code-block:: python >>> client.object.new() {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} Parameters ---------- template : str Blueprints from which to construct the new object. Possible values: * ``"unixfs-dir"`` * ``None`` Returns ------- dict +-------+----------------------------------------+ | Hash | The hash of the requested empty object | +-------+----------------------------------------+ """ args = (template,) if template is not None else () return self._client.request('/object/new', args, decoder='json', **kwargs) @base.returns_single_item def put(self, file, **kwargs): """Stores input as a DAG object and returns its key. .. code-block:: python >>> client.object.put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', 'Links': [ {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', 'Size': 8, 'Name': 'some link'} ] } Parameters ---------- file : Union[str, bytes, os.PathLike, io.IOBase, int] (JSON) object from which the DAG object will be created Returns ------- dict Hash and links of the created DAG object See the :meth:`~ipfshttpclient.Client.object.links` method for details. """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/object/put', decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item def stat(self, cid, **kwargs): """Get stats for the DAG node named by cid. .. code-block:: python >>> client.object.stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'LinksSize': 256, 'NumLinks': 5, 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] Key of the object to retrieve, in CID format Returns ------- dict """ args = (str(cid),) return self._client.request('/object/stat', args, decoder='json', **kwargs)PK�����vN.;]V��V�����ipfshttpclient/client/pin.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): @base.returns_single_item def add(self, path, *paths, **kwargs): """Pins objects to local storage. Stores an IPFS object(s) from a given path locally to disk. .. code-block:: python >>> client.pin.add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d") {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} Parameters ---------- path : str Path to object(s) to be pinned recursive : bool Recursively unpin the object linked to by the specified object(s) Returns ------- dict +------+-----------------------------------------------------------+ | Pins | List of IPFS objects that have been pinned by this action | +------+-----------------------------------------------------------+ """ #PY2: No support for kw-only parameters after glob parameters if "recursive" in kwargs: kwargs.setdefault("opts", {})["recursive"] = kwargs.pop("recursive") args = (path,) + paths return self._client.request('/pin/add', args, decoder='json', **kwargs) @base.returns_single_item def ls(self, *cids, **kwargs): """Lists objects pinned to local storage. By default, all pinned objects are returned, but the ``type`` flag or arguments can restrict that to a specific pin type or to some specific objects respectively. .. code-block:: python >>> client.pin.ls() {'Keys': { 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}, 'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'}, 'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'}, … 'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'} }} >>> client.pin.ls('/ipfs/QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz') {'Keys': { 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}}} >>> client.pin.ls('/ipfs/QmdBCSn4UJP82MjhRVwpABww48tXL3 … mA6z') ipfshttpclient.exceptions.ErrorResponse: path '/ipfs/QmdBCSn4UJP82MjhRVwpABww48tXL3 … mA6z' is not pinned Parameters ---------- cids : str The path(s) of pinned IPFS object(s) to search for. If none are passed, return information about all pinned objects. If any of the passed CIDs is not pinned, then remote will return an error and an ErrorResponse exception will be raised. type : "str" The type of pinned keys to list. Can be: * ``"direct"`` * ``"indirect"`` * ``"recursive"`` * ``"all"`` Raises ------ ~ipfsapi.exceptions.ErrorResponse Remote returned an error. Remote will return an error if any of the passed CIDs is not pinned. In this case, the exception will contain 'not pinned' in its args[0]. Returns ------- dict +------+--------------------------------------------------------------+ | Keys | Mapping of IPFS object names currently pinned to their types | +------+--------------------------------------------------------------+ """ #PY2: No support for kw-only parameters after glob parameters opts = { "type": kwargs.pop("type", "all") } kwargs.setdefault("opts", {}).update(opts) return self._client.request('/pin/ls', cids, decoder='json', **kwargs) @base.returns_single_item def rm(self, path, *paths, **kwargs): """Removes a pinned object from local storage. Removes the pin from the given object allowing it to be garbage collected if needed. .. code-block:: python >>> client.pin.rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d') {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} Parameters ---------- path : str Path to object(s) to be unpinned recursive : bool Recursively unpin the object linked to by the specified object(s) Returns ------- dict +------+-------------------------------------------------------------+ | Pins | List of IPFS objects that have been unpinned by this action | +------+-------------------------------------------------------------+ """ #PY2: No support for kw-only parameters after glob parameters if "recursive" in kwargs: kwargs.setdefault("opts", {})["recursive"] = kwargs.pop("recursive") args = (path,) + paths return self._client.request('/pin/rm', args, decoder='json', **kwargs) @base.returns_single_item def update(self, from_path, to_path, **kwargs): """Replaces one pin with another. Updates one pin to another, making sure that all objects in the new pin are local. Then removes the old pin. This is an optimized version of using first using :meth:`~ipfshttpclient.Client.pin.add` to add a new pin for an object and then using :meth:`~ipfshttpclient.Client.pin.rm` to remove the pin for the old object. .. code-block:: python >>> client.pin.update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", ... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH") {"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", "/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]} Parameters ---------- from_path : str Path to the old object to_path : str Path to the new object to be pinned unpin : bool Should the pin of the old object be removed? (Default: ``True``) Returns ------- dict +------+-------------------------------------------------------------+ | Pins | List of IPFS objects that have been affected by this action | +------+-------------------------------------------------------------+ """ #PY2: No support for kw-only parameters after glob parameters if "unpin" in kwargs: kwargs.setdefault("opts", {})["unpin"] = kwargs.pop("unpin") args = (from_path, to_path) return self._client.request('/pin/update', args, decoder='json', **kwargs) def verify(self, path, *paths, **kwargs): """Verify that recursive pins are complete. Scan the repo for pinned object graphs and check their integrity. Issues will be reported back with a helpful human-readable error message to aid in error recovery. This is useful to help recover from datastore corruptions (such as when accidentally deleting files added using the filestore backend). This function returns an iterator needs to be closed using a context manager (``with``-statement) or using the ``.close()`` method. .. code-block:: python >>> with client.pin.verify("QmN…TTZ", verbose=True) as pin_verify_iter: ... for item in pin_verify_iter: ... print(item) ... {"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True} {"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True} {"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True} … Parameters ---------- path : str Path to object(s) to be checked verbose : bool Also report status of items that were OK? (Default: ``False``) Returns ------- Iterable[dict] +-----+----------------------------------------------------+ | Cid | IPFS object ID checked | +-----+----------------------------------------------------+ | Ok | Whether the given object was successfully verified | +-----+----------------------------------------------------+ """ #PY2: No support for kw-only parameters after glob parameters if "verbose" in kwargs: kwargs.setdefault("opts", {})["verbose"] = kwargs.pop("verbose") args = (path,) + paths return self._client.request('/pin/verify', args, decoder='json', stream=True, **kwargs)PK�����vN�������ipfshttpclient/client/pubsub.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class SubChannel: """ Wrapper for a pubsub subscription object that allows for easy closing of subscriptions. """ def __init__(self, sub): self.__sub = sub def read_message(self): return next(self.__sub) def __iter__(self): return self.__sub def close(self): self.__sub.close() def __enter__(self): return self def __exit__(self, *a): self.close() class Section(base.SectionBase): @base.returns_single_item def ls(self, **kwargs): """Lists subscribed topics by name This method returns data that contains a list of all topics the user is subscribed to. In order to subscribe to a topic ``pubsub.sub`` must be called. .. code-block:: python # subscribe to a channel >>> with client.pubsub.sub("hello") as sub: ... client.pubsub.ls() { 'Strings' : ["hello"] } Returns ------- dict +---------+-------------------------------------------------+ | Strings | List of topic the IPFS daemon is subscribbed to | +---------+-------------------------------------------------+ """ return self._client.request('/pubsub/ls', decoder='json', **kwargs) @base.returns_single_item def peers(self, topic=None, **kwargs): """List the peers we are pubsubbing with. Lists the id's of other IPFS users who we are connected to via some topic. Without specifying a topic, IPFS peers from all subscribed topics will be returned in the data. If a topic is specified only the IPFS id's of the peers from the specified topic will be returned in the data. .. code-block:: python >>> client.pubsub.peers() {'Strings': [ 'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8', 'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA', ... 'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a' ] } ## with a topic # subscribe to a channel >>> with client.pubsub.sub('hello') as sub: ... client.pubsub.peers(topic='hello') {'String': [ 'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8', ... # other peers connected to the same channel ] } Parameters ---------- topic : str The topic to list connected peers of (defaults to None which lists peers for all topics) Returns ------- dict +---------+-------------------------------------------------+ | Strings | List of PeerIDs of peers we are pubsubbing with | +---------+-------------------------------------------------+ """ args = (topic,) if topic is not None else () return self._client.request('/pubsub/peers', args, decoder='json', **kwargs) @base.returns_no_item def publish(self, topic, payload, **kwargs): """Publish a message to a given pubsub topic Publishing will publish the given payload (string) to everyone currently subscribed to the given topic. All data (including the id of the publisher) is automatically base64 encoded when published. .. code-block:: python # publishes the message 'message' to the topic 'hello' >>> client.pubsub.publish('hello', 'message') [] Parameters ---------- topic : str Topic to publish to payload : Data to be published to the given topic Returns ------- list An empty list """ args = (topic, payload) return self._client.request('/pubsub/pub', args, decoder='json', **kwargs) def subscribe(self, topic, discover=False, **kwargs): """Subscribe to mesages on a given topic Subscribing to a topic in IPFS means anytime a message is published to a topic, the subscribers will be notified of the publication. The connection with the pubsub topic is opened and read. The Subscription returned should be used inside a context manager to ensure that it is closed properly and not left hanging. .. code-block:: python >>> sub = client.pubsub.subscribe('testing') >>> with client.pubsub.subscribe('testing') as sub: ... # publish a message 'hello' to the topic 'testing' ... client.pubsub.publish('testing', 'hello') ... for message in sub: ... print(message) ... # Stop reading the subscription after ... # we receive one publication ... break {'from': '<base64encoded IPFS id>', 'data': 'aGVsbG8=', 'topicIDs': ['testing']} # NOTE: in order to receive published data # you must already be subscribed to the topic at publication # time. Parameters ---------- topic : str Name of a topic to subscribe to discover : bool Try to discover other peers subscibed to the same topic (defaults to False) Returns ------- :class:`SubChannel` Generator wrapped in a context manager that maintains a connection stream to the given topic. """ args = (topic, discover) return SubChannel(self._client.request('/pubsub/sub', args, stream=True, decoder='json'))PK�����vNp��p�����ipfshttpclient/client/repo.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): def gc(self, **kwargs): """Removes stored objects that are not pinned from the repo. .. code-block:: python >>> client.repo.gc() [{'Key': 'QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQuwaHG2mpW2'}, {'Key': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'}, {'Key': 'QmRVBnxUCsD57ic5FksKYadtyUbMsyo9KYQKKELajqAp4q'}, … {'Key': 'QmYp4TeCurXrhsxnzt5wqLqqUz8ZRg5zsc7GuUrUSDtwzP'}] Performs a garbage collection sweep of the local set of stored objects and remove ones that are not pinned in order to reclaim hard disk space. Returns the hashes of all collected objects. Parameters ---------- return_result : bool Defaults to True. Passing False will return None and avoid downloading the list of removed objects. Returns ------- dict List of IPFS objects that have been removed """ return self._client.request('/repo/gc', decoder='json', **kwargs) @base.returns_single_item def stat(self, **kwargs): """Displays the repo's status. Returns the number of objects in the repo and the repo's size, version, and path. .. code-block:: python >>> client.repo.stat() {'NumObjects': 354, 'RepoPath': '…/.local/share/ipfs', 'Version': 'fs-repo@4', 'RepoSize': 13789310} Returns ------- dict General information about the IPFS file repository +------------+-------------------------------------------------+ | NumObjects | Number of objects in the local repo. | +------------+-------------------------------------------------+ | RepoPath | The path to the repo being currently used. | +------------+-------------------------------------------------+ | RepoSize | Size in bytes that the repo is currently using. | +------------+-------------------------------------------------+ | Version | The repo version. | +------------+-------------------------------------------------+ """ return self._client.request('/repo/stat', decoder='json', **kwargs) #TODO: `version()` PK�����vN!�������ipfshttpclient/client/swarm.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class FiltersSection(base.SectionBase): @base.returns_single_item def add(self, address, *addresses, **kwargs): """Adds a given multiaddr filter to the filter list. This will add an address filter to the daemons swarm. Filters applied this way will not persist daemon reboots, to achieve that, add your filters to the configuration file. .. code-block:: python >>> client.swarm.filters.add("/ip4/192.168.0.0/ipcidr/16") {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} Parameters ---------- address : str Multiaddr to filter Returns ------- dict +---------+-----------------------------+ | Strings | List of swarm filters added | +---------+-----------------------------+ """ args = (address,) + addresses return self._client.request('/swarm/filters/add', args, decoder='json', **kwargs) @base.returns_single_item def rm(self, address, *addresses, **kwargs): """Removes a given multiaddr filter from the filter list. This will remove an address filter from the daemons swarm. Filters removed this way will not persist daemon reboots, to achieve that, remove your filters from the configuration file. .. code-block:: python >>> client.swarm.filters.rm("/ip4/192.168.0.0/ipcidr/16") {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} Parameters ---------- address : str Multiaddr filter to remove Returns ------- dict +---------+-------------------------------+ | Strings | List of swarm filters removed | +---------+-------------------------------+ """ args = (address,) + addresses return self._client.request('/swarm/filters/rm', args, decoder='json', **kwargs) class Section(base.SectionBase): filters = base.SectionProperty(FiltersSection) @base.returns_single_item def addrs(self, **kwargs): """Returns the addresses of currently connected peers by peer id. .. code-block:: python >>> pprint(client.swarm.addrs()) {'Addrs': { 'QmNMVHJTSZHTWMWBbmBrQgkA1hZPWYuVJx2DpSGESWW6Kn': [ '/ip4/10.1.0.1/tcp/4001', '/ip4/127.0.0.1/tcp/4001', '/ip4/51.254.25.16/tcp/4001', '/ip6/2001:41d0:b:587:3cae:6eff:fe40:94d8/tcp/4001', '/ip6/2001:470:7812:1045::1/tcp/4001', '/ip6/::1/tcp/4001', '/ip6/fc02:2735:e595:bb70:8ffc:5293:8af8:c4b7/tcp/4001', '/ip6/fd00:7374:6172:100::1/tcp/4001', '/ip6/fd20:f8be:a41:0:c495:aff:fe7e:44ee/tcp/4001', '/ip6/fd20:f8be:a41::953/tcp/4001'], 'QmNQsK1Tnhe2Uh2t9s49MJjrz7wgPHj4VyrZzjRe8dj7KQ': [ '/ip4/10.16.0.5/tcp/4001', '/ip4/127.0.0.1/tcp/4001', '/ip4/172.17.0.1/tcp/4001', '/ip4/178.62.107.36/tcp/4001', '/ip6/::1/tcp/4001'], … }} Returns ------- dict Multiaddrs of peers by peer id +-------+-------------------------------------------------------+ | Addrs | Mapping of PeerIDs to a list its advatised MultiAddrs | +-------+-------------------------------------------------------+ """ return self._client.request('/swarm/addrs', decoder='json', **kwargs) @base.returns_single_item def connect(self, address, *addresses, **kwargs): """Opens a connection to a given address. This will open a new direct connection to a peer address. The address format is an IPFS multiaddr:: /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ .. code-block:: python >>> client.swarm.connect("/ip4/104.131.131.82/tcp/4001/ipfs/Qma … uvuJ") {'Strings': ['connect QmaCpDMGvV2BGHeYERUEnRQAwe3 … uvuJ success']} Parameters ---------- address : str Address of peer to connect to Returns ------- dict Textual connection status report """ args = (address,) + addresses return self._client.request('/swarm/connect', args, decoder='json', **kwargs) @base.returns_single_item def disconnect(self, address, *addresses, **kwargs): """Closes the connection to a given address. This will close a connection to a peer address. The address format is an IPFS multiaddr:: /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ The disconnect is not permanent; if IPFS needs to talk to that address later, it will reconnect. .. code-block:: python >>> client.swarm.disconnect("/ip4/104.131.131.82/tcp/4001/ipfs/Qm … uJ") {'Strings': ['disconnect QmaCpDMGvV2BGHeYERUEnRQA … uvuJ success']} Parameters ---------- address : str Address of peer to disconnect from Returns ------- dict Textual connection status report """ args = (address,) + addresses return self._client.request('/swarm/disconnect', args, decoder='json', **kwargs) @base.returns_single_item def peers(self, **kwargs): """Returns the addresses & IDs of currently connected peers. .. code-block:: python >>> client.swarm.peers() {'Strings': [ '/ip4/101.201.40.124/tcp/40001/ipfs/QmZDYAhmMDtnoC6XZ … kPZc', '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.223.59.174/tcp/4001/ipfs/QmeWdgoZezpdHz1PX8 … 1jB6', … '/ip6/fce3: … :f140/tcp/43901/ipfs/QmSoLnSGccFuZQJzRa … ca9z' ]} Returns ------- dict +---------+----------------------------------------------------+ | Strings | List of MultiAddrs that the daemon is connected to | +---------+----------------------------------------------------+ """ return self._client.request('/swarm/peers', decoder='json', **kwargs)PK�����vNt[q��q��!���ipfshttpclient/client/unstable.py# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class LogSection(base.SectionBase): @base.returns_single_item def level(self, subsystem, level, **kwargs): r"""Changes the logging output of a running daemon. **This API is subject to future change or removal!** .. code-block:: python >>> client.unstable.log.level("path", "info") {"Message": "Changed log level of 'path' to 'info'\n"} Parameters ---------- subsystem : str The subsystem logging identifier (Use ``"all"`` for all subsystems) level : str The desired logging level. Must be one of: * ``"debug"`` * ``"info"`` * ``"warning"`` * ``"error"`` * ``"fatal"`` * ``"panic"`` Returns ------- dict +--------+-----------------------+ | Status | Textual status report | +--------+-----------------------+ """ args = (subsystem, level) return self._client.request('/log/level', args, decoder='json', **kwargs) @base.returns_single_item def ls(self, **kwargs): """Lists the logging subsystems of a running daemon. **This API is subject to future change or removal!** .. code-block:: python >>> client.unstable.log.ls() {'Strings': [ 'github.com/ipfs/go-libp2p/p2p/host', 'net/identify', 'merkledag', 'providers', 'routing/record', 'chunk', 'mfs', 'ipns-repub', 'flatfs', 'ping', 'mockrouter', 'dagio', 'cmds/files', 'blockset', 'engine', 'mocknet', 'config', 'commands/http', 'cmd/ipfs', 'command', 'conn', 'gc', 'peerstore', 'core', 'coreunix', 'fsrepo', 'core/server', 'boguskey', 'github.com/ipfs/go-libp2p/p2p/host/routed', 'diagnostics', 'namesys', 'fuse/ipfs', 'node', 'secio', 'core/commands', 'supernode', 'mdns', 'path', 'table', 'swarm2', 'peerqueue', 'mount', 'fuse/ipns', 'blockstore', 'github.com/ipfs/go-libp2p/p2p/host/basic', 'lock', 'nat', 'importer', 'corerepo', 'dht.pb', 'pin', 'bitswap_network', 'github.com/ipfs/go-libp2p/p2p/protocol/relay', 'peer', 'transport', 'dht', 'offlinerouting', 'tarfmt', 'eventlog', 'ipfsaddr', 'github.com/ipfs/go-libp2p/p2p/net/swarm/addr', 'bitswap', 'reprovider', 'supernode/proxy', 'crypto', 'tour', 'commands/cli', 'blockservice']} Returns ------- dict +---------+-----------------------------------+ | Strings | List of daemon logging subsystems | +---------+-----------------------------------+ """ return self._client.request('/log/ls', decoder='json', **kwargs) def tail(self, **kwargs): r"""Reads log outputs as they are written. **This API is subject to future change or removal!** This function returns an iterator that needs to be closed using a context manager (``with``-statement) or using the ``.close()`` method. .. code-block:: python >>> with client.unstable.log.tail() as log_tail_iter: ... for item in log_tail_iter: ... print(item) ... {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.43353297Z"} {"event":"handleAddProviderBegin","system":"dht", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433642581Z"} {"event":"handleAddProvider","system":"dht","duration":91704, "key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433747513Z"} {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.435843012Z"} … Returns ------- Iterable[dict] """ return self._client.request('/log/tail', decoder='json', stream=True, **kwargs) class RefsSection(base.SectionBase): def __call__(self, cid, **kwargs): """Returns a list of hashes of objects referenced by the given hash. **This API is subject to future change or removal!** You likely want to use :meth:`~ipfshttpclient.object.links` instead. .. code-block:: python >>> client.unstable.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] Path to the object(s) to list refs from Returns ------- list """ args = (str(cid),) return self._client.request('/refs', args, decoder='json', **kwargs) def local(self, **kwargs): """Displays the hashes of all local objects. **This API is subject to future change or removal!** .. code-block:: python >>> client.unstable.refs.local() [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Returns ------- list """ return self._client.request('/refs/local', decoder='json', **kwargs) class Section(base.SectionBase): """ Features that are subject to change and are only provided for convinience """ log = base.SectionProperty(LogSection) refs = base.SectionProperty(RefsSection)PK�����Nr$IGST��T��'���ipfshttpclient-0.4.12.dist-info/LICENSEThe MIT License (MIT) Copyright (c) 2015 Andrew Stocker <amstocker@dons.usfca.edu> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK������!HMuS���a���%���ipfshttpclient-0.4.12.dist-info/WHEEL HM K-*ϳR03rOK-J,/RH,szd&Y)r$[)T&UD"�PK������!H#?G$��%��(���ipfshttpclient-0.4.12.dist-info/METADATA:R:B=}B(␰S4KH4 d[vxÒ©wpldOR6^jx}WΙt2xTդ%<EO 95]jەΣf]^Qw:-Ro6;$N $b�rA<k,ʕYt<ǃf[կvmhsxl/Qjp`tyxd!  q'@?ؐI!OHvI:.WhdXDyKK󌉒y|ګ[+U{Y]+Z55{:`xK$�@dO bx(!I32I[R KH´G֞ Hy{@p�ZړO*tۋ0 ?vm F3%b&`Hxnje<Y;ExwC&ԓ&`@/RK,&5%AF@kwpW/X/38�g%QJN=/#KV;/dȄN,@g,]]67GP&^⊲e}|+X݇oKSdQ`gp: X95Tb ,9al1 &8d{~HeI<4c90O@3cVu?aG=m★bRG31EIM/g�^؀|Ɂkp)/pqb!fet$ 4E ~CDn!%(" no/⯂(ZևיUάIA.N7[G^MZ9lSq[Ay^ cc$$w0egp[V$r0D@3e-/DiMeҁNZ$�n4/!#N"{s140͘.c!2"1&pyX;8?O),JʃBL$s1)M]cT Y @r2WQd̠:z[�b[ a倇 U*1 ( *(Kͮm&9:e.&K- QdN%_y k�W9dZ.53'J$ nDU\1rh&:$30EȲ*]?EnS *K]^ 2xUYDś..5\AZEb<b2`dDJ}dP+!U"aBefyp$!:yP2O{ saE ϥɳg*%껢eѳ1l6`$oYT@U6Tr,Rf BI1 *UQ.4tl}hYD_LѿKW.*-JalBې=نrgg'Y.,S+E}jofݹqo*qة<g#ģ\[ˮ^/j0ÝS#vY;[* {b@>o\=䦛A.(^کZ^.Pa d`mʃp&j)賄PgS -}P�4bK;j*ݮ޻-C.MR7n,C4,̈́i.,gXA6_M c>1K>o}#3:Y`yɽ�`0=} $) B>:zJ:*҃@Kb[+jyA`#1rY?\pP{B   M]a*WzS[rۅn }o �x0IVUytqNt8U e&xbkЭR{^dĶmvk~vD饅vc,*HX@T`A 4c,#6>Jftn6Ë}:WjK=/+]_')J99I6irx<ȇ; O۝Zwt{?q (ֈ^@ lDۛp<TE,)oHy ;½²O|"rZ7ztrwģ:mtֽ!(w".v ܎ư{؈G3vj⹹ik7[;;Go¦gm?I~9WݛOwaiߴdP\s! Jڧv[@&@/­q=_|7mGSVGNrg'ʾJ|GuS~1j E/揽ύs3u:_߁YZﵬo*q Ao/hթ˛ZvnV+j 0urwRgj;CCR흵[ӫcgmSݩt}4*iFc!*l8} f ڂz,LBțpI m_n.?M舭7~q*[ev纙zyH="I9vX#Yr'!CCBg /J0"+t ڴu w/<f1Nį}89TaX`y L#?bvЁ`1F+hlՂJ]0'C' XӴcPNzX>^8 ^iQ}Pv3ҘL9^Ҵ[Í)l2PR@=ӂ M/t8ӈ5�fn8%˒42\O?0bnB/:\M<QMk5y#5wdf/(zc):BP&O5D8] J'hr=T8[WH?)n}Q<9t؉_ԏ2&,C{5Ebeu~WUrĠaJ#,AKPI%.%D ;J=:ГtdӚڑ^Buux@& na>D"<X TB_I7F, j[~_'S8>q_�_ A9V{< !(6DJ񜴆FO| [~^J%Ek⸳=1#<UC*r7`,UtfZ�-*ԈAVT !1C*/؛O]34.F~Li4(B 8g&0?$\A>| ` `3<ҡo2.У'+&*^YqY5ܼuJAN|͑w rώTTΝ=.e.?ie d fb /!=&䥹!#j2)8T|p㟶ۂ_B pޥc I dB0n К9ϰQKwIgiENl&gTʖݓQX,n *'Fұ1ldA)d65UeݲN!fJU]֡N uo^}TStC:�j+<*`E�SnAPI^iHifȖ:$igͰZ>sy; Ų%ް޲>k( Gx#8fk/M)8f#mtfdUX*,T~:�<WgL#V.܁dž唥VWWuNQ? 'Rb :ɴH1tG/ɜf JS=D\&SW^- F!`*e�MI{2IBdJ|2oqN 8S1_ eǖry٤ #`$c9=E6%n<:戀'tzrHLw tcB�3pR.ʰu:4LB\u86ۇߖMQn PK������!Hid5jJ�� ��&���ipfshttpclient-0.4.12.dist-info/RECORDҪ,p (" LMo' 7;JFWZw Ʊ4߾7$Y UtM% ф{(jd#J) &&jI7DcZP^Az_kr9 ĭ}Ň>xw2'nnA"AO"ܠBR^(:AV<n仄zW,$]R ru/$ENb8WO՘w0nG) 4O^vnH*׻BH2U2/ae( 9#!:0lAPzeZL{qJ?CV78DOc^mDYs jX,oK&hs/Ţ $?y3{DKǿ"_xyp{-5<HɢeL?ƂMc?7T6\γhp9�TwZ* ^:E8¿� 3֥>K*!]cf3Wan]e�gQ.vByکغEn?-PW]&G5/6*7X+5x2=rRZSW}ö8c>պgL<( >d 9 hx|۹PU儧.3Mo�{31<v3z0û2&Jx~;)q5q׀4hiGKz8`%>Aa0I^jYXFם^ʁ3n(;㥎t|~O cȏSJP {TqtE:qSl9b7x=elS)[0U4Mﺰ 4pW̘7a䐨keӖܷG)MPo8{!EN͝y X<!0fHvmXh;|sبL3*˹Bh옕=Z-ot6v _o3&64ʐ6kEd"tl|EXn nl|V:a\3wa;a\;k �vC|kfwQ~U=<6Q[So}#g7g}0]=&mh"$1vWL/$~ًȩcVeT^ k&h3RS0&El~į8#7IYK?~'<<XT wY=N f>$DZw%<;p{-ktBū310?*3tu@G1BQ$KKo{OY>Z%OMJ/4s.""QfS릈 PK�����vN}�������������������ipfshttpclient/__init__.pyPK�����vNk@ �� ���������������ipfshttpclient/encoding.pyPK�����vN4��������������� ��ipfshttpclient/exceptions.pyPK�����N`82��2�������������3/��ipfshttpclient/http.pyPK�����vN)]��]�������������<b��ipfshttpclient/multipart.pyPK����� Nc-#��#��"�������������ipfshttpclient/requests_wrapper.pyPK�����vN Ny{��{���������������ipfshttpclient/utils.pyPK�����NO?DJ&��&�������������n��ipfshttpclient/version.pyPK�����vN%g �� ��!�������������ipfshttpclient/client/__init__.pyPK�����vN.����������������ipfshttpclient/client/base.pyPK�����vNRN@���� �����������^$�ipfshttpclient/client/bitswap.pyPK�����vNQ=���������������Q+�ipfshttpclient/client/block.pyPK�����vND��D��"�����������3�ipfshttpclient/client/bootstrap.pyPK�����vNԍ���������������:�ipfshttpclient/client/config.pyPK�����vN.!���������������MB�ipfshttpclient/client/dht.pyPK�����vNC)��)�������������Z�ipfshttpclient/client/files.pyPK�����vN>j  �� ��������������ipfshttpclient/client/key.pyPK�����vNL����&�����������;�ipfshttpclient/client/miscellaneous.pyPK�����vNL���������������8�ipfshttpclient/client/name.pyPK�����vN1i'��i'�������������)�ipfshttpclient/client/object.pyPK�����vN.;]V��V��������������ipfshttpclient/client/pin.pyPK�����vN���������������_�ipfshttpclient/client/pubsub.pyPK�����vNp��p��������������ipfshttpclient/client/repo.pyPK�����vN!���������������e�ipfshttpclient/client/swarm.pyPK�����vNt[q��q��!�����������(/�ipfshttpclient/client/unstable.pyPK�����Nr$IGST��T��'�����������D�ipfshttpclient-0.4.12.dist-info/LICENSEPK������!HMuS���a���%�����������qI�ipfshttpclient-0.4.12.dist-info/WHEELPK������!H#?G$��%��(�����������J�ipfshttpclient-0.4.12.dist-info/METADATAPK������!Hid5jJ�� ��&�����������qZ�ipfshttpclient-0.4.12.dist-info/RECORDPK��������_���