PKILBDipfsapi/__init__.py"""Python IPFS API client library""" from __future__ import absolute_import from .version import __version__ ########################### # Import stable API parts # ########################### from . import exceptions from .client import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_BASE from .client import VERSION_MINIMUM, VERSION_MAXIMUM from .client import Client, assert_version, connect PKxL**9E9Eipfsapi/client.py# -*- coding: utf-8 -*- """IPFS API Bindings for Python. Classes: * Client – a TCP client for interacting with an IPFS daemon """ from __future__ import absolute_import import os import warnings from . import http, multipart, utils, exceptions, encoding DEFAULT_HOST = str(os.environ.get("PY_IPFSAPI_DEFAULT_HOST", 'localhost')) DEFAULT_PORT = int(os.environ.get("PY_IPFSAPI_DEFAULT_PORT", 5001)) DEFAULT_BASE = str(os.environ.get("PY_IPFSAPI_DEFAULT_BASE", 'api/v0')) VERSION_MINIMUM = "0.4.3" VERSION_MAXIMUM = "0.5.0" def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM): """Make sure that the given daemon version is supported by this client version. Raises ------ ~ipfsapi.exceptions.VersionMismatch Parameters ---------- version : str The version of an IPFS daemon. minimum : str The minimal IPFS version to allow. maximum : str The maximum IPFS version to allow. """ # Convert version strings to integer tuples version = list(map(int, version.split('-', 1)[0].split('.'))) minimum = list(map(int, minimum.split('-', 1)[0].split('.'))) maximum = list(map(int, maximum.split('-', 1)[0].split('.'))) if minimum > version or version >= maximum: raise exceptions.VersionMismatch(version, minimum, maximum) def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, **defaults): """Create a new :class:`~ipfsapi.Client` instance and connect to the daemon to validate that its version is supported. Raises ------ ~ipfsapi.exceptions.VersionMismatch ~ipfsapi.exceptions.ErrorResponse ~ipfsapi.exceptions.ConnectionError ~ipfsapi.exceptions.ProtocolError ~ipfsapi.exceptions.StatusError ~ipfsapi.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the :class:`~ipfsapi.Client` class. Returns ------- ~ipfsapi.Client """ # Create client instance client = Client(host, port, base, chunk_size, **defaults) # Query version number from daemon and validate it assert_version(client.version()['Version']) return client class SubChannel: """ Wrapper for a pubsub subscription object that allows for easy closing of subscriptions. """ def __init__(self, sub): self.__sub = sub def read_message(self): return next(self.__sub) def __iter__(self): return self.__sub def close(self): self.__sub.close() def __enter__(self): return self def __exit__(self, *a): self.close() class Client(object): """A TCP client for interacting with an IPFS daemon. A :class:`~ipfsapi.Client` instance will not actually establish a connection to the daemon until at least one of it's methods is called. 
Parameters ---------- host : str Hostname or IP address of the computer running the ``ipfs daemon`` node (defaults to the local system) port : int The API port of the IPFS daemon (usually 5001) base : str Path of the daemon's API (currently always ``api/v0``) chunk_size : int The size of the chunks to break uploaded files and text content into """ _clientfactory = http.HTTPClient def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, **defaults): """Connects to the API port of an IPFS node.""" self.chunk_size = chunk_size self._client = self._clientfactory(host, port, base, **defaults) def add(self, files, recursive=False, pattern='**', *args, **kwargs): """Add a file, or directory of files to IPFS. .. code-block:: python >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f: ... numbytes = f.write('Mary had a little lamb') >>> c.add('nurseryrhyme.txt') {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab', 'Name': 'nurseryrhyme.txt'} Parameters ---------- files : str A filepath to either a file or directory recursive : bool Controls if files in subdirectories are added or not pattern : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep trickle : bool Use trickle-dag format (optimized for streaming) when generating the dag; see the IPFS FAQ for more information (Default: ``False``) only_hash : bool Only chunk and hash, but do not write to disk (Default: ``False``) wrap_with_directory : bool Wrap files with a directory object to preserve their filename (Default: ``False``) chunker : str The chunking algorithm to use pin : bool Pin this object when adding (Default: ``True``) Returns ------- dict: File name and hash of the added file node """ #PY2: No support for kw-only parameters after glob parameters opts = { "trickle": kwargs.pop("trickle", False), "only-hash": kwargs.pop("only_hash", False), "wrap-with-directory": kwargs.pop("wrap_with_directory", False), "pin": kwargs.pop("pin", True) } if "chunker" in kwargs: opts["chunker"] = kwargs.pop("chunker") kwargs.setdefault("opts", opts) body, headers = multipart.stream_filesystem_node( files, recursive, pattern, self.chunk_size ) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) def get(self, multihash, **kwargs): """Downloads a file, or directory of files from IPFS. Files are placed in the current working directory. Parameters ---------- multihash : str The path to the IPFS object(s) to be output """ args = (multihash,) return self._client.download('/get', args, **kwargs) def cat(self, multihash, **kwargs): r"""Retrieves the contents of a file identified by hash. .. code-block:: python >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'\n\n\n\nipfs example viewer</…' Parameters ---------- multihash : str The path to the IPFS object(s) to be retrieved Returns ------- str : File contents """ args = (multihash,) return self._client.request('/cat', args, **kwargs) def ls(self, multihash, **kwargs): """Returns a list of objects linked to by the given hash. ..
code-block:: python >>> c.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Objects': [ {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174, 'Type': 2}, … {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55, 'Type': 2} ]} ]} Parameters ---------- multihash : str The path to the IPFS object(s) to list links from Returns ------- dict : Directory information and contents """ args = (multihash,) return self._client.request('/ls', args, decoder='json', **kwargs) def refs(self, multihash, **kwargs): """Returns a list of hashes of objects referenced by the given hash. .. code-block:: python >>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Parameters ---------- multihash : str Path to the object(s) to list refs from Returns ------- list """ args = (multihash,) return self._client.request('/refs', args, decoder='json', **kwargs) def refs_local(self, **kwargs): """Displays the hashes of all local objects. .. code-block:: python >>> c.refs_local() [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Returns ------- list """ return self._client.request('/refs/local', decoder='json', **kwargs) def block_stat(self, multihash, **kwargs): """Returns a dict with the size of the block with the given hash. .. code-block:: python >>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 258} Parameters ---------- multihash : str The base58 multihash of an existing block to stat Returns ------- dict : Information about the requested block """ args = (multihash,) return self._client.request('/block/stat', args, decoder='json', **kwargs) def block_get(self, multihash, **kwargs): r"""Returns the raw contents of a block. .. code-block:: python >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- multihash : str The base58 multihash of an existing block to get Returns ------- str : Value of the requested block """ args = (multihash,) return self._client.request('/block/get', args, **kwargs) def block_put(self, file, **kwargs): """Stores the contents of the given file object as an IPFS block. .. code-block:: python >>> c.block_put(io.BytesIO(b'Mary had a little lamb')) {'Key': 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'Size': 22} Parameters ---------- file : io.RawIOBase The data to be stored as an IPFS block Returns ------- dict : Information about the new block See :meth:`~ipfsapi.Client.block_stat` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/block/put', decoder='json', data=body, headers=headers, **kwargs) def bitswap_wantlist(self, peer=None, **kwargs): """Returns blocks currently on the bitswap wantlist. .. code-block:: python >>> c.bitswap_wantlist() {'Keys': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ]} Parameters ---------- peer : str Peer to show wantlist for. 
Returns ------- dict : List of wanted blocks """ args = (peer,) return self._client.request('/bitswap/wantlist', args, decoder='json', **kwargs) def bitswap_stat(self, **kwargs): """Returns some diagnostic information from the bitswap agent. .. code-block:: python >>> c.bitswap_stat() {'BlocksReceived': 96, 'DupBlksReceived': 73, 'DupDataReceived': 2560601, 'ProviderBufLen': 0, 'Peers': [ 'QmNZFQRxt9RMNm2VVtuV2Qx7q69bcMWRVXmr5CEkJEgJJP', 'QmNfCubGpwYZAQxX8LQDsYgB48C4GbfZHuYdexpX9mbNyT', 'QmNfnZ8SCs3jAtNPc8kf3WJqJqSoX7wsX7VqkLdEYMao4u', … ], 'Wantlist': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ] } Returns ------- dict : Statistics, peers and wanted blocks """ return self._client.request('/bitswap/stat', decoder='json', **kwargs) def bitswap_unwant(self, key, **kwargs): """ Remove a given block from wantlist. Parameters ---------- key : str Key to remove from wantlist. """ args = (key,) return self._client.request('/bitswap/unwant', args, **kwargs) def object_data(self, multihash, **kwargs): r"""Returns the raw bytes in an IPFS object. .. code-block:: python >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- str : Raw object data """ args = (multihash,) return self._client.request('/object/data', args, **kwargs) def object_new(self, template=None, **kwargs): """Creates a new object from an IPFS template. By default this creates and returns a new empty merkledag node, but you may pass an optional template argument to create a preformatted node. .. code-block:: python >>> c.object_new() {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} Parameters ---------- template : str Blueprints from which to construct the new object. Possible values: * ``"unixfs-dir"`` * ``None`` Returns ------- dict : Object hash """ args = (template,) if template is not None else () return self._client.request('/object/new', args, decoder='json', **kwargs) def object_links(self, multihash, **kwargs): """Returns the links pointed to by the specified object. .. code-block:: python >>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object hash and merkledag links """ args = (multihash,) return self._client.request('/object/links', args, decoder='json', **kwargs) def object_get(self, multihash, **kwargs): """Get and serialize the DAG node named by multihash. ..
code-block:: python >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object data and links """ args = (multihash,) return self._client.request('/object/get', args, decoder='json', **kwargs) def object_put(self, file, **kwargs): """Stores input as a DAG object and returns its key. .. code-block:: python >>> c.object_put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', 'Links': [ {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', 'Size': 8, 'Name': 'some link'} ] } Parameters ---------- file : io.RawIOBase (JSON) object from which the DAG object will be created Returns ------- dict : Hash and links of the created DAG object See :meth:`~ipfsapi.Client.object_links` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/object/put', decoder='json', data=body, headers=headers, **kwargs) def object_stat(self, multihash, **kwargs): """Get stats for the DAG node named by multihash. .. code-block:: python >>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'LinksSize': 256, 'NumLinks': 5, 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict """ args = (multihash,) return self._client.request('/object/stat', args, decoder='json', **kwargs) def object_patch_append_data(self, multihash, new_data, **kwargs): """Creates a new merkledag object based on an existing one. The new object will have the provided data appended to it, and will thus have a new Hash. .. code-block:: python >>> c.object_patch_append_data("QmZZmY … fTqm", io.BytesIO(b"bla")) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- multihash : str The hash of an ipfs object to modify new_data : io.RawIOBase The data to append to the object's data section Returns ------- dict : Hash of new object """ args = (multihash,) body, headers = multipart.stream_files(new_data, self.chunk_size) return self._client.request('/object/patch/append-data', args, decoder='json', data=body, headers=headers, **kwargs) def object_patch_add_link(self, root, name, ref, create=False, **kwargs): """Creates a new merkledag object based on an existing one. The new object will have a link to the provided object. .. code-block:: python >>> c.object_patch_add_link( ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2', ... 'Johnny', ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2' ...
) {'Hash': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'} Parameters ---------- root : str IPFS hash for the object being modified name : str name for the new link ref : str IPFS hash for the object being linked to create : bool Create intermediary nodes Returns ------- dict : Hash of new object """ kwargs.setdefault("opts", {"create": create}) args = ((root, name, ref),) return self._client.request('/object/patch/add-link', args, decoder='json', **kwargs) def object_patch_rm_link(self, root, link, **kwargs): """Creates a new merkledag object based on an existing one. The new object will lack a link to the specified object. .. code-block:: python >>> c.object_patch_rm_link( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... 'Johnny' ... ) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- root : str IPFS hash of the object to modify link : str name of the link to remove Returns ------- dict : Hash of new object """ args = ((root, link),) return self._client.request('/object/patch/rm-link', args, decoder='json', **kwargs) def object_patch_set_data(self, root, data, **kwargs): """Creates a new merkledag object based on an existing one. The new object will have the same links as the old object but with the provided data instead of the old object's data contents. .. code-block:: python >>> c.object_patch_set_data( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... io.BytesIO(b'bla') ... ) {'Hash': 'QmSw3k2qkv4ZPsbu9DVEJaTMszAQWNgM1FTFYpfZeNQWrd'} Parameters ---------- root : str IPFS hash of the object to modify data : io.RawIOBase The new data to store in root Returns ------- dict : Hash of new object """ args = (root,) body, headers = multipart.stream_files(data, self.chunk_size) return self._client.request('/object/patch/set-data', args, decoder='json', data=body, headers=headers, **kwargs) def file_ls(self, multihash, **kwargs): """Lists directory contents for Unix filesystem objects. The result contains size information. For files, the child size is the total size of the file contents. For directories, the child size is the IPFS link size. The path can be a prefixless reference; in this case, it is assumed that it is an ``/ipfs/`` reference and not ``/ipns/``. .. code-block:: python >>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'}, 'Objects': { 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': { 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 0, 'Type': 'Directory', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 163, 'Type': 'File'}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1463, 'Type': 'File'}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947, 'Type': 'Directory'}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261, 'Type': 'Directory'}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 47, 'Type': 'File'} ] } }} Parameters ---------- multihash : str The path to the object(s) to list links from Returns ------- dict """ args = (multihash,) return self._client.request('/file/ls', args, decoder='json', **kwargs) def resolve(self, name, recursive=False, **kwargs): """Accepts an identifier and resolves it to the referenced item. 
There are a number of mutable name protocols that can link among themselves and into IPNS. For example, IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers. .. code-block:: python >>> c.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} >>> c.resolve("/ipns/ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- name : str The name to resolve recursive : bool Resolve until the result is an IPFS name Returns ------- dict : IPFS path of resource """ kwargs.setdefault("opts", {"recursive": recursive}) args = (name,) return self._client.request('/resolve', args, decoder='json', **kwargs) def key_list(self, **kwargs): """Returns a list of generated public keys that can be used with name_publish .. code-block:: python >>> c.key_list() [{'Name': 'self', 'Id': 'QmQf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'}, {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} ] Returns ------- list : List of dictionaries with Names and Ids of public keys. """ return self._client.request('/key/list', decoder='json', **kwargs) def key_gen(self, key_name, type, size=2048, **kwargs): """Adds a new public key that can be used for name_publish. .. code-block:: python >>> c.key_gen('example_key_name', 'rsa') {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} Parameters ---------- key_name : str Name of the new Key to be generated. Used to reference the Keys. type : str Type of key to generate. The current possible key types are: * ``"rsa"`` * ``"ed25519"`` size : int Bitsize of key to generate Returns ------- dict : Key name and Key Id """ opts = {"type": type, "size": size} kwargs.setdefault("opts", opts) args = (key_name,) return self._client.request('/key/gen', args, decoder='json', **kwargs) def key_rm(self, key_name, *key_names, **kwargs): """Remove a keypair .. code-block:: python >>> c.key_rm("bla") {"Keys": [ {"Name": "bla", "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} ]} Parameters ---------- key_name : str Name of the key(s) to remove. Returns ------- dict : List of key names and IDs that have been removed """ args = (key_name,) + key_names return self._client.request('/key/rm', args, decoder='json', **kwargs) def key_rename(self, key_name, new_key_name, **kwargs): """Rename a keypair .. code-block:: python >>> c.key_rename("bla", "personal") {"Was": "bla", "Now": "personal", "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", "Overwrite": False} Parameters ---------- key_name : str Current name of the key to rename new_key_name : str New name of the key Returns ------- dict : Names and ID of the key before and after the rename """ args = (key_name, new_key_name) return self._client.request('/key/rename', args, decoder='json', **kwargs) def name_publish(self, ipfs_path, resolve=True, lifetime="24h", ttl=None, key=None, **kwargs): """Publishes an object to IPNS. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In publish, the default value of *name* is your own identity public key. ..
code-block:: python >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} Parameters ---------- ipfs_path : str IPFS path of the object to be published resolve : bool Resolve given path before publishing lifetime : str Time duration that the record will be valid for Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. Valid units are: * ``"ns"`` * ``"us"`` (or ``"µs"``) * ``"ms"`` * ``"s"`` * ``"m"`` * ``"h"`` ttl : int Time duration this record should be cached for key : str Name of the key to be used, as listed by 'ipfs key list'. Returns ------- dict : IPNS hash and the IPFS path it points at """ opts = {"lifetime": lifetime, "resolve": resolve} if ttl: opts["ttl"] = ttl if key: opts["key"] = key kwargs.setdefault("opts", opts) args = (ipfs_path,) return self._client.request('/name/publish', args, decoder='json', **kwargs) def name_resolve(self, name=None, recursive=False, nocache=False, **kwargs): """Gets the value currently published at an IPNS name. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In resolve, the default value of ``name`` is your own identity public key. .. code-block:: python >>> c.name_resolve() {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} Parameters ---------- name : str The IPNS name to resolve (defaults to the connected node) recursive : bool Resolve until the result is not an IPFS name (default: false) nocache : bool Do not use cached entries (default: false) Returns ------- dict : The IPFS path the IPNS hash points at """ kwargs.setdefault("opts", {"recursive": recursive, "nocache": nocache}) args = (name,) if name is not None else () return self._client.request('/name/resolve', args, decoder='json', **kwargs) def dns(self, domain_name, recursive=False, **kwargs): """Resolves DNS links to the referenced object. Multihashes are hard to remember, but domain names are usually easy to remember. To create memorable aliases for multihashes, DNS TXT records can point to other DNS links, IPFS objects, IPNS keys, etc. This command resolves those links to the referenced object. For example, with this DNS TXT record:: >>> import dns.resolver >>> a = dns.resolver.query("ipfs.io", "TXT") >>> a.response.answer[0].items[0].to_text() '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' The resolver will give:: >>> c.dns("ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- domain_name : str The domain name to resolve recursive : bool Resolve until the name is not a DNS link Returns ------- dict : Resource where a DNS entry points to """ kwargs.setdefault("opts", {"recursive": recursive}) args = (domain_name,) return self._client.request('/dns', args, decoder='json', **kwargs) def pin_add(self, path, *paths, **kwargs): """Pins objects to local storage. Stores IPFS object(s) from a given path locally to disk. ..
code-block:: python >>> c.pin_add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d") {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} Parameters ---------- path : str Path to object(s) to be pinned recursive : bool Recursively pin the object linked to by the specified object(s) Returns ------- dict : List of IPFS objects that have been pinned """ #PY2: No support for kw-only parameters after glob parameters if "recursive" in kwargs: kwargs.setdefault("opts", {"recursive": kwargs.pop("recursive")}) args = (path,) + paths return self._client.request('/pin/add', args, decoder='json', **kwargs) def pin_rm(self, path, *paths, **kwargs): """Removes a pinned object from local storage. Removes the pin from the given object allowing it to be garbage collected if needed. .. code-block:: python >>> c.pin_rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d') {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} Parameters ---------- path : str Path to object(s) to be unpinned recursive : bool Recursively unpin the object linked to by the specified object(s) Returns ------- dict : List of IPFS objects that have been unpinned """ #PY2: No support for kw-only parameters after glob parameters if "recursive" in kwargs: kwargs.setdefault("opts", {"recursive": kwargs["recursive"]}) del kwargs["recursive"] args = (path,) + paths return self._client.request('/pin/rm', args, decoder='json', **kwargs) def pin_ls(self, type="all", **kwargs): """Lists objects pinned to local storage. By default, all pinned objects are returned, but the ``type`` flag or arguments can restrict that to a specific pin type or to some specific objects respectively. .. code-block:: python >>> c.pin_ls() {'Keys': { 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}, 'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'}, 'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'}, … 'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'}}} Parameters ---------- type : str The type of pinned keys to list. Can be: * ``"direct"`` * ``"indirect"`` * ``"recursive"`` * ``"all"`` Returns ------- dict : Hashes of pinned IPFS objects and why they are pinned """ kwargs.setdefault("opts", {"type": type}) return self._client.request('/pin/ls', decoder='json', **kwargs) def pin_update(self, from_path, to_path, **kwargs): """Replaces one pin with another. Updates one pin to another, making sure that all objects in the new pin are local. Then removes the old pin. This is an optimized version of first using :meth:`~ipfsapi.Client.pin_add` to add a new pin for an object and then using :meth:`~ipfsapi.Client.pin_rm` to remove the pin for the old object. .. code-block:: python >>> c.pin_update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", ... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH") {"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", "/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]} Parameters ---------- from_path : str Path to the old object to_path : str Path to the new object to be pinned unpin : bool Should the pin of the old object be removed?
(Default: ``True``) Returns ------- dict : List of IPFS objects affected by the pinning operation """ #PY2: No support for kw-only parameters after glob parameters if "unpin" in kwargs: kwargs.setdefault("opts", {"unpin": kwargs["unpin"]}) del kwargs["unpin"] args = (from_path, to_path) return self._client.request('/pin/update', args, decoder='json', **kwargs) def pin_verify(self, path, *paths, **kwargs): """Verify that recursive pins are complete. Scan the repo for pinned object graphs and check their integrity. Issues will be reported back with a helpful human-readable error message to aid in error recovery. This is useful to help recover from datastore corruptions (such as when accidentally deleting files added using the filestore backend). This function returns an iterator that needs to be closed using a context manager (``with``-statement) or using the ``.close()`` method. .. code-block:: python >>> with c.pin_verify("QmN…TTZ", verbose=True) as pin_verify_iter: ... for item in pin_verify_iter: ... print(item) ... {"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True} {"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True} {"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True} … Parameters ---------- path : str Path to object(s) to be checked verbose : bool Also report status of items that were OK? (Default: ``False``) Returns ------- iterable """ #PY2: No support for kw-only parameters after glob parameters if "verbose" in kwargs: kwargs.setdefault("opts", {"verbose": kwargs["verbose"]}) del kwargs["verbose"] args = (path,) + paths return self._client.request('/pin/verify', args, decoder='json', stream=True, **kwargs) def repo_gc(self, **kwargs): """Removes stored objects that are not pinned from the repo. .. code-block:: python >>> c.repo_gc() [{'Key': 'QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQuwaHG2mpW2'}, {'Key': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'}, {'Key': 'QmRVBnxUCsD57ic5FksKYadtyUbMsyo9KYQKKELajqAp4q'}, … {'Key': 'QmYp4TeCurXrhsxnzt5wqLqqUz8ZRg5zsc7GuUrUSDtwzP'}] Performs a garbage collection sweep of the local set of stored objects and removes ones that are not pinned in order to reclaim hard disk space. Returns the hashes of all collected objects. Returns ------- dict : List of IPFS objects that have been removed """ return self._client.request('/repo/gc', decoder='json', **kwargs) def repo_stat(self, **kwargs): """Displays the repo's status. Returns the number of objects in the repo and the repo's size, version, and path. .. code-block:: python >>> c.repo_stat() {'NumObjects': 354, 'RepoPath': '…/.local/share/ipfs', 'Version': 'fs-repo@4', 'RepoSize': 13789310} Returns ------- dict : General information about the IPFS file repository +------------+-------------------------------------------------+ | NumObjects | Number of objects in the local repo. | +------------+-------------------------------------------------+ | RepoPath | The path to the repo being currently used. | +------------+-------------------------------------------------+ | RepoSize | Size in bytes that the repo is currently using. | +------------+-------------------------------------------------+ | Version | The repo version. | +------------+-------------------------------------------------+ """ return self._client.request('/repo/stat', decoder='json', **kwargs) def id(self, peer=None, **kwargs): """Shows IPFS Node ID info. Returns the PublicKey, ProtocolVersion, ID, AgentVersion and Addresses of the connected daemon or some other node. ..
code-block:: python >>> c.id() {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc', 'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=', 'AgentVersion': 'go-libp2p/3.3.4', 'ProtocolVersion': 'ipfs/0.1.0', 'Addresses': [ '/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc', '/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', '/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', '/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc', '/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc', '/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc', '/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc', '/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', '/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', '/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']} Parameters ---------- peer : str Peer.ID of the node to look up (local node if ``None``) Returns ------- dict : Information about the IPFS node """ args = (peer,) if peer is not None else () return self._client.request('/id', args, decoder='json', **kwargs) def bootstrap(self, **kwargs): """Compatibility alias for :meth:`~ipfsapi.Client.bootstrap_list`.""" return self.bootstrap_list(**kwargs) def bootstrap_list(self, **kwargs): """Returns the addresses of peers used during initial discovery of the IPFS network. Peers are output in the format ``<multiaddr>/<peerID>``. .. code-block:: python >>> c.bootstrap_list() {'Peers': [ '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRa … ca9z', '/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKD … KrGM', … '/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3p … QBU3']} Returns ------- dict : List of known bootstrap peers """ return self._client.request('/bootstrap', decoder='json', **kwargs) def bootstrap_add(self, peer, *peers, **kwargs): """Adds peers to the bootstrap list. Parameters ---------- peer : str IPFS MultiAddr of a peer to add to the list Returns ------- dict """ args = (peer,) + peers return self._client.request('/bootstrap/add', args, decoder='json', **kwargs) def bootstrap_rm(self, peer, *peers, **kwargs): """Removes peers from the bootstrap list. Parameters ---------- peer : str IPFS MultiAddr of a peer to remove from the list Returns ------- dict """ args = (peer,) + peers return self._client.request('/bootstrap/rm', args, decoder='json', **kwargs) def swarm_peers(self, **kwargs): """Returns the addresses & IDs of currently connected peers. .. code-block:: python >>> c.swarm_peers() {'Strings': [ '/ip4/101.201.40.124/tcp/40001/ipfs/QmZDYAhmMDtnoC6XZ … kPZc', '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.223.59.174/tcp/4001/ipfs/QmeWdgoZezpdHz1PX8 … 1jB6', … '/ip6/fce3: … :f140/tcp/43901/ipfs/QmSoLnSGccFuZQJzRa … ca9z']} Returns ------- dict : List of multiaddrs of currently connected peers """ return self._client.request('/swarm/peers', decoder='json', **kwargs) def swarm_addrs(self, **kwargs): """Returns the addresses of currently connected peers by peer id. ..
code-block:: python >>> pprint(c.swarm_addrs()) {'Addrs': { 'QmNMVHJTSZHTWMWBbmBrQgkA1hZPWYuVJx2DpSGESWW6Kn': [ '/ip4/10.1.0.1/tcp/4001', '/ip4/127.0.0.1/tcp/4001', '/ip4/51.254.25.16/tcp/4001', '/ip6/2001:41d0:b:587:3cae:6eff:fe40:94d8/tcp/4001', '/ip6/2001:470:7812:1045::1/tcp/4001', '/ip6/::1/tcp/4001', '/ip6/fc02:2735:e595:bb70:8ffc:5293:8af8:c4b7/tcp/4001', '/ip6/fd00:7374:6172:100::1/tcp/4001', '/ip6/fd20:f8be:a41:0:c495:aff:fe7e:44ee/tcp/4001', '/ip6/fd20:f8be:a41::953/tcp/4001'], 'QmNQsK1Tnhe2Uh2t9s49MJjrz7wgPHj4VyrZzjRe8dj7KQ': [ '/ip4/10.16.0.5/tcp/4001', '/ip4/127.0.0.1/tcp/4001', '/ip4/172.17.0.1/tcp/4001', '/ip4/178.62.107.36/tcp/4001', '/ip6/::1/tcp/4001'], … }} Returns ------- dict : Multiaddrs of peers by peer id """ return self._client.request('/swarm/addrs', decoder='json', **kwargs) def swarm_connect(self, address, *addresses, **kwargs): """Opens a connection to a given address. This will open a new direct connection to a peer address. The address format is an IPFS multiaddr:: /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ .. code-block:: python >>> c.swarm_connect("/ip4/104.131.131.82/tcp/4001/ipfs/Qma … uvuJ") {'Strings': ['connect QmaCpDMGvV2BGHeYERUEnRQAwe3 … uvuJ success']} Parameters ---------- address : str Address of peer to connect to Returns ------- dict : Textual connection status report """ args = (address,) + addresses return self._client.request('/swarm/connect', args, decoder='json', **kwargs) def swarm_disconnect(self, address, *addresses, **kwargs): """Closes the connection to a given address. This will close a connection to a peer address. The address format is an IPFS multiaddr:: /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ The disconnect is not permanent; if IPFS needs to talk to that address later, it will reconnect. .. code-block:: python >>> c.swarm_disconnect("/ip4/104.131.131.82/tcp/4001/ipfs/Qm … uJ") {'Strings': ['disconnect QmaCpDMGvV2BGHeYERUEnRQA … uvuJ success']} Parameters ---------- address : str Address of peer to disconnect from Returns ------- dict : Textual connection status report """ args = (address,) + addresses return self._client.request('/swarm/disconnect', args, decoder='json', **kwargs) def swarm_filters_add(self, address, *addresses, **kwargs): """Adds a given multiaddr filter to the filter list. This will add an address filter to the daemon's swarm. Filters applied this way will not persist across daemon reboots; to achieve that, add your filters to the configuration file. .. code-block:: python >>> c.swarm_filters_add("/ip4/192.168.0.0/ipcidr/16") {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} Parameters ---------- address : str Multiaddr to filter Returns ------- dict : List of swarm filters added """ args = (address,) + addresses return self._client.request('/swarm/filters/add', args, decoder='json', **kwargs) def swarm_filters_rm(self, address, *addresses, **kwargs): """Removes a given multiaddr filter from the filter list. This will remove an address filter from the daemon's swarm. Filters removed this way will not persist across daemon reboots; to achieve that, remove your filters from the configuration file. ..
code-block:: python >>> c.swarm_filters_rm("/ip4/192.168.0.0/ipcidr/16") {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} Parameters ---------- address : str Multiaddr filter to remove Returns ------- dict : List of swarm filters removed """ args = (address,) + addresses return self._client.request('/swarm/filters/rm', args, decoder='json', **kwargs) def dht_query(self, peer_id, *peer_ids, **kwargs): """Finds the closest Peer IDs to a given Peer ID by querying the DHT. .. code-block:: python >>> c.dht_query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ") [{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF', 'Extra': '', 'Type': 2, 'Responses': None}, {'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f', 'Extra': '', 'Type': 2, 'Responses': None}, {'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs', 'Extra': '', 'Type': 2, 'Responses': None}, … {'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv', 'Extra': '', 'Type': 1, 'Responses': []}] Parameters ---------- peer_id : str The peerID to run the query against Returns ------- dict : List of peers IDs """ args = (peer_id,) + peer_ids return self._client.request('/dht/query', args, decoder='json', **kwargs) def dht_findprovs(self, multihash, *multihashes, **kwargs): """Finds peers in the DHT that can provide a specific value. .. code-block:: python >>> c.dht_findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2") [{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': '', 'Extra': '', 'Type': 4, 'Responses': [ {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None} ]}, {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Extra': '', 'Type': 1, 'Responses': [ {'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [ '/ip4/127.0.0.1/tcp/4001', '/ip4/172.17.0.8/tcp/4001', '/ip6/::1/tcp/4001', '/ip4/52.32.109.74/tcp/1028' ]} ]}] Parameters ---------- multihash : str The DHT key to find providers for Returns ------- dict : List of provider Peer IDs """ args = (multihash,) + multihashes return self._client.request('/dht/findprovs', args, decoder='json', **kwargs) def dht_findpeer(self, peer_id, *peer_ids, **kwargs): """Queries the DHT for all of the associated multiaddresses. .. code-block:: python >>> c.dht_findpeer("QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZN … MTLZ") [{'ID': 'QmfVGMFrwW6AV6fTWmD6eocaTybffqAvkVLXQEFrYdk6yc', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmTKiUdjbRjeN9yPhNhG1X38YNuBdjeiV9JXYWzCAJ4mj5', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmTGkgHSsULk8p3AKTAqKixxidZQXFyF7mCURcutPqrwjQ', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': '', 'Extra': '', 'Type': 2, 'Responses': [ {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Addrs': [ '/ip4/10.9.8.1/tcp/4001', '/ip6/::1/tcp/4001', '/ip4/164.132.197.107/tcp/4001', '/ip4/127.0.0.1/tcp/4001']} ]}] Parameters ---------- peer_id : str The ID of the peer to search for Returns ------- dict : List of multiaddrs """ args = (peer_id,) + peer_ids return self._client.request('/dht/findpeer', args, decoder='json', **kwargs) def dht_get(self, key, *keys, **kwargs): """Queries the DHT for its best value related to given key. There may be several different values for a given key stored in the DHT; in this context *best* means the record that is most desirable. 
There is no one metric for *best*: it depends entirely on the key type. For IPNS, *best* is the record that is both valid and has the highest sequence number (freshest). Different key types may specify other rules for what they consider to be the *best*. Parameters ---------- key : str One or more keys whose values should be looked up Returns ------- str """ args = (key,) + keys res = self._client.request('/dht/get', args, decoder='json', **kwargs) if isinstance(res, dict) and "Extra" in res: return res["Extra"] else: for r in res: if "Extra" in r and len(r["Extra"]) > 0: return r["Extra"] raise exceptions.Error("empty response from DHT") def dht_put(self, key, value, **kwargs): """Writes a key/value pair to the DHT. Given a key of the form ``/foo/bar`` and a value of any form, this will write that value to the DHT with that key. Keys have two parts: a keytype (foo) and the key name (bar). IPNS uses the ``/ipns/`` keytype, and expects the key name to be a Peer ID. IPNS entries are formatted with a special structure. You may only use keytypes that are supported in your ``ipfs`` binary: ``go-ipfs`` currently only supports the ``/ipns/`` keytype. Unless you have a relatively deep understanding of the key's internal structure, you likely want to use :meth:`~ipfsapi.Client.name_publish` instead. Value is arbitrary text. .. code-block:: python >>> c.dht_put("QmVgNoP89mzpgEAAqK8owYoDEyB97Mkc … E9Uc", "test123") [{'ID': 'QmfLy2aqbhU1RqZnGQyqHSovV8tDufLUaPfN1LNtg5CvDZ', 'Extra': '', 'Type': 5, 'Responses': None}, {'ID': 'QmZ5qTkNvvZ5eFq9T4dcCEK7kX8L7iysYEpvQmij9vokGE', 'Extra': '', 'Type': 5, 'Responses': None}, {'ID': 'QmYqa6QHCbe6eKiiW6YoThU5yBy8c3eQzpiuW22SgVWSB8', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': 'QmP6TAKVDCziLmx9NV8QGekwtf7ZMuJnmbeHMjcfoZbRMd', 'Extra': '', 'Type': 1, 'Responses': []}] Parameters ---------- key : str A unique identifier value : str Arbitrary text to associate with the input (2048 bytes or less) Returns ------- list """ args = (key, value) return self._client.request('/dht/put', args, decoder='json', **kwargs) def ping(self, peer, *peers, **kwargs): """Provides round-trip latency information for the routing system. Finds nodes via the routing system, sends pings, waits for pongs, and prints out round-trip latency information. .. code-block:: python >>> c.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n") [{'Success': True, 'Time': 0, 'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'}, {'Success': False, 'Time': 0, 'Text': 'Peer lookup error: routing: not found'}] Parameters ---------- peer : str ID of peer to be pinged count : int Number of ping messages to send (Default: ``10``) Returns ------- list : Progress reports from the ping """ #PY2: No support for kw-only parameters after glob parameters if "count" in kwargs: kwargs.setdefault("opts", {"count": kwargs["count"]}) del kwargs["count"] args = (peer,) + peers return self._client.request('/ping', args, decoder='json', **kwargs) def config(self, key, value=None, **kwargs): """Controls configuration variables. .. code-block:: python >>> c.config("Addresses.Gateway") {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'} >>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081") {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'} Parameters ---------- key : str The key of the configuration entry (e.g.
"Addresses.API") value : dict The value to set the configuration entry to Returns ------- dict : Requested/updated key and its (new) value """ args = (key, value) return self._client.request('/config', args, decoder='json', **kwargs) def config_show(self, **kwargs): """Returns a dict containing the server's configuration. .. warning:: The configuration file contains private key data that must be handled with care. .. code-block:: python >>> config = c.config_show() >>> config['Addresses'] {'API': '/ip4/127.0.0.1/tcp/5001', 'Gateway': '/ip4/127.0.0.1/tcp/8080', 'Swarm': ['/ip4/0.0.0.0/tcp/4001', '/ip6/::/tcp/4001']}, >>> config['Discovery'] {'MDNS': {'Enabled': True, 'Interval': 10}} Returns ------- dict : The entire IPFS daemon configuration """ return self._client.request('/config/show', decoder='json', **kwargs) def config_replace(self, *args, **kwargs): """Replaces the existing config with a user-defined config. Make sure to back up the config file first if neccessary, as this operation can't be undone. """ return self._client.request('/config/replace', args, decoder='json', **kwargs) def log_level(self, subsystem, level, **kwargs): r"""Changes the logging output of a running daemon. .. code-block:: python >>> c.log_level("path", "info") {'Message': "Changed log level of 'path' to 'info'\n"} Parameters ---------- subsystem : str The subsystem logging identifier (Use ``"all"`` for all subsystems) level : str The desired logging level. Must be one of: * ``"debug"`` * ``"info"`` * ``"warning"`` * ``"error"`` * ``"fatal"`` * ``"panic"`` Returns ------- dict : Status message """ args = (subsystem, level) return self._client.request('/log/level', args, decoder='json', **kwargs) def log_ls(self, **kwargs): """Lists the logging subsystems of a running daemon. .. code-block:: python >>> c.log_ls() {'Strings': [ 'github.com/ipfs/go-libp2p/p2p/host', 'net/identify', 'merkledag', 'providers', 'routing/record', 'chunk', 'mfs', 'ipns-repub', 'flatfs', 'ping', 'mockrouter', 'dagio', 'cmds/files', 'blockset', 'engine', 'mocknet', 'config', 'commands/http', 'cmd/ipfs', 'command', 'conn', 'gc', 'peerstore', 'core', 'coreunix', 'fsrepo', 'core/server', 'boguskey', 'github.com/ipfs/go-libp2p/p2p/host/routed', 'diagnostics', 'namesys', 'fuse/ipfs', 'node', 'secio', 'core/commands', 'supernode', 'mdns', 'path', 'table', 'swarm2', 'peerqueue', 'mount', 'fuse/ipns', 'blockstore', 'github.com/ipfs/go-libp2p/p2p/host/basic', 'lock', 'nat', 'importer', 'corerepo', 'dht.pb', 'pin', 'bitswap_network', 'github.com/ipfs/go-libp2p/p2p/protocol/relay', 'peer', 'transport', 'dht', 'offlinerouting', 'tarfmt', 'eventlog', 'ipfsaddr', 'github.com/ipfs/go-libp2p/p2p/net/swarm/addr', 'bitswap', 'reprovider', 'supernode/proxy', 'crypto', 'tour', 'commands/cli', 'blockservice']} Returns ------- dict : List of daemon logging subsystems """ return self._client.request('/log/ls', decoder='json', **kwargs) def log_tail(self, **kwargs): r"""Reads log outputs as they are written. This function returns an iterator needs to be closed using a context manager (``with``-statement) or using the ``.close()`` method. .. code-block:: python >>> with c.log_tail() as log_tail_iter: ... for item in log_tail_iter: ... print(item) ... 
{"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.43353297Z"} {"event":"handleAddProviderBegin","system":"dht", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433642581Z"} {"event":"handleAddProvider","system":"dht","duration":91704, "key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433747513Z"} {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.435843012Z"} … Returns ------- iterable """ return self._client.request('/log/tail', decoder='json', stream=True, **kwargs) def version(self, **kwargs): """Returns the software version of the currently connected node. .. code-block:: python >>> c.version() {'Version': '0.4.3-rc2', 'Repo': '4', 'Commit': '', 'System': 'amd64/linux', 'Golang': 'go1.6.2'} Returns ------- dict : Daemon and system version information """ return self._client.request('/version', decoder='json', **kwargs) def files_cp(self, source, dest, **kwargs): """Copies files within the MFS. Due to the nature of IPFS this will not actually involve any of the file's content being copied. .. code-block:: python >>> c.files_ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0} ]} >>> c.files_cp("/test", "/bla") '' >>> c.files_ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0} ]} Parameters ---------- source : str Filepath within the MFS to copy from dest : str Destination filepath with the MFS to which the file will be copied to """ args = (source, dest) return self._client.request('/files/cp', args, **kwargs) def files_ls(self, path, **kwargs): """Lists contents of a directory in the MFS. .. code-block:: python >>> c.files_ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0} ]} Parameters ---------- path : str Filepath within the MFS Returns ------- dict : Directory entries """ args = (path,) return self._client.request('/files/ls', args, decoder='json', **kwargs) def files_mkdir(self, path, parents=False, **kwargs): """Creates a directory within the MFS. .. code-block:: python >>> c.files_mkdir("/test") b'' Parameters ---------- path : str Filepath within the MFS parents : bool Create parent directories as needed and do not raise an exception if the requested directory already exists """ kwargs.setdefault("opts", {"parents": parents}) args = (path,) return self._client.request('/files/mkdir', args, **kwargs) def files_stat(self, path, **kwargs): """Returns basic ``stat`` information for an MFS file (including its hash). .. code-block:: python >>> c.files_stat("/test") {'Hash': 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', 'Size': 0, 'CumulativeSize': 4, 'Type': 'directory', 'Blocks': 0} Parameters ---------- path : str Filepath within the MFS Returns ------- dict : MFS file information """ args = (path,) return self._client.request('/files/stat', args, decoder='json', **kwargs) def files_rm(self, path, recursive=False, **kwargs): """Removes a file from the MFS. .. 
code-block:: python >>> c.files_rm("/bla/file") b'' Parameters ---------- path : str Filepath within the MFS recursive : bool Recursively remove directories? """ kwargs.setdefault("opts", {"recursive": recursive}) args = (path,) return self._client.request('/files/rm', args, **kwargs) def files_read(self, path, offset=0, count=None, **kwargs): """Reads a file stored in the MFS. .. code-block:: python >>> c.files_read("/bla/file") b'hi' Parameters ---------- path : str Filepath within the MFS offset : int Byte offset at which to begin reading count : int Maximum number of bytes to read Returns ------- str : MFS file contents """ opts = {"offset": offset} if count is not None: opts["count"] = count kwargs.setdefault("opts", opts) args = (path,) return self._client.request('/files/read', args, **kwargs) def files_write(self, path, file, offset=0, create=False, truncate=False, count=None, **kwargs): """Writes to a mutable file in the MFS. .. code-block:: python >>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True) b'' Parameters ---------- path : str Filepath within the MFS file : io.RawIOBase IO stream object with data that should be written offset : int Byte offset at which to begin writing create : bool Create the file if it does not exist truncate : bool Truncate the file to size zero before writing count : int Maximum number of bytes to read from the source ``file`` """ opts = {"offset": offset, "create": create, "truncate": truncate} if count is not None: opts["count"] = count kwargs.setdefault("opts", opts) args = (path,) body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/files/write', args, data=body, headers=headers, **kwargs) def files_mv(self, source, dest, **kwargs): """Moves files and directories within the MFS. .. code-block:: python >>> c.files_mv("/test/file", "/bla/file") b'' Parameters ---------- source : str Existing filepath within the MFS dest : str Destination to which the file will be moved in the MFS """ args = (source, dest) return self._client.request('/files/mv', args, **kwargs) def shutdown(self): """Stop the connected IPFS daemon instance. Sending any further requests after this will fail with ``ipfsapi.exceptions.ConnectionError``, until you start another IPFS daemon instance. """ try: return self._client.request('/shutdown') except exceptions.ConnectionError: # Sometimes the daemon kills the connection before sending a # response causing an incorrect `ConnectionError` to bubble pass ########### # HELPERS # ########### @utils.return_field('Hash') def add_bytes(self, data, **kwargs): """Adds a set of bytes as a file to IPFS. .. code-block:: python >>> c.add_bytes(b"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- data : bytes Content to be added as a file Returns ------- str : Hash of the added IPFS object """ body, headers = multipart.stream_bytes(data, self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) @utils.return_field('Hash') def add_str(self, string, **kwargs): """Adds a Python string as a file to IPFS. .. code-block:: python >>> c.add_str(u"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects.
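For example, a text generator can be streamed chunk-wise instead of being buffered in memory first (a sketch; the content here is hypothetical and the resulting hash depends entirely on it): .. code-block:: python >>> lines = (u"line %d\n" % i for i in range(1000)) >>> c.add_str(lines) 'Qm…'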
Parameters ---------- string : str Content to be added as a file Returns ------- str : Hash of the added IPFS object """ body, headers = multipart.stream_text(string, self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) def add_json(self, json_obj, **kwargs): """Adds a json-serializable Python dict as a json file to IPFS. .. code-block:: python >>> c.add_json({'one': 1, 'two': 2, 'three': 3}) 'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob' Parameters ---------- json_obj : dict A json-serializable Python dictionary Returns ------- str : Hash of the added IPFS object """ return self.add_bytes(encoding.Json().encode(json_obj), **kwargs) def get_json(self, multihash, **kwargs): """Loads a json object from IPFS. .. code-block:: python >>> c.get_json('QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob') {'one': 1, 'two': 2, 'three': 3} Parameters ---------- multihash : str Multihash of the IPFS object to load Returns ------- object : Deserialized IPFS JSON object value """ return self.cat(multihash, decoder='json', **kwargs) def add_pyobj(self, py_obj, **kwargs): """Adds a picklable Python object as a file to IPFS. .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. Either switch to :meth:`~ipfsapi.Client.add_json` or use ``client.add_bytes(pickle.dumps(py_obj))`` instead. Please see :meth:`~ipfsapi.Client.get_pyobj` for the **security risks** of using these methods! .. code-block:: python >>> c.add_pyobj([0, 1.0, 2j, '3', 4e5]) 'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji' Parameters ---------- py_obj : object A picklable Python object Returns ------- str : Hash of the added IPFS object """ warnings.warn("Using `*_pyobj` on untrusted data is a security risk", DeprecationWarning) return self.add_bytes(encoding.Pickle().encode(py_obj), **kwargs) def get_pyobj(self, multihash, **kwargs): """Loads a pickled Python object from IPFS. .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. Either switch to :meth:`~ipfsapi.Client.get_json` or use ``pickle.loads(client.cat(multihash))`` instead. .. caution:: The pickle module is not intended to be secure against erroneous or maliciously constructed data. Never unpickle data received from an untrusted or unauthenticated source. Please **read** `this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to understand the security risks of using this method! .. code-block:: python >>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji') [0, 1.0, 2j, '3', 400000.0] Parameters ---------- multihash : str Multihash of the IPFS object to load Returns ------- object : Deserialized IPFS Python object """ warnings.warn("Using `*_pyobj` on untrusted data is a security risk", DeprecationWarning) return self.cat(multihash, decoder='pickle', **kwargs) def pubsub_ls(self, **kwargs): """Lists subscribed topics by name. This method returns data that contains a list of all topics the user is subscribed to. In order to subscribe to a topic, pubsub_sub must be called. .. code-block:: python # subscribe to a channel >>> with c.pubsub_sub("hello") as sub: ... c.pubsub_ls() { 'Strings' : ["hello"] } Returns ------- dict : Dictionary with the key "Strings" whose value is an array of topics we are subscribed to """ return self._client.request('/pubsub/ls', decoder='json', **kwargs) def pubsub_peers(self, topic=None, **kwargs): """List the peers we are pubsubbing with. Lists the IDs of other IPFS users who we are connected to via some topic.
Without specifying a topic, IPFS peers from all subscribed topics will be returned in the data. If a topic is specified, only the IDs of the peers from the specified topic will be returned in the data. .. code-block:: python >>> c.pubsub_peers() {'Strings': [ 'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8', 'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA', ... 'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a' ] } ## with a topic # subscribe to a channel >>> with c.pubsub_sub('hello') as sub: ... c.pubsub_peers(topic='hello') {'Strings': [ 'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8', ... # other peers connected to the same channel ] } Parameters ---------- topic : str The topic to list connected peers of (defaults to None, which lists peers for all topics) Returns ------- dict : Dictionary with the key "Strings" whose value is a list of the IDs of the IPFS peers we're pubsubbing with """ args = (topic,) if topic is not None else () return self._client.request('/pubsub/peers', args, decoder='json', **kwargs) def pubsub_pub(self, topic, payload, **kwargs): """Publish a message to a given pubsub topic. Publishing will publish the given payload (string) to everyone currently subscribed to the given topic. All data (including the ID of the publisher) is automatically base64 encoded when published. .. code-block:: python # publishes the message 'message' to the topic 'hello' >>> c.pubsub_pub('hello', 'message') [] Parameters ---------- topic : str Topic to publish to payload : str Data to be published to the given topic Returns ------- list : empty list """ args = (topic, payload) return self._client.request('/pubsub/pub', args, decoder='json', **kwargs) def pubsub_sub(self, topic, discover=False, **kwargs): """Subscribe to messages on a given topic. Subscribing to a topic in IPFS means that anytime a message is published to the topic, the subscribers will be notified of the publication. The connection with the pubsub topic is opened and read. The Subscription returned should be used inside a context manager to ensure that it is closed properly and not left hanging. .. code-block:: python >>> with c.pubsub_sub('testing') as sub: # publish a message 'hello' to the topic 'testing' ... c.pubsub_pub('testing', 'hello') ... for message in sub: ... print(message) ... # Stop reading the subscription after ... # we receive one publication ... break {'from': '<base64encoded IPFS id>', 'data': 'aGVsbG8=', 'topicIDs': ['testing']} # NOTE: in order to receive published data # you must already be subscribed to the topic at publication # time. Parameters ---------- topic : str Name of a topic to subscribe to discover : bool Try to discover other peers subscribed to the same topic (defaults to False) Returns ------- Generator wrapped in a context manager that maintains a connection stream to the given topic. """ args = (topic, discover) return SubChannel(self._client.request('/pubsub/sub', args, stream=True, decoder='json'))
ipfsapi/encoding.py
# -*- encoding: utf-8 -*- """Defines encoding-related classes. .. note:: The XML and ProtoBuf encoders are currently not functional. """ from __future__ import absolute_import import abc import codecs import io import json import pickle import six from . import exceptions class Encoding(object): """Abstract base for a data parser/encoder interface. """ __metaclass__ = abc.ABCMeta @abc.abstractmethod def parse_partial(self, raw): """Parses the given data and yields all complete data sets that can be built from it.
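Implementations are expected to buffer any trailing, incomplete data set until more data arrives via a further ``parse_partial`` call, or until ``parse_finalize`` is invoked; the ``Json`` class below is a worked example of this contract.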
Raises ------ ~ipfsapi.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- generator """ def parse_finalize(self): """Finalizes parsing based on remaining buffered data and yields the remaining data sets. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ return () def parse(self, raw): """Returns a Python object decoded from the bytes of this encoding. Raises ------ ~ipfsapi.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- object """ results = list(self.parse_partial(raw)) results.extend(self.parse_finalize()) return results[0] if len(results) == 1 else results @abc.abstractmethod def encode(self, obj): """Serializes a raw object into the corresponding encoding. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : object Object to be encoded """ class Dummy(Encoding): """Dummy parser/encoder that does nothing. """ name = "none" def parse_partial(self, raw): """Yields the data passed into this method. Parameters ---------- raw : bytes Any kind of data Returns ------- generator """ yield raw def encode(self, obj): """Returns the bytes representation of the data passed into this function. Parameters ---------- obj : object Any Python object Returns ------- bytes """ return six.b(str(obj)) class Json(Encoding): """JSON parser/encoder that handles concatenated JSON. """ name = 'json' def __init__(self): self._buffer = [] self._decoder1 = codecs.getincrementaldecoder('utf-8')() self._decoder2 = json.JSONDecoder() self._lasterror = None def parse_partial(self, data): """Incrementally decodes JSON data sets into Python objects. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ try: # Python 3 requires all JSON data to be a text string lines = self._decoder1.decode(data, False).split("\n") # Add first input line to last buffer line, if applicable, to # handle cases where the JSON string has been chopped in half # at the network level due to streaming if len(self._buffer) > 0 and self._buffer[-1] is not None: self._buffer[-1] += lines[0] self._buffer.extend(lines[1:]) else: self._buffer.extend(lines) except UnicodeDecodeError as error: raise exceptions.DecodingError('json', error) # Process data buffer index = 0 try: # Process each line as a separate buffer #PERF: This way the `.lstrip()` call almost always becomes a no-op; # even when it does return a different string, it only has to # allocate a new buffer for the currently processed line. while index < len(self._buffer): while self._buffer[index]: # Make sure buffer does not start with whitespace #PERF: `.lstrip()` does not reallocate if the string does # not actually start with whitespace.
self._buffer[index] = self._buffer[index].lstrip() # Handle case where the remainder of the line contained # only whitespace if not self._buffer[index]: self._buffer[index] = None continue # Try decoding the partial data buffer and return results # from this data = self._buffer[index] for index2 in range(index, len(self._buffer)): # If decoding doesn't succeed with the currently # selected buffer (very unlikely with our current # class of input data) then retry with appending # any other pending pieces of input data # This will happen with JSON data that contains # arbitrary new-lines: "{1:\n2,\n3:4}" if index2 > index: data += "\n" + self._buffer[index2] try: (obj, offset) = self._decoder2.raw_decode(data) except ValueError: # Treat error as fatal if we have already added # the final buffer to the input if (index2 + 1) == len(self._buffer): raise else: index = index2 break # Decoding succeeded – yield result and shorten buffer yield obj if offset < len(self._buffer[index]): self._buffer[index] = self._buffer[index][offset:] else: self._buffer[index] = None index += 1 except ValueError as error: # It is unfortunately not possible to reliably detect whether # parsing ended because of an error *within* the JSON string, or # an unexpected *end* of the JSON string. # We therefore have to assume that any error that occurs here # *might* be related to the JSON parser hitting EOF and therefore # have to postpone error reporting until `parse_finalize` is # called. self._lasterror = error finally: # Remove all processed buffers del self._buffer[0:index] def parse_finalize(self): """Raises errors for incomplete buffered data that could not be parsed because the end of the input data has been reached. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- tuple : Always empty """ try: try: # Raise exception for remaining bytes in bytes decoder self._decoder1.decode(b'', True) except UnicodeDecodeError as error: raise exceptions.DecodingError('json', error) # Late raise errors that looked like they could have been fixed if # the caller had provided more data if self._buffer: raise exceptions.DecodingError('json', self._lasterror) finally: # Reset state self._buffer = [] self._lasterror = None self._decoder1.reset() return () def encode(self, obj): """Returns ``obj`` serialized as JSON formatted bytes. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : str | list | dict | int JSON serializable Python object Returns ------- bytes """ try: result = json.dumps(obj, sort_keys=True, indent=None, separators=(',', ':')) if isinstance(result, six.text_type): return result.encode("utf-8") else: return result except (UnicodeEncodeError, TypeError) as error: raise exceptions.EncodingError('json', error) class Pickle(Encoding): """Python object parser/encoder using `pickle`. """ name = 'pickle' def __init__(self): self._buffer = io.BytesIO() def parse_partial(self, raw): """Buffers the given data so that it can be passed to `pickle` in one go. This does not actually process the data in smaller chunks, but merely buffers it until `parse_finalize` is called! This is mostly because the standard-library module expects the entire data to be available up front, which is currently always the case for our code anyway. Parameters ---------- raw : bytes Data to be buffered Returns ------- tuple : An empty tuple """ self._buffer.write(raw) return () def parse_finalize(self): """Parses the buffered data and yields the result.
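Note that unpickling executes code embedded in the byte stream; only call this on data from a trusted source (see :meth:`~ipfsapi.Client.get_pyobj` for a discussion of this risk).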
Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ try: self._buffer.seek(0, 0) yield pickle.load(self._buffer) except pickle.UnpicklingError as error: raise exceptions.DecodingError('pickle', error) def parse(self, raw): r"""Returns a Python object decoded from a pickle byte stream. .. code-block:: python >>> p = Pickle() >>> p.parse(b'(lp0\nI1\naI2\naI3\naI01\naF4.5\naNaF6000.0\na.') [1, 2, 3, True, 4.5, None, 6000.0] Raises ------ ~ipfsapi.exceptions.DecodingError Parameters ---------- raw : bytes Pickle data bytes Returns ------- object """ return Encoding.parse(self, raw) def encode(self, obj): """Returns ``obj`` serialized as a pickle binary string. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : object Serializable Python object Returns ------- bytes """ try: return pickle.dumps(obj) except pickle.PicklingError as error: raise exceptions.EncodingError('pickle', error) class Protobuf(Encoding): """Protobuf parser/encoder that handles protobuf.""" name = 'protobuf' class Xml(Encoding): """XML parser/encoder that handles XML.""" name = 'xml' # encodings supported by the IPFS api (default is JSON) __encodings = { Dummy.name: Dummy, Json.name: Json, Pickle.name: Pickle, Protobuf.name: Protobuf, Xml.name: Xml } def get_encoding(name): """Returns an Encoding object for the named encoding. Raises ------ ~ipfsapi.exceptions.EncoderMissingError Parameters ---------- name : str Encoding name. Supported options: * ``"none"`` * ``"json"`` * ``"pickle"`` * ``"protobuf"`` * ``"xml"`` """ try: return __encodings[name.lower()]() except KeyError: raise exceptions.EncoderMissingError(name)
ipfsapi/exceptions.py
# -*- coding: utf-8 -*- """ The class hierarchy for exceptions is::

    Error
     +-- VersionMismatch
     +-- EncoderError
     |    +-- EncoderMissingError
     |    +-- EncodingError
     |    +-- DecodingError
     +-- CommunicationError
          +-- ProtocolError
          +-- StatusError
          |    +-- ErrorResponse
          +-- ConnectionError
          +-- TimeoutError

""" class Error(Exception): """Base class for all exceptions in this module.""" pass class VersionMismatch(Error): """Raised when the daemon version is not supported by this client version.""" def __init__(self, current, minimum, maximum): self.current = current self.minimum = minimum self.maximum = maximum msg = "Unsupported daemon version '{}' (not in range: {} – {})".format( current, minimum, maximum ) Error.__init__(self, msg) ############### # encoding.py # ############### class EncoderError(Error): """Base class for all encoding and decoding related errors.""" def __init__(self, message, encoder_name): self.encoder_name = encoder_name Error.__init__(self, message) class EncoderMissingError(EncoderError): """Raised when a requested encoder class does not actually exist.""" def __init__(self, encoder_name): msg = "Unknown encoder: '{}'".format(encoder_name) EncoderError.__init__(self, msg, encoder_name) class EncodingError(EncoderError): """Raised when encoding a Python object into a byte string has failed due to some problem with the input data.""" def __init__(self, encoder_name, original): self.original = original msg = "Object encoding error: {}".format(original) EncoderError.__init__(self, msg, encoder_name) class DecodingError(EncoderError): """Raised when decoding a byte string to a Python object has failed due to some problem with the input data.""" def __init__(self, encoder_name, original): self.original = original msg = "Object decoding error: {}".format(original) EncoderError.__init__(self, msg, encoder_name) ########### #
http.py # ########### class CommunicationError(Error): """Base class for all network communication related errors.""" def __init__(self, original, _message=None): self.original = original if _message: msg = _message else: msg = "{}: {}".format(original.__class__.__name__, str(original)) Error.__init__(self, msg) class ProtocolError(CommunicationError): """Raised when parsing the response from the daemon has failed. This can most likely occur if the service on the remote end isn't in fact an IPFS daemon.""" class StatusError(CommunicationError): """Raised when the daemon responds with an error to our request.""" class ErrorResponse(StatusError): """Raised when the daemon has responded with an error message because the requested operation could not be carried out.""" def __init__(self, message, original): StatusError.__init__(self, original, message) class ConnectionError(CommunicationError): """Raised when connecting to the service has failed on the socket layer.""" class TimeoutError(CommunicationError): """Raised when the daemon didn't respond in time."""
ipfsapi/http.py
# -*- encoding: utf-8 -*- """HTTP client for API requests. This is pluggable into the IPFS API client and will hopefully be supplemented by an asynchronous version. """ from __future__ import absolute_import import abc import contextlib import functools import re import tarfile from six.moves import http_client import requests import six from . import encoding from . import exceptions def pass_defaults(func): """Decorator that returns a wrapper function. When invoked, the wrapper invokes ``func`` with the client's default kwargs merged into the given kwargs. Parameters ---------- func : callable The function to append the default kwargs to """ @functools.wraps(func) def wrapper(self, *args, **kwargs): merged = {} merged.update(self.defaults) merged.update(kwargs) return func(self, *args, **merged) return wrapper def _notify_stream_iter_closed(): pass # Mocked by unit tests to check for proper closing class StreamDecodeIterator(object): """ Wrapper around an iterable that allows it to be used as a context manager (`with`-statement) for easy cleanup.
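A minimal usage sketch (``response`` being a streaming :class:`requests.Response` and ``parser`` one of the :mod:`~ipfsapi.encoding` parsers; both names are placeholders here): .. code-block:: python >>> with StreamDecodeIterator(response, parser) as results: ...     for obj in results: ...         print(obj)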
""" def __init__(self, response, parser): self._response = response self._parser = parser self._response_iter = response.iter_content(chunk_size=None) self._parser_iter = None def __iter__(self): return self def __next__(self): while True: # Try reading for current parser iterator if self._parser_iter is not None: try: return next(self._parser_iter) except StopIteration: self._parser_iter = None # Forward exception to caller if we do not expect any # further data if self._response_iter is None: raise try: data = next(self._response_iter) # Create new parser iterator using the newly recieved data self._parser_iter = iter(self._parser.parse_partial(data)) except StopIteration: # No more data to receive – destroy response iterator and # iterate over the final fragments returned by the parser self._response_iter = None self._parser_iter = iter(self._parser.parse_finalize()) #PY2: Old iterator syntax def next(self): return self.__next__() def __enter__(self): return self def __exit__(self, *a): self.close() def close(self): # Clean up any open iterators first if self._response_iter is not None: self._response_iter.close() if self._parser_iter is not None: self._parser_iter.close() self._response_iter = None self._parser_iter = None # Clean up response object and parser if self._response is not None: self._response.close() self._response = None self._parser = None _notify_stream_iter_closed() def stream_decode_full(response, parser): with StreamDecodeIterator(response, parser) as response_iter: result = list(response_iter) if len(result) == 1: return result[0] else: return result class HTTPClient(object): """An HTTP client for interacting with the IPFS daemon. Parameters ---------- host : str The host the IPFS daemon is running on port : int The port the IPFS daemon is running at base : str The path prefix for API calls defaults : dict The default parameters to be passed to :meth:`~ipfsapi.http.HTTPClient.request` """ __metaclass__ = abc.ABCMeta def __init__(self, host, port, base, **defaults): self.host = host self.port = port if not re.match('^https?://', host.lower()): host = 'http://' + host self.base = '%s:%s/%s' % (host, port, base) self.defaults = defaults self._session = None def _do_request(self, *args, **kwargs): try: if self._session: return self._session.request(*args, **kwargs) else: return requests.request(*args, **kwargs) except requests.ConnectionError as error: six.raise_from(exceptions.ConnectionError(error), error) except http_client.HTTPException as error: six.raise_from(exceptions.ProtocolError(error), error) except requests.Timeout as error: six.raise_from(exceptions.TimeoutError(error), error) def _do_raise_for_status(self, response, content=None): try: response.raise_for_status() except requests.exceptions.HTTPError as error: # If we have decoded an error response from the server, # use that as the exception message; otherwise, just pass # the exception on to the caller. 
if isinstance(content, dict) and 'Message' in content: msg = content['Message'] six.raise_from(exceptions.ErrorResponse(msg, error), error) else: six.raise_from(exceptions.StatusError(error), error) def _request(self, method, url, params, parser, stream=False, files=None, headers={}, data=None): # Do HTTP request (synchronously) res = self._do_request(method, url, params=params, stream=stream, files=files, headers=headers, data=data) if stream: # Raise exceptions for response status self._do_raise_for_status(res) # Decode each item as it is read return StreamDecodeIterator(res, parser) else: # First decode received item ret = stream_decode_full(res, parser) # Raise exception for response status # (optionally incorporating the response message, if applicable) self._do_raise_for_status(res, ret) return ret @pass_defaults def request(self, path, args=[], files=[], opts={}, stream=False, decoder=None, headers={}, data=None): """Makes an HTTP request to the IPFS daemon. This function returns the contents of the HTTP response from the IPFS daemon. Raises ------ ~ipfsapi.exceptions.ErrorResponse ~ipfsapi.exceptions.ConnectionError ~ipfsapi.exceptions.ProtocolError ~ipfsapi.exceptions.StatusError ~ipfsapi.exceptions.TimeoutError Parameters ---------- path : str The REST command path to send args : list Positional parameters to be sent along with the HTTP request files : :class:`io.RawIOBase` | :obj:`str` | :obj:`list` The file object(s) or path(s) to stream to the daemon opts : dict Query string parameters to be sent along with the HTTP request decoder : str The encoder to use to parse the HTTP response kwargs : dict Additional arguments to pass to :mod:`requests` """ url = self.base + path params = [] params.append(('stream-channels', 'true')) for opt in opts.items(): params.append(opt) for arg in args: params.append(('arg', arg)) method = 'post' if (files or data) else 'get' parser = encoding.get_encoding(decoder if decoder else "none") return self._request(method, url, params, parser, stream, files, headers, data) @pass_defaults def download(self, path, args=[], filepath=None, opts={}, compress=True, **kwargs): """Makes a request to the IPFS daemon to download a file. Downloads a file or files from IPFS into the current working directory, or the directory given by ``filepath``. Raises ------ ~ipfsapi.exceptions.ErrorResponse ~ipfsapi.exceptions.ConnectionError ~ipfsapi.exceptions.ProtocolError ~ipfsapi.exceptions.StatusError ~ipfsapi.exceptions.TimeoutError Parameters ---------- path : str The REST command path to send filepath : str The local path where IPFS will store downloaded files. Defaults to the current working directory. args : list Positional parameters to be sent along with the HTTP request opts : dict Query string parameters to be sent along with the HTTP request compress : bool Whether the downloaded file should be GZip compressed by the daemon before being sent to the client kwargs : dict Additional arguments to pass to :mod:`requests` """ url = self.base + path wd = filepath or '.'
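# The daemon sends the requested node(s) back as a single (optionally # gzip-compressed) TAR archive, which is unpacked into `wd` below.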
params = [] params.append(('stream-channels', 'true')) params.append(('archive', 'true')) if compress: params.append(('compress', 'true')) for opt in opts.items(): params.append(opt) for arg in args: params.append(('arg', arg)) method = 'get' res = self._do_request(method, url, params=params, stream=True, **kwargs) self._do_raise_for_status(res) # try to stream download as a tar file stream mode = 'r|gz' if compress else 'r|' with tarfile.open(fileobj=res.raw, mode=mode) as tf: tf.extractall(path=wd) @contextlib.contextmanager def session(self): """A context manager for this client's session. This function closes the current session when this client goes out of scope. """ self._session = requests.session() yield self._session.close() self._session = None
ipfsapi/multipart.py
"""HTTP :mimetype:`multipart/*`-encoded file streaming. """ from __future__ import absolute_import import re import requests import io import os from inspect import isgenerator from uuid import uuid4 import six from six.moves.urllib.parse import quote from . import utils if six.PY3: from builtins import memoryview as buffer CRLF = b'\r\n' default_chunk_size = 4096 def content_disposition(fn, disptype='file'): """Returns a dict containing the MIME content-disposition header for a file. .. code-block:: python >>> content_disposition('example.txt') {'Content-Disposition': 'file; filename="example.txt"'} >>> content_disposition('example.txt', 'attachment') {'Content-Disposition': 'attachment; filename="example.txt"'} Parameters ---------- fn : str Filename to retrieve the MIME content-disposition for disptype : str The disposition type to use for the file """ disp = '%s; filename="%s"' % ( disptype, quote(fn, safe='') ) return {'Content-Disposition': disp} def content_type(fn): """Returns a dict with the content-type header for a file. Guesses the mimetype for a filename and returns a dict containing the content-type header. .. code-block:: python >>> content_type('example.txt') {'Content-Type': 'text/plain'} >>> content_type('example.jpeg') {'Content-Type': 'image/jpeg'} >>> content_type('example') {'Content-Type': 'application/octet-stream'} Parameters ---------- fn : str Filename to guess the content-type for """ return {'Content-Type': utils.guess_mimetype(fn)} def multipart_content_type(boundary, subtype='mixed'): """Creates a MIME multipart header with the given configuration. Returns a dict containing a MIME multipart header with the given boundary. .. code-block:: python >>> multipart_content_type('8K5rNKlLQVyreRNncxOTeg') {'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'} >>> multipart_content_type('8K5rNKlLQVyreRNncxOTeg', 'alt') {'Content-Type': 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'} Parameters ---------- boundary : str The content delimiter to put into the header subtype : str The subtype in :mimetype:`multipart/*`-domain to put into the header """ ctype = 'multipart/%s; boundary="%s"' % ( subtype, boundary ) return {'Content-Type': ctype} class BodyGenerator(object): """Generator for creating the body of a :mimetype:`multipart/*` HTTP request.
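The emitted byte stream roughly follows the usual multipart wire format, e.g. (boundary shortened here for readability; real boundaries are random UUID hex strings): .. code-block:: text --8K5rNK... Content-Disposition: file; filename="example.txt" Content-Type: text/plain <file contents> --8K5rNK...--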
Parameters ---------- name : str The filename of the file(s)/content being encoded disptype : str The ``Content-Disposition`` of the content subtype : str The :mimetype:`multipart/*`-subtype of the content boundary : str An identifier used as a delimiter for the content's body """ def __init__(self, name, disptype='file', subtype='mixed', boundary=None): # If the boundary is unspecified, make a random one if boundary is None: boundary = self._make_boundary() self.boundary = boundary headers = content_disposition(name, disptype=disptype) headers.update(multipart_content_type(boundary, subtype=subtype)) self.headers = headers def _make_boundary(self): """Returns a random hexadecimal string (UUID 4). The HTTP multipart request body spec requires a boundary string to separate different content chunks within a request, and this is usually a random string. Using a UUID is an easy way to generate a random string of appropriate length as this content separator. """ return uuid4().hex def _write_headers(self, headers): """Yields the HTTP header text for some content. Parameters ---------- headers : dict The headers to yield """ if headers: for name in sorted(headers.keys()): yield name.encode("ascii") yield b': ' yield headers[name].encode("ascii") yield CRLF yield CRLF def write_headers(self): """Yields the HTTP header text for the content.""" for c in self._write_headers(self.headers): yield c def open(self, **kwargs): """Yields the body section for the content. """ yield b'--' yield self.boundary.encode() yield CRLF def file_open(self, fn): """Yields the opening text of a file section in multipart HTTP. Parameters ---------- fn : str Filename for the file being opened and added to the HTTP body """ yield b'--' yield self.boundary.encode() yield CRLF headers = content_disposition(fn) headers.update(content_type(fn)) for c in self._write_headers(headers): yield c def file_close(self): """Yields the end text of a file section in HTTP multipart encoding.""" yield CRLF def close(self): """Yields the ends of the content area in an HTTP multipart body.""" yield b'--' yield self.boundary.encode() yield b'--' yield CRLF class BufferedGenerator(object): """Abstract buffered generator class which encodes :mimetype:`multipart/form-data`. Parameters ---------- name : str The name of the file to encode chunk_size : int The maximum size that any single file chunk may have in bytes """ def __init__(self, name, chunk_size=default_chunk_size): self.chunk_size = chunk_size self._internal = bytearray(chunk_size) self.buf = buffer(self._internal) self.name = name self.envelope = BodyGenerator(self.name, disptype='form-data', subtype='form-data') self.headers = self.envelope.headers def file_chunks(self, fp): """Yields chunks of a file. Parameters ---------- fp : io.RawIOBase The file to break into chunks (must be an open file or have the ``readinto`` method) """ fsize = utils.file_size(fp) offset = 0 if hasattr(fp, 'readinto'): while offset < fsize: nb = fp.readinto(self._internal) yield self.buf[:nb] offset += nb else: while offset < fsize: nb = min(self.chunk_size, fsize - offset) yield fp.read(nb) offset += nb def gen_chunks(self, gen): """Generates byte chunks of a given size. Takes a bytes generator and yields chunks of a maximum of ``chunk_size`` bytes.
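For instance, with ``chunk_size=4`` an 8-byte input chunk ``b'abcdefgh'`` would be re-yielded as two 4-byte ``memoryview`` slices instead of one oversized chunk (a hypothetical illustration of the splitting rule implemented below).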
Parameters ---------- gen : generator The bytes generator that produces the bytes """ for data in gen: size = len(data) if size < self.chunk_size: yield data else: mv = buffer(data) offset = 0 while offset < size: nb = min(self.chunk_size, size - offset) yield mv[offset:offset + nb] offset += nb def body(self, *args, **kwargs): """Returns the body of the buffered file. .. note:: This function is not actually implemented. """ raise NotImplementedError def close(self): """Yields the closing text of a multipart envelope.""" for chunk in self.gen_chunks(self.envelope.close()): yield chunk class FileStream(BufferedGenerator): """Generator that encodes multiple files into HTTP multipart. A buffered generator that encodes an array of files as :mimetype:`multipart/form-data`. This is a concrete implementation of :class:`~ipfsapi.multipart.BufferedGenerator`. Parameters ---------- files : list | str | io.RawIOBase The file(s) to encode chunk_size : int The maximum size that any single file chunk may have in bytes """ def __init__(self, files, chunk_size=default_chunk_size): BufferedGenerator.__init__(self, 'files', chunk_size=chunk_size) self.files = utils.clean_files(files) def body(self): """Yields the body of the buffered file.""" for fp, need_close in self.files: try: name = os.path.basename(fp.name) except AttributeError: name = '' for chunk in self.gen_chunks(self.envelope.file_open(name)): yield chunk for chunk in self.file_chunks(fp): yield chunk for chunk in self.gen_chunks(self.envelope.file_close()): yield chunk if need_close: fp.close() for chunk in self.close(): yield chunk def glob_compile(pat): """Translate a shell glob PATTERN to a regular expression. This is almost entirely based on `fnmatch.translate` source-code from the python 3.5 standard-library. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i + 1 if c == '/' and len(pat) > (i + 2) and pat[i:(i + 3)] == '**/': # Special-case for "any number of sub-directories" operator since # may also expand to no entries: # Otherwise `a/**/b` would expand to `a[/].*[/]b` which wouldn't # match the immediate sub-directories of `a`, like `a/b`. i = i + 3 res = res + '[/]([^/]*[/])*' elif c == '*': if len(pat) > i and pat[i] == '*': i = i + 1 res = res + '.*' else: res = res + '[^/]*' elif c == '?': res = res + '[^/]' elif c == '[': j = i if j < n and pat[j] == '!': j = j + 1 if j < n and pat[j] == ']': j = j + 1 while j < n and pat[j] != ']': j = j + 1 if j >= n: res = res + '\\[' else: stuff = pat[i:j].replace('\\', '\\\\') i = j + 1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) return re.compile('^' + res + r'\Z(?ms)' + '$') class DirectoryStream(BufferedGenerator): """Generator that encodes a directory into HTTP multipart. A buffered generator that encodes a directory of files as :mimetype:`multipart/form-data`. This is a concrete implementation of :class:`~ipfsapi.multipart.BufferedGenerator`.
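As an illustrative sketch (hypothetical paths): ``DirectoryStream('photos', patterns='*.jpg')`` would keep only the JPEG files directly inside ``photos/``, while pre-compiled regular expressions such as ``re.compile(r'.*\.jpe?g')`` may be mixed into a ``patterns`` list for finer control.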
Parameters ---------- directory : str The filepath of the directory to encode patterns : str | list A single glob pattern or a list of several glob patterns and compiled regular expressions used to determine which filepaths to match chunk_size : int The maximum size that any single file chunk may have in bytes """ def __init__(self, directory, recursive=False, patterns='**', chunk_size=default_chunk_size): BufferedGenerator.__init__(self, directory, chunk_size=chunk_size) self.patterns = [] patterns = [patterns] if isinstance(patterns, str) else patterns for pattern in patterns: if isinstance(pattern, str): self.patterns.append(glob_compile(pattern)) else: self.patterns.append(pattern) self.directory = os.path.normpath(directory) self.recursive = recursive self._request = self._prepare() self.headers = self._request.headers def body(self): """Returns the HTTP body for this directory upload request.""" return self._request.body def headers(self): """Returns the HTTP headers for this directory upload request.""" return self._request.headers def _prepare(self): """Pre-formats the multipart HTTP request to transmit the directory.""" names = [] added_directories = set() def add_directory(short_path): # Do not continue if this directory has already been added if short_path in added_directories: return # Scan for first super-directory that has already been added dir_base = short_path dir_parts = [] while dir_base: dir_base, dir_name = os.path.split(dir_base) dir_parts.append(dir_name) if dir_base in added_directories: break # Add missing intermediate directory nodes in the right order while dir_parts: dir_base = os.path.join(dir_base, dir_parts.pop()) # Create an empty, fake file to represent the directory mock_file = io.StringIO() mock_file.write(u'') # Add this directory to those that will be sent names.append(('files', (dir_base, mock_file, 'application/x-directory'))) # Remember that this directory has already been sent added_directories.add(dir_base) def add_file(short_path, full_path): try: names.append(('files', (short_path, open(full_path, 'rb'), 'application/octet-stream'))) except OSError: # File might have disappeared between `os.walk()` and `open()` pass def match_short_path(short_path): # Remove initial path component so that all files are based in # the target directory itself (not one level above) if os.sep in short_path: path = short_path.split(os.sep, 1)[1] else: return False # Convert all path separators to POSIX style path = path.replace(os.sep, '/') # Do the matching on the simplified path for pattern in self.patterns: if pattern.match(path): return True return False # Identify the unnecessary portion of the relative path truncate = os.path.dirname(self.directory) # Traverse the filesystem downward from the target directory's uri # Errors: `os.walk()` will simply return an empty generator if the # target directory does not exist.
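# Set of directories whose paths have already matched a pattern; # everything beneath them is included without further pattern checks.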
wildcard_directories = set() for curr_dir, _, files in os.walk(self.directory): # find the path relative to the directory being added if len(truncate) > 0: _, _, short_path = curr_dir.partition(truncate) else: short_path = curr_dir # remove leading / or \ if it is present if short_path.startswith(os.sep): short_path = short_path[1:] wildcard_directory = False if os.path.split(short_path)[0] in wildcard_directories: # Parent directory has matched a pattern, all sub-nodes should # be added too wildcard_directories.add(short_path) wildcard_directory = True else: # Check if directory path matches one of the patterns if match_short_path(short_path): # Directory matched pattern and it should therefore # be added along with all of its contents wildcard_directories.add(short_path) wildcard_directory = True # Always add directories within wildcard directories - even if they # are empty if wildcard_directory: add_directory(short_path) # Iterate across the files in the current directory for filename in files: # Find the filename relative to the directory being added short_name = os.path.join(short_path, filename) filepath = os.path.join(curr_dir, filename) if wildcard_directory: # Always add files in wildcard directories add_file(short_name, filepath) else: # Add file (and all missing intermediate directories) # if it matches one of the patterns if match_short_path(short_name): add_directory(short_path) add_file(short_name, filepath) # Use a prepared (but never sent) request to generate the multipart # body and headers for us; the URL is a placeholder and is never # contacted req = requests.Request("POST", 'http://localhost', files=names) prep = req.prepare() return prep class BytesStream(BufferedGenerator): """A buffered generator that encodes bytes as :mimetype:`multipart/form-data`. Parameters ---------- data : bytes The binary data to stream to the daemon chunk_size : int The maximum size of a single data chunk """ def __init__(self, data, chunk_size=default_chunk_size): BufferedGenerator.__init__(self, 'bytes', chunk_size=chunk_size) self.data = data if isgenerator(data) else (data,) def body(self): """Yields the encoded body.""" for chunk in self.gen_chunks(self.envelope.file_open(self.name)): yield chunk for chunk in self.gen_chunks(self.data): yield chunk for chunk in self.gen_chunks(self.envelope.file_close()): yield chunk for chunk in self.close(): yield chunk def stream_files(files, chunk_size=default_chunk_size): """Gets a buffered generator for streaming files. Returns a buffered generator which encodes a file or list of files as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- files : str The file(s) to stream chunk_size : int Maximum size of each stream chunk """ stream = FileStream(files, chunk_size=chunk_size) return stream.body(), stream.headers def stream_directory(directory, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming directories. Returns a buffered generator which encodes a directory as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- directory : str The filepath of the directory to stream recursive : bool Stream all content within the directory recursively?
patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ stream = DirectoryStream(directory, recursive=recursive, patterns=patterns, chunk_size=chunk_size) return stream.body(), stream.headers def stream_filesystem_node(path, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming either files or directories. Returns a buffered generator which encodes the file or directory at the given path as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- path : str The filepath of the directory or file to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ is_dir = isinstance(path, six.string_types) and os.path.isdir(path) if recursive or is_dir: return stream_directory(path, recursive, patterns, chunk_size) else: return stream_files(path, chunk_size) def stream_bytes(data, chunk_size=default_chunk_size): """Gets a buffered generator for streaming binary data. Returns a buffered generator which encodes binary data as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- data : bytes The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ stream = BytesStream(data, chunk_size=chunk_size) return stream.body(), stream.headers def stream_text(text, chunk_size=default_chunk_size): """Gets a buffered generator for streaming text. Returns a buffered generator which encodes a string as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- text : str The text to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ if isgenerator(text): def binary_stream(): for item in text: if six.PY2 and isinstance(item, six.binary_type): #PY2: Allow binary strings under Python 2 since # Python 2 code is not expected to always get the # distinction between text and binary strings right. yield item else: yield item.encode("utf-8") data = binary_stream() elif six.PY2 and isinstance(text, six.binary_type): #PY2: See above. data = text else: data = text.encode("utf-8") return stream_bytes(data, chunk_size)
ipfsapi/utils.py
"""A module to handle generic operations. """ from __future__ import absolute_import import mimetypes import os from functools import wraps import six def guess_mimetype(filename): """Guesses the mimetype of a file based on the given ``filename``. .. code-block:: python >>> guess_mimetype('example.txt') 'text/plain' >>> guess_mimetype('/foo/bar/example') 'application/octet-stream' Parameters ---------- filename : str The file name or path for which the mimetype is to be guessed """ fn = os.path.basename(filename) return mimetypes.guess_type(fn)[0] or 'application/octet-stream' def ls_dir(dirname): """Returns files and subdirectories within a given directory. Returns a pair of lists, containing the names of the files and of the directories in ``dirname`` (in that order).
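For a hypothetical directory containing one file and one sub-directory, the result might look like this (ordering within each list follows ``os.listdir`` and is therefore platform-dependent): .. code-block:: python >>> ls_dir('dist') (['package.tar.gz'], ['build'])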
Raises ------ OSError : Accessing the given directory path failed Parameters ---------- dirname : str The path of the directory to be listed """ ls = os.listdir(dirname) files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))] dirs = [p for p in ls if os.path.isdir(os.path.join(dirname, p))] return files, dirs def clean_file(file): """Returns a tuple containing a ``file``-like object and a close indicator. This ensures the given file is opened and keeps track of files that should be closed after use (files that were not open prior to this function call). Raises ------ OSError : Accessing the given file path failed Parameters ---------- file : str | io.IOBase A filepath or ``file``-like object that may or may not need to be opened """ if not hasattr(file, 'read'): return open(file, 'rb'), True else: return file, False def clean_files(files): """Generates tuples with a ``file``-like object and a close indicator. This is a generator of tuples, where the first element is the file object and the second element is a boolean which is True if this module opened the file (and thus should close it). Raises ------ OSError : Accessing the given file path failed Parameters ---------- files : list | io.IOBase | str Collection or single instance of a filepath and file-like object """ if isinstance(files, (list, tuple)): for f in files: yield clean_file(f) else: yield clean_file(files) def file_size(f): """Returns the size of a file in bytes. Raises ------ OSError : Accessing the given file path failed Parameters ---------- f : io.IOBase | str The file path or object for which the size should be determined """ if isinstance(f, (six.string_types, six.text_type)): return os.path.getsize(f) else: cur = f.tell() f.seek(0, 2) size = f.tell() f.seek(cur) return size class return_field(object): """Decorator that returns the given field of a json response. Parameters ---------- field : object The response field to be returned for all invocations """ def __init__(self, field): self.field = field def __call__(self, cmd): """Wraps a command so that only a specified field is returned. Parameters ---------- cmd : callable A command that is intended to be wrapped """ @wraps(cmd) def wrapper(*args, **kwargs): """Returns the specified field of the command invocation. Parameters ---------- args : list Positional parameters to pass to the wrapped callable kwargs : dict Named parameters to pass to the wrapped callable """ res = cmd(*args, **kwargs) return res[self.field] return wrapper
ipfsapi/version.py
# _Versioning scheme:_ # The major and minor version of each release correspond to the supported # IPFS daemon version. The revision number will be updated whenever we make # a new release for the `py-ipfs-api` client for that daemon version. # # Example: The first client version to support the `0.4.x`-series of the IPFS # HTTP API will have version `0.4.0`, the second version will have version # `0.4.1` and so on. When IPFS `0.5.0` is released, the first client version # to support it will also be released as `0.5.0`.
__version__ = "0.4.3" PK�����Nr$IGST��T�����ipfsapi-0.4.3.dist-info/LICENSEThe MIT License (MIT) Copyright (c) 2015 Andrew Stocker <amstocker@dons.usfca.edu> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK������!Hp!Q���a������ipfsapi-0.4.3.dist-info/WHEEL HM K-*ϳR03rOK-J,/RH,zd&Y)r$[)T&UD"�PK������!H8,�� �� ���ipfsapi-0.4.3.dist-info/METADATA9ko:+X^8.b9vr4qlvޢ$ZWDʏ,_?;CR~)Dgq PAK,~Y14dgOƜ&3}!Mg^u:| V MRB] rYeD^N%|JӷYu'øcy:\1CS@HԷ2lX'!6j D0 ^2?epF~;ǻ=|V'q;$[Ǒ�2[2eȨs3`r *2N>)Zxt Q:!Y A5PaoqTʥl/]\ϖ.㌦u' FL+qXWG~�v _gm&oj[A{$i<mN) C@E4r34E[W_</8Gk׸';#)|2.8AP| 'n 634wi+G4/߇_NyZ6,>wwvtVZºqߔ{XiʊײMe10&|8E*d3o\,v>( (7hƋ8e,VOQ30&8YoE,:Q'dbi4?aluN,~WrO]xJ/D #,d ʚj)I7j3N]u\.2?pt8])d #Bjl<T=Z4twwH]廦wۋr7N:׭KYUN(7L^AFcę C<b~M٘#SE9 qh`!hzM4On$N٧O qHCjO1a-0vb 0GPo8Dd(&. 9KK6?IR{NHeZÚtYӛp8 󇼞$&Us)T0$Nx@t@%tQ"8Ò潴W]?ڱ`J>RE ]G�7xi`c)=B%&F`,] 5Ȓ:dƒ/2⭲[KCՀnY*sc DTݐ3@{Q* eCn|.Jg)xL4]IaUWW0Jdx z+ZOx`. \wZ유60s*F "P DZ^+L%$^HȲȹ֑hj@%;yjK [Q X0J'g1g;b^u�#OKPZW Y%4Х4^$K~#J+*y ;J =R%{{P 0<#&u)5QR8#pv>}ZOZoּW^ӗ+E紐_V d |-xL> rg2{TP9Psa8ðh|Kwvsq�; =+@yzU>eլ?41?gObPy^nz]0?~T>;? 5|M7(< JtKvl׸+B,thq^v4MmOjnV:5Jt035NGG#;/CP;@Fu\|| w}һ&W?_Gxn7ygwvRgqnbD+Q\Y4j<\:<6ƯAksϿ;d)@b]ysUd !)H}mppYyɜ$.j6H^̞a8pVvr^bF'&+WQB{V~ܜ Ѽr'Y~?>=~.nztwri8d6mM~{ytO78~97?ƽpU=%ᙁgX|_c:e�ļ={#{.Znq>kΎ>7'K~7Nt08sN<D;5cA <]>~0c(^O9S7wsS-}&W_ivޢC̢Wq0GVb]x~/ T`U34nW.jic,&: 1x(EV=L80\.ݬ&-mudX%ǐ8(| AK6q@C|1Iy^e:<zHr|wkϧ)lbrPlˋj0>Ɏ [p&`YΨ3oO?Gd;RG,o-3 +pxa 0|,T UEh)ջ'oذ-j̶ hVpK=eDדrd)�Oz2{#_N%)vn59(ah ? E-ߨNEuV>Ru>z F, nq`rG nޫ!}ta8*0#y~*G.1@Z)S` P `g rƾ=aP,jt-ঠ)ce ue2\^`fp;Fcm[cl46R683R{Ρ`R6ǁqР޺簤t$c黖Lёr. ˒7)"o < /[룒֚gGlrUCV ģ Í@㧹9V].{*g >F7G⺉m (-1\` >$ lTB]]nCl6N08؜#jDӑ . 
<'BMO,|A4\_0 %w2M߀s_ % �p*;?)R~E1`d (aH%Z軼̹ͭHcjY2vg8$c5מMrϋǕc-TxY'Җ1 (j jT׭F#KȸL:G}ae݌'pM-a1E'D9^ \ΠϔRCmƔǐ6bk~Mf�'RK<SV[V3&5Dl@LB.TQi/lQlP$]s3#5WF_,l6+',_iooJ!F~l_ )1IF�%HdH+s 7M|flI4 WU[F K QU6Cm- =3JI퍈kFͲ�n OyC18d H%ܤA+#b`~*gUN7\.SSpU ' %<A6 8PNՕla~]}/E <"8_PK������!HE<�������ipfsapi-0.4.3.dist-info/RECORDuIr@�}hE (("* ZTNx1nr0ɺ0 pFTWD ~1rW <cuyooA1x tGYiӿA[1 v?xԂ^KK}51%p;%N^Tm SID,E1c)h-ຮk;XV/S޸$$F f@!Dgv3l.�"Ҋ'of%6<1wV7K<'ہLxR,( $7�6cgDmWujz{7=1پiyī:C.WG<p@kp|1Ö Ͼ1u]Q5GlYQ=3٭p@Q@33~f z7Y2#yLl5G0pO"D evՏi, \pf:@"J J4S/Θ6x3*�+7'>iL|_Y`]p%E}PK�����ILBD�������������������ipfsapi/__init__.pyPK�����xL**9E�9E��������������ipfsapi/client.pyPK�����E;KS!<.��<.�������������G�ipfsapi/encoding.pyPK�����E;KJ+V' �� �������������u�ipfsapi/exceptions.pyPK�����1L[8n'��n'�������������݂�ipfsapi/http.pyPK�����ILm2q:X��:X�������������x�ipfsapi/multipart.pyPK�����IL,ne��e��������������ipfsapi/utils.pyPK�����~LH$��$�������������w�ipfsapi/version.pyPK�����Nr$IGST��T��������������ipfsapi-0.4.3.dist-info/LICENSEPK������!Hp!Q���a��������������\�ipfsapi-0.4.3.dist-info/WHEELPK������!H8,�� �� ������������ipfsapi-0.4.3.dist-info/METADATAPK������!HE<���������������6(�ipfsapi-0.4.3.dist-info/RECORDPK���� � �3��*���