# ===== implant/__init__.py =====
# noqa


# ===== implant/bootstrap/__init__.py =====
# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bootstrap of a remote python process."""
import base64
import inspect
import logging
import types
import zlib

import pkg_resources

from .. import core, msgpack

log = logging.getLogger(__name__)

VENV_DEFAULT = '~/.implant'


class Bootstrap(dict):
    """Provide an iterator over the bootstrap code."""

    def __init__(self, code, options=None):
        super(Bootstrap, self).__init__()
        self.__dict__ = self
        if options is None:
            options = {}
        self.options = base64.b64encode(msgpack.encode(options)).decode()
        venv = options.get('venv')
        self.venv = VENV_DEFAULT if venv is True \
            else None if venv is False \
            else venv

        if isinstance(code, types.ModuleType):
            code_source = inspect.getsource(code).encode()
            self.code_path = 'remote://{}'.format(inspect.getsourcefile(code))
        else:
            code_source = code
            self.code_path = 'remote-string://'

        self.code = base64.b64encode(zlib.compress(code_source, 9)).decode()
        raw_len = len(code_source)
        comp_len = len(self.code)
        log.debug("Code compression ratio of %s -> %s: %.2f%%",
                  raw_len, comp_len, comp_len * 100 / raw_len)

        msgpack_code_source = inspect.getsource(msgpack).encode()
        self.msgpack_code_path = 'remote://{}'.format(
            inspect.getsourcefile(msgpack))
        self.msgpack_code = base64.b64encode(
            zlib.compress(msgpack_code_source, 9)).decode()

    def formatsourcelines(self, lines):
        """Remove full line comments."""
        # TODO think about using pyminifier
        # lines, _ = inspect.getsourcelines(module)
        for line in map(lambda l: l.decode('utf-8', 'replace'), lines):
            stripped = line.strip()
            if stripped and not stripped.startswith('#'):
                yield line.format(**self)

    def __iter__(self):
        if self.venv:
            _with_venv_fmt = pkg_resources.resource_stream(
                __name__, '_with_venv.py.fmt')
            yield from self.formatsourcelines(_with_venv_fmt.readlines())

        _main_fmt = pkg_resources.resource_stream(__name__, '_main.py.fmt')
        yield from self.formatsourcelines(_main_fmt.readlines())

    def __str__(self):
        return ''.join(self)
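
# A minimal usage sketch (not part of the package) of how the Bootstrap
# class above is meant to be driven; the 'venv' option key is taken from
# this codebase, everything else here is illustrative:
#
#     from implant import bootstrap, core
#
#     # Render the bootstrap source for the `core` module, requesting a
#     # virtualenv at the default location (~/.implant).
#     code = str(bootstrap.Bootstrap(core, options={'venv': True}))
#
#     # `code` is the string a connector hands to `python -c` on the
#     # remote side; it inlines the compressed core and msgpack sources.
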
# ===== implant/bootstrap/_main.py.fmt =====
# coding: utf-8
# this is part of the core bootstrap
import sys, imp, base64, zlib

sys.modules["implant"] = implant = imp.new_module("implant")
setattr(implant, "__path__", [])

# just a msgpack fallback if no venv is used or msgpack somehow failed to install
try:
    from implant import msgpack
except ImportError:
    sys.modules["implant.msgpack"] = msgpack = imp.new_module("implant.msgpack")
    c = compile(zlib.decompress(base64.b64decode(b"{msgpack_code}")), "{msgpack_code_path}", "exec")
    exec(c, msgpack.__dict__)

sys.modules["implant.core"] = core = imp.new_module("implant.core")
implant.__dict__["core"] = core
c = compile(zlib.decompress(base64.b64decode(b"{code}")), "{code_path}", "exec", dont_inherit=True)
exec(c, core.__dict__)
core.main(**msgpack.decode(base64.b64decode(b"{options}")))


# ===== implant/bootstrap/_with_venv.py.fmt =====
# this is part of the core bootstrap
# use only double quotes!
import os, sys, site, pkg_resources

venv_path = os.path.expanduser("{venv}")
entry = site.getsitepackages([venv_path])[0]

# create venv if missing
if not os.path.isdir(entry):
    import venv
    venv.create(venv_path, system_site_packages=False, clear=True,
                symlinks=False, with_pip=True)

# insert venv at first position
# pkg_resources is not adding site-packages if there is no distribution
sys.prefix = venv_path
sys.path.insert(0, entry)
site.addsitedir(entry)
pkg_resources.working_set.add_entry(entry)

# pip should come from venv now
try:
    import umsgpack
except ImportError:
    # try to install umsgpack
    import pip
    # TODO use ssh port forwarding to install via master
    pip.main(["install", "--prefix", venv_path, "-q", "u-msgpack-python"])


# ===== implant/commands/__init__.py =====
# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Core features."""
import asyncio
import concurrent
import logging
import os
import time

from implant import core

log = logging.getLogger(__name__)


class Echo(core.Command):
    """Demonstrate the basic command API."""

    data = core.Parameter(default='ping', description='Meaningful data.')

    async def local(self, context):
        # custom protocol
        # first: send
        await context.channel.send_iteration("send to remote")

        # second: receive
        from_remote = []
        async for x in context.channel:
            from_remote.append(x)
        log.debug("************ receiving from remote: %s", from_remote)

        # third: wait for remote to finish and return result
        remote_result = await context.remote_future

        result = {
            'local_data': self.data,
            'from_remote': ''.join(from_remote),
        }
        result.update(remote_result)
        return result

    remote = core.CommandRemote('implant.commands.remotes.Echo')


class SystemLoad(core.Command):
    async def local(self, context):
        t, load = await context.remote_future
        return t, load

    async def remote(self, context):
        t, load = time.time(), os.getloadavg()
        return t, load


class Copy(core.Command):
    src = core.Parameter(description='Source file at local side.')
    dest = core.Parameter(description='Destination file at remote side.')

    def __init__(self, *args, **kwargs):
        super(Copy, self).__init__(*args, **kwargs)
        self.executor = concurrent.futures.ThreadPoolExecutor()
        self.loop = asyncio.get_event_loop()

    def __del__(self):
        self.executor.shutdown(wait=True)

    async def local(self, context):
        with open(self.src, "rb") as f:
            while True:
                data = await self.loop.run_in_executor(self.executor,
                                                       f.read, 0x8000)
                if not data:
                    await context.channel.send(StopAsyncIteration())
                    break
                await context.channel.send(data)
        result = await context.remote_future
        return result

    async def remote(self, context):
        with open(self.dest, "wb") as f:
            async for data in context.channel:
                await self.loop.run_in_executor(self.executor, f.write, data)
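
# A hedged end-to-end sketch of running the Echo command above through a
# local connector. Connector.launch and Remote.execute are defined in
# implant/connect.py and implant/core.py below; the exact shutdown dance
# here is illustrative only:
#
#     import asyncio
#     from implant import commands, connect
#
#     async def run():
#         connector = connect.Local()
#         # bootstrap a local python subprocess as the "remote" side
#         remote = await connector.launch()
#         com = asyncio.ensure_future(remote.communicate())
#         result = await remote.execute(commands.Echo, data='hello')
#         com.cancel()    # triggers the graceful shutdown path
#         return result
#
#     asyncio.get_event_loop().run_until_complete(run())
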
# ===== implant/commands/remotes.py =====
# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os

from implant import commands, core

log = logging.getLogger(__name__)


class Echo(commands.Echo):
    async def remote(self, context):
        # first: receive
        from_local = []
        async for x in context.channel:
            from_local.append(x)
        log.debug("************ receiving from local: %s", from_local)

        # second: send
        await context.channel.send_iteration("send to local")

        # third: return result
        return {
            'from_local': ''.join(from_local),
            'remote_data': self.data,
            'remote_pid': os.getpid()
        }
""" class Remote(metaclass=abc.ABCMeta): """A remote receiving commands.""" def __init__(self, *, stdin=None, stdout=None, stderr=None, loop=None): self.loop = loop if loop is None else asyncio.get_event_loop() self.stdin = stdin self.stdout = stdout self.stderr = stderr self.channels = core.Channels(reader=stdout, writer=stdin, loop=self.loop) self.dispatcher = core.Dispatcher(self.channels, loop=self.loop) self._lck_communicate = asyncio.Lock(loop=self.loop) async def execute(self, *args, **kwargs): """Just call dispatcher.execute.""" # forward to dispatcher return await self.dispatcher.execute(*args, **kwargs) @abc.abstractmethod async def wait(self): """Wait for Remote to finish.""" async def _shutdown(self, *futures): log.info("Send shutdown: %s", self) shutdown_event = core.ShutdownRemoteEvent() event = self.execute(core.NotifyEvent, event=shutdown_event) await event for fut in futures: fut.cancel() await fut await self.wait() async def communicate(self): """Schedule the dispatcher.""" async with self._lck_communicate: never_ending = asyncio.Future(loop=self.loop) async def _enqueue(): try: await self.channels.enqueue() except Exception as ex: # pylint: disable=W0703 never_ending.set_exception(ex) async def _dispatch(): try: await self.dispatcher.dispatch() except Exception as ex: # pylint: disable=W0703 never_ending.set_exception(ex) fut_enqueue = asyncio.ensure_future(_enqueue(), loop=self.loop) fut_dispatch = asyncio.ensure_future(_dispatch(), loop=self.loop) try: await never_ending except asyncio.CancelledError: await self._shutdown(fut_dispatch, fut_enqueue) except Exception: log.error("Error while processing:\n%s", traceback.format_exc()) raise class SubprocessRemote(Remote): """A remote process.""" def __init__(self, transport, protocol, *, loop=None): super().__init__( stdin=protocol.stdin, stdout=protocol.stdout, stderr=protocol.stderr, loop=loop ) self._transport = transport self._protocol = protocol self.pid = transport.get_pid() def __repr__(self): return '<%s %s>' % (self.__class__.__name__, self.pid) @property def returncode(self): """The exit code of the process.""" return self._transport.get_returncode() async def wait(self): """Wait until the process exit and return the process return code.""" log.info('Waiting for Remote to finish: %s, %s', self.pid, self.returncode) # TODO if we use loops in threads, we have to redirect the watcher # of the main loop to the thread loop return await self._transport._wait() # pylint: disable=W0212 def send_signal(self, signal): """Send a sgnal to the process.""" self._transport.send_signal(signal) def terminate(self): """Terminate the process.""" self._transport.terminate() def kill(self): """Kill the process.""" self._transport.kill() re_sudo_user = re.compile(r'(?:(?P.*)(?=!)(?:!))?(?P.*)') class ConnectorParams( collections.namedtuple('ConnectorParams', [ 'scheme', 'sudo', 'user', 'hostname', 'container'])): __slots__ = () def __new__(cls, scheme, sudo=None, user=None, hostname=None, container=None): return super().__new__(cls, scheme, sudo, user, hostname, container) @classmethod def parse(cls, connection_str): """Parse the connection string into its parts.""" p = urllib.parse.urlparse(connection_str) connector_name, _, container_name, *_ = p sudo, user = False, None if p.username: m = re_sudo_user.match(p.username) if m: sudo, user = m.groups() return cls( connector_name, True if sudo == '' else False if sudo is None else sudo, None if not user else user, p.hostname, None if not container_name else container_name[1:] ) def 
    def unparse(self):
        """Serialize the params back into a connection string."""
        return ''.join([
            self.scheme, '://',
            '' if not self.sudo
            else '!' if self.sudo is True
            else self.sudo + '!',
            '' if not self.user else self.user,
            '@' if (self.sudo or self.user) else '',
            '' if not self.hostname else self.hostname,
            '' if not self.container else '/' + self.container
        ])

    @classmethod
    def create(cls, connector):
        connector_cls = connector.__class__
        params = cls(
            connector_cls.scheme,
            *(getattr(connector, k)
              for k in ('sudo', 'user', 'hostname', 'container')
              if k in connector_cls.__slots__)
        )
        return params

    def create_connector(self):
        """Lookup the connector for that string."""
        kwargs = {
            'sudo': self.sudo,
            'user': self.user,
            'hostname': self.hostname,
            'container': self.container
        }
        # just create the connector by filtering args via slots
        connector_cls = Connector.connectors[self.scheme]
        connector = connector_cls(**{
            k: v for k, v in kwargs.items()
            if k in connector_cls.__slots__
        })
        return connector


class ConnectorMeta(abc.ABCMeta):
    """Connector meta base."""

    base = None
    connectors = {}

    def __new__(mcs, name, bases, dct):
        cls = super().__new__(mcs, name, bases, dct)
        if mcs.base is None:
            mcs.base = cls
        elif not cls.__abstractmethods__:
            mcs.connectors[cls.scheme] = cls
        return cls

    @property
    def scheme(cls):
        """The unique connector scheme is the lowered class name."""
        return cls.__name__.lower()


class Connector(metaclass=ConnectorMeta):
    """Base Connector class."""

    __slots__ = ()

    def __repr__(self):
        connector_params = ConnectorParams.create(self)
        return connector_params.unparse()


class SubprocessConnector(Connector):
    """A `Connector` uniquely defines a remote target."""

    __slots__ = ()

    def __hash__(self):
        return hash(frozenset(map(lambda k: (k, getattr(self, k)),
                                  self.__slots__)))

    def __eq__(self, other):
        return hash(self) == hash(other)

    @staticmethod
    def bootstrap_code(code=core, options=None):
        """Create the python bootstrap code."""
        if code is None:
            code = core
        bootstrap_code = str(bootstrap.Bootstrap(code, options))
        return bootstrap_code

    @abc.abstractmethod
    def arguments(self, *, code=None, options=None, python_bin=None):
        """Iterate over the arguments to start a process.

        :param code: the code to bootstrap the remote process
        :param options: options for the remote process
        :param python_bin: the path to the python binary
        """
    async def launch(self, *, loop=None, code=None, options=None,
                     python_bin=None, **kwargs):
        """Launch a remote process.

        :param code: the python module to bootstrap
        :param options: options to send to remote
        :param python_bin: the path to the python binary to execute
        :param kwargs: further arguments to create the process
        """
        if python_bin is None:
            python_bin = sys.executable
        loop = loop if loop is not None else asyncio.get_event_loop()
        if options is None:
            options = {}

        # TODO handshake
        options['echo'] = echo = b''.join((b'implant', os.urandom(64)))

        *command_args, bootstrap_code = self.arguments(
            code=code, options=options, python_bin=python_bin
        )
        log.debug("Connector arguments: %s", ' '.join(command_args))

        remote = await create_subprocess_remote(*command_args,
                                                bootstrap_code,
                                                loop=loop, **kwargs)

        # TODO protocol needs improvement
        # some kind of a handshake, which is independent
        # of sending echo via process options
        try:
            # wait for the remote to echo back
            remote_echo = await remote.stdout.readexactly(len(echo))
            assert echo == remote_echo, "Remote process misbehaves!"
        except AssertionError:
            raise RemoteMisbehavesError(
                "Remote does not echo `{}`!".format(echo))
        except EOFError:
            errors = []
            async for line in remote.stderr:
                errors.append(line)
            log.error("Remote closed stdout on bootstrap:\n%s",
                      (b''.join(errors)).decode('utf-8'))
            raise RemoteMisbehavesError("Remote closed stdout!", errors)

        log.info("Started remote process: %s", remote)
        return remote


_DEFAULT_LIMIT = 2 ** 16


async def create_subprocess_remote(program, *args, loop=None,
                                   limit=_DEFAULT_LIMIT, **kwds):
    """Create a remote subprocess."""
    if loop is None:
        loop = asyncio.events.get_event_loop()

    def _preexec_detach_from_parent():
        # prevents zombie processes via ssh
        os.setpgrp()

    def _protocol_factory():
        return asyncio.subprocess.SubprocessStreamProtocol(limit=limit,
                                                           loop=loop)

    transport, protocol = await loop.subprocess_exec(
        _protocol_factory,
        program, *args,
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        preexec_fn=_preexec_detach_from_parent,
        **kwds
    )
    return SubprocessRemote(transport, protocol)


class Local(SubprocessConnector):
    """A `Connector` to a local python process."""

    __slots__ = ('sudo',)

    def __init__(self, *, sudo=None):
        super().__init__()
        self.sudo = sudo

    def arguments(self, *, code=None, options=None, python_bin=None):
        if python_bin is None:
            python_bin = sys.executable
        bootstrap_code = self.bootstrap_code(code, options)

        # sudo
        if self.sudo:
            yield 'sudo'
            # optionally with user
            if self.sudo is not True:
                yield from ('-u', self.sudo)

        yield from (str(python_bin), '-c', bootstrap_code)


class Ssh(Local):
    """A `Connector` for remote hosts reachable via SSH.

    If a hostname is omitted, this connector acts like `Local`.
    """

    __slots__ = ('sudo', 'hostname', 'user')

    def __init__(self, *, hostname=None, user=None, sudo=None):
        super().__init__(sudo=sudo)
        self.hostname = hostname
        self.user = user

    def arguments(self, *, code=None, options=None, python_bin=None):
        *local_arguments, _, _, bootstrap_code = super().arguments(
            code=code, options=options, python_bin=python_bin
        )

        # ssh
        if self.hostname is not None:
            bootstrap_code = shlex.quote(bootstrap_code)
            yield from ('ssh', '-T')
            # optionally with user
            if self.user is not None:
                yield from ('-l', self.user)

            # # remote port forwarding
            # yield '-R'
            # yield '10001:localhost:10000'

            yield self.hostname

        yield from local_arguments
        yield from (str(python_bin), '-c', bootstrap_code)


class Lxd(Ssh):
    """A `Connector` for accessing a lxd container.

    If the hostname is omitted, the lxd container is local.
    """

    __slots__ = ('sudo', 'hostname', 'user', 'container')

    def __init__(self, *, container, hostname=None, user=None, sudo=None):
        super().__init__(hostname=hostname, user=user, sudo=sudo)
        self.container = container

    def arguments(self, *, code=None, options=None, python_bin=None):
        *ssh_arguments, _, _, bootstrap_code = super().arguments(
            code=code, options=options, python_bin=python_bin
        )

        yield from ssh_arguments
        yield from shlex.split(
            '''lxc exec {self.container} {python_bin} -- -c'''
            .format(**locals()))
        yield bootstrap_code
        # yield from (
        #     '(', 'lxc', 'exec', self.container,
        #     str(python_bin), '--', '-c', bootstrap_code,
        #     '||', 'printf', '"\xff\xff"', ')'
        # )
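
# A quick illustration of the connection-string round trip handled by
# ConnectorParams above; the example string is made up, the format
# (scheme://[sudo!][user@][hostname][/container]) follows parse()/unparse():
#
#     params = ConnectorParams.parse('lxd://root!admin@myhost/mycontainer')
#     # -> ConnectorParams(scheme='lxd', sudo='root', user='admin',
#     #                    hostname='myhost', container='mycontainer')
#     connector = params.create_connector()   # -> an Lxd instance
#     assert params.unparse() == 'lxd://root!admin@myhost/mycontainer'
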
# ===== implant/core.py =====
# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The core module is transferred to the remote process and will
bootstrap pipe communication.

It creates default channels and dispatches commands accordingly.
"""
# pylint: disable=C0302
import abc
import asyncio
import collections
import concurrent
import functools
import importlib.abc
import importlib.machinery
import importlib.util
import logging
import logging.config
import os
import signal
import struct
import sys
import time
import traceback
import uuid
import weakref
import zlib

from . import msgpack

PY_35 = sys.version_info >= (3, 5)
PY_352 = sys.version_info >= (3, 5, 2)

log = logging.getLogger(__name__)


class reify:
    """Taken from pyramid: create a cached property."""

    def __init__(self, wrapped):
        self.wrapped = wrapped
        functools.update_wrapper(self, wrapped)

    def __get__(self, inst, objtype=None):
        if inst is None:
            return self
        val = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, val)
        return val


@msgpack.register(object, 0x01)
class CustomEncoder:
    """Encode custom objects registered before."""

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        data_type = type(data)
        encoder = msgpack.get_custom_encoder(data_type)
        if encoder is None:
            raise TypeError(
                "There is no custom encoder for this type registered: {}"
                .format(data_type))
        wrapped = {
            'type': data_type.__name__,
            'module': data_type.__module__,
            'data': encoder.__msgpack_encode__(data, data_type)
        }
        return msgpack.encode(wrapped)

    @classmethod
    def __msgpack_decode__(cls, encoded_data, data_type):
        wrapped = msgpack.decode(encoded_data)
        module = sys.modules[wrapped['module']]
        data_type = getattr(module, wrapped['type'])
        encoder = msgpack.get_custom_encoder(data_type)
        if encoder is None:
            raise TypeError(
                "There is no custom encoder for this type registered: {}"
                .format(data_type))
        data = encoder.__msgpack_decode__(wrapped['data'], data_type)
        return data


@msgpack.register(tuple, 0x02)
class TupleEncoder:
    """Encoder for :py:obj:`tuple`."""

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode(list(data))

    @classmethod
    def __msgpack_decode__(cls, encoded_data, data_type):
        return tuple(msgpack.decode(encoded_data))


@msgpack.register(set, 0x03)
class SetEncoder:
    """Encoder for :py:obj:`set`."""

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode(list(data))

    @classmethod
    def __msgpack_decode__(cls, encoded_data, data_type):
        return set(msgpack.decode(encoded_data))


@msgpack.register(Exception, 0x04)
class ExceptionEncoder:
    """Encoder for :py:obj:`Exception`."""

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode(data.args)

    @classmethod
    def __msgpack_decode__(cls, encoded_data, data_type):
        return data_type(*msgpack.decode(encoded_data))


@msgpack.register(StopAsyncIteration, 0x05)
class StopAsyncIterationEncoder:
    """Encoder for :py:obj:`StopAsyncIteration`."""

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode(data.args)

    @classmethod
    def __msgpack_decode__(cls, encoded_data, data_type):
        return StopAsyncIteration(*msgpack.decode(encoded_data))
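
# A hedged sketch of registering an encoder for your own type, following
# the pattern of the encoders above (the `Point` class and the 0x7f code
# are made up for illustration):
#
#     @msgpack.register(Point, 0x7f)
#     class PointEncoder:
#         @classmethod
#         def __msgpack_encode__(cls, data, data_type):
#             return msgpack.encode((data.x, data.y))
#
#         @classmethod
#         def __msgpack_decode__(cls, encoded_data, data_type):
#             return Point(*msgpack.decode(encoded_data))
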
class Uid(uuid.UUID):
    """A unique id, which is basically a :py:obj:`python:uuid.uuid1`
    instance."""

    def __init__(self, bytes=None):     # pylint: disable=W0622
        if bytes is None:
            super().__init__(bytes=uuid.uuid1().bytes, version=1)
        else:
            super().__init__(bytes=bytes, version=1)

    @property
    def time(self):
        """The timestamp of the uuid1."""
        uid_time = (super().time - 0x01b21dd213814000) * 100 / 1e9
        return uid_time

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return data.bytes

    @classmethod
    def __msgpack_decode__(cls, encoded, data_type):
        return cls(bytes=encoded)


class ConnectionLostStreamReaderProtocol(asyncio.StreamReaderProtocol):
    """Call a callback on connection_lost."""

    def __init__(self, *args, connection_lost_cb, **kwargs):
        super().__init__(*args, **kwargs)
        self.connection_lost_cb = connection_lost_cb

    def connection_lost(self, exc):
        super().connection_lost(exc)
        self.connection_lost_cb(exc)


class Incomming(asyncio.StreamReader):
    """A context for an incoming pipe."""

    def __init__(self, *, connection_lost_cb=None, pipe=sys.stdin, loop=None):
        super(Incomming, self).__init__(loop=loop)
        self.pipe = os.fdopen(pipe) if isinstance(pipe, int) else pipe
        self.connection_lost_cb = connection_lost_cb

    async def __aenter__(self):
        await self.connect()
        return self

    async def connect(self):
        """Connect the pipe."""
        if self.connection_lost_cb:
            protocol = ConnectionLostStreamReaderProtocol(
                self,
                connection_lost_cb=self.connection_lost_cb,
                loop=self._loop
            )
        else:
            protocol = asyncio.StreamReaderProtocol(self, loop=self._loop)

        transport, protocol = await self._loop.connect_read_pipe(
            lambda: protocol,
            self.pipe,
        )
        return transport, protocol

    async def __aexit__(self, exc_type, value, tb):
        self._transport.close()

    async def readexactly(self, n):
        """Read exactly n bytes from the stream.

        This is a shorter and faster implementation than the original
        one (see https://github.com/python/asyncio/issues/394).
        """
""" buffer, missing = bytearray(), n while missing: if not self._buffer: await self._wait_for_data('readexactly') if self._eof or not self._buffer: raise asyncio.IncompleteReadError(bytes(buffer), n) length = min(len(self._buffer), missing) buffer.extend(self._buffer[:length]) del self._buffer[:length] missing -= length self._maybe_resume_transport() return buffer class Outgoing: """A context for an outgoing pipe.""" def __init__(self, *, pipe=sys.stdout, reader=None, loop=None): self.loop = loop if loop is not None else asyncio.get_event_loop() self.pipe = os.fdopen(pipe) if isinstance(pipe, int) else pipe self.transport = None self.reader = reader self.writer = None async def __aenter__(self): writer = await self.connect() return writer async def connect(self): """Connect the pipe.""" self.transport, protocol = await self.loop.connect_write_pipe( asyncio.streams.FlowControlMixin, self.pipe ) writer = asyncio.streams.StreamWriter( self.transport, protocol, self.reader, self.loop ) return writer async def __aexit__(self, exc_type, value, tb): self.transport.close() def split_data(data, size=1024): """Create a generator to split data into chunks.""" data_view, data_len, start = memoryview(data), len(data), 0 while start < data_len: end = min(start + size, data_len) yield data_view[start:end] start = end class OrderedMeta(type): """Preserve the order of instance creation.""" items = [] def __call__(cls, *args, **kwargs): inst = super().__call__(*args, **kwargs) cls.items.append(inst) return inst @classmethod def ordered_items(mcs, dct, cls_order=None): """Sort and filter items by type and instance creation.""" def _sort(item): if cls_order is not None: try: cls_index = cls_order.index(item[1].__class__) except ValueError: cls_index = None return cls_index, mcs.items.index(item[1]) return mcs.items.index(item[1]) items = collections.OrderedDict(sorted( (item for item in dct.items() if item[1] in mcs.items), key=_sort )) return items class BaseHeaderItem(metaclass=OrderedMeta): """A base item of a header.""" def __init__(self, *, default=None, encoder=None, decoder=None): self.index = None self.name = None self.default = default self.encoder = encoder self.decoder = decoder def set_index(self, index): """Set the index of the item in the header.""" self.index = index def __set_name__(self, owner, name): self.name = name def encode(self, value): """Encode the value of that item.""" if self.encoder: value = self.encoder(value, self) return value @staticmethod def decode(value): """Decode the value of that item.""" return value def __get__(self, inst, cls): if inst is None: return self value = self.decode(inst) if self.decoder: value = self.decoder(value, self) setattr(inst, self.name, value) return value class HeaderItem(BaseHeaderItem): """An item of a header.""" def __init__(self, fmt, **kwargs): super().__init__(**kwargs) self.fmt = fmt @property def size(self): """The size of the item.""" return struct.calcsize(self.fmt) def encode(self, value): value = super().encode(value) return struct.pack(self.fmt, value or self.default) def decode(self, value): value, = struct.unpack_from(self.fmt, value, self.index) return value class Flag(BaseHeaderItem): """A boolean flag of a header.""" def encode(self, value): value = super().encode(value) return 0 if not value else value << self.index def decode(self, value): value, = struct.unpack_from('!1B', value, 0) value = value >> self.index & 1 return value class HeaderMeta(type): """Order items and set the size of the header.""" def __new__(mcs, name, 

class HeaderMeta(type):
    """Order items and set the size of the header."""

    def __new__(mcs, name, bases, dct):
        items = dct['items'] = OrderedMeta.ordered_items(
            dct, (Flag, HeaderItem))
        dct['size'] = mcs.apply_items_index(items)
        cls = type.__new__(mcs, name, bases, dct)
        if sys.version_info < (3, 6):
            for item_name, item in items.items():
                item.__set_name__(cls, item_name)
        return cls

    @classmethod
    def apply_items_index(mcs, items):
        """Apply the index of each item."""
        flag_count = 0
        item_size = 0
        for item in items.values():
            if isinstance(item, Flag):
                item.set_index(flag_count)
                flag_count += 1
                item_size = (flag_count // 8) + (flag_count % 8 != 0)
            else:
                # items are ordered after type
                item.set_index(item_size)
                item_size += item.size
        return item_size

    def __call__(cls, encoded_bytes=None, **items):
        if encoded_bytes:
            inst = super().__call__(encoded_bytes)
            return inst

        flags = 0
        encoded_items = []
        for name, item in cls.items.items():
            if isinstance(item, Flag):
                flags += item.encode(items.get(name, False))
            else:
                encoded_items.append(item.encode(items.get(name, None)))

        encoded_flags = bytes([flags])
        encoded_items.insert(0, encoded_flags)
        inst = super().__call__(b''.join(encoded_items))
        return inst


class Header(bytes, metaclass=HeaderMeta):
    """The chunk header with flags and items."""

    eom = Flag()
    send_ack = Flag()
    recv_ack = Flag()
    compression = Flag()
    uid = HeaderItem('!16s',
                     encoder=lambda uid, _: uid.bytes,
                     decoder=lambda x, _: Uid(bytes=x))
    channel_name_len = HeaderItem('!H', default=0)
    data_len = HeaderItem('!I', default=0)

    def __repr__(self):
        return ('<Header {self.uid}'
                ' channel_name_len={self.channel_name_len}'
                ' data_len={self.data_len}{eom}>'
                .format(self=self, eom=' eom' if self.eom else ''))
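
# A short sketch of the Header byte layout above: one flags byte,
# a 16-byte uid, a 2-byte channel name length and a 4-byte data length
# (values here are illustrative):
#
#     uid = Uid()
#     header = Header(uid=uid, channel_name_len=7, data_len=1024,
#                     eom=True, send_ack=True, compression=False)
#     assert isinstance(header, bytes) and len(header) == Header.size
#     parsed = Header(bytes(header))
#     assert parsed.eom and parsed.send_ack and not parsed.compression
#     assert parsed.data_len == 1024
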

Chunk = collections.namedtuple('Chunk', ('header', 'channel_name', 'data'))


class Channels:
    """Hold references to all channel queues and route messages
    accordingly.
    """

    chunk_size = 0x8000

    # pylint: disable=E0602
    log = logging.getLogger(__module__ + '.' + __qualname__)

    def __init__(self, reader, writer, *, loop=None):
        """Create a :py:obj:`Channels` instance which delegates incoming
        messages into their appropriate :py:obj:`Channel` queues.

        :param reader: :py:obj:`python:asyncio.StreamReader`
        :param writer: :py:obj:`python:asyncio.StreamWriter`
        :param loop: the event loop
        """
        self.loop = loop if loop is not None else asyncio.get_event_loop()

        self.acknowledgements = {}
        """Global acknowledgment futures distinctive by uid."""

        self.incomming = weakref.WeakValueDictionary()
        """A collection of all active channels."""

        self.reader = reader
        self.writer = writer
        self._lock_communicate = asyncio.Lock(loop=self.loop)

    def get_channel(self, channel_name):
        """Create a channel and weakly register its queue.

        :param channel_name: the name of the channel to create
        :returns: a :py:obj:`Channel` instance with a bound send method
        """
        channel = Channel(
            channel_name,
            send=functools.partial(self.send, channel_name),
            loop=self.loop
        )
        try:
            return channel
        finally:
            self.incomming[channel_name] = channel

    async def enqueue(self):
        """Schedule receive tasks.

        Incoming chunks are collected and stored in the appropriate
        channel queue.
        """
        async with self._lock_communicate:
            # start receiving
            fut_receive_reader = asyncio.ensure_future(
                self._receive_reader(), loop=self.loop)
            try:
                # never ending
                await asyncio.Future(loop=self.loop)
            except asyncio.CancelledError:
                self.log.info("Shutdown of message enqueueing")
                # stop receiving new messages
                fut_receive_reader.cancel()
                await fut_receive_reader

    async def _read_chunk(self):
        """Read a single chunk from :py:obj:`Channels.reader`."""
        # read header
        raw_header = await self.reader.readexactly(Header.size)
        header = Header(raw_header)
        self.log.debug('read header: %s', repr(header))

        # read channel name
        channel_name = \
            (await self.reader.readexactly(header.channel_name_len)).decode() \
            if header.channel_name_len else None
        self.log.debug('read channel_name: %s', channel_name)

        # read data
        if header.data_len:
            data = await self.reader.readexactly(header.data_len)
            if header.compression:
                data = zlib.decompress(data)
        else:
            data = None
        self.log.debug('read data: %s', header.data_len)

        chunk = Chunk(header, channel_name, data)
        return chunk
""" if chunk.header.send_ack: # we have to acknowledge the reception await self._send_ack(chunk.header.uid) if chunk.header.eom: # put message into channel queue if chunk.header.uid in buffer and chunk.channel_name: msg = msgpack.decode(buffer[chunk.header.uid]) self.log.debug('%s - decoded message %s for channel: %s', chunk.header.uid, msg, chunk.channel_name) try: # try to store message in channel queue = self.incomming[chunk.channel_name] self.log.debug('Put message %s into queue: %s', msg, chunk.channel_name) await queue.put(msg) except Exception: # pylint: disable=W0703 self.log.error( 'Error while putting message %s into queue: %s', msg, chunk.channel_name) finally: del buffer[chunk.header.uid] # acknowledge reception ack_future = self.acknowledgements.get(chunk.header.uid) if ack_future and chunk.header.recv_ack: try: self.log.debug("%s: acknowledge", chunk.header.uid) duration = time.time() - chunk.header.uid.time ack_future.set_result((chunk.header.uid, duration)) finally: del self.acknowledgements[chunk.header.uid] @classmethod def _feed_data(cls, buffer, chunk): if chunk.data: if chunk.header.uid not in buffer: buffer[chunk.header.uid] = bytearray() buffer[chunk.header.uid].extend(chunk.data) # debug if chunk.channel_name: cls.log.debug("%s: channel `%s` receives: %s bytes", chunk.header.uid, chunk.channel_name, len(chunk.data) if chunk.data else 0) else: cls.log.debug("%s: no channel received: %s", chunk.header.uid, chunk.header) async def _receive_single_message(self, buffer): chunk = await self._read_chunk() self._feed_data(buffer, chunk) await self._finalize_message(buffer, chunk) def _log_incomming(self): self.log.debug('Active channels:') for key, queue in self.incomming.items(): self.log.debug('\t%s: %s', key, queue.qsize()) async def _receive_reader(self): """Start reception of messages.""" # receive incomming data into queues self.log.info("Start receiving from %s...", self.reader) buffer = {} try: while True: await self._receive_single_message(buffer) self._log_incomming() except (asyncio.CancelledError, GeneratorExit): if buffer: self.log.warning("Receive buffer was not empty when canceled!") except EOFError: self.log.info("While waiting for data, we received EOF.") except: # noqa self.log.error("Error while receiving:\n%s", traceback.format_exc()) raise async def _send_ack(self, uid): """Send an acknowledgement message. :param uid: :py:obj:`Uid` """ # no channel_name, no data header = Header(uid=uid, eom=True, recv_ack=True) self.log.debug("%s: send acknowledgement", uid) await self._send_raw(header) async def _send_raw(self, *data): for part in data: self.writer.write(part) await self.writer.drain() self.log.debug('send raw data: %s', sum(map(len, data))) async def send(self, channel_name, data, ack=False, compress=6): """Send data in a encoded form to the channel. :param channel_name: the name of the channel :param data: the python object to send :param ack: request acknowledgement of the reception of that message :param compress: compress the data with zlib Messages are split into chunks and put into the outgoing queue. 
""" uid = Uid() encoded_channel_name = channel_name.encode() encoded_data = msgpack.encode(data) channel_name_len = len(encoded_channel_name) self.log.debug("%s: channel `%s` sends: %s bytes", uid, channel_name, len(encoded_data)) for part in split_data(encoded_data, self.chunk_size): if compress: raw_len = len(part) part = zlib.compress(part, compress) comp_len = len(part) self.log.debug("%s: compression ratio of %s -> %s: %.2f%%", uid, raw_len, comp_len, comp_len * 100 / raw_len) header = Header(uid=uid, channel_name_len=channel_name_len, data_len=len(part), eom=False, send_ack=False, compression=bool(compress)) self.log.debug('%s - send part', uid) await self._send_raw(header, encoded_channel_name, part) # if acknowledgement is asked for, we await this future # and return its result # see _receive_reader for resolution of future if ack: ack_future = asyncio.Future(loop=self.loop) self.acknowledgements[uid] = ack_future header = Header(uid=uid, channel_name_len=channel_name_len, data_len=0, eom=True, send_ack=ack, compression=False) self.log.debug('%s - send eom', uid) await self._send_raw(header, encoded_channel_name) if ack: self.log.debug("%s: wait for acknowledgement...", uid) acknowledgement = await ack_future self.log.debug("%s: acknowldeged: %s", uid, acknowledgement) return acknowledgement class Channel(asyncio.Queue): """Channel provides means to send and receive messages bound to a specific channel name. """ def __init__(self, name=None, *, send, loop=None): """Initialize the channel. :param name: the channel name :param send: the partial send method of Channels :param loop: the event loop """ super().__init__(loop=loop) self.name = name self.send = send """The send method bound to this channel's name. See :py:func:`Channels.send` for details. """ def __repr__(self): return '<{0.name} {in_size}>'.format( self, in_size=self.qsize(), ) async def pop(self): """Get one item from the queue and remove it on return.""" msg = await super().get() try: return msg finally: self.task_done() def __await__(self): """Receive the next message in this channel.""" return self.pop().__await__() if PY_35: if PY_352: def __aiter__(self): return self else: async def __aiter__(self): return self async def __anext__(self): data = await self if isinstance(data, StopAsyncIteration): raise data return data async def send_iteration(self, iterable): """Send an iterable to the remote.""" if isinstance(iterable, collections.abc.AsyncIterable): log.debug("Channel %s sends async iterable: %s", self, iterable) async for value in iterable: await self.send(value) else: log.debug("Channel %s sends iterable: %s", self, iterable) for value in iterable: await self.send(value) await self.send(StopAsyncIteration()) def exclusive(fun): """Make an async function call exclusive.""" lock = asyncio.Lock() log.debug("Locking function: %s -> %s", lock, fun) async def _locked_fun(*args, **kwargs): log.debug("Wait for lock releasing: %s -> %s", lock, fun) async with lock: log.debug("Executing locked function: %s -> %s", lock, fun) return await fun(*args, **kwargs) return _locked_fun DispatchLocalContext = collections.namedtuple( 'DispatchContext', ('loop', 'channel', 'execute', 'fqin', 'remote_future') ) DispatchRemoteContext = collections.namedtuple( 'DispatchContext', ('loop', 'channel', 'execute', 'fqin', 'pending_remote_task') ) DISPATCHER_CHANNEL_NAME = 'Dispatcher' class Dispatcher: """Enables execution of :py:obj:`Command` s. 

class Dispatcher:
    """Enables execution of :py:obj:`Command` s.

    A :py:obj:`Command` is split into local and remote part, where a
    context with a dedicated :py:obj:`Channel` is provided to enable
    streaming of arbitrary data. The local part also gets a remote
    future passed, which resolves to the result of the remote part of
    the :py:obj:`Command`.
    """

    log = logging.getLogger(__module__ + '.' + __qualname__)   # noqa

    def __init__(self, channels, *, loop=None):
        """Create a dispatcher, which executes messages on its own
        :py:obj:`Channel` to enable Command execution and communication
        via distinct :py:obj:`Channel` s.
        """
        self.loop = loop if loop is not None else asyncio.get_event_loop()

        self.channels = channels
        """The collection of all channels."""

        self.channel = self.channels.get_channel(DISPATCHER_CHANNEL_NAME)
        """A channel for the dispatcher itself."""

        self.pending_commands = collections.defaultdict(
            functools.partial(asyncio.Future, loop=self.loop))
        """Futures of :py:obj:`Command` s which are not finished yet."""

        self.pending_dispatches = collections.defaultdict(
            functools.partial(asyncio.Event, loop=self.loop))
        """A collection of dispatches, which are still not finished."""

        self.pending_remote_tasks = set()

        self._lock_dispatch = asyncio.Lock(loop=self.loop)

    async def dispatch(self):
        """Start sending and receiving messages and executing them."""
        async with self._lock_dispatch:
            fut_execute_channels = asyncio.ensure_future(
                self._execute_channels(), loop=self.loop)
            try:
                # never ending
                await asyncio.Future(loop=self.loop)
            except asyncio.CancelledError:
                self.log.info("Shutdown of dispatcher")
                for task in self.pending_remote_tasks:
                    self.log.info("Waiting for task to finalize: %s", task)
                    await task
                fut_execute_channels.cancel()
                await fut_execute_channels

    async def _execute_channels(self):
        """Execute messages sent via our :py:obj:`Dispatcher.channel`."""
        self.log.info("Listening on channel %s for command dispatch...",
                      self.channel)

        def _handle_message_exception(message, fut):
            try:
                fut.result()
            except Exception as ex:     # pylint: disable=W0703
                tb = traceback.format_exc()
                self.log.error("traceback:\n%s", tb)
                asyncio.ensure_future(
                    self.channel.send(DispatchException(message.fqin,
                                                        exception=ex,
                                                        tb=tb))
                )

        try:
            while True:
                message = await self.channel.pop()
                self.log.info("[a] %s - received dispatch message: %s",
                              message.fqin, message)
                fut_message = asyncio.ensure_future(message(self))
                fut_message.add_done_callback(
                    functools.partial(_handle_message_exception, message))
        except asyncio.CancelledError:
            pass
        except Exception:   # pylint: disable=W0703
            self.log.error('Error in dispatcher:\n%s',
                           traceback.format_exc())
        finally:
            # teardown here
            for fqin, fut in list(self.pending_commands.items()):
                self.log.warning("Teardown pending command: %s, %s",
                                 fqin, fut)
                await fut
                del self.pending_commands[fqin]
""" command, fqin = Command.create_command_fqin(command_name, params) self.log.info('[1] %s - send command', fqin) await self.channel.send(DispatchCommand(fqin, *command.dispatch_data)) async with self.remote_future(fqin, command) as future: context = self.local_context(fqin, future) try: evt_dispatch_ready = self.pending_dispatches[fqin] self.log.info( "[2] %s - waiting for remote dispatch to be ready", fqin) await evt_dispatch_ready.wait() self.log.info("[4] %s - execute local command", fqin) # execute local side of command result = await command.local(context) # get remote_future future.result() return result except: # noqa self.log.error("Error while executing command: %s\n%s", command, traceback.format_exc()) raise def remote_future(self, fqin, command): # noqa """Create a context for remote command future by sending `DispatchCommand` and returning its pending future. """ class _context: async def __aenter__(ctx): # noqa # send execution request to remote future = self.pending_commands[fqin] return future async def __aexit__(ctx, *args): # noqa del self.pending_commands[fqin] return _context() def local_context(self, fqin, remote_future): """Create a local context to pass to a :py:obj:`Command` s local part. The :py:obj:`Channel` is built via a fully qualified instance name (fqin). """ channel = self.channels.get_channel(fqin) context = DispatchLocalContext( loop=self.loop, channel=channel, execute=self.execute, fqin=fqin, remote_future=remote_future ) return context def remote_context(self, fqin, pending_remote_task): """Create a remote context to pass to a :py:obj:`Command` s remote part. The :py:obj:`Channel` is built via a fully qualified instance name (fqin). """ channel = self.channels.get_channel(fqin) context = DispatchRemoteContext( loop=self.loop, channel=channel, execute=self.execute, fqin=fqin, pending_remote_task=pending_remote_task ) return context async def execute_remote(self, fqin, command): """Execute the remote part of a `Command`. This method is called by a `DispatchCommand` message. The result is send via `Dispatcher.channel` to resolve the pending command future. """ # TODO current_task is not a stable API see PyO3/tokio#54 current_task = asyncio.Task.current_task() self.pending_remote_tasks.add(current_task) self.log.info("[d] %s - starting remote task", fqin) context = self.remote_context(fqin, current_task) self.log.info("[e] %s - sending remote dispatch ready", fqin) await self.channel.send(DispatchReady(fqin)) try: # execute remote side of command self.log.info('[f] %s - execute remote side', fqin) result = await command.remote(context) self.log.info('[g] %s - send remote result', fqin) await self.channel.send(DispatchResult(fqin, result=result)) return result except asyncio.CancelledError: self.log.info("Remote execution canceled") finally: self.log.info("[h] %s - Finalizing remote task...", fqin) self.pending_remote_tasks.remove(current_task) def set_dispatch_ready(self, fqin): """Sets the pending dispatch ready, so the command execution continues. 
""" evt = self.pending_dispatches[fqin] evt.set() self.log.info("[3] %s - set dispatch ready", fqin) def set_dispatch_exception(self, fqin, tb, exception): """Set an exception for a pending command.""" future = self.pending_commands[fqin] future.set_exception(exception) self.log.info("[0] %s - Dispatch exception:\n%s", fqin, tb) def set_dispatch_result(self, fqin, result): """Set a result for a pending command.""" future = self.pending_commands[fqin] future.set_result(result) self.log.info("[5] %s - Dispatch result", fqin) async def execute_dispatch_command(self, fqin, command_name, params): """Create a command and execute it.""" try: self.log.info('[b] %s - create command', fqin) command = await Command.create_command(command_name, params, loop=self.loop) self.log.info('[c] %s - start execute_remote', fqin) await self.execute_remote(fqin, command) self.log.info('%s - finished execute_remote', fqin) except Exception as ex: # noqa tb = traceback.format_exc() self.log.error("traceback:\n%s", tb) asyncio.ensure_future( self.channel.send(DispatchException(fqin, exception=ex, tb=tb)) ) class _CommandMeta(type): __command_base__ = None __commands__ = {} def __new__(mcs, name, bases, dct): """Create Command class. Add command_name as __module__:__qualname__ Collect parameters """ dct['command_name'] = dct['__module__'] + ':' + dct['__qualname__'] dct['parameters'] = {name: attr for name, attr in dct.items() if isinstance(attr, Parameter)} cls = type.__new__(mcs, name, bases, dct) cls.__params__ = params = collections.OrderedDict() for base in reversed(cls.__mro__): base_params = [(n, p) for (n, p) in base.__dict__.items() if isinstance(p, Parameter)] if base_params: params.update(base_params) # set parameter names in python < 3.6 if sys.version_info < (3, 6): for name, param in params.items(): param.__set_name__(cls, name) # check for remote outsourcing if hasattr(cls, 'remote')\ and isinstance(cls.remote, CommandRemote): cls.remote.__set_name__(cls, 'remote') if mcs.__command_base__ is None: mcs.__command_base__ = cls else: # only register classes except base class mcs.__commands__[cls.command_name] = cls return cls def create_fqin(cls): """Create a fully qualified instance name.""" uid = Uid() fqin = cls.command_name + '/' + str(uid) return fqin def create_command_fqin(cls, command_name, params): """Create a command and its fully qualified instance name.""" command_class = (cls.__commands__[command_name] if isinstance(command_name, str) else command_name) fqin = command_class.create_fqin() command = command_class(**params) return command, fqin async def create_command(cls, command_name, params, *, loop): """Create a command.""" module_name, command_class_name = command_name.split(':') module = sys.modules.get(module_name, await async_import(module_name, loop=loop)) command_class = getattr(module, command_class_name) if isinstance(command_class.remote, CommandRemote): await command_class.remote.prepare() command = command_class(**params) return command class RemoteClassNotSetException(Exception): """Raised when remote class is not set for :py:obj:`CommandRemote`""" class CommandRemote: """Delegates remote task to another class. This is usefull, if one wants not to import remote modules at the master side. """ log = logging.getLogger(__module__ + '.' 

class CommandRemote:
    """Delegate the remote task to another class.

    This is useful if one does not want to import remote modules on
    the master side.
    """

    log = logging.getLogger(__module__ + '.' + __qualname__)   # noqa

    def __init__(self, full_classname):
        self.name = None
        self.module_name, self.class_name = full_classname.rsplit('.', 1)
        self.remote_class = None

    def __set_name__(self, owner, name):
        self.name = name

    def __get__(self, inst, cls):
        if inst is None:
            return self
        if self.remote_class is None:
            raise RemoteClassNotSetException(
                'remote_class must be set before accessing the descriptor')
        remote_inst = self.remote_class()
        for name, param in inst:
            setattr(remote_inst, name, param)
        setattr(inst, self.name, remote_inst.remote)
        self.log.debug('Remote of %s outsourced to %s',
                       inst, remote_inst.remote)
        return remote_inst.remote

    def set_remote_class(self, module):
        """Set the remote class."""
        self.remote_class = getattr(module, self.class_name)

    async def prepare(self):
        """Import the module for the remote class."""
        module = await async_import(self.module_name)
        self.set_remote_class(module)


class NoDefault:
    """Just a marker class to represent no default.

    This is to separate really nothing and `None`.
    """


class Parameter:
    """Define a `Command` parameter."""

    def __init__(self, *, default=NoDefault, description=None):
        self.name = None
        self.default = default
        self.description = description

    def __get__(self, instance, owner):
        if instance is None:
            return self
        try:
            return instance.__dict__[self.name]
        except KeyError:
            if self.default is NoDefault:
                raise AttributeError(
                    "The Parameter has no default value "
                    "and another value was not assigned yet: {}"
                    .format(self.name))
            return self.default

    def __set__(self, instance, value):
        instance.__dict__[self.name] = value

    def __set_name__(self, owner, name):
        self.name = name


class Command(metaclass=_CommandMeta):
    """Common ancestor of all Commands."""

    def __init__(self, **parameters):
        if parameters is not None:
            for name, value in parameters.items():
                setattr(self, name, value)

    def __iter__(self):
        return iter((name, getattr(self, name))
                    for name in self.__class__.__params__)

    def __repr__(self):
        _repr = super().__repr__()
        command_name = self.__class__.command_name
        return "<{command_name} {_repr}>".format(
            command_name=command_name, _repr=_repr)

    @property
    def dispatch_data(self):
        """Data to be dispatched."""
        return (
            self.__class__.command_name,
            self.__class__.__name__,
            self.__class__.__module__,
            dict(self.__iter__())
        )
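
# Defining a new Command follows the pattern used by implant.commands;
# this sketch (class name, parameter and file path are invented) shows
# the two halves of a command:
#
#     class Uptime(Command):
#         pretty = Parameter(default=False, description='format the value')
#
#         async def local(self, context):
#             return await context.remote_future
#
#         async def remote(self, context):
#             with open('/proc/uptime') as f:
#                 seconds = float(f.read().split()[0])
#             return '{:.0f}s'.format(seconds) if self.pretty else seconds
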

class DispatchMessage(metaclass=abc.ABCMeta):
    """Base class for command dispatch communication."""

    log = logging.getLogger(__module__ + '.' + __qualname__)   # noqa

    def __init__(self, fqin):
        self.fqin = fqin

    def __repr__(self):
        return "<{self.__class__.__name__} {self.fqin}>".format(**locals())

    @abc.abstractmethod
    async def __call__(self, dispatcher):
        """Execute the appropriate :py:obj:`Dispatcher` methods to
        implement the core protocol."""


class DispatchCommand(DispatchMessage):
    """Arguments for a command dispatch."""

    log = logging.getLogger(__module__ + '.' + __qualname__)   # noqa

    def __init__(self, fqin, command_name, command_class, command_module,
                 params):
        super().__init__(fqin)
        self.command_name = command_name
        self.command_class = command_class
        self.command_module = command_module
        self.params = params
        self.log.info("Dispatch created: %s", self)

    async def __call__(self, dispatcher):
        # schedule remote execution
        await dispatcher.execute_dispatch_command(self.fqin,
                                                  self.command_name,
                                                  self.params)

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode((
            data.fqin,
            data.command_name,
            data.command_class,
            data.command_module,
            data.params,
        ))

    @classmethod
    def __msgpack_decode__(cls, encoded, data_type):
        return cls(*msgpack.decode(encoded))


class DispatchReady(DispatchMessage):
    """Set the dispatch ready."""

    async def __call__(self, dispatcher):
        dispatcher.set_dispatch_ready(self.fqin)

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode(data.fqin)

    @classmethod
    def __msgpack_decode__(cls, encoded, data_type):
        fqin = msgpack.decode(encoded)
        return cls(fqin)


class DispatchException(DispatchMessage):
    """Remote execution ended in an exception."""

    def __init__(self, fqin, exception, tb=None):
        super().__init__(fqin)
        self.exception = exception
        self.tb = tb or traceback.format_exc()

    async def __call__(self, dispatcher):
        dispatcher.set_dispatch_exception(self.fqin, self.tb, self.exception)

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode((data.fqin, data.exception, data.tb))

    @classmethod
    def __msgpack_decode__(cls, encoded, data_type):
        fqin, exc, tb = msgpack.decode(encoded)
        return cls(fqin, exc, tb)


class DispatchResult(DispatchMessage):
    """The result of a remote execution."""

    def __init__(self, fqin, result=None):
        super().__init__(fqin)
        self.result = result

    async def __call__(self, dispatcher):
        dispatcher.set_dispatch_result(self.fqin, self.result)

    @classmethod
    def __msgpack_encode__(cls, data, data_type):
        return msgpack.encode((data.fqin, data.result))

    @classmethod
    def __msgpack_decode__(cls, encoded, data_type):
        return cls(*msgpack.decode(encoded))


# events are taken from https://github.com/zopefoundation/zope.event
# function names are modified and adopted to asyncio
event_subscribers = []
event_registry = {}


async def notify_event(event):
    """Notify all subscribers of ``event``."""
    for subscriber in event_subscribers:
        await subscriber(event)


def event_handler(event_class, handler_=None, decorator=False):
    """Define an event handler for a (new-style) class.

    This can be called with a class and a handler, or with just a
    class and the result used as a handler decorator.
    """
    if handler_ is None:
        return lambda func: event_handler(event_class, func, True)

    if not event_registry:
        event_subscribers.append(event_dispatch)

    if event_class not in event_registry:
        event_registry[event_class] = [handler_]
    else:
        event_registry[event_class].append(handler_)

    if decorator:
        return event_handler


async def event_dispatch(event):
    """Dispatch an event to every handler."""
    for event_class in event.__class__.__mro__:
        for handler in event_registry.get(event_class, ()):
            await handler(event)
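
# Registering an asyncio event handler with the zope.event-style registry
# above; ShutdownRemoteEvent (defined further below) is the event actually
# used by Core, the handler body here is illustrative:
#
#     @event_handler(ShutdownRemoteEvent)
#     async def _on_shutdown(event):
#         log.info("remote is shutting down")
#
#     # somewhere in a coroutine:
#     await notify_event(ShutdownRemoteEvent())
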

class NotifyEvent(Command):
    """Notify about an event.

    If the remote side registers for this event, it gets notified.
    """

    log = logging.getLogger(__module__ + '.' + __qualname__)   # noqa

    event = Parameter(default=None, description='the event instance,'
                      ' which has to be de/encodable via message pack')
    dispatch_local = Parameter(default=False, description='if True, the local'
                               ' side will also be notified')

    async def local(self, context):     # noqa
        # we wait for remote events to be dispatched first
        await context.remote_future

        if self.dispatch_local:
            self.log.info("Notify local %s", self.event)
            await notify_event(self.event)

    async def remote(self, context):    # noqa
        async def _notify_after_pending_command_finalized():
            self.log.debug("Waiting for finalization of remote task: %s",
                           context.fqin)
            await context.pending_remote_task
            self.log.debug("Notify remote %s", self.event)
            await notify_event(self.event)

        asyncio.ensure_future(_notify_after_pending_command_finalized(),
                              loop=context.loop)


class InvokeImport(Command):
    """Invoke an import of a module on the remote side.

    The local side will import the module first.
    The remote side will trigger the remote import hook, which in turn
    will receive all missing modules from the local side.

    The import is executed in a separate executor thread, to have a
    separate event loop available.
    """

    fullname = Parameter(description='The full module name to be imported')

    async def local(self, context):     # noqa
        module = importlib.import_module(self.fullname)
        log.debug("Local module: %s", module)
        result = await context.remote_future
        return result

    async def remote(self, context):    # noqa
        await async_import(self.fullname)


class FindSpecData(Command):
    """Find spec data for a module to import from the remote side."""

    fullname = Parameter(description='The full module name to find.')

    async def local(self, context):     # noqa
        spec_data = await context.remote_future
        return spec_data

    async def remote(self, context):    # noqa
        return self.spec_data()

    def spec_data(self):
        """Find spec data."""
        spec = importlib.util.find_spec(self.fullname)
        if spec is None:
            return None
        spec_data = {
            'name': spec.name,
            'origin': spec.origin,
            # 'submodule_search_locations': spec.submodule_search_locations,
            'namespace': (spec.loader is None
                          and spec.submodule_search_locations is not None),
            'package': isinstance(spec.submodule_search_locations, list),
            'source': (spec.loader.get_source(spec.name)
                       if isinstance(spec.loader,
                                     importlib.abc.InspectLoader)
                       else None),
        }
        return spec_data
+ __qualname__) # noqa def __init__(self, dispatcher, *, loop): self.dispatcher = dispatcher self.loop = loop def _find_remote_spec_data(self, fullname): self.log.debug('Find spec data: %s', fullname) future = asyncio.run_coroutine_threadsafe( self.dispatcher.execute(FindSpecData, fullname=fullname), loop=self.loop ) spec_data = future.result() self.log.debug('Spec data found: %s', fullname) return spec_data @staticmethod def _create_namespace_spec(spec_data): spec = importlib.machinery.ModuleSpec( name=spec_data['name'], loader=None, origin='remote namespace', is_package=True ) return spec @staticmethod def _create_remote_module_spec(spec_data): origin = 'remote://{}'.format(spec_data['origin']) is_package = spec_data['package'] loader = RemoteModuleLoader( spec_data.get('source', ''), filename=origin, is_package=is_package ) spec = importlib.machinery.ModuleSpec( name=spec_data['name'], loader=loader, origin=origin, is_package=is_package ) return spec def find_spec(self, fullname, path, target=None): """Find the spec of the module.""" self.log.debug('find spec: %s', fullname) spec_data = self._find_remote_spec_data(fullname) if spec_data is None: spec = None elif spec_data['namespace']: spec = self._create_namespace_spec(spec_data) else: spec = self._create_remote_module_spec(spec_data) return spec class RemoteModuleLoader(importlib.abc.ExecutionLoader): """Load the found module spec.""" def __init__(self, source, filename=None, is_package=False): self.source = source self.filename = filename self._is_package = is_package def is_package(self, fullname=None): return self._is_package def get_filename(self, fullname): if not self.filename: raise ImportError return self.filename def get_source(self, fullname): return self.source @classmethod def module_repr(cls, module): return "<module '{}' (remote)>".format(module.__name__) async def async_import(fullname, *, loop=None): """Import a module via the default executor.""" if loop is None: loop = asyncio.get_event_loop() def _import_module(): log.debug("Importing module: %s", fullname) try: module = importlib.import_module(fullname) except ImportError: log.error("Error when importing %s:\n%s", fullname, traceback.format_exc()) raise else: log.debug("Remotely imported module: %s", module) return module module = await loop.run_in_executor(None, _import_module) return module class ShutdownRemoteEvent: """A shutdown event. Shutting down a remote connection is done by gracefully canceling all remote tasks. See :py:obj:`Core.communicate` for details. """ @classmethod def __msgpack_encode__(cls, data, data_type): return None @classmethod def __msgpack_decode__(cls, encoded, data_type): return data_type() class Core: """:py:obj:`Core` starts the :py:obj:`Dispatcher`.""" log = logging.getLogger(__module__ + '.' + __qualname__) # noqa def __init__(self, loop, *, echo=None, **kwargs): self.loop = loop self.echo = echo self.kill_on_connection_lost = True async def communicate(self, reader, writer): """Start the dispatcher and register the :py:obj:`ShutdownRemoteEvent`. On shutdown: 1. the import hook is removed 2. the :py:obj:`Dispatcher.dispatch` task is stopped 3.
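# --- Editor's sketch (not part of the original source): the same PEP-451
# mechanics as ``RemoteModuleFinder``/``RemoteModuleLoader`` above, but with
# a purely local, in-memory source store instead of a dispatcher round-trip;
# ``SOURCES`` and ``demo_mod`` are made-up names for illustration:
#
#     import importlib.abc
#     import importlib.machinery
#     import sys
#
#     SOURCES = {'demo_mod': 'VALUE = 42\n'}
#
#     class DictLoader(importlib.abc.Loader):
#         def exec_module(self, module):
#             source = SOURCES[module.__spec__.name]
#             exec(compile(source, module.__spec__.origin, 'exec'),
#                  module.__dict__)
#
#     class DictFinder(importlib.abc.MetaPathFinder):
#         def find_spec(self, fullname, path, target=None):
#             if fullname not in SOURCES:
#                 return None
#             return importlib.machinery.ModuleSpec(
#                 fullname, DictLoader(), origin='dict://' + fullname)
#
#     sys.meta_path.append(DictFinder())
#     import demo_mod
#     assert demo_mod.VALUE == 42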
the :py:obj:`Channels.enqueue` task is stopped """ try: channels = Channels(reader=reader, writer=writer, loop=self.loop) dispatcher = Dispatcher(channels, loop=self.loop) fut_enqueue = asyncio.ensure_future(channels.enqueue(), loop=self.loop) fut_dispatch = asyncio.ensure_future(dispatcher.dispatch(), loop=self.loop) remote_module_finder = RemoteModuleFinder(dispatcher, loop=self.loop) # shutdown is done via event @event_handler(ShutdownRemoteEvent) async def _shutdown(event): self.log.info("Shutting down...") self.teardown_import_hook(remote_module_finder) fut_dispatch.cancel() await fut_dispatch fut_enqueue.cancel() await fut_enqueue self.log.info('Shutdown end.') self.setup_import_hook(remote_module_finder) await asyncio.gather(fut_enqueue, fut_dispatch) except asyncio.CancelledError: self.log.info("Cancelled communicate??") self.log.info('Communication end.') def handle_connection_lost(self, exc): """We kill the process on connection lost, to avoid orphans.""" log.info('Connection lost: exc=%s', exc) if self.kill_on_connection_lost: pending_tasks = [ task for task in asyncio.Task.all_tasks() if task._state == asyncio.futures._PENDING # noqa ] log.warning('Pending tasks: %s', pending_tasks) pid = os.getpid() log.warning('Force shutdown of process: %s', pid) os.kill(pid, signal.SIGHUP) async def connect_sysio(self): """Connect to :py:obj:`sys.stdin` and :py:obj:`sys.stdout`.""" return await self.connect(stdin=sys.stdin, stdout=sys.stdout) async def connect(self, *, stdin, stdout, stderr=None): """Connect to stdin and stdout pipes.""" self.log.info("Starting process %s: pid=%s of ppid=%s", __name__, os.getpid(), os.getppid()) async with Incomming(pipe=stdin, connection_lost_cb=self.handle_connection_lost) \ as reader: async with Outgoing(pipe=stdout) as writer: # TODO think about generic handshake # send echo to master to prove behavior if self.echo is not None: writer.write(self.echo) await writer.drain() await self.communicate(reader, writer) # suspend kill of process, since we have a clean shutdown self.kill_on_connection_lost = False @staticmethod def setup_import_hook(module_finder): """Add module finder to :py:obj:`sys.meta_path`.""" sys.meta_path.append(module_finder) @staticmethod def teardown_import_hook(module_finder): """Remove a module finder from :py:obj:`sys.meta_path`.""" if module_finder in sys.meta_path: sys.meta_path.remove(module_finder) def setup_logging(self, debug=False, log_config=None): """Setup a minimal logging configuration.""" if log_config is None: log_config = { 'version': 1, 'disable_existing_loggers': False, 'formatters': {'simple': { 'format': ('{asctime} - {process}/{thread} - ' '{levelname} - {name} - {message}'), 'style': '{'}}, 'handlers': {'console': {'class': 'logging.StreamHandler', 'formatter': 'simple', 'level': logging.NOTSET, 'stream': 'ext://sys.stderr'}, 'logfile': {'class': 'logging.FileHandler', 'filename': '/tmp/implant.log', 'formatter': 'simple', 'level': logging.NOTSET}}, 'root': {'handlers': ['console'], 'level': logging.DEBUG if debug else logging.INFO}, } logging.config.dictConfig(log_config) if debug: self.loop.set_debug(debug) @classmethod def main(cls, debug=False, log_config=None, *, loop=None, **kwargs): """Start the event loop and schedule core communication.""" loop = loop if loop is not None else asyncio.get_event_loop() thread_pool_executor = concurrent.futures.ThreadPoolExecutor() loop.set_default_executor(thread_pool_executor) core = cls(loop, **kwargs) core.setup_logging(debug, log_config) try: 
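# --- Editor's sketch (not part of the original source): the shutdown event
# handler above uses the cancel-then-await idiom, which only terminates
# cleanly when the awaited coroutine absorbs the cancellation itself.
# ``worker`` is a hypothetical stand-in for ``Dispatcher.dispatch`` or
# ``Channels.enqueue``:
#
#     async def worker():
#         try:
#             while True:
#                 await asyncio.sleep(1)
#         except asyncio.CancelledError:
#             pass  # release resources, then return normally
#
#     # inside a coroutine:
#     task = asyncio.ensure_future(worker())
#     task.cancel()
#     await task  # returns once worker() has unwound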
loop.run_until_complete(core.connect_sysio()) finally: loop.close() thread_pool_executor.shutdown() cls.log.info("exit") main = Core.main PK! implant/logging.yaml%YAML 1.2 --- disable_existing_loggers: true version: 1 formatters: simple: format: '{asctime} - {process}/{thread} - {levelname} - {name} - {message}' style: '{' colored: (): colorlog.TTYColoredFormatter format: '{asctime} - {process}/{thread} - {log_color}{levelname}{reset} - {name} - {message}' #format: '{log_color}{levelname}{reset} - {name} - {message}' style: '{' handlers: console: class: logging.StreamHandler level: !!python/name:logging.NOTSET #level: DEBUG formatter: colored stream: ext://sys.stderr logfile: class: logging.FileHandler filename: /tmp/implant.log formatter: simple level: !!python/name:logging.NOTSET loggers: implant: level: DEBUG handlers: [console, logfile] propagate: no implant.core: level: DEBUG handlers: [console, logfile] propagate: no implant.core.Channels: level: INFO handlers: [console, logfile] propagate: no implant.evolve.specs: level: INFO root: level: DEBUG handlers: [console] PK! implant/master.py"""Controls a bunch of remotes.""" import asyncio import functools import logging import os import pathlib import signal import sys import traceback from implant import commands, connect, core, testing log = logging.getLogger(__name__) PLUGINS_ENTRY_POINT_GROUP = 'implant.plugins' def parse_command(line): """Parse a command from line.""" args = [] kwargs = {} command, *parts = line.split(' ') for part in parts: if '=' in part: k, v = part.split('=') kwargs[k] = v else: args.append(part) return command, args, kwargs async def _execute_command(io_queues, line): default_lines = { b'e\n': (b'implant.commands:Echo data=bar\n', {}), b'i\n': (b'implant.core:InvokeImport fullname=implant.commands\n', {}), b'\n': (b'implant.commands:SystemLoad data=bar\n', {}), } if line in default_lines: line, _ = default_lines[line] command_name, _, params = parse_command(line[:-1].decode()) log.info("sending: %s %s", command_name, params) try: result = await io_queues.execute(command_name, **params) except Exception as ex: # noqa log.error("Error:\n%s", traceback.format_exc()) else: return result async def log_remote_stderr(remote): # await remote.launched() if remote.stderr: log.info("Logging remote stderr: %s", remote) async for line in remote.stderr: log.debug("\tRemote #%s: %s", remote.pid, line[:-1].decode()) class Console: def __init__(self, connectors, *, loop=None, **options): self.loop = loop if loop is not None else asyncio.get_event_loop() self.options = options self.connectors = connectors async def feed_stdin_to_remotes(self, remotes): try: async with core.Incomming(pipe=sys.stdin, loop=self.loop) as reader: while True: line = await reader.readline() if line == b'': break result = await asyncio.gather( *(_execute_command(remote, line) for remote, *_ in remotes.values()), loop=self.loop ) print("< {}\n >".format(result), end="") except asyncio.CancelledError: log.info("Terminating...") except Exception as ex: log.info(ex) for remote, fut_remote, error_log in remotes.values(): fut_remote.cancel() await fut_remote error_log.cancel() await error_log async def connect(self): remotes = {} for connector, default_args in self.connectors.items(): if remotes.get(connector, None) is not None: log.warning('Process for %s already launched! Skipping...', connector) continue remote = await connector.launch( options=self.options, **default_args, loop=self.loop ) fut_remote = asyncio.ensure_future(remote.communicate(), loop=self.loop) error_log = asyncio.ensure_future(log_remote_stderr(remote), loop=self.loop) remotes[connector] = (remote, fut_remote, error_log) return remotes async def run(self): never_ending = asyncio.Future(loop=self.loop) remotes = await self.connect() feeder = asyncio.ensure_future(self.feed_stdin_to_remotes(remotes), loop=self.loop) def _sigint_handler(): log.info('SIGINT...') never_ending.cancel() self.loop.add_signal_handler(signal.SIGINT, _sigint_handler) try: await never_ending except asyncio.CancelledError: log.debug('Cancelled') pass feeder.cancel() await feeder def main(debug=False, log_config=None): log.info('deballator master process: %s', os.getpid()) loop = asyncio.get_event_loop() # replace existing signal handler with noop as long as our remotes are not fully running # otherwise cancellation of process startup will lead to orphaned remote processes def noop(): log.error('Noop on signal SIGINT') loop.add_signal_handler(signal.SIGINT, noop) options = { 'debug': debug, 'log_config': log_config, # 'venv': False, # 'venv': True, # 'venv': '~/.implant', } # if debug: # log.setLevel(logging.DEBUG) console = Console({ # testing.PipeConnector(loop=loop): {}, connect.Local(): { 'python_bin': pathlib.Path('~/.pyenv/versions/3.5.2/bin/python').expanduser(), }, # connect.Ssh(hostname='localhost'): { # 'python_bin': pathlib.Path('~/.pyenv/versions/3.5.2/bin/python').expanduser(), # }, # connect.Lxd( # container='zesty', # hostname='localhost', # ): { # 'python_bin': pathlib.Path('/usr/bin/python3').expanduser() # }, }, loop=loop, **options) task = asyncio.ensure_future(console.run()) try: loop.run_until_complete(task) except KeyboardInterrupt: log.error('Keyboard interrupt...') task.cancel() loop.run_until_complete(task) except BaseException as ex: core.log.error("Error %s:\n%s", type(ex), traceback.format_exc()) finally: for task in asyncio.Task.all_tasks(): if not task.done(): log.error("pending: %s", task) log.info(' - '.join(["this is the end"] * 3)) loop.stop() loop.close() PK! implant/msgpack.py# u-msgpack-python v2.4.1 - v at sergeev.io # https://github.com/vsergeev/u-msgpack-python # # u-msgpack-python is a lightweight MessagePack serializer and deserializer # module, compatible with both Python 2 and 3, as well as CPython and PyPy # implementations of Python. u-msgpack-python is fully compliant with the # latest MessagePack specification (https://github.com/msgpack/msgpack/blob/master/spec.md). In # particular, it supports the new binary, UTF-8 string, and application ext # types. # # MIT License # # Copyright (c) 2013-2016 vsergeev / Ivan (Vanya) A. Sergeev # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software.
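# --- Editor's sketch (not part of the original source): ``parse_command``
# in implant/master.py above splits a console line into the command name
# plus positional and keyword parameters:
#
#     command, args, kwargs = parse_command('implant.commands:Echo data=bar')
#     assert command == 'implant.commands:Echo'
#     assert args == [] and kwargs == {'data': 'bar'}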
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # """This is a modified Python 3 only version of umsgpack.""" import abc import collections import io import struct import sys ############################################################################## # Ext Class ############################################################################## # Extension type for application-defined types and data class Ext: """ The Ext class facilitates creating a serializable extension object to store an application-defined type and data byte array. """ def __init__(self, type, data): """ Construct a new Ext object. Args: type: application-defined type integer from 0 to 127 data: application-defined data byte array Raises: TypeError: Specified ext type is outside of 0 to 127 range. Example: >>> foo = umsgpack.Ext(0x05, b"\x01\x02\x03") >>> umsgpack.packb({u"special stuff": foo, u"awesome": True}) '\x82\xa7awesome\xc3\xadspecial stuff\xc7\x03\x05\x01\x02\x03' >>> bar = umsgpack.unpackb(_) >>> print(bar["special stuff"]) Ext Object (Type: 0x05, Data: 01 02 03) >>> """ # Application ext type should be 0 <= type <= 127 if not isinstance(type, int) or not (type >= 0 and type <= 127): raise TypeError("ext type out of range") # Check data is type bytes elif sys.version_info[0] == 3 and not isinstance(data, bytes): raise TypeError("ext data is not type \'bytes\'") elif sys.version_info[0] == 2 and not isinstance(data, str): raise TypeError("ext data is not type \'str\'") self.type = type self.data = data def __eq__(self, other): """Compare this Ext object with another for equality.""" return (isinstance(other, self.__class__) and self.type == other.type and self.data == other.data) def __ne__(self, other): """Compare this Ext object with another for inequality.""" return not self.__eq__(other) def __str__(self): """String representation of this Ext object.""" s = "Ext Object (Type: 0x%02x, Data: " % self.type s += " ".join(["0x%02x" % ord(self.data[i:i + 1]) for i in range(min(len(self.data), 8))]) if len(self.data) > 8: s += " ..." 
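# --- Editor's sketch (not part of the original source): exercising the
# ``Ext`` container above — value equality and the 0..127 range check on
# the application type byte:
#
#     tag = Ext(0x05, b'\x01\x02\x03')
#     assert tag == Ext(0x05, b'\x01\x02\x03')
#     try:
#         Ext(128, b'')  # raises: ext type out of range
#     except TypeError:
#         pass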
s += ")" return s def __hash__(self): """Provide a hash of this Ext object.""" return hash((self.type, self.data)) class InvalidString(bytes): """Subclass of bytes to hold invalid UTF-8 strings.""" ############################################################################## # Exceptions ############################################################################## # Base Exception classes class PackException(Exception): """Base class for exceptions encountered during packing.""" class UnpackException(Exception): """Base class for exceptions encountered during unpacking.""" # Packing error class UnsupportedTypeException(PackException): """Object type not supported for packing.""" # Unpacking error class InsufficientDataException(UnpackException): """Insufficient data to unpack the serialized object.""" class InvalidStringException(UnpackException): """Invalid UTF-8 string encountered during unpacking.""" class ReservedCodeException(UnpackException): """Reserved code encountered during unpacking.""" class UnhashableKeyException(UnpackException): """Unhashable key encountered during map unpacking. The serialized map cannot be deserialized into a Python dictionary. """ class DuplicateKeyException(UnpackException): """Duplicate key encountered during map unpacking.""" ############################################################################## # Packing ############################################################################## # You may notice struct.pack("B", obj) instead of the simpler chr(obj) in the # code below. This is to allow for seamless Python 2 and 3 compatibility, as # chr(obj) has a str return type instead of bytes in Python 3, and # struct.pack(...) has the right return type in both versions. class EncoderMeta(type): """Provides methods for msgpack packing.""" # Auto-detect system float precision if sys.float_info.mant_dig == 53: _float_precision = "double" else: _float_precision = "single" def _pack_integer(cls, obj, fp, options): if obj < 0: if obj >= -32: fp.write(struct.pack("b", obj)) elif obj >= -2**(8 - 1): fp.write(b"\xd0" + struct.pack("b", obj)) elif obj >= -2**(16 - 1): fp.write(b"\xd1" + struct.pack(">h", obj)) elif obj >= -2**(32 - 1): fp.write(b"\xd2" + struct.pack(">i", obj)) elif obj >= -2**(64 - 1): fp.write(b"\xd3" + struct.pack(">q", obj)) else: raise UnsupportedTypeException("huge signed int") else: if obj <= 127: fp.write(struct.pack("B", obj)) elif obj <= 2**8 - 1: fp.write(b"\xcc" + struct.pack("B", obj)) elif obj <= 2**16 - 1: fp.write(b"\xcd" + struct.pack(">H", obj)) elif obj <= 2**32 - 1: fp.write(b"\xce" + struct.pack(">I", obj)) elif obj <= 2**64 - 1: fp.write(b"\xcf" + struct.pack(">Q", obj)) else: raise UnsupportedTypeException("huge unsigned int") def _pack_nil(cls, obj, fp, options): fp.write(b"\xc0") def _pack_boolean(cls, obj, fp, options): fp.write(b"\xc3" if obj else b"\xc2") def _pack_float(cls, obj, fp, options): float_precision = options.get('force_float_precision', cls._float_precision) if float_precision == "double": fp.write(b"\xcb" + struct.pack(">d", obj)) elif float_precision == "single": fp.write(b"\xca" + struct.pack(">f", obj)) else: raise ValueError("invalid float precision") def _pack_string(cls, obj, fp, options): obj = obj.encode('utf-8') if len(obj) <= 31: fp.write(struct.pack("B", 0xa0 | len(obj)) + obj) elif len(obj) <= 2**8 - 1: fp.write(b"\xd9" + struct.pack("B", len(obj)) + obj) elif len(obj) <= 2**16 - 1: fp.write(b"\xda" + struct.pack(">H", len(obj)) + obj) elif len(obj) <= 2**32 - 1: fp.write(b"\xdb" + 
struct.pack(">I", len(obj)) + obj) else: raise UnsupportedTypeException("huge string") def _pack_binary(cls, obj, fp, options): if len(obj) <= 2**8 - 1: fp.write(b"\xc4" + struct.pack("B", len(obj)) + obj) elif len(obj) <= 2**16 - 1: fp.write(b"\xc5" + struct.pack(">H", len(obj)) + obj) elif len(obj) <= 2**32 - 1: fp.write(b"\xc6" + struct.pack(">I", len(obj)) + obj) else: raise UnsupportedTypeException("huge binary string") def _pack_oldspec_raw(cls, obj, fp, options): if len(obj) <= 31: fp.write(struct.pack("B", 0xa0 | len(obj)) + obj) elif len(obj) <= 2**16 - 1: fp.write(b"\xda" + struct.pack(">H", len(obj)) + obj) elif len(obj) <= 2**32 - 1: fp.write(b"\xdb" + struct.pack(">I", len(obj)) + obj) else: raise UnsupportedTypeException("huge raw string") def _pack_ext(cls, obj, fp, options): if len(obj.data) == 1: fp.write(b"\xd4" + struct.pack("B", obj.type & 0xff) + obj.data) elif len(obj.data) == 2: fp.write(b"\xd5" + struct.pack("B", obj.type & 0xff) + obj.data) elif len(obj.data) == 4: fp.write(b"\xd6" + struct.pack("B", obj.type & 0xff) + obj.data) elif len(obj.data) == 8: fp.write(b"\xd7" + struct.pack("B", obj.type & 0xff) + obj.data) elif len(obj.data) == 16: fp.write(b"\xd8" + struct.pack("B", obj.type & 0xff) + obj.data) elif len(obj.data) <= 2**8 - 1: fp.write(b"\xc7" + struct.pack("BB", len(obj.data), obj.type & 0xff) + obj.data) elif len(obj.data) <= 2**16 - 1: fp.write(b"\xc8" + struct.pack(">HB", len(obj.data), obj.type & 0xff) + obj.data) elif len(obj.data) <= 2**32 - 1: fp.write(b"\xc9" + struct.pack(">IB", len(obj.data), obj.type & 0xff) + obj.data) else: raise UnsupportedTypeException("huge ext data") def _pack_array(cls, obj, fp, options): if len(obj) <= 15: fp.write(struct.pack("B", 0x90 | len(obj))) elif len(obj) <= 2**16 - 1: fp.write(b"\xdc" + struct.pack(">H", len(obj))) elif len(obj) <= 2**32 - 1: fp.write(b"\xdd" + struct.pack(">I", len(obj))) else: raise UnsupportedTypeException("huge array") for e in obj: cls.pack(e, fp, **options) def _pack_map(cls, obj, fp, options): if len(obj) <= 15: fp.write(struct.pack("B", 0x80 | len(obj))) elif len(obj) <= 2**16 - 1: fp.write(b"\xde" + struct.pack(">H", len(obj))) elif len(obj) <= 2**32 - 1: fp.write(b"\xdf" + struct.pack(">I", len(obj))) else: raise UnsupportedTypeException("huge array") for k, v in obj.items(): cls.pack(k, fp, **options) cls.pack(v, fp, **options) # Pack for Python 3, with unicode 'str' type, 'bytes' type, # and no 'long' type def pack(cls, obj, fp, **options): # pylint: disable=W0212 ext_handlers = options.get("ext_handlers") # lookup mro except object for matching handler ext_handler_match = next(( obj_cls for obj_cls in obj.__class__.__mro__[:-1] if obj_cls in ext_handlers ), None) if ext_handlers else None if obj is None: cls._pack_nil(obj, fp, options) elif ext_handler_match: cls._pack_ext(ext_handlers[ext_handler_match](obj), fp, options) elif isinstance(obj, bool): cls._pack_boolean(obj, fp, options) elif isinstance(obj, int): cls._pack_integer(obj, fp, options) elif isinstance(obj, float): cls._pack_float(obj, fp, options) elif isinstance(obj, str): cls._pack_string(obj, fp, options) elif isinstance(obj, bytes): cls._pack_binary(obj, fp, options) elif isinstance(obj, (tuple, list)): cls._pack_array(obj, fp, options) elif isinstance(obj, dict): cls._pack_map(obj, fp, options) elif isinstance(obj, Ext): cls._pack_ext(obj, fp, options) # default fallback elif ext_handlers and object in ext_handlers: cls._pack_ext(ext_handlers[object](obj), fp, options) else: raise UnsupportedTypeException( 
"unsupported type: %s" % str(type(obj))) def packb(cls, obj, **options): r"""Serialize a Python object into MessagePack bytes. Args: obj: a Python object Kwargs: ext_handlers (dict): dictionary of Ext handlers, mapping a custom type to a callable that packs an instance of the type into an Ext object force_float_precision (str): "single" to force packing floats as IEEE-754 single-precision floats, "double" to force packing floats as IEEE-754 double-precision floats. Returns: A 'bytes' containing serialized MessagePack bytes. Raises: UnsupportedType(PackException): Object type not supported for packing. Example: >>> umsgpack.packb({u"compact": True, u"schema": 0}) b'\x82\xa7compact\xc3\xa6schema\x00' >>> """ fp = io.BytesIO() cls.pack(obj, fp, **options) return fp.getvalue() # Map packb and unpackb to the appropriate version dump = pack dumps = packb class Encoder(metaclass=EncoderMeta): pass class _DecoderMeta(type): _unpack_dispatch_table = {} def __new__(mcs, name, bases, dct): # noqa cls = type.__new__(mcs, name, bases, dct) # Build a dispatch table for fast lookup of unpacking function # Fix uint for code in range(0, 0x7f + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_integer # Fix map for code in range(0x80, 0x8f + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_map # Fix array for code in range(0x90, 0x9f + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_array # Fix str for code in range(0xa0, 0xbf + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_string # Nil cls._unpack_dispatch_table[b'\xc0'] = cls._unpack_nil # Reserved cls._unpack_dispatch_table[b'\xc1'] = cls._unpack_reserved # Boolean cls._unpack_dispatch_table[b'\xc2'] = cls._unpack_boolean cls._unpack_dispatch_table[b'\xc3'] = cls._unpack_boolean # Bin for code in range(0xc4, 0xc6 + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_binary # Ext for code in range(0xc7, 0xc9 + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_ext # Float cls._unpack_dispatch_table[b'\xca'] = cls._unpack_float cls._unpack_dispatch_table[b'\xcb'] = cls._unpack_float # Uint for code in range(0xcc, 0xcf + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_integer # Int for code in range(0xd0, 0xd3 + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_integer # Fixext for code in range(0xd4, 0xd8 + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_ext # String for code in range(0xd9, 0xdb + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_string # Array cls._unpack_dispatch_table[b'\xdc'] = cls._unpack_array cls._unpack_dispatch_table[b'\xdd'] = cls._unpack_array # Map cls._unpack_dispatch_table[b'\xde'] = cls._unpack_map cls._unpack_dispatch_table[b'\xdf'] = cls._unpack_map # Negative fixint for code in range(0xe0, 0xff + 1): cls._unpack_dispatch_table[struct.pack("B", code)]\ = cls._unpack_integer return cls @staticmethod def _read_except(fp, n): data = fp.read(n) if len(data) < n: raise InsufficientDataException() return data def _unpack_integer(cls, code, fp, options): # noqa if (ord(code) & 0xe0) == 0xe0: return struct.unpack("b", code)[0] elif code == b'\xd0': return struct.unpack("b", cls._read_except(fp, 1))[0] elif code == b'\xd1': return struct.unpack(">h", cls._read_except(fp, 2))[0] elif code == b'\xd2': return struct.unpack(">i", cls._read_except(fp, 4))[0] elif code == b'\xd3': return struct.unpack(">q", cls._read_except(fp, 
8))[0] elif (ord(code) & 0x80) == 0x00: return struct.unpack("B", code)[0] elif code == b'\xcc': return struct.unpack("B", cls._read_except(fp, 1))[0] elif code == b'\xcd': return struct.unpack(">H", cls._read_except(fp, 2))[0] elif code == b'\xce': return struct.unpack(">I", cls._read_except(fp, 4))[0] elif code == b'\xcf': return struct.unpack(">Q", cls._read_except(fp, 8))[0] raise Exception("logic error, not int: 0x%02x" % ord(code)) @staticmethod def _unpack_reserved(code, fp, options): if code == b'\xc1': raise ReservedCodeException( "encountered reserved code: 0x%02x" % ord(code)) raise Exception( "logic error, not reserved code: 0x%02x" % ord(code)) @staticmethod def _unpack_nil(code, fp, options): if code == b'\xc0': return None raise Exception("logic error, not nil: 0x%02x" % ord(code)) @staticmethod def _unpack_boolean(code, fp, options): if code == b'\xc2': return False elif code == b'\xc3': return True raise Exception("logic error, not boolean: 0x%02x" % ord(code)) def _unpack_float(cls, code, fp, options): if code == b'\xca': return struct.unpack(">f", cls._read_except(fp, 4))[0] elif code == b'\xcb': return struct.unpack(">d", cls._read_except(fp, 8))[0] raise Exception("logic error, not float: 0x%02x" % ord(code)) def _unpack_string(cls, code, fp, options): if (ord(code) & 0xe0) == 0xa0: length = ord(code) & ~0xe0 elif code == b'\xd9': length = struct.unpack("B", cls._read_except(fp, 1))[0] elif code == b'\xda': length = struct.unpack(">H", cls._read_except(fp, 2))[0] elif code == b'\xdb': length = struct.unpack(">I", cls._read_except(fp, 4))[0] else: raise Exception("logic error, not string: 0x%02x" % ord(code)) # Always return raw bytes in compatibility mode # global compatibility # if compatibility: # return cls._read_except(fp, length) data = cls._read_except(fp, length) try: return bytes.decode(data, 'utf-8') except UnicodeDecodeError: if options.get("allow_invalid_utf8"): return InvalidString(data) raise InvalidStringException("unpacked string is invalid utf-8") def _unpack_binary(cls, code, fp, options): if code == b'\xc4': length = struct.unpack("B", cls._read_except(fp, 1))[0] elif code == b'\xc5': length = struct.unpack(">H", cls._read_except(fp, 2))[0] elif code == b'\xc6': length = struct.unpack(">I", cls._read_except(fp, 4))[0] else: raise Exception("logic error, not binary: 0x%02x" % ord(code)) return cls._read_except(fp, length) def _unpack_ext(cls, code, fp, options): # noqa if code == b'\xd4': length = 1 elif code == b'\xd5': length = 2 elif code == b'\xd6': length = 4 elif code == b'\xd7': length = 8 elif code == b'\xd8': length = 16 elif code == b'\xc7': length = struct.unpack("B", cls._read_except(fp, 1))[0] elif code == b'\xc8': length = struct.unpack(">H", cls._read_except(fp, 2))[0] elif code == b'\xc9': length = struct.unpack(">I", cls._read_except(fp, 4))[0] else: raise Exception("logic error, not ext: 0x%02x" % ord(code)) ext = Ext(ord(cls._read_except(fp, 1)), cls._read_except(fp, length)) # Unpack with ext handler, if we have one ext_handlers = options.get("ext_handlers") if ext_handlers and ext.type in ext_handlers: ext = ext_handlers[ext.type](ext) return ext def _unpack_array(cls, code, fp, options): if (ord(code) & 0xf0) == 0x90: length = (ord(code) & ~0xf0) elif code == b'\xdc': length = struct.unpack(">H", cls._read_except(fp, 2))[0] elif code == b'\xdd': length = struct.unpack(">I", cls._read_except(fp, 4))[0] else: raise Exception("logic error, not array: 0x%02x" % ord(code)) return [cls._unpack(fp, options) for i in range(length)] def 
_deep_list_to_tuple(cls, obj): if isinstance(obj, list): return tuple([cls._deep_list_to_tuple(e) for e in obj]) # noqa return obj def _unpack_map(cls, code, fp, options): if (ord(code) & 0xf0) == 0x80: length = (ord(code) & ~0xf0) elif code == b'\xde': length = struct.unpack(">H", cls._read_except(fp, 2))[0] elif code == b'\xdf': length = struct.unpack(">I", cls._read_except(fp, 4))[0] else: raise Exception("logic error, not map: 0x%02x" % ord(code)) d = {} if not options.get('use_ordered_dict') \ else collections.OrderedDict() for _ in range(length): # Unpack key k = cls._unpack(fp, options) if isinstance(k, list): # Attempt to convert list into a hashable tuple k = cls._deep_list_to_tuple(k) # noqa elif not isinstance(k, collections.Hashable): raise UnhashableKeyException( "encountered unhashable key: %s, %s" % (str(k), str(type(k)))) elif k in d: raise DuplicateKeyException( "encountered duplicate key: %s, %s" % (str(k), str(type(k)))) # Unpack value v = cls._unpack(fp, options) try: d[k] = v except TypeError: raise UnhashableKeyException( "encountered unhashable key: %s" % str(k)) return d class Decoder(metaclass=_DecoderMeta): @classmethod def _unpack(cls, fp, options): code = cls._read_except(fp, 1) return cls._unpack_dispatch_table[code](code, fp, options) @classmethod def unpack(cls, fp, **options): return cls._unpack(fp, options) @classmethod def unpackb(cls, s, **options): if not isinstance(s, (bytes, bytearray)): raise TypeError("packed data must be type 'bytes' or 'bytearray'") return cls._unpack(io.BytesIO(s), options) # Copyright 2018 Oliver Berger # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class MsgpackMeta(abc.ABCMeta): """Manages ext handler and custom encoder registration.""" ext_handlers_encode = {} ext_handlers_decode = {} custom_encoders = {} def register(cls, data_type=None, ext_code=None): def decorator(handler): if not issubclass(handler, Msgpack): raise TypeError( "Msgpack handler must be a subclass of abstract `Msgpack`" " class: {}".format(handler)) if data_type is None: _data_type = handler else: _data_type = data_type if ext_code is not None: cls.ext_handlers_encode[_data_type] = \ lambda data: Ext( ext_code, handler.__msgpack_encode__(data, _data_type)) cls.ext_handlers_decode[ext_code] = \ lambda ext: handler.__msgpack_decode__(ext.data, _data_type) else: cls.custom_encoders[_data_type] = handler return handler return decorator def encode(cls, data): encoded_data = Encoder.packb(data, ext_handlers=cls.ext_handlers_encode) return encoded_data def decode(cls, encoded_data): data = Decoder.unpackb(encoded_data, ext_handlers=cls.ext_handlers_decode) return data def get_custom_encoder(cls, data_type): if issubclass(data_type, Msgpack): return data_type # lookup data types for registered encoders for subclass in data_type.__mro__: try: return cls.custom_encoders[subclass] except KeyError: continue return None # Copyright 2018 Oliver Berger # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
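# --- Editor's sketch (not part of the original source): using
# ``MsgpackMeta.register`` above together with the ``Msgpack`` base class
# and the module-level ``encode``/``decode`` aliases defined just below;
# ``Point`` and ext code 0x10 are made up for illustration:
#
#     @Msgpack.register(ext_code=0x10)
#     class Point(Msgpack):
#         def __init__(self, x, y):
#             self.x, self.y = x, y
#
#         @classmethod
#         def __msgpack_encode__(cls, data, data_type):
#             return encode((data.x, data.y))
#
#         @classmethod
#         def __msgpack_decode__(cls, encoded, data_type):
#             return cls(*decode(encoded))
#
#     point = decode(encode(Point(1, 2)))
#     assert (point.x, point.y) == (1, 2)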
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class Msgpack(metaclass=MsgpackMeta): """Add msgpack en/decoding to a type.""" @abc.abstractclassmethod def __msgpack_encode__(cls, data, data_type): return None @abc.abstractclassmethod def __msgpack_decode__(cls, encoded_data, data_type): return None @classmethod def __subclasshook__(cls, C): if cls is Msgpack: if any("__msgpack_encode__" in B.__dict__ for B in C.__mro__) \ and any("__msgpack_decode__" in B.__dict__ for B in C.__mro__): return True return NotImplemented encode = Msgpack.encode decode = Msgpack.decode register = Msgpack.register get_custom_encoder = Msgpack.get_custom_encoder PK!]403 implant/pool.pyimport asyncio import logging import threading class RemotesPool(dict): """Has references to all created remotes.""" # pylint: disable=E0602 log = logging.getLogger(__module__ + '.' + __qualname__) def __init__(self, options, *, loop=None): super().__init__() self.options = options or {} self.loop = loop if loop is not None else asyncio.get_event_loop() def get_next_loop(self): """Just return the main loop.""" return self.loop async def connect(self, connector, **kwargs): """Connect to a remote and pool it.""" loop = self.get_next_loop() if connector in self: remote, _, _ = self[connector] # TODO does this deserve a warning? self.log.warning('Process for %s already launched! Using: %s', connector, remote) else: remote = await connector.launch( options=self.options, **kwargs, loop=loop ) fut_remote = asyncio.ensure_future( remote.communicate(), loop=loop) error_log = asyncio.ensure_future( self.log_remote_stderr(remote), loop=loop) self[connector] = (remote, fut_remote, error_log) return remote async def shutdown(self, connector): """Shutdown a remote identified by its connector.""" loop = asyncio.get_event_loop() _, fut_remote, error_log = self[connector] loop.call_soon_threadsafe(fut_remote.cancel) await fut_remote loop.call_soon_threadsafe(error_log.cancel) await error_log async def stop(self): """Shutdown all remotes.""" loop = asyncio.get_event_loop() for _, fut_remote, error_log in self.values(): loop.call_soon_threadsafe(fut_remote.cancel) await fut_remote loop.call_soon_threadsafe(error_log.cancel) await error_log async def log_remote_stderr(self, remote): """Just log remote stderr.""" # await remote.launched() if remote.stderr: self.log.info("Logging remote stderr: %s", remote) async for line in remote.stderr: self.log.debug("\tRemote #%s: %s", remote.pid, line[:-1].decode()) @classmethod async def run_command_on_remotes(cls, *connectors, options, command_cls, command_params, loop=None): """Connect remotes and run the command.""" remotes_pool = cls(options, loop=loop) # connect for connector in connectors: await remotes_pool.connect(connector) # execute results = {connector: await remote.execute(command_cls, **command_params) for connector, (remote, *_) in remotes_pool.items()} await remotes_pool.stop() return results PK!,implant/remote-logging.yaml%YAML 1.2 --- disable_existing_loggers: true version: 1 formatters: simple: #format: '{asctime} - {process}/{thread} - {levelname} - {name} - {message}' format: '{levelname} - {name} - {message}' style: '{' handlers: console: 
class: logging.StreamHandler level: !!python/name:logging.NOTSET #level: DEBUG formatter: simple stream: ext://sys.stderr logfile: class: logging.FileHandler filename: /tmp/implant.log formatter: simple level: !!python/name:logging.NOTSET loggers: implant: level: DEBUG handlers: [console, logfile] propagate: no implant.core: level: DEBUG handlers: [console, logfile] propagate: no implant.core.Channels: level: INFO handlers: [console, logfile] propagate: no root: level: DEBUG handlers: [console] implant.evolve.specs: level: INFO PK!implant/scripts.py# Copyright 2018 Oliver Berger # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """CLI scripts.""" import asyncio import importlib import logging import logging.config import click from ruamel import yaml import json from implant import connect, core, pool log = logging.getLogger(__name__) LOGGING_LEVEL_NAMES = map(logging.getLevelName, sorted(( logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR, logging.CRITICAL, ))) DEFAULT_LOGGING_LEVEL = logging.getLevelName(logging.WARNING) _LOG_SIMPLE_FMT = { 'format': ('{asctime} - {process}/{thread} - ' '{levelname} - {name} - {message}'), 'style': '{'} _LOG_COLORED_FMT = { '()': 'colorlog.TTYColoredFormatter', 'format': ('{asctime} - {process}/{thread} - ' '{log_color}{levelname}{reset} - {name} - {message}'), 'style': '{'} def setup_logging(debug, log_config, remote_log_config, log_level=DEFAULT_LOGGING_LEVEL): default_log_config = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'simple': _LOG_SIMPLE_FMT, 'colored': _LOG_COLORED_FMT, }, 'handlers': {'console': {'class': 'logging.StreamHandler', 'formatter': 'colored', 'level': logging.NOTSET, 'stream': 'ext://sys.stderr'}, 'logfile': {'class': 'logging.FileHandler', 'filename': '/tmp/implant.log', 'formatter': 'simple', 'level': logging.NOTSET}}, 'root': {'handlers': ['console', 'logfile'], 'level': log_level}, } default_remote_log_config = { 'version': 1, 'disable_existing_loggers': False, 'formatters': {'simple': _LOG_SIMPLE_FMT}, 'handlers': {'console': {'class': 'logging.StreamHandler', 'formatter': 'simple', 'level': logging.NOTSET, 'stream': 'ext://sys.stderr'}, 'logfile': {'class': 'logging.FileHandler', 'filename': '/tmp/implant.log', 'formatter': 'simple', 'level': logging.NOTSET}}, 'root': {'handlers': ['console'], 'level': log_level}, } log_config = yaml.load(log_config)\ if log_config else default_log_config remote_log_config = yaml.load(remote_log_config)\ if remote_log_config else default_remote_log_config logging.config.dictConfig(log_config) return log_config, remote_log_config def find_loop_specs(): """Just find specs for common loops.""" loop_specs = { module_name: importlib.util.find_spec(module_name) for module_name in ('asyncio', 'uvloop', 'tokio') } return loop_specs def run(): """Main entry point.""" return cli(obj={}, auto_envvar_prefix='IMPLANT') # noqa @click.group(invoke_without_command=True) @click.option('event_loop', '--loop', default='asyncio', type=click.Choice(find_loop_specs().keys()), help='Use 
a different loop policy.') @click.option('--debug/--no-debug', default=False, help='Enable or disable debug.') @click.option('--log-config', type=click.File('r'), default=None, help='Logging configuration in yaml format.') @click.option('--remote-log-config', type=click.File('r'), default=None, help='Logging configuration in yaml format.') @click.option('--log-level', default=DEFAULT_LOGGING_LEVEL, type=click.Choice(LOGGING_LEVEL_NAMES), help=f'The logging level, defaults to `{DEFAULT_LOGGING_LEVEL}`') # @click.option('--config', '-c', default=None, help='General configuration.') @click.pass_context def cli(ctx, event_loop, debug, log_config, remote_log_config, log_level): """Main CLI entry point.""" log_config, remote_log_config = setup_logging( debug=debug, log_config=log_config, remote_log_config=remote_log_config, log_level=log_level) if event_loop == 'uvloop': try: import uvloop asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) log.info("Using uvloop event loop policy") except ImportError: log.warning("uvloop is not available.") elif event_loop == 'tokio': try: import tokio asyncio.set_event_loop_policy(tokio.EventLoopPolicy()) log.info("Using tokio event loop policy") except ImportError: log.warning("tokio is not available.") if ctx.invoked_subcommand is None: # we need to import master lazy because master imports core, # which may use exclusive decorator, # which tries to get the actual loop, # which is only set after running set_event_loop_policy from implant import master if debug: log.info("Enable asyncio debug") master.core.log.setLevel(logging.DEBUG) asyncio.get_event_loop().set_debug(debug) master.main(log_config=remote_log_config, debug=debug) else: if debug: asyncio.get_event_loop().set_debug(debug) log.setLevel(logging.DEBUG) ctx.obj['log_config'] = log_config ctx.obj['remote_log_config'] = remote_log_config ctx.obj['debug'] = debug def resolve_command_class(dotted_command_name): """Resolve the command class for the given string. :param dotted_command_name: : """ module_name, command_name = dotted_command_name.split(':') module = importlib.import_module(module_name) command_cls = getattr(module, command_name) assert issubclass(command_cls, core.Command), \ '{} is not a subclass of {}'.format(dotted_command_name, core.Command) return command_cls @cli.command('cmd') @click.option('dotted_command_name', '-c', '--command', required=True, help='the path to a implant command') @click.option('command_params_file', '-p', '--params', type=click.File('rb')) @click.option('remote_uri', '-r', '--remote', help='The remote connection', default='local://') @click.pass_context def cli_cmd(ctx, dotted_command_name, command_params_file, remote_uri): """Execute a :py:obj:`implant.core.Command` in a remote process.""" loop = asyncio.get_event_loop() # lookup command command_cls = resolve_command_class(dotted_command_name) command_params = yaml.load(command_params_file.read(), Loader=yaml.Loader) options = { 'log_config': ctx.obj['remote_log_config'], 'debug': ctx.obj['debug'] } connector_params = connect.ConnectorParams.parse(remote_uri) connector = connector_params.create_connector() task = pool.RemotesPool.run_command_on_remotes( connector, options=options, command_cls=command_cls, command_params=command_params, loop=loop ) results = loop.run_until_complete(task) # for connector, result in results.items(): # print(connector, result) print(yaml.dump(results)) loop.stop() loop.close() PK!% % implant/testing.py"""Provide a pytest fixture for testing commands. 
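# --- Editor's sketch (not part of the original source):
# ``resolve_command_class`` in implant/scripts.py above expects a
# ``module:Class`` dotted name and asserts that the target is a
# ``core.Command`` subclass:
#
#     command_cls = resolve_command_class('implant.commands:Echo')
#     assert command_cls.__name__ == 'Echo'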
The :py:obj:`implant_remote_task` fixture is provided as if it is a running :py:obj:`implant.connect.Remote` instance. You can execute a :py:obj:`implant.core.Command` passing the whole stack. """ import asyncio import os import pytest from async_generator import async_generator, yield_ from implant import connect, core class RemoteTask(connect.Remote): """In-process remote task. Useful to run implant for testing. """ def __init__(self, remote_core_fut, *, stdin, stdout): super().__init__(stdin=stdin, stdout=stdout) self.remote_core_fut = remote_core_fut async def wait(self): await self.remote_core_fut class PipeConnector(connect.Connector): """A connector which executes the remote core in a task in the current process. """ def __init__(self, *, loop=None): self.loop = loop if loop is not None else asyncio.get_event_loop() self.stdin_pipe = os.pipe() self.stdout_pipe = os.pipe() self.stderr_pipe = os.pipe() async def launch(self, *args, **kwargs): """Launch the remote.""" remote = await create_pipe_remote(self.stdin_pipe, self.stdout_pipe, self.stderr_pipe, loop=self.loop) return remote async def create_pipe_remote(stdin_pipe, stdout_pipe, stderr_pipe, *, loop=None): """Launch remote core as a background task.""" if loop is None: loop = asyncio.events.get_event_loop() stdin_r, stdin_w = stdin_pipe stdout_r, stdout_w = stdout_pipe _, stderr_w = stderr_pipe remote_core = core.Core(loop=loop) remote_core_fut = asyncio.ensure_future( remote_core.connect(stdin=stdin_r, stdout=stdout_w, stderr=stderr_w) ) reader = core.Incomming(pipe=stdout_r) await reader.connect() writer = await core.Outgoing(pipe=stdin_w).connect() remote = RemoteTask(remote_core_fut, stdin=writer, stdout=reader) return remote @pytest.fixture @async_generator async def implant_remote_task(event_loop): """Create the remote task as a fixture. You use it like: .. code-block:: python @pytest.mark.asyncio async def test_testing(implant_remote_task): from implant import core result = await implant_remote_task.execute( core.InvokeImport, fullname='implant.commands') result = await implant_remote_task.execute( 'implant.commands:Echo', data='foobar') assert result['remote_data'] == 'foobar' """ connector = PipeConnector(loop=event_loop) remote = await connector.launch() com_remote = asyncio.ensure_future(remote.communicate()) await yield_(remote) com_remote.cancel() await com_remote PK! implant-0.1.0.dist-info/entry_points.txt PK! implant-0.1.0.dist-info/LICENSE.txt Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. PK! implant-0.1.0.dist-info/WHEEL PK! implant-0.1.0.dist-info/METADATA