# pytest_docker_tools/__init__.py
'''
An opinionated set of helpers for defining Docker integration test environments
with py.test fixtures.
'''

from .factories import build, container, fetch, network, volume

__version__ = '0.0.4'

__all__ = [
    'build',
    'container',
    'fetch',
    'network',
    'volume',
]


# pytest_docker_tools/plugin.py
import docker
import pytest

from .wrappers import Container


@pytest.fixture(scope='session')
def docker_client(request):
    return docker.from_env()


@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    ''' This hook allows Docker containers to contribute their logs to the py.test report. '''
    outcome = yield
    rep = outcome.get_result()

    if not rep.failed:
        return

    if 'request' in item.funcargs:
        for name, fixturedef in item.funcargs['request']._fixture_defs.items():
            fixture = fixturedef.cached_result[0]
            if isinstance(fixture, Container):
                rep.sections.append((
                    name + ': ' + fixture.name,
                    fixture.logs(),
                ))
    else:
        for name, fixture in item.funcargs.items():
            if isinstance(fixture, Container):
                rep.sections.append((
                    name + ': ' + fixture.name,
                    fixture.logs(),
                ))


# pytest_docker_tools/utils.py
import sys
import time


def wait_for_callable(message, callable, timeout=30):
    ''' Runs a callable once a second until it returns True or we hit the timeout. '''
    sys.stdout.write(message)
    try:
        for i in range(timeout):
            sys.stdout.write('.')
            sys.stdout.flush()
            if callable():
                return
            time.sleep(1)
    finally:
        sys.stdout.write('\n')

    raise RuntimeError('Timeout exceeded')


# pytest_docker_tools/factories/__init__.py
from .build import build
from .container import container
from .fetch import fetch
from .network import network
from .volume import volume

__all__ = [
    'build',
    'container',
    'fetch',
    'network',
    'volume',
]
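The wait_for_callable helper above polls an arbitrary predicate once a second until it returns a truthy value, and raises RuntimeError once the timeout is exceeded. A minimal sketch of calling it directly; the socket probe and the port are illustrative assumptions, not part of the package:

# example_wait.py (sketch, not part of the package)
import socket

from pytest_docker_tools.utils import wait_for_callable


def _can_connect(host='127.0.0.1', port=6379):
    # True once a TCP connection to host:port succeeds.
    try:
        socket.create_connection((host, port), timeout=1).close()
        return True
    except OSError:
        return False


# Prints the message, then a dot per attempt; raises RuntimeError after 30 tries.
wait_for_callable('Waiting for the service to accept connections', _can_connect)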
# pytest_docker_tools/factories/build.py
import sys

import pytest


def build(*, path, scope='session'):
    '''
    Fixture factory for creating container images from a Dockerfile.

    For example in your conftest.py you can:

        from pytest_docker_tools import build

        test_image = build(path='path/to/buildcontext')

    Where the path is a folder containing a Dockerfile. By default the fixture
    has a session scope.
    '''
    def build(request, docker_client):
        sys.stdout.write(f'Building {path}')
        try:
            image, logs = docker_client.images.build(path=path)
            for line in logs:
                sys.stdout.write('.')
                sys.stdout.flush()
        finally:
            sys.stdout.write('\n')

        # request.addfinalizer(lambda: docker_client.images.remove(image.id))
        return image

    pytest.fixture(scope=scope)(build)
    return build


# pytest_docker_tools/factories/container.py
import inspect
from string import Formatter

import pytest

from pytest_docker_tools.utils import wait_for_callable
from pytest_docker_tools.wrappers import Container


def create_container(request, docker_client, *args, **kwargs):
    kwargs.update({'detach': True})
    container = docker_client.containers.run(*args, **kwargs)
    # Force-remove the container when the fixture is torn down
    request.addfinalizer(lambda: container.remove(force=True) and container.wait(timeout=10))
    return Container(container)


class FixtureFormatter(Formatter):

    def __init__(self, request):
        self.request = request

    def get_value(self, key, args, kwargs):
        return self.request.getfixturevalue(key)


def _process_val(request, val):
    if isinstance(val, str):
        return FixtureFormatter(request).format(val)
    elif callable(val):
        return val(*[request.getfixturevalue(f) for f in inspect.getargspec(val)[0]])
    return val


def _process_list(request, val):
    return [_process(request, v) for v in val]


def _process_dict(request, mapping):
    return {_process(request, k): _process(request, v) for (k, v) in mapping.items()}


def _process(request, val):
    if isinstance(val, dict):
        return _process_dict(request, val)
    elif isinstance(val, list):
        return _process_list(request, val)
    else:
        return _process_val(request, val)


def container(*, scope='function', **kwargs):
    '''
    Fixture factory for creating containers.

    For example in your conftest.py you can:

        from pytest_docker_tools import container

        test_container = container(image='redis')

    This will create a fixture called 'test_container' that runs a container
    from the 'redis' image.
    '''
    def container(request, docker_client):
        local_kwargs = dict(kwargs)
        container = create_container(
            request,
            docker_client,
            **_process_dict(request, local_kwargs)
        )

        wait_for_callable(
            'Waiting for container to be ready',
            lambda: container.reload() or container.ready(),
        )

        return container

    pytest.fixture(scope=scope)(container)
    return container


# pytest_docker_tools/factories/fetch.py
import sys

import pytest


def fetch(tag, scope='session'):
    '''
    Fixture factory for fetching container images from a repository.

    For example in your conftest.py you can:

        from pytest_docker_tools import fetch

        test_image = fetch('redis:latest')

    By default the fixture has a session scope.
    '''
    if ':' not in tag:
        tag += ':latest'

    def fetch(request, docker_client):
        sys.stdout.write(f'Fetching {tag}\n')
        image = docker_client.images.pull(tag)
        # request.addfinalizer(lambda: docker_client.images.remove(image.id))
        return image

    pytest.fixture(scope=scope)(fetch)
    return fetch
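The fetch and container factories above are meant to be chained: string arguments to container are run through FixtureFormatter, so a value like '{redis_image.id}' resolves the redis_image fixture and then reads its id attribute. A minimal conftest.py sketch, using Redis purely as an illustrative image; the fixture names here are assumptions, not part of the package:

# conftest.py (sketch)
from pytest_docker_tools import container, fetch

redis_image = fetch('redis:latest')

redis = container(
    image='{redis_image.id}',    # interpolated from the redis_image fixture at test time
    ports={'6379/tcp': None},    # publish the exposed port on a random host port
)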
# pytest_docker_tools/factories/network.py
import uuid

import pytest


def network(scope='function'):
    '''
    Fixture factory for creating networks.

    For example in your conftest.py you can:

        from pytest_docker_tools import network

        test_storage = network()

    Then you can reference that network from your test:

        def test_a_docker_network(test_storage):
            print(test_storage.id)

    The fixture has a function scope - it will be destroyed after your test exits.
    '''
    def network(request, docker_client):
        network_id = 'pytest-' + str(uuid.uuid4())
        print(f'Creating network {network_id}')
        network = docker_client.networks.create(network_id)
        request.addfinalizer(lambda: network.remove())
        return network

    pytest.fixture(scope=scope)(network)
    return network


# pytest_docker_tools/factories/volume.py
import uuid

import pytest


def volume(scope='function'):
    '''
    Fixture factory for creating volumes.

    For example in your conftest.py you can:

        from pytest_docker_tools import volume

        test_storage = volume()

    Then you can reference that volume from your test:

        def test_a_docker_volume(test_storage):
            print(test_storage.id)

    The fixture has a function scope - it will be destroyed after your test exits.
    '''
    def volume(request, docker_client):
        vol_id = 'pytest-' + str(uuid.uuid4())
        print(f'Creating volume {vol_id}')
        volume = docker_client.volumes.create(vol_id)
        request.addfinalizer(lambda: volume.remove(force=True))
        return volume

    pytest.fixture(scope=scope)(volume)
    return volume


# pytest_docker_tools/wrappers/__init__.py
from .container import Container

__all__ = [
    'Container',
]
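The network and volume factories compose with container in the same way, because _process_dict interpolates nested keys as well as values. A sketch under the assumption of a Postgres image and mount path, chosen only for illustration:

# conftest.py (sketch)
from pytest_docker_tools import container, network, volume

backend_network = network()
backend_storage = volume()

db = container(
    image='postgres:10.4',
    network='{backend_network.name}',    # attach to the per-test network
    volumes={
        # the volume name is interpolated from the backend_storage fixture
        '{backend_storage.name}': {'bind': '/var/lib/postgresql/data'},
    },
)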
# pytest_docker_tools/wrappers/container.py
'''
This module contains a wrapper that adds some helpers to a Docker Container
object that are useful for integration testing.
'''

import io
import tarfile


class _Map(object):

    def __init__(self, container):
        self._container = container

    def values(self):
        return [self[k] for k in self.keys()]

    def items(self):
        return [(k, self[k]) for k in self.keys()]

    def __iter__(self):
        return iter(self.keys())


class IpMap(_Map):

    @property
    def primary(self):
        return next(iter(self.values()))

    def keys(self):
        return self._container.attrs['NetworkSettings']['Networks'].keys()

    def __getitem__(self, key):
        if not isinstance(key, str):
            key = key.name
        networks = self._container.attrs['NetworkSettings']['Networks']
        if key not in networks:
            raise KeyError(f'Unknown network: {key}')
        return networks[key]['IPAddress']


class PortMap(_Map):

    def keys(self):
        return self._container.attrs['NetworkSettings']['Ports'].keys()

    def __getitem__(self, key):
        ports = self._container.attrs['NetworkSettings']['Ports']
        if key not in ports:
            raise KeyError(f'Unknown port: {key}')
        if not ports[key]:
            return []
        return [int(p['HostPort']) for p in ports[key]]


class Container(object):

    def __init__(self, container):
        self._container = container
        self.ips = IpMap(container)
        self.ports = PortMap(container)

    def ready(self):
        if self.status == 'exited':
            raise RuntimeError(f'Container {self.name} has already exited before we noticed it was ready')

        if self.status != 'running':
            return False

        networks = self._container.attrs['NetworkSettings']['Networks']
        for name, network in networks.items():
            if not network['IPAddress']:
                return False

        # If a user has exposed a port then wait for a LISTEN socket to show up in netstat
        ports = self._container.attrs['NetworkSettings']['Ports']
        for port, listeners in ports.items():
            if not listeners:
                continue
            port, proto = port.split('/')
            assert proto in ('tcp', 'udp')
            if proto == 'tcp' and port not in self.get_open_tcp_ports():
                return False
            if proto == 'udp' and port not in self.get_open_udp_ports():
                return False

        return True

    @property
    def attrs(self):
        return self._container.attrs

    @property
    def id(self):
        return self._container.id

    @property
    def name(self):
        return self._container.name

    @property
    def env(self):
        kv_pairs = map(lambda v: v.split('=', 1), self._container.attrs['Config']['Env'])
        return {k: v for k, v in kv_pairs}

    @property
    def status(self):
        return self._container.status

    def reload(self):
        return self._container.reload()

    def kill(self, signal=None):
        return self._container.kill(signal)

    def remove(self, *args, **kwargs):
        raise RuntimeError(
            'Do not remove this container manually. It will be removed '
            'automatically by py.test after the test finishes.'
        )

    def logs(self):
        return self._container.logs().decode('utf-8')

    def get_files(self, path):
        '''
        Retrieve files from a container at a given path.

        This is meant for extracting log files from a container where it is not
        using the docker logging capabilities.
        '''
        archive_iter, _ = self._container.get_archive(path)

        archive_stream = io.BytesIO()
        for chunk in archive_iter:
            archive_stream.write(chunk)
        archive_stream.seek(0)

        archive = tarfile.TarFile(fileobj=archive_stream)

        files = {}
        for info in archive.getmembers():
            if not info.isfile():
                continue
            reader = archive.extractfile(info.name)
            files[info.name] = reader.read().decode('utf-8')

        return files

    def get_open_tcp_ports(self):
        ''' Gets all TCP sockets in the LISTEN state '''
        netstat = self._container.exec_run('cat /proc/net/tcp')[1].decode('utf-8').strip()

        ports = []
        for line in netstat.split('\n'):
            # Not interested in empty lines
            if not line:
                continue

            line = line.split()

            # Only interested in listen sockets
            if line[3] != '0A':
                continue

            ports.append(str(int(line[1].split(':', 1)[1], 16)))

        return ports

    def get_open_udp_ports(self):
        ''' Gets all UDP sockets in the LISTEN state '''
        netstat = self._container.exec_run('cat /proc/net/udp')[1].decode('utf-8').strip()

        ports = []
        for line in netstat.split('\n'):
            # Not interested in empty lines
            if not line:
                continue

            line = line.split()

            # Only interested in listen sockets
            if line[3] != '0A':
                continue

            ports.append(str(int(line[1].split(':', 1)[1], 16)))

        return ports
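A sketch of a test that exercises the Container wrapper above, assuming the redis fixture from the earlier conftest.py sketch; the log message asserted on is Redis-specific and purely illustrative:

# test_redis.py (sketch)
def test_redis_container_is_ready(redis):
    assert redis.status == 'running'
    assert redis.ips.primary                # IP address on the container's first network
    host_port, = redis.ports['6379/tcp']    # host port published for the exposed port
    assert isinstance(host_port, int)
    assert 'Ready to accept connections' in redis.logs()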