# ---- pypare/__init__.py -----------------------------------------------------

__version__ = '0.3.0'
__version_info__ = tuple(__version__.split('.'))


# ---- pypare/config.py -------------------------------------------------------

# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import functools
import importlib.util
import inspect
import itertools
import sys

import aiohttp.web
import structlog

from . import logging


class Config(dict):
    defaults = {}
    """Some config defaults."""

    sanitizer = {}
    """Some converters to sanitize config values."""

    def __init__(self, config=None, **kwargs):
        if config is None:
            config = {}
        sanitizer = self.sanitizer
        sane_config = (
            (k, sanitizer[k](v) if k in sanitizer else v)
            for k, v in itertools.chain(config.items(), kwargs.items())
        )
        super().__init__(sane_config)
        self.set_defaults(self.defaults)
        self._loaded_plugins = set()

    def set_defaults(self, config):
        """Apply default values from config.

        :param config: another :py:obj:`dict`
        """
        for key, value in config.items():
            self.setdefault(key, value)


class AioHttpConfig(Config):
    defaults = {
        'host': '0.0.0.0',
        'port': 8080,
        'plugins_function_name': 'plug_me_in',
        'plugins': [],
        'debug': True,
    }
    """Some config defaults."""

    sanitizer = {
        'plugins': list,
        'port': int,
        'debug': bool,
    }
    """Some converters to sanitize config values."""

    def create_app(self, loop=None):
        """Create the aiohttp app."""
        app = aiohttp.web.Application(loop=loop)
        app._debug = self['debug']
        # we hook ourself into the app
        app['config'] = self
        app.config = self
        app.plugin = functools.partial(self.plugin, app)
        return app

    def prepare_app(self, app):
        self.hook_plugins(app)

    async def plugin(self, app, name):
        plugin = resolve_dotted_name(name)
        if inspect.ismodule(plugin):
            # apply the default plugin function name
            plugin = getattr(plugin, self.get('plugins_function_name'))
        if not inspect.iscoroutinefunction(plugin):
            raise ValueError(f'{plugin.__name__} must be an async function.')
        log = structlog.get_logger()
        if plugin in self._loaded_plugins:
            log.warning('Plugin already loaded',
                        plugin=f'{plugin.__module__}.{plugin.__qualname__}')
            return
        try:
            return await plugin(app)
        finally:
            log.info('Loaded plugin',
                     plugin=f'{plugin.__module__}.{plugin.__qualname__}')
            self._loaded_plugins.add(plugin)

    def hook_plugins(self, app):
        """Actually load all async plugins."""
        async def _apply_plugins():
            for name in self['plugins']:
                await self.plugin(app, name)

        loop = asyncio.get_event_loop()
        loop.run_until_complete(_apply_plugins())

    def run(self):
        app = self.create_app()
        self.prepare_app(app)
        log = structlog.get_logger()
        log.info('Starting aiohttp', host=self['host'], port=self['port'])
        aiohttp.web.run_app(
            app,
            host=self['host'],
            port=self['port'],
            access_log_class=logging.AccessLogger,
            print=None,
        )


def resolve_dotted_name(name):
    """Use a pkg_resources-style dotted name to resolve an object.

    Modules are cached in sys.modules.
    """
    part = ':'
    module_name, _, attr_name = name.partition(part)
    if part in attr_name:
        raise ValueError(f'Invalid name: {name}')
    if module_name in sys.modules:
        resolved = sys.modules[module_name]
    else:
        spec = importlib.util.find_spec(module_name)
        resolved = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(resolved)
        sys.modules[resolved.__name__] = resolved
    if attr_name:
        resolved = getattr(resolved, attr_name)
    return resolved
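
# ---- sketch: resolving plugins (illustrative, not part of the package) ------

# A minimal sketch of how `resolve_dotted_name` and the `Config` sanitizer
# behave, assuming the pypare package and its dependencies are installed.
# `MyConfig` and all values are made up for demonstration.

from pypare.config import Config, resolve_dotted_name

dumps = resolve_dotted_name('json:dumps')
assert dumps is __import__('json').dumps


class MyConfig(Config):
    defaults = {'port': 8080}
    sanitizer = {'port': int}


conf = MyConfig({'port': '3141'})
assert conf['port'] == 3141    # sanitized to int by the `port` converter
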

# ---- pypare/logging.py ------------------------------------------------------

# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import logging.config
import sys

import aiohttp
import structlog

LOGGING_LEVEL_NAMES = list(map(logging.getLevelName, sorted((
    logging.NOTSET,
    logging.DEBUG,
    logging.INFO,
    logging.WARN,
    logging.ERROR,
    logging.CRITICAL,
))))
DEFAULT_LOGGING_LEVEL = logging.getLevelName(logging.INFO)
CONTEXT_SETTINGS = dict(token_normalize_func=lambda x: x.upper())


def is_debug(level):
    return level == logging.getLevelName(logging.DEBUG)


class StdioToLog:
    """Delegate sys.stdout to a logger."""

    def __init__(self, logger, log_level=logging.INFO):
        self.logger = logger
        self.log_level = log_level
        self.linebuf = ''

    def write(self, buf):
        for line in buf.rstrip().splitlines():
            self.logger.log(self.log_level, line.rstrip())

    def flush(self):
        pass


class ExtractLogExtra:    # noqa: R0903
    """Extract log record attributes to the structlog event_dict."""

    def __init__(self, *attrs):
        self.attrs = attrs

    def __call__(self, logger, method_name, event_dict):
        """Add the configured log record attributes to the event dict."""
        record = event_dict.get("_record")
        for attr_name in self.attrs:
            if hasattr(record, attr_name):
                attr = getattr(record, attr_name)
                event_dict[attr_name] = attr
        return event_dict


def setup_logging(*, tty=sys.stdout.isatty(), level=logging.DEBUG,
                  capture_warnings=True, redirect_print=False,
                  json_indent=None):
    """Set up structured logging for logstash.

    :param tty: if True, render colored logs
    :param level: the log level to apply
    :param capture_warnings: redirect warnings to the logger
    :param redirect_print: use the logger to redirect printed messages
    :param json_indent: indentation for the JSON renderer
    """
    # normalize the level
    if isinstance(level, str):
        level = logging.getLevelName(level.upper())
    renderer = (structlog.dev.ConsoleRenderer() if tty
                else structlog.processors.JSONRenderer(indent=json_indent,
                                                       sort_keys=True))
    timestamper = structlog.processors.TimeStamper(fmt="ISO", utc=True)
    pre_chain = [
        # Add the log level and a timestamp to the event_dict if the log
        # entry is not from structlog.
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.processors.format_exc_info,
        ExtractLogExtra('spec', 'url', 'mimetype', 'has_body',
                        'swagger_yaml', 'method', 'path', 'operation_id',
                        'data'),
        timestamper,
    ]
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "plain": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": renderer,
                "foreign_pre_chain": pre_chain,
            },
            "colored": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": renderer,
                "foreign_pre_chain": pre_chain,
            },
        },
        "handlers": {
            "default": {
                # "level": "DEBUG",
                "class": "logging.StreamHandler",
                "formatter": "colored",
            },
            # "file": {
            #     # "level": "DEBUG",
            #     "class": "logging.handlers.WatchedFileHandler",
            #     "filename": "test.log",
            #     "formatter": "plain",
            # },
        },
        "loggers": {
            "": {
                "handlers": ["default"],
                "level": level,
                "propagate": True,
            },
        },
    })
    logging.captureWarnings(capture_warnings)
    processors = [
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.PositionalArgumentsFormatter(),
        timestamper,
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ]
    structlog.configure(
        processors=processors,
        context_class=structlog.threadlocal.wrap_dict(dict),
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    if redirect_print:
        # redirect stdio print
        print_log = structlog.get_logger('print')
        sys.stderr = StdioToLog(print_log)
        sys.stdout = StdioToLog(print_log)
    # log uncaught exceptions
    sys.excepthook = uncaught_exception
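
# ---- sketch: wiring up logging (illustrative, not part of the package) ------

# A minimal sketch of calling `setup_logging` from an entry point, assuming
# pypare and structlog are installed. With `tty` left at its default, the
# same call yields colored console output in a terminal and JSON elsewhere.

import structlog
from pypare.logging import setup_logging

setup_logging(level='info', json_indent=2)
log = structlog.get_logger('demo')
log.info('service started', port=8080)
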

def uncaught_exception(ex_type, ex_value, tb):    # noqa: C0103
    log_ = structlog.get_logger('sys.excepthook')
    log_.critical(event='uncaught exception',
                  exc_info=(ex_type, ex_value, tb))


def merge_override_maps(maps):
    """Merge all maps and remove emptied keys."""
    merged = {}
    for m in maps:
        merged.update(m)
    remove_keys = {k for k, v in merged.items() if not v}
    for k in remove_keys:
        del merged[k]
    return merged


class AccessLogger(aiohttp.abc.AbstractAccessLogger):    # noqa: R0903
    def log(self, request, response, time):
        log = structlog.get_logger()
        log.info('Access',
                 remote=request.remote,
                 method=request.method,
                 path=request.path,
                 time=time,
                 status=response.status)
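
# ---- sketch: merge_override_maps (illustrative, not part of the package) ----

# `merge_override_maps` in a nutshell: later maps win, and keys whose final
# value is falsy are dropped entirely. The values are made up.

from pypare.logging import merge_override_maps

merged = merge_override_maps([
    {'host': 'localhost', 'port': 8080},
    {'port': 3141, 'host': ''},    # overrides port, empties host
])
assert merged == {'port': 3141}
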
"""Common middlewares to enhance microservices.""" import asyncio import aiohttp.web import aiohttp.client import aiotask_context as context async def plugin_task_context(app): """Add actual config, request and a :py:obj:`ClientSession` to the task context.""" loop = asyncio.get_event_loop() loop.set_task_factory(context.task_factory) client_session = aiohttp.client.ClientSession() async def close_client_session(app): # noqa: W0613 await client_session.close() app.on_cleanup.append(close_client_session) @aiohttp.web.middleware async def task_context_middleware(request, handler): # noqa: W0613 context.set('request', request) context.set('config', app.config) context.set('client_session', client_session) response = await handler(request) return response app.middlewares.append(task_context_middleware) PK!} pypare/pypi/__init__.py# Copyright 2018 Oliver Berger # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Very simple caching pypi proxy.""" from pathlib import Path import aiohttp import aiohttp.web import aiohttp_jinja2 import jinja2 from packaging.version import VERSION_PATTERN import structlog from . import model from .. import utils RELEASES_ROUTE = 'releases' log = structlog.get_logger() routes = aiohttp.web.RouteTableDef() def strip_verbose_pattern(pattern): def _gen(): for line in pattern.split('\n'): stripped, *_ = line.rsplit('#', 1) yield stripped.strip() return r''.join(_gen()) VERSION_PATTERN = strip_verbose_pattern(VERSION_PATTERN) @routes.get('/') async def index(request): config = request.config_dict['config'] context = { 'projects': [], 'principal': 'default', } response = aiohttp_jinja2.render_template( 'simple.j2', request, context) return response @routes.get(r'/{channel_name}/+releases/{path:[a-f0-9/]+}/{filename}', name=RELEASES_ROUTE) async def serve_release(request): channel_name = request.match_info['channel_name'] path = request.match_info['path'] filename = request.match_info['filename'] cache = request.config_dict['cache'] channel = await cache.channel( name=channel_name, releases_route=request.app.router[RELEASES_ROUTE] ) streamer = await model.CachingStreamer.from_channel_release_url( channel, path, filename ) return await streamer.response() @routes.get(r'/{channel_name}/{project_name}') @routes.get(r'/{channel_name}/{project_name}/') @routes.get(r'/{channel_name}/{project_name}/{json:json}') @routes.get(r'/{channel_name}/{project_name}/{version:' + VERSION_PATTERN + r'}') # noqa @routes.get(r'/{channel_name}/{project_name}/{version:' + VERSION_PATTERN + r'}/') # noqa @routes.get(r'/{channel_name}/{project_name}/{version:' + VERSION_PATTERN + r'}/{json:json}') # noqa async def get_project(request): channel_name = request.match_info['channel_name'] project_name = request.match_info['project_name'] version = request.match_info.get('version', None) version = model.Version(version) if version else None cache = request.config_dict['cache'] channel = await cache.channel( name=channel_name, releases_route=request.app.router[RELEASES_ROUTE] ) project = channel.project(project_name) 

# ---- pypare/pypi/__init__.py --------------------------------------------------

# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A very simple caching pypi proxy."""
from pathlib import Path

import aiohttp
import aiohttp.web
import aiohttp_jinja2
import jinja2
from packaging.version import VERSION_PATTERN
import structlog

from . import model
from .. import utils

RELEASES_ROUTE = 'releases'

log = structlog.get_logger()
routes = aiohttp.web.RouteTableDef()


def strip_verbose_pattern(pattern):
    """Strip comments and whitespace from a verbose regex pattern."""
    def _gen():
        for line in pattern.split('\n'):
            stripped, *_ = line.rsplit('#', 1)
            yield stripped.strip()

    return r''.join(_gen())


VERSION_PATTERN = strip_verbose_pattern(VERSION_PATTERN)


@routes.get('/')
async def index(request):
    config = request.config_dict['config']
    context = {
        'projects': [],
        'principal': 'default',
    }
    response = aiohttp_jinja2.render_template('simple.j2', request, context)
    return response


@routes.get(r'/{channel_name}/+releases/{path:[a-f0-9/]+}/{filename}',
            name=RELEASES_ROUTE)
async def serve_release(request):
    channel_name = request.match_info['channel_name']
    path = request.match_info['path']
    filename = request.match_info['filename']
    cache = request.config_dict['cache']
    channel = await cache.channel(
        name=channel_name,
        releases_route=request.app.router[RELEASES_ROUTE],
    )
    streamer = await model.CachingStreamer.from_channel_release_url(
        channel, path, filename,
    )
    return await streamer.response()


@routes.get(r'/{channel_name}/{project_name}')
@routes.get(r'/{channel_name}/{project_name}/')
@routes.get(r'/{channel_name}/{project_name}/{json:json}')
@routes.get(r'/{channel_name}/{project_name}/{version:' + VERSION_PATTERN + r'}')    # noqa
@routes.get(r'/{channel_name}/{project_name}/{version:' + VERSION_PATTERN + r'}/')    # noqa
@routes.get(r'/{channel_name}/{project_name}/{version:' + VERSION_PATTERN + r'}/{json:json}')    # noqa
async def get_project(request):
    channel_name = request.match_info['channel_name']
    project_name = request.match_info['project_name']
    version = request.match_info.get('version', None)
    # parse the version via the model helper
    version = model.ensure_semver(version) if version else None
    cache = request.config_dict['cache']
    channel = await cache.channel(
        name=channel_name,
        releases_route=request.app.router[RELEASES_ROUTE],
    )
    project = channel.project(project_name)
    try:
        metadata = await project.load_metadata(version)
    except model.MetadataNotFound:
        raise aiohttp.web.HTTPNotFound()
    except model.MetadataRetrievingError:
        raise aiohttp.web.HTTPInternalServerError()
    # if we have a json request, like the new pypi API
    if request.match_info.get('json', False):
        return utils.json_response(metadata)
    context = {
        'project_name': project_name,
        'metadata': metadata,
    }
    response = aiohttp_jinja2.render_template('simple_package.j2', request,
                                              context)
    return response


def jinja2_filter_basename(path):
    return Path(str(path)).name


def jinja2_filter_dirname(path):
    return Path(str(path)).parent


async def plug_me_in(app):
    defaults = {
        'upstream_channel_name': 'pypi',
        'upstream_channel_api_base': 'https://pypi.org/pypi',
        'upstream_channel_timeout': 60 * 60 * 24,
    }
    app.config.set_defaults(defaults)
    await app.plugin('pypare.plugins:plugin_task_context')

    # create the pypi subapp
    pypi_app = app.config['pypi_app'] = aiohttp.web.Application()
    cache = model.Cache(root=app.config['cache_root'])
    log.info('Using cache', cache_root=cache.root)
    pypi_app['cache'] = cache
    pypi_app.add_routes(routes)
    pypi_app['releases_route'] = pypi_app.router[RELEASES_ROUTE]

    # setup the default upstream channel
    # TODO should we really take the upstream config from the CLI?
    # This creates the conflict of overriding an already persistent upstream
    # config. It would be better to provide a separate CLI command for
    # creating channels.
    upstream_channel_name = app.config['upstream_channel_name']
    try:
        upstream_channel = await cache.channel(
            upstream_channel_name,
            releases_route=pypi_app['releases_route'],
        )
    except FileNotFoundError:
        # create the upstream channel
        upstream_channel = model.Channel.from_cache_only(
            cache,
            name=upstream_channel_name,
            releases_route=pypi_app['releases_route'],
            upstream_api_base=app.config['upstream_channel_api_base'],
            timeout=app.config['upstream_channel_timeout'],
        )
        await upstream_channel.store()
        log.info('Created default upstream channel',
                 name=upstream_channel_name)
    app['upstream_channel'] = upstream_channel

    template_path = str(Path(__spec__.origin).parent / 'templates')
    aiohttp_jinja2.setup(
        pypi_app,
        loader=jinja2.FileSystemLoader(template_path),
        filters={
            'basename': jinja2_filter_basename,
            'dirname': jinja2_filter_dirname,
        },
    )
    app.add_subapp(app.config['base_path'], pypi_app)
    log.info('Added subapp', prefix=app.config['base_path'],
             resources=pypi_app.router._resources)
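
# ---- sketch: the stripped VERSION_PATTERN (illustrative, not shipped) -------

# Why `strip_verbose_pattern` exists: packaging's VERSION_PATTERN is written
# for re.VERBOSE, while the route definitions above embed it in plain regex
# strings, so comments and whitespace have to go. The stripped pattern still
# matches PEP 440 versions:

import re

from packaging.version import VERSION_PATTERN
from pypare.pypi import strip_verbose_pattern

pattern = re.compile(strip_verbose_pattern(VERSION_PATTERN), re.IGNORECASE)
assert pattern.match('1.0')
assert pattern.match('2018.11.13.dev4+g1234abc')
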

# ---- pypare/pypi/model.py -----------------------------------------------------

import asyncio
import collections
import collections.abc
import functools
import hashlib
from pathlib import Path
import time
import typing

import aiofiles
import aiohttp.web
import aiotask_context
import attr
import inotipy
from packaging import version as pkg_version
import structlog

from .. import utils

log = structlog.get_logger()

DEFAULT_CACHE_TIMEOUT = 60 * 60 * 24
"""Cache timeout defaults to one day."""

CHANNELS_NAME = 'channels'
PROJECTS_NAME = 'projects'
RELEASES_NAME = 'releases'
LATEST_NAME = 'latest'
METADATA_FILENAME = 'metadata.json'
UPSTREAM_METADATA_FILENAME = 'upstream-metadata.json'
FILES_NAME = 'files'
PREPARING_SUFFIX = '.preparing'


def ensure_cls(cls, *containers, key_converter=None):
    """If the attribute is an instance of cls, pass, else try constructing."""
    def converter(val):
        val = val if isinstance(val, cls) else cls(**val)
        return val

    def converter_list(converter, val):
        return [converter(item) for item in val]

    def converter_dict(converter, val):
        return {key_converter(key) if key_converter else key: converter(value)
                for key, value in val.items()}

    if containers:
        for container in reversed(containers):
            if container is list:
                converter = functools.partial(converter_list, converter)
            if container is dict:
                converter = functools.partial(converter_dict, converter)
    return converter


def ensure_semver(val):
    if isinstance(val, pkg_version._BaseVersion):
        return val
    val = pkg_version.parse(val)
    return val


class str_keyed_dict(dict):
    """A str-keyed dict."""

    def __init__(self, *args, **kwargs):
        if len(args) == 1:
            it, = args
            super().__init__((
                (str(key), value)
                for key, value in (it if isinstance(
                    it, collections.abc.Iterable) else it.iter())
            ), **kwargs)

    def __setitem__(self, key, value):
        super().__setitem__(str(key), value)


def ensure_parents(path):
    """Create the parent path if it does not exist."""
    if not path.parent.exists():
        path.parent.mkdir(parents=True)


def symlink_relative_to(from_path, to_path):
    """Create a relative symlink.

    The common base path is searched, and its distance to from_path is
    substituted with `..` parts and prefixed before to_path.
    """
    from_parts = from_path.parts
    to_parts = to_path.parts
    i = 0
    common_path = None
    # stop before either tuple is exhausted to avoid an IndexError
    while i < len(from_parts) and i < len(to_parts):
        if to_parts[i] == from_parts[i]:
            i += 1
            continue
        common_path = Path(*from_parts[:i])
        break
    if common_path:
        prefix_path = Path(
            *['..']
            # parents contains the dot dir as last parent,
            # so we start from the parent
            * len(from_path.relative_to(common_path).parent.parents)
        )
        to_path = prefix_path / to_path.relative_to(common_path)
    if from_path.exists():
        from_path.unlink()
    from_path.symlink_to(to_path)
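
# ---- sketch: composing converters with ensure_cls (illustrative) ------------

# How `ensure_cls` above composes converters: containers are applied
# outside-in, so ensure_cls(cls, dict, list) converts a dict of lists of raw
# dicts into a dict of lists of instances. The `Point` class is a made-up
# stand-in to keep the sketch self-contained.

import attr


@attr.s(kw_only=True, auto_attribs=True)
class Point:
    x: int
    y: int


convert = ensure_cls(Point, dict, list, key_converter=str)
result = convert({1: [{'x': 0, 'y': 1}], 2: [Point(x=2, y=3)]})
assert result == {'1': [Point(x=0, y=1)], '2': [Point(x=2, y=3)]}
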

class CacheException(Exception):
    pass


class MetadataNotFound(CacheException):
    pass


class MetadataRetrievingError(CacheException):
    pass


@attr.s(kw_only=True, auto_attribs=True)    # noqa: R0903
class Info:
    author: str
    author_email: str
    bugtrack_url: str
    classifiers: typing.List[str]
    description: str
    description_content_type: str
    docs_url: typing.List[str]
    download_url: str
    downloads: typing.Dict[str, int]
    home_page: str
    keywords: str
    license: str
    maintainer: str
    maintainer_email: str
    name: str
    package_url: str
    platform: str
    project_url: str
    project_urls: typing.List[str]
    release_url: str
    requires_dist: typing.List[str]
    requires_python: str
    summary: str
    version: pkg_version._BaseVersion = attr.ib(converter=ensure_semver)

    @classmethod
    def from_dict(cls, dct):
        info = cls(**dct)
        return info


@attr.s(kw_only=True, auto_attribs=True)
class Release:
    comment_text: str
    digests: typing.Dict[str, str]
    downloads: int
    filename: str
    has_sig: bool
    md5_digest: str
    packagetype: str
    python_version: str
    requires_python: str
    size: int
    upload_time: str
    url: str

    @classmethod
    def from_dict(cls, dct):
        release = cls(**dct)
        return release

    @utils.reify
    def path_hashs(self):
        """A unique path to the release.

        We assume that the name of the file is unique over project, version
        and packagetype.
        """
        unique_name = ''.join((
            self.filename,
        )).encode('utf-8')
        path_hashs = [
            hashlib.blake2s(unique_name,    # noqa: E1101
                            digest_size=digest_size,
                            person=b'ChxRel').hexdigest()
            for digest_size in (1, 2, hashlib.blake2s.MAX_DIGEST_SIZE)    # noqa: E1101
        ]
        return path_hashs

    def channel_path(self, channel):
        path = channel.releases_path(*self.path_hashs)
        return path
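
# ---- sketch: the release path scheme (illustrative) --------------------------

# `Release.path_hashs` above derives three blake2s digests of the filename,
# 1, 2 and 32 bytes long, which become nested directories: the two short
# segments fan the cache out so no single directory grows huge. The filename
# is made up.

import hashlib

name = b'pip-18.1-py2.py3-none-any.whl'
hashs = [hashlib.blake2s(name, digest_size=n, person=b'ChxRel').hexdigest()
         for n in (1, 2, hashlib.blake2s.MAX_DIGEST_SIZE)]
print('/'.join(hashs))    # format: <2 hex chars>/<4 hex chars>/<64 hex chars>
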

@attr.s(kw_only=True, auto_attribs=True)
class Metadata:    # noqa: R0903
    info: Info = attr.ib(converter=ensure_cls(Info))
    releases: typing.Dict[str, typing.List[Release]] = attr.ib(
        converter=ensure_cls(Release, dict, list,
                             key_converter=ensure_semver),
        factory=dict)
    urls: typing.List[Release] = attr.ib(
        converter=ensure_cls(Release, list), factory=list)
    last_serial: int = 0

    @classmethod
    def from_dict(cls, dct):
        project = cls(**dct)
        return project

    def __json__(self):
        dct = attr.asdict(self, dict_factory=str_keyed_dict)
        return dct


class ACL:    # noqa: R0903
    allow = 'access:allow'
    deny = 'access:deny'
    unauthorized = 'auth:unauthorized'
    authorized = 'auth:authorized'
    all = 'auth:all'
    admin = 'perm:admin'
    group = 'perm:group'
    read = 'perm:read'
    write = 'perm:write'


@attr.s(kw_only=True, auto_attribs=True)    # noqa: R0903
class Cache:    # noqa: R0903
    root: Path = attr.ib(converter=Path)

    def path(self, *path):
        """Return a path relative to the cache root."""
        return self.root / Path(*path)

    async def channel(self, name, **kwargs):
        channel = await Channel.from_cache(self, name=name, **kwargs)
        return channel


@attr.s(auto_attribs=True)    # noqa: R0903
class ACE:
    permits: bool
    principal: str
    permission: str


@attr.s(kw_only=True, auto_attribs=True)    # noqa: R0903
class Channel(Cache):
    name: str = 'pypi'
    acl: typing.List[ACE] = [ACE(ACL.allow, ACL.unauthorized, ACL.read)]
    timeout: float = DEFAULT_CACHE_TIMEOUT
    upstream_enabled: bool = True
    upstream_api_base: str = 'https://pypi.org/pypi'
    releases_route: aiohttp.web.AbstractRoute
    """The route which handles release requests."""

    @classmethod
    async def from_cache(cls, cache, *, name, **kwargs):
        root = cache.path(CHANNELS_NAME, name)
        channel = await cls.from_path(root, name=name, **kwargs)
        return channel

    @classmethod
    def from_cache_only(cls, cache, *, name, **kwargs):
        root = cache.path(CHANNELS_NAME, name)
        channel = cls(root=root, name=name, **kwargs)
        return channel

    @classmethod
    async def from_path(cls, root, **kwargs):
        async with aiofiles.open(root / METADATA_FILENAME) as f:
            data = utils.json_loads(await f.read())
        data.update(kwargs)
        channel = cls(root=root, **data)
        return channel

    async def store(self):
        data = attr.asdict(self, dict_factory=str_keyed_dict)
        # sanitize
        del data['releases_route']
        del data['root']
        metadata_path = self.root / METADATA_FILENAME
        ensure_parents(metadata_path)
        async with aiofiles.open(metadata_path, 'x') as f:
            await f.write(utils.json_dumps(data))

    async def release(self, path):
        """The release for this path."""
        release_path = self.releases_path(path) / METADATA_FILENAME
        async with aiofiles.open(release_path) as f:
            data = utils.json_loads(await f.read())
        release = Release.from_dict(data)
        return release

    async def upstream_release(self, path):
        """The upstream release for this path."""
        release_path = self.releases_path(path) / UPSTREAM_METADATA_FILENAME
        async with aiofiles.open(release_path) as f:
            data = utils.json_loads(await f.read())
        release = Release.from_dict(data)
        return release

    def projects_path(self, *path):
        projects_path = self.path(PROJECTS_NAME, *path)
        return projects_path

    def releases_path(self, *path):
        releases_path = self.path(RELEASES_NAME, *path)
        return releases_path

    def project(self, project_name):
        """Return the project.

        If this channel is an upstream channel, we create an
        :py:obj:`UpstreamProject`.
        """
        if self.upstream_enabled:
            project = UpstreamProject(channel=self, name=project_name,
                                      api_base=self.upstream_api_base)
        else:
            project = Project(channel=self, name=project_name)
        return project

    async def store_project(self, data):
        """Create or update a project's metadata structures.

        :param data: the raw parsed metadata, as the json API of pypi
                     provides it

        ::

            channels/<channel>/
            |
            +- projects/<p>/<project>/<version>/
            |  |
            |  +- metadata.json
            |  |
            |  +- files/<filename>.json <------+
            |                                  |
            +- releases/<hash...>/             |
               |                               |
               +- metadata.json ---------------+
        """
        metadata = Metadata.from_dict(data)
        project_name = metadata.info.name
        project = self.project(project_name)
        await project.store_metadata(metadata)
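
# ---- sketch: where things land on disk (illustrative) ------------------------

# A pure-pathlib mirror of Cache.path, Channel.projects_path and the Project
# paths defined in this module; the cache root, project name and version are
# made up.

from pathlib import Path

cache_root = Path('/var/cache/pypare')
channel_root = cache_root / 'channels' / 'pypi'
project_name = 'aiohttp'
project_path = channel_root / 'projects' / project_name[0] / project_name
info_path = project_path / '3.5.4' / 'metadata.json'
latest_link = project_path / 'latest'    # symlink to the newest version dir
print(info_path)
# /var/cache/pypare/channels/pypi/projects/a/aiohttp/3.5.4/metadata.json
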

@attr.s(kw_only=True, auto_attribs=True)
class Project:
    """A Project embodies a pypi project."""

    log = structlog.get_logger(':'.join((__module__, __qualname__)))    # noqa: E0602

    channel: Channel
    name: str

    @utils.reify
    def path(self):
        """The project root path."""
        path = self.channel.projects_path(self.name[0], self.name)
        return path

    @utils.reify
    def path_latest(self):
        """The directory of the latest version."""
        return self.path / LATEST_NAME

    @property
    def latest_version(self):
        """The version number of the latest version."""
        if not self.path_latest.is_symlink():
            return None
        name = self.path_latest.resolve().name
        version = pkg_version.parse(name)
        return version

    def version_path(self, version):
        """The path of a version."""
        # we need to cast the version into str
        path = self.path / str(version)
        return path

    def info_path(self, version):
        """The path to the metadata of a version."""
        path = self.version_path(version) / METADATA_FILENAME
        return path

    async def load_metadata(self, version=None):
        """Return the metadata for this project and version."""
        if version is None:
            version = self.latest_version
        info_path = self.info_path(version)
        async with aiofiles.open(info_path) as f:
            data = await f.read()
        info = Info.from_dict(utils.json_loads(data))
        self.log.debug('Loaded project info', path=info_path)
        # contains all version releases
        releases = collections.defaultdict(list)
        # contains only the releases of the selected version
        urls = []
        # find all <version>/files/*.json files
        for version_path in self.path.iterdir():
            name = version_path.name
            if name in ('latest',):
                continue
            found_version = pkg_version.parse(name)
            for path in version_path.glob('files/*.json'):
                async with aiofiles.open(path) as f:
                    data = await f.read()
                release = Release.from_dict(utils.json_loads(data))
                self.log.debug('Loaded release metadata', path=path,
                               version=found_version)
                releases[found_version].append(release)
                if found_version == version:
                    urls.append(release)
        metadata = Metadata(
            info=info,
            releases=releases,
            urls=urls,
        )
        return metadata

    async def store_metadata(self, metadata):
        self.log.debug('Updating metadata', project=self)
        await self.store_info(metadata.info)
        await self.store_releases(metadata.releases)

    async def store_info(self, info):
        # lookup the latest info
        version_path = self.version_path(info.version)
        info_path = version_path / METADATA_FILENAME
        ensure_parents(info_path)
        async with aiofiles.open(info_path, 'w') as f:
            data = utils.json_dumps(attr.asdict(info))
            await f.write(data)
        self.log.debug('Stored project info', name=self.name,
                       path=info_path, version=info.version)
        if self.latest_version is None or info.version > self.latest_version:
            # replace an existing latest link with the newer version
            if self.path_latest.is_symlink():
                self.path_latest.unlink()
            self.path_latest.symlink_to(
                version_path.relative_to(self.path_latest.parent)
            )

    async def store_releases(self, releases):
        for version, packages in releases.items():
            for release in packages:
                await self.store_release(version, release)

    async def store_release(self, version, release):
        """Update a package release."""
        version_path = self.version_path(version)
        release_metadata_path = (
            version_path / FILES_NAME / release.filename
        ).with_suffix('.json')
        ensure_parents(release_metadata_path)

        # write the metadata
        self.log.debug('Storing release metadata', version=version,
                       filename=release.filename,
                       path=release_metadata_path)
        async with aiofiles.open(release_metadata_path, 'w') as f:
            data = utils.json_dumps(attr.asdict(release,
                                                dict_factory=str_keyed_dict))
            await f.write(data)
        self.log.debug('Stored release metadata', version=version,
                       filename=release.filename,
                       path=release_metadata_path)

        # link the metadata to the release path
        channel_release_path = release.channel_path(self.channel)
        release_path_metadata_link = channel_release_path / METADATA_FILENAME
        ensure_parents(release_path_metadata_link)
        symlink_relative_to(release_path_metadata_link,
                            release_metadata_path)


@attr.s(kw_only=True, auto_attribs=True)
class UpstreamProject(Project):
    log = structlog.get_logger(':'.join((__module__, __qualname__)))    # noqa: E0602

    api_base: str = 'https://pypi.org/pypi'

    @property
    def mtime(self):
        try:
            max_mtime = max(path.stat().st_mtime
                            for path in self.path.glob('**/*')
                            if path.is_file())
            return max_mtime
        except ValueError:
            return None

    def url(self, version=None):
        """The upstream url for a specific or the latest version."""
        def _gen():
            yield self.api_base
            yield self.name
            if version:
                yield str(version)
            yield 'json'

        url = '/'.join(_gen())
        return url

    @property
    def needs_update(self):
        """Either there is no latest version or the timeout is over."""
        mtime = self.mtime
        return ((time.time() - mtime) >= self.channel.timeout
                if mtime else True)

    async def load_metadata(self, version=None):
        if not self.needs_update:
            metadata = await super().load_metadata(version)
            return metadata
        # update and cache
        url = self.url(version)
        client_session = aiotask_context.get('client_session')
        self.log.info('Loading upstream metadata', url=url)
        async with client_session.get(url) as r:
            if r.status == 200:
                data = await r.json()
                metadata = Metadata.from_dict(data)
                await self.store_metadata(metadata)
                self.log.info('Stored upstream metadata', response=r)
                metadata = await super().load_metadata(version)
                return metadata
            if r.status == 404:
                log.error('Metadata not found', project=self, response=r)
                raise MetadataNotFound(self, r)
            self.log.error('Error while retrieving metadata', project=self,
                           response=r)
            raise MetadataRetrievingError(self, r)

    async def store_release(self, version, release):
        """Update a package release.

        The release url is transformed to point to ourself, but the original
        upstream metadata is stored too for later processing.
        """
        channel_release_path = release.channel_path(self.channel)

        # store the upstream metadata
        upstream_release_metadata_path \
            = channel_release_path / UPSTREAM_METADATA_FILENAME
        ensure_parents(upstream_release_metadata_path)
        self.log.debug('Storing upstream release metadata', version=version,
                       path=upstream_release_metadata_path)
        async with aiofiles.open(upstream_release_metadata_path, 'w') as f:
            data = utils.json_dumps(attr.asdict(release,
                                                dict_factory=str_keyed_dict))
            await f.write(data)
        self.log.debug('Stored upstream release metadata', version=version,
                       path=upstream_release_metadata_path)

        # inject our own url
        release.url = self.release_url(release)
        await super().store_release(version, release)

    def release_url(self, release):
        """Create the release url for the actual configuration."""
        path = Path('/') / Path(*release.path_hashs)
        url = self.channel.releases_route.url_for(
            channel_name=self.channel.name,
            path=str(path.relative_to('/')),
            filename=release.filename,
        )
        return url
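
# ---- sketch: upstream JSON API urls (illustrative) ---------------------------

# A standalone re-implementation of `UpstreamProject.url` above, showing the
# URLs it produces against pypi.org's real JSON API:

def upstream_url(api_base, name, version=None):
    parts = [api_base, name] + ([str(version)] if version else []) + ['json']
    return '/'.join(parts)


assert upstream_url('https://pypi.org/pypi', 'pip') \
    == 'https://pypi.org/pypi/pip/json'
assert upstream_url('https://pypi.org/pypi', 'pip', '18.1') \
    == 'https://pypi.org/pypi/pip/18.1/json'
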
""" # create the our own url channel_release_path = release.channel_path(self.channel) # store upstream metadata upstream_release_metadata_path \ = channel_release_path / UPSTREAM_METADATA_FILENAME ensure_parents(upstream_release_metadata_path) self.log.debug('Storing upstream release metadata', version=version, path=upstream_release_metadata_path) async with aiofiles.open(upstream_release_metadata_path, 'w') as f: data = utils.json_dumps(attr.asdict(release, dict_factory=str_keyed_dict)) await f.write(data) self.log.debug('Stored upstream release metadata', version=version, path=upstream_release_metadata_path) # inject our own url release.url = self.release_url(release) await super().store_release(version, release) def release_url(self, release): """Create the release url for the actual configuration.""" path = Path('/') / Path(*release.path_hashs) url = self.channel.releases_route.url_for( channel_name=self.channel.name, path=str(path.relative_to('/')), filename=release.filename ) return url @attr.s(kw_only=True, auto_attribs=True) class CachingStreamer: log = structlog.get_logger(':'.join((__module__, __qualname__))) # noqa: E0602 url: str path: Path upstream_enabled: bool @classmethod async def from_channel_release_url(cls, channel, path, filename): release = await channel.release(path) upstream_release = await channel.upstream_release(path) streamer = cls( url=upstream_release.url, path=release.channel_path(channel) / filename, upstream_enabled=channel.upstream_enabled ) return streamer @utils.reify def path_preparing(self): path = self.path.with_name(self.path.name + PREPARING_SUFFIX) return path async def stream(self, force=False): """Stream a released version file. If there is a cache, we assume this release version is final and we stream from it. Otherwise we download the data, store and stream it. :param force: force recaching """ if not self.path.is_file() or force: if self.path_preparing.is_file(): async def _gen(): # serve from intermediate file async for data in self._stream_from_intermediate(): yield data elif self.upstream_enabled: # testing for 404 client_session = aiotask_context.get('client_session') async with client_session.head(self.url) as r: if r.status != 200: if r.status == 404: raise aiohttp.web.HTTPNotFound() raise aiohttp.web.HTTPInternalServerError() async def _gen(): async for data in self._stream_and_cache(): yield data else: raise aiohttp.web.HTTPNotFound() else: async def _gen(): self.log.info('Serving cache', path=self.path) async with aiofiles.open(self.path, 'rb') as f: async for data in ChunkedFileIterator(f, chunk_size=2**14): self.log.debug('Stream data', size=len(data)) yield data return _gen() async def _stream_and_cache(self): """Stream data from upstream and cache them. The download and caching is done in the background, to prevent disconnecting clients from stopping it. 
""" client_session = aiotask_context.get('client_session') self.log.info('Caching upstream', url=self.url, path=self.path) queue = asyncio.Queue() fut_finished = asyncio.Future() cur_task = asyncio.current_task() async def _stream_queue(): while queue.qsize() or not fut_finished.done(): data = await queue.get() try: yield data finally: queue.task_done() async def _enqueue_upstream(): try: log.debug('Streaming from upstream into file and queue', file=self.path_preparing, url=self.url) async with aiofiles.open(self.path_preparing, 'xb') as f: async with client_session.get(self.url) as r: async for data, _ in r.content.iter_chunks(): await f.write(data) await queue.put(data) fut_finished.set_result(True) self.path_preparing.rename(self.path) self.log.info('Finished download', path=self.path) except (asyncio.CancelledError, IOError, Exception) as ex: # noqa: W0703 cur_task.cancel() # cleanup broken download self.log.error('Cleaning broken download', path=self.path_preparing, error=ex) try: self.path_preparing.unlink() except FileNotFoundError: pass # TODO use aiojobs ??? to cancel this future graceully # GeneratorExit asyncio.ensure_future(_enqueue_upstream()) async for data in _stream_queue(): yield data async def _stream_from_intermediate(self): self.log.info('Stream from intermediate', path=self.path_preparing) watcher = inotipy.Watcher.create() watcher.watch(str(self.path_preparing), inotipy.IN.MOVE_SELF | inotipy.IN.DELETE_SELF | inotipy.IN.CLOSE_WRITE | inotipy.IN.MODIFY ) fut_finished = asyncio.Future() ev_write = asyncio.Event() async def _wait_for_event(): while True: event = await watcher.get() self.log.debug('File event', file_event=event, watch=event.watch) if event.mask & inotipy.IN.MODIFY: ev_write.set() if event.mask & inotipy.IN.DELETE_SELF: fut_finished.set_exception(FileNotFoundError(event)) break if event.mask & ( inotipy.IN.MOVE_SELF | inotipy.IN.CLOSE_WRITE): fut_finished.set_result(event) break async with aiofiles.open(self.path_preparing, 'rb') as f: while True: data = await f.read() if data: yield data elif fut_finished.done(): self.log.info('Intermediate finished', result=await fut_finished) break else: # wait for next write event await ev_write.wait() ev_write.clear() async def response(self): headers = { 'Content-disposition': f'attachment; filename={self.path.name}' } stream = await self.stream() return aiohttp.web.Response( body=stream, headers=headers ) class ChunkedFileIterator: """Iterate and yield binary data chunks from a file. :param file: the file :param chunk_size: the size of the chunks to yield .. code-block:: python async for data in ChunkedFileIterator(f): yield data """ def __init__(self, file, chunk_size=2**14): self.file = file self.chunk_size = chunk_size def __aiter__(self): """We are our own iterator.""" return self async def __anext__(self): """Simulate normal file iteration.""" chunk = await self.file.read(self.chunk_size) if chunk: return chunk raise StopAsyncIteration PK! n;pypare/pypi/templates/simple.j2 Projects for {{ principal }}
{# ---- pypare/pypi/templates/simple.j2 ---- #}
<!DOCTYPE html>
<html>
  <head>
    <title>Projects for {{ principal }}</title>
  </head>
  <body>
    <h1>Projects for {{ principal }}</h1>
    {% for version, release in metadata.releases.items() %}
    {% for release_file in release %}
    <a href="{{ release_file.url }}">{{ release_file.url | basename }}</a><br>
    {% endfor %}
    {% endfor %}
  </body>
</html>
{# ---- pypare/pypi/templates/simple_package.j2 ---- #}
<!DOCTYPE html>
<html>
  <head>
    <title>Links for {{ project_name }}</title>
  </head>
  <body>
    <h1>Links for {{ project_name }}</h1>
    {% for version, release in metadata.releases.items() %}
    {% for release_file in release %}
    <a href="{{ release_file.url }}">{{ release_file.url | basename }}</a><br>
    {% endfor %}
    {% endfor %}
  </body>
</html>


# ---- pypare/scripts/__init__.py -----------------------------------------------

# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import importlib.util
from pathlib import Path

import click
import structlog

from .. import __version__, config, logging


def find_loop_specs():
    """Just find specs for common loops."""
    module_specs = (
        (module_name, importlib.util.find_spec(module_name))
        for module_name in ('asyncio', 'uvloop', 'tokio')
    )
    available_specs = {
        module_name: spec
        for module_name, spec in module_specs
        if spec is not None
    }
    return available_specs


def set_loop_policy(event_loop):
    log = structlog.get_logger()
    if event_loop == 'uvloop':
        try:
            import uvloop
            asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
            log.info("Using uvloop event loop policy")
        except ImportError:
            log.warning("uvloop is not available.")
    elif event_loop == 'tokio':
        try:
            import tokio
            asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
            log.info("Using tokio event loop policy")
        except ImportError:
            log.warning("tokio is not available.")
    else:
        # set the default policy
        asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())


@click.group(context_settings=logging.CONTEXT_SETTINGS)
@click.option('--log-level', default=logging.DEFAULT_LOGGING_LEVEL,
              show_default=True,
              type=click.Choice(logging.LOGGING_LEVEL_NAMES),
              help='The logging level.')
@click.option('event_loop', '--loop', default='asyncio', show_default=True,
              type=click.Choice(find_loop_specs().keys()),
              help='Use a different loop policy.')
@click.version_option(__version__)
@click.pass_obj
def cli(obj, log_level, event_loop):
    obj['debug'] = logging.is_debug(log_level)
    logging.setup_logging(level=log_level)
    set_loop_policy(event_loop)


class ClickPath(click.Path):
    def convert(self, value, param, ctx):
        value = super().convert(value, param, ctx)
        value = Path(value).expanduser().resolve()
        return value


@cli.command('pypi')
@click.option('-p', '--port', default=3141, type=int, show_default=True,
              help='The port to run the server on.')
@click.option('-h', '--host', default='0.0.0.0', show_default=True,
              help='The server host IP.')
@click.option('base_path', '-b', '--base-path', type=click.Path(),
              default='/pypi', show_default=True,
              help='The base path for this application.')
@click.option('cache_root', '-c', '--cache-root',
              type=ClickPath(file_okay=False, dir_okay=True, writable=True),
              default='~/.cache/pypare', show_default=True,
              help='The cache directory, where files are stored.')
@click.option('upstream_channel_name', '-u', '--upstream-channel',
              default='pypi',
              help='The name of the upstream channel.')
@click.option('upstream_channel_api_base', '--upstream-channel-url',
              default='https://pypi.org/pypi',
              help='The base API URL of the upstream channel.')
@click.option('upstream_channel_timeout', '--upstream-channel-timeout',
              default=60 * 60 * 24,
              help='The timeout after which upstream is asked for new '
                   'metadata.')
@click.option('plugins', '--plugin', multiple=True,
              help='A plugin in pkg_resources notation to load.')
@click.pass_obj
def cli_cache(obj, **pypi_config):
    """Run a simple pypi caching proxy."""
    conf = config.AioHttpConfig(pypi_config, debug=obj['debug'])
    conf['plugins'].append('pypare.pypi')
    conf.run()


# @cli.command('resolve')
# def cli_resolve():
#     pass


def main():
    cli(obj={}, auto_envvar_prefix='PYPARE')    # noqa: E1123


# ---- pypare/utils.py ----------------------------------------------------------

# Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import json
import pathlib

import aiohttp.web
from packaging.version import Version
import structlog
import yarl

json_adapter_registry = {}
"""A mapping of types to json adapter functions."""


def json_adapter(json_cls, adapter=None):
    """Register an adapter function to transform an obj into something
    json-serializable."""
    if adapter is None:
        return lambda func: json_adapter(json_cls, func)
    if json_cls not in json_adapter_registry:
        json_adapter_registry[json_cls] = adapter
    else:
        log = structlog.get_logger()
        log.warning('JSON adapter for this type already registered',
                    type=json_cls,
                    adapter=json_adapter_registry[json_cls])
    return adapter


def find_json_adapter(json_obj):
    """Find the appropriate adapter based on the MRO of the instance type."""
    for json_obj_cls in json_obj.__class__.__mro__:
        adapter = json_adapter_registry.get(json_obj_cls)
        if adapter is not None:
            return adapter
    return None


def json_default_handler(obj):
    """Try to convert an object into a json-renderable type.

    If the object has a ``__json__`` method, we call it first. After all we
    try to adapt that result via the registered json adapters.
    """
    if hasattr(obj, '__json__') and callable(obj.__json__):
        result = obj.__json__()
    else:
        result = obj
    # try to adapt
    adapter = find_json_adapter(obj)
    if adapter:
        result = adapter(obj)
    return result


json_loads = json.loads
json_dumps = functools.partial(json.dumps, default=json_default_handler)
json_response = functools.partial(aiohttp.web.json_response,
                                  dumps=json_dumps)


@json_adapter(pathlib.Path)
def adapt_path(obj):
    return str(obj)


@json_adapter(yarl.URL)
def adapt_url(obj):
    return str(obj)


@json_adapter(Version)
def adapt_version(obj):
    return str(obj)


class reify:    # noqa: R0903
    def __init__(self, wrapped):
        self.wrapped = wrapped
        functools.update_wrapper(self, wrapped)

    def __get__(self, inst, cls=None):
        if inst is None:
            return self
        val = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, val)
        return val
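
# ---- sketch: json adapters and reify (illustrative, not shipped) -------------

# Registering an adapter for a custom type and letting `json_dumps` pick it
# up via `json_default_handler`, plus `reify` caching a computed attribute
# after the first access. The `Download` class is made up.

import uuid

from pypare.utils import json_adapter, json_dumps, reify


@json_adapter(uuid.UUID)
def adapt_uuid(obj):
    return str(obj)


print(json_dumps({'id': uuid.UUID(int=1)}))


class Download:
    @reify
    def checksum(self):
        print('computed once')
        return 'abc123'


d = Download()
assert d.checksum == d.checksum    # second access hits the cached attribute
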
pypare-0.3.0.dist-info/LICENSE.txt: the standard Apache License 2.0 text.