PK!q&&reprobench/__init__.pyname = "reprobench" VERSION = "0.9.2" PK!, reprobench/console/decorators.pyimport functools import os import sys import click from loguru import logger def common(func): @click.option("-q", "--quiet", is_flag=True) @click.option( "--verbose", "-v", "verbosity", count=True, default=0, help="Verbosity" ) @functools.wraps(func) def wrapper(*args, **kwargs): quiet = kwargs.pop("quiet") verbosity = kwargs.pop("verbosity") sys.path.append(os.getcwd()) logger.remove() if not quiet: verbosity_levels = ["INFO", "DEBUG", "TRACE"] verbosity = min(verbosity, 2) logger.add(sys.stderr, level=verbosity_levels[verbosity]) return func(*args, **kwargs) return wrapper def server_info(func): @click.option("-a", "--address", default="tcp://127.0.0.1:31313", show_default=True) @functools.wraps(func) def wrapper(*args, **kwargs): server_address = kwargs.pop("address") return func(server_address=server_address, *args, **kwargs) return wrapper PK!**reprobench/console/main.py#!/usr/bin/env python import click from reprobench.core.bootstrap import cli as bootstrap_cli from .status import benchmark_status @click.group() @click.version_option() def cli(): pass cli.add_command(bootstrap_cli) try: from reprobench.core.server import cli as server_cli cli.add_command(server_cli) cli.add_command(benchmark_status) except ImportError: pass try: from reprobench.core.worker import cli as worker_cli cli.add_command(worker_cli) except ImportError: pass try: from reprobench.managers import cli as manager_cli cli.add_command(manager_cli) except ImportError: pass try: from reprobench.core.analyzer import cli as analyzer_cli cli.add_command(analyzer_cli) except ImportError: pass if __name__ == "__main__": cli() PK!Bߘreprobench/console/status.pyimport time import click from tqdm import tqdm from reprobench.core.db import Run from reprobench.utils import init_db def get_total_count(): return Run.select().count() def get_done_count(): return Run.select().where(Run.status == Run.DONE).count() @click.command("status") @click.option("-d", "--database", default="./output/benchmark.db", show_default=True) @click.option("-n", "--interval", default=2, show_default=True, type=int) def benchmark_status(database, interval): init_db(database) total = get_total_count() last = get_done_count() progress = tqdm(total=total, initial=last) while last < total: time.sleep(interval) current = get_done_count() progress.update(current - last) last = current PK!nR\\reprobench/core/analyzer.pyimport json import click from loguru import logger from playhouse.apsw_ext import APSWDatabase from reprobench.console.decorators import common from reprobench.core.db import Step, db from reprobench.utils import get_db_path, import_class class BenchmarkAnalyzer(object): def __init__(self, output_dir, **kwargs): self.output_dir = output_dir self.db_path = get_db_path(output_dir) db.initialize(APSWDatabase(self.db_path)) def run(self): steps = Step.select().where(Step.category == Step.ANALYSIS) context = dict(output_dir=self.output_dir, db_path=self.db_path) for step in steps: logger.debug(f"Running {step.module}") module = import_class(step.module) config = json.loads(step.config) module.execute(context, config) @click.command(name="analyze") @click.option( "-d", "--output-dir", type=click.Path(), default="./output", show_default=True ) @common def cli(output_dir, **kwargs): analyzer = BenchmarkAnalyzer(output_dir, **kwargs) analyzer.run() if __name__ == "__main__": cli() PK!K+ureprobench/core/base.pyfrom reprobench.utils import recv_event try: import zmq.green as zmq except ImportError: pass class Observer: SUBSCRIBED_EVENTS = [] @classmethod def observe(cls, context, backend_address, reply): socket = context.socket(zmq.SUB) socket.connect(backend_address) for event in cls.SUBSCRIBED_EVENTS: socket.setsockopt(zmq.SUBSCRIBE, event) while True: event_type, payload, address = recv_event(socket) cls.handle_event(event_type, payload, reply=reply, address=address) @classmethod def handle_event(cls, event_type, payload, **kwargs): pass class Step: @classmethod def register(cls, config=None): pass @classmethod def execute(cls, context, config=None): pass class Tool: name = "Base Tool" def __init__(self, context): self.cwd = context["run"]["directory"] self.parameters = context["run"]["parameters"] self.task = context["run"]["task"] def run(self, executor): raise NotImplementedError def get_output(self): raise NotImplementedError def get_error(self): raise NotImplementedError @classmethod def setup(cls): pass @classmethod def version(cls): return "1.0.0" @classmethod def is_ready(cls): pass @classmethod def teardown(cls): pass PK!"=[F##reprobench/core/bootstrap.pyimport atexit import itertools import json import re import shutil from ast import literal_eval from collections.abc import Iterable from pathlib import Path import click import numpy import zmq from loguru import logger from tqdm import tqdm from reprobench.console.decorators import common, server_info from reprobench.core.db import ( MODELS, Limit, Observer, Parameter, ParameterGroup, Run, Step, Task, TaskGroup, Tool, db, ) from reprobench.core.events import BOOTSTRAP from reprobench.core.exceptions import NotSupportedError from reprobench.task_sources.doi import DOISource from reprobench.task_sources.local import LocalSource from reprobench.task_sources.url import UrlSource from reprobench.utils import ( get_db_path, import_class, init_db, is_range_str, read_config, send_event, str_to_range, ) try: from ConfigSpace.read_and_write import pcs except ImportError: pcs = None def _bootstrap_db(config): logger.info("Bootstrapping db...") db.connect() db.create_tables(MODELS) Limit.insert_many( [{"key": key, "value": value} for (key, value) in config["limits"].items()] ).execute() Step.insert_many( [ { "category": key, "module": step["module"], "config": json.dumps(step.get("config", None)), } for key, steps in config["steps"].items() for step in steps ] ).execute() Observer.insert_many( [ { "module": observer["module"], "config": json.dumps(observer.get("config", None)), } for observer in config["observers"] ] ).execute() def _get_pcs_parameter_range(parameter_str, is_categorical): functions = dict( range=range, arange=numpy.arange, linspace=numpy.linspace, logspace=numpy.logspace, geomspace=numpy.geomspace, ) function_re = re.compile(r"(?P[A-Za-z_]+)\((?P.*)\)") match = function_re.match(parameter_str) parameter_range = None if match: function = match.group("function") if function not in functions: raise NotSupportedError(f"Declaring range with {function} is not supported") args = literal_eval(match.group("arguments")) parameter_range = functions[function](*args) else: parameter_range = literal_eval(parameter_str) if not isinstance(parameter_range, Iterable) or isinstance( parameter_range, str ): parameter_range = (parameter_range,) if is_categorical: parameter_range = map(str, parameter_range) return parameter_range def _parse_pcs_parameters(lines): parameter_range_indicator = "-->" parameters = {} parameter_key = None is_categorical = False for line in lines: if ("{" in line or "[" in line) and not line.startswith("#"): parameter_key = line[: line.find(" ")] is_categorical = "{" in line if "#" not in line or parameter_range_indicator not in line: continue comment_pos = line.find("#") pos = line.find(parameter_range_indicator, comment_pos) parameter_str = line[pos + len(parameter_range_indicator) :].strip() parameter_range = _get_pcs_parameter_range(parameter_str, is_categorical) parameters[parameter_key] = parameter_range return parameters def _check_valid_config_space(config_space, parameters): base = config_space.get_default_configuration() for key, value in parameters.items(): if key in base: base[key] = value # ValueError if invalid value def _create_parameter_group(tool, group, parameters): PCS_KEY = "__pcs" pcs_parameters = {} use_pcs = PCS_KEY in parameters config_space = None if use_pcs: pcs_text = parameters.pop(PCS_KEY) lines = pcs_text.split("\n") config_space = pcs.read(lines) pcs_parameters = _parse_pcs_parameters(lines) ranged_enum_parameters = { key: value for key, value in parameters.items() if isinstance(parameters[key], list) } ranged_numbers_parameters = { key: str_to_range(value) for key, value in parameters.items() if isinstance(value, str) and is_range_str(value) } ranged_parameters = { **pcs_parameters, **ranged_enum_parameters, **ranged_numbers_parameters, } if len(ranged_parameters) == 0: parameter_group = ParameterGroup.create(name=group, tool=tool) for (key, value) in parameters.items(): Parameter.create(group=parameter_group, key=key, value=value) return constant_parameters = { key: value for key, value in parameters.items() if key not in ranged_parameters } tuples = [ [(key, value) for value in values] for key, values in ranged_parameters.items() ] for combination in itertools.product(*tuples): parameters = {**dict(combination), **constant_parameters} if use_pcs: _check_valid_config_space(config_space, parameters) combination_str = ",".join(f"{key}={value}" for key, value in combination) parameter_group = ParameterGroup.create( name=f"{group}[{combination_str}]", tool=tool ) for (key, value) in parameters.items(): Parameter.create(group=parameter_group, key=key, value=value) def _bootstrap_tools(config): logger.info("Bootstrapping and running setups on tools...") for tool_name, tool in config["tools"].items(): tool_module = import_class(tool["module"]) if not tool_module.is_ready(): tool_module.setup() version = import_class(tool["module"]).version() Tool.create(name=tool_name, module=tool["module"], version=version) if "parameters" not in tool: _create_parameter_group(tool["module"], "default", {}) continue for group, parameters in tool["parameters"].items(): _create_parameter_group(tool["module"], group, parameters) def _bootstrap_tasks(config): logger.info("Bootstrapping tasks...") for (group, task) in config["tasks"].items(): task_group = TaskGroup.create(name=group) source = None # @TODO use chain of command if task["type"] == "local": source = LocalSource(**task) elif task["type"] == "url": source = UrlSource(**task) elif task["type"] == "doi": source = DOISource(**task) else: raise NotImplementedError( f"No implementation for task source {task['type']}" ) files = source.setup() for file in files: Task.create(group=task_group, path=str(file)) def _register_steps(config): logger.info("Registering steps...") for step in itertools.chain.from_iterable(config["steps"].values()): import_class(step["module"]).register(step.get("config", {})) def _bootstrap_runs(config, output_dir, repeat=1): parameter_groups = ParameterGroup.select().iterator() tasks = Task.select().iterator() total = ParameterGroup.select().count() * Task.select().count() for (parameter_group, task) in tqdm( itertools.product(parameter_groups, tasks), desc="Bootstrapping runs", total=total, ): directory = ( Path(output_dir) / parameter_group.tool_id / parameter_group.name / task.group_id / Path(task.path).name ) with db.atomic(): for _ in range(repeat): Run.create( tool=parameter_group.tool_id, task=task, parameter_group=parameter_group, directory=directory, status=Run.PENDING, ) def bootstrap(config=None, output_dir=None, repeat=1): Path(output_dir).mkdir(parents=True, exist_ok=True) atexit.register(shutil.rmtree, output_dir) db_path = get_db_path(output_dir) init_db(db_path) _bootstrap_db(config) _bootstrap_tools(config) _bootstrap_tasks(config) _register_steps(config) _bootstrap_runs(config, output_dir, repeat) atexit.unregister(shutil.rmtree) @click.command(name="bootstrap") @click.option("-r", "--repeat", type=int, default=1) @click.option( "-d", "--output-dir", type=click.Path(), default="./output", required=True, show_default=True, ) @click.argument("config", type=click.Path(), default="./benchmark.yml") @server_info @common def cli(server_address, config, output_dir, **kwargs): config = read_config(config, resolve_files=True) context = zmq.Context() socket = context.socket(zmq.DEALER) socket.connect(server_address) payload = dict(config=config, output_dir=output_dir, **kwargs) send_event(socket, BOOTSTRAP, payload) if __name__ == "__main__": cli() PK!rUreprobench/core/db.pyfrom datetime import datetime from playhouse.apsw_ext import ( Model, Proxy, CharField, CompositeKey, DateTimeField, ForeignKeyField, IntegerField, TextField, ) db = Proxy() class BaseModel(Model): class Meta: database = db class Limit(BaseModel): key = CharField(max_length=32, primary_key=True) value = CharField() class TaskGroup(BaseModel): name = CharField(primary_key=True) class Task(BaseModel): group = ForeignKeyField(TaskGroup, backref="tasks") path = CharField(primary_key=True) class Tool(BaseModel): module = CharField(primary_key=True) name = CharField() version = CharField(null=True) class ParameterGroup(BaseModel): name = CharField() tool = ForeignKeyField(Tool, backref="parameter_groups") class Meta: indexes = ((("name", "tool"), True),) class Parameter(BaseModel): group = ForeignKeyField(ParameterGroup, backref="parameters") key = CharField() value = CharField() class Meta: primary_key = CompositeKey("group", "key") class BasePlugin(BaseModel): module = CharField(index=True) config = TextField() class Step(BasePlugin): RUN = "run" ANALYSIS = "analysis" CATEGORY_CHOICES = ((RUN, "Single run step"), (ANALYSIS, "Analysis step")) category = CharField(choices=CATEGORY_CHOICES, index=True) class Observer(BasePlugin): pass class Run(BaseModel): FAILED = -2 CANCELED = -1 PENDING = 0 SUBMITTED = 1 RUNNING = 2 DONE = 3 STATUS_CHOICES = ( (FAILED, "Failed"), (CANCELED, "Canceled"), (PENDING, "Pending"), (SUBMITTED, "Submitted"), (RUNNING, "Running"), (DONE, "Done"), ) created_at = DateTimeField(default=datetime.now) tool = ForeignKeyField(Tool, backref="runs") tool_version = CharField(null=True) parameter_group = ForeignKeyField(ParameterGroup, backref="runs") task = ForeignKeyField(Task, backref="runs") status = IntegerField(choices=STATUS_CHOICES, default=PENDING) directory = CharField(null=True) last_step = ForeignKeyField(Step, null=True) MODELS = (Limit, TaskGroup, Task, Tool, ParameterGroup, Parameter, Run, Step, Observer) PK!<reprobench/core/events.pyBOOTSTRAP = b"core:bootstrap" REQUEST_PENDING = b"server:request_pending" WORKER_JOIN = b"worker:join" WORKER_LEAVE = b"worker:leave" RUN_START = b"run:start" RUN_STEP = b"run:step" RUN_INTERRUPT = b"run:interrupt" RUN_FINISH = b"run:finish" PK!a_hhreprobench/core/exceptions.pyclass ExecutableNotFoundError(RuntimeError): pass class NotSupportedError(RuntimeError): pass PK!!N reprobench/core/observers.pyfrom functools import lru_cache from peewee import fn from reprobench.core.base import Observer from reprobench.core.update import update from reprobench.core.db import Limit, Run, Step from reprobench.core.events import ( BOOTSTRAP, REQUEST_PENDING, RUN_FINISH, RUN_INTERRUPT, RUN_START, RUN_STEP, WORKER_JOIN, ) from reprobench.utils import encode_message class CoreObserver(Observer): SUBSCRIBED_EVENTS = ( BOOTSTRAP, WORKER_JOIN, RUN_START, RUN_STEP, RUN_FINISH, REQUEST_PENDING, ) @classmethod @lru_cache(maxsize=1) def get_limits(cls): return {l.key: l.value for l in Limit.select()} @classmethod def get_run(cls, run_id): run = Run.get_by_id(run_id) if run is None: return None run.status = Run.SUBMITTED run.save() last_step = run.last_step_id or 0 runsteps = Step.select().where( (Step.category == Step.RUN) & (Step.id > last_step) ) limits = cls.get_limits() parameters = {p.key: p.value for p in run.parameter_group.parameters} run_dict = dict( id=run.id, task=run.task_id, tool=run.tool_id, directory=run.directory, parameters=parameters, steps=list(runsteps.dicts()), limits=limits, ) return run_dict @classmethod def get_pending_run_ids(cls): last_step = ( Step.select(fn.MAX(Step.id)).where(Step.category == Step.RUN).scalar() ) Run.update(status=Run.PENDING).where( (Run.status < Run.DONE) | (Run.last_step_id != last_step) ).execute() pending_runs = Run.select(Run.id).where(Run.status == Run.PENDING) return [r.id for r in pending_runs] @classmethod def handle_event(cls, event_type, payload, **kwargs): reply = kwargs.pop("reply") address = kwargs.pop("address") if event_type == BOOTSTRAP: update(**payload) elif event_type == REQUEST_PENDING: run_ids = cls.get_pending_run_ids() reply.send_multipart([address, encode_message(run_ids)]) elif event_type == WORKER_JOIN: run = cls.get_run(payload) reply.send_multipart([address, encode_message(run)]) elif event_type == RUN_INTERRUPT: Run.update(status=Run.PENDING).where(Run.id == payload).execute() elif event_type == RUN_START: run_id = payload.pop("run_id") Run.update(status=Run.RUNNING, **payload).where(Run.id == run_id).execute() elif event_type == RUN_STEP: step = Step.get(module=payload["step"]) Run.update(last_step=step).where(Run.id == payload["run_id"]).execute() elif event_type == RUN_FINISH: Run.update(status=Run.DONE).where(Run.id == payload).execute() PK!~~reprobench/core/schema.pyfrom strictyaml import Any, Enum, Int, Map, MapPattern, Optional, Regex, Seq, Str limits_schema = Map( { "time": Int(), Optional("memory", default=8192): Int(), Optional("output"): Int(), Optional("cores"): Int(), } ) module_schema = Regex(r"\.?\w+(\.\w+)*") plugin_schema = Map( {"module": module_schema, Optional("config"): MapPattern(Str(), Any())} ) task_sources = Enum(["local", "url"]) schema = Map( { "title": Str(), Optional("description"): Str(), "limits": limits_schema, "steps": Map( {"run": Seq(plugin_schema), Optional("analysis"): Seq(plugin_schema)} ), "observers": Seq(plugin_schema), "tasks": MapPattern(Str(), MapPattern(Str(), Any())), "tools": MapPattern( Str(), Map( { "module": module_schema, Optional("parameters"): MapPattern(Str(), MapPattern(Str(), Any())), } ), ), } ) PK!@ @ reprobench/core/server.pyfrom pathlib import Path import click import gevent import zmq.green as zmq from loguru import logger from playhouse.apsw_ext import APSWDatabase from reprobench.console.decorators import common, server_info from reprobench.core.bootstrap import bootstrap from reprobench.core.db import db, Observer from reprobench.core.events import BOOTSTRAP from reprobench.core.observers import CoreObserver from reprobench.utils import import_class, decode_message, get_db_path class BenchmarkServer(object): BACKEND_ADDRESS = "inproc://backend" def __init__(self, output_dir, frontend_address, **kwargs): db_path = get_db_path(output_dir) db.initialize(APSWDatabase(db_path)) self.bootstrapped = Path(db_path).exists() self.frontend_address = frontend_address self.observers = [CoreObserver] def wait_for_bootstrap(self): while True: address, event_type, payload = self.frontend.recv_multipart() logger.trace((address, event_type, payload)) if event_type == BOOTSTRAP: break payload = decode_message(payload) bootstrap(**payload) self.bootstrapped = True self.frontend.send_multipart([address, b"done"]) def loop(self): while True: address, event_type, payload = self.frontend.recv_multipart() logger.trace((address, event_type, payload)) self.backend.send_multipart([event_type, payload, address]) def run(self): self.context = zmq.Context() self.frontend = self.context.socket(zmq.ROUTER) self.frontend.bind(self.frontend_address) self.backend = self.context.socket(zmq.PUB) self.backend.bind(self.BACKEND_ADDRESS) logger.info(f"Listening on {self.frontend_address}...") if not self.bootstrapped: logger.info(f"Waiting for bootstrap event...") self.wait_for_bootstrap() self.observers += [ import_class(o.module) for o in Observer.select(Observer.module) ] observer_greenlets = [] for observer in self.observers: greenlet = gevent.spawn( observer.observe, self.context, backend_address=self.BACKEND_ADDRESS, reply=self.frontend, ) observer_greenlets.append(greenlet) serverlet = gevent.spawn(self.loop) logger.info(f"Ready to receive events...") serverlet.join() gevent.killall(observer_greenlets) @click.command(name="server") @click.option( "-d", "--output-dir", type=click.Path(), default="./output", show_default=True ) @server_info @common def cli(server_address, output_dir, **kwargs): server = BenchmarkServer(output_dir, server_address, **kwargs) server.run() if __name__ == "__main__": cli() PK!}7^B B reprobench/core/sysinfo.pyimport platform import psutil from cpuinfo import get_cpu_info from playhouse.apsw_ext import CharField, FloatField, ForeignKeyField, IntegerField from reprobench.core.base import Step, Observer from reprobench.core.db import BaseModel, Run, db from reprobench.utils import send_event class Node(BaseModel): hostname = CharField(primary_key=True) platform = CharField(null=True) arch = CharField(null=True) python = CharField(null=True) cpu = CharField(null=True) cpu_count = IntegerField(null=True) cpu_min_freq = FloatField(null=True) cpu_max_freq = FloatField(null=True) mem_total = IntegerField(null=True) mem_available = IntegerField(null=True) swap_total = IntegerField(null=True) swap_available = IntegerField(null=True) class RunNode(BaseModel): run = ForeignKeyField(Run, backref="run_node", primary_key=True) node = ForeignKeyField(Node, backref="runs") MODELS = (Node, RunNode) STORE_SYSINFO = b"sysinfo:store" class SystemInfoObserver(Observer): SUBSCRIBED_EVENTS = (STORE_SYSINFO,) @classmethod def handle_event(cls, event_type, payload, **kwargs): if event_type == STORE_SYSINFO: node = payload["node"] run = payload["run_id"] Node.insert(**node).on_conflict("ignore").execute() RunNode.insert(run=run, node=node["hostname"]).on_conflict( "replace" ).execute() class CollectSystemInfo(Step): @classmethod def register(cls, config=None): db.create_tables(MODELS) @classmethod def _get_system_info(cls): cpu_info = get_cpu_info() cpu_freq = psutil.cpu_freq() mem_info = psutil.virtual_memory() swap_info = psutil.swap_memory() info = {} info["platform"] = platform.platform(aliased=True) info["arch"] = cpu_info["arch"] info["python"] = cpu_info["python_version"] info["cpu"] = cpu_info["brand"] info["cpu_count"] = psutil.cpu_count() info["cpu_min_freq"] = cpu_freq.min info["cpu_max_freq"] = cpu_freq.max info["mem_total"] = mem_info.total info["mem_available"] = mem_info.available info["swap_total"] = swap_info.total info["swap_available"] = swap_info.free return info @classmethod def execute(cls, context, config=None): hostname = platform.node() info = cls._get_system_info() run_id = context["run"]["id"] payload = dict(run_id=run_id, node=dict(hostname=hostname, **info)) send_event(context["socket"], STORE_SYSINFO, payload) PK!yellreprobench/core/update.pyimport json from reprobench.core.db import Step from reprobench.utils import get_db_path, init_db, import_class def _update_step(category, steps): current_step_count = Step.select().where(Step.category == category).count() for step in steps[current_step_count:]: import_class(step["module"]).register(step.get("config", {})) Step.create( category=category, module=step["module"], config=json.dumps(step.get("config", None)), ) def update_steps(config): step_categories = ((Step.RUN, "run"), (Step.ANALYSIS, "analysis")) for category, key in step_categories: _update_step(category, config["steps"][key]) def update(config=None, output_dir=None, repeat=1): db_path = get_db_path(output_dir) init_db(db_path) update_steps(config) # TODO: update tasks, parameters, runs PK!9reprobench/core/worker.pyimport atexit import json from pathlib import Path import click import zmq from loguru import logger from reprobench.console.decorators import common, server_info from reprobench.core.events import ( RUN_FINISH, RUN_INTERRUPT, RUN_START, RUN_STEP, WORKER_JOIN, WORKER_LEAVE, ) from reprobench.utils import decode_message, import_class, send_event REQUEST_TIMEOUT = 15000 class BenchmarkWorker: """ Request for a work from server, if there's no more work, terminate. else, do the work and request for more. """ def __init__(self, server_address, run_id): self.server_address = server_address self.run_id = run_id def killed(self, run_id): send_event(self.socket, RUN_INTERRUPT, run_id) send_event(self.socket, WORKER_LEAVE) def run(self): atexit.register(self.killed, self.run_id) context = zmq.Context() self.socket = context.socket(zmq.DEALER) self.socket.connect(self.server_address) send_event(self.socket, WORKER_JOIN, self.run_id) run = decode_message(self.socket.recv()) tool = import_class(run["tool"]) context = {} context["socket"] = self.socket context["tool"] = tool context["run"] = run logger.info(f"Processing task: {run['directory']}") Path(run["directory"]).mkdir(parents=True, exist_ok=True) payload = dict(tool_version=tool.version(), run_id=self.run_id) send_event(self.socket, RUN_START, payload) for runstep in run["steps"]: logger.debug(f"Running step {runstep['module']}") step = import_class(runstep["module"]) config = json.loads(runstep["config"]) step.execute(context, config) payload = {"run_id": self.run_id, "step": runstep["module"]} send_event(self.socket, RUN_STEP, payload) send_event(self.socket, RUN_FINISH, self.run_id) send_event(self.socket, WORKER_LEAVE, self.run_id) @click.command("worker") @click.argument("run_id") @common @server_info def cli(server_address, run_id): worker = BenchmarkWorker(server_address, run_id) worker.run() if __name__ == "__main__": cli() PK!s݊tt reprobench/executors/__init__.pyfrom .base import RunStatisticObserver # from .runsolver import RunsolverExecutor from .psmon import PsmonExecutor PK! .reprobench/executors/base.pyfrom reprobench.core.base import Step, Observer from reprobench.executors.events import STORE_RUNSTATS from .db import RunStatistic class RunStatisticObserver(Observer): SUBSCRIBED_EVENTS = (STORE_RUNSTATS,) @classmethod def handle_event(cls, event_type, payload, **kwargs): if event_type == STORE_RUNSTATS: RunStatistic.create(**payload) class Executor(Step): def __init__(self, *args, **kwargs): pass def run( self, cmdline, out_path=None, err_path=None, input_str=None, directory=None, **kwargs ): raise NotImplementedError @classmethod def register(cls, config=None): RunStatistic.create_table() @classmethod def execute(cls, context, config=None): tool = context["tool"] executor = cls(context, config) tool(context).run(executor) PK!%o-NNreprobench/executors/db.pyfrom datetime import datetime from reprobench.core.db import BaseModel, Run from playhouse.apsw_ext import ( ForeignKeyField, FloatField, CharField, IntegerField, DateTimeField, ) class RunStatistic(BaseModel): TIMEOUT = "TLE" MEMOUT = "MEM" RUNTIME_ERR = "RTE" OUTPUT_LIMIT = "OLE" SUCCESS = "OK" VERDICT_CHOICES = ( (TIMEOUT, "Time Limit Exceeded"), (MEMOUT, "Memory Limit Exceeded"), (RUNTIME_ERR, "Runtime Error"), (OUTPUT_LIMIT, "Output Limit Exceeded"), (SUCCESS, "Run Successfully"), ) created_at = DateTimeField(default=datetime.now) run = ForeignKeyField( Run, backref="statistics", on_delete="cascade", primary_key=True ) cpu_time = FloatField(help_text="CPU Time (s)", null=True) wall_time = FloatField(help_text="Wall Clock Time (s)", null=True) max_memory = FloatField(help_text="Max Memory Usage (KiB)", null=True) return_code = IntegerField(help_text="Process Return Code", null=True) verdict = CharField(choices=VERDICT_CHOICES, max_length=3, null=True) PK!d,C,,reprobench/executors/events.pySTORE_RUNSTATS = b"executor:store_runstats" PK!&(R reprobench/executors/psmon.pyfrom loguru import logger try: from psmon import ProcessMonitor from psmon.limiters import CpuTimeLimiter, MaxMemoryLimiter, WallTimeLimiter except ImportError: logger.warning( "You may need to install the `psmon` extra to run with this executor." ) from reprobench.utils import send_event from .base import Executor from .db import RunStatistic from .events import STORE_RUNSTATS class PsmonExecutor(Executor): def __init__(self, context, config): self.socket = context["socket"] self.run_id = context["run"]["id"] if config is None: config = {} wall_grace = config.get("wall_grace", 15) self.nonzero_as_rte = config.get("nonzero_rte", True) limits = context["run"]["limits"] time_limit = float(limits["time"]) MB = 1024 * 1024 self.wall_limit = time_limit + wall_grace self.cpu_limit = time_limit self.mem_limit = float(limits["memory"]) * MB def compile_stats(self, stats): verdict = None if stats["error"] == TimeoutError: verdict = RunStatistic.TIMEOUT elif stats["error"] == MemoryError: verdict = RunStatistic.MEMOUT elif stats["error"] or (self.nonzero_as_rte and stats["return_code"] != 0): verdict = RunStatistic.RUNTIME_ERR else: verdict = RunStatistic.SUCCESS del stats["error"] return dict(run_id=self.run_id, verdict=verdict, **stats) def run( self, cmdline, out_path=None, err_path=None, input_str=None, directory=None, **kwargs, ): out_file = open(out_path, "wb") err_file = open(err_path, "wb") monitor = ProcessMonitor( cmdline, cwd=directory, stdout=out_file, stderr=err_file, input=input_str, freq=15, ) monitor.subscribe("wall_time", WallTimeLimiter(self.wall_limit)) monitor.subscribe("cpu_time", CpuTimeLimiter(self.cpu_limit)) monitor.subscribe("max_memory", MaxMemoryLimiter(self.mem_limit)) logger.debug(f"Running {directory}") stats = monitor.run() logger.debug(f"Finished {directory}") out_file.close() err_file.close() payload = self.compile_stats(stats) send_event(self.socket, STORE_RUNSTATS, payload) PK!Greprobench/managers/__init__.pyimport click from .local import LocalManager from .local import cli as local_cli from .slurm import SlurmManager from .slurm import cli as slurm_cli @click.group("manage") def cli(): pass cli.add_command(local_cli) cli.add_command(slurm_cli) PK!%$$reprobench/managers/base.pyimport zmq from reprobench.core.events import REQUEST_PENDING from reprobench.utils import send_event, decode_message class BaseManager(object): def __init__(self, server_address, **kwargs): self.server_address = server_address context = zmq.Context() self.socket = context.socket(zmq.DEALER) self.socket.connect(self.server_address) def prepare(self): pass def spawn_workers(self): raise NotImplementedError def get_pending_runs(self): send_event(self.socket, REQUEST_PENDING) self.queue = decode_message(self.socket.recv()) def wait(self): pass def stop(self): pass def run(self): self.prepare() self.get_pending_runs() self.spawn_workers() self.wait() PK!E//%reprobench/managers/local/__init__.pyfrom multiprocessing import cpu_count import click from loguru import logger from reprobench.console.decorators import server_info, common from reprobench.utils import read_config from .manager import LocalManager @click.command("local") @click.option("-w", "--num-workers", type=int, default=cpu_count(), show_default=True) @click.argument("command", type=click.Choice(("run",))) @server_info @common def cli(command, **kwargs): manager = LocalManager(**kwargs) if command == "run": manager.run() if __name__ == "__main__": cli() PK!QMNN$reprobench/managers/local/manager.pyimport atexit import time from multiprocessing import Pool from loguru import logger from tqdm import tqdm from reprobench.core.worker import BenchmarkWorker from reprobench.managers.base import BaseManager class LocalManager(BaseManager): def __init__(self, **kwargs): super().__init__(**kwargs) self.num_workers = kwargs.pop("num_workers") self.start_time = None self.workers = [] def exit(self): for worker in self.workers: worker.terminate() worker.join() logger.info(f"Total time elapsed: {time.perf_counter() - self.start_time}") def prepare(self): atexit.register(self.exit) self.start_time = time.perf_counter() @staticmethod def spawn_worker(job): server_address, run_id = job worker = BenchmarkWorker(server_address, run_id) worker.run() def spawn_workers(self): self.pool = Pool(self.num_workers) jobs = ((self.server_address, run_id) for run_id in self.queue) self.pool_iterator = self.pool.imap_unordered(self.spawn_worker, jobs) self.pool.close() def wait(self): progress_bar = tqdm(desc="Executing runs", total=len(self.queue)) for _ in self.pool_iterator: progress_bar.update() progress_bar.close() self.pool.join() PK!Ik%reprobench/managers/slurm/__init__.pyimport click from reprobench.console.decorators import common, server_info from reprobench.utils import read_config from .manager import SlurmManager @click.command("slurm") @click.option( "-d", "--output-dir", type=click.Path(), default="./output", required=True, show_default=True, ) @click.argument("command", type=click.Choice(("run", "stop"))) @click.argument("config", type=click.Path(), default="./benchmark.yml") @server_info @common def cli(command, *args, **kwargs): manager = SlurmManager(*args, **kwargs) if command == "run": manager.run() elif command == "stop": manager.stop() if __name__ == "__main__": cli() PK!ߥ 0 and output != b"None assigned\n": return output.decode().strip() PK!p'reprobench/statistics/plots/__init__.pyfrom .cactus import CactusPlot PK!=6#reprobench/statistics/plots/base.pyfrom pathlib import Path from reprobench.core.base import Step try: import papermill as pm except ImportError: pass class NotebookExecutor(Step): INPUT_NOTEBOOK = None DEFAULT_OUTPUT = None @classmethod def execute(cls, context, config=None): if config is None: config = {} output_dir = context.get("output_dir", None) output = Path(output_dir) / config.get("output", cls.DEFAULT_OUTPUT) output.parent.mkdir(parents=True, exist_ok=True) parameters = dict(db_path=context.get("db_path"), **config) pm.execute_notebook(cls.INPUT_NOTEBOOK, str(output), parameters=parameters) PK!8V.reprobench/statistics/plots/cactus/__init__.pyimport os from reprobench.statistics.plots.base import NotebookExecutor DIR = os.path.dirname(__file__) class CactusPlot(NotebookExecutor): DEFAULT_OUTPUT = "output/statistics/cactus.ipynb" INPUT_NOTEBOOK = os.path.join(DIR, "template.ipynb") PK! 1reprobench/statistics/plots/cactus/template.ipynb{ "cells": [ { "cell_type": "code", "source": [ "db_path = \"benchmark.db\"\n", "measure = \"cpu_time\"" ], "outputs": [], "execution_count": null, "metadata": { "collapsed": false, "outputHidden": false, "inputHidden": false, "tags": [ "parameters" ] } }, { "cell_type": "code", "source": [ "from reprobench.utils import init_db\n", "init_db(db_path)" ], "outputs": [], "execution_count": null, "metadata": { "collapsed": false, "outputHidden": false, "inputHidden": false } }, { "cell_type": "code", "source": [ "import itertools\n", "\n", "import pandas as pd\n", "import seaborn as sns\n", "\n", "from reprobench.core.db import Run, Tool, ParameterGroup\n", "from reprobench.executors.db import RunStatistic\n", "\n\n", "def cactus_plot(measure, **kwargs):\n", " cactus_df = pd.DataFrame()\n", " \n", " for group in ParameterGroup.select():\n", " tool_name = f\"{group.tool_id}_{group.name}\"\n", " measure_field = getattr(RunStatistic, measure)\n", " values_query = (\n", " RunStatistic\n", " .select(measure_field)\n", " .join(Run)\n", " .where(Run.tool_id == group.tool_id)\n", " .where(Run.parameter_group_id == group.id)\n", " .order_by(measure_field)\n", " )\n", " series = pd.Series(\n", " data=[*itertools.chain.from_iterable(values_query.tuples())],\n", " name=tool_name,\n", " ).sort_values()\n", " cactus_df = cactus_df.append(series, sort=False)\n", " \n", " cactus_df = cactus_df.transpose().reset_index(drop=True)\n", " \n", " return sns.scatterplot(data=cactus_df, **kwargs)" ], "outputs": [], "execution_count": null, "metadata": { "collapsed": false, "outputHidden": false, "inputHidden": false } }, { "cell_type": "code", "source": [ "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as ticker\n", "\n", "fig, ax = plt.subplots(figsize=(8, 6))\n", "plt.xticks()\n", "plt.xlabel(\"Instance solved\")\n", "plt.ylabel(\"Time (s)\")\n", "cactus_plot(measure, ax=ax)\n", "plt.show()" ], "outputs": [], "execution_count": null, "metadata": { "collapsed": false, "outputHidden": false, "inputHidden": false } } ], "metadata": { "kernel_info": { "name": "python3" }, "language_info": { "name": "python", "version": "3.7.2", "mimetype": "text/x-python", "codemirror_mode": { "name": "ipython", "version": 3 }, "pygments_lexer": "ipython3", "nbconvert_exporter": "python", "file_extension": ".py" }, "kernelspec": { "name": "python3", "language": "python", "display_name": "Python 3" }, "nteract": { "version": "0.12.3" } }, "nbformat": 4, "nbformat_minor": 4 }PK!&++(reprobench/statistics/tables/__init__.pyfrom .run import RunTable, RunSummaryTable PK!Qk[aa$reprobench/statistics/tables/base.pyfrom pathlib import Path from reprobench.core.base import Step class PandasExporter(Step): @classmethod def get_dataframe(cls, config): raise NotImplementedError @classmethod def save_df(cls, df, output): if output.endswith(".csv"): df.to_csv(output) elif output.endswith(".json"): df.to_json(output) else: raise NotImplementedError @classmethod def execute(cls, context, config=None): if config is None: config = {} output_dir = context.get("output_dir", None) output = Path(output_dir) / config.pop("output") output.parent.mkdir(parents=True, exist_ok=True) df = cls.get_dataframe(config) # remove duplicated columns df = df.loc[:, ~df.columns.duplicated()] cls.save_df(df, str(output)) PK!r#reprobench/statistics/tables/run.pyfrom reprobench.core.db import ParameterGroup, Run, db from reprobench.executors.db import RunStatistic from reprobench.utils import import_class from .base import PandasExporter try: import pandas as pd except ImportError: pass class RunTable(PandasExporter): @classmethod def get_dataframe(cls, config): joins = config.get("joins", []) query = Run.select() for model_class in joins: model = import_class(model_class) query = query.join_from(Run, model).select_extend( *model._meta.fields.values() ) sql, params = query.sql() return pd.read_sql_query(sql, db, params=params) class RunSummaryTable(PandasExporter): DEFAULT_COLUMNS = ("cpu_time", "wall_time", "max_memory") @classmethod def get_dataframe(cls, config): columns = config.get("columns", cls.DEFAULT_COLUMNS) tool_names = [ f"{group.tool_id}_{group.name}" for group in ParameterGroup.select() ] multiindex = pd.MultiIndex.from_product((tool_names, columns)) df = pd.DataFrame(index=multiindex).transpose() for group in ParameterGroup.select(): tool_name = f"{group.tool_id}_{group.name}" query = ( RunStatistic.select() .join(Run) .where(Run.tool_id == group.tool_id) .where(Run.parameter_group_id == group.id) ) sql, params = query.sql() tool_df = pd.read_sql(sql, db, params=params) for col in columns: df.loc(axis=1)[tool_name, col] = tool_df[col] return df.describe() PK!ϗ?reprobench/task_sources/base.pyclass BaseTaskSource(object): def __init__(self, path=None, **kwargs): self.path = path def setup(self): return [] PK!11'reprobench/task_sources/doi/__init__.pyfrom reprobench.task_sources.url import UrlSource from reprobench.task_sources.doi.zenodo import ZenodoHandler, ZenodoSandboxHandler class DOISource(UrlSource): handlers = [ZenodoHandler, ZenodoSandboxHandler] def __init__(self, doi, **kwargs): super().__init__(**kwargs) self.doi = doi for handler in self.handlers: if handler.is_compatible(self.doi): self.urls = handler.get_urls(self.doi) break else: raise NotImplementedError(f"No handler for doi: {doi}") PK!RO#reprobench/task_sources/doi/base.pyclass BaseDOIHandler(object): @classmethod def is_compatible(cls, doi): return False @classmethod def get_urls(cls, doi): return [] PK!Sʺ%reprobench/task_sources/doi/zenodo.pyimport requests from reprobench.task_sources.doi.base import BaseDOIHandler class ZenodoHandler(BaseDOIHandler): doi_prefix = "10.5281/zenodo." api_url = "https://zenodo.org/api" @classmethod def is_compatible(cls, doi): return doi.startswith(cls.doi_prefix) @classmethod def get_urls(cls, doi): record_id = doi[len(cls.doi_prefix) :] # remove doi_prefix url = "{}/records/{}".format(cls.api_url, record_id) record = requests.get(url).json() return [file["links"]["self"] for file in record["files"]] class ZenodoSandboxHandler(ZenodoHandler): doi_prefix = "10.5072/zenodo." api_url = "https://sandbox.zenodo.org/api" PK!z reprobench/task_sources/local.pyfrom pathspec import PathSpec from pathlib import Path from .base import BaseTaskSource class LocalSource(BaseTaskSource): def __init__(self, path=None, patterns="", **kwargs): super().__init__(path) self.patterns = patterns def setup(self): spec = PathSpec.from_lines("gitwildmatch", self.patterns.splitlines()) matches = spec.match_tree(self.path) return map(lambda match: Path(self.path).resolve() / match, matches) PK!WWreprobench/task_sources/url.pyfrom pathlib import Path from loguru import logger from reprobench.utils import download_file, extract_archives from .local import LocalSource class UrlSource(LocalSource): def __init__( self, urls=None, path=None, patterns="", skip_existing=True, extract_archives=True, **kwargs, ): super().__init__(path, patterns=patterns) self.urls = urls or [] self.extract_archives = extract_archives self.skip_existing = skip_existing def setup(self): root = Path(self.path) root.mkdir(parents=True, exist_ok=True) for url in self.urls: filename = url.split("/")[-1].split("?")[0] path = root / filename if not path.exists() or not self.skip_existing: logger.debug(f"Downloading {url} to {path}") download_file(url, path) else: logger.debug(f"Skipping already downloaded file {path}") if self.extract_archives: extract_archives(path) return super().setup() PK!чIreprobench/tools/executable.pyfrom pathlib import Path from loguru import logger from reprobench.core.base import Tool class ExecutableTool(Tool): name = "Basic Executable Tool" path = None prefix = "--" @classmethod def is_ready(cls): return True def get_arguments(self): return [f"{self.prefix}{key}={value}" for key, value in self.parameters.items()] def get_cmdline(self): return [self.path, *self.get_arguments()] def get_out_path(self): return Path(self.cwd) / "run.out" def get_err_path(self): return Path(self.cwd) / "run.err" def get_output(self): return self.get_out_path().read_bytes() def get_error(self): return self.get_err_path().read_bytes() def run(self, executor): logger.debug([*self.get_cmdline(), self.task]) executor.run( [*self.get_cmdline(), self.task], directory=self.cwd, out_path=self.get_out_path(), err_path=self.get_err_path(), ) PK!'"Creprobench/utils.pyimport importlib import re import tarfile import zipfile from collections.abc import Iterable from pathlib import Path from shutil import which import requests import strictyaml from tqdm import tqdm from reprobench.core.db import db from reprobench.core.exceptions import ExecutableNotFoundError from reprobench.core.schema import schema try: import msgpack from playhouse.apsw_ext import APSWDatabase except ImportError: pass def find_executable(executable): path = which(executable) if path is None: raise ExecutableNotFoundError return path def import_class(path): module_path, tail = ".".join(path.split(".")[:-1]), path.split(".")[-1] module = importlib.import_module(module_path) return getattr(module, tail) def copyfileobj(fsrc, fdst, callback, length=16 * 1024): while True: buf = fsrc.read(length) if not buf: break fdst.write(buf) callback(len(buf)) def download_file(url, dest): r = requests.get(url, stream=True) with tqdm( total=int(r.headers.get("content-length", 0)), unit="B", unit_scale=True, unit_divisor=1024, ) as progress_bar: progress_bar.set_postfix(file=Path(dest).name, refresh=False) with open(dest, "wb") as f: copyfileobj(r.raw, f, progress_bar.update) ranged_numbers_re = re.compile(r"(?P\d+)\.\.(?P\d+)(\.\.(?P\d+))?") def is_range_str(range_str): return ranged_numbers_re.match(range_str) def str_to_range(range_str): matches = ranged_numbers_re.match(range_str).groupdict() start = int(matches["start"]) end = int(matches["end"]) if matches["step"]: return range(start, end, int(matches["step"])) return range(start, end) def encode_message(obj): return msgpack.packb(obj, use_bin_type=True) def decode_message(msg): return msgpack.unpackb(msg, raw=False) def send_event(socket, event_type, payload=None): """ Used in the worker with a DEALER socket """ socket.send_multipart([event_type, encode_message(payload)]) def recv_event(socket): """ Used in the SUB handler """ event_type, payload, address = socket.recv_multipart() return event_type, decode_message(payload), address def get_db_path(output_dir): return str((Path(output_dir) / f"benchmark.db").resolve()) def init_db(db_path): database = APSWDatabase(db_path) db.initialize(database) def resolve_files_uri(root): protocol = "file://" iterator = None if isinstance(root, dict): iterator = root elif isinstance(root, list) or isinstance(root, tuple): iterator = range(len(root)) for k in iterator: if isinstance(root[k], str) and root[k].startswith(protocol): root[k] = Path(root[k][len(protocol) :]).read_text() elif isinstance(root[k], Iterable) and not isinstance(root[k], str): resolve_files_uri(root[k]) def read_config(config_path, resolve_files=False): with open(config_path, "r") as f: config_text = f.read() config = strictyaml.load(config_text, schema=schema).data if resolve_files: resolve_files_uri(config) return config def extract_zip(path, dest): if not dest.is_dir(): with zipfile.ZipFile(path, "r") as f: f.extractall(dest) def extract_tar(path, dest): if not dest.is_dir(): with tarfile.TarFile.open(path) as f: f.extractall(dest) def extract_archives(path): extract_path = Path(path).with_name(path.stem) if zipfile.is_zipfile(path): extract_zip(path, extract_path) elif tarfile.is_tarfile(path): extract_tar(path, extract_path) PK!HJ.:+reprobench-0.9.2.dist-info/entry_points.txtN+I/N.,()*J-(OJKΰE0r3s2PK!XB33"reprobench-0.9.2.dist-info/LICENSEMIT License Copyright (c) 2019 Rakha Kanz Kautsar Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK!Hu)GTU reprobench-0.9.2.dist-info/WHEEL HM K-*ϳR03rOK-J,/R(O-)$qzd&Y)r$UV&UrPK!Hq.N#reprobench-0.9.2.dist-info/METADATAUMo8W)P4@DGl7nݠm4(K\"CRΪCɱ^k͛82¿X1Ih'bĀ6j %σiLG4 n*)䓇k̔!o`jUBj!㗫wW  猼cw|T2< A2QJlx]#1Ap^0kL.|$4J^@1+FeI)ʌgeVzo_ݟo:iH79ƄndE;'PVr'j`wõL֡msG2`+8fbӟl;UDf5q1BcҢJL;9K*hOTf|vx<|pH)+붬ΠkY2ܣl3L(ZΈv8]n5FV~d6P?yALGMw~w!ו{2c^.*l9t_f`}]dOwʦ.E"?XnvsU:T~5n"Ru_ͭ|}\5K3*!1hbs0ISm#|I}% r˯ Q (RKt>-nvmTle٤ 3U8*tuոn# _ʺ=ϐVCc PK!HU(>!reprobench-0.9.2.dist-info/RECORDI[}3xGQ@aB wyQqKo(~{kK{=|W} 8*b5ځdFK6\LP1=E8.¶4BYXxujb- dUz9JGg!gHjWTnA:a17P]چ8D_]wptOHS9)F饼*gHS }ÜKN^gD^b2߼@XX^Y T1~]>YdZ1d".}k rd-r(rŌL: s~5"EށK4C keW6q0^{F&eMi|jeG7j~3)I#3,1<-";j7븜eR LEh|dڙG@R-8Q6o3'A:13ywM%|FStOp.O^]T3\Ej_hWsE |A=Q|K۷z#`X//_m |M>7**o2gg*>5ƁosdQ/ @p#[JO卐]ٍ<"Wḕz{HbZ/2@FodYGA*&bѲ&Ѩ!!KRDr-_(6e,jݼ^%y9БDW6`>rOהqC)$]TC>n2EW o@fMW'SEQyyTʀ=byKm >y< #<e۫SS{WI-bNΞ2w_DysݯrI vQNqSidI5K5J5 g{pN4 %*i zZ£ vOwh93H YGB!v7gJr7_ IՔ_7=mTt!5ԍ'~b/bO/r v-?uH_(?<4lH> *E΄}Y0 Agb\{lȚĢ7;[+*^D.;$>Kdu35j'V\TMбNKT᜼h)v;x%m뱋%X:C0A(Ftϓv5`q +vHrQL1ޙ9s3̲\D #8$JRR~pN6{x?_D?{?4FxyLAʶ#-!reprobench-0.9.2.dist-info/RECORDPK00Y