PKH]]emva1288/__init__.py from ._version import get_versions __version__ = get_versions()['version'] del get_versions PKHlAlAemva1288/_version.py # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.16 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" keywords = {"refnames": git_refnames, "full": git_full} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "emva1288-" cfg.versionfile_source = "emva1288/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) return None return stdout def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
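For example (illustrative, not from the original sources): with the parentdir_prefix "emva1288-" set in get_config(), an sdist unpacked into a directory named emva1288-1.2.3 would yield the version string "1.2.3".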
""" dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%s', but '%s' doesn't start with " "prefix '%s'" % (root, dirname, parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs-tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %s" % root) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. 
Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
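# Illustrative note (comment added here, paths are example values only): with
# the shipped configuration cfg.versionfile_source == "emva1288/_version.py",
# the loop below applies os.path.dirname() twice, so a __file__ of
# /src/emva1288/_version.py leaves root == /src, which is where .git is
# expected to live.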
for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree"} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"}
emva1288/report/__init__.py from emva1288.report.report import info_setup from emva1288.report.report import info_basic from emva1288.report.report import info_marketing from emva1288.report.report import info_op from emva1288.report.report import Report1288
emva1288/report/report.py import jinja2 import os import shutil from distutils.dir_util import copy_tree from collections import OrderedDict import posixpath from matplotlib.figure import Figure from matplotlib.backends.backend_pdf import FigureCanvas import numpy as np from emva1288.process import Results1288 from emva1288.process.plotting import EVMA1288plots def info_setup(**kwargs): """Container for setup information. All kwargs are used to update the setup information dictionary. Returns ------- dict : A dictionary containing the setup information. The keys are: - *'Light source'* : The light source type (e.g. integrating sphere). - *'Light source non uniformity'* : The non-uniformity of the light source. - *'Irradiation calibration accuracy'* : The irradiation calibration uncertainty. - *'Irradiation measurement error'* : The irradiation measurement uncertainty. - *'Standard version'* : The EMVA1288 standard version number used. """ s = OrderedDict() s['Light source'] = None s['Light source non uniformity'] = None s['Irradiation calibration accuracy'] = None s['Irradiation measurement error'] = None s['Standard version'] = None s.update(kwargs) return s def info_basic(**kwargs): """Container for basic information. All kwargs are used to update the basic information dictionary for the report. Returns ------- dict : A dictionary containing the basic information for the report. The keys are: - *'vendor'* : The name of the vendor that manufactures the camera. - *'model'* : The model of the tested camera. - *'data_type'* : The label given to the data used for the test. - *'sensor_type'* : The type of the tested sensor within the camera. - *'sensor_diagonal'* : The number of pixels in the sensor diagonal. - *'lens_category'* : The lens category used for the test. - *'resolution'* : The camera's resolution. - *'pixel_size'* : The sensor's pixel size. - *'readout_type'* : The readout type of the sensor (for CCD sensors). - *'transfer_type'* : The transfer type of the sensor (for CCDs). - *'shutter_type'* : The shutter type of the sensor (for CMOS sensors). - *'overlap_capabilities'* : The overlap capabilities of the sensor (for CMOS sensors). - *'maximum_readout_rate'* : The camera's maximal readout rate. - *'dark_current_compensation'* : If the camera supports dark current compensation, specify it in this entry. - *'interface_type'* : The camera's interface type. - *'qe_plot'* : The sensor's quantum efficiency plots.
""" b = {'vendor': None, 'model': None, 'data_type': None, 'sensor_type': None, 'sensor_diagonal': None, 'lens_category': None, 'resolution': None, 'pixel_size': None, ######### # For CCD 'readout_type': None, 'transfer_type': None, # For CMOS 'shutter_type': None, 'overlap_capabilities': None, ######### 'maximum_readout_rate': None, 'dark_current_compensation': None, 'interface_type': None, 'qe_plot': None } b.update(kwargs) return b def info_marketing(**kwargs): """Container for marketing informations. All kwargs are used to update the returned dictionary containing the marketing informations. Returns ------- dict : A dictionary containing the marketing informations. The keys are: - *'logo'* : The path to the logo icon. - *'watermark'* : A text that will be printed on every page of the report in the background in transparent red. - *'missingplot'* : The path to a missing plot icon. - *'cover_page'* : The path to a custom cover page for the report. """ m = {'logo': None, 'watermark': None, 'missingplot': None, 'cover_page': None } m.update(kwargs) return m def info_op(): """Container for operation points informations. The returned dictionary must be filled after calling this function. Returns ------- dict : An empty dictionary with the following keys: - *'name'* : The test name. - *'id'* : The test id. - *'summary_only'* : True or False, tells if, for a specific OP, the report should do a summary of the test instead of a full description. - *'results'* : The results of the test. - *'camera_settings'* : (OrderedDict) the dictionary of the camera's settings for the test. - *'test_parameters'* : (OrderedDict) the dictionary of the other test parameters. """ d = {'name': None, 'id': None, 'summary_only': None, 'results': None, 'camera_settings': OrderedDict(), 'test_parameters': OrderedDict()} return d _CURRDIR = os.path.abspath(os.path.dirname(__file__)) class Report1288(object): """Class that has the purpose of creating a pdf report of one or more optical tests. This class only creates the report TeX files using the templates. TeX files must be compiled afterwards to generate the pdf files. """ def __init__(self, outdir, setup=None, basic=None, marketing=None, cover_page=False): """Report generator init method. Informations stored in the report can be specified by the kwargs passed to the object. It can be useful to use the :func:`info_marketing`, :func:`info_setup` and :func:`info_basic` functions to generate the corresponding dictionaries. The report generator uses `jinja2` to render the templates. Upon init, it calls the :meth:`template_renderer` method to get the `jinja2` object that will interact with the templates. Then it creates the output directories and files that will contain the output files. To create the report for different operating point/tests, one must call the :meth:`add` method to add each test he wants to publish in the report. Then, to conclude the report, he must call the :meth:`latex` method to generate the TeX files. Parameters ---------- outdir : str The path to the directory that will contain the report files. setup : dict, optional A dictionary containing the setup informations. If None, the report uses the dictionary of the :func:`info_setup` function. basic : dict, optional A dictionary containing basic informations about the test. If None, the report generator takes the dictionary from the :func:`info_basic` function. marketing : dict, optional A dictionary containing cover_page : str, optional The path to the cover page for the report. 
If False, no cover page will be included in the report. """ self._outdir = os.path.abspath(outdir) self.renderer = self.template_renderer() self.ops = [] self.marketing = marketing or info_marketing() self.basic = basic or info_basic() self.setup = setup or info_setup() self.cover_page = cover_page self._make_dirs(outdir) @staticmethod def template_renderer(dirname=None): """Method that creates the renderer for the TeX report file. Uses the :class:`jinja2:jinja2.Environment` object to create the renderer. It also registers filters on the environment for rendering missing numbers and other missing values. Parameters ---------- dirname : str, optional The path to the template directory containing the TeX templates. If None, it will get the templates from the `./templates/` directory. Returns ------- The renderer. """ if not dirname: dirname = os.path.join(_CURRDIR, 'templates') renderer = jinja2.Environment( block_start_string='%{', block_end_string='%}', variable_start_string='%{{', variable_end_string='%}}', comment_start_string='%{#', comment_end_string='%#}', loader=jinja2.FileSystemLoader(dirname)) def missingnumber(value, precision): # Filter for missing numbers if value in (None, np.nan): return '-' t = '{:.%df}' % precision return t.format(value) def missingfilter(value, default='-'): # General filter for missing objects that are not numbers if value in (None, np.nan): return default return value renderer.filters['missing'] = missingfilter renderer.filters['missingnumber'] = missingnumber return renderer def _make_dirs(self, outdir): """Create the directory structure for the report. Existing directories are reused. """ try: os.makedirs(self._outdir) except FileExistsError: # pragma: no cover pass print('Output Dir: ', self._outdir) files_dir = os.path.join(self._outdir, 'files') try: os.makedirs(files_dir) except FileExistsError: # pragma: no cover pass currfiles = os.path.join(_CURRDIR, 'files') copy_tree(currfiles, files_dir) upload_dir = os.path.join(self._outdir, 'upload') try: os.makedirs(upload_dir) except FileExistsError: # pragma: no cover pass def uploaded_file(fname, default): if fname: # pragma: no cover shutil.copy(os.path.abspath(fname), upload_dir) v = posixpath.join( 'upload', os.path.basename(fname)) else: v = posixpath.join('files', default) return v self.marketing['logo'] = uploaded_file(self.marketing['logo'], 'missinglogo.pdf') self.marketing['missingplot'] = uploaded_file( self.marketing['missingplot'], 'missingplot.pdf') self.basic['qe_plot'] = uploaded_file(self.basic['qe_plot'], 'missingplot.pdf') def _write_file(self, name, content): # write content into a file fname = os.path.join(self._outdir, name) with open(fname, 'w') as f: f.write(content) return fname def _stylesheet(self): # generate the stylesheet content stylesheet = self.renderer.get_template('emvadatasheet.sty') return stylesheet.render(marketing=self.marketing, basic=self.basic) def _report(self): # Generate the report contents report = self.renderer.get_template('report.tex') return report.render(marketing=self.marketing, basic=self.basic, setup=self.setup, operation_points=self.ops, cover_page=self.cover_page) def latex(self): """Generate the report LaTeX files. """ self._write_file('emvadatasheet.sty', self._stylesheet()) self._write_file('report.tex', self._report()) def _results(self, data): return Results1288(data) def _plots(self, results, id_): """Create the plots for the report. The report will include all the plots contained in the `~emva1288.process.plotting.EVMA1288plots` list.
All plots will be saved in pdf format in the output directory. """ names = {} savedir = os.path.join(self._outdir, id_) try: os.mkdir(savedir) except FileExistsError: # pragma: no cover pass for plt_cls in EVMA1288plots: figure = Figure() _canvas = FigureCanvas(figure) plot = plt_cls(figure) plot.plot(results) plot.rearrange() fname = plt_cls.__name__ + '.pdf' figure.savefig(os.path.join(savedir, fname)) names[plt_cls.__name__] = posixpath.join(id_, fname) return names def add(self, op, data, results=None): """Method that adds an operation point to the report. The supplied data are passed through a :class:`~emva1288.process.results.Results1288` object to be processed for the report. This method also creates the plots that will appear in the report. Parameters ---------- op : dict The dictionary containing the operation point information. This dictionary must absolutely contain a 'name' key. See the :func:`info_op` function to get an idea of what keys to provide. data : dict The corresponding operation point data. It must be able to be processed by an instance of the :class:`~emva1288.process.results.Results1288` class. """ n = len(self.ops) + 1 op['id'] = 'OP%d' % (n) if not op['name']: op['name'] = op['id'] if not results: results = self._results(data) op['results'] = results.results_by_section results.id = n op['plots'] = self._plots(results, op['id']) self.ops.append(op)
emva1288/report/templates/report.tex~ \documentclass[a4paper,twoside,12pt,american,hidelinks]{article} \usepackage[T1]{fontenc} \usepackage[cp1257]{inputenc} \usepackage{lmodern} \usepackage{graphicx} \usepackage{fancyhdr} \usepackage{xcolor} \usepackage{emvadatasheet} \usepackage{lastpage} \usepackage[printwatermark]{xwatermark} \usepackage{tikz} \usepackage{multicol} \title{} \author{} \date{} \begin{document} \newcommand{\TheReportSection}{} \newcommand{\ReportSection}[1]{\renewcommand{\TheReportSection}{#1}} %{ if marketing.watermark %} \newsavebox\wtmkbox \savebox\wtmkbox{\tikz[color=red,opacity=0.3]\node{%{{marketing.watermark%}}};} \newwatermark*[ allpages, angle=45.0, scale=8.0, xpos=-20, ypos=15 ]{\usebox\wtmkbox} %{ endif %} %Cover preamble \ReportSection{EMVA\,1288 Summary Sheet} \pagestyle{fancy} \scriptsize %Cover header \textbf{EMVA\,1288 Summary Sheet} \vspace*{5mm} \begin{minipage}[t]{0.975\linewidth} \vspace*{2mm} This datasheet describes the specification according to the standard 1288 Standard for Characterization and Presentation of Specification Data for Image Sensors and Cameras of European Machine Vision Association (EMVA) (See www.standard1288.org).
\vspace*{1mm} \vspace*{1mm} \end{minipage} \vspace*{5mm} %{ include 'general_info.tex' %} %{ for op in operation_points %} %{ include 'op.tex' %} %{ endfor %} \end{document} PK2G,!emva1288/report/templates/op.tex~\newpage \ReportSection{Operation Point: \textbf%{{ op.name %}}} \underline{\textsl{Operation Point: \textbf{%{{ op.name %}}}\label{%{{ op.id %}}}}} %{ include 'op_header.tex' %} \\[10mm] %Plots \begin{minipage}[t]{0.690\linewidth} \includegraphics[width=0.95\linewidth,keepaspectratio]{%{{op.plots.PlotPTC %}}} \includegraphics[width=0.95\linewidth,keepaspectratio]{%{{op.plots.PlotSNR %}}} \end{minipage} %Results \begin{minipage}[t]{0.290\linewidth} \begin{tabular}{lrl} \multicolumn{3}{c}{\textbf{Results}} \\[2mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.QE.short %}}$^*$}} \\ %{{ op.results.sensitivity.QE.symbol %}} & %{{ '%.2f' % op.results.sensitivity.QE.value %}} & %{{ op.results.sensitivity.QE.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.K.short %}}}} \\ %{{ op.results.sensitivity.K.symbol %}} & %{{ '%.3f' % op.results.sensitivity.K.value %}} & %{{ op.results.sensitivity.K.unit %}} \\ %{{ op.results.sensitivity.inverse_K.symbol %}} & %{{ '%.3f' % op.results.sensitivity.inverse_K.value %}} & %{{ op.results.sensitivity.inverse_K.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Temporal dark noise}} \\ %{{ op.results.sensitivity.sigma_y_dark.symbol %}} & %{{ '%.3f' % op.results.sensitivity.sigma_y_dark.value %}} & %{{ op.results.sensitivity.sigma_y_dark.unit %}} \\ %{{ op.results.sensitivity.sigma_d.symbol %}} & %{{ '%.3f' % op.results.sensitivity.sigma_d.value %}} & %{{ op.results.sensitivity.sigma_d.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.SNR_max.short %}}}} \\ %{{ op.results.sensitivity.SNR_max.symbol %}} & %{{ '%.0f' % op.results.sensitivity.SNR_max.value %}} \\ %{{ op.results.sensitivity.SNR_max_dB.symbol %}} & %{{ '%.2f' % op.results.sensitivity.SNR_max_dB.value %}} & %{{ op.results.sensitivity.SNR_max_dB.unit %}} \\ %{{ op.results.sensitivity.SNR_max_bit.symbol %}} & %{{ '%.1f' % op.results.sensitivity.SNR_max_bit.value %}} & %{{ op.results.sensitivity.SNR_max_bit.unit %}} \\ %{{ op.results.sensitivity.inverse_SNR_max.symbol %}} & %{{ '%.3f' % op.results.sensitivity.inverse_SNR_max.value %}} & %{{ op.results.sensitivity.inverse_SNR_max.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.u_p_min.short %}}}} \\ %{{ op.results.sensitivity.u_p_min.symbol %}}$^*$ & %{{ op.results.sensitivity.u_p_min.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_p_min.unit %}} \\ %{{ op.results.sensitivity.u_p_min_area.symbol %}}$^*$ & %{{ op.results.sensitivity.u_p_min_area.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_p_min_area.unit %}} \\ %{{ op.results.sensitivity.u_e_min.symbol %}} & %{{ '%.3f' % op.results.sensitivity.u_e_min.value %}} & %{{ op.results.sensitivity.u_e_min.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.u_p_sat.short %}}}} \\ %{{ op.results.sensitivity.u_p_sat.symbol %}}$^*$ & %{{ '%.0f' % op.results.sensitivity.u_p_sat.value %}} & %{{ op.results.sensitivity.u_p_sat.unit %}} \\ %{{ op.results.sensitivity.u_p_sat_area.symbol %}}$^*$ & %{{ op.results.sensitivity.u_p_sat_area.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_p_sat_area.unit %}} \\ %{{ op.results.sensitivity.u_e_sat.symbol %}} & %{{ '%.0f' % op.results.sensitivity.u_e_sat.value %}} & %{{ op.results.sensitivity.u_e_sat.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ 
op.results.sensitivity.DR.short %}}}} \\ %{{ op.results.sensitivity.DR.symbol %}} & %{{ '%.0f' % op.results.sensitivity.DR.value %}} \\ %{{ op.results.sensitivity.DR_dB.symbol %}} & %{{ '%.1f' % op.results.sensitivity.DR_dB.value %}} & %{{ op.results.sensitivity.DR_dB.unit %}} \\ %{{ op.results.sensitivity.DR_bit.symbol %}} & %{{ '%.1f' % op.results.sensitivity.DR_bit.value %}} & %{{ op.results.sensitivity.DR_bit.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Spatial Nonuniformities}} \\ %{{ op.results.spatial.DSNU1288.symbol %}} & %{{ '%.1f' % op.results.spatial.DSNU1288.value %}} & %{{ op.results.spatial.DSNU1288.unit %}} \\ %{{ op.results.spatial.DSNU1288_DN.symbol %}} & %{{ '%.1f' % op.results.spatial.DSNU1288_DN.value %}} & %{{ op.results.spatial.DSNU1288_DN.unit %}} \\ %{{ op.results.spatial.PRNU1288.symbol %}} & %{{ '%.1f' % op.results.spatial.PRNU1288.value %}} & %{{ op.results.spatial.PRNU1288.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Linearity error}} \\ %{{ op.results.linearity.LE_min.symbol %}} & %{{ '%.3f' % op.results.linearity.LE_min.value %}} & %{{ op.results.linearity.LE_min.unit %}} \\ %{{ op.results.linearity.LE_max.symbol %}} & %{{ '%.3f' % op.results.linearity.LE_max.value %}} & %{{ op.results.linearity.LE_max.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{$^*$Result depends on wavelenght}} \\ \end{tabular} \end{minipage} %{ if not op.summary_only %} %Extra plots \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotSensitivity %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotUyDark %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotLinearity %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotDeviationLinearity %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotHorizontalSpectogramPRNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotHorizontalSpectrogramDSNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotVerticalSpectrogramPRNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotVerticalSpectrogramDSNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotLogarithmicHistogramDSNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotLogarithmicHistogramPRNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotAccumulatedLogHistogramDSNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotAccumulatedLogHistogramPRNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotHorizontalProfile %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotVerticalProfile %}}} \end{center} \vfill %{ endif %} PKۮH=:\\$emva1288/report/templates/report.tex\documentclass[a4paper,twoside,12pt,american,hidelinks]{article} \usepackage[T1]{fontenc} \usepackage[cp1257]{inputenc} \usepackage{lmodern} \usepackage{graphicx} \usepackage{fancyhdr} 
\usepackage{xcolor} \usepackage{emvadatasheet} \usepackage{lastpage} \usepackage[printwatermark]{xwatermark} \usepackage{tikz} \usepackage{multicol} \title{} \author{} \date{} \begin{document} \newcommand{\TheReportSection}{} \newcommand{\ReportSection}[1]{\renewcommand{\TheReportSection}{#1}} %{ if marketing.watermark %} \newsavebox\wtmkbox \savebox\wtmkbox{\tikz[color=red,opacity=0.3]\node{%{{marketing.watermark%}}};} \newwatermark*[ allpages, angle=45.0, scale=8.0, xpos=-20, ypos=15 ]{\usebox\wtmkbox} %{ endif %} %First marketing cover page %{ if cover_page %} \input{%{{cover_page%}}} %{ endif %} %Cover preamble \ReportSection{EMVA\,1288 Datasheet} \pagestyle{fancy} \scriptsize %Cover header \textbf{EMVA\,1288 Datasheet} \vspace*{5mm} \begin{minipage}[t]{0.975\linewidth} This datasheet describes the specification according to the standard 1288 Standard for Characterization and Presentation of Specification Data for Image Sensors and Cameras of European Machine Vision Association (EMVA) (See www.standard1288.org). \end{minipage} \vspace*{5mm} %{ include 'general_info.tex' %} %{ for op in operation_points %} %{ include 'op.tex' %} %{ endfor %} \end{document} PKHQ'emva1288/report/templates/op_header.tex\begin{minipage}[t]{0.975\linewidth} \begin{multicols}{2} \textbf{Camera setting} \hfill \\[1mm] %{ for key, value in op.camera_settings.items() -%} \textsl{%{{ key %}}} \hfill %{{ value %}} \\[1mm] %{ endfor -%} \textbf{Operation point parameters} \\[1mm] %{ for key, value in op.test_parameters.items() -%} \textsl{%{{ key %}}} \hfill %{{ value %}} \\[1mm] %{ endfor -%} \end{multicols} \end{minipage} PK2Gea+emva1288/report/templates/general_info.tex~\begin{minipage}[t]{0.975\linewidth} \begin{multicols}{2} \textsl{Vendor} \hfill %{{ basic.vendor|missing %}} \\[1mm] \textsl{Model} \hfill %{{ basic.model|missing %}} \\[1mm] \textsl{Data type} \hfill %{{ basic.data_type|missing %}} \\[1mm] \textsl{Sensor type} \hfill %{{ basic.sensor_type|missing %}} \\[1mm] \textsl{Diagonal} \hfill %{{ basic.sensor_diagonal|missing %}} \\[1mm] \textsl{Lens category} \hfill %{{ basic.lens_category|missing %}} \\[1mm] \textsl{Resolution} \hfill %{{ basic.resolution|missing %}} pixels \\[1mm] \textsl{Pixel size} \hfill %{{ basic.pixel_size|missing %}} $\mu m$ \\[1mm] %{- if basic.sensor_type == "CCD" -%} \textsl{Readout type} \hfill %{{ basic.readout_type|missing %}} \\[1mm] \textsl{Transfer type} \hfill %{{ basic.transfer_type|missing %}} \\[1mm] %{- elif basic.sensor_type == "CMOS" -%} \textsl{Shutter type} \hfill %{{ basic.shutter_type|missing %}} \\[1mm] \textsl{Overlap capabilities} \hfill %{{ basic.overlap_capabilities|missing %}} \\[1mm] %{- endif -%} \textsl{Maximum readout rate} \hfill %{{ basic.maximum_readout_rate|missing %}} \\[1mm] \textsl{Dark current compensation} \hfill %{{ basic.dark_current_compensation|missing %}} \\[1mm] \textsl{Interface type} \hfill %{{ basic.interface_type|missing %}} \\[1mm] %%%%%%%%%% SETUP \textsl{Light source} \hfill %{{ setup.light_source|missing %}} \\[1mm] \textsl{Standard version} \hfill %{{ setup.standard_version|missing %}} \\[1mm] \end{multicols} \end{minipage} \\[5mm] %{ for op in operation_points %} \underline{\textsl{Operation Point \textbf{%{{ op.name %}}} (Page \pageref{%{{ op.id %}}})}} %{ include 'op_header.tex' %} \\[5mm] %{ endfor %} %\\* \vfill \begin{center} \includegraphics[height=100mm,keepaspectratio]{%{{ basic.qe_plot|missing(marketing.missingplot) %}}} \end{center} PKH, Version 1.0.} \makeatletter %% PAGE LAYOUT \setlength{\textheight}{25cm} 
\setlength{\textwidth}{18.4cm} %Textbreite \setlength{\oddsidemargin}{1cm} %Rechter Rand der rechten Seite \setlength{\evensidemargin}{1cm} %Rechter Rand der linken Seite \setlength{\marginparwidth}{0cm} %Legt die Breite des Randnotizen-Bereichs fest. \setlength{\topmargin}{0cm} \setlength{\parindent}{0pt} %Einzug bei Anfaengen von Absaetzen \setlength\headheight{15mm} %Hoehe der Kopfzeile \voffset-19mm %Vertikaler Versatz \hoffset-23mm %Horizontaler Versatz % Header \fancyhead{}% \fancyhead[LO,LE]{\includegraphics[height=12mm]{files/EMVA1288Logo.pdf}} \fancyhead[C]{\textbf{%{{ marketing.vendor %}} \, %{{ basic.model|missing %}}}} \fancyhead[RE,RO]{\includegraphics[height=12mm]{%{{ marketing.logo %}}}} \renewcommand{\headrulewidth}{0.5pt} % Footer \fancyfoot{}% \fancyfoot[LO,RE]{\scriptsize\sffamily \TheReportSection} \fancyfoot[LE,RO]{\scriptsize\sffamily\textup\thepage\ of \pageref{LastPage}} \renewcommand{\footrulewidth}{0.4pt} \setlength{\columnsep}{1cm} % Spaltenabstand \def\ind#1{\ensuremath{_{\mbox{\scriptsize #1}}}} \makeatother PK2Gޕ(emva1288/report/templates/op_header.tex~\begin{minipage}[t]{0.975\linewidth} \begin{multicols}{2} \textbf{Camera setting} \hfill \\[1mm] \textsl{Bit depth} \hfill %{{ op.bit_depth|missing %}} \\[1mm] \textsl{Gain} \hfill %{{ op.gain|missing %}} \\[1mm] \textsl{Exposure time} \hfill %{{ op.exposure_time|missing %}} \\[1mm] \textsl{Black level} \hfill %{{ op.black_level|missing %}} \\[1mm] \textsl{FPN correction} \hfill %{{ op.fpn_correction|missing %}} \\[1mm] % \textbf{External conditions} \\[1mm] \textsl{Illumination wavelength} \hfill %{{ op.wavelength|missing %}} $nm$ \\[1mm] \textsl{Environmental temperature} \hfill %{{ op.temperature|missing %}} \\[1mm] \textsl{Housing temperature} \hfill %{{ op.housing_temperature|missing %}} \\[1mm] \end{multicols} \end{minipage} PKH/CC emva1288/report/templates/op.tex\newpage \ReportSection{Operation Point: \textbf{%{{ op.name %}}} (@%{{ op.test_parameters.Wavelength|missing %}})} \underline{\textsl{Summary sheet for Operation Point: \textbf{%{{ op.name %}}}\label{%{{ op.id %}}}} (@%{{ op.test_parameters.Wavelength|missing %}} wavelength)} %{ include 'op_header.tex' %} \\[10mm] %Plots \begin{minipage}[t]{0.690\linewidth} \includegraphics[width=0.95\linewidth,keepaspectratio]{%{{op.plots.PlotPTC %}}} \includegraphics[width=0.95\linewidth,keepaspectratio]{%{{op.plots.PlotSNR %}}} \end{minipage} %Results \begin{minipage}[t]{0.290\linewidth} \begin{tabular}{lr@{\hspace{1.4mm}}l} \multicolumn{3}{c}{\textbf{Performance}} \\[2mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.QE.short %}} }} \\ %{{ op.results.sensitivity.QE.symbol %}} & %{{ '%.2f' % op.results.sensitivity.QE.value %}} & %{{ op.results.sensitivity.QE.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.K.short %}}}} \\ %{{ op.results.sensitivity.K.symbol %}} & %{{ '%.3f' % op.results.sensitivity.K.value %}} & %{{ op.results.sensitivity.K.unit %}} \\ %{{ op.results.sensitivity.inverse_K.symbol %}} & %{{ '%.3f' % op.results.sensitivity.inverse_K.value %}} & %{{ op.results.sensitivity.inverse_K.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Temporal dark noise}} \\ %{{ op.results.sensitivity.sigma_d.symbol %}} & %{{ '%.3f' % op.results.sensitivity.sigma_d.value %}} & %{{ op.results.sensitivity.sigma_d.unit %}} \\ %{{ op.results.sensitivity.sigma_y_dark.symbol %}} & %{{ '%.3f' % op.results.sensitivity.sigma_y_dark.value %}} & %{{ op.results.sensitivity.sigma_y_dark.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ 
op.results.sensitivity.SNR_max.short %}}}} \\ %{{ op.results.sensitivity.SNR_max.symbol %}} & %{{ '%.0f' % op.results.sensitivity.SNR_max.value %}} \\ & % %{{ op.results.sensitivity.SNR_max_dB.symbol %}} & %{{ '%.2f' % op.results.sensitivity.SNR_max_dB.value %}} & %{{ op.results.sensitivity.SNR_max_dB.unit %}} \\ & % %{{ op.results.sensitivity.SNR_max_bit.symbol %}} & %{{ '%.1f' % op.results.sensitivity.SNR_max_bit.value %}} & %{{ op.results.sensitivity.SNR_max_bit.unit %}} \\ %{{ op.results.sensitivity.inverse_SNR_max.symbol %}} & %{{ '%.3f' % op.results.sensitivity.inverse_SNR_max.value %}} & %{{ op.results.sensitivity.inverse_SNR_max.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.u_p_min.short %}}}} \\ %{{ op.results.sensitivity.u_p_min.symbol %}} & %{{ op.results.sensitivity.u_p_min.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_p_min.unit %}} \\ %{{ op.results.sensitivity.u_p_min_area.symbol %}} & %{{ op.results.sensitivity.u_p_min_area.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_p_min_area.unit %}} \\ %{{ op.results.sensitivity.u_e_min.symbol %}} & %{{ '%.3f' % op.results.sensitivity.u_e_min.value %}} & %{{ op.results.sensitivity.u_e_min.unit %}} \\ %{{ op.results.sensitivity.u_e_min_area.symbol %}} & %{{ op.results.sensitivity.u_e_min_area.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_e_min_area.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.u_p_sat.short %}}}} \\ %{{ op.results.sensitivity.u_p_sat.symbol %}} & %{{ '%.0f' % op.results.sensitivity.u_p_sat.value %}} & %{{ op.results.sensitivity.u_p_sat.unit %}} \\ %{{ op.results.sensitivity.u_p_sat_area.symbol %}} & %{{ op.results.sensitivity.u_p_sat_area.value|missingnumber(3) %}} & %{{ op.results.sensitivity.u_p_sat_area.unit %}} \\ %{{ op.results.sensitivity.u_e_sat.symbol %}} & %{{ '%.0f' % op.results.sensitivity.u_e_sat.value %}} & %{{ op.results.sensitivity.u_e_sat.unit %}} \\ %{{ op.results.sensitivity.u_e_sat_area.symbol %}} & %{{ op.results.sensitivity.u_e_sat_area.value|missingnumber(0) %}} & %{{ op.results.sensitivity.u_e_sat_area.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{%{{ op.results.sensitivity.DR.short %}}}} \\ %{{ op.results.sensitivity.DR.symbol %}} & %{{ '%.0f' % op.results.sensitivity.DR.value %}} \\ & % %{{ op.results.sensitivity.DR_dB.symbol %}} & %{{ '%.1f' % op.results.sensitivity.DR_dB.value %}} & %{{ op.results.sensitivity.DR_dB.unit %}} \\ & % %{{ op.results.sensitivity.DR_bit.symbol %}} & %{{ '%.1f' % op.results.sensitivity.DR_bit.value %}} & %{{ op.results.sensitivity.DR_bit.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Spatial Nonuniformities}} \\ %{{ op.results.spatial.DSNU1288.symbol %}} & %{{ '%.1f' % op.results.spatial.DSNU1288.value %}} & %{{ op.results.spatial.DSNU1288.unit %}} \\ & % %{{ op.results.spatial.DSNU1288_DN.symbol %}} & %{{ '%.1f' % op.results.spatial.DSNU1288_DN.value %}} & %{{ op.results.spatial.DSNU1288_DN.unit %}} \\ %{{ op.results.spatial.PRNU1288.symbol %}} & %{{ '%.1f' % op.results.spatial.PRNU1288.value %}} & %{{ op.results.spatial.PRNU1288.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Linearity error}} \\ %{{ op.results.linearity.LE_min.symbol %}} & %{{ '%.3f' % op.results.linearity.LE_min.value %}} & %{{ op.results.linearity.LE_min.unit %}} \\ %{{ op.results.linearity.LE_max.symbol %}} & %{{ '%.3f' % op.results.linearity.LE_max.value %}} & %{{ op.results.linearity.LE_max.unit %}} \\[5mm] \multicolumn{3}{l}{\textbf{Dark current}} \\ %{{ op.results.dark_current.u_I_mean.symbol %}} & %{{ 
op.results.dark_current.u_I_mean.value|missing %}} & %{{ op.results.dark_current.u_I_mean.unit %}} \\ %{{ op.results.dark_current.u_I_var.symbol %}} & %{{ op.results.dark_current.u_I_var.value|missing %}} & %{{ op.results.dark_current.u_I_var.unit %}} \\[5mm] \end{tabular} \end{minipage} %{ if not op.summary_only %} %Extra plots \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotSensitivity %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotUyDark %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotLinearity %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotDeviationLinearity %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotHorizontalSpectrogramPRNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotHorizontalSpectrogramDSNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotVerticalSpectrogramPRNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotVerticalSpectrogramDSNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotLogarithmicHistogramDSNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotLogarithmicHistogramPRNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotAccumulatedLogHistogramDSNU %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotAccumulatedLogHistogramPRNU %}}} \end{center} \vfill \newpage \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotHorizontalProfile %}}} \end{center} \begin{center} \includegraphics[height=0.45\textheight,keepaspectratio]{%{{ op.plots.PlotVerticalProfile %}}} \end{center} \vfill %{ endif %}
emva1288/report/files/missingplot.pdf: binary PDF placeholder graphic ("missing plot" image); the compressed PDF stream is omitted here.
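To illustrate the workflow described in the Report1288 docstring (instantiate the report, call add() once per operation point, then latex()), here is a minimal usage sketch in Python. It is not part of the package sources above: the function name build_report, the example setup/basic/marketing values and the `data` argument are assumptions for illustration; `data` must be a dictionary that emva1288.process.Results1288 can process, and a LaTeX toolchain is still needed to compile the generated TeX files.

from emva1288.report import (Report1288, info_basic, info_marketing,
                             info_op, info_setup)


def build_report(outdir, data):
    """Minimal sketch: `data` is assumed to be a dict consumable by
    emva1288.process.Results1288 (hypothetical input, not defined here)."""
    report = Report1288(outdir,
                        setup=info_setup(**{'Standard version': '3.1'}),
                        basic=info_basic(vendor='Example vendor',
                                         model='Example model'),
                        marketing=info_marketing(watermark='Preliminary'))

    op = info_op()
    op['name'] = 'OP1'                           # add() expects a 'name' key
    op['camera_settings']['Exposure time'] = '1 ms'
    report.add(op, data)     # processes data with Results1288 and saves plots
    report.latex()           # writes report.tex and emvadatasheet.sty to outdir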
~Dd'iꔾ?`4$=D>Q{;I![!^FG؄F?C@ p_>"3rWC7k@e {q?0`n< q"H+T]"D[-i .r7&Ю AK6fR\&@e.VS"#lW䏊1|5ΐ3ufV($M*\pVv"6.WU_dp>bKxz;1Ն0JE>H)X ~vCCkÚ% ZG<>W^G}6I%MU=W ̈xW}{?B0Gkh1}֗ RG}t mq|*WcQ(O8CH(ӧc`ah,3)F6Ƿ>+"OV 0ǽ#$,P~L4;l/}/\ .hS\@B}!*`~GsE7 2:pi|"7F^aBlUD)+v`iB0QWTUߦl0FKG6maX߀'FmWcfcCi-)։97h ^7(}4п42:Tz᝻wВIk_.H@ s(r}֑LRn~? }xxy`׷=ңMu"YMp`{&βeK.+`xh`ƅ7}0fQbpC Jh .Q$F^^5 _)ߧa<"hU7& :LzÁ gߌH>@j\OF4  di^R:S@ލh_uGn}Iba&5<7uO,rHO,Vj։{O|%"hc /c0w'"ʸG#D|BSZ|B&q_JYPuɆR F-׶Ug1ܦ?jCzq|vvD'g|^ *I0[ f,`4'Aq,v7 H(Hr)OSŸY (Q"y]Vwyj9^Qn_Iyw*X'N6ћ)b.EH{iO L( cNP @L@,tzN!ǑC`bDZoN d?D>&,=J=Y۔/ { 8&T5WnRU +|3X˙O[rgboh1hi;z3^Pz[GhYdjGB'E@G>j{:&ouJj?U"U^2;ԏ!@# -8 ?c0vt{HD7@Qcr #4,DIxG9DPQBO)Fu)Eo: 3ۍK84*]TtFۋ˽9d m`pB1bچMUa)YϽ/M>ZhF /3y_PA'ҜfܛG$U%EC8ÛRM2@d hסkb fͩDVok&=Wޜj4c&OJ,1_oU3񢪾XF8ʬF^ +LܣUbzf*/t*bf$ YW[7<ʨR(èAi^lX7"yZC)3MNKyC)O3TPxZpv\6 \{(}fu/+61ςM.\*y͊c*wի&!L&[u@0p:̜~ Rx^D58oByd7e>\6 &[/-Kp8Kjnj y3p'E-qNjJ dVmr@ٸ!1+5 ̰yq'gq8fW1;/%B&^e+—_+So/%/ɡe6"` ϛ{AJ!R/)ccw9րߝi8o2.~1))V54\Ͻ SBǸ1{cfjqT:Ϸ(jv*߼M C=W!}<|;'s&OzoX>f[7%qSr<9.JlÇ7eM8yޭŚ[Ɇy#|F/m"s\;Iv5mu@;/cX2(k"Z^gy yJüt["͵nBmށ l޺0ŪŹSNH/Ncx{/Z9d[W&N!hCg&iLswSR-Hޢ5dÛl=ڛ@!G)[%iVlY7ٺ- nmdp =xrf?Z'?p9L"m\Xtj:ܣM҉%/ \@lkUZ)?Zt_pqݖ8j!mQ ?߶wq M"K}S 6,vGC95aB ut;SGC0@"#>,2IHMviGx츍bo ݪ.475v+]?N;ctm΃1v.v|`˳=HMfA9$Kh\]H2%Y[-JnV.nu^ }$H>.Hcѩc"7T֎a:Y2vlzJ q%1AЮ$:E|/Ĕë6y* b 7l&fLl7r,} o:7|aL >fMC'M)X`oJ&LaZ'5F5Sqٗ=aB 8|˞L!C{*8~&{. >y`L! ?fdt Q S೑6)&C/ +[Iƺ fwdZk]X7#i F;7d b~wdfx WaqoKR_lOBXba%ƉNd$b$-;ٺ{s^N#@_TD{~Ƀp(P vo[ XrPCN>4uH< D;9x1xm7|)sx0(P|;"C.C$jPg;c Jh<h}rrX3A:tٶx [o;Eᘮxm$n&"e^(;o_Lf嘈hm^"S/x[)$d  &!-Hh M c t-\\;fAZa&tmzD1x(ݷfGQw+;n8NB>F^Կmң}a; Wu:,%x;qw1,Js;Ua ccoTF;N\"KtQ%#c:|22Hݔڬd*aG( ;agATKM%q֑΁*KG (,ńz4c8Oض#jY1\ öuh1{UUu~{&|~=(o*it}$Ahz.eŕ,iǨypl#轉c6rmJ{ӡ)1 r,2?30 TjW)2V$ J(c3T?OI@ռd7C5IboB`'9)>&W'ur>ښLmui)[m딍'zڕKsҐ]CBP)kOKڧ֎V_a3΄9#XUYE WE_fPqtSIJ/^.~i:dt3Q"AuL:fOaF8$^psUp&DȣJRCV \қab53o0F҃oee(v༃"_+78yqԀ*340QG2+IdfD$ Y*͡g;6>*Dgp#>3PRZ7-юRS>p-DJQPnNmFgBʝ*4菽\V %Ԉ{3AQ\KSml:ܩj+J;' e߂n6u;:%TQJLO35~7^0J3ѡ"?o|8ʆ7$`FXmPKD); E迼M=K樉OSNiS⾘NBQӅ~kr~ qy[?[$AȤIF)':2"p +7GDtIA NJͿ[^|bH ?S$S8R}oIUD߼:m^` [Λ,ߺ H*ɷV3M@uQ= AY2|{$4d(x鰹l$!PU {oN ZH%mM@9Y%!FNR{k;;PfoޡYUu8WF 4IƱ$vsU!? ~|ޒT{%HiQJ譣B7%ߒPc @pΛDY!z?%Aɼ%v*\޼CتT޶(YV&oI`.մ@O{k(bHPЦ}I.Rɉ]A(`=9ض&A.FM[EO黥ewoe>8\EvqhmGmZmP )im[sA9cOu!q&iVX LE)_@*`&I X.6U35%i[ |psRf{c4:Ey^Y掳Z\HY&X9[G" yǴW6Js c>LҸ0oXqKaD9+F t( P>YNwxGI5qWn йd9F(]5Q=ozS݇0;teԕFo{ɪنNc+T>TՁsWæ僰 ,aEhnk4v[ uQY!+P|h%»Z92Uq|u(vC}~ncZ ah{GoDq d)۶dE cKHGj: 8 "`4e $h]9-ٖ%@5EY%!,٥%rw;zXk,4EI)-ͽ3W!8󤸉cmLַM E=Vl7##L`-GWpUw;**k ] 4YۣЫQƒ @jǜ0vt88~ )b =P~B4ζz6uk4{Hr8ģdڶw]-MʤaA10QސTJDeіL%tt `ސE <(JA]}R( C( ir_B[fvPmޡ(+\l4I 5I .tFϖp1Rhʞ-DWB"~bY>^ q9)uWd"J;A)q)b"%Ζfp7w(,p$E)kTT2dC:f A٫rf[t#|d߰`HG"VlFG =rH7xt65MP*Y~j4X#1\!Ƞ~x# i<gd.a' ʑ;@W1z\1IW:ZF*hhIJ%WBx}D̠] '6BՔ{Bxj#MPGƶPRckˊ?m`nLK&%ޏAĤ:'(EbLѯURlTM)uRJ+ I2bK@"|4T([:^ԡ@AtZǝsF8:8vm9FH&-~b3XcPC'z Qe/ s;͋QBn3_c|ܔh>'5WFV[Saö/>ߓh0À:>C-]1. P ip$!I6#A5h08OO{$d0\WOd"󂹖?HB1ۺ]'C!AΫ aƴ ZC04aBCLl;fJ zx#,¶120qV?'8!3:3LH w10A VR(>5F{6U\A"&S2j~oh+D8(#LY>7sX0?^Mfr7'۠k(QH,"xʶ& rƋuR?_?''|k;1e޴BF#2-׶Wq2|(_Ff[2nU,|EVN1jيCIi˖iέc,FJQQP19~m `S\I_hZEj-^Dv(AN*12.h7CGő*:ABaEBwLtN:7l?HRu!ۀ'm㡺QE~2I P3Әָlo'%*5 $8 Ҥ>ԟ,uvvrC$(f4U$p0%ʢT$sB쩨j$%?LP,,P&:ѡ;Aec\#=_mv[W=%Z,Q4!'Co$OAE!!-2JH拰+Sp(0V1vE+nwy Z17/4wktu ~T4CϢM,A(U9C]0x28Ƀ-ı,Ftpe GݸA&ThP҉[. %PP+觡ړ JQi3~}6UL[Qh^Kf > \$51z&QmQ{JLu;XTvF(V*Sc+Q^Նl&&ݥbԘȏ6ˡO4E]i5(tBUիUg2zlgX34:@Ue;DvcH>ʀc~1*dIX "|N]BI1T"0!^t*X ɩۘV a4 ݯV]&,&֤ˡmDP*uM֏ǿ:f y+>$\Ga|XQ4"M5KA'+Pͯ.> E^֬H`CZ@9j:ltV>5-ۦ 'ނp1!CQؖi!im>^Z f޿vuB䟪HĶd?ꊪIYUQQ.U h3/$7M!Q9ŶiJꃻCn_*܂Os`z,Ihx(#j`,OgAAgzU >( 㛐 X.<)+ b9=?Na>l9eZd*Z4M3؈W< BK{Yz... 
Ϫ$N/t~Phu(EWWN )/~])"QCC\ĴI؎9=rDNQ2%lXM8cEI |~D_ڶm4;IS .(V'qRf-E\~}n{p*y2JL *H U ❫.AE,J"$؉뛚V !Zu@|D`z]WB)M)1&Ӌ,d "!{vWR:Ps&6Bľ{7 ĭ0ܥ% oځ\HjL+J^TflibĶc@#2(::"IHpWwpbIL|4?Xc 7 ; Y:,/y6S7w \ ;J$I6Qh,UGG&R o,t.( ?Aң0$>n[0`.DvJ`CGm,d i,uFVذQN}Ȏ _nڽ4 /3r@4ç)rLTY2D7>DL*P9kI =uv=Hmq,4(i-y,J;UE wf3ETȣ04^Le]_ω'f#Rf鈠FrkQgZ=`if؇4dF3p=~Țm>'~}TlH!gW]DN:X=9><@v sQRʡ^XLoHbQILuΪsU~b4>b):ȼi$mMHQх8d@dBE礟$8K ew='+&DPoj 2󼾀LUk'IBuT/" 4nM{SPқ빞LF$nl1T@I\ FlL v կ ty1G Ͻ W8چ&n9:voB)=m3g'F| YTRK)H"ٙNCr\]`;.Q4yQ`0|И/ю ܐުXLTN\f׽-I{lPnrqrd UԒAgS(6(g;[L4M7^51 Ԛ>L $xNh.Zv4. Hz]K oUWσ!Bi \&Pp_=(' )qIǻ:򘧥_~D9; ;(Lc[A7&# u|0-cC6O,nDE.D%{|ϤgdA/Q,&Se03Mhl7 ̿/P0Ioþàuǻp1)$[s3,؅7d+%E`ia@dcw?|O(w#RE0/IP uL(NbxTD=C!Hr0#`3ᜧ7oȂ6;>%0. ˙b$Sh ]'@da 48<`I[zZNb^HS)VAeHWo=0yȂSieB^[׎p \#H1T$u >-Al,h"e.sYX!LUXpCػI:lAvY2r]J \Q D<ηlyS{1'HQ"y>,%L@Sm ;}8jTNpQB}B(Xw=Tg9.ǡd ܴVՓmSRTy4nX7ACxS$]Ӻx= ϒ)Ԗ75ME2܍UK*&P#XD@5LN.j!OQ&$ ӰMt]|=!Z8kD8Rkq9e XJC0jT*D'h&,+[/S?}ܴ' vheF&Džw+K׶ۛH$!SBH1OP8D(1VjS^u9D}GIN|s>4]֣>dWIV9}h 5E=Q'>,Z$& .r%֦T{$EѴB_$:Pa@.!!4TiM 6܊AhڶB", ;k41щʾHZ{ͷj~0 RfzN*֥onGYD'8Ap 3> ]h'={ggܝUry7H]+qNo K6t)E^vo:Ei}8Q @SŠ톬Fcp=>nҼ~Їs:1G]M&`}!kvU9إ`*tK([Żq$TBO򬜼PZ'~xJd"d;bcB Lppo9%" EJC]MnT wQ f;p4(6mD^hh"pɇ^G,m'sK^.'\WVFR(]w^Hxg-hғFg*SZT2LjT% hƷa`2v"c7rvETу2^ Yj"-$E`x%׳8jvu)辷.9ټxyZP\[ /s`8':` zˤcIiqQlb&n&XB>#Eo1yW hIAC!5ar4ovZPxBjJS 6shbQT@vXu) m6Y#әYH4޳6ى,b)gN^Vlf RqΩ+*xP}NJR}Kex` p£ngZb>N4 ] OJ>ʸh`p l1]z(qlx?v /1-2] wJCB!n\ ֧{ d{C9uMn0yMNt{X ֑Tc7d9 VR@%*ce_--Y$:?_ڲD`̋ܿ]< Pi:diK},4s@St@RƼt KpY8h:ېob\|Z~\ӆeh g7? 7rzW(TFaocgk^@=3F57G'LzOWby7qHPjMVmsmY VǷ#x+: b+wfrJ@Sp!qJlp #bAz\dtDyo>ѩ7lZAxL4a+&@ofTɥ|sGɽ ꊚ=@ΧTiż@Q۴XDF2͈!J=KbH R""Dc,*zDրQ)!\NH-tۡ2͊QpLNJnN$}~t7?l4{꜕A, s AG+CCw= E0u9`DxԟLkB+%>AAFNiGGzswE$xg{ԗDBK^UH5,O9[扡"EXy/L*$e uQЛl~旃3AT?9zy[4N!$v/InR:s͛[z8%mG=0(ņ8X??WtNYl=&ej#ɚHj2qG~3S`2$ ,x \"@3HN=CU^xPM* 'H8 %@$4O~A(Ɔ'(UsOI-Sa8䘝3Za-jqZ}1O?P 9"Ѥ9LE{)2UyHDM3Nc'0ks&wRw?ִ[QG pw<AmQH,9XUb 49j齤d&q3݄G3|Z} DU$rLӁa\e85MI%Q Hz wB'Q5"yڤ,h]l'Mu"leX*YNzRZ$+QHPFٕ׳&G ~}V"NSG52`-xS@!U4vV+ –Tk:8"MF'/NQe7\8mAȬ$o !O5kA$i/j#y 6? E mjEUפ[0?Ds8MDK lKXI3m.*]ݕR `9j-7:Q.R"O :`b)Śڧ\E Uh`]T=gJ.9=o0_TMV'k즠H qNduZb,otchZ}04U7mĎq7UrQL`?/HA3g,ҞQE-ɪįj'%um5xYL2$1s&4<Q :q:z"}cuJսcSqׯ~=lwkH?e y:>X Dk'KyQLu:XPv09]ͫ[ȝYTJG^` H){GTrC1 o*/rT{HsPC]E} ?oUȠ}ݡoMg3*3z}o9m|YQHJ;#W_WR5mS0=l:^Ky a[YULJ.c픁7?AhJ4ž=[8sƯġ)-p^o8x(.SGQ˨;oANP 0Fo$ں k~9ѧ+d6ck4] =b 5}&RDt˱nHEսTq:Nc1\y ar9).#ĸ̎4L?/Vҏm6lq&V0y;U~h5PSXG[0"fxR":ڡ8 m\Z8WjFRkʧizT|ZG^{B?dze\_*eƗa+/+Όwq[/9~Km}?ϝs3zQo)IIіq=330Czp(:>-b xu75ܾ,nD_GҠei^)oxK{lamі_QC[=_ <J2HZQc\z __C 0Bz)빢ϧkxɰ=! k[^BSGo]hMoŭzF|TR*W %,/MR!\i0-y̿OcU`<҅L;AWPKW5;?˿Pv/XS\m={?bRv?Z#pM$3zy2 ΢DYmp4J~k}r}5zǷf''>͋po/wGg/ˌ9"iǯ܎zz<ξ<: 䯌هuDڄSy}߇ ԎI`X 'cZ'qF]4Nد5ϰkl+Jd;+=n+J~%{_ޯdW+wJ_W+_WА2?ϯdW+JDƯdJ~%{_ޯdWJ~%{g//BlQ a6O> z7f-DbgP-1.VfN*@>ط0K|8ΫgdTHm+ZuW4" FD]_Tߡ ùK<7}8"yw?xra]-p O~i}[LM:xTET,6 ]X@jǺ[}:)?o}yБEDIŴ~7P :mK#"}lqtp.5}7nů"9MNbhEs]]-k6T1y*'3 >\H |yy&sZk m?vXt!3h]ٵj U=3Tlg:!?_tz4+3 !^z鳃<ǩ$@ 6U>N^cUkIι4+REQ- zߟLtC#]8g֑EccK 8y<4'7[g"9<'Q#) 1 RU-(:L>S66Q3u)5{ٵ'kUN[1uFv[oW>L >}C 9ԃCWz G˅Bf~7,_8{U,^ϫYk/z{=^̗GY?MLδ L9]HTEN1F {0zv)ɳӒsz0ZGfj9>_vPD_)<ͯi?>oYX [Mbf} a2_u=%~OЮʚ^7"FWO%őJb21IBtyaU_rǂ|V2YNw2QlӇ.fO/'| +Mm6Mau-aetI\}&[W2mM+Sy.2R`ϚUr-#e;$0?UWi36{T S}WXkñu/ɠc:<׽k(x1KZGt1AO7 6N/Br ;Y{%袷-wzDzJBF3CxͲ;fU{/?Ć/ڒhQ_0poLK!dZ ={j6T`sH49%BOf[y)=- 1IiXPeX>.nAO +^.s:ʀ!xMπ|ΒV̋ɌmⱳF;ʹPfܧ0933Fp@GzȉѵCN&kʁ3zt X]3  Ѱm@֯3X=7O?yy? 
4G$KC5@b8-!{M#'-'3/-a>KN] sѥjꠚ83}XQn(鏹׆95Eh7/2`VA^cF @f2xgnn@*8b@?78fx pW8^,Mo9%dIlfWY,]dH[0So*mx%k սTtgO, |_Cq\+:o,yrď輜P-g-f> 'ZpŒE5g̏Xq+XGP!"׉>miblkZ6- nfg0ͧS<Wi??f|Ϳ-OinFCr {wޚ-ٗ?ŵPcnqk'BjeD:,YlV\>Vrlh:W%h4Z/;e*MLZڞLS~t)^՞.JOw]LYfk6cǽ?{ГH,Q ҿ $hi:/`q43 <>Hu =]:xbC;Ȧ%4$_f=e\1@| Yt(K<}5q!m1#8=0Jdt$eΗtG2n#Cwbᖹ+htN*esy>o-IMTb4!Ҷ mLԘWK׹9Tןc|?S,.ݶI"22]vIUݙ[@@*?s;K $IRiŵĦ>av6QpIuEMc]$NJ"[ŦDYnm7.6m"jÈ/Ry9ڄ~uȔiFZ6[ůWQw_~l5*:bֹ`¿hN[  4]Mugb?|%%Ep: o.n]qv&쮚A2}{d`-iF0;M?}ipdt:m[yrhǘ{!z-b3K&ޕ ->7p{ ז}-MRQL-IJ*>qv&HD$>\%-)RGmW5\7H"Fc sZ/c-]z1%6%A?6MDGPFLoCqcrAmHSk.`-&d%?t1/uC-M{N͵ Y{%50?}Uez1#dӔ 4cOW3Ls&99AS4i~Qm{ױôtҗg<.}A:MZmU'?4{)uЦMkޡM%ghk3$[~ٷX趕ު$ibCV],K4})v_9?\&kYqf\3nk"p\t &S O'./rq2gPjD[_Iͪٚeo@xFt&"Q %roӲq⑲Ι+9}aT(.P 0j6ߵ=D,F0[{"f*_|BOӞnt>-m0͵bfcd /_+@׹s:qz-:oX<9qVUv1&@iv-z4Y L|a-œ7+0=Ҵ37^9s]]1kE )4f3>k~2aho#d/9W_gȠه͌w/\R2H C%CC[8>)뤚 3m# h&;#o;Q!лi7^[ZwmZbFhȭmʺ+YdX@J`F"c:mu]k9_kzy_ӯy&u6ՎRPn7!qMxX go>D_*yZ"Kc!5[i%*Qݢ-/0̚.~-o0Lg4N }ktGI(lIO']&$œi͘4)&$ɱZTsmF侫YTJq[=r \Fs=վ{1Y!>0Yrn2'swS8ZOJʠt||fɧcx|EזX]%U`9yQ̯ _uڜٜwOը ,y_)[|+b;N|HdT͹+\wmZ>AҶ1{cQAHc#( 7|w6v!:K$Iςyݵބ7m(߾mHy^a}Z߅֥<gߏhMSj{[̶w25o_2$q+cd|NSuS;N밷:,, _K%גkdZ2?_K%גk<%גkdZ2-_K%גkdZ2-_K%גkdZ2-_K%גkdZ2-_K%גkdZ2-_K%גkdZ2-%{6,W|Mk8C$M=# lNj߆eCHiӃ=_j'_u QtT`Dq%72)K"UǎP,!6dF"]nwIy'C[ {A9 ?h/:娿&rnbK 58]/r)\3ݫ6 .:QhÌb=Ո mۢfiĖ9-:(*>dUݫb!v@46C-ʎfsvkf}%p0%4`a JAlPQi^fј}r(~_.t"C̓,63KhO#1䗝>z5%YГ {ljǷmfƴ.?@/hri0x`(%h>h:2xTTGJґXm2ނ+udāzWxGsi̱U5bd5Z}AfDai U 捙JErw D1tG=Kg,ۭ#wh̎ i+;ͻј4vbzoQczsYbhp M+1F1 ރ~=c|KbP!]YE3DŽskۇ(5(k,XR0}OPy$P zE ߂ث :I0c։gFokN61i.0;fk&ک-5$NR[8MJĀ ^sKJM5,70saۥ0/HN*ࡓ1C~&cLtKF,juEtt#TɽZ|Ӭo߂d2\{|ajo3\wSkW&s>VweDav7/g̭Tt*~@P%FqfNU?lCþm6Blt.~惰#ql }GTAk֮\fơP#F oI1hדKnQcz3@[H˺4ߦ*c{ܼ܎rϪ Vfanph o.-9}]Xf߹ ozsaekbw`K F<\RDXTC.s;ĺQ=#n"SV1dzţkhǬAtMn$ l]Gծw)KI؇U뿞5}e;<ҏbK_ST9WE=JMh KSanHR3wmmo5oȳIUFyigB<#aսZ;w}mĆmEVW^VM>M g[ uQ>plifTj5g@5H.hmVytM~QskȊpM~kXY\`C:+ҺN;o K>y&ިiB9R a[Rj$$/7Jz}̷,U֛6dT1:Op؁ И@-Y<wz"/.7tqB4Kx'|Cn;Ndž 6q-k!5MwpeLWL#}ãV85w4nsOגq2J~JӬO),h>&_DY iL .4ፕa{CU*8.g;GsQMbYCt"{feRC•fjY?56 ~zfD4R$5㟍u}_VJr=NLW6KVwMؚ5\Xf)'j.4 +2z7sh.]=|n:'w~QtR v([5.MCA[k1tJV, 8xb[{6˥>lTrl{5uEnmރr"ۤE^4]Ih~ Hݺ+@Jmv!k!%pi5lѤ4Ztp h& HE6MGw1f+3kWvUnjOz9@-|7HDwbt>&|!i]gS=&)ǺYѮ_/TuG_UyqFS+Yalt_KްdZw'&7q0.m 2eW"j.-ƁlNbdPV!q8~M);C{/sB؞!p)+(mndvB ȉe/dk@s +@ ǜ4]l\vz+T +AeE2bYz Σ+,|EM X\ʁ1\RTf3F Yg76U2%=/AJ=F:R\ Rs=CG9@"M=:4(= ^ԿkغtZXt|Qc+Gt!3IdaG\FkS jU .vBo2֙02O|6lB~ěc p9{P9uLc%&x+!@)bqƱ09b PyMq8#l|7^F+3p{8"]dFчWRg~3Pa#S;[eglUvV*;[eglUvJ*;[%lmUvV*;[eg䝭2(pglUV*;[eglr_*+t6r269YGM#Gm)7): }„WW -4ar bf@BX`)\[gj~F} St?bȇ-֔G/,KZOI X֣ \_xdT'S @?DBOm?Oa> ѿI~"A欶>J9&5bQjtHvCcGZ.V$/+{r\uC0$SuC7܂hp0N_[~r%DLȕ #r~Tꆟ4OAB!tԞQcOt;=G7wub-:)޾ D!hbZs[{@zT"5Ꮥ%"n"sE>zNbesڟK[#$͈柋>QYxX@)S$F~4LsDkmx#9 q f1]T:ʌؕt lyA ^s-m\ )^JHIVe'zk$E;GN5MUIF7e$/| "Kً̙ R%* b ĵ\\MTh& ťߚnHp5+5ch6oA^bLfCxFbx25j&.3!5 })"HVh2ِR_a&ڄufZqf3'g.-ummt570:GMjw uL6ZtϩF3:Yl?ˍN'mFAýNBU#ѐfZ )bv: KPCuvAΛmvY`ir[!䶨4֡N$+m?-w"JYnS**<1_ˉBb J& i4gPde6䎾]>B o[IȢ>NjtA9m؈qhep~!Ɏy`DghӦHaB~"~uhyD5sD@㫅}FuaB8[$e׷ydoRh<eX@%|@(j\'g5iUu5JGu&L64Z]s&}G$G Q+l{$q}68# L7qfќ`w#LC/wV O?hso<;iu]:Qܾ݀$7d鵍j#L72D'$HN<Ԁ({{LnWPZzIUP=5z]<$_Lv iz 9j*uw"KMkܪ2Itjά WMe#{S {ZD$#+݈3!]B~Uy$25*b_UydV SIP[:4ep?#1 k&}4C_ k:CCHf"H-L!A>@6,hD;BCw=4x$M D\#H@V#)$XN)B`ޑ71VNw[W|Vԑl$bA DNe f{4&^!y Km#;m H>ItƆtkru:TFydb zkP&ݠmaHv3kE8k oJ}5'@EF Yw2Gz:ޔtt'BzH}N&d"{tcF"ߔCx4f)HSnڐIp NV~*""k˄q3dܦ?D~cQ'&+Pl]7&=A!jȉSaqoGТHSa()m9h$(P R!A΄09Q)Z"If Uir5!eh/8ی*WW&ce\#({A;"PȏF~AҜBp.()7[ .tspB" 8l62y?Bƫ2*42p:ńvB=/ [hmQ30s[mbB>b"NT*R; 㻤"CGV .{)c\l3ڃ'M!Zfkg cBgz TVGG nj5xL;0v ҫlCIcv*මDƝId0ex> yw䌇pwep2Jr vFw%Љp|ܮ ȧG"|/ W3w#]EGVļs'-q'w?!WOʿ/2O[?l W)R> gt<ޙ~`9d[t0nF6m PpA:CwFA2 
(OI:|Fk}Ԧh}Tu@؉97ؽxb hr=>t7?ٸ8siGtPc8p:?~ *H:_O6X{,GPG?ÏhCV_JцA9~AZ:mZ;~7ocO`I'sQѭqO63ӯEwo,8ЧHXH`9߇|u|H'0~=}/6xoKm\_(<~.>ԀA>n $ C_v>Bqy_mTNË7uq9 .or#DhA>,P?`4ڐ,6h/2^"餁tAqQr軌ɟ?`: ,/a@^i緬??тrad.>} ?Y{5PѿO i>&|.^|H-δsG%“s>%Đb8{?|;}~/[o_/GkyEa`w?Xhs?h`Oev@* GA44`sN/b [8?Q} G<58[hX:-qN\}?XppAv`Che Ix?, /_`/_ J~ N,8 {gi?^,k?_\\z;ۿ9NU'Fqqϡ }O`C,HB;"}?hu~op,H1%w<[`0g"~G]`/ox!Y %,B;[hG_BFh胎!#Q(}@ hD-_q ڐhm-HB[G"X_tx,A_CkD1 غ![ADO i߀?у/p" mFcH?xxY=#o O|vEVbjD_ EiϞ<}/#"}GdvD^G",#'B,~WOB;""h#I>e0Pgq',8>b_qP_8x~^<y|C&6s|GA!<' QX| ,埛g<8Hr{ /4r9| ;0I0>XV#Z4/+ja|сEGc FcLאI yl~ $$L?qQ>/-TƟe{2|Ig -ȃUy7j@Ϩfmgt>{{h'7gI([d9u?.[ȟB?2~?><Џ߿G)2x k@x({$㝳$-EE%z y!Kgcp4qp4T(W @ Q@r:<*a0((gT~nG<BP%!8lVE '!F (s[a;8 IsXA(1"5dž98@̣3̿xAb&} Ҍ:i9@E4i0F4GBk 1 {|Bjk1 H~\j갆PBJ_a|aca E>& : }}6,,0ɍ+ȏeA~,?w#}D k@X,|\-JgeG?ژO]W~XP+tEa .z@F52}Q$T0c 0( 3[xq?_̢=H்>h-AG^a?Eu(?GeW#~F z2Fx9]/A"?"OWJYwみq~ŮR^ϱ(jҿQ{oIޠ.硢&Y4xIJf-?ϋhvcۨVyq2_>4mZ}h׼HA2~ P|_K.Ժ|k yhܽ$:_$jT~ᡸ]JLQF|zdN%P ;;i}}Gw{ Ad+hq4Ի:$JVw ;0=p`$ o"Z|pC(XPA<"yC(dE 8x| 9C d3B=FXYN;ECB . C(Q ?ֿmrDx ׷ذa w V6xц6P_BJ6Ώ}?fF3#^|(C/vYԐoe|CLo1C&jчsF}Ti7^4oO_40*hh92rU)Eb%sdZeF(ƅ??''J-Krw0Gӹ_'/"uqE'*c#h}?EQ[~[qF291.c&20AşӊCEqa)|MBes1 ?+<a ?Vыys-,GPs?/z% уo5|2_(@"}Ef=E^Bk|~^^\ŏE/׻x%3IPO+_Dž?kx?52m{ #c<]F3Le0EȐ-@_LJ|o 0~z~̿*{w5f!,{d|Bc\8lj'90YGa#1^ 0(r i 1,qHoCD DH)(0Y6%ƙq訃ftȠ󏯙2j p[pT` 20$xa1u ׁD68qCxmG}(ϤFJk%A5a5Aea9d0~F n0`q  oGJO.߬<&kX ,(|: oO#Z/EHc3 >,*U><l`?1Llҝ)Pl1p@xC,X ,C,/I6l(Ά('XYGNxpC  89R D!2bCm¢ $l%h,4Qz2Plz*Qz2QהQŊQŊQB-b9 ^L,%dwއt\c{MB'1NxNp8 q9/fLe<p8ACr_4(,EjB g̤R+BF .VQZ4@\b?򇘇 mX$j`t:a/F,JԀ}@w>C=fBCbm| >胋/FQQ(_Aa нbD oXAY,8x~z~z x0 e/cx qŮdx=r.S蟟NJ7؋X8^Č^05y}$! @x Eh( o`.Q>jP2)+冾B2RB'ֳxEqED ,GTE_=/w>AEAd d" kT^b5;wѷї7=s&59i}~E?AQ:G·!/(C= Y:z"_?xğ8N( Q()٣ћτHX?߿8Οzvnyvsnyw;sY#ɬ|,49آB:;C:A<礌#C=> XJ/~ X{9oKQ!GPh@zV\k|T^7l s vGfWZD5{rh (5;3n`e}> +^ X_Ú1 l8 -X2M4Aӛ{+l ;ڸv-_3UrgQ~wd7Ur5Q艨[>s:N0wb}sO`pߚyA ̳^ ]G7L9 m*mi&0zOn px.p`ӀxŔ;|Մzom$Dgladn%R. i~HuiRPq05Q}bNLz`&rAf: 79y ;b9)hRm,\1/uKgҔW9VrjIOAjDM\籩-Sݞ ŗٮ9H!Sq3(9.f^}zDԝPrX2e}F )]*vNsV_$MSΜp גw1G]EPI0ż}ݛ>f}\I6oM3n.&~cR# :ɿǔ'<6ybW/Zv̄);Y'-iMV";y$iɖL~<8Yiz@)Řy#:>V﹡ X"eՁXFE':c0QT<^,sIzeN^,wrMQٗ,.v5fVrW]B3),șH UcS BGS:wԭ.\ܳ3N x")_Áx;V91Ɖ<@[0(pAـi!H/5;fRg:`Z椨en maT(Qq@qpN?H ~&N^,P\jP< cUEC+S`SY.Ey|Hw\f8<0OOaӓ%s@@:Z~`nFu:Xd$-AL"ꁏ+8 R\u3rӲ wCA@ӓc,Am]\(E4/HlOש~O_(}+PW5i*vs/1%Ce<8yp l紿, ^Y(u?- ga( '>n`Vg(_#X(A=bDoӫ'|fq0Fأ\,p(R|{Nfik& qp+Ȯ1cniLNF"fqvU1a"mdv%L9OV7bv*cVq67AA~yPAUSKk'xhx(c/6qR{PmФ|ER|ImA셸A']916j3П ,0rF˂ڷKhBΟ%J1?iMXYA=Lg~fM u=یcO07BZ\W62WkPkz\_;\E`,β% sOG@Ț(CG/YDvœC<ݢB΁ FbxBÛ@Dr*m 9O& ' LmFC\ M}tzZm)y 5=#eH_+ 80_) ςFg Jk1\Z>^rw~oȌ 9yF{4HATXO6S`MRRi^@4\q"k:ޡ5U!7&@i-ԁ4e㟩L|jAsY = fVTDmjܗRyp԰L1?EC9pGJBuZW/~[fp_sMxDI:m5Qsg<(HAm ogvc)eM.z 4T2X d1yf@ș P'4Iӑ(n0 󺧀-02TIԚ!'9z}P#$[PṸ<4/Xp7ރx" yByuǧaRR.Ln.?׆|04Tz7yGz $ǫޠUi>7ZL9]f65_6mJzPG<%k}u2ƎgJ>\L{]ihMehTfsgVg߳- xmr˜/}*ћ'2 u8glNoioPQszЙ)x|DТK;h'"'? 
[binary content omitted: the archive's PDF report assets
 emva1288/report/files/EMVA1288Logo.pdf and emva1288/report/files/missinglogo.pdf
 (plus a matplotlib-generated plot PDF) are embedded here as compressed binary
 streams that cannot be reproduced as text]

emva1288/camera/points_generator.py

import numpy as np
from collections import OrderedDict


class PointsGenerator:
    """Class that generates a dictionary of operation points for an emva test.

    The points are stored in the :attr:`points` attribute as a dictionary
    split into 'spatial' and 'temporal' operation points. Under each key
    there is a dictionary whose keys are the different exposure times and
    whose values are the lists of radiances under which the camera is
    illuminated.
    """
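    # Illustrative note (added, not from the original module): for an
    # exposure time variation run the generated structure is assumed to
    # look like
    #
    #     {'temporal': {texp_1: [radiance, 0.0], texp_2: [radiance, 0.0], ...},
    #      'spatial':  {texp_mid: [radiance, 0.0]}}
    #
    # i.e. every exposure time maps to a bright and a dark (0.0) radiance,
    # and only the midpoint exposure appears under 'spatial'
    # (see _get_points below).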
    def __init__(self, cam,
                 exposure_min=None,
                 exposure_max=None,
                 exposure_fixed=None,
                 radiance_min=None,
                 radiance_max=None,
                 gain=None,
                 blackref=None,
                 steps=100):
        """Point generator init method.

        Parameters
        ----------
        cam :
            The camera object that will be taking the images.
        exposure_min : float, optional
            The minimal exposure time (in ns).
        exposure_max : float, optional
            The maximal exposure time (in ns).
        exposure_fixed : float, optional
            By default, the points given are for an exposure time variation
            test (if this is None). If a value is given to this kwarg, this
            will be the camera's exposure time (in ns) at which the operation
            points will be set for an illumination variation test.
        radiance_min : float, optional
            The minimal radiance (in W/cm^2/sr). If None, a value above dark
            illumination will be automatically chosen.
        radiance_max : float, optional
            The maximal radiance (in W/cm^2/sr). If None, the maximal
            radiation will be taken as the saturation radiation for the
            exposure time given in the exposure_fixed kwarg.
        gain : float, optional
            The camera's gain at which we want the test to run.
        blackref : float, optional
            The camera's blackoffset at which we want the test to run.
        steps : int, optional
            The number of points in the test.
        """
        self._cam = cam
        self._steps = steps
        self._exposure_min = exposure_min or self._cam.exposure_min
        self._exposure_max = exposure_max or self._cam.exposure_max
        self._exposure = exposure_fixed
        self._radiance_min = radiance_min
        self._radiance_max = radiance_max
        self._gain = gain or self._cam.K
        self._blackref = blackref or self._cam.blackoffset

        if self._exposure is None:
            # Get radiance for saturation at maximal exposure time
            # Only if it is for an exposure time variation test
            self._cam.exposure = self._exposure_max
            self._radiance = self._cam.get_radiance_for()
        else:
            # get radiances for radiation variation
            self._cam.exposure = self._exposure
            self._cam.K = self._gain
            self._cam.blackoffset = self._blackref
            m = self._cam.grab(0.0).mean()
            target = (self._cam.img_max - m) / self._steps + m
            self._radiance_min = self._cam.get_radiance_for(mean=target)
            self._radiance_max = self._cam.get_radiance_for()

        # By default, exposure time variation data points
        # If exposure_fixed is given, it is photons variation
        self._points = self._get_points()

    def _get_points(self):
        spatial = OrderedDict()
        temporal = OrderedDict()
        if self._exposure is None:
            # Exposure time variation
            # round to only have one decimal
            exposures = np.round(np.linspace(self._exposure_min,
                                             self._exposure_max,
                                             self._steps), 1)
            # only one radiance
            radiances = [self._radiance, 0.0]
            # Main loop to compute points
            for n, texp in enumerate(exposures):
                if self._is_point_spatial(n):
                    spatial[texp] = radiances
                temporal[texp] = radiances
        else:
            # Photons variations
            # only one exposure time
            radiances = np.linspace(self._radiance_min,
                                    self._radiance_max,
                                    self._steps).tolist()
            # round to only have one decimal
            self._exposure = round(self._exposure)
            radiances.append(0.0)
            spatial[self._exposure] = [radiances[self._steps // 2], 0.0]
            temporal[self._exposure] = radiances
        return {'spatial': spatial, 'temporal': temporal}

    @property
    def points(self):
        """The operation points."""
        return self._points

    def _is_point_spatial(self, n):
        """Check if a spatial test must be done at this point.

        Spatial tests are executed at mid-run for both dark and bright tests.
        """
        middle = self._steps // 2
        if n in (middle, self._steps + middle):
            return True
        return False
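
# Usage sketch (illustrative, not part of the library): building the
# operation points for the simulated camera defined in
# emva1288/camera/camera.py.
#
#     from emva1288.camera.camera import Camera
#     from emva1288.camera.points_generator import PointsGenerator
#
#     cam = Camera()
#     points = PointsGenerator(cam, steps=10).points
#     sorted(points.keys())      # ['spatial', 'temporal']
#     len(points['temporal'])    # one entry per exposure time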

emva1288/camera/__init__.py

from .camera import Camera


emva1288/camera/routines.py

import numpy as np


def qe(wavelength):
    """Simulate quantum efficiency for a specific wavelength.

    Parameters
    ----------
    wavelength : float
        The wavelength to compute the quantum efficiency for.

    Returns
    -------
    float :
        The simulated quantum efficiency.
    """
    # For the time being we just simulate a simple gaussian
    s = 0.5
    u = 0.
    min_ = 350
    max_ = 800
    w = -1 + (wavelength) / (max_ - min_)
    qe = -.1 + np.exp((-(w - u) ** 2) / (2 * s ** 2)) / (np.sqrt(np.pi * 2 * s ** 2))
    if qe < 0:
        return 0
    return qe


def nearest_value(value, array):
    """Return the nearest value in the given array.

    Parameters
    ----------
    value : float
        The value we want to get as near as possible.
    array : array_like
        The array containing the available values to get near the value.

    Returns
    -------
    The nearest element of `array` to `value`.
    """
    # http://stackoverflow.com/questions/2566412/find-nearest-value-in-numpy-array
    idx = (np.abs(array - value)).argmin()
    return array[idx]


def get_irradiance(radiance, f):
    """Get the irradiance, in W/cm^2.

    Parameters
    ----------
    radiance : float
        The radiance (in W/sr/cm^2) to compute the irradiance from.
    f : float
        The f number of the setup.

    Returns
    -------
    float :
        The irradiance in W/cm^2
    """
    j = np.pi * radiance / (1 + ((2 * f) ** 2))
    return j


def get_photons(exposure, wavelength, radiance, pixel_area, f_number):
    """Get the number of photons hitting one pixel.

    Parameters
    ----------
    exposure : float
        The pixel exposure time (in ns) to the light.
    wavelength : float
        The light wavelength hitting the pixel (in nm).
    radiance : float
        The radiance hitting the sensor (in W/sr/cm^2).
    pixel_area : float
        The pixel area in um^2.
    f_number : float
        The f number of the setup.

    Returns
    -------
    float :
        The number of photons that hit the pixel.
    """
    h = 6.63e-34
    c = 3.00e8
    w = wavelength * 1e-9
    t = exposure * 1e-9
    a = pixel_area * 1e-12
    j = get_irradiance(radiance, f_number)
    return j * a * t * w / (h * c)


def get_radiance(exposure, wavelength, photons, pixel_area, f_number):
    """From the number of photons, get the radiance hitting a pixel.

    Parameters
    ----------
    exposure : float
        The pixel exposure time to the light (in ns).
    wavelength : float
        The photons' wavelength (in nm).
    photons : float
        The number of photons that hit the pixel.
    pixel_area : float
        The pixel area in um^2.
    f_number : float
        The f number of the setup.

    Returns
    -------
    float :
        The radiance that hit the pixel and gave the number of photons.
    """
    h = 6.63e-34
    c = 3.00e8
    w = wavelength * 1e-9
    t = exposure * 1e-9
    a = pixel_area * 1e-12
    # p = j * a * t * w / (h * c)
    # j = np.pi * radiance / (1 + ((2 * f) ** 2))
    j = photons * h * c / (a * t * w)
    r = j * (1 + ((2 * f_number) ** 2)) / np.pi
    return r
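
# Consistency sketch (illustrative, not part of the library): get_photons and
# get_radiance are inverse operations, so converting a radiance to a photon
# count and back recovers the original radiance up to floating point error.
# The numeric inputs below are arbitrary examples.
#
#     exposure, wavelength, f_number = 1000000, 525, 8   # ns, nm, f-number
#     pixel_area, radiance = 25, 0.1                      # um^2, W/(sr*cm^2)
#     p = get_photons(exposure, wavelength, radiance, pixel_area, f_number)
#     r = get_radiance(exposure, wavelength, p, pixel_area, f_number)
#     assert abs(r - radiance) < 1e-9 * radiance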

emva1288/camera/dataset_generator.py

# This module contains a generator that generates a descriptor file and the
# corresponding images using the implemented camera.

from emva1288.camera.camera import Camera as Cam
from emva1288.camera.points_generator import PointsGenerator
from collections import OrderedDict
import numpy as np
import tempfile
import os
from PIL import Image


def _get_emva_gain(cam):
    """Find the gain to satisfy EMVA1288 requirements"""
    gini = cam.K
    # Find gain with a minimum temporal noise of 0.5DN
    g = cam.Ks[0]
    for gain in cam.Ks:
        cam.K = gain
        g = gain
        img1 = cam.grab(0).astype(np.int64)
        img2 = cam.grab(0).astype(np.int64)
        if (img1 - img2).std() > 0.5:
            break
    cam.K = gini
    return g


def _get_emva_blackoffset(cam):
    """Find the blackoffset to satisfy EMVA1288 requirements"""
    bini = cam.blackoffset
    # Find black offset with a maximum of 0.5% of values at Zero
    bo = cam.blackoffsets[0]
    pixels = cam.width * cam.height
    for i in cam.blackoffsets:
        cam.blackoffset = i
        img = cam.grab(0)
        bo = i
        if np.count_nonzero(img) > pixels * .995:
            break
    cam.blackoffset = bini
    return bo


class DatasetGenerator:
    """Dataset generator.

    Creates a descriptor file and the corresponding linked images for an
    exposure variation test example according to the emva1288 standard.
    The images are created using the implemented camera in the emva module.
    """

    def __init__(self,
                 steps=100,
                 L=50,
                 version='3.0',
                 image_format='png',  # best memory consumption
                 outdir=None,  # directory where to save the dataset
                 radiance_min=None,
                 radiance_max=None,
                 exposure_fixed=None,
                 **kwargs):
        """Dataset generator init method.

        The generator uses a
        :class:`~emva1288.camera.points_generator.PointsGenerator`
        object to create the operation points. It then grabs the images for
        these points using a :class:`~emva1288.camera.camera.Camera`
        simulator object. The camera is initialized according to the given
        kwargs.

        Then, after getting the test points, it makes the images by changing
        the camera's exposure time or the radiation (see :meth:`run_test`)
        and saves the images and the descriptor file (see :meth:`save_image`).

        Parameters
        ----------
        L : int, optional
            The number of images taken during a spatial test point.
        version : str, optional
            Data version to add in descriptor file.
        image_format : str, optional
            The images' format when they are saved.
        outdir : str, optional
            The output directory where the descriptor file and the images
            will be saved. If None, it will create a temporary directory
            that will be deleted (and its contents) when the dataset
            generator object is deleted.
        radiance_min : float, optional
            Same as in
            :class:`~emva1288.camera.points_generator.PointsGenerator`.
        radiance_max : float, optional
            Same as in
            :class:`~emva1288.camera.points_generator.PointsGenerator`.
        exposure_fixed : float, optional
            Same as in
            :class:`~emva1288.camera.points_generator.PointsGenerator`.
        kwargs :
            All other kwargs are passed to the camera.
        """
        self._steps = steps  # number of points to take
        self.cam = Cam(**kwargs)
        # set the camera parameters for the test
        self.cam.exposure = self.cam.exposure_min
        # If no blackoffset/gain are specified find them according to standard
        if 'blackoffset' not in kwargs:
            self.cam.blackoffset = _get_emva_blackoffset(self.cam)
        if 'K' not in kwargs:
            self.cam.K = _get_emva_gain(self.cam)

        # create test points
        points = PointsGenerator(self.cam,
                                 radiance_min=radiance_min,
                                 radiance_max=radiance_max,
                                 exposure_fixed=exposure_fixed,
                                 steps=self._steps)
        self._points = points.points

        self._L = L  # number of images to take for a spatial test
        self._version = version  # data version
        # store image format
        self._image_format = image_format
        # images will be saved one at a time during the generation into outdir
        self.outdir = outdir
        # create temporary directory to store the dataset
        if outdir is None:
            self.tempdir = tempfile.TemporaryDirectory()
            self.outdir = self.tempdir.name
        # create dir where images will be saved
        os.makedirs(os.path.join(self.outdir, 'images'))
        # run test
        self._descriptor_path = self.run_test()

    @property
    def points(self):
        """The test points suite."""
        return self._points

    @property
    def descriptor_path(self):
        """The absolute path to the descriptor file."""
        return self._descriptor_path

    def _is_point_spatial_test(self, i):
        """Check if a point index should be a spatial test.

        Spatial points are done at midpoint of bright and dark series.
        """
        v = self._steps // 2
        if i in (v, self._steps + v):
            return True
        return False

    def _get_descriptor_line(self, exposure, radiance):
        """Create the line introducing a test point images in descriptor."""
        if radiance == 0.0:
            # dark image
            return "d %.1f" % exposure
        # bright image
        # round photons count to three decimals
        return "b %.1f %.3f" % (exposure,
                                round(self.cam.get_photons(radiance), 3))

    def _get_image_names(self, number, L):
        """Create an image filename."""
        names = []
        for l in range(L):
            names.append("img_%04d.%s" % (number, self._image_format))
            number += 1
        return names, number

    def _get_imgs(self, radiance, L):
        """Create a list of images from the given radiance."""
        imgs = []
        for l in range(L):
            imgs.append(self.cam.grab(radiance))
        return imgs

    def run_test(self):
        """Run the test points, save the images and generate descriptor."""
        descriptor_text = OrderedDict()
        image_number = 0
        # descriptor file path
        path = os.path.join(self.outdir, "EMVA1288descriptor.txt")
        # open descriptor file to write images in it
        with open(path, "w") as f:
            # write version
            f.write("v %s\n" % self._version)
            # write camera's properties
            f.write("n %i %i %i\n" % (self.cam.bit_depth, self.cam.width,
                                      self.cam.height))
            for kind in ('temporal', 'spatial'):
                # number of images to take
                L = 2
                if kind == 'spatial':
                    L = self._L
                for texp, radiances in self.points[kind].items():
                    # set camera
                    self.cam.exposure = texp
                    # Grab all images for these radiances
                    for radiance in radiances:
                        # Get descriptor line introducing the images
                        f.write("%s\n" % self._get_descriptor_line(texp,
                                                                   radiance))
                        for l in range(L):
                            # grab
                            img = self.cam.grab(radiance)
                            # write the name in descriptor
                            name = "image%i.%s" % (image_number,
                                                   self._image_format)
                            f.write("i images\\%s\n" % name)
                            image_number += 1
                            # save image
                            self.save_image(img, name)
        # return descriptor path
        return path

    def save_image(self, img, name):
        """Save the image."""
        dtype = np.uint32
        mode = 'I'
        if self.cam.bit_depth <= 8:
            # 8 bit images have special format for PIL
            dtype = np.uint8
            mode = 'L'
        im = Image.fromarray(img.astype(dtype), mode)
        path = os.path.join(self.outdir, 'images', name)
        # (filename already contains image format)
        im.save(path)

emva1288/camera/camera.py

import numpy as np
from emva1288.camera import routines


class Camera(object):
    """Camera simulator.

    It creates images according to the given parameters.
    """

    def __init__(self,
                 f_number=8,  # F-number of the light source/camera setup
                 pixel_area=25,  # um^2
                 bit_depth=8,  # Bit depth of the image [8, 10, 12, 14]
                 width=640,
                 height=480,
                 temperature=22,  # Sensor temperature in ^oC
                 temperature_ref=30,  # Reference temperature
                 temperature_doubling=8,  # Doubling temperature
                 wavelength=525,  # illumination wavelength
                 qe=None,  # Quantum efficiency for the given wavelength
                 exposure=1000000,  # Exposure time in ns
                 exposure_min=50000,  # Minimum exposure time in ns
                 exposure_max=500000000,  # Maximum exposure time in ns
                 K=0.1,  # Overall system gain
                 K_min=0.1,
                 K_max=17.,
                 K_steps=255,
                 blackoffset=0,
                 blackoffset_min=0,
                 blackoffset_max=None,
                 blackoffset_steps=255,
                 dark_current_ref=30,
                 dark_signal_0=0,
                 sigma2_dark_0=0,
                 dsnu=None,
                 prnu=None):
        """Camera simulator init method.

        Parameters
        ----------
        f_number : float, optional
            The emva1288 f_number for the camera.
        pixel_area : float, optional
            The area of one pixel (in um^2).
        bit_depth : int, optional
            The number of bits allowed for one pixel value.
        width : int, optional
            The number of columns in the image.
        height : int, optional
            The number of rows in the image.
        temperature : float, optional
            The camera's sensor temperature in degrees Celsius.
        temperature_ref : float, optional
            The reference temperature (at which the dark current is equal to
            the reference dark current).
        temperature_doubling : float, optional
            The doubling temperature (at which the dark current is two times
            the reference dark current).
        wavelength : float, optional
            The light wavelength hitting the sensor (in nm).
        qe : float, optional
            Quantum efficiency (between 0 and 1). If None, a simulated
            quantum efficiency is chosen with the
            :func:`~emva1288.camera.routines.qe` function.
        exposure : float, optional
            The camera's exposure time in ns.
        exposure_min : float, optional
            The camera's minimal exposure time in ns.
        exposure_max : float, optional
            The camera's maximal exposure time in ns.
        K : float, optional
            The overall system gain (in DN/e^-).
        K_min : float, optional
            The overall minimal system gain (in DN/e^-).
        K_max : float, optional
            The overall maximal system gain (in DN/e^-).
        K_steps : int, optional
            The number of available intermediate overall system gains
            between K_min and K_max.
        blackoffset : float, optional
            The dark signal offset for each pixel (in DN).
        blackoffset_min : float, optional
            The minimal dark signal offset for each pixel (in DN).
        blackoffset_max : float, optional
            The maximal dark signal offset for each pixel (in DN).
        blackoffset_steps : int, optional
            The number of available blackoffsets between the minimal and
            maximal blackoffsets.
        dark_current_ref : float, optional
            The reference dark current used for computing the total dark
            current.
        dark_signal_0 : float, optional
            The offset for the computation of the mean number of electrons
            in the dark for one pixel (in DN/s).
        sigma2_dark_0 : float, optional
            The offset for the computation of the dark signal (the dark
            signal standard deviation).
        dsnu : np.array, optional
            DSNU image in DN, array with the same shape of the image that
            is added to every image.
        prnu : np.array, optional
            PRNU image in percentages (1 = 100%), array with the same shape
            of the image. Every image is multiplied by it.
        """
        self._pixel_area = pixel_area
        self._bit_depth = bit_depth
        self._img_max = 2 ** int(bit_depth) - 1
        self._width = width
        self._height = height
        self._temperature_ref = temperature_ref
        self._temperature_doubling = temperature_doubling

        self._qe = qe
        # When no specific qe is provided we simulate one
        if qe is None:
            self._qe = routines.qe(wavelength)

        self._dark_current_ref = dark_current_ref
        self._dark_signal_0 = dark_signal_0
        self._sigma2_dark_0 = sigma2_dark_0

        self._exposure = exposure
        self._exposure_min = exposure_min
        self._exposure_max = exposure_max

        self.__Ks = np.linspace(K_min, K_max, num=K_steps)
        self._K = None
        self.K = K

        # A good guesstimate for maximum blackoffset is 1/16th of the
        # full range
        if not blackoffset_max:
            blackoffset_max = self.img_max // 16
        self.__blackoffsets = np.linspace(blackoffset_min, blackoffset_max,
                                          num=blackoffset_steps)
        self._blackoffset = None
        self.blackoffset = blackoffset

        self._dsnu = dsnu
        self._prnu = prnu
        if dsnu is None:
            self._dsnu = np.zeros((self.height, self.width))
        if prnu is None:
            self._prnu = np.ones((self.height, self.width))

        self.environment = {'temperature': temperature,
                            'wavelength': wavelength,
                            'f_number': f_number}
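    # Usage sketch (illustrative comment, not from the original class):
    # grabbing a dark frame and a near-saturation frame with otherwise
    # default parameters.
    #
    #     cam = Camera(width=64, height=48)
    #     dark = cam.grab(0)                         # dark image, (48, 64)
    #     bright = cam.grab(cam.get_radiance_for())  # close to saturation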
maximal exposure time (in ns).""" return self._exposure_max @property def K(self): """The overall system gain (in DN/e^-). :Setter: The setter uses the :func:`~emva1288.camera.routines.nearest_value` function to set the system gain to the nearest value given to the setter. This is because not all system gains are possible but rather a linear sample between the minimal and maximal value. """ return self._K @K.setter def K(self, value): self._K = routines.nearest_value(value, self.__Ks) @property def Ks(self): """The array of all the available system gains (in DN/e^-).""" return self.__Ks @property def blackoffset(self): """The system dark signal offset (in DN). :Setter: The setter uses the :func:`~emva1288.camera.routines.nearest_value` function to set the black signal offset to the nearest value given to the setter. This is because not all black offsets are possible but rather a linear sample between the minimal and maximal value. """ return self._blackoffset @blackoffset.setter def blackoffset(self, value): self._blackoffset = routines.nearest_value(value, self.__blackoffsets) @property def blackoffsets(self): """The array of all blackoffsets (in DN).""" return self.__blackoffsets def grab(self, radiance, temperature=None, wavelength=None, f_number=None): """ Create an image based on the mean and standard deviation from the EMVA1288 parameters. The image is generated using a normal distribution for each pixel. Parameters ---------- radiance : float The sensor's illumination in W/cm^2/sr. This is the only mandatory argument because it is frequently changed during an test. temperature : float, optional The camera's temperature in degrees Celsius. If None, the environment's temperature will be taken. wavelength : float, optional The illumination wavelength in nanometers. If None, the environment's wavelength will be taken. f_number : float, optional The optical setup f_number. If None, the environment's f_number will be taken. """ clipping_point = int(self.img_max) u_y = self._u_y(radiance, temperature=temperature, wavelength=wavelength, f_number=f_number) s2_y = np.sqrt(self._s2_y(radiance, temperature=temperature, wavelength=wavelength, f_number=f_number)) img = np.random.normal(loc=u_y, scale=s2_y, size=(self.height, self.width)) # not the best but hope it works as approach for prnu dsnu img *= self._prnu img += self._dsnu img += self.blackoffset np.rint(img, img) np.clip(img, 0, clipping_point, img) return img.astype(np.uint64) def _u_y(self, radiance, temperature=None, wavelength=None, f_number=None): """ Mean digital value (in DN) of the image. """ uy = self.K * (self._u_d(temperature=temperature) + self._u_e(radiance, wavelength=wavelength, f_number=f_number)) return uy def _u_e(self, radiance, wavelength=None, f_number=None): """ Mean number of electrons per pixel during exposure time. """ u_e = self._qe * self.get_photons(radiance, wavelength=wavelength, f_number=f_number) return u_e def _s2_e(self, radiance, wavelength=None, f_number=None): """ Variance of the number of electrons. Same as u_e because the number of electrons is supposed to be distributed by a Poisson distribution where the mean equals the variance. """ return self._u_e(radiance, wavelength=wavelength, f_number=f_number) def _u_d(self, temperature=None): """ Mean number of electrons without light. """ u_d = ((self._u_i(temperature=temperature) * self.exposure / (10 ** 9)) + self._dark_signal_0) return u_d def _s2_q(self): """ Variance of the quantization noise. 
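# A self-contained sketch of the image synthesis performed by grab() above:
# every pixel is drawn from a normal distribution whose mean is the mean
# digital value u_y and whose standard deviation is sqrt(s2_y), the temporal
# noise; PRNU (multiplicative), DSNU (additive) and the black offset are then
# applied before rounding and clipping to the ADC range.  The numbers here
# are illustrative only.
import numpy as np

height, width, img_max = 480, 640, 2 ** 8 - 1
u_y, s2_y = 120.0, 15.0                  # mean [DN] and temporal variance [DN^2]
prnu = np.ones((height, width))          # 1.0 everywhere means "no PRNU"
dsnu = np.zeros((height, width))         # 0 DN everywhere means "no DSNU"
blackoffset = 0

img = np.random.normal(loc=u_y, scale=np.sqrt(s2_y), size=(height, width))
img *= prnu                              # pixel-wise gain non-uniformity
img += dsnu                              # pixel-wise dark-signal non-uniformity
img += blackoffset
np.rint(img, img)                        # round to whole digital numbers
np.clip(img, 0, img_max, img)            # clip to the ADC range
img = img.astype(np.uint64)
print(img.mean(), img.std())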
""" return 1.0 / 12.0 def _s2_y(self, radiance, temperature=None, wavelength=None, f_number=None): """ Variance of the digital signal (= temporal noise). """ s2_y = ((self.K ** 2) * (self._s2_d(temperature=temperature) + self._s2_e(radiance, wavelength=wavelength, f_number=f_number)) + self._s2_q()) return s2_y def _u_i(self, temperature=None): """ Dark current (in DN/s). """ if temperature is None: temperature = self.environment['temperature'] u_i = 1. * self._dark_current_ref * 2 ** ( (temperature - self._temperature_ref) / self._temperature_doubling) return u_i def _s2_d(self, temperature=None): """ Variance of the dark signal = Dark temporal noise. """ s2_d = self._sigma2_dark_0 + (self._u_i(temperature=temperature) * self.exposure / (10 ** 9)) return s2_d def get_radiance_for(self, mean=None, exposure=None): """Radiance to achieve saturation. Calls the :func:`~emva1288.camera.routines.get_radiance` function to get the radiance for saturation. Parameters ---------- mean : float, optional The saturation value of the camera. If None, this value is set to the :attr:`img_max` attribute. exposure : float, optional The camera's exposure time at which the radiance for saturation value will be computed. If None, the exposure time taken will be the camera's actual exposure time. Returns ------- float : The radiance at which, for the given saturation value and the given exposure time, the camera saturates. """ if not mean: mean = self.img_max if not exposure: exposure = self.exposure ud = self._u_d() ue = (mean / self.K) - ud up = ue / self._qe radiance = routines.get_radiance(exposure, self.environment['wavelength'], up, self.pixel_area, self.environment['f_number']) return radiance def get_photons(self, radiance, exposure=None, wavelength=None, f_number=None): """Computes the number of photons received by one pixel. Uses the :func:`~emva1288.camera.routines.get_photons` function to compute this number. Parameters ---------- radiance : float The radiance exposed to the camera (in Wsr^-1cm^-2). exposure : float, optional The pixel's exposure time in ns. Returns ------- float : The number of photons received by one pixel. 
""" if exposure is None: exposure = self.exposure if f_number is None: f_number = self.environment['f_number'] if wavelength is None: wavelength = self.environment['wavelength'] return routines.get_photons(exposure, wavelength, radiance, self.pixel_area, f_number) PK jHemva1288/unittests/__init__.pyPK=Iֻ$p p !emva1288/unittests/test_loader.pyimport unittest from emva1288.camera.dataset_generator import DatasetGenerator from emva1288.process.parser import ParseEmvaDescriptorFile from emva1288.process.loader import LoadImageData class TestLoader(unittest.TestCase): _height = 50 _width = 100 _bit_depth = 8 _L = 50 _steps = 10 def _init(self): # Create dataset to load dataset = DatasetGenerator(height=self._height, width=self._width, bit_depth=self._bit_depth, L=self._L, steps=self._steps) descriptor_path = dataset.descriptor_path # create the parser parser = ParseEmvaDescriptorFile(descriptor_path) # create loader loader = LoadImageData(parser.images) return dataset, parser, loader def test_loader(self): d, p, l = self._init() self.dataset = d self.parser = p self.loader = l # Test that checks if loader actually loads data from images given # by the parser # test that the data attribute contains the good infos data = self.loader.data self.assertEqual(data['height'], self._height) self.assertEqual(data['width'], self._width) first_exp_time = self.dataset.cam.exposure_min # temporal data should contain 2 datasets (one bright one dark) temporal_data = data['temporal'][first_exp_time] self.assertEqual(len(temporal_data), 2) self.assertTrue(0.0 in temporal_data.keys()) # there should be steps data sets for temporal self.assertEqual(len(self.loader.data['temporal']), self._steps) spatial_texp = list(self.dataset.points['spatial'].keys())[0] # spatial data should contain 2 sets (one dark and one bright) spatial_data = data['spatial'][spatial_texp] self.assertEqual(len(spatial_data), 2) self.assertTrue(0.0 in spatial_data.keys()) # data should be made of L images self.assertEqual(spatial_data[0.0]['L'], self._L) # check data type and format for typ in ('sum', 'pvar'): # data is sum and pvar self.assertTrue(typ in spatial_data[0.0].keys()) self.assertTrue(typ in temporal_data[0.0].keys()) # spatial data is sum images and pvar self.assertEqual(spatial_data[0.0][typ].shape, (self._height, self._width)) del self.parser del self.dataset del self.loader def test_loader_errors(self): # check that images with no dark images raise ValueError with self.assertRaises(ValueError): images = {'temporal': {0: {0.1: ""}}, 'spatial': {0: {0.1: ""}}} l = LoadImageData(images) # check that one image for temporal instead of 2 raise valueerror with self.assertRaises(ValueError): images = {'temporal': {0: {0.0: ""}}, 'spatial': {0: {0.0: ""}}} l = LoadImageData(images) # Check that an image that does not exist raise an IOError with self.assertRaises(IOError): images = {'temporal': {0: {0.0: ["."], 0.1: ["."]}}, 'spatial': {0: {0.0: ["."], 0.1: ["."]}}} l = LoadImageData(images) PK7~ IDdemva1288/unittests/test_data.pyimport unittest from emva1288.camera.dataset_generator import DatasetGenerator from emva1288.process.parser import ParseEmvaDescriptorFile from emva1288.process.loader import LoadImageData from emva1288.process.data import Data1288 from emva1288.unittests.test_routines import del_obj class TestData(unittest.TestCase): _height = 50 _width = 100 _bit_depth = 8 _L = 50 _steps = 10 _radiance_min = None _exposure_max = 50000000 def _init(self): # create dataset dataset = DatasetGenerator(height=self._height, 
width=self._width, bit_depth=self._bit_depth, L=self._L, steps=self._steps, radiance_min=self._radiance_min, exposure_max=self._exposure_max) # parse dataset parser = ParseEmvaDescriptorFile(dataset.descriptor_path) # load images loader = LoadImageData(parser.images) # create data data = Data1288(loader.data) return dataset, parser, loader, data def test_data(self): """Test that data1288 retrieves information.""" ds, p, l, d = self._init() self.dataset = ds self.parser = p self.loader = l self.data = d # test number of pixels self.assertEqual(self.data.pixels, self._height * self._width) # test data attribute ##################### data = self.data.data # Test spatial self.assertEqual(data['spatial']['L'], self._L) self.assertEqual(data['spatial']['L_dark'], self._L) # same L for dark # spatial exposure time texp = list(self.dataset.points['spatial'].keys())[0] self.assertEqual(data['spatial']['texp'], texp) # spatial photons radiance = self.dataset.points['spatial'][texp][0] photons = round(self.dataset.cam.get_photons(radiance), 3) self.assertEqual(data['spatial']['u_p'], photons) # spatial data are images for typ in ('avg', 'avg_dark', 'pvar', 'pvar_dark', 'sum', 'sum_dark', 'var', 'var_dark'): self.assertTrue(typ in data['spatial'].keys()) self.assertEqual(data['spatial'][typ].shape, (self._height, self._width)) # test temporal # all temporal data are arrays of length steps for typ in ('s2_y', 's2_ydark', 'texp', 'u_p', 'u_y', 'u_ydark'): self.assertTrue(typ in data['temporal'].keys()) self.assertEqual(len(data['temporal'][typ]), self._steps) # test exposure times and photons have well be retrieved times = list(self.dataset.points['temporal'].keys()) for i, (exp, photons) in enumerate(zip(data['temporal']['texp'], data['temporal']['u_p'])): time = times[i] radiance = self.dataset.points['temporal'][time][0] photon = round(self.dataset.cam.get_photons(radiance, time), 3) self.assertEqual(exp, times[i]) self.assertEqual(photons, photon) # delete objects del_obj(self.dataset, self.parser, self.loader, self.data) def test_1exposure(self): """Test that when there is only one exposure time, the temporal data dictionary has same length than the number of photons.""" self._radiance_min = 0.1 self._exposure_max = 1000000 ds, p, l, d = self._init() self.dataset = ds self.parser = p self.loader = l self.data = d temporal = self.data.data['temporal'] l = len(temporal['u_p']) # test that all temporal data arrays have same length self.assertEqual(len(temporal['texp']), l) self.assertEqual(len(temporal['u_ydark']), l) self.assertEqual(len(temporal['s2_ydark']), l) del_obj(self.dataset, self.parser, self.loader, self.data) def test_data_errors(self): # Test that given an incomplete data dictionary, it will raise errors # if there is no dark data in temporal with self.assertRaises(ValueError): dat = {'width': 1, 'height': 1, 'temporal': {0: {0.1: None}}, 'spatial': {0: {0.1: None}}} d = Data1288(dat) # if no dark data in spatial with self.assertRaises(ValueError): dat = {'width': 1, 'height': 1, 'temporal': {0: {0.0: {'sum': 0, 'pvar': 0}, 0.1: {'sum': 0, 'pvar': 0}}}, 'spatial': {0: {0.1: {'sum': 0, 'pvar': 0}}}} d = Data1288(dat) # if there is no bright image for each dark with self.assertRaises(ValueError): dat = {'width': 1, 'height': 1, 'temporal': {0: {0.0: None}}, 'spatial': {0: {0.0: None}}} d = Data1288(dat) # If there is no bright image for spatial with self.assertRaises(ValueError): dat = {'width': 1, 'height': 1, 'temporal': {0: {0.0: {'sum': 0, 'pvar': 0}, 0.1: {'sum': 0, 'pvar': 0}}}, 
'spatial': {0: {0.0: {'sum': 0, 'pvar': 0}}}} d = Data1288(dat) # If there is more than 1 exposure time with spatial data with self.assertRaises(ValueError): dat = {'width': 1, 'height': 1, 'temporal': {0: {0.0: {'sum': 0, 'pvar': 0}, 0.1: {'sum': 0, 'pvar': 0}}}, 'spatial': {0: {0.0: {'sum': 0, 'pvar': 0}, 0.1: {'sum': 0, 'pvar': 0}}, 1: {0.0: {'sum': 0, 'pvar': 0}, 0.1: {'sum': 0, 'pvar': 0}}}} d = Data1288(dat) PK7~ I !emva1288/unittests/test_parser.pyimport unittest from emva1288.process.parser import ParseEmvaDescriptorFile from emva1288.camera.dataset_generator import DatasetGenerator class TestParser(unittest.TestCase): # attrbutes for dataset generator _bit_depth = 8 _version = '3.0' _height = 50 _width = 100 _L = 50 _steps = 10 # Don't test error raised because those errors appears when descriptor file # is not well formatted and not because of a code failure. def setUp(self): # create data descriptor file for parser self.d_generator = DatasetGenerator(bit_depth=self._bit_depth, height=self._height, width=self._width, L=self._L, version=self._version, steps=self._steps) def tearDown(self): # delete generator to delete all the generated files del self.d_generator def test_good_descriptorfile(self): # test that the parser actually parses the file with the generated file descriptor_file = self.d_generator.descriptor_path parser = ParseEmvaDescriptorFile(descriptor_file) # data manually taken from the file: bits = self._bit_depth height = self._height width = self._width times = self.d_generator.points['temporal'].keys() first_exp_time = list(times)[0] first_rad = self.d_generator.points['temporal'][first_exp_time][0] first_pcount = round(self.d_generator.cam.get_photons(first_rad, first_exp_time), 3) # check data have correctly been parsed self.assertEqual(parser.version, self._version) self.assertEqual(parser.format['bits'], bits) self.assertEqual(parser.format['height'], height) self.assertEqual(parser.format['width'], width) # for this expTime and pcount, there is only 2 images thus temporal im = parser.images['temporal'][first_exp_time][first_pcount] # The length of this dict should be 2 self.assertEqual(len(im), 2) # For spatial data points = self.d_generator.points['spatial'] spatial_texp = list(points.keys())[0] spatial_rad = list(points.values())[0][0] # round here because pcount are rounded in descriptor file spatial_pcount = round(self.d_generator.cam.get_photons(spatial_rad), 3) im_spatial = parser.images['spatial'][spatial_texp][spatial_pcount] # the length of this dict should be greater than 2 self.assertGreater(len(im_spatial), 2) # For dark images, pcount should be 0 # for this time, there is a dark image # dark images are normal images with 0.0 photon count keys = parser.images['temporal'][first_exp_time].keys() self.assertTrue(0.0 in keys) PKK IVƋ>5>5"emva1288/unittests/test_results.pyimport unittest from emva1288.process.parser import ParseEmvaDescriptorFile from emva1288.process.loader import LoadImageData from emva1288.process.data import Data1288 from emva1288.process.results import Results1288 from emva1288.camera.dataset_generator import DatasetGenerator from emva1288.unittests.test_routines import del_obj import numpy as np def _init(pixel_area=0, **kwargs): # create dataset dataset = DatasetGenerator(**kwargs) # parse dataset parser = ParseEmvaDescriptorFile(dataset.descriptor_path) # load image data loader = LoadImageData(parser.images) # create data data = Data1288(loader.data) # Make results object px = pixel_area if pixel_area == 0: px = 
dataset.cam.pixel_area results = Results1288(data.data, pixel_area=px) return dataset, parser, loader, data, results class TestResults(unittest.TestCase): _height = 50 _width = 100 _bit_depth = 8 _L = 50 _qe = 0.5 _steps = 10 _radiance_min = None _exposure_max = 5000000000 _dark_current_ref = 30 _exposure_fixed = 10000000 _temperature = 20 _temperature_ref = 20 _K = 0.1 _exposure_min = 50000 _dark_signal_0 = 1 _sigma2_dark_0 = 1 _dsnu = np.zeros((_height, _width)) _dsnu[0, :] += 5 _prnu = np.ones((_height, _width)) _prnu[-1, :] += 1.5 def test_results_exposure_variation(self): dt, p, l, da, r = _init(height=self._height, width=self._width, bit_depth=self._bit_depth, L=self._L, qe=self._qe, steps=self._steps, radiance_min=self._radiance_min, exposure_max=self._exposure_max, exposure_min=self._exposure_min, K=self._K, dark_signal_0=self._dark_signal_0, sigma2_dark_0=self._sigma2_dark_0, dark_current_ref=self._dark_current_ref, temperature=self._temperature, temperature_ref=self._temperature_ref, dsnu=self._dsnu, prnu=self._prnu) self.dataset = dt self.parser = p self.loader = l self.data = da self.results = r ########################### # Test results properties # ########################### # test that quantification noise is really 1/12 self.assertEqual(self.results.s2q, 1.0 / 12.0) # test that indexes are integers and in good range for attr in ('index_start', 'index_u_ysat', 'index_sensitivity_max', 'index_sensitivity_min', 'index_linearity_min', 'index_linearity_max'): value = getattr(self.results, attr) self.assertTrue(type(value) is int or type(value) is np.int64, msg="%s is not an integer but should be!" % attr) self.assertTrue(value < len(self.data.data['temporal']['u_y'])) self.assertTrue(value >= 0) self.assertTrue(attr in self.results.results.keys(), msg="%s does not appear in the results!" % attr) # Test that EMVA values are float and positive for a in ('s2q', 'R', 'K', 'QE', 'sigma_y_dark', 'sigma_d', 'u_p_min', 'u_p_min_area', 'u_e_min', 'u_e_min_area', 'u_p_sat', 'u_p_sat_area', 'u_e_sat', 'SNR_max', 'DR', 'LE_min', 'LE_max', 'u_I_var', 'u_I_mean', 'sigma_2_y_stack', 'sigma_2_y_stack_dark', 's_2_y_measured', 's_2_y', 's_2_y_dark', 'DSNU1288', 'PRNU1288'): value = getattr(self.results, a) self.assertTrue(isinstance(value, float), msg="%s is not a float but should be!" % a) if not a == 's2q': self.assertTrue(a in self.results.results, msg="%s does not appear in the results!" % a) # except for linearity errors and dark currents, # everything always should be positive if (a not in ('LE_min', 'LE_max') and value is not np.nan): self.assertGreaterEqual(value, 0.0, msg="%s is negative but" " should be positive!" 
% a) ############################################################### # The following deltas are purely guesstimates and are prone to # errors in the future if they are not really significant ############################################################### # Test quantum efficiency is retrieved with a +/- 5% incertainty self.assertAlmostEqual(self._qe * 100, self.results.QE, delta=5.0, msg="The difference between the expected QE and" "the retrieved one is greater than 5%!") # Test that overall system gain # is retrieved with a +/- 0.01 incertainty self.assertAlmostEqual(self.dataset.cam.K, self.results.K, delta=0.01, msg="The difference between expected" "system gain" "and the retrieved one" "is greater than 0.01!") self.assertEqual(self.results.inverse_K(), 1 / self.results.K) # Test that responsivity is coherent with QE and system gain self.assertAlmostEqual(self.results.R, self.results.QE * self.results.K / 100, delta=0.001) # division errors compensation # Test that dark current is actually retrieved from both methods self.assertAlmostEqual(self._dark_current_ref, self.results.u_I_mean, delta=5.0, msg="Dark current is not well retrieved from" " mean dark signal.") self.assertAlmostEqual(self._dark_current_ref, self.results.u_I_var, delta=5.5, msg="Dark current is not well retrieved from" " dark signal variance.") # Test that u_e_sat_area = u_e_sat / area self.assertAlmostEqual(self.results.u_e_sat_area, self.results.u_e_sat / self.dataset.cam.pixel_area, delta=0.01) # Test that SNR_max is sqrt of u_e_sat self.assertAlmostEqual(self.results.SNR_max, np.sqrt(self.results.u_e_sat), delta=0.01) # Test that SNR_max_db is 20log_10(SNR_max) self.assertAlmostEqual(self.results.SNR_max_dB(), 20 * np.log10(self.results.SNR_max), delta=0.01) # Test that SNR_max_bit is log_2(SNR_max) self.assertAlmostEqual(self.results.SNR_max_bit(), np.log2(self.results.SNR_max), delta=0.01) # Test that SNR_max inverse is 100 / SNR_max self.assertAlmostEqual(self.results.inverse_SNR_max(), 100 / self.results.SNR_max, delta=0.01) # Test that DR is u_p_sat / u_p_min self.assertAlmostEqual(self.results.DR, self.results.u_p_sat / self.results.u_p_min, delta=0.01) # Test that DR_dB is 20log_10(DR) self.assertAlmostEqual(self.results.DR_dB(), 20 * np.log10(self.results.DR), delta=0.01) # Test that DR_bit is log_2(DR) self.assertAlmostEqual(self.results.DR_bit(), np.log2(self.results.DR), delta=0.01) # Test that DSNU is sqrt(s2_ydark) / gain self.assertAlmostEqual(self.results.DSNU1288, np.sqrt(self.results.s_2_y_dark) / self.results.K, delta=0.01) # Test that DSNU in DN is DSNU * K self.assertAlmostEqual(self.results.DSNU1288_DN(), self.results.DSNU1288 * self.results.K, delta=0.01) # Test that PRNU is the same as defined in EMVA1288 standard self.assertAlmostEqual(self.results.PRNU1288, np.sqrt(self.results.s_2_y - self.results.s_2_y_dark) * 100 / (np.mean(self.data.data['spatial']['avg']) - np.mean(self.data.data['spatial']['avg_' 'dark']))) # Test that histograms contains relevant keys and are numpy arrays hists = ('histogram_PRNU', 'histogram_PRNU_accumulated', 'histogram_DSNU', 'histogram_DSNU_accumulated') keys = ('bins', 'model', 'values') for hist in hists: h = getattr(self.results, hist) for key in keys: self.assertTrue(key in h.keys()) self.assertTrue(isinstance(h[key], np.ndarray)) # delete objects del_obj(self.dataset, self.parser, self.loader, self.data, self.results) def test_results_current_variation(self): dt, p, l, da, r = _init(height=self._height, width=self._width, bit_depth=self._bit_depth, 
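# The assertions above pin down how several EMVA1288 figures relate to each
# other.  A small numeric sketch of those relations with illustrative (not
# measured) inputs: responsivity R = QE/100 * K, SNR_max = sqrt(u_e_sat),
# DR = u_p_sat / u_p_min, DSNU1288 = sqrt(s2_y_dark) / K, plus the dB and
# bit conversions used by the *_dB() / *_bit() helpers.
import numpy as np

QE, K = 50.0, 0.1                  # quantum efficiency [%] and system gain [DN/e^-]
u_e_sat = 40000.0                  # electrons at saturation
u_p_sat, u_p_min = 80000.0, 40.0   # photons at saturation / at detection threshold
s2_y_dark = 0.25                   # dark spatial variance [DN^2]

R = QE / 100 * K                   # responsivity [DN/p]
SNR_max = np.sqrt(u_e_sat)
DR = u_p_sat / u_p_min
DSNU1288 = np.sqrt(s2_y_dark) / K  # [e^-]; DSNU1288 * K gives the value in DN
print(R, SNR_max, 20 * np.log10(SNR_max), np.log2(SNR_max))
print(DR, 20 * np.log10(DR), np.log2(DR))
print(DSNU1288, DSNU1288 * K)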
L=self._L, steps=self._steps, dark_current_ref=self._dark_current_ref, exposure_fixed=self._exposure_fixed, radiance_min=self._radiance_min, exposure_max=self._exposure_max, exposure_min=self._exposure_min, dark_signal_0=self._dark_signal_0, sigma2_dark_0=self._sigma2_dark_0, temperature=self._temperature, temperature_ref=self._temperature_ref, K=self._K, dsnu=self._dsnu, prnu=self._prnu) self.dataset = dt self.parser = p self.loader = l self.data = da self.results = r data = self.data.data # Test that s_ydark is not a fit because only 1 texp self.assertAlmostEqual(self.results.sigma_y_dark, np.sqrt(data['temporal']['s2_ydark'][0]), delta=0.01) del_obj(self.dataset, self.parser, self.loader, self.data, self.results) def test_results_without_pixel_area(self): dt, p, l, da, r = _init(pixel_area=None, height=self._height, width=self._width, bit_depth=self._bit_depth, L=self._L, steps=self._steps, dark_current_ref=self._dark_current_ref, exposure_max=self._exposure_max, exposure_min=self._exposure_min, dark_signal_0=self._dark_signal_0, sigma2_dark_0=self._sigma2_dark_0, temperature=self._temperature, temperature_ref=self._temperature_ref, K=self._K, dsnu=self._dsnu, prnu=self._prnu) self.dataset = dt self.parser = p self.loader = l self.data = da self.results = r # Test relevant properties are None self.assertIs(self.results.u_p_min_area, None) self.assertIs(self.results.u_e_min_area, None) self.assertIs(self.results.u_p_sat_area, None) self.assertIs(self.results.u_e_sat_area, None) del_obj(self.dataset, self.parser, self.loader, self.data, self.results) def test_nans(self): # Test that less than 2 texp will yield a NaN for u_I_mean data = {'temporal': {'texp': [0, 1]}, 'spatial': {}} r = Results1288(data) self.assertIs(r.u_I_mean, np.nan) # Test that a negative slope for t vs s2_ydark will yield Nan for # u_I_var data['temporal']['s2_ydark'] = [1, 0] r = Results1288(data) self.assertIs(r.u_I_var, np.nan) # Test that a negative s2y_dark will yield a Nan for DSNU1288 data['spatial'] = {'avg_dark': [0, 0, 0], 'var_dark': [1, 1, 1], 'L_dark': 3} r = Results1288(data) self.assertIs(r.DSNU1288, np.nan) self.assertIs(r.DSNU1288_DN(), np.nan) del r PK=I!emva1288/unittests/test_camera.pyimport unittest import emva1288.camera as cam import numpy as np class CameraTestCase(unittest.TestCase): def setUp(self): self.cam = cam.Camera() def tearDown(self): del self.cam def test_img(self): img = self.cam.grab(0) self.assertEqual((self.cam.height, self.cam.width), np.shape(img)) def test_radiance(self): img1 = self.cam.grab(0) img2 = self.cam.grab(self.cam.get_radiance_for(mean=250)) self.assertLess(img1.mean(), img2.mean()) PK7~ ISbJJ#emva1288/unittests/test_routines.py# delete objects def del_obj(*args): for obj in args: del obj PK"H$}ZZ+emva1288/unittests/test_coding_standards.pyimport pep8 import os import emva1288 from nose.tools import assert_equal PEP8_ADDITIONAL_IGNORE = [] EXCLUDE_FILES = [] def test_pep8_conformance(): dirs = [] dirname = os.path.dirname(emva1288.__file__) dirs.append(dirname) examplesdir = os.path.join(dirname, '..', 'examples') examplesdir = os.path.abspath(examplesdir) dirs.append(examplesdir) pep8style = pep8.StyleGuide() # Extend the number of PEP8 guidelines which are not checked. pep8style.options.ignore = (pep8style.options.ignore + tuple(PEP8_ADDITIONAL_IGNORE)) pep8style.options.exclude.extend(EXCLUDE_FILES) result = pep8style.check_files(dirs) msg = "Found code syntax errors (and warnings)." 
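# Minimal usage sketch of the Camera simulator, following the pattern of
# CameraTestCase above: grab a dark image (radiance 0) and a bright image at
# the radiance expected to give a mean of about 250 DN, then compare them.
from emva1288.camera import Camera

cam = Camera()                                    # default 640x480, 8 bit
dark = cam.grab(0)
bright = cam.grab(cam.get_radiance_for(mean=250))
print(dark.shape)                                 # (480, 640)
print(dark.mean(), bright.mean())                 # the bright mean should be larger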
assert_equal(result.total_errors, 0, msg) if __name__ == '__main__': import nose nose.runmodule() PK1Ir( ( !emva1288/unittests/test_report.pyimport unittest import tempfile import os from emva1288.process import ParseEmvaDescriptorFile, LoadImageData, Data1288 from emva1288.camera.dataset_generator import DatasetGenerator from emva1288.report import info_op, Report1288 from emva1288.process.plotting import EVMA1288plots class TestReportGenerator(unittest.TestCase): _height = 50 _width = 100 _bit_depth = 8 _L = 50 _steps = 10 _radiance_min = None _exposure_max = 50000000 def setUp(self): # create dataset self.dataset = DatasetGenerator(height=self._height, width=self._width, bit_depth=self._bit_depth, L=self._L, steps=self._steps, radiance_min=self._radiance_min, exposure_max=self._exposure_max) # parse dataset self.parser = ParseEmvaDescriptorFile(self.dataset.descriptor_path) # load images self.loader = LoadImageData(self.parser.images) # create data self.data = Data1288(self.loader.data) # create operation point dict self.op = info_op() def tearDown(self): del self.dataset del self.parser del self.loader del self.op del self.data def test_report_generation(self): with tempfile.TemporaryDirectory() as outdir: # create report report = Report1288(outdir) report.add(self.op, self.data.data) # check that output directory has been created for directory in ('files', 'OP1', 'upload'): self.assertTrue(os.path.isdir(os.path.join(outdir, directory))) # check that plots have been created and saved for plt in EVMA1288plots: name = plt.__name__ path = os.path.join(outdir, 'OP1', name + '.pdf') self.assertTrue(os.path.isfile(path)) # generate tex files report.latex() # check that tex files have been created for fil in ('emvadatasheet.sty', 'report.tex'): path = os.path.join(outdir, fil) self.assertTrue(os.path.isfile(path)) PKڜHPDemva1288/process/__init__.py# -*- coding: utf-8 -*- # Copyright (c) 2014 The EMVA1288 Authors. All rights reserved. # Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. """EMVA 1288 Package initialisation file""" from emva1288.process.data import Data1288 from emva1288.process.loader import LoadImageData from emva1288.process.parser import ParseEmvaDescriptorFile from emva1288.process.results import Results1288 from emva1288.process.plotting import Plotting1288 class Emva1288(object): def __init__(self, fname): parser = ParseEmvaDescriptorFile(fname) imgs = LoadImageData(parser.images) dat = Data1288(imgs.data) self._results = Results1288(dat.data) def results(self): return self._results.print_results() def plot(self): plot = Plotting1288(self._results) plot.plot() def xml(self, filename=None): return self._results.xml(filename) PK|I577emva1288/process/routines.py# -*- coding: utf-8 -*- # Copyright (c) 2014 The EMVA1288 Authors. All rights reserved. # Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. 
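# Usage sketch for the Emva1288 convenience wrapper defined in
# emva1288/process/__init__.py above.  "EMVA1288descriptor.txt" is a
# placeholder: any descriptor file, e.g. one written by DatasetGenerator,
# can be passed in.
from emva1288.process import Emva1288

e = Emva1288("EMVA1288descriptor.txt")   # placeholder descriptor path
e.results()                              # print the EMVA1288 results
e.plot()                                 # show the EMVA1288 figures
e.xml("results.xml")                     # or dump the results as XML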
"""Utils functions """ from __future__ import print_function import numpy as np import os from scipy.optimize import leastsq from lxml import etree from PIL import Image from collections import OrderedDict SIGNIFICANT_DIGITS = 7 def load_image(fname): img = Image.open(fname) img = np.asarray(img.split()[0]) # img = cv2.imread(fname, cv2.CV_LOAD_IMAGE_UNCHANGED) # img = cv2.split(img)[0] return img def get_int_imgs(imgs): ''' Returns the sum and pseudo-variance from list of images sum is just the image resulting on the addition of all the images pvar is the pseudo-variance, this is pvar = SUM((Li - SUM(i))^2) to get variance from pseudo-variance var = (1/(L^2) * 1/(L - 1)) * pvar ''' L = len(imgs) sum_ = 0 sq_ = 0 for img in imgs: # we force the images as int64 to make sure we do not clip i = img.astype(np.int64) sum_ += i sq_ += np.square(i) # the pseudo variance can be computed from the sum image and the sum of # the square images var_ = L * (L * sq_ - np.square(sum_)) return {'L': L, 'sum': sum_, 'pvar': var_} def LinearB0(Xi, Yi): X = np.asfarray(Xi) Y = np.asfarray(Yi) # we want a function y = m * x def fp(v, x): return x * v[0] # the error of the function e = x - y def e(v, x, y): return (fp(v, x) - y) # the initial value of m, we choose 1, because we thought YODA would # have chosen 1 v0 = [1.0] vr, _success = leastsq(e, v0, args=(X, Y)) # compute the R**2 (sqrt of the mean of the squares of the errors) err = np.sqrt(sum(np.square(e([vr], X, Y))) / (len(X) * len(X))) # Some versions of leastsq returns an array, other a scalar, so here we # make sure # it is an array val = np.array([vr]).flatten() return val, err def LinearB(Xi, Yi): X = np.asfarray(Xi) Y = np.asfarray(Yi) # we want a function y = m * x + b def fp(v, x): return x * v[0] + v[1] # the error of the function e = x - y def e(v, x, y): return (fp(v, x) - y) # the initial value of m, we choose 1, because we thought YODA would # have chosen 1 v0 = np.array([1.0, 1.0]) vr, _success = leastsq(e, v0, args=(X, Y)) # compute the R**2 (sqrt of the mean of the squares of the errors) err = np.sqrt(sum(np.square(e(vr, X, Y))) / (len(X) * len(X))) # print vr, success, err return vr, err def GetImgShape(img): rows = 1 if img.ndim == 1: cols, = img.shape else: rows, cols = img.shape return rows, cols def FFT1288(m, rotate=False): mm = np.asfarray(np.copy(m)) if rotate is True: mm = mm.transpose() _rows, cols = GetImgShape(mm) # This is just in case we are talking about really small or really # big arrays if (cols < 10) or (cols > 50000): return [] # Substract the mean of the image mm = mm - np.mean(mm) # perform the fft in the x direction fft = np.fft.fft(mm, axis=1) fft = fft / np.sqrt(cols) fabs = np.real(fft * np.conjugate(fft)) # extract the mean of each column of the fft r = np.mean(fabs, axis=0) return r def GetFrecs(fft): n = len(fft) x = np.arange(n) x = x * 1.0 / (2 * n) return x def Histogram1288(img, Qmax): y = np.ravel(img) ymin = np.min(y) ymax = np.max(y) # Because we are working with integers, minimum binwidth is 1 W = 1 q = ymax - ymin Q = q + 1 # When too many bins, create a new integer binwidth if Q > Qmax: # We want the number of bins as close as possible to Qmax (256) W = int(np.ceil(1. * q / (Qmax - 1))) Q = int(np.floor(1. 
* q / W)) + 1 # The bins # we need one more value for the numpy histogram computation # numpy used bin limits # in our interpretation we use the lower limit of the bin B = [ymin + (i * W) for i in range(Q + 1)] # Normal distribution with the original sigma, and mean mu = np.mean(y) sigma = np.std(y) normal = ((1. * (ymax - ymin) / Q) * np.size(y) / (np.sqrt(2 * np.pi) * sigma) * np.exp(-0.5 * (1. / sigma * (B[:-1] - mu)) ** 2)) ############################################# # # # Reference algorithm, it's pretty slow # # the numpy version gives the same results # # #The histogram container # H = np.zeros((Q,), dtype=np.int64) # # #Histogram computation # for yi in y: # q = (yi - ymin) / W # H[q] += 1 ############################################# H, _b = np.histogram(y, B, range=(ymin, ymax)) return {'bins': np.asfarray(B[:-1]), 'values': H, 'model': normal} def cls_1288_info(cls): """Dictionnary that represents results. Parameters ---------- cls : Class from wich to extract the information. Returns ------- dict : Dictionnary extracted using the format defined by a custom sphinx directive with the following format:: {attribute1: {'section': section name, 'units': attribute units, 'short': attribute short description, 'latexname': latex name for the attribute, 'symbol': symbol to represent the value}} """ d = OrderedDict() items = [name for name in sorted(cls.__dict__.keys())] for attribute_name in items: # Extract the doc from the Processing methods doc = getattr(cls, attribute_name).__doc__ if not doc: continue # All the lines in the docstring lines = [s.strip() for s in doc.splitlines()] # to store the relevant tag lines tag_lines = [] n = len(lines) flag = False for line in lines: # Get only those that are relevant (start with .. emva1288::) if line.startswith('.. 
emva1288::'): flag = True continue if flag: if not line.strip(): flag = False continue tag_lines.append(line) # if there are not relevant lines skip and go to next method if not tag_lines: continue # To store the info from the doc attribute_info = {} for line in tag_lines: tags = [x.strip() for x in line.split(':', 2) if x.strip()] # Each valid tag has to be xx:yy if len(tags) != 2: continue # Fill the dict attribute_info[tags[0].lower()] = tags[1] # If there is no section set it as other attribute_info.setdefault('section', 'other') d[attribute_name] = attribute_info return d def _sections_first(dct): """For backwards compatibility where we use to have results with section as first keys """ d = OrderedDict() sections = sorted({k['section'] for k in dct.values()}) for section in sections: d[section] = OrderedDict() for k, v in dct.items(): if v['section'] != section: continue d[section][k] = v return d def obj_to_dict(obj): ''' Get the info dict from the object class Add the values or Data to this dict for each method if the return value is a dict, it is inserted as d[SectionName][MethodName][Data] = ReturnValue if not d[SectionName][MethodName][Value] = ReturnValue ''' d = cls_1288_info(obj.__class__) for attribute in d.keys(): # Get the value for the given attribute val = getattr(obj, attribute) if callable(val): val = val() if isinstance(val, dict): d[attribute]['data'] = val else: d[attribute]['value'] = val return d def dict_to_xml(d, root='results', filename=None): ''' Takes a dict and return a well formed xml string ''' def key_to_xml(d, r): ''' Recursive call to add the key/value from dict to the r element tree If the value is an array joint the values casted as strings with whitespaces separator if the value is something else, it is casted as string ''' for k in d.keys(): e = etree.SubElement(r, k) # when the value for the key is a dict, call the function again if isinstance(d[k], dict): r.append(key_to_xml(d[k], e)) # if the value is an array # add the values of the array as a string separated by whitespaces # Note to self: add other array types as needed elif isinstance(d[k], np.ndarray): a = [str(x) for x in d[k]] e.text = ' '.join(a) # if something else, just add the corresponding string value else: e.text = str(d[k]) return r tree = etree.Element(root) xml = key_to_xml(d, tree) t = etree.tostring(xml, pretty_print=True) if filename is None: return t with open(filename, 'w') as f: f.write(t.decode('utf-8')) def xml_to_dict(xml): ''' If xml is a file, opens and parse, if string, parse it from string Convert the xml to a dict using element_to_dict Process the resulting dict: Cast Data to numpy float arrays (split the string by whitespaces) Cast Value to float ''' try: if os.path.isfile(xml): tree = etree.parse(xml) else: tree = etree.ElementTree.fromstring(xml) except: print('Problems loading XML') return None def element_to_dict(r): ''' Recursive call to add dictionnary elements from the r xml element ''' dout = {} for child in r: if list(child): # if the element has children call the function again with # children as r dout[child.tag] = element_to_dict(child) else: dout[child.tag] = child.text return dout root = tree.getroot() d = element_to_dict(root) # loop to reconstruct arrays from strings in Data elements for section, method in d.items(): for methodname, value in method.items(): if 'data' in value: for data in value['data']: v = value['data'][data] v = v.strip() v = v.split() d[section][methodname]['data'][data] = np.asfarray(v) else: # sometimes the decimal point is 
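# Sketch of the docstring convention that cls_1288_info() above relies on: a
# ".. emva1288::" marker followed by "key: value" lines, terminated by a
# blank line.  The property and its Section/Units/Short/Symbol values here
# are made up for illustration; cls_1288_info() turns them into the
# {'section': ..., 'units': ..., ...} dictionary described in its docstring.
from emva1288.process.routines import cls_1288_info

class ResultsDocSketch(object):
    @property
    def K(self):
        """Overall system gain

        .. emva1288::
            :Section: sensitivity
            :Short: System gain
            :Symbol: K
            :Units: $DN/e^-$
        """
        return 0.1

print(cls_1288_info(ResultsDocSketch))   # OrderedDict with the parsed tags for K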
written with , instead of . v = value['value'].replace(',', '.') # special cases, None, etc... if v == 'None': v = None else: v = float(v) d[section][methodname]['value'] = v return d def round_significant(v, sig=SIGNIFICANT_DIGITS): ''' Round up to the given significant digits, used for comparison ''' if v == 0.0: return 0.0 return round(v, sig - np.int(np.floor(np.log10(np.abs(v)))) - 1) round_array = np.vectorize(round_significant) def compare_xml(x1, x2): # load the xml into dicts f1 = xml_to_dict(x1) f2 = xml_to_dict(x2) # if something is wrong abort if f1 is None or f2 is None: return c1 = list(f1.keys()) c2 = list(f2.keys()) # loop throught the combined categories for category in set(c1) | set(c2): print('') print('*' * 70) print(category) print('*' * 70) # check if missing category in one of the dicts if category not in c1 or category not in c2: t1 = category in c1 t2 = category in c2 print('{0:<35}'.format('PRESENT'), end=" ") print('{0:<20}{1:<20}FAIL'.format(str(t1), str(t2))) continue m1 = f1[category].keys() m2 = f2[category].keys() # loop throught the combined methodnames for methodname in set(m1) | set(m2): print('{0:<35}'.format(methodname), end=" ") # check if methodname in dict if methodname not in m1: v1 = None a2 = None # get the value and the data else: v1 = f1[category][methodname].get('value', None) a1 = f1[category][methodname].get('data', None) if methodname not in m2: v2 = None a2 = None else: v2 = f2[category][methodname].get('value', None) a2 = f2[category][methodname].get('data', None) # first we check for values and then for data # if both present only values will be taken in count for comparison # If both are values if v1 is not None and v2 is not None: try: r = (round_significant(v1) - round_significant(v2)) == 0.0 except: r = False t1 = v1 t2 = v2 # if both are arrays elif a1 is not None and a2 is not None: k1 = a1.keys() k2 = a2.keys() t1 = 'Array' t2 = 'Array' # if different keys, is invalid if (set(k1) ^ set(k2)): r = False else: # loop throught the keys for k in k1: try: r = np.max(np.abs(round_array(a1[k]) - round_array(a2[k]))) == 0.0 except: r = False if not r: break print('{0:<20}{1:<20}'.format(t1, t2), end=" ") if r: print('OK') else: print('FAIL') PK̆I llemva1288/process/plotting.py# -*- coding: utf-8 -*- # Copyright (c) 2014 The EMVA1288 Authors. All rights reserved. # Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. """Plot the results This class takes a results.Results1288 object and produces all the plots needed to create a reference datasheet of the EMVA1288 test """ from __future__ import print_function import numpy as np from . import routines class Emva1288Plot(object): """Base class for emva plots.""" name = "" """The figure's name (used as title if title is none).""" title = None """The figure's title.""" xlabel = None """The x axis label.""" ylabel = None """The y axis label.""" xscale = None """The x axis scale.""" yscale = None """The y axis scale.""" def __init__(self, figure): """Base class for emva plots init function. The only mandatory attribute is the name, the rest are for use in the :meth:`setup_figure` method. Parameters ---------- figure : The :class:`matplotlib:matplotlib.figure.Figure` object to plot. """ self.figure = figure self.setup_figure() def setup_figure(self): """Simple wrapper for one plot per figure Takes the name, xlabel, ylabel, xscale and yscale for one plot case. If more than one plot, just overwrite as you wish. 
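# round_significant() above keeps SIGNIFICANT_DIGITS (7) significant figures
# so that values surviving an XML round trip can be compared without floating
# point noise.  The standalone helper below only mirrors that formula (round
# at sig - floor(log10(|v|)) - 1 decimal places) to show the effect:
import math

def round_sig_sketch(v, sig=7):
    if v == 0.0:
        return 0.0
    return round(v, sig - int(math.floor(math.log10(abs(v)))) - 1)

print(round_sig_sketch(123.4567891))     # 123.4568
print(round_sig_sketch(0.000123456789))  # 0.0001234568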
""" ax = self.figure.add_subplot(111) if self.title: ax.set_title(self.title) else: ax.set_title(self.name) if self.xlabel: ax.set_xlabel(self.xlabel) if self.ylabel: ax.set_ylabel(self.ylabel) if self.xscale: ax.set_xscale(self.xscale) if self.yscale: ax.set_yscale(self.yscale) self.ax = ax def plot(self, test): """Method to show the figures. Parameters ---------- test : Do nothing for this method but can be used for the subclass method. Raises ------ NotImplementedError If this method is not overridden. Notes ----- Must be overridden in subclasses. """ raise NotImplementedError def set_legend(self, ax): """Shortcut to add legend. Parameters ---------- ax : The :class:`matplotlib:matplotlib.axes.Axes` object to which the legend will be added. """ ax.legend(loc='best') legend = ax.get_legend() if legend is not None: if getattr(legend, 'draggable', False): legend.draggable(True) def rearrange(self): """Opportunity to change axis or limits after all the tests have been plotted. Uses :meth:`matplotlib:matplotlib.figure.Figure.tight_layout` method. """ self.figure.tight_layout() def reduce_ticks(self, ax, axis, n=4): """Reduce the number of ticks in ax.axis Uses the :meth:`matplotlib:matplotlib.axes.Axes.locator_params` method. Parameters ---------- ax : The :class:`matplotlib:matplotlib.axes.Axes` object to which the number of ticks will be changed. axis : str, {'x', 'y', 'both'} Axis on which to operate. n : int, optional Number of bins between ticks to be left. """ ax.locator_params(axis=axis, nbins=n) class PlotSensitivity(Emva1288Plot): name = 'Sensitivity' xlabel = '$\mu_p$ [mean number of photons/pixel]' ylabel = '$\mu_y - \mu_{y.dark}$ [DN]' def plot(self, test): ax = self.ax ax.plot(test.temporal['u_p'], test.temporal['u_y'] - test.temporal['u_ydark'], label='Data', gid='%d:data' % test.id) ax.plot(test.temporal['u_p'], test.R * test.temporal['u_p'], '--', label='Fit', gid='%d:fit' % test.id) xi = test.temporal['u_p'][test.index_sensitivity_min] xf = test.temporal['u_p'][test.index_sensitivity_max] yi = (test.temporal['u_y'] - test.temporal['u_ydark'])[test.index_sensitivity_min] yf = (test.temporal['u_y'] - test.temporal['u_ydark'])[test.index_sensitivity_max] ax.plot((xi, xf), (yi, yf), label='Fit range', linestyle='None', marker='o', gid='%d:marker' % test.id) self.set_legend(ax) def rearrange(self): self.ax.ticklabel_format(axis='x', style='sci', scilimits=(1, 4)) self.figure.tight_layout() class PlotUyDark(Emva1288Plot): '''Create $\mu_{y.dark}$ plot''' name = 'Mean gray value in dark' title = '$\mu_{y.dark}$' xlabel = 'exposure time [ns]' ylabel = '$\mu_{y.dark}$ [DN]' def plot(self, test): ax = self.ax if np.mean(test.temporal['texp']) == test.temporal['texp'][0]: ax.plot(test.temporal['texp'], test.temporal['u_ydark'], 'o', markersize=5, label='Data', gid='%d:data' % test.id) else: ax.plot(test.temporal['texp'], test.temporal['u_ydark'], label='Data', gid='%d:data' % test.id) ax.ticklabel_format(axis='x', style='sci', scilimits=(1, 4)) self.set_legend(ax) class PlotPTC(Emva1288Plot): '''Create Photon Transfer plot''' name = 'Photon Transfer' xlabel = '$\mu_y - \mu_{y.dark}$ [DN]' ylabel = '$\sigma^2_y - \sigma^2_{y.dark}$ [DN$^2$]' def plot(self, test): ax = self.ax X = test.temporal['u_y'] - test.temporal['u_ydark'] Y = test.temporal['s2_y'] - test.temporal['s2_ydark'] ax.plot(X, Y, label='Data', gid='%d:data' % test.id) ax.plot(X, test.K * X, linestyle='--', label='Fit', gid='%d:fit' % test.id) ax.plot((X[test.index_u_ysat], ), (Y[test.index_u_ysat], ), marker='s', 
linestyle='None', label='Saturation', gid='%d:marker' % test.id) ax.plot((X[test.index_sensitivity_min], X[test.index_sensitivity_max]), (Y[test.index_sensitivity_min], Y[test.index_sensitivity_max]), linestyle='None', marker='o', label='Fit range', gid='%d:marker' % test.id) self.set_legend(ax) class PlotSNR(Emva1288Plot): '''Create SNR plot ''' name = 'Signal to Noise Ratio' xlabel = '$\mu_{p}$ [mean number of photons/pixel]' ylabel = 'SNR' def setup_figure(self): super(PlotSNR, self).setup_figure() self.ax.loglog() max_ideal = [] self.max_ideal = max_ideal def plot(self, test): ax = self.ax X = np.arange(test.u_p_min, test.u_p_sat, (test.u_p_sat - test.u_p_min) / 100.0) # remove the zeros on the denominator, at saturation the temporal # noise is zero! nz = np.nonzero(test.temporal['s2_y']) ax.plot(test.temporal['u_p'][nz], (test.temporal['u_y'] - test.temporal['u_ydark'])[nz] / np.sqrt(test.temporal['s2_y'][nz]), marker='.', ls=' ', label='Data', gid='%d:data' % test.id) ax.plot(X, ((test.QE / 100) * X) / np.sqrt((test.sigma_d) ** 2 + (test.s2q / (test.K) ** 2) + ((test.QE / 100) * X)), linestyle=':', label='Theoretical', gid='%d:fit' % test.id) ideal = np.sqrt(X) self.max_ideal.append(ideal[-1]) ax.plot((X), ideal, linestyle='-.', label='Ideal', gid='%d:fit' % test.id) ax.axvline(test.u_p_min, label='$\mu_{p.min} = %.1f[p]$' % test.u_p_min, gid='%d:marker' % test.id) ax.axvline(test.u_p_sat, label='$\mu_{p.sat} = %.1f[p]$' % test.u_p_sat, gid='%d:marker' % test.id) ax.plot(X, ((test.QE / 100) * X) / np.sqrt((test.sigma_d) ** 2 + (test.s2q / (test.K) ** 2) + ((test.QE / 100) * X) + (test.DSNU1288 ** 2) + (((test.PRNU1288 / 100) * (test.QE / 100.) * X) ** 2)), linestyle='--', label='Total SNR', gid='%d:fit' % test.id) ax.set_ylim(1, max(self.max_ideal)) self.set_legend(ax) class PlotLinearity(Emva1288Plot): '''Create Linearity plot''' name = 'Linearity' xlabel = '$\mu_{p}$ [mean number of photons/pixel]' ylabel = '$\mu_y - \mu_{y.dark}$ [DN]' def plot(self, test): ax = self.ax X = test.temporal['u_p'] Y = test.temporal['u_y'] - test.temporal['u_ydark'] ax.plot(X, Y, label='Data', gid='%d:data' % test.id) ax.plot(X, test.linearity()['fit_slope'] * X + test.linearity()['fit_offset'], linestyle='--', label='Fit', gid='%d:fit' % test.id) ax.plot((X[test.index_linearity_min], X[test.index_linearity_max]), (Y[test.index_linearity_min], Y[test.index_linearity_max]), label='Fit range', linestyle='None', marker='o', gid='%d:marker' % test.id) self.set_legend(ax) self.ax.ticklabel_format(axis='x', style='sci', scilimits=(1, 4)) class PlotDeviationLinearity(Emva1288Plot): '''Create Deviation Linearity plot''' name = 'Deviation linearity' xlabel = '$\mu_{p}$ [mean number of photons/pixel]' ylabel = 'Linearity error LE [%]' def plot(self, test): ax = self.ax X = test.temporal['u_p'][test.index_linearity_min: test.index_linearity_max] deviation = test.linearity()['relative_deviation'] Y = deviation[test.index_linearity_min: test.index_linearity_max] ax.plot(X, Y, label='Data', gid='%d:data' % test.id) ax.plot((X[0], X[-1]), (Y[0], Y[-1]), label='Fit range', linestyle='None', marker='o', gid='%d:marker' % test.id) self.set_legend(ax) self.ax.ticklabel_format(axis='x', style='sci', scilimits=(1, 4)) class PlotHorizontalSpectrogramPRNU(Emva1288Plot): '''Create Horizontal spectrogram PRNU plot''' name = 'Horizontal spectrogram PRNU' xlabel = 'cycles [periods/pixel]' ylabel = 'Standard deviation and\nrelative presence of each cycle [%]' yscale = 'log' def plot(self, test): ax = self.ax data = 
test.spatial['avg'] - test.spatial['avg_dark'] data_mean = np.mean(data) spectrogram = routines.FFT1288(data) / data_mean ax.plot(routines.GetFrecs(spectrogram[:(np.shape(spectrogram)[0] // 2)]), (np.sqrt(spectrogram[:(np.shape(spectrogram)[0] // 2)])), label='Data', gid='%d:data' % test.id) ax.axhline(test.PRNU1288, label='$PRNU_{1288}$', linestyle='--', color='r', gid='%d:marker' % test.id) ax.axhline(np.sqrt(test.sigma_2_y_stack), label='$\sigma^2_{y.stack}$', linestyle='-.', color='g', gid='%d:marker' % test.id) self.set_legend(ax) class PlotHorizontalSpectrogramDSNU(Emva1288Plot): '''Create Horizontal spectrogram DSNU plot''' name = 'Horizontal spectrogram DSNU' xlabel = 'cycles [periods/pixel]' ylabel = 'Standard deviation and\nrelative presence of each cycle [DN]' yscale = 'log' def plot(self, test): ax = self.ax spectrogram = routines.FFT1288(test.spatial['avg_dark']) ax.plot(routines.GetFrecs(spectrogram[:(np.shape(spectrogram)[0] // 2)]), np.sqrt(spectrogram[:(np.shape(spectrogram)[0] // 2)]), label='Data', gid='%d:data' % test.id) ax.axhline(test.DSNU1288_DN(), label='$DSNU_{1288.DN}$', linestyle='--', color='r', gid='%d:marker' % test.id) ax.axhline(np.sqrt(test.sigma_2_y_stack_dark), label='$\sigma^2_{y.stack.dark}$', linestyle='-.', color='g', gid='%d:marker' % test.id) self.set_legend(ax) class PlotVerticalSpectrogramPRNU(Emva1288Plot): '''Create Vertical spectrogram PRNU plot''' name = 'Vertical spectrogram PRNU' xlabel = 'cycles [periods/pixel]' ylabel = 'Standard deviation and\nrelative presence of each cycle [%]' yscale = 'log' def plot(self, test): ax = self.ax data = test.spatial['avg'] - test.spatial['avg_dark'] data_mean = np.mean(data) spectrogram = routines.FFT1288(data, rotate=True) / data_mean ax.plot((routines.GetFrecs(spectrogram[:(np.shape(spectrogram)[0] // 2)])), (np.sqrt(spectrogram[:(np.shape(spectrogram)[0] // 2)])), label='Data', gid='%d:data' % test.id) ax.axhline(test.PRNU1288, label='$PRNU_{1288}$', linestyle='--', color='r', gid='%d:marker' % test.id) ax.axhline(np.sqrt(test.sigma_2_y_stack) / data_mean, label='$\sigma^2_{y.stack}$', linestyle='-.', color='g', gid='%d:marker' % test.id) self.set_legend(ax) class PlotVerticalSpectrogramDSNU(Emva1288Plot): '''Create Vertical spectrogram DSNU plot''' name = 'Vertical spectrogram DSNU' xlabel = 'Cycles [periods/pixel]' ylabel = 'Standard deviation and\nrelative presence of each cycle [DN]' yscale = 'log' def plot(self, test): ax = self.ax spectrogram = routines.FFT1288(test.spatial['avg_dark'], rotate=True) ax.plot(routines.GetFrecs(spectrogram[:(np.shape(spectrogram)[0] // 2)]), np.sqrt(spectrogram[:(np.shape(spectrogram)[0] // 2)]), label='Data', gid='%d:data' % test.id) ax.axhline(test.DSNU1288_DN(), label='$DSNU_{1288.DN}$', linestyle='--', color='r', gid='%d:marker' % test.id) ax.axhline(np.sqrt(test.sigma_2_y_stack_dark), label='$\sigma^2_{y.stack.dark}$', linestyle='-.', color='g', gid='%d:marker' % test.id) self.set_legend(ax) class PlotLogarithmicHistogramDSNU(Emva1288Plot): '''Create Logarithmic histogram DSNU plot''' name = 'Logarithmic histogram DSNU' xlabel = 'Deviation from the mean [DN]' ylabel = 'Number of pixels' yscale = 'log' def plot(self, test): ax = self.ax hist = test.histogram_DSNU ax.plot(hist['bins'], hist['values'], gid='%d:data' % test.id, label='Data') ax.plot(hist['bins'], hist['model'], '--', gid='%d:fit' % test.id, label='Model') self.set_legend(ax) ax.axis(ymin=1.0, ymax=np.max(hist['values']) * 2) class PlotLogarithmicHistogramPRNU(Emva1288Plot): '''Create Logarithmic 
histogram PRNU plot''' name = 'Logarithmic histogram PRNU' xlabel = 'Deviation from the mean [%]' ylabel = 'Number of pixels' yscale = 'log' def plot(self, test): ax = self.ax hist = test.histogram_PRNU ax.plot(hist['bins'], hist['values'], gid='%d:data' % test.id, label='Data') ax.plot(hist['bins'], hist['model'], '--', gid='%d:fit' % test.id, label='Model') self.set_legend(ax) ax.axis(ymin=0.5, ymax=np.max(hist['values']) * 2) class PlotAccumulatedLogHistogramDSNU(Emva1288Plot): '''Create Accumulated log histogram DSNU plot''' name = 'Accumulated log histogram DSNU' xlabel = 'Minimal deviation from the mean [DN]' ylabel = 'Percentage of pixels\ndeviating from the mean at least of : ' yscale = 'log' def plot(self, test): ax = self.ax hist = test.histogram_DSNU_accumulated ax.plot(hist['bins'], hist['values'], gid='%d:data' % test.id, label='Data') self.set_legend(ax) self.figure.tight_layout() class PlotAccumulatedLogHistogramPRNU(Emva1288Plot): '''Create Accumulated log histogram PRNU plot''' name = 'Accumulated log histogram PRNU' xlabel = 'Minimal deviation from the mean [%]' ylabel = 'Percentage of pixels\ndeviating from the mean at least of : ' yscale = 'log' def plot(self, test): ax = self.ax hist = test.histogram_PRNU_accumulated ax.plot(hist['bins'], hist['values'], gid='%d:data' % test.id, label='Data') self.set_legend(ax) class ProfileBase(Emva1288Plot): def __init__(self, *args, **kwargs): Emva1288Plot.__init__(self, *args, **kwargs) # Dict to keep track of max and min values to adjust the # limit of the axis self.axis_limits = {'bright': {'max': [], 'min': [], 'length': []}, 'dark': {'max': [], 'min': [], 'length': []}} def _get_extremes(self, mean_, min_, max_): min_min_i = np.argmax(mean_ - min_) min_min = min_[min_min_i] min_perc = np.abs(100. - (min_min * 100. / mean_[min_min_i])) min_label = 'Min ({:.1f} {:.1f}%)'.format(min_min, min_perc) max_max_i = np.argmax(max_ - min_) max_max = max_[max_max_i] max_perc = np.abs(100. - (max_max * 100. / mean_[max_max_i])) max_label = 'Max ({:.1f} {:.1f}%)'.format(max_max, max_perc) return {'min_deviation': min_min, 'min_precentage': min_perc, 'min_label': min_label, 'max_label': max_label, 'max_deviation': max_max, 'max_percentage': max_perc} def _get_image_profiles(self, image): """Get the images profiles. Supports the masked arrays. Masked arrays cannot be shown directly with continuous lines. To fix this, we only return the profiles excluding the masks constants. Also returns the x arrays corresponding the the profile. 
""" img = image if self.vertical: img = np.transpose(image) profile = np.mean(img, axis=0) profile_min = np.min(img, axis=0) profile_max = np.max(img, axis=0) mid_i = np.shape(img)[0] profile_mid = img[mid_i // 2, :] # Verifications for profile_mid if isinstance(profile_mid, np.ma.core.MaskedArray): if profile_mid.mask.all(): # If by chance mid is a column of masked constant, # take the next one profile_mid = img[mid_i // 2 + 1, :] # use _get_x_y to get the x and profile of unmasked values d = {'mean': self._get_x_y(profile), 'min': self._get_x_y(profile_min), 'max': self._get_x_y(profile_max), 'mid': self._get_x_y(profile_mid), } return d def _get_x_y(self, profile): x = np.arange(len(profile)) if isinstance(profile, np.ma.MaskedArray): masks = profile.mask # Index lists of where there is masked constants index = [i for i in range(len(masks)) if not masks[i]] # chop out masked values x = x[index] profile = profile[index] return (x, profile) def get_profiles(self, bright, dark): b_p = self._get_image_profiles(bright) # index 1 is the profile (0 is x-axis) b_mean = np.mean(b_p['mean'][1]) self.axis_limits['bright']['min'].append(0.9 * b_mean) self.axis_limits['bright']['max'].append(1.1 * b_mean) d_p = self._get_image_profiles(dark) self.axis_limits['dark']['min'].append(0.9 * np.mean(d_p['min'][1])) self.axis_limits['dark']['max'].append(1.1 * np.mean(d_p['max'][1])) return {'bright': b_p, 'dark': d_p} def plot(self, test): # index of profiles of what will be the x and y axis x = 0 y = 1 legend_loc = 'upper right' if self.vertical: x = 1 y = 0 legend_loc = (0.8, 0.65) ax = self.ax ax2 = self.ax2 bimg = test.spatial['avg'] - test.spatial['avg_dark'] dimg = test.spatial['avg_dark'] profiles = self.get_profiles(bimg, dimg) # to keep the lines number for legend bright_plots = [] labels = [] for typ in ('mid', 'min', 'max', 'mean'): # label has first letter capital label = typ.capitalize() labels.append(label) # bright plot l = ax.plot(profiles['bright'][typ][x], profiles['bright'][typ][y], label=label, gid='%d:marker' % test.id)[0] bright_plots.append(l) # dark plot ax2.plot(profiles['dark'][typ][x], profiles['dark'][typ][y], label=label, gid='%d:data' % test.id) # Place legend self.figure.legend(bright_plots, labels, loc=legend_loc) class PlotHorizontalProfile(ProfileBase): '''Create Horizontal profile plot Profile is done with spatial images. ''' name = 'Horizontal profile' vertical = False def setup_figure(self): self.ax = self.figure.add_subplot(211) self.ax2 = self.figure.add_subplot(212) self.figure.suptitle(self.name) self.ax.set_title('PRNU') self.ax.set_ylabel('Vertical line [DN]') self.ax2.set_title('DSNU') self.ax2.set_xlabel('Index of the line') self.ax2.set_ylabel('Vertical line [DN]') def rearrange(self): self.ax.set_xticks([]) self.ax.axis(ymin=min(self.axis_limits['bright']['min']), ymax=max(self.axis_limits['bright']['max'])) self.ax2.axis(ymin=min(self.axis_limits['dark']['min']), ymax=max(self.axis_limits['dark']['max'])) self.reduce_ticks(self.ax2, 'y') self.reduce_ticks(self.ax, 'y') self.figure.tight_layout(pad=2) class PlotVerticalProfile(ProfileBase): '''Create Vertical profile plot. Profile is done with spatial images. 
''' name = 'Vertical profile' vertical = True def setup_figure(self): self.ax2 = self.figure.add_subplot(121) self.ax = self.figure.add_subplot(122) self.figure.suptitle(self.name) self.ax2.set_title('DSNU') self.ax2.set_xlabel('Vertical line [DN]') self.ax2.set_ylabel('Index of the line') self.ax.set_title('PRNU') self.ax.set_xlabel('Vertical line [DN]') def rearrange(self): self.ax.set_yticks([]) self.ax.axis(xmin=min(self.axis_limits['bright']['min']), xmax=max(self.axis_limits['bright']['max'])) self.ax2.axis(xmin=min(self.axis_limits['dark']['min']), xmax=max(self.axis_limits['dark']['max'])) self.ax2.invert_yaxis() self.ax.invert_yaxis() self.reduce_ticks(self.ax2, 'x') self.reduce_ticks(self.ax, 'x') self.figure.tight_layout() EVMA1288plots = [PlotPTC, PlotSNR, PlotSensitivity, PlotUyDark, PlotLinearity, PlotDeviationLinearity, PlotHorizontalSpectrogramPRNU, PlotHorizontalSpectrogramDSNU, PlotVerticalSpectrogramPRNU, PlotVerticalSpectrogramDSNU, PlotLogarithmicHistogramDSNU, PlotLogarithmicHistogramPRNU, PlotAccumulatedLogHistogramDSNU, PlotAccumulatedLogHistogramPRNU, PlotHorizontalProfile, PlotVerticalProfile] """ This list is quite exhaustive. There are the links to corresponding documentation: - :class:`~emva1288.process.plotting.PlotPTC` - :class:`~emva1288.process.plotting.PlotSNR` - :class:`~emva1288.process.plotting.PlotSensitivity` - :class:`~emva1288.process.plotting.PlotUyDark` - :class:`~emva1288.process.plotting.PlotLinearity` - :class:`~emva1288.process.plotting.PlotDeviationLinearity` - :class:`~emva1288.process.plotting.PlotHorizontalSpectrogramPRNU` - :class:`~emva1288.process.plotting.PlotHorizontalSpectrogramDSNU` - :class:`~emva1288.process.plotting.PlotVerticalSpectrogramPRNU` - :class:`~emva1288.process.plotting.PlotVerticalSpectrogramDSNU` - :class:`~emva1288.process.plotting.PlotLogarithmicHistogramDSNU` - :class:`~emva1288.process.plotting.PlotLogarithmicHistogramPRNU` - :class:`~emva1288.process.plotting.PlotAccumulatedLogHistogramPRNU` - :class:`~emva1288.process.plotting.PlotAccumulatedLogHistogramDSNU` - :class:`~emva1288:emva1288.process.plotting.PlotHorizontalProfile` - :class:`~emva1288.process.plotting.PlotVerticalProfile` """ class Plotting1288(object): def __init__(self, test): ''' Creates and shows all plots necessary to prepare a camera or sensor descriptive report compliant with EMVA Standard 1288 version 3.1. ''' if not getattr(test, 'id', False): test.id = id(test) self.test = test def plots_to_plot(self, *plots): p = [] if not plots: plots = range(len(EVMA1288plots)) for i in plots: if i not in range(len(EVMA1288plots)): print('Error ', i, 'is not valid index') print('Plot has to be integer in ', range(len(EVMA1288plots))) continue p.append(i) return p def plot(self, *ids): import matplotlib.pyplot as plt plots = self.plots_to_plot(*ids) for i in plots: figure = plt.figure(i) plot = EVMA1288plots[i](figure) plot.plot(self.test) plot.rearrange() figure.canvas.set_window_title(plot.name) plt.show() PKIp7$7$emva1288/process/parser.py# -*- coding: utf-8 -*- # Copyright (c) 2014 The EMVA1288 Authors. All rights reserved. # Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. """EMVA 1288 descriptor parser This class takes an EMVA1288 descriptor file and loads its content into a python dictionary. 
An EMVA1288 descriptor file is a file that contains the description of an EMVA1288 test including exposure times, photon count and corresponding images """ import numpy as np import os import pprint import logging class ParseEmvaDescriptorFile(object): """Take an image descriptor file and transform it into an usable directory """ def __init__(self, filename, path=None, loglevel=logging.INFO): """Parser init method. Uses a :class:`python:logging.Logger` object to print infos of the parcing process. This method :meth:`loads <_load_file>` the file and :meth:`gets <_fill_info>` the information from it. Parameters ---------- filename : str The descriptor file's name or the complete path to it. path : str, optional The path to the descriptor file. loglevel : int, optional The logger level. """ # The items are in the form of # exposure:{photons:[fname1, fname2,...]}, photons....} # for dark, the number of photons is 0.0 # If no path is given, the filename path will be used to fill # the images dict self._path = path self.format = {} # bits, witdth, height self.version = None self.images = {'temporal': {}, 'spatial': {}} logging.basicConfig() self.log = logging.getLogger('Parser') self.log.setLevel(loglevel) self._load_file(filename) self._fill_info() self.log.debug(pprint.pformat(self.images)) def _get_images_filenames(self): """ From the current line in self._lines array get all the consecutive "i filename" if less than 2 consecutive, raise an error """ fnames = [] while self._lines: line = self._lines.pop() l = self._split_line(line) if l[0] != 'i': # Ups, to the end of images, reappend last line that is not an # image line self._lines.append(line) break if len(l) != 2: # pragma: no cover raise SyntaxError('Wrong format: "%s" should be "i filename"' % line) break # append image path to fnames npath = os.path.normpath(l[1]) path = os.path.join(self._path, *npath.split('\\')) fnames.append(path) if len(fnames) < 2: # pragma: no cover raise SyntaxError('Each image series, has to ' 'have at least two images') return fnames def _get_kind(self, fnames): """ Guess what kind of data based on the number of images Temporal = 2 images for each measurement point Spatial = >2 images for each measurement point """ L = len(fnames) if L == 2: kind = 'temporal' else: kind = 'spatial' return kind def _add_pcount(self, exposure, photons, fnames): """Add images to a given exposure/phton For a given exposure and photon count add the appropiate image filenames to the self.images dict """ # is it temporal or spatial data kind = self._get_kind(fnames) # create the exposure time dictionary for this exposure time # if it is not already existing self.images[kind].setdefault(exposure, {}) # if this dict for this exposure time and this # photons count already existed, raise an error in order to not # overwrite existing data. if photons in self.images[kind][exposure]: # pragma: no cover raise SyntaxError('Only one set of images exp %.3f photons %.3f' % (exposure, photons)) # append the images path to a dict whose key is the photons count # inside the exposure time dict self.images[kind][exposure][photons] = fnames def _fill_info(self): """ Iterate through all the lines in the descriptor file and parse them by their first character. 
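
        For illustration only, a minimal descriptor could look like the
        sketch below (version number, geometry, exposure times, photon
        counts and file names are all hypothetical)::

            v 3.0
            n 12 640 480
            b 1000000.0 1234.5
            i images\bright_001.tif
            i images\bright_002.tif
            d 1000000.0
            i images\dark_001.tif
            i images\dark_002.tif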
        Fill self.images
        """
        # Start at the end of the file
        self._lines.reverse()
        while self._lines:
            # pop it so that other methods know the current processed line
            line = self._lines.pop()
            # check that the line is in a good format and split it
            l = self._split_line(line)

            # descriptor file expected format
            ##
            # n bits width height
            # b exposureTime(ns) numberPhotons (bright image)
            # i relativePathToTheImage
            # d exposureTime(ns) (dark image)

            if l[0] == 'v':
                # if the line starts with 'v', this is the version
                self.version = l[1]
                self.log.info('Version ' + l[1])
                continue

            if l[0] == 'n':
                # lines that start with n always have 4 elements
                # n + bits + width + height
                # There should be only one line of this type in the file
                if len(l) != 4:  # pragma: no cover
                    raise SyntaxError('Wrong format: "%s" should be "n bits '
                                      'width height"' % line)
                if self.format:  # pragma: no cover
                    # if it is the second line found of this type,
                    # raise an error
                    raise SyntaxError('Only one "n bits width height" is '
                                      'allowed per file')
                self.format['bits'] = int(l[1])
                self.format['width'] = int(l[2])
                self.format['height'] = int(l[3])
                continue

            if l[0] == 'b':
                # lines that start with b always have 3 elements
                # b + exposureTime + numberPhotons (bright images)
                if len(l) != 3:  # pragma: no cover
                    raise SyntaxError('Wrong format: "%s" should be "b '
                                      'exposure photons"' % line)
                # Replace the decimal separator if a comma was used.
                # Note: np.float was removed from recent NumPy releases;
                # the builtin float is equivalent here.
                exposure = float(l[1].replace(',', '.'))
                photons = float(l[2].replace(',', '.'))
                # For these settings, get all the corresponding images
                fnames = self._get_images_filenames()
                # Add the images path to the images[kind][exposure][photons]
                # dictionary where kind = temporal or spatial
                self._add_pcount(exposure, photons, fnames)
                continue

            if l[0] == 'd':
                # lines that start with d always have 2 elements
                # d + exposureTime (dark images)
                if len(l) != 2:  # pragma: no cover
                    raise SyntaxError('Wrong format: "%s" should be "d '
                                      'exposure"' % line)
                # Replace the decimal separator if a comma was used
                exposure = float(l[1].replace(',', '.'))
                # For this exposure, get all the corresponding images
                fnames = self._get_images_filenames()
                # Add the images path to the images dict.
                self._add_pcount(exposure, 0.0, fnames)
                continue

            # If the line is of an unknown format, warn the user.
            self.log.warning('Unknown command ' + line)  # pragma: no cover

    def _split_line(self, line):
        """
        For every line of the descriptor file, check that it has at least
        two arguments split by white spaces and strip white spaces from
        the elements
        """
        l = [x.strip() for x in line.split()]
        if (not l) or (len(l) < 2):  # pragma: no cover
            raise SyntaxError('Wrong format line: %s' % line)
        return l

    def _load_file(self, filename):
        """
        Load a file, split it into lines and remove the comments
        (lines starting with #)
        """
        self.log.info('Opening ' + filename)
        # If no path was passed as kwarg, set it to the filename path.
        # It is used as the location prefix when opening images.
        if self._path is None:
            self._path = os.path.dirname(filename)

        # get the lines and strip them if they are not comments
        try:
            with open(filename, 'r') as f:
                self._lines = [x.strip() for x in f.readlines()
                               if x.strip() and
                               not x.strip().startswith('#')]
        except UnicodeDecodeError:  # pragma: no cover
            # If there is an unknown character in the file, report it.
            # Re-raising UnicodeDecodeError directly would fail because it
            # requires five arguments, so raise the more general
            # UnicodeError instead.
            raise UnicodeError("File: '%s' has non-utf8 characters. "
                               "Find them and remove them." % filename)

emva1288/process/results.py
# -*- coding: utf-8 -*-
# Copyright (c) 2014 The EMVA1288 Authors. All rights reserved.
# Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. """Compute EMVA1288 values from data This class takes the data from data.Data1288 and compute the actual EMVA1288 values. """ from __future__ import print_function import logging import numpy as np from emva1288.process import routines from scipy.ndimage import convolve class Results1288(object): """Class used to process data and to generate pdf report using LaTeX. When properties from this class are computed, their docstring are also parsed using the :func:`~emva1288.process.routines.cls_1288_info` function to retrieve more data informations (like units or full name). """ ######################################################################### # Docstrings with .. emva1288:: flags are used when computing the # # properties to add relevant information to data for further processing # # like plotting or creating a report # ######################################################################### def __init__(self, data, pixel_area=None, index_u_ysat=None, loglevel=logging.DEBUG): """Results computation init method. This class uses a :class:`python:logging.Logger` object to display informations for users. Parameters ---------- data : dict The data dictionary to compute the results from. pixel_area : float, optional The area of one pixel in um^2. index_u_ysat : int, optional The index of the u_y array at which we consider that the camera saturates. This is used if forcing the saturation point is necessary. loglevel : int, optional The level for the :class:`python:logging.Logger` object. """ logging.basicConfig() self.log = logging.getLogger('Results') self.log.setLevel(loglevel) self.temporal = data['temporal'] self.spatial = data['spatial'] self.pixel_area = pixel_area self._s2q = 1.0 / 12.0 self._index_start = 0 self._index_sensitivity_min = 0 self._histogram_Qmax = 256 # Maximum number of bins in histograms # Convolution kernel for high pass filter in defect pixel self._histogram_high_pass_box = (-1) * np.ones((5, 5), dtype=np.int64) self._histogram_high_pass_box[2, 2] = 24 # Sometimes we need to force the saturation point # in those cases pass the index in the initialization of Results1288 self._index_u_ysat = index_u_ysat @property def s2q(self): """Quantification noise.""" return self._s2q @property def index_start(self): """The array's starting index. .. emva1288:: :Section: info :Short: Start array index """ return self._index_start @property def index_u_ysat(self): """Index of saturation. .. emva1288:: :Section: sensitivity :Short: Saturation index """ if self._index_u_ysat: return self._index_u_ysat max_ = 0 max_i = 0 # we have to loop backwards because sometimes we have some # noise pics that really bother the computation s2_y = self.temporal['s2_y'] for i in range(len(s2_y) - 1, -1, -1): # Check that is not a local max if (s2_y[i] >= max_) or \ (s2_y[abs(i - 1)] >= max_): max_ = s2_y[i] max_i = i elif (s2_y[i] < max_) and \ (s2_y[abs(i - 1)] < max_): break self._index_u_ysat = max_i return self._index_u_ysat @property def index_sensitivity_max(self): """Index for linear fits in sensitivity part of the standard (70% of saturation). .. emva1288:: :Section: sensitivity :Short: Sensitivity fit maximum index """ Y = self.temporal['u_y'] - self.temporal['u_ydark'] m = 0.7 * (Y[self.index_u_ysat]) return max(np.argwhere(Y <= m))[0] @property def index_sensitivity_min(self): """Sensitivity minimum index. 
Index for linear fits in sensitivity part of the standard (70% of saturation) .. emva1288:: :Section: sensitivity :Short: Sensitivity fit minimum index """ return self._index_sensitivity_min @property def R(self): """Responsivity. Slope of the (u_y - u_y_dark) Vs u_p. Fit with offset = 0 Uses the :func:`~emva1288.process.routines.LinearB0` function to make the fit. .. emva1288:: :Section: sensitivity :Short: Responsivity :Symbol: R :Unit: DN/p """ Y = self.temporal['u_y'] - self.temporal['u_ydark'] X = self.temporal['u_p'] val, _error = routines.LinearB0(X[self.index_sensitivity_min: self.index_sensitivity_max + 1], Y[self.index_sensitivity_min: self.index_sensitivity_max + 1]) return val[0] @property def K(self): """Overall system gain. Slope of (s2_y - s2_y_dark) Vs (u_y - u_y_dark). Fit with offset = 0. Uses the :func:`~emva1288.process.routines.LinearB0` to make the fit. .. emva1288:: :Section: sensitivity :Short: System gain :Symbol: K :Unit: $DN/e^-$ :LatexName: K """ X = self.temporal['u_y'] - self.temporal['u_ydark'] Y = self.temporal['s2_y'] - self.temporal['s2_ydark'] val, _error = routines.LinearB0(X[self.index_sensitivity_min: self.index_sensitivity_max + 1], Y[self.index_sensitivity_min: self.index_sensitivity_max + 1]) return val[0] def inverse_K(self): """Inverse of overall system gain. .. emva1288:: :Section: sensitivity :Short: Inverse of overall system gain :Symbol: 1/K :Unit: $e^-/DN$ :LatexName: InvK """ return 1. / self.K @property def QE(self): """Quantum efficiency. It is retrieved as the ratio of the responsivity to the overall system gain. .. emva1288:: :Section: sensitivity :Short: Quantum efficiency :Symbol: $\eta$ :Unit: \% :LatexName: QE """ return 100.0 * self.R / self.K @property def sigma_y_dark(self): """Temporal Dark Noise. Uses :func:`~emva1288.process.routines.LinearB` to make the fit. .. emva1288:: :Section: sensitivity :Short: Temporal Dark Noise :Symbol: $\sigma_{y.dark}$ :Unit: DN :LatexName: SigmaYDark """ if len(np.unique(self.temporal['texp'])) <= 2: s2_ydark = self.temporal['s2_ydark'][0] else: fit, _error = routines.LinearB(self.temporal['texp'], self.temporal['s2_ydark']) s2_ydark = fit[1] # Lower limit for the temporal dark noise # The temporal dark noise in this range is dominated by the # quantization noise if s2_ydark < 0.24: s2_ydark = 0.24 return np.sqrt(s2_ydark) @property def sigma_d(self): """Temporal Dark Noise. .. emva1288:: :Section: sensitivity :Short: Temporal Dark Noise :Symbol: $\sigma_d$ :Unit: $e^-$ :LatexName: SigmaDark """ return np.sqrt((self.sigma_y_dark ** 2) - self._s2q) / self.K @property def u_p_min(self): """Absolute sensitivity threshold. .. emva1288:: :Section: sensitivity :Short: Absolute sensitivity threshold :Symbol: $\mu_{p.min}$ :Unit: $p$ :LatexName: UPMin """ return (100.0 / self.QE) * ((self.sigma_y_dark / self.K) + 0.5) @property def u_p_min_area(self): """Sensitivity threshold per pixel area. Returns None if pixel area is not defined or 0. .. emva1288:: :Section: sensitivity :Short: Sensitivity threshold :Symbol: $\mu_{p.min.area}$ :Unit: $p/\mu m^2$ :LatexName: UPMin """ if not self.pixel_area: return None return self.u_p_min / self.pixel_area @property def u_e_min(self): """Sensitivity threshold. .. emva1288:: :Section: sensitivity :Short: Sensitivity threshold :Symbol: $\mu_{e.min}$ :Unit: $e^-$ :LatexName: UEMin """ return self.QE * self.u_p_min / 100.0 @property def u_e_min_area(self): """Sensitivity threshold per pixel area. Returns None if the pixel area is not defined or 0. .. 
emva1288:: :Section: sensitivity :Short: Sensitivity threshold :Symbol: $\mu_{e.min.area}$ :Unit: $e^-/\mu m^2$ :LatexName: UEMin """ if not self.pixel_area: return None return self.u_e_min / self.pixel_area @property def u_p_sat(self): """Saturation Capacity. .. emva1288:: :Section: sensitivity :Short: Saturation Capacity :Symbol: $\mu_{p.sat}$ :Unit: $p$ :LatexName: UPSat """ return self.temporal['u_p'][self.index_u_ysat] @property def u_p_sat_area(self): """Saturation Capacity per pixel area. Returns None if pixel area is not defined or 0. .. emva1288:: :Section: sensitivity :Short: Saturation Capacity :Symbol: $\mu_{p.sat.area}$ :Unit: $p/\mu m^2$ :LatexName: UPSat """ if not self.pixel_area: return None return self.u_p_sat / self.pixel_area @property def u_e_sat(self): """Saturation Capacity. Number of electrons at saturation. .. emva1288:: :Section: sensitivity :Short: Saturation Capacity :Symbol: $\mu_{e.sat}$ :Unit: $e^-$ :LatexName: UESat """ return self.QE * self.u_p_sat / 100.0 @property def u_e_sat_area(self): """Saturation Capacity per pixel area. Returns None if pixel area is not defined or 0. .. emva1288:: :Section: sensitivity :Short: Saturation Capacity :Symbol: $\mu_{e.sat.area}$ :Unit: $e^-/\mu m^2$ :LatexName: UESat """ if not self.pixel_area: return None return self.u_e_sat / self.pixel_area @property def SNR_max(self): """Maximum Signal-to-Noise Ratio. .. emva1288:: :Section: sensitivity :Short: Signal-to-Noise Ratio :Symbol: $SNR_{max}$ :LatexName: SNRMax """ return np.sqrt(self.u_e_sat) def SNR_max_dB(self): """Maximum Signal to Noise Ratio in Db. .. emva1288:: :Section: sensitivity :Short: Maximum Signal to Noise Ratio in Db :Symbol: $SNR_{max.dB}$ :Unit: dB :LatexName: SNRMaxDB """ return 20. * np.log10(self.SNR_max) def SNR_max_bit(self): """Maximum Signal to Noise Ratio in Bits. .. emva1288:: :Section: sensitivity :Short: Maximum Signal to Noise Ratio in Bits :Symbol: $SNR_{max.bit}$ :Unit: bit :LatexName: SNRMaxBit """ return np.log2(self.SNR_max) def inverse_SNR_max(self): """Inverse Maximum Signal to Noise Ratio. .. emva1288:: :Section: sensitivity :Short: Inverse Maximum Signal to Noise Ratio :Symbol: $SNR_{max}^{-1}$ :Unit: \% :LatexName: InvSNRMax """ return 100. / self.SNR_max @property def DR(self): """Dynamic Range. Defined as the saturation capacity devided by the absolute sensitivity threshold. The greater this number is, the greater the operational range of a camera (between the dark noise level and the saturation level). .. emva1288:: :Section: sensitivity :Short: Dynamic Range :Symbol: DR :LatexName: DR """ return self.u_p_sat / self.u_p_min def DR_dB(self): """Dynamic Range in deciBels. It is defined as 20 * log_10 ( Dynamic Range ). .. emva1288:: :Section: sensitivity :Short: Dynamic Range :Symbol: $DR_{dB}$ :Unit: dB :LatexName: DRDB """ return 20. * np.log10(self.DR) def DR_bit(self): """Dynamic Range in bits. It is defined as log_2 ( Dynamic Range ). .. emva1288:: :Section: sensitivity :Short: Dynamic Range :Symbol: $DR_{bit}$ :Unit: bit :LatexName: DRBit """ return np.log2(self.DR) @property def index_linearity_min(self): """Linearity fit minimun index. Minimum index for linear fit (5% of saturation). .. emva1288:: :Section: linearity :Short: Linearity fit minimun index """ Y = self.temporal['u_y'] - self.temporal['u_ydark'] vmin = 0.05 * (Y[self.index_u_ysat]) return min(np.argwhere(Y >= vmin))[0] @property def index_linearity_max(self): """Linearity fit maximum index. Maximum index for linear fit (95% of saturation). .. 
emva1288:: :Section: linearity :Short: Linearity fit maximum index """ Y = self.temporal['u_y'] - self.temporal['u_ydark'] vmax = 0.95 * (Y[self.index_u_ysat]) return max(np.argwhere(Y <= vmax))[0] def linearity(self): """Returns a dictionary containing linearity information. It fits the mean digital signal in function of the mean photon count (Linear fit) using the EMVA1288 standard for linear fit. Returns ------- dict : Linearity dictionary. The keys are: - *'fit_slope'* : The slope of the linear fit. - *'fit_offset'* : The offset of the fit. - *'relative_deviation'* : The relative deviation of the real data from the fit (in %) for the whole array. - *'linearity_error_min'* : The minimal value of the relative deviation. - *'linearity_error_max'* : The maximal value of the relative deviation. """ Y = self.temporal['u_y'] - self.temporal['u_ydark'] X = self.temporal['u_p'] # The maximum index has to be included, this is the reason for the +1 imax = self.index_linearity_max + 1 imin = self.index_linearity_min ################################################################## # Following the emva1288 standart for the computation of the fit # ################################################################## X_ = X[imin: imax] Y_ = Y[imin: imax] xy = np.sum(X_ / Y_) xy2 = np.sum(X_ / (Y_ ** 2)) x2y2 = np.sum((X_ / Y_) ** 2) _y = np.sum(1. / Y_) _y2 = np.sum(1. / (Y_ ** 2)) b = ((xy * xy2) - (x2y2 * _y)) / ((xy2 ** 2) - (x2y2 * _y2)) a = (xy - (b * xy2)) / x2y2 dev = 100. * (Y - (a * X + b)) / (a * X + b) lin = {} lin['fit_slope'] = a lin['fit_offset'] = b lin['relative_deviation'] = dev lin['linearity_error_min'] = np.min(dev[imin: imax]) lin['linearity_error_max'] = np.max(dev[imin: imax]) return lin @property def LE_min(self): """ Min Linearity error. .. emva1288:: :Section: linearity :Short: Min Linearity error :Symbol: $LE_{min}$ :Unit: \% :LatexName: LEMin """ return self.linearity()['linearity_error_min'] @property def LE_max(self): """Max Linearity error. .. emva1288:: :Section: linearity :Short: Max Linearity error :Symbol: $LE_{max}$ :Unit: \% :LatexName: LEMax """ return self.linearity()['linearity_error_max'] @property def u_I_var(self): """Dark Current from variance. The dark current from variance is computed as the square root of the slope of the dark signal variance in function of the exposure times divided by the overall system gain. It uses the :func:`~emva1288.process.routines.LinearB` function to make the fit. Returns NaN if u_I_var is imaginary (if the fit slope is negative). .. emva1288:: :Section: dark_current :Short: Dark Current from variance :Symbol: $\mu_{I.var}$ :Unit: $e^-/s$ :LatexName: UIVar """ fit, _error = routines.LinearB(self.temporal['texp'], self.temporal['s2_ydark']) if fit[0] < 0: return np.nan # Multiply by 10^9 because exposure times are in nanoseconds return fit[0] * (10 ** 9) / (self.K ** 2) @property def u_I_mean(self): """Dark Current from mean. The dark current from mean is computed as the slope of the dark signal mean in function of the exposure times divided by the overall system gain. Returns NaN if the number of different exposure times is less than 3. It uses the :func:`~emva1288.process.routines.LinearB` to compute the linear fit. .. 
emva1288:: :Section: dark_current :Short: Dark Current from mean :Symbol: $\mu_{I.mean}$ :Unit: e/s """ if len(np.unique(self.temporal['texp'])) <= 2: return np.nan fit, _error = routines.LinearB(self.temporal['texp'], self.temporal['u_ydark']) # Multiply by 10 ^ 9 because exposure time in nanoseconds return fit[0] * (10 ** 9) / self.K @property def sigma_2_y_stack(self): """Temporal variance stack. Mean value of the bright variance image. .. emva1288:: :Section: spatial :Short: Temporal variance stack :Symbol: $\sigma^2_{y.stack}$ :Unit: DN2 """ return np.mean(self.spatial['var']) @property def sigma_2_y_stack_dark(self): """Temporal variance stack dark. Mean value of the dark variance image. .. emva1288:: :Section: spatial :Short: Temporal variance stack dark :Symbol: $\sigma^2_{y.stack.dark}$ :Unit: DN2 """ return np.mean(self.spatial['var_dark']) @property def s_2_y_measured(self): """Spatial variance measure. Variance value of the bright variance image. .. emva1288:: :Section: spatial :Short: Spatial variance measure :Symbol: $s^2_{y.measured}$ :Unit: DN2 """ # ddof = 1 (delta degrees of freedom) accounts for the minus 1 in the # divisor for the calculation of variance return np.var(self.spatial['avg'], ddof=1) @property def s_2_y_measured_dark(self): """Spatial variance measured dark. Variance value of the dark variance image. .. emva1288:: :Section: spatial :Short: Spatial variance measured dark :Symbol: $s^2_{y.measured.dark}$ :Unit: DN2 """ # ddof = 1 (delta degrees of freedom) accounts for the minus 1 in the # divisor for the calculation of variance return np.var(self.spatial['avg_dark'], ddof=1) @property def s_2_y(self): """Spatial variance from image. .. emva1288:: :Section: spatial :Short: Spatial variance from image :Symbol: $s^2_{y}$ :Unit: DN2 """ return self.s_2_y_measured - (self.sigma_2_y_stack / self.spatial['L']) @property def s_2_y_dark(self): """Spatial variance from image, .. emva1288:: :Section: spatial :Short: Spatial variance from image :Symbol: $s^2_{y}$ :Unit: DN2 """ return self.s_2_y_measured_dark - (self.sigma_2_y_stack_dark / self.spatial['L_dark']) @property def DSNU1288(self): """DSNU. Dark Signal NonUniformity (in e^-) is defined as the deviation standard of the dark signal devided by the overall system gain. If the variance is negative, it will return NaN instead of an imaginary number. .. emva1288:: :Section: spatial :Short: DSNU :Symbol: $DSNU_{1288}$ :Unit: $e^-$ :LatexName: DSNU """ if self.s_2_y_dark < 0.: return np.nan return np.sqrt(self.s_2_y_dark) / self.K def DSNU1288_DN(self): """DSNU in DN. Defined as the DSNU in e^- multiplied by the overall system gain. Returns NaN if the dark signal variance is negative. Returns ------- float : The DSNU in DN. .. emva1288:: :Section: spatial :Short: DSNU in DN :Symbol: $DSNU_{1288.DN}$ :Unit: DN :LatexName: DSNUDN """ if self.s_2_y_dark < 0: return np.nan return np.sqrt(self.s_2_y_dark) @property def PRNU1288(self): """PRNU. Photo Response NonUniformity (in %) is defined as the square root of the difference between the spatial variance of a bright image (or from an average of bright images to remove temporal difformities) and the spatial variance of dark signal, divided by the difference between the mean of a bright image and the mean of a dark image. .. 
emva1288:: :Section: spatial :Short: PRNU :Symbol: $PRNU_{1288}$ :Unit: \% :LatexName: PRNU """ return (np.sqrt(self.s_2_y - self.s_2_y_dark) * 100 / (np.mean(self.spatial['avg']) - np.mean(self.spatial['avg_dark']))) @property def histogram_PRNU(self): """PRNU histogram. Uses the :func:`~emva1288.process.routines.Histogram1288` function to make the histogram. .. emva1288:: :Section: defect_pixel :Short: PRNU histogram """ # For prnu, perform the convolution y = self.spatial['sum'] - self.spatial['sum_dark'] # Slicing at the end is to remove boundary effects. y = convolve(y, self._histogram_high_pass_box)[2:-2, 2:-2] h = routines.Histogram1288(y, self._histogram_Qmax) # Rescale the bins h['bins'] /= (self.spatial['L'] * 25.) return h @property def histogram_PRNU_accumulated(self): """Accumulated PRNU histogram. Uses the :func:`~emva1288.process.routines.Histogram1288` function to make the histogram. .. emva1288:: :Section: defect_pixel :Short: accumulated PRNU histogram """ # For prnu, perform the convolution y = self.spatial['sum'] - self.spatial['sum_dark'] y = convolve(y, self._histogram_high_pass_box)[2:-2, 2:-2] # For the accumulated histogram substract the mean y = np.abs(y - int(np.mean(y))) h = routines.Histogram1288(y, self._histogram_Qmax) # Rescale the bins h['bins'] /= (self.spatial['L'] * 25.) # Perform the cumulative summation h['values'] = np.cumsum(h['values'][::-1])[::-1] return h @property def histogram_DSNU(self): """DSNU histogram. Uses the :func:`~emva1288.process.routines.Histogram1288` function to make the histogram. .. emva1288:: :Section: defect_pixel :Short: DSNU histogram """ # For dsnu, the image is just the dark image, upscaled to have # only integers y = self.spatial['sum_dark'] h = routines.Histogram1288(y, self._histogram_Qmax) # Rescale the bins, this is due to upscaling the average image to have # only integers h['bins'] /= (self.spatial['L_dark'] * 25.) return h @property def histogram_DSNU_accumulated(self): """Accumulated DSNU histogram. Uses the :func:`~emva1288.process.routines.Histogram1288` function to make the histogram. .. emva1288:: :Section: defect_pixel :Short: accumulated DSNU histogram """ # Dark image upscaled to have only integers y = self.spatial['sum_dark'] # For the accumulated dsnu histogram, substract the mean from the image y = np.abs(y - int(np.mean(y))) h = routines.Histogram1288(y, self._histogram_Qmax) # Rescale the bins h['bins'] /= (self.spatial['L_dark'] * 25.) # Perform the cumulative summation (the ::-1 means backwards) # because the cumsum function is performed contrary to what we need h['values'] = np.cumsum(h['values'][::-1])[::-1] return h def xml(self, filename=None): # pragma: no cover """Method that writes the results in xml format to a file. Parameters ---------- filename : str, optional The file to write the results. If None, the xml string won't be written but will be returned instead. Returns ------- str : If the xml string is not written into a file, it is returned. """ results = self.results_by_section if not filename: return routines.dict_to_xml(results) routines.dict_to_xml(results, filename=filename) @property def results(self): """Dictionnary with all the values and metadata for EMVA1288 values. It uses the :func:`~emva1288.process.routines.obj_to_dict` to compute all the results at once. 
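
        For illustration only, a hypothetical session (assuming ``dat`` is
        a :class:`~emva1288.process.data.Data1288` instance built
        beforehand)::

            >>> res = Results1288(dat.data)
            >>> values = res.results     # values and metadata as a dict
            >>> res.print_results()      # same data, printed by section
            >>> xml_string = res.xml()   # same data, as an xml string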
""" return routines.obj_to_dict(self) @property def results_by_section(self): # pragma: no cover """Results ordered by section.""" return routines._sections_first(self.results) def print_results(self): # pragma: no cover """Print results to the screen.""" results = self.results_by_section for section, attributes in results.items(): print('*' * 50) print(section) print('-' * 50) for attribute, info in attributes.items(): if 'value' not in info: continue print('{:<50}{:<30}{:>10}'.format(info.get('short'), str(info.get('symbol')), str(info.get('value')))) print('*' * 50) print(' ') PKpIۯ))emva1288/process/data.py# -*- coding: utf-8 -*- # Copyright (c) 2014 The EMVA1288 Authors. All rights reserved. # Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. """Transform image data into data This class takes a dictionary with image data (product of :class:`emva1288.process.loader.LoadImageData`) and transforms it into data that can be used for the EMVA1288 computations. It is important to note, that this is separate from LoadImageData because this step, produces float values that are not easily transportable (db, json, etc...) without loosing accuracy. """ import numpy as np import logging class Data1288(object): """Take data from parsed images (descriptor file) and expose it as temporal and spatial dicts This dicts have the appropriate form for the processing """ def __init__(self, data, pixels=None, loglevel=logging.INFO): """Data processing object init method. It sets the logging configuration and creates a :class:`python:logging.Logger` object using :func:`python:logging.getLogger` and sets the loglevel. Creates the data dictionaries with the call of :meth:`_get_temporal` for temporal data and :meth:`_get_spatial` for spatial data. Parameters ---------- data : The data descriptor file. loglevel : int, optional The loglevel for the Logger object. """ logging.basicConfig() self.log = logging.getLogger('Data') self.log.setLevel(loglevel) self.pixels = pixels or data['width'] * data['height'] self.data = {} self.data['temporal'] = self._get_temporal(data['temporal']) self.data['spatial'] = self._get_spatial(data['spatial']) def _get_temporal(self, data): """Fill the temporal dict, with the stuff that we need. Compute the averages and variances from the sums (sum and pvar) If there is only one exposure time, the arrays in the returned dict will all have the same length as the photon count array. For this case, the exposure times and the dark value data array elements will all be the same. Parameters ---------- data : The data dictionary containing the temporal data sets. Returns ------- dict : A dict containing all temporal test data. The keys are the following: *'texp'*: the array of the exposure times used for the test, *'u_p'*: the array of photon count in a pixel for each exposure time and photon count, *'u_y'*: the array of the mean digital value for each exposure time and photon count, *'s2_y'*: the array of the digital value variance for each exposure time and photon count, *'u_ydark'*: the array of the mean digital dark value for each exposure time and *'s2_ydark'*: the array of the digital dark value variance for each exposure time. 
Raises ------ ValueError If there is no one 0.0 photon count entry for each exposure time and at least one bright point """ temporal = {} # List of exposure times exposures = np.asarray(sorted(data.keys())) # texp is now an array of exposure times temporal['texp'] = exposures u_p = [] u_y = [] s2_y = [] u_ydark = [] s2_ydark = [] for t in exposures: # photons is a list of photon counts # images for each exposure time photons = sorted(data[t].keys()) if 0.0 not in photons: raise ValueError('Every exposure point must have a 0.0 photon') if len(photons) < 2: raise ValueError('There must be at least one bright photon') # get data for dark image d = self._get_temporal_data(data[t][0.0]) u_ydark.append(d['mean']) s2_ydark.append(d['var']) for p in photons[1:]: # For each photon count, get the data u_p.append(p) d = self._get_temporal_data(data[t][p]) u_y.append(d['mean']) s2_y.append(d['var']) # Append all data to temporal dict temporal['u_p'] = np.asarray(u_p) temporal['u_y'] = np.asarray(u_y) temporal['s2_y'] = np.asarray(s2_y) temporal['u_ydark'] = np.asarray(u_ydark) temporal['s2_ydark'] = np.asarray(s2_ydark) # In case we have only one exposure, we need arrays with the # same length as the up # we just repeat the same value over and over if len(exposures) == 1: l = len(temporal['u_p']) v = temporal['texp'][0] temporal['texp'] = np.asarray([v for _i in range(l)]) v = temporal['u_ydark'][0] temporal['u_ydark'] = np.asarray([v for _i in range(l)]) v = temporal['s2_ydark'][0] temporal['s2_ydark'] = np.asarray([v for _i in range(l)]) return temporal def _get_temporal_data(self, d): """Convert temporal image data to mean and variance The mean is the sum of the pixels of the two images divided by (2 * self.pixels) The variance is the pseudo variance(integer), divided by (4 * self.pixels) Parameters ---------- d : dict The data dictionary that contains the sum and pvar of the pixels of a sum of two consecutive images with the same photon count and exposure time. Returns ------- dict : A data dictionary with the following keys: *'mean'*: the mean as described above and *'var'*: the variance as described above. """ mean_ = d['sum'] / (2.0 * self.pixels) var_ = d['pvar'] / (4.0 * self.pixels) return {'mean': mean_, 'var': var_} def _get_spatial(self, data): """Fill the spatial dictionary. The images (sum and pvar) are preserved, they are needed for processing. Parameters ---------- data : The data dictionary to take data from. Returns ------- dict : A dict containing all spatial test data. The keys are the following: - *'texp'*: the array of exposure times for spatial tests, - *'u_p'*: the array of photon count average for each exposure times. 
- *'sum'*: the array of the image sum for each photon count and exposure time, - *'pvar'*: the array of the pvar image for each photon count and exposure time, - *'L'*: the number of image taken to make the sum and pvar images for each photon count and each exposure time, - *'avg'*: the average computed from the sum image for each photon count and each exposure time, - *'var'*: the varianve computed from the pvar image for each photon count and each exposure time, - *'sum_dark'*: the sum image in the dark for each exposure time, - *'pvar_dark'*: the pvar image in the dark for each exposure time, - *'L_dark'*: the number of image taken in the dark to compose the sum and pvar images for each exposure time, - *'avg_dark'*: the average computed from the dark sum image for each exposure time, - *'var_dark'*: the variance computed from the dark pvar image for each exposure time. Raises ------ ValueError If the there is no exactly one dark and one bright point """ if len(data) != 1: raise ValueError('Spatial data must contain only one exposure' ' time') # the spatial exposure is the only exposure exposure = list(data.keys())[0] spatial = {} spatial['texp'] = exposure photons = sorted(data[exposure].keys()) if 0.0 not in photons: raise ValueError('there must be a 0.0 photon count for spatial') if len(photons) != 2: raise ValueError('There must be one bright and one dark') # get dark spatial data d = self._get_spatial_data(data[exposure][0.0], postfix='_dark') spatial.update(d) # Photon count for bright spatial bphoton = photons[1] d = self._get_spatial_data(data[exposure][bphoton]) spatial['u_p'] = bphoton spatial.update(d) return spatial def _get_spatial_data(self, d, postfix=''): """Add the mean and variance to the spatial image data. The mean is the sum of the images divided by L, which is the number of images for the spatial test. The variance is the pseudovariance divided by (L^2 * (L-1)). Parameters ---------- d : dict The data dictionary containing the sum and pvar of the images. postfix: str, optional String to add in the resulting dictionary keys Returns ------- dict : A data dictionary processed from the input. The keys (+ postfix) are: - *'sum'*: the sum image preserved from input, - *'pvar'*: the pvar image preserved from input, - *'L'*: the number of image summed, - *'avg'*: the average of the sum image as described above and - *'var'*: the variance as described above computed from the pvar image. """ # This cast is just in case the original images are unint sum_ = d['sum'].astype(np.int64) pvar_ = d['pvar'].astype(np.int64) L = d['L'] avg_ = sum_ / (1.0 * L) var_ = pvar_ / (1.0 * np.square(L) * (L - 1)) return {'sum' + postfix: sum_, 'pvar' + postfix: pvar_, 'L' + postfix: L, 'avg' + postfix: avg_, 'var' + postfix: var_} PK̆I{emva1288/process/loader.py# -*- coding: utf-8 -*- # Copyright (c) 2014 The EMVA1288 Authors. All rights reserved. # Use of this source code is governed by a GNU GENERAL PUBLIC LICENSE that can # be found in the LICENSE file. """Load image data This class takes a dictionary (product of parser.ParseEmvaDescriptorFile). Load the related images and reduce it's data to the minimum possible, preserving all relevant image data in as integers """ import os import numpy as np from . import routines import logging class LoadImageData(object): """Using an info dictionary, load the images and fill data with the relevant information. 
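
    For illustration only, a typical processing chain might look like the
    following sketch (``'descriptor.txt'`` is a hypothetical file name)::

        >>> from emva1288.process.parser import ParseEmvaDescriptorFile
        >>> from emva1288.process.loader import LoadImageData
        >>> from emva1288.process.data import Data1288
        >>> info = ParseEmvaDescriptorFile('descriptor.txt')
        >>> imgs = LoadImageData(info.images)
        >>> dat = Data1288(imgs.data)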
""" def __init__(self, images, path='', loglevel=logging.INFO, fload=routines.load_image, fload_args=[], fload_kwargs={}): """Loader init method. Uses a :class:`python:logging.Logger` object to print infos for the user. In the initialization, it :meth:`loads <_load_data>` the infos from the images into the data dictionary. Parameters ---------- images : dict The :attr:`~emva1288.process.parser.ParseEmvaDescriptorFile.images` attribute of a :class:`~emva1288.process.parser.ParseEmvaDescriptorFile` object that have parsed the images of a descriptor file. path : str, optional The path to the directory containing the images. loglevel : int, optional The information level for the :class:`~python:logging.Logger`. fload : func. optional The function that will load the images (one at a time). fload_args : list, optional The list of args for the fload function. fload_kwargs : dict, optional The kwargs dictionary for the fload function. """ self.data = {'temporal': {}, 'spatial': {}} self._fload = fload self._fload_args = fload_args self._fload_kwargs = fload_kwargs self._shape = set() logging.basicConfig() self.log = logging.getLogger('Loader') self.log.setLevel(loglevel) self._path = path # load the images self._load_data(images) def _load_data(self, images): """Using the information in images dict fill self.data when loading the images and fills the temporal and spatial dicts """ # iterate over the kind for kind in ('temporal', 'spatial'): exposures = set(images[kind].keys()) # iterate over the exposure times for each kind for exposure in exposures: # for this exposure time, get the list of photons counts photon_counts = list(images[kind][exposure].keys()) if 0.0 not in photon_counts: # Every exposure time should have at least one dark # image with a 0 photon count. raise ValueError('Every exposure must have a 0.0 photons' ' for dark information') if len(photon_counts) < 2: # Every exposure time should have at least one dark image # and one bright image. raise ValueError('All exposure must have at least 2 points' ' one dark and one bright') # Iterate over the images for this exposure time and this # photon count for photons, fnames in images[kind][exposure].items(): # get the image data data_imgs = self._get_imgs_data(fnames, kind) # fill data dict self.data[kind].setdefault(exposure, {}) self.data[kind][exposure][photons] = data_imgs shape = self._shape.pop() self.data['height'] = shape[0] self.data['width'] = shape[1] def _get_imgs_data(self, fnames, kind): """Returns the desired image data This depends on the kind of data """ # Load a list containing the images arr_imgs = self._load_imgs(fnames) # Get the sum and the pseudo-variance of this list of images imgs = routines.get_int_imgs(arr_imgs) # For spatial we only want the sum and pseudo-var of the images if kind == 'spatial': return imgs # For temporal, we only want the basic statistics (we don't need # the whole sum and pvar images) d = {} d['sum'] = np.sum(imgs['sum']) # sum of the sum of images d['pvar'] = np.sum(imgs['pvar']) # sum of the pvar image return d def _load_imgs(self, fnames): """For a list of path to images, load and append them to a returned list. """ imgs = [] # Iterate over all images for fname in fnames: # Get the path to the specific image. filename = os.path.join(self._path, fname) self.log.debug('Loading ' + fname) if not os.path.isfile(filename): # If the path is not good, raise an error. 
                raise IOError('No such file: ' + filename)
            # If the path is good, load the image using the fload function
            img = self._fload(filename, *self._fload_args,
                              **self._fload_kwargs)
            # Append the loaded image to the list
            imgs.append(img)
            # Add the image shape to the set
            self._shape.add(img.shape)
            if len(self._shape) > 1:  # pragma: no cover
                # If the shape set contains more than one element, not all
                # images have the same shape.
                raise ValueError('All images must have the same shape')
        return imgs

emva1288-0.4.1.dist-info/DESCRIPTION.rst
EMVA1288
========

The EMVA1288 module is a Python package to process and print the results
of EMVA1288 tests. This is the reference implementation for the EMVA1288
Standard for Measurement and Presentation of Specifications for Machine
Vision Sensors and Cameras.

Please visit the `Emva1288 Website `__ for information and the latest
releases of the standard.

Documentation
-------------

The whole module documentation is hosted on `this website `__.
There, one can find installation details for the EMVA1288 module and the
API documentation with tutorials on how to use the module.

emva1288-0.4.1.dist-info/metadata.json
{"classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4"], "extensions": {"python.details": {"contacts": [{"email": "ariza.federico@gmail.com", "name": "Federico Ariza", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/EMVA1288/emva1288"}}}, "extras": [], "generator": "bdist_wheel (0.26.0)", "keywords": ["sensors", "cameras"], "metadata_version": "2.0", "name": "emva1288", "run_requires": [{"requires": ["jinja2", "lxml", "matplotlib", "numpy", "pillow", "scipy"]}], "summary": "EMVA1288 reference implementation", "version": "0.4.1"}

emva1288-0.4.1.dist-info/pbr.json
{"is_release": false, "git_version": "f1fe73b"}

emva1288-0.4.1.dist-info/top_level.txt
emva1288

emva1288-0.4.1.dist-info/WHEEL
Wheel-Version: 1.0
Generator: bdist_wheel (0.26.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

emva1288-0.4.1.dist-info/METADATA
Metadata-Version: 2.0
Name: emva1288
Version: 0.4.1
Summary: EMVA1288 reference implementation
Home-page: https://github.com/EMVA1288/emva1288
Author: Federico Ariza
Author-email: ariza.federico@gmail.com
License: UNKNOWN
Keywords: sensors,cameras
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Requires-Dist: jinja2
Requires-Dist: lxml
Requires-Dist: matplotlib
Requires-Dist: numpy
Requires-Dist: pillow
Requires-Dist: scipy