PK!eHHjson_syntax/__init__.py""" The JSON syntax library is a combinatorial parser / generator library for managing conversion of Python objects to and from common JSON types. It's not strictly limited to JSON, but that's the major use case. """ from .ruleset import RuleSet from .cache import SimpleCache from .std import ( # noqa atoms, decimals, decimals_as_str, floats, floats_nan_str, iso_dates, iso_dates_loose, optional, enums, faux_enums, lists, sets, dicts, ) from .attrs import attrs_classes, named_tuples, tuples from .unions import unions from .helpers import J2P, P2J, IP, IJ # noqa def std_ruleset( *, floats=floats, decimals=decimals, dates=iso_dates, enums=enums, lists=lists, sets=sets, unions=unions, extras=(), custom=RuleSet, cache=None, ): """ Constructs a RuleSet with the provided rules. The arguments here are to make it easy to override. For example, to replace ``decimals`` with ``decimals_as_str`` just call ``std_ruleset(decimals=decimals_as_str)`` """ return custom( atoms, floats, decimals, dates, optional, enums, lists, attrs_classes, sets, dicts, named_tuples, tuples, unions, *extras, cache=cache, ) PK!Ijson_syntax/action_v1.pyfrom .helpers import ErrorContext from datetime import datetime import math from operator import attrgetter def convert_date_loosely(value): return datetime.fromisoformat(value).date() def check_parse_error(value, *, parser, error): try: parser(value) except error: return False else: return True def check_isinst(value, *, typ): return isinstance(value, typ) def convert_float(value): value = float(value) if math.isfinite(value): return value elif math.isnan(value): return "NaN" elif value < 0.0: return "-Infinity" else: return "Infinity" def check_float(value): return ( isinstance(value, (int, float)) or isinstance(value, str) and value.lower() in ("nan", "inf", "infinity" "-inf", "-infinity", "+inf", "+infinity") ) convert_enum_str = attrgetter("name") def convert_none(value): if value is not None: raise ValueError("Expected None") return None def check_str_enum(value, *, mapping): return isinstance(value, str) and value in mapping def convert_str_enum(value, *, mapping): return mapping[value] def convert_optional(value, *, inner): if value is None: return None return inner(value) def check_optional(value, *, inner): return value is None or inner(value) def convert_collection(value, *, inner, con): with ErrorContext("[:]"): return con(map(inner, value)) def check_collection(value, *, inner, con): return isinstance(value, con) and all(map(inner, value)) def convert_mapping(value, *, key, val, con): with ErrorContext("[:]"): return con((key(k), val(v)) for k, v in value.items()) def check_mapping(value, *, key, val, con): return isinstance(value, con) and all(key(k) and val(v) for k, v in value.items()) def convert_dict_to_attrs(value, *, pre_hook, inner_map, con): value = pre_hook(value) args = {} for name, inner in inner_map: with ErrorContext(f"[{name!r}]"): try: arg = value[name] except KeyError: pass else: args[name] = inner(arg) return con(**args) def check_dict(value, *, inner_map, pre_hook): value = pre_hook(value) if not isinstance(value, dict): return False for name, inner, required in inner_map: with ErrorContext(f"[{name!r}]"): try: arg = value[name] except KeyError: if required: return False else: if not inner(arg): return False return True def convert_attrs_to_dict(value, *, post_hook, inner_map): out = {} for name, inner, default in inner_map: with ErrorContext(f".{name}"): field = getattr(value, name) if field == default: continue out[name] = inner(field) if post_hook is not None: out = getattr(value, post_hook)(out) return out def convert_tuple_as_list(value, *, inner, con): with ErrorContext("[:]"): return con(cvt(val) for val, cvt in zip(value, inner)) def check_tuple_as_list(value, *, inner, con): return ( isinstance(value, con) and len(value) == len(inner) and all(chk(val) for val, chk in zip(value, inner)) ) def check_union(value, *, steps): return any(step(value) for step in steps) def convert_union(value, *, steps, typename): for check, convert in steps: if check(value): return convert(value) raise ValueError(f"Expected value of type {typename} got {value!r}") PK!ko?00json_syntax/attrs.pyfrom .helpers import ( IJ, IP, J2P, JPI, P2J, SENTINEL, has_origin, identity, is_attrs_field_required, issub_safe, resolve_fwd_ref, ) from .action_v1 import ( check_dict, check_isinst, check_tuple_as_list, convert_attrs_to_dict, convert_dict_to_attrs, convert_tuple_as_list, ) from functools import partial def attrs_classes( *, verb, typ, ctx, pre_hook="__json_pre_decode__", post_hook="__json_post_encode__", check="__json_check__", ): """ Handle an ``@attr.s`` or ``@dataclass`` decorated class. """ if verb not in JPI: return try: fields = typ.__attrs_attrs__ except AttributeError: try: fields = typ.__dataclass_fields__ except AttributeError: return else: fields = fields.values() if verb == IP: return partial(check_isinst, typ=typ) inner_map = [] for field in fields: if field.init or verb == P2J: tup = ( field.name, ctx.lookup( verb=verb, typ=resolve_fwd_ref(field.type, typ), accept_missing=True ), ) if verb == P2J: tup += (field.default,) elif verb == IJ: tup += (is_attrs_field_required(field),) inner_map.append(tup) if verb == J2P: pre_hook_method = getattr(typ, pre_hook, identity) return partial( convert_dict_to_attrs, pre_hook=pre_hook_method, inner_map=tuple(inner_map), con=typ, ) elif verb == P2J: post_hook = post_hook if hasattr(typ, post_hook) else None return partial( convert_attrs_to_dict, post_hook=post_hook, inner_map=tuple(inner_map) ) elif verb == IJ: check = getattr(typ, check, None) if check: return check pre_hook_method = getattr(typ, pre_hook, identity) return partial(check_dict, inner_map=inner_map, pre_hook=pre_hook_method) def named_tuples(*, verb, typ, ctx): """ Handle a ``NamedTuple(name, [('field', type), ('field', type)])`` type. Also handles a ``collections.namedtuple`` if you have a fallback handler. """ if verb not in JPI or not issub_safe(typ, tuple): return try: fields = typ._field_types except AttributeError: try: fields = typ._fields except AttributeError: return fields = [(name, None) for name in fields] else: fields = fields.items() if verb == IP: return partial(check_isinst, typ=typ) defaults = {} defaults.update(getattr(typ, "_fields_defaults", ())) defaults.update(getattr(typ, "_field_defaults", ())) inner_map = [] for name, inner in fields: tup = ( name, ctx.lookup(verb=verb, typ=resolve_fwd_ref(inner, typ), accept_missing=True), ) if verb == P2J: tup += (defaults.get(name, SENTINEL),) elif verb == IJ: tup += (name not in defaults,) inner_map.append(tup) if verb == J2P: return partial( convert_dict_to_attrs, pre_hook=identity, inner_map=tuple(inner_map), con=typ, ) elif verb == P2J: return partial( convert_attrs_to_dict, post_hook=None, inner_map=tuple(inner_map) ) elif verb == IJ: return partial(check_dict, pre_hook=identity, inner_map=tuple(inner_map)) def tuples(*, verb, typ, ctx): """ Handle a ``Tuple[type, type, type]`` product type. Use a ``NamedTuple`` if you don't want a list. """ if verb not in JPI or not has_origin(typ, tuple): return args = typ.__args__ if Ellipsis in args: # This is a homogeneous tuple, use the lists rule. return inner = [ctx.lookup(verb=verb, typ=arg) for arg in args] if verb == J2P: return partial(convert_tuple_as_list, inner=inner, con=tuple) elif verb == P2J: return partial(convert_tuple_as_list, inner=inner, con=list) elif verb == IP: return partial(check_tuple_as_list, inner=inner, con=tuple) elif verb == IJ: return partial(check_tuple_as_list, inner=inner, con=list) PK!HM  json_syntax/cache.pyfrom warnings import warn class UnhashableType(UserWarning): pass class ForwardAction: """ A mutable callable. Since actions are simply functions, this lets us create a promise of a function and replace it when we have the actual function ready. This is a simple way to handle cycles in types. """ __slots__ = ("__call__",) def __init__(self, call): self.__call__ = call def __repr__(self): return f"" class SimpleCache: def __init__(self): self.cache = {} def get(self, *, verb, typ): result = self._lookup(verb, typ) return result if result is not NotImplemented else None def _lookup(self, verb, typ): """ Handle unhashable types by warning about them. """ try: return self.cache.get((verb, typ)) except TypeError: warn( f"Type {typ} is unhashable; json_syntax probably can't handle this", category=UnhashableType, ) return NotImplemented def in_flight(self, *, verb, typ): """ Called when we begin determining the action for a type. We construct a forward action that will be fulfilled by the ``complete`` call. """ if self._lookup(verb, typ) is None: def unfulfilled(value): # This can't be pickled, which is a good thing. raise TypeError( f"Forward reference was never fulfilled to {verb} for {typ}" ) forward = ForwardAction(unfulfilled) self.cache[verb, typ] = forward return forward def de_flight(self, *, verb, typ, forward): """ If a lookup fails, this removes the entry so that further attempts can be made. """ present = self._lookup(verb, typ) if present is forward: del self.cache[verb, typ] def complete(self, *, verb, typ, action): """ Once a type is complete, we fulfill any ForwardActions and replace the cache entry with the actual action. """ present = self._lookup(verb, typ) if present is NotImplemented: return # Unhashable. elif present is None: self.cache[verb, typ] = action elif isinstance(present, ForwardAction): present.__call__ = action # Replace the cache entry, if it's never been used let the ForwardAction be # garbage collected. self.cache[verb, typ] = action class ThreadLocalCache(SimpleCache): """ Avoids threads conflicting while looking up rules by keeping the cache in thread local storage. You can also prevent this by looking up rules during module loading. """ def __init__(self): self._local = threading.local() @property def cache(self): local = self._local try: return local.cache except AttributeError: _cache = local.cache = {} return _cache PK!#!yyjson_syntax/examples/README.md# The flags rule This rule lets you use enums as strings without losing all Enums as the `faux_enums` rule does. ## Demonstrates * How to write a rule * How to write an action * How to write a fake type that's compatible with `typing.Union` ## Caveats * Requires Python 3.7 * A user could mistakenly create a Flag instance * You'd probably be better off using enums PK!HRQ<< json_syntax/examples/__init__.py""" Examples of additional rules are in this directory. """ PK! json_syntax/examples/flags.py""" This module constructs its own fake type and a rule to support it. This lets you construct a quick set of enums that are represented as strings. """ from ..helpers import JP, II from functools import partial class Flag(type): """ An example of a custom type that lets you quickly create string-only flags. This also demonstrates a technique that makes it possible to create a fake type that can be used within ``typing.Union``. Thanks to __class_getitem__, you can invoke this as ``Flag['foo', 'bar', 'etc']`` but this requires Python 3.7! """ def __new__(cls, *args, **kwds): """This is necessary to be a subclass of `type`, which is necessary to be used in a Union.""" return super().__new__(cls, cls.__name__, (), {}) def __init__(self, *elems): """""" if not elems: raise TypeError("Flag must be called with at least one string argument.") if not all(isinstance(elem, str) for elem in elems): raise TypeError("Flag elements must all be strings.") self.elems = frozenset(elems) if len(self.elems) != len(elems): raise TypeError("Duplicate elements are prohibited.") def __class_getitem__(cls, elems): return cls(*elems) if isinstance(elems, tuple) else cls(elems) def __repr__(self): return f'{self.__class__.__name__}[{", ".join(map(repr, self.elems))}]' def _check_flag(elems, value): """ Checks that a value is a member of a set of flags. Note that we use a top-level function and `partial`. The trouble with lambdas or local defs is that they can't be pickled because they're inaccessible to the unpickler. If you don't intend to pickle your encoders, though, they're completely fine to use in rules. """ return isinstance(value, str) and value in elems def _convert_flag(elems, value): """ Checks the value is in elems and returns it. """ if value not in elems: raise ValueError(f'Expect {value!r} to be one of {", ".join(map(repr, elems))}') return value def flags(*, verb, typ, ctx): """ A simple rule to allow certain strings as flag values, but without converting them to an actual Enum. This rule is triggered with a fake type ``Flag['string', 'string', 'string']``. """ if not isinstance(typ, Flag): return if verb in JP: return partial(_convert_flag, typ.elems) elif verb in II: return partial(_check_flag, typ.elems) PK!Njson_syntax/helpers.pyfrom importlib import import_module import logging try: from typing import _eval_type except ImportError: _eval_type = None logger = logging.getLogger(__name__) J2P = "json_to_python" P2J = "python_to_json" IJ = "inspect_json" IP = "inspect_python" II = (IJ, IP) JP = (J2P, P2J) JPI = (J2P, P2J, IP, IJ) NoneType = type(None) SENTINEL = object() def identity(value): return value def has_origin(typ, origin, *, num_args=None): """ Determines if a concrete class (a generic class with arguments) matches an origin and has a specified number of arguments. The typing classes use dunder properties such that ``__origin__`` is the generic class and ``__args__`` are the type arguments. """ try: t_origin = typ.__origin__ except AttributeError: return False else: if not isinstance(origin, tuple): origin = (origin,) return t_origin in origin and ( num_args is None or len(typ.__args__) == num_args ) def issub_safe(sub, sup): try: return issubclass(sub, sup) except TypeError: return False def resolve_fwd_ref(typ, context_class): """ Tries to resolve a forward reference given a containing class. This does nothing for regular types. """ resolved = None try: namespace = vars(import_module(context_class.__module__)) except AttributeError: logger.warning("Couldn't determine module of %r", context_class) else: resolved = _eval_type(typ, namespace, {}) if resolved is None: return typ else: return resolved if _eval_type is None: # If typing's internal API changes, we have tests that break. def resolve_fwd_ref(typ, context_class): # noqa return typ _missing_values = set() try: import attr _missing_values.add(attr.NOTHING) except ImportError: pass try: import dataclasses _missing_values.add(dataclasses.MISSING) except ImportError: pass def is_attrs_field_required(field): """ Determine if a field's default value is missing. """ if field.default not in _missing_values: return False try: factory = field.default_factory except AttributeError: return True else: return factory in _missing_values class ErrorContext: """ Inject contextual information into an exception message. This won't work for some exceptions like OSError that ignore changes to `args`; likely not an issue for this library. There is a neglible performance hit if there is no exception. >>> with ErrorContext('.foo'): ... with ErrorContext('[0]'): ... with ErrorContext('.qux'): ... 1 / 0 Traceback (most recent call last) ZeroDivisionError: division by zero; at .foo[0].qux The `__exit__` method will catch the exception and look for a `_context` attribute assigned to it. If none exists, it appends `; at ` and the context string to the first string argument. As the exception walks up the stack, outer ErrorContexts will be called. They will see the `_context` attribute and insert their context immediately after `; at ` and before the existing context. Thus, in the example above: ('division by zero',) -- the original message ('division by zero; at .qux',) -- the innermost context ('division by zero; at [0].qux',) ('division by zero; at .foo[0].qux',) -- the outermost context For simplicity, the method doesn't attempt to inject whitespace. To represent names, consider surrounding them with angle brackets, e.g. `` """ def __init__(self, context): self.context = str(context) def __enter__(self): pass def __exit__(self, exc_type, exc_value, traceback): try: if exc_value is None or not self.context: return args = list(exc_value.args) arg_num, point = getattr(exc_value, "_context", (None, None)) if arg_num is None: for arg_num, val in enumerate(args): if isinstance(val, str): args[arg_num] = args[arg_num] + "; at " if val else "At " break else: # This 'else' clause runs if we don't `break` arg_num = len(args) args.append("At ") point = len(args[arg_num]) arg = args[arg_num] args[arg_num] = arg[:point] + self.context + arg[point:] exc_value.args = tuple(args) exc_value._context = (arg_num, point) except Exception: # Swallow exceptions to avoid adding confusion. pass PK!le(E55json_syntax/ruleset.pyfrom .cache import SimpleCache import logging logger = logging.getLogger(__name__) TRACE = 5 class RuleSet: def __init__(self, *rules, cache=None): self.rules = rules self.cache = cache or SimpleCache() def lookup(self, *, verb, typ, accept_missing=False): logger.log(TRACE, "lookup(%s, %r): start", verb, typ) if typ is None: if not accept_missing: raise TypeError(f"Attempted to find {verb} for 'None'") return self.fallback(verb=verb, typ=typ) action = self.cache.get(verb=verb, typ=typ) if action is not None: logger.log(TRACE, "lookup(%s, %r): cached", verb, typ) return action forward = self.cache.in_flight(verb=verb, typ=typ) try: for rule in self.rules: action = rule(verb=verb, typ=typ, ctx=self) if action is not None: self.cache.complete(verb=verb, typ=typ, action=action) logger.log(TRACE, "lookup(%s, %r): computed", verb, typ) return action logger.log(TRACE, "lookup(%s, %r): fallback", verb, typ) action = self.fallback(verb=verb, typ=typ) if action is not None: self.cache.complete(verb=verb, typ=typ, action=action) logger.log(TRACE, "lookup(%s, %r): computed by fallback", verb, typ) return action finally: self.cache.de_flight(verb=verb, typ=typ, forward=forward) def fallback(self, *, verb, typ): pass PK!h-$-$json_syntax/std.pyfrom .helpers import has_origin, issub_safe, NoneType, JP, J2P, P2J, IJ, IP, II, JPI from .action_v1 import ( check_collection, check_float, check_isinst, check_mapping, check_optional, check_parse_error, check_str_enum, convert_collection, convert_date_loosely, convert_enum_str, convert_float, convert_mapping, convert_none, convert_optional, convert_str_enum, ) from collections import OrderedDict from datetime import datetime, date from decimal import Decimal from enum import Enum from functools import partial from typing import Union """ These are standard rules to handle various types. All rules take a verb, a Python type and a context, which is generally a RuleSet. A rule returns a conversion function for that verb. """ def atoms(*, verb, typ, ctx): "Rule to handle atoms on both sides." if issub_safe(typ, (str, int, NoneType)): if verb in JP: if typ is NoneType: return convert_none for base in (str, bool, int): if issubclass(typ, base): return base elif verb == IP: for base in (NoneType, str, bool, int): if issubclass(typ, base): return partial(check_isinst, typ=base) elif verb == IJ: for base in (NoneType, str, bool, int): if issubclass(typ, base): return partial(check_isinst, typ=base) def floats(*, verb, typ, ctx): """ Rule to handle floats passing NaNs through unaltered. JSON technically recognizes integers and floats. Many JSON generators will represent floats with integral value as integers. Thus, this rule will convert both integers and floats in JSON to floats in Python. Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is not standard JSON. This rule simply treats them as regular float values. If you want to catch them, you can set ``allow_nan=False`` in ``json.dump()``. """ if issub_safe(typ, float): if verb in JP: return float elif verb == IP: return partial(check_isinst, typ=float) elif verb == IJ: return partial(check_isinst, typ=(int, float)) def floats_nan_str(*, verb, typ, ctx): """ Rule to handle floats passing NaNs through as strings. Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is not standard JSON. This rule converts special constants to string names. """ if issub_safe(typ, float): if verb == J2P: return float elif verb == P2J: return convert_float elif verb == IP: return partial(check_isinst, typ=float) elif verb == IJ: return check_float def decimals(*, verb, typ, ctx): """ Rule to handle decimals natively. This rule requires that your JSON library has decimal support, e.g. simplejson. Other JSON processors may convert values to and from floating-point; if that's a concern, consider `decimals_as_str`. This rule will fail if passed a special constant. """ if issub_safe(typ, Decimal): if verb in JP: return Decimal elif verb in II: return partial(check_isinst, typ=Decimal) def decimals_as_str(*, verb, typ, ctx): """ Rule to handle decimals as strings. This rule bypasses JSON library decimal support, e.g. simplejson. This rule will fail if passed a special constant. """ if issub_safe(typ, Decimal): if verb == J2P: return Decimal elif verb == P2J: return str elif verb == IP: return partial(check_isinst, typ=Decimal) elif verb == IJ: return partial(check_parse_error, parser=Decimal, error=ArithmeticError) def iso_dates(*, verb, typ, ctx, loose_date=False): """ Rule to handle iso formatted datetimes and dates. This is the strict variant that simply uses the `fromisoformat` and `isoformat` methods of `date` and `datetime`. There is a loose variant that will accept a datetime in a date. A datetime always accepts both dates and datetimes. """ if issub_safe(typ, date): if verb == P2J: if issubclass(typ, datetime): return datetime.isoformat return date.isoformat elif verb == J2P: if issubclass(typ, datetime): return datetime.fromisoformat return convert_date_loosely if loose_date else date.fromisoformat elif verb == IP: return partial( check_isinst, typ=datetime if issubclass(typ, datetime) else date ) elif verb == IJ: base = datetime if issubclass(typ, datetime) or loose_date else date return partial( check_parse_error, parser=base.fromisoformat, error=(TypeError, ValueError), ) #: A loose variant of ``iso_dates`` that will accept time data in a ``date``. iso_dates_loose = partial(iso_dates, loose_date=True) def enums(*, verb, typ, ctx): "Rule to convert between enumerated types and strings." if issub_safe(typ, Enum): if verb == P2J: return convert_enum_str elif verb == J2P: return partial(convert_str_enum, mapping=dict(typ.__members__)) elif verb == IP: return partial(check_isinst, typ=typ) elif verb == IJ: return partial(check_str_enum, mapping=frozenset(typ.__members__.keys())) def faux_enums(*, verb, typ, ctx): "Rule to fake an Enum by actually using strings." if issub_safe(typ, Enum): if verb in JP: mapping = {name: name for name in typ.__members__} return partial(convert_str_enum, mapping=mapping) elif verb in II: return partial(check_str_enum, mapping=frozenset(typ.__members__.keys())) def optional(*, verb, typ, ctx): """ Handle an ``Optional[inner]`` by passing ``None`` through. """ if verb not in JPI: return if has_origin(typ, Union, num_args=2): if NoneType not in typ.__args__: return inner = None for arg in typ.__args__: if arg is not NoneType: inner = arg if inner is None: raise TypeError(f"Could not find inner type for Optional: {typ}") else: return inner = ctx.lookup(verb=verb, typ=inner) if verb in JP: return partial(convert_optional, inner=inner) elif verb in II: return partial(check_optional, inner=inner) def lists(*, verb, typ, ctx): """ Handle a ``List[type]`` or ``Tuple[type, ...]``. Trivia: the ellipsis indicates a homogenous tuple; ``Tuple[A, B, C]`` is a product type that contains exactly those elements. """ if verb not in JPI: return if has_origin(typ, list, num_args=1): (inner,) = typ.__args__ elif has_origin(typ, tuple, num_args=2): (inner, ell) = typ.__args__ if ell is not Ellipsis: return else: return inner = ctx.lookup(verb=verb, typ=inner) con = list if verb in (P2J, IJ) else typ.__origin__ if verb in JP: return partial(convert_collection, inner=inner, con=con) elif verb in II: return partial(check_collection, inner=inner, con=con) def sets(*, verb, typ, ctx): """ Handle a ``Set[type]`` or ``FrozenSet[type]``. """ if verb not in JPI: return if not has_origin(typ, (set, frozenset), num_args=1): return (inner,) = typ.__args__ con = list if verb in (P2J, IJ) else typ.__origin__ inner = ctx.lookup(verb=verb, typ=inner) if verb in JP: return partial(convert_collection, inner=inner, con=con) elif verb in II: return partial(check_collection, inner=inner, con=con) def _stringly(*, verb, typ, ctx): """ Rule to handle types that reliably convert directly to strings. This is used internally by dicts. """ if verb not in JPI or not issub_safe(typ, (int, str, date, Enum)): return for base in str, int: if issubclass(typ, base): if verb in JP: return base elif verb in II: return partial(check_isinst, typ=base) for rule in enums, iso_dates: action = rule(verb=verb, typ=typ, ctx=ctx) if action is not None: return action def dicts(*, verb, typ, ctx): """ Handle a ``Dict[key, value]`` where key is a string, integer or enum type. """ if verb not in JPI: return if not has_origin(typ, (dict, OrderedDict), num_args=2): return (key_type, val_type) = typ.__args__ key_type = _stringly(verb=verb, typ=key_type, ctx=ctx) if key_type is None: return val_type = ctx.lookup(verb=verb, typ=val_type) if verb in JP: return partial(convert_mapping, key=key_type, val=val_type, con=typ.__origin__) elif verb in II: return partial(check_mapping, key=key_type, val=val_type, con=typ.__origin__) PK!a3json_syntax/unions.pyfrom .helpers import has_origin, JP, J2P, P2J, IJ, IP, II from .action_v1 import convert_union, check_union from functools import partial from typing import Union def unions(*, verb, typ, ctx): """ Handle undiscriminated unions of the form ``Union[A, B, C, D]`` by inspecting the inner types one by one. This is the "implicit discriminant" technique, exploiting the fact that Python already tags all values with their type. A potential problem is that the JSON structure may not retain that information. So another rule could attempt to add a discriminant to the JSON data. For example, if you had two ``attrs`` style classes, they could add a `type` field with the class name. As there are many ways to do that, this rule doesn't attempt to pick one for you. Note: The optional rule handles the common case of ``Union[T, NoneType]`` more efficiently, so it should be before this. """ if has_origin(typ, Union): if verb in JP: if verb == P2J: check_verb = IP elif verb == J2P: check_verb = IJ else: return steps = [] for arg in typ.__args__: check = ctx.lookup(verb=check_verb, typ=arg) convert = ctx.lookup(verb=verb, typ=arg) steps.append((check, convert)) return partial(convert_union, steps=steps, typename=repr(typ)) elif verb in II: steps = [] for arg in typ.__args__: check = ctx.lookup(verb=verb, typ=arg) steps.append(check) return partial(check_union, steps=steps) PK!++#json_syntax-0.1.1.dist-info/LICENSEMIT License Copyright (c) 2019 Ben Samuel Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK!HڽTU!json_syntax-0.1.1.dist-info/WHEEL A н#Z;/"d&F[xzw@Zpy3Fv]\fi4WZ^EgM_-]#0(q7PK!H[,K 3$json_syntax-0.1.1.dist-info/METADATA[rƒ?((Ȏvnt7vRܔJE ! I˾>ɞ=YMVH|t>=~Z_Չ4}[wb~uuUk'|ƻzWx^qPecs~xyv/ں^t~;ыkwֽoaoy}[%Ée&q ȳ| 4K:ٶVmM7M7.D{<յ|[obtCvle/nŵ33V,yۦh950:4xŶƶ,!g;ܛjiV<-4%j/)XY50dŪRnE̺nFoRcsZSz]ڛ+(m"CK( ɋ]K:seoFUܵW7u,Ϭ#HJtt+pek|>!wzm1hi Vt -TS;5m;ok>Ds r~2:<2gqwe]` kْkěxvŹP.`6(+0 (4#l!}o% `\5 D"SV!Fl2 tƷk-:ncW4] n'ŗ|``ݏv x4.< 1y@ ~YWr9h+<#WB͑HW΋e[Xngs#,Zj(ĘO[ 6]%/]Ҹ ~ j=4nW72\)îNp){SW.lY%xg]V> *KͱB8ޕk t ŧɣkEK΀1οۿ>~2 z-Cm @VA/*}=gk۽_pr;lje8•")+Fu W?NgˋWSoa5YՕЯz-ڎ+ v[scN}Q[E5Lm[N1ﯼh&Ms'r'(wuSp3 q:;OfgvQO E#5BN "Rq{Rؠ{Xvs SI=6`qDo'"#p'F[.F"Kq[1L>TsFDwHsA,xdHsU"7dSՋDA Կr@ kv7.5ewL.N櫹yq9غ\Fr8~ijJ%B1' {؃-zdQyy)O6$Ya `˅*O okFd£%c@Rm] F9CW)Ie?Sн=€4_O?%nsl5LJTOݥ--.DIͳ0 SXdPE`{˅\S|r2 kv+k*݉j $?0fwjw@;q IN}/,6Kz*N2G Qn!v мАȤ(wND.t+ 1yW;\cn >XxXdVF0Md-^p41ϟ+!fњDo/ PT}-{vؽgY>鬋^Еf:R0!09,1$@voϡ^ۀ ᐠ $oRE5q>liLü[Z`s..VdjL[OASiͦ:PR"5\m52e_ÓR6qpRTU}+t[PZjkM c6.% eSzJiۭ%t@!h'2և@i0PkᯔS[}۪C3R.PnВ+J.W 4[)OV&^7~PbJUQNX(z1 &w 3- i`?==ӧӧù@|P?<}x >@mbvdqp ɛxWޑ;x/ߑ\o&Hd!C0 B K2!9'3 dk(CD*((+!}<]D*Z^I8"5R<\`S,(kH-Ii;$ $ mw hO8' ]YvI `ymw&4olC#iԜB;oa{@4>҅،ī]i-.\I&9%'rJ]ndVeZŶ&ڑ!ctIj^=eˈ1.+uP)g/\<\?9W',%52L*1ذZe@[*BjD`EƠCڣ!w ^mV ٳ`M?1d)! [RʴZ`DUкV.',,7Y ˔8ZՅ%6cS`]BH OYǢJ@ꁿDjHƵ3evh=C bAwY,>^==#n%Pgc_^\~<§y>u-Ta<4l* ̆hF0(Ir4ENgc3B4,W`̐Cam'f\[Y`0/*/ȭ %؋{2Km.o"We0¼|tf!83]n\ham, KNH^XߍKzR$^ƒ$>v^FMΤ%Q ݄[ Y{vMO'KI5mu4j5P5SHn ZS@j O)Ծ֎ЊL9M.~M̦ }r7*,~FC}'q -,Kzd,b1 z)34ek 2ڶg׫nH1DN&\(rް8z8ahj(q2*T Xs1)GLt霓LDVu@ÝާC>9$}~4E>,/ ~~ɃO8^>?1枢 3Elkz:ED0P>r>m= .`&K )H|<@B|CNZexKif>DZ0P3 )=DJ\кd*HyjK+ Z`WMpSs_8H4!>*poooSMҺxv'z}F9QiNq8w6k/W/_ePK!HMn "json_syntax-0.1.1.dist-info/RECORD}Ɏ@}wʰ!L2H1pn:ѤөԢ6_SpۄxieSa-."rfhew,NCEt gŋ|fTvWcUWPj"]:kbQ;8~v@Pו|>f%oZOǥ C< :, CoX%E3Rsc>'SiU8{La2Ėޓeuu?-y'~i_Fe AF(?qsToYODUm5jڽyϐ!IX|%fouFtM)pPd{WNe1^PU2 EVw[=Qk{Ga]*ҭJuq/q!0_82 xUK9}?J/jwv|M5 xcB1 EːgPH5r$?pÖVRojson_syntax/helpers.pyPK!le(E55Pjson_syntax/ruleset.pyPK!h-$-$eWjson_syntax/std.pyPK!a3{json_syntax/unions.pyPK!++#json_syntax-0.1.1.dist-info/LICENSEPK!HڽTU!json_syntax-0.1.1.dist-info/WHEELPK!H[,K 3$json_syntax-0.1.1.dist-info/METADATAPK!HMn "json_syntax-0.1.1.dist-info/RECORDPKA