kll/__init__.py

#!/usr/bin/env python3
'''
KLL Compiler
KLL - Keyboard Layout Language
'''

# Copyright (C) 2018 by Jacob Alexander
#
# This file is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.

## Imports

import argparse
import os
import sys

import kll.common.stage as stage

## Variables

__version__ = '0.5.5.5'
kll_name = 'kll'


### Decorators ###

# Print Decorator Variables
ERROR = '\033[5;1;31mERROR\033[0m:'
WARNING = '\033[5;1;33mWARNING\033[0m:'

# Python Text Formatting Fixer...
# Because the creators of Python are averse to proper capitalization.
textFormatter_lookup = {
    "usage: ": "\033[1mUsage\033[0m: ",
    "optional arguments": "\033[1mOptional Arguments\033[0m",
}


def textFormatter_gettext(s):
    return textFormatter_lookup.get(s, s)


argparse._ = textFormatter_gettext


### Misc Utility Functions ###

def git_revision(kll_path):
    '''
    Retrieve git information using given path

    @param kll_path: Path to git directory

    @return: (revision, changed, revision_date, long_version)
    '''
    import git

    # Default values if git is not available
    revision = ""
    changed = []
    date = ""
    long_version = ""

    # Just in case git can't be found
    try:
        # Initialize repo
        repo = git.Repo(kll_path)

        # Get hash of the latest git commit
        revision = repo.head.object.hexsha

        # Get list of files that have changed since the commit
        changed = [item.a_path for item in repo.index.diff(None)] + [item.a_path for item in repo.index.diff('HEAD')]

        # Get commit date
        date = repo.head.commit.committed_datetime

        long_version = ".{0} - {1}".format(revision, date)
    except git.exc.InvalidGitRepositoryError:
        pass

    return revision, changed, date, long_version


### Argument Parsing ###

def checkFileExists(filename):
    '''
    Validate that file exists

    @param filename: Path to file
    '''
    if not os.path.isfile(filename):
        print("{0} {1} does not exist...".format(ERROR, filename))
        sys.exit(1)


def command_line_args(control, input_args):
    '''
    Initialize argparse and process all command line arguments

    @param control: ControlStage object which has access to all the group argument parsers
    '''
    # Setup argument processor
    parser = argparse.ArgumentParser(
        usage="{} [options..] [..]".format(kll_name),
        description="KLL Compiler - Generates specified output from KLL .kll files.",
        epilog="Example: {0} scan_map.kll".format(kll_name),
        formatter_class=argparse.RawTextHelpFormatter,
        add_help=False,
    )

    # Install path
    install_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))

    # Get git information
    control.git_rev, control.git_changes, control.git_date, long_version = git_revision(
        os.path.join(install_path, '..')
    )
    control.version = "{0}{1}".format(__version__, long_version)

    # Optional Arguments
    parser.add_argument(
        '-h', '--help',
        action="help",
        help="This message."
    )
    parser.add_argument(
        '-v', '--version',
        action="version",
        version="{0} {1}".format(kll_name, control.version),
        help="Show program's version number and exit"
    )
    parser.add_argument(
        '--path',
        action="store_true",
        help="Shows the absolute path to the kll compiler installation directory. Then exits.",
    )
    parser.add_argument(
        '--layout-cache-path',
        action="store_true",
        help="Shows the absolute path to the kll layouts cache directory. Then exits.",
    )

    # Add stage arguments
    control.command_line_flags(parser)

    # Process Arguments
    args = parser.parse_args(input_args)

    # If --path defined, lookup installation path, then exit
    if args.path:
        print(install_path)
        sys.exit(0)

    # If --layout-cache-path defined, lookup cache directory for layouts cache, then exit
    if args.layout_cache_path:
        import layouts
        mgr = layouts.Layouts()
        layout_path = mgr.layout_path
        print(layout_path)
        sys.exit(0)

    # Utilize parsed arguments in each of the stages
    control.command_line_args(args)


### Main Entry Point ###

def main(args):
    # Initialize Control Stages
    control = stage.ControlStage()

    # Process Command-Line Args
    command_line_args(control, args)

    # Process Control Stages
    control.process()

    # Successful completion
    sys.exit(0)


if __name__ == '__main__':
    main(sys.argv[1:])


kll/__main__.py

#!/usr/bin/env python3
'''
KLL Compiler
Keyboard Layout Language
'''

# Copyright (C) 2014-2018 by Jacob Alexander
#
# This file is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.

### Paths ###

import os
import sys

path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
sys.path.insert(0, path)

### Imports ###

import kll

### Main Entry Point ###

if __name__ == '__main__':
    # See __init__.py
    kll.main(sys.argv[1:])


kll/kll

#!/usr/bin/env python3
'''
KLL Compiler
Keyboard Layout Language
'''

# Copyright (C) 2014-2018 by Jacob Alexander
#
# This file is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.

### Paths ###

import os
import sys

path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
sys.path.insert(0, path)

### Imports ###

import kll

### Main Entry Point ###

if __name__ == '__main__':
    # See __init__.py
    kll.main(sys.argv[1:])


kll/common/README.md

# KLL Compiler - common

This is where the bulk of the KLL compiler processing occurs, including all of the datastructures used to contain the parsed expressions.

## Files

Brief description of each of the files.

### Process

Files that deal with file and expression processing, including parsing and tokenization.

* [emitter.py](emitter.py) - Base classes for [KLL emitters](../emitters).
* [parse.py](parse.py) - Contains most of the KLL xBNF parsing rules and how to map them to datastructures.
* [stage.py](stage.py) - Handles each stage of KLL file processing, from file reading to emitter output.
This is where to start if you're unsure. ### Datastructure Datastructure assembly classes used to contain KLL data. * [channel.py](channel.py) - Container classes for KLL pixel channels. * [context.py](context.py) - Container classes for KLL contexts (e.g. Generic, Configuration, BaseMap, DefaultMap, PartialMap and Merge). * [expression.py](expression.py) - Container classes for KLL expressions (e.g. MapExpression, etc.). * [file.py](file.py) - Container class for reading kll files. * [id.py](id.py) - Container classes for KLL id elements (e.g. HIDId, PixelId, ScanCodeId, NoneId, etc.). * [modifier.py](modifier.py) - Container classes for KLL modifiers (e.g. AnimationModifier, PixelModifier, etc.). * [organization.py](organization.py) - Container classes for expression organizations. Handles expression merging. * [position.py](position.py) - Container class for physical KLL positions. * [schedule.py](schedule.py) - Container class for KLL schedules and schedule parameters. ### Constants Lookup tables and other static data. * [hid_dict.py](hid_dict.py) - Dictionary lookup for USB HID codes (both symbolic and numeric). PKUDKkll/common/__init__.pyPKUDK܄kll/common/channel.py#!/usr/bin/env python3 ''' KLL Channel Containers ''' # Copyright (C) 2016-2017 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . ### Imports ### ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class Channel: ''' Pixel Channel Container ''' def __init__(self, uid, width): self.uid = uid self.width = width def __repr__(self): return "{0}:{1}".format(self.uid, self.width) class ChannelList: ''' Pixel Channel List Container ''' def __init__(self): self.channels = [] def setChannels(self, channel_list): ''' Apply channels to Pixel ''' for channel in channel_list: self.channels.append(Channel(channel[0], channel[1])) def strChannels(self): ''' __repr__ of Channel when multiple inheritance is used ''' output = "" for index, channel in enumerate(self.channels): if index > 0: output += "," output += "{0}".format(channel) return output def __repr__(self): return self.strChannels() PKLZLE"E"kll/common/context.py#!/usr/bin/env python3 ''' KLL Context Definitions * Generic (auto-detected) * Configuration * BaseMap * DefaultMap * PartialMap ''' # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . 
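# Illustrative usage sketch (not part of the original sources) for the Channel /
# ChannelList containers from channel.py above.  The (uid, width) pairs are
# made-up example values.
from kll.common.channel import ChannelList

channels = ChannelList()
channels.setChannels([(12, 8), (13, 8), (14, 8)])  # three 8-bit channels
print(channels)  # strChannels() output -> "12:8,13:8,14:8"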
### Imports ### import copy import os import kll.common.organization as organization ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class Context: ''' Base KLL Context Class ''' def __init__(self): ''' Context initialization ''' # Each context may have one or more included kll files # And each of these files will have at least 1 Context self.kll_files = [] # File data assigned to each context # This info is populated during the PreprocessorStage self.lines = [] self.data = "" self.parent = None # Tokenized data sets self.classification_token_data = [] self.expressions = [] # Organized Expression Datastructure self.organization = organization.Organization(self) # Layer Information (unset, unless a PartialMapContext) self.layer = None # Connect Id information (unset, but usually initialized) self.connect_id = None # HID Mapping object self.hid_mapping = None def __repr__(self): # Build list of all the info return "(kll_files={}, hid_mapping={}, lines={}, data='''{}''')".format( self.kll_files, self.hid_mapping, self.lines, self.data, ) def layer_info(self): ''' Returns a text string indicating which layer this is ''' if self.layer is None: return "0" return "{}".format(self.layer + 1) def initial_context(self, lines, data, parent): ''' Used in the PreprocessorStage to update the initial line and kll file data @param lines: Data split per line @param data: Entire context in a single string @param parent: Parent node, always a KLLFile ''' self.lines = lines self.data = data self.parent = parent self.connect_id = parent.connect_id def query(self, kll_expression, kll_type=None): ''' Query Returns a dictionary of the specified property. Most queries should use this. See organization.py:Organization for property_type details. 
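# Illustrative sketch (not part of the original file): pulling organized expressions
# out of a context with query().  The key names 'MapExpression' and 'ScanCode' are
# assumptions about Organization.data_mapping (see organization.py); per the
# docstring here, the call returns a dictionary of unique-key -> expression.
def dump_scancode_mappings(kll_context):  # hypothetical helper
    mappings = kll_context.query('MapExpression', 'ScanCode')
    for key, expression in sorted(mappings.items()):
        print(key, expression.kllify())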
@param kll_expression: String name of expression type @param kll_type: String name of the expression sub-type If set to None, return all @return: context_name: (dictionary) ''' if kll_type is None: return self.organization.data_mapping[kll_expression] return self.organization.data_mapping[kll_expression][kll_type] class GenericContext(Context): ''' Generic KLL Context Class ''' class ConfigurationContext(Context): ''' Configuration KLL Context Class ''' class BaseMapContext(Context): ''' Base Map KLL Context Class ''' class DefaultMapContext(Context): ''' Default Map KLL Context Class ''' class PartialMapContext(Context): ''' Partial Map KLL Context Class ''' def __init__(self, layer): ''' Partial Map Layer Context Initialization @param: Layer associated with Partial Map ''' super().__init__() self.layer = layer class MergeContext(Context): ''' Container class for a merged Context Has references to the original contexts merged in ''' def __init__(self, base_context): ''' Initialize the MergeContext with the base context Another MergeContext can be used as the base_context @param base_context: Context used to seed the MergeContext ''' super().__init__() # Setup list of kll_files self.kll_files = base_context.kll_files # Transfer layer, whenever merging in, we'll use the new layer identifier self.layer = base_context.layer # List of context, in the order of merging, starting from the base self.contexts = [base_context] # Copy the base context Organization into the MergeContext self.organization = copy.copy(base_context.organization) self.organization.parent = self # Set the layer if the base is a PartialMapContext if base_context.__class__.__name__ == 'PartialMapContext': self.layer = base_context.layer def merge(self, merge_in, map_type, debug): ''' Merge in context Another MergeContext can be merged into a MergeContext @param merge_in: Context to merge in to this one @param map_type: Used for map specific merges (e.g. BaseMap reductions) @param debug: Enable debug out ''' # Extend list of kll_files self.kll_files.extend(merge_in.kll_files) # Use merge_in layer identifier as the master (most likely to be correct) self.layer = merge_in.layer # Append to context list self.contexts.append(merge_in) # Merge context self.organization.merge( merge_in.organization, map_type, debug ) # Set the layer if the base is a PartialMapContext if merge_in.__class__.__name__ == 'PartialMapContext': self.layer = merge_in.layer def cleanup(self, debug=False): ''' Post-processing step for merges that may need to remove some data in the organization. Mainly used for dropping BaseMapContext expressions after generating a PartialMapContext. 
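# Illustrative sketch (not part of the original file) of the intended MergeContext
# flow: seed with a base context, then merge further contexts in order.  The
# variable names and the map_type strings ('DefaultMap', 'PartialMap') are
# assumptions; stage.py drives the real merge order and arguments.
merged = MergeContext(base_map_context)
merged.merge(default_map_context, 'DefaultMap', debug=False)
merged.merge(partial_map_context, 'PartialMap', debug=False)
merged.reduction()        # optional simplification (see below); discards context
print(merged.files())     # short names of the contributing .kll files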
''' self.organization.cleanup(debug) def reduction(self, debug=False): ''' Simplifies datastructure NOTE: This will remove data, therefore, context is lost ''' self.organization.reduction(debug) def paths(self): ''' Returns list of file paths used to generate this context ''' file_paths = [] for kll_context in self.contexts: # If context is a MergeContext then we have to recursively search if kll_context.__class__.__name__ is 'MergeContext': file_paths.extend(kll_context.paths()) else: file_paths.append(kll_context.parent.path) return file_paths def files(self): ''' Short form list of file paths used to generate this context ''' file_paths = [] for file_path in self.paths(): file_paths.append(os.path.basename(file_path)) return file_paths def __repr__(self): return "(kll_files={0}, organization={1})".format( self.files(), self.organization, ) def query_contexts(self, kll_expression, kll_type): ''' Context Query Returns a list of tuples (dictionary, kll_context) doing a deep search to the context leaf nodes. This results in pre-merge data and is useful for querying information about files used during compilation. See organization.py:Organization for property_type details. @param kll_expression: String name of expression type @param kll_type: String name of the expression sub-type @return: context_name: (dictionary, kll_context) ''' # Build list of leaf contexts leaf_contexts = [] for kll_context in self.contexts: # Recursively search if necessary if kll_context.__class__.__name__ == 'MergeContext': leaf_contexts.extend( kll_context.query_contexts( kll_expression, kll_type)) else: leaf_contexts.append(( kll_context.query( kll_expression, kll_type ), kll_context )) return leaf_contexts PKUDK kll/common/emitter.py#!/usr/bin/env python3 ''' KLL Emitter Base Classes ''' # Copyright (C) 2016-2017 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . ### Imports ### import json import os import re import sys ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class Emitter: ''' KLL Emitter Base Class NOTE: Emitter should do as little as possible in the __init__ function. 
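# Illustrative sketch (not part of emitter.py): the minimal shape of a custom
# emitter built on this Emitter base class.  The class name, flag, and file name
# are made-up; real emitters live under kll/emitters and override these hooks,
# which otherwise just print "has not been implemented yet" warnings.
class MyBackendEmitter(Emitter):
    def __init__(self, control):
        super().__init__(control)  # keep __init__ light, per the NOTE above
        self.output_file = None

    def command_line_flags(self, parser):
        parser.add_argument('--my-backend-output', default='backend.out')

    def command_line_args(self, args):
        self.output_file = args.my_backend_output

    def process(self):
        # Gather data from earlier stages through self.control here
        self.result = "generated content\n"

    def output(self):
        with open(self.output_file, 'w') as outputFile:
            outputFile.write(self.result)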
''' def __init__(self, control): ''' Emitter initialization @param control: ControlStage object, used to access data from other stages ''' self.control = control self.color = False # Signal erroring due to an issue # We may not want to exit immediately as we could find other potential # issues that need fixing self.error_exit = False def command_line_args(self, args): ''' Group parser for command line arguments @param args: Name space of processed arguments ''' print("{0} '{1}' '{2}' has not been implemented yet" .format( WARNING, self.command_line_args.__name__, type(self).__name__ ) ) def command_line_flags(self, parser): ''' Group parser for command line options @param parser: argparse setup object ''' print("{0} '{1}' '{2}' has not been implemented yet" .format( WARNING, self.command_line_flags.__name__, type(self).__name__ ) ) def process(self): ''' Emitter Processing ''' print("{0} '{1}' '{2}' has not been implemented yet" .format( WARNING, self.process.__name__, type(self).__name__ ) ) def output(self): ''' Final Stage of Emitter Generate desired outputs ''' print("{0} '{1}' '{2}' has not been implemented yet" .format( WARNING, self.output.__name__, type(self).__name__ ) ) def check(self): ''' Determines whether or not we've successfully emitted. ''' return not self.error_exit class FileEmitter: ''' KLL File Emitter Class Base class for any emitter that wants to output a file. Generally, it is recommended to use the TextEmitter as templates are more readable. ''' def __init__(self): ''' FileEmitter Initialization ''' self.output_files = [] def generate(self, output_path): ''' Generate output file @param contents: String contents of file @param output_path: Path to output file ''' for name, contents in self.output_files: with open("{0}/{1}".format(output_path, name), 'w') as outputFile: outputFile.write(contents) class TextEmitter: ''' KLL Text Emitter Class Base class for any text emitter that wants to use the templating functionality If multiple files need to be generated, call load_template and generate multiple times. e.g. load_template('_myfile.h') generate('/tmp/myfile.h') load_template('_myfile2.h') generate('/tmp/myfile2.h') TODO - Generate list of unused tags ''' def __init__(self): ''' TextEmitter Initialization ''' # Dictionary used to do template replacements self.fill_dict = {} self.tag_list = [] self.template = None def load_template(self, template): ''' Loads template file Looks for <|tags|> to replace in the template @param template: Path to template ''' # Does template exist? 
if not os.path.isfile(template): print("{0} '{1}' does not exist...".format(ERROR, template)) sys.exit(1) self.template = template # Generate list of fill tags with open(template, 'r') as openFile: for line in openFile: match = re.findall(r'<\|([^|>]+)\|>', line) for item in match: self.tag_list.append(item) def generate(self, output_path): ''' Generates the output file from the template file @param output_path: Path to the generated file ''' # Make sure we've called load_template at least once if self.template is None: print( "{0} TextEmitter template (load_template) has not been called.".format(ERROR)) sys.exit(1) # Process each line of the template, outputting to the target path with open(output_path, 'w') as outputFile: with open(self.template, 'r') as templateFile: for line in templateFile: # TODO Support multiple replacements per line # TODO Support replacement with other text inline match = re.findall(r'<\|([^|>]+)\|>', line) # If match, replace with processed variable if match: try: outputFile.write(self.fill_dict[match[0]]) except KeyError as err: print("{0} '{1}' not found, skipping...".format( WARNING, match[0] )) outputFile.write("\n") # Otherwise, just append template to output file else: outputFile.write(line) class JsonEmitter: ''' ''' def __init__(self): ''' JsonEmitter Initialization ''' self.json_dict = {} def generate_json(self, output_path): ''' Generates the output json file using an self.json_dict ''' output = json.dumps(self.json_dict, indent=4, sort_keys=True) # Write json file with open(output_path, 'w') as outputFile: outputFile.write(output) PKLDnnkll/common/expression.py#!/usr/bin/env python3 ''' KLL Expression Container ''' # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . 
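# Illustrative sketch (not part of the original sources): using the TextEmitter
# templating described in emitter.py above.  The template and output paths are
# made-up; tags in the template use the <|TagName|> form matched by the regular
# expression in load_template()/generate().
class HeaderEmitter(TextEmitter):  # hypothetical emitter
    def make(self):
        self.load_template('templates/_header.h')      # template contains <|Information|>
        self.fill_dict['Information'] = "// generated by the kll compiler"
        self.generate('/tmp/header.h')                 # tags replaced, other lines copied verbatim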
### Imports ### import copy from kll.common.id import CapId ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class Expression: ''' Container class for KLL expressions ''' def __init__(self, lparam, operator, rparam, context): ''' Initialize expression container @param lparam: LOperatorData token @param operator: Operator token @param rparam: ROperatorData token @param context: Parent context of expression ''' # First stage/init self.lparam_token = lparam self.operator_token = operator self.rparam_token = rparam self.context = context # TODO, set multiple contexts for later stages # Second stage self.lparam_sub_tokens = [] self.rparam_sub_tokens = [] # BaseMap expression self.base_map = False # Default ConnectId self.connect_id = 0 # Mutate class into the desired type self.__class__ = { '=>': NameAssociationExpression, '<=': DataAssociationExpression, '=': AssignmentExpression, ':': MapExpression, }[self.operator_type()] def operator_type(self): ''' Determine which base operator this operator is of All : (map) expressions are tokenized/parsed the same way @return Base string representation of the operator ''' if ':' in self.operator_token.value: return ':' return self.operator_token.value def final_tokens(self, no_filter=False): ''' Return the final list of tokens, must complete the second stage first @param no_filter: If true, do not filter out Space tokens @return Finalized list of tokens ''' ret = self.lparam_sub_tokens + \ [self.operator_token] + self.rparam_sub_tokens if not no_filter: ret = [x for x in ret if x.type != 'Space'] return ret def regen_str(self): ''' Re-construct the string based off the original set of tokens ; ''' return "{0}{1}{2};".format( self.lparam_token.value, self.operator_token.value, self.rparam_token.value, ) def point_chars(self, pos_list): ''' Using the regenerated string, point to a given list of characters Used to indicate where a possible issue/syntax error is @param pos_list: List of character indices i.e. > U"A" : : U"1"; > ^ ''' out = "\t{0}\n\t".format(self.regen_str()) # Place a ^ character at the given locations curpos = 1 for pos in sorted(pos_list): # Pad spaces, then add a ^ out += ' ' * (pos - curpos) out += '^' curpos += pos return out def rparam_start(self): ''' Starting positing char of rparam_token in a regen_str ''' return len(self.lparam_token.value) + len(self.operator_token.value) def __repr__(self): # Build string representation based off of what has been set # lparam, operator and rparam are always set out = "Expression: {0}{1}{2}".format( self.lparam_token.value, self.operator_token.value, self.rparam_token.value, ) # TODO - Add more depending on what has been set return out def kllify(self): ''' Returns KLL version of the expression May not look like the original expression if simplication has taken place ''' print( "{0} kllify not defined for {1}".format( WARNING, self.__class__.__name__)) out = "{0}{1}{2};".format( self.lparam_token.value, self.operator_token.value, self.rparam_token.value, ) return out def unique_keys(self): ''' Generates a list of unique identifiers for the expression that is mergeable with other functional equivalent expressions. 
This method should never get called directly as a generic Expression ''' return [('UNKNOWN KEY', 'UNKNOWN EXPRESSION')] class AssignmentExpression(Expression): ''' Container class for assignment KLL expressions ''' type = None name = None pos = None value = None ## Setters ## def array(self, name, pos, value): ''' Assign array assignment parameters to expression @param name: Name of variable @param pos: Array position of the value (if None, overwrite the entire array) @param value: Value of the array, if pos is specified, this is the value of an element @return: True if parsing was successful ''' self.type = 'Array' self.name = name self.pos = pos self.value = value # If pos is not none, flatten if pos is not None: self.value = "".join(str(x) for x in self.value) return True def merge_array(self, new_expression=None): ''' Merge arrays, used for position assignments Merges unconditionally, make sure this is what you want to do first If no additional array is specified, just "cap-off" array. This does a proper array expansion into a python list. @param new_expression: AssignmentExpression type array, ignore if None ''' # First, check if base expression needs to be capped if self.pos is not None: # Generate a new string array new_value = [""] * self.pos # Append the old contents to the list new_value.append(self.value) self.value = new_value # Clear pos, to indicate that array has been capped self.pos = None # Next, if a new_expression has been specified, merge in if new_expression is not None and new_expression.pos is not None: # Check if we need to extend the list new_size = new_expression.pos + 1 - len(self.value) if new_size > 0: self.value.extend([""] * new_size) # Assign value to array self.value[new_expression.pos] = new_expression.value def variable(self, name, value): ''' Assign variable assignment parameters to expression @param name: Name of variable @param value: Value of variable @return: True if parsing was successful ''' self.type = 'Variable' self.name = name self.value = value # Flatten value, often a list of various token types self.value = "".join(str(x) for x in self.value) return True def __repr__(self): if self.type == 'Variable': return "{0} = {1};".format(self.name, self.value) elif self.type == 'Array': # Output KLL style array, double quoted elements, space-separated if isinstance(self.value, list): output = "{0}[] =".format(self.name) for value in self.value: output += ' "{0}"'.format(value) output += ";" return output # Single array assignment else: return "{0}[{1}] = {2};".format( self.name, self.pos, self.value) return "ASSIGNMENT UNKNOWN" def kllify(self): ''' Returns KLL version of the expression May not look like the original expression if simplication has taken place __repr__ is formatted correctly with assignment expressions ''' return self.__repr__() def unique_keys(self): ''' Generates a list of unique identifiers for the expression that is mergeable with other functional equivalent expressions. 
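# Illustrative walk-through (not part of the original file) of merge_array() above,
# using made-up positions and values.  Given two single-element array assignments
#
#   MyArray[3] = "c";    (pos=3, value flattened to "c")
#   MyArray[5] = "e";    (pos=5, value flattened to "e")
#
# the first expression is "capped" into value == ["", "", "", "c"] with pos cleared,
# and merging the second extends it to ["", "", "", "c", "", "e"].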
''' return [(self.name, self)] class NameAssociationExpression(Expression): ''' Container class for name association KLL expressions ''' type = None name = None association = None ## Setters ## def capability(self, name, association, parameters): ''' Assign a capability C function name association @param name: Name of capability @param association: Name of capability in target backend output @return: True if parsing was successful ''' self.type = 'Capability' self.name = name self.association = CapId(association, 'Definition', parameters) return True def define(self, name, association): ''' Assign a define C define name association @param name: Name of variable @param association: Name of association in target backend output @return: True if parsing was successful ''' self.type = 'Define' self.name = name self.association = association return True def __repr__(self): return "{0} <= {1};".format(self.name, self.association) def kllify(self): ''' Returns KLL version of the expression May not look like the original expression if simplication has taken place ''' return "{0}".format(self) def unique_keys(self): ''' Generates a list of unique identifiers for the expression that is mergeable with other functional equivalent expressions. ''' return [(self.name, self)] class DataAssociationExpression(Expression): ''' Container class for data association KLL expressions ''' type = None association = None value = None ## Setters ## def animation(self, animations, animation_modifiers): ''' Animation definition and configuration @return: True if parsing was successful ''' self.type = 'Animation' self.association = animations self.value = animation_modifiers return True def animationFrame(self, animation_frames, pixel_modifiers): ''' Pixel composition of an Animation Frame @return: True if parsing was successful ''' self.type = 'AnimationFrame' self.association = animation_frames self.value = pixel_modifiers return True def pixelPosition(self, pixels, position): ''' Pixel Positioning @return: True if parsing was successful ''' for pixel in pixels: pixel.setPosition(position) self.type = 'PixelPosition' self.association = pixels return True def scanCodePosition(self, scancodes, position): ''' Scan Code to Position Mapping Note: Accepts lists of scan codes Alone this isn't useful, but you can assign rows and columns using ranges instead of individually @return: True if parsing was successful ''' for scancode in scancodes: scancode.setPosition(position) self.type = 'ScanCodePosition' self.association = scancodes return True def update(self, new_expression): ''' Update expression @param new_expression: Expression used to update this one ''' supported = ['PixelPosition', 'ScanCodePosition'] if new_expression.type in supported: for scancode in self.association: scancode.updatePositions(new_expression.association[0]) def __repr__(self): if self.type in ['PixelPosition', 'ScanCodePosition']: output = "" for index, association in enumerate(self.association): if index > 0: output += "; " output += "{0}".format(association) return "{0};".format(output) return "{0} <= {1};".format(self.association, self.value) def kllify(self): ''' Returns KLL version of the expression May not look like the original expression if simplication has taken place __repr__ is formatted correctly with assignment expressions ''' if self.type in ['PixelPosition', 'ScanCodePosition']: output = "" for index, association in enumerate(self.association): if index > 0: output += "; " output += "{0}".format(association.kllify()) return 
"{0};".format(output) if self.type in ['AnimationFrame']: output = "{0} <= ".format(self.association[0].kllify()) for index, association in enumerate(self.value): if index > 0: output += ", " output += "{0}".format(association[0].kllify()) return "{0};".format(output) return "{0} <= {1};".format( self.association.kllify(), self.value.kllify()) def unique_keys(self): ''' Generates a list of unique identifiers for the expression that is mergeable with other functional equivalent expressions. ''' keys = [] # Positions require a bit more introspection to get the unique keys if self.type in ['PixelPosition', 'ScanCodePosition']: for index, key in enumerate(self.association): uniq_expr = self # If there is more than one key, copy the expression # and remove the non-related variants if len(self.association) > 1: uniq_expr = copy.copy(self) # Isolate variant by index uniq_expr.association = [uniq_expr.association[index]] keys.append(("{0}".format(key.unique_key()), uniq_expr)) # AnimationFrames are already list of keys # TODO Reorder frame assignments to dedup function equivalent mappings elif self.type in ['AnimationFrame']: for index, key in enumerate(self.association): uniq_expr = self # If there is more than one key, copy the expression # and remove the non-related variants if len(self.association) > 1: uniq_expr = copy.copy(self) # Isolate variant by index uniq_expr.association = [uniq_expr.association[index]] keys.append(("{0}".format(key), uniq_expr)) # Otherwise treat as a single element else: keys = [("{0}".format(self.association), self)] # Remove any duplicate keys # TODO Stat? Might be at neat report about how many duplicates were # squashed keys = list(set(keys)) return keys class MapExpression(Expression): ''' Container class for KLL map expressions ''' type = None triggers = None operator = None results = None animation = None animation_frame = None pixels = None position = None trigger_identifiers = ['IndCode', 'GenericTrigger', 'Layer', 'LayerLock', 'LayerShift', 'LayerLatch', 'ScanCode'] def __init__(self, triggers, operator, results): ''' Initialize MapExpression Used when copying MapExpressions from different expressions @param triggers: Sequence of combos of ranges of namedtuples @param operator: Type of map operation @param results: Sequence of combos of ranges of namedtuples ''' self.type = 'TriggerCode' self.triggers = triggers self.operator = operator self.results = results self.connect_id = 0 ## Setters ## def triggerCode(self, triggers, operator, results): ''' Trigger Code mapping Takes in any combination of triggers and sets the expression accordingly. @param triggers: Sequence of combos of ranges of namedtuples @param operator: Type of map operation @param results: Sequence of combos of ranges of namedtuples @return: True if parsing was successful ''' self.type = 'TriggerCode' self.triggers = triggers self.operator = operator self.results = results return True def pixelChannels(self, pixelmap, trigger): ''' Pixel Channel Composition @return: True if parsing was successful ''' self.type = 'PixelChannel' self.pixel = pixelmap self.position = trigger return True def triggersSequenceOfCombosOfIds(self, index=0): ''' Takes triggers and converts into explicit ids Only uses the first index by default. 
@param index: Which trigger sequence to expand @return: list of lists Example (index=0) [[[S10, S16], [S42]], [[S11, S16], [S42]]] -> [[10, 16], [42]] ''' nsequence = [] for combo in self.triggers[index]: ncombo = [] for identifier in combo: ncombo.append(identifier.json()) nsequence.append(ncombo) return nsequence def resultsSequenceOfCombosOfIds(self, index=0): ''' Takes results and converts into explicit capabilities Only uses the first index by default. @param index: Which result sequence to expand @return: list of lists ''' nsequence = [] for combo in self.results[index]: ncombo = [] for identifier in combo: ncombo.append(identifier.json()) nsequence.append(ncombo) return nsequence def sequencesOfCombosOfIds(self, expression_param): ''' Prettified Sequence of Combos of Identifiers @param expression_param: Trigger or Result parameter of an expression Scan Code Example [[[S10, S16], [S42]], [[S11, S16], [S42]]] -> (S10 + S16, S42)|(S11 + S16, S42) ''' output = "" # Sometimes during error cases, might be None if expression_param is None: return output # Iterate over each trigger/result variants (expanded from ranges), # each one is a sequence for index, sequence in enumerate(expression_param): if index > 0: output += "|" output += "(" # Iterate over each combo (element of the sequence) for index, combo in enumerate(sequence): if index > 0: output += ", " # Iterate over each trigger identifier for index, identifier in enumerate(combo): if index > 0: output += " + " output += "{0}".format(identifier) output += ")" return output def sequencesOfCombosOfIds_kll(self, expression_param): ''' Prettified Sequence of Combos of Identifiers, kll output edition @param expression_param: Trigger or Result parameter of an expression Scan Code Example [[[S10, S16], [S42]], [[S11, S16], [S42]]] -> ['S10 + S16, S42', 'S11 + S16, S42'] ''' output = [''] # Sometimes during error cases, might be None if expression_param is None: return output # Iterate over each trigger/result variants (expanded from ranges), # each one is a sequence for index, sequence in enumerate(expression_param): if index > 0: output.append('') # Iterate over each combo (element of the sequence) for index, combo in enumerate(sequence): if index > 0: output[-1] += ", " # Iterate over each trigger identifier for index, identifier in enumerate(combo): if index > 0: output[-1] += " + " output[-1] += "{0}".format(identifier.kllify()) return output def trigger_id_list(self): ''' Returns a list of ids within the sequence of combos May contain duplicates ''' id_list = [] # Iterate over each trigger/result variants (expanded from ranges) for sequence in self.triggers: # Iterate over each combo (element of the sequence) for combo in sequence: # Iterate over each trigger identifier for identifier in combo: id_list.append(identifier) return id_list def min_trigger_uid(self): ''' Returns the min numerical uid Used for trigger identifiers ''' min_uid = 0xFFFF # Iterate over list of identifiers in trigger for identifier in self.trigger_id_list(): if identifier.type in self.trigger_identifiers and identifier.get_uid() < min_uid: min_uid = identifier.get_uid() return min_uid def max_trigger_uid(self): ''' Returns the max numerical uid Used for trigger identifiers ''' max_uid = 0 # Iterate over list of identifiers in trigger for identifier in self.trigger_id_list(): if identifier.type in self.trigger_identifiers and identifier.get_uid() > max_uid: max_uid = identifier.get_uid() return max_uid def add_trigger_uid_offset(self, offset): ''' Adds a 
uid/scancode offset to all triggers This is used when applying the connect_id interconnect offset during mapping indices generation ''' # Iterate over list of identifiers in trigger for identifier in self.trigger_id_list(): if identifier.type == 'ScanCode': identifier.updated_uid = identifier.uid + offset def elems(self): ''' Return number of trigger and result elements Useful for determining if this is a trigger macro (2+) Should always return at least (1,1) unless it's an invalid calculation @return: ( triggers, results ) ''' elems = [0, 0] # XXX Needed? if self.type == 'PixelChannel': return tuple(elems) # Iterate over each trigger variant (expanded from ranges), each one is # a sequence for sequence in self.triggers: # Iterate over each combo (element of the sequence) for combo in sequence: # Just measure the size of the combo elems[0] += len(combo) # Iterate over each result variant (expanded from ranges), each one is # a sequence for sequence in self.results: # Iterate over each combo (element of the sequence) for combo in sequence: # Just measure the size of the combo elems[1] += len(combo) return tuple(elems) def trigger_str(self): ''' String version of the trigger Used for sorting ''' # Pixel Channel Mapping doesn't follow the same pattern if self.type == 'PixelChannel': return "{0}".format(self.pixel) return "{0}".format( self.sequencesOfCombosOfIds(self.triggers), ) def result_str(self): ''' String version of the result Used for sorting ''' # Pixel Channel Mapping doesn't follow the same pattern if self.type == 'PixelChannel': return "{0}".format(self.position) return "{0}".format( self.sequencesOfCombosOfIds(self.results), ) def __repr__(self): # Pixel Channel Mapping doesn't follow the same pattern if self.type == 'PixelChannel': return "{0} : {1};".format(self.pixel, self.position) return "{0} {1} {2};".format( self.sequencesOfCombosOfIds(self.triggers), self.operator, self.sequencesOfCombosOfIds(self.results), ) def sort_trigger(self): ''' Returns sortable trigger ''' if self.type == 'PixelChannel': return "{0}".format(self.pixel.kllify()) return "{0}".format( self.sequencesOfCombosOfIds_kll(self.triggers)[0], ) def sort_result(self): ''' Returns sortable result ''' if self.type == 'PixelChannel': result = self.position # Handle None pixel mapping case if isinstance(self.position, list): result = self.sequencesOfCombosOfIds_kll(self.position)[0] return "{0}".format(result) return "{0}".format( self.sequencesOfCombosOfIds_kll(self.results)[0], ) def kllify(self): ''' Returns KLL version of the expression May not look like the original expression if simplication has taken place ''' # TODO Handle variations? Instead of just the first index if self.type == 'PixelChannel': result = self.position # Handle None pixel mapping case if isinstance(self.position, list): result = self.sequencesOfCombosOfIds_kll(self.position)[0] return "{0} : {1};".format(self.pixel.kllify(), result) return "{0} {1} {2};".format( self.sequencesOfCombosOfIds_kll(self.triggers)[0], self.operator, self.sequencesOfCombosOfIds_kll(self.results)[0], ) def unique_keys(self): ''' Generates a list of unique identifiers for the expression that is mergeable with other functional equivalent expressions. TODO: This function should re-order combinations to generate the key. The final generated combo will be in the original order. 
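# Clarifying sketch (not part of the original file) of the nesting used for
# MapExpression triggers/results, based on the docstring examples above: the outer
# list holds variants expanded from ranges, each variant is a sequence, and each
# element of a sequence is a combo of identifiers pressed together.
#
#   triggers = [                    # e.g. a range expanded into two variants
#       [[S10, S16], [S42]],        # variant 0: combo (S10 + S16), then S42
#       [[S11, S16], [S42]],        # variant 1: combo (S11 + S16), then S42
#   ]
#
#   sequencesOfCombosOfIds(triggers)   -> "(S10 + S16, S42)|(S11 + S16, S42)"
#   triggersSequenceOfCombosOfIds(0)   -> json ids for variant 0 only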
''' keys = [] # Pixel Channel only has key per mapping if self.type == 'PixelChannel': keys = [("{0}".format(self.pixel), self)] # Split up each of the keys else: # Iterate over each trigger/result variants (expanded from ranges), # each one is a sequence for index, sequence in enumerate(self.triggers): key = "" uniq_expr = self # If there is more than one key, copy the expression # and remove the non-related variants if len(self.triggers) > 1: uniq_expr = copy.copy(self) # Isolate variant by index uniq_expr.triggers = [uniq_expr.triggers[index]] # Iterate over each combo (element of the sequence) for index, combo in enumerate(sequence): if index > 0: key += ", " # Iterate over each trigger identifier for index, identifier in enumerate(combo): if index > 0: key += " + " key += "{0} {1}".format(self.connect_id, identifier) # Add key to list keys.append((key, uniq_expr)) # Remove any duplicate keys # TODO Stat? Might be at neat report about how many duplicates were # squashed keys = list(set(keys)) return keys PKLЕ kll/common/file.py#!/usr/bin/env python3 ''' KLL File Container ''' # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . ### Imports ### import os import kll.common.context as context ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class KLLFile: ''' Container class for imported KLL files ''' def __init__(self, path, file_context): ''' Initialize file container @param path: Path to filename, if relative, relative to the execution environment @param context: KLL Context object ''' self.path = path self.context = file_context self.lines = [] self.data = "" self.connect_id = None # Add filename to context for debugging self.context.kll_files.append(self.filename()) def __repr__(self): context_str = type(self.context).__name__ # Show layer info if this is a PartialMap if isinstance(self.context, context.PartialMapContext): context_str = "{0}({1})".format(context_str, self.context.layer) return "({}, {}, connect_id={})".format( self.path, context_str, self.connect_id ) def check(self): ''' Make sure that the file exists at the initialized path ''' exists = os.path.isfile(self.path) # Display error message, will exit later if not exists: print("{0} {1} does not exist...".format(ERROR, self.path)) return exists def filename(self): filename = str(os.path.basename(self.path)) return filename def read(self): ''' Read the contents of the file path into memory Reads both per line and complete copies ''' try: # Read file into memory, removing newlines with open(self.path) as f: self.data = f.read() self.lines = self.data.splitlines() except BaseException: print( "{0} Failed to read '{1}' into memory...".format( ERROR, self.path)) return False return True def write(self, output_filename, debug=False): ''' Writes the contents to a file This can be useful for dumping processed files to disk ''' try: # Write the file to the 
specified file/folder if debug: print("Writing to {0}".format(output_filename)) directory = os.path.dirname(output_filename) if not os.path.exists(directory): os.makedirs(directory) with open(output_filename, 'w') as f: f.write(self.data) except BaseException: print("{0} Failed to write to file '{1}'".format(ERROR, self.path)) return False return True PKLoz<\<\kll/common/id.py#!/usr/bin/env python3 ''' KLL Id Containers ''' # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . ### Imports ### from kll.common.channel import ChannelList from kll.common.modifier import AnimationModifierList, PixelModifierList from kll.common.position import Position from kll.common.schedule import Schedule from kll.extern.funcparserlib.parser import NoParseError ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class Id: ''' Base container class for various KLL types ''' def __init__(self): self.type = None self.uid = None def get_uid(self): ''' Some Id types have alternate uid mappings self.uid stores the original uid whereas it may be updated due to multi-node configurations ''' return self.uid def json(self): ''' JSON representation of Id Generally each specialization of the Id class will need to enhance this function. 
''' return { 'type' : self.type, 'uid' : self.uid, } def kllify(self): ''' Returns KLL version of the Id In most cases we can just the string representation of the object ''' return "{0}".format(self) class HIDId(Id, Schedule): ''' HID/USB identifier container class ''' secondary_types = { 'USBCode': 'USB', 'SysCode': 'SYS', 'ConsCode': 'CONS', 'IndCode': 'IND', } kll_types = { 'USBCode': 'U', 'SysCode': 'SYS', 'ConsCode': 'CONS', 'IndCode': 'I', } type_width = { 'USBCode': 1, 'SysCode': 1, 'ConsCode': 2, 'IndCode': 1, } type_locale = { 'USBCode': 'to_hid_keyboard', 'SysCode': 'to_hid_sysctrl', 'ConsCode': 'to_hid_consumer', 'IndCode': 'to_hid_led', } def __init__(self, type, uid, locale): ''' @param type: String type of the Id @param uid: Unique integer identifier for the Id @param locale: Locale layout object used to decode used to decode uid ''' Id.__init__(self) Schedule.__init__(self) self.type = type self.uid = uid self.locale = locale self.locale_type = self.type_locale[self.type] # Set secondary type self.second_type = self.secondary_types[self.type] # Set kll type self.kll_type = self.kll_types[self.type] # Determine hex_str padding self.padding = 2 if self.type == 'ConsCode': self.padding = 3 # Validate uid is in locale based on what type of HID field it is if self.hex_str() not in self.locale.json()[self.locale_type].keys(): print("{} Unknown HID({}) UID('{}') in locale '{}'".format( WARNING, self.type, self.uid, self.locale.name() )) def hex_str(self): ''' Returns hex string used by locale for uid lookup ''' return "0x{0:0{1}X}".format(self.uid, self.padding) def get_hex_str(self): ''' Returns hex string used by locale for uid lookup, uses self.get_uid() instead of self.uid ''' return "0x{0:0{1}X}".format(self.get_uid(), self.padding) def width(self): ''' Returns the bit width of the HIDId This is the maximum number of bytes required for each type of HIDId as per the USB spec. Generally this is just 1 byte, however, Consumer elements (ConsCode) requires 2 bytes. 
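# Illustrative sketch (not part of the original file): how HIDId builds the hex
# string used for locale lookups.  'locale' stands in for a real locale layout
# object (its json() table is consulted in __init__); the uids are example values.
#
#   HIDId('USBCode', 0x04, locale).hex_str()    -> "0x04"   (padding 2)
#   HIDId('ConsCode', 0x19C, locale).hex_str()  -> "0x19C"  (padding 3)
#
# width() reports 2 bytes only for ConsCode entries and 1 byte for the rest.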
''' return self.type_width[self.type] def __repr__(self): ''' Use string name instead of integer, easier to debug ''' try: name = self.locale.json()[self.locale_type][self.hex_str()] schedule = self.strSchedule() if len(schedule) > 0: schedule = "({0})".format(schedule) output = 'HID({},{})"{}"{}'.format(self.type, self.locale.name(), self.uid, name, schedule) return output except: print("{} '{}' is an invalid dictionary lookup.".format( WARNING, (self.second_type, self.uid), )) return "" def json(self): ''' JSON representation of HIDId ''' output = Id.json(self) output.update(Schedule.json(self)) return output def kllify(self): ''' Returns KLL version of the Id ''' schedule = self.strSchedule() if len(schedule) > 0: schedule = "({0})".format(schedule) output = "{0}{1:#05x}{2}".format(self.kll_type, self.uid, schedule) return output class ScanCodeId(Id, Schedule, Position): ''' Scan Code identifier container class ''' def __init__(self, uid): Id.__init__(self) Schedule.__init__(self) Position.__init__(self) self.type = 'ScanCode' self.uid = uid # This uid is used for any post-processing of the uid # The original uid is maintained in case it is needed self.updated_uid = None def inferred_type(self): ''' Always returns ScanCode (simplifies code when mixed with PixelAddressId) ''' return 'PixelAddressId_ScanCode' def get_uid(self): ''' Determine uid May have been updated due to connect_id setting for interconnect offsets ''' uid = self.uid if self.updated_uid is not None: uid = self.updated_uid return uid def uid_set(self): ''' Returns a tuple of uids, always a single element for ScanCodeId ''' return tuple([self.get_uid()]) def unique_key(self): ''' Returns the key string used for datastructure sorting ''' # Positions are a special case if self.positionSet(): return "S{0:03d}".format(self.get_uid()) def __repr__(self): # Positions are a special case if self.positionSet(): return "{0} <= {1}".format(self.unique_key(), self.strPosition()) schedule = self.strSchedule() if len(schedule) > 0: return "S{0:03d}({1})".format(self.get_uid(), schedule) else: return "S{0:03d}".format(self.get_uid()) def json(self): ''' JSON representation of ScanCodeId ''' output = Id.json(self) output.update(Schedule.json(self)) output.update(Position.json(self)) return output def kllify(self): ''' Returns KLL version of the Id ''' schedule = self.strSchedule() if len(schedule) > 0: schedule = "({0})".format(schedule) output = "S{0:#05x}{1}".format(self.get_uid(), schedule) # Position enabled if self.isPositionSet(): output += " <= {0}".format(self.strPosition()) return output class LayerId(Id, Schedule): ''' Layer identifier container class ''' def __init__(self, type, layer): Id.__init__(self) Schedule.__init__(self) self.type = type self.uid = layer def __repr__(self): schedule = self.strSchedule() if len(schedule) > 0: return "{0}[{1}]({2})".format( self.type, self.uid, schedule, ) else: return "{0}[{1}]".format( self.type, self.uid, ) def width(self): ''' Returns the bit width of the LayerId This is currently 2 bytes. 
''' return 2 def json(self): ''' JSON representation of LayerId ''' output = Id.json(self) output.update(Schedule.json(self)) return output def kllify(self): ''' Returns KLL version of the Id ''' # The string __repr__ is KLL in this case return str(self) class TriggerId(Id, Schedule): ''' Generic trigger identifier container class ''' def __init__(self, idcode, uid): Id.__init__(self) Schedule.__init__(self) self.type = 'GenericTrigger' self.uid = uid self.idcode = idcode def __repr__(self): schedule = self.strSchedule() schedule_val = "" if len(schedule) > 0: schedule_val = "({})".format(schedule) return "T[{0},{1}]{2}".format( self.idcode, self.uid, schedule_val, ) def json(self): ''' JSON representation of TriggerId ''' output = Id.json(self) output.update(Schedule.json(self)) return output def kllify(self): ''' Returns KLL version of the Id ''' # The string __repr__ is KLL in this case return str(self) class AnimationId(Id, Schedule, AnimationModifierList): ''' Animation identifier container class ''' name = None def __init__(self, name, state=None): Id.__init__(self) Schedule.__init__(self) AnimationModifierList.__init__(self) self.name = name self.type = 'Animation' self.second_type = 'A' self.state = state def __repr__(self): state = "" if self.state is not None: state = ", {}".format(self.state) schedule = self.strSchedule() if len(schedule) > 0: return "A[{0}{1}]({2})".format(self.name, state, self.strSchedule()) if len(self.modifiers) > 0: return "A[{0}{1}]({2})".format(self.name, state, self.strModifiers()) return self.base_repr() def base_repr(self): ''' Returns string of just the identifier, exclude animation modifiers ''' state = "" if self.state is not None: state = ", {}".format(self.state) return "A[{0}{1}]".format(self.name, state) def width(self): ''' Returns the bit width of the AnimationId This is currently 2 bytes. 
''' return 2 def json(self): ''' JSON representation of AnimationId ''' output = Id.json(self) output.update(AnimationModifierList.json(self)) output.update(Schedule.json(self)) output['name'] = self.name output['setting'] = "{}".format(self) output['state'] = self.state del output['uid'] return output class AnimationFrameId(Id, AnimationModifierList): ''' Animation Frame identifier container class ''' def __init__(self, name, index): Id.__init__(self) AnimationModifierList.__init__(self) self.name = name self.index = index self.type = 'AnimationFrame' def __repr__(self): return "AF[{0}, {1}]".format(self.name, self.index) def kllify(self): ''' Returns KLL version of the Id ''' return "A[{0}, {1}]".format(self.name, self.index) class PixelId(Id, Position, PixelModifierList, ChannelList): ''' Pixel identifier container class ''' def __init__(self, uid): Id.__init__(self) Position.__init__(self) PixelModifierList.__init__(self) ChannelList.__init__(self) self.uid = uid self.type = 'Pixel' def unique_key(self, kll=False): ''' Returns the key string used for datastructure sorting @param kll: Kll output format mode ''' if isinstance(self.uid, HIDId) or isinstance(self.uid, ScanCodeId): if kll: return "{0}".format(self.uid.kllify()) return "P[{0}]".format(self.uid) if isinstance(self.uid, PixelAddressId): if kll: return "P[{0}]".format(self.uid.kllify()) return "P[{0}]".format(self.uid) if kll: return "P{0:#05x}".format(self.uid) return "P{0}".format(self.uid) def __repr__(self): # Positions are a special case if self.positionSet(): return "{0} <= {1}".format(self.unique_key(), self.strPosition()) extra = "" if len(self.modifiers) > 0: extra += "({0})".format(self.strModifiers()) if len(self.channels) > 0: extra += "({0})".format(self.strChannels()) return "{0}{1}".format(self.unique_key(), extra) def kllify(self): ''' KLL syntax compatible output for Pixel object ''' # Positions are a special case if self.positionSet(): return "{0} <= {1}".format(self.unique_key(kll=True), self.strPosition()) extra = "" if len(self.modifiers) > 0: extra += "({0})".format(self.strModifiers()) if len(self.channels) > 0: extra += "({0})".format(self.strChannels()) return "{0}{1}".format(self.unique_key(kll=True), extra) class PixelAddressId(Id): ''' Pixel address identifier container class ''' def __init__(self, index=None, col=None, row=None, relCol=None, relRow=None): Id.__init__(self) # Check to make sure index, col or row is set if index is None and col is None and row is None and relRow is None and relCol is None: print("{0} index, col or row must be set".format(ERROR)) self.index = index self.col = col self.row = row self.relCol = relCol self.relRow = relRow self.type = 'PixelAddress' def inferred_type(self): ''' Determine which PixelAddressType based on set values ''' if self.index is not None: return 'PixelAddressId_Index' if self.col is not None and self.row is None: return 'PixelAddressId_ColumnFill' if self.col is None and self.row is not None: return 'PixelAddressId_RowFill' if self.col is not None and self.row is not None: return 'PixelAddressId_Rect' if self.relCol is not None and self.relRow is None: return 'PixelAddressId_RelativeColumnFill' if self.relCol is None and self.relRow is not None: return 'PixelAddressId_RelativeRowFill' if self.relCol is not None and self.relRow is not None: return 'PixelAddressId_RelativeRect' print("{0} Unknown PixelAddressId, this is a bug!".format(ERROR)) return "" def uid_set(self): ''' Returns a tuple of uids, depends on what has been set. 
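# Illustrative sketch (not part of the original file): how inferred_type() above
# classifies a PixelAddressId depending on which fields are set (example values).
#
#   PixelAddressId(index=5).inferred_type()          -> 'PixelAddressId_Index'
#   PixelAddressId(col=3).inferred_type()            -> 'PixelAddressId_ColumnFill'
#   PixelAddressId(row=2).inferred_type()            -> 'PixelAddressId_RowFill'
#   PixelAddressId(col=3, row=2).inferred_type()     -> 'PixelAddressId_Rect'
#   PixelAddressId(relCol=-1).inferred_type()        -> 'PixelAddressId_RelativeColumnFill'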
''' if self.index is not None: return tuple([self.index]) if self.col is not None and self.row is None: return tuple([self.col, self.row]) if self.col is None and self.row is not None: return tuple([self.col, self.row]) if self.col is not None and self.row is not None: return tuple([self.col, self.row]) if self.relCol is not None and self.relRow is None: return tuple([self.relCol, self.relRow]) if self.relCol is None and self.relRow is not None: return tuple([self.relCol, self.relRow]) if self.relCol is not None and self.relRow is not None: return tuple([self.relCol, self.relRow]) print("{0} Unknown uid set, this is a bug!".format(ERROR)) return "= 0 and "+" or "" cur_out += "{0}".format(self.valueStr(self.relRow)) output.append(cur_out) if not self.relCol is None: cur_out = "c:i" cur_out += self.relCol >= 0 and "+" or "" cur_out += "{0}".format(self.valueStr(self.relCol)) output.append(cur_out) return output def __repr__(self): return "{0}".format(self.outputStrList()) def kllify(self): ''' KLL syntax compatible output for PixelAddress object ''' return ",".join(self.outputStrList()) class PixelLayerId(Id, PixelModifierList): ''' Pixel Layer identifier container class ''' def __init__(self, uid): Id.__init__(self) PixelModifierList.__init__(self) self.uid = uid self.type = 'PixelLayer' def __repr__(self): if len(self.modifiers) > 0: return "PL{0}({1})".format(self.uid, self.strModifiers()) return "PL{0}".format(self.uid) class CapId(Id): ''' Capability identifier ''' def __init__(self, name, type, arg_list=[]): ''' @param name: Name of capability @param type: Type of capability definition, string @param arg_list: List of CapArgIds, empty list if there are none ''' Id.__init__(self) self.name = name self.type = type self.arg_list = arg_list def __repr__(self): # Generate prettified argument list arg_string = "" for arg in self.arg_list: arg_string += "{0},".format(arg) if len(arg_string) > 0: arg_string = arg_string[:-1] return "{0}({1})".format(self.name, arg_string) def json(self): ''' JSON representation of CapId ''' return { 'type' : self.type, 'name' : self.name, 'args' : [arg.json() for arg in self.arg_list] } def total_arg_bytes(self, capabilities_dict=None): ''' Calculate the total number of bytes needed for the args @param capabilities_dict: Dictionary of capabilities used, just in case no widths have been assigned return: Number of bytes ''' # Zero if no args total_bytes = 0 for index, arg in enumerate(self.arg_list): # Lookup actual width if necessary (wasn't set explicitly) if capabilities_dict is not None and (arg.type == 'CapArgValue' or arg.width is None): # Check if there are enough arguments expected = len(capabilities_dict[self.name].association.arg_list) got = len(self.arg_list) if got != expected: print("{0} incorrect number of arguments for {1}. 
Expected {2} Got {3}".format( ERROR, self, expected, got, )) print("\t{0}".format(capabilities_dict[self.name].kllify())) raise AssertionError("Invalid arguments") total_bytes += capabilities_dict[self.name].association.arg_list[index].width # Otherwise use the set width else: total_bytes += arg.width return total_bytes class NoneId(CapId): ''' None identifier It's just a capability...that does nothing (instead of inferring to do something else) ''' def __init__(self): super().__init__('None', 'None') def json(self): ''' JSON representation of NoneId ''' return { 'type' : self.type, } def __repr__(self): return "None" class CapArgId(Id): ''' Capability Argument identifier ''' def __init__(self, name, width=None): ''' @param name: Name of argument @param width: Byte-width of the argument, if None, this is not part of a capability definition ''' Id.__init__(self) self.name = name self.width = width self.type = 'CapArg' def __repr__(self): if self.width is None: return "{0}".format(self.name) else: return "{0}:{1}".format(self.name, self.width) def json(self): ''' JSON representation of CapArgId ''' return { 'name' : self.name, 'width' : self.width, 'type' : self.type, } class CapArgValue(Id): ''' Capability Argument Value identifier ''' def __init__(self, value): ''' @param value: Value of argument ''' Id.__init__(self) self.value = value self.type = 'CapArgValue' def __repr__(self): return "{}".format(self.value) def json(self): ''' JSON representation of CapArgValue ''' return { 'value' : self.value, 'type' : self.type, } kll/common/modifier.py#!/usr/bin/env python3 ''' KLL Modifier Containers ''' # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . 
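# --- Editor's illustrative sketch (not part of the original source) -----------
# The containers defined below are normally driven through setModifiers(),
# which takes (name, value) pairs whose names must appear in
# AnimationModifier.valid_modifiers. A minimal usage sketch, runnable as a
# standalone script assuming the kll package is installed:
#
#     from kll.common.modifier import AnimationModifierList
#
#     mods = AnimationModifierList()
#     mods.setModifiers([('loops', 1), ('framedelay', 3), ('pfunc', 'interp')])
#     print(mods.strModifiers())        # expected: framedelay:3,loops:1,pfunc:interp
#     print(mods.getModifier('loops'))  # expected: 1 (an AnimationModifierArg)
#     print(mods.getModifier('stop'))   # expected: False (modifier not set)
# -------------------------------------------------------------------------------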
### Imports ### ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### class AnimationModifierArg: ''' Animation modification arg container class ''' def __init__(self, parent, value): self.parent = parent self.arg = value self.subarg = None # In case we have a bad modifier, arg is set to None if self.arg is None: return # Sub-arg case if isinstance(value, tuple): self.arg = value[0] self.subarg = value[1] # Validate arg validation = parent.valid_modifiers[parent.name] if isinstance(validation, dict): # arg if self.arg not in validation.keys(): print("{0} '{1}' is not a valid modifier arg for '{2}'".format( ERROR, self.arg, parent.name, )) # subarg subvalidation = validation[self.arg] if subvalidation is None and self.subarg is not None: print("{0} '{1}' is an incorrect subargument for '{2}:{3}', should be a '{4}'".format( ERROR, self.subarg, parent.name, self.arg, subvalidation, )) elif subvalidation is not None and not isinstance(self.subarg, subvalidation[self.arg]): print("{0} '{1}' is an incorrect subargument for '{2}:{3}', should be a '{4}'".format( ERROR, self.subarg, parent.name, self.arg, subvalidation, )) else: # arg if not isinstance(self.arg, validation): print("{0} '{1}' is an incorrect argument for '{2}', should be a '{3}'".format( ERROR, self.arg, parent.name, validation, )) def __repr__(self): if self.arg is None: return "" if self.subarg is not None: int_list = ["{0}".format(x) for x in self.subarg] return "{0}({1})".format( self.arg, ",".join(int_list), ) return "{0}".format(self.arg) def like(self, other): ''' Returns true if the other AnimationModifierArg is the same ''' if self.arg != other.arg: return False if self.subarg is None and other.subarg is None: return True elif self.subarg is None or other.subarg is None: return False if frozenset(self.subarg) == frozenset(other.subarg): return True return False def json(self): ''' JSON representation of AnimationModifierArg ''' return { 'arg': self.arg, 'subarg': self.subarg, } def kllify(self): ''' Returns KLL version of the Modifier In most cases we can just the string representation of the object ''' return "{0}".format(self) class AnimationModifier: ''' Animation modification container class ''' # Modifier validation tree valid_modifiers = { 'loops': int, 'loop': None, 'framedelay': int, 'framestretch': None, 'start': None, 'pause': None, 'stop': None, 'pos': int, 'pfunc': { 'off': None, 'interp': None, 'kllinterp': None, }, 'ffunc': { 'off': None, 'interp': None, 'kllinterp': None, }, 'replace': { 'stack': None, 'basic': None, 'all': None, 'state': None, }, } def __init__(self, name, value=None): # Check if name is valid if name not in self.valid_modifiers.keys(): print("{0} '{1}' is not a valid modifier {1}:{2}".format( ERROR, name, value, )) self.name = '' self.value = AnimationModifierArg(self, None) return self.name = name self.value = AnimationModifierArg(self, value) def __repr__(self): if self.value.arg is None: return "{0}".format(self.name) return "{0}:{1}".format(self.name, self.value) def like(self, other): ''' Returns true if AnimationModifier has the same name ''' return other.name == self.name def __eq__(self, other): return self.like(other) and self.value.like(other.value) def json(self): ''' JSON representation of AnimationModifier ''' # Determine json of self.value value = None if self.value is not None: value = self.value.json() return { 'name': self.name, 'value': value, } def kllify(self): ''' Returns KLL version of 
the Modifier In most cases we can just the string representation of the object ''' return "{0}".format(self) class AnimationModifierList: ''' Animation modification container list class Contains a list of modifiers, the order does not matter ''' frameoption_modifiers = [ 'framestretch', ] def __init__(self): self.modifiers = [] def setModifiers(self, modifier_list): ''' Apply modifiers to Animation ''' for modifier in modifier_list: self.modifiers.append(AnimationModifier(modifier[0], modifier[1])) def clean(self, new_modifier, new, old): ''' Remove conflicting modifier if necessary ''' if new_modifier.name == new: for index, modifier in enumerate(self.modifiers): if modifier.name == old: return False return True def replace(self, new_modifier): ''' Replace modifier If it doesn't exist already, just add it. ''' # If new_modifier is loops and loop exists, remove loop if not self.clean(new_modifier, 'loops', 'loop'): return # If new_modifier is loop and loops exists, remove loops if not self.clean(new_modifier, 'loop', 'loops'): return # Check for modifier for modifier in self.modifiers: if modifier.name == new_modifier.name: modifier.value = new_modifier.value return # Otherwise just add it self.modifiers.append(new_modifier) def getModifier(self, name): ''' Retrieves modifier Returns False if doesn't exist Returns argument if exists and has an argument, may be None ''' for mod in self.modifiers: if mod.name == name: return mod.value return False def strModifiers(self): ''' __repr__ of Position when multiple inheritance is used ''' output = "" for index, modifier in enumerate(sorted(self.modifiers, key=lambda x: x.name)): if index > 0: output += "," output += "{0}".format(modifier) return output def __repr__(self): return self.strModifiers() def json(self): ''' JSON representation of AnimationModifierList ''' output = { 'modifiers' : [], } # Output sorted list of modifiers for modifier in sorted(self.modifiers, key=lambda x: x.name): output['modifiers'].append(modifier.json()) # Look for any frameoption modifiers frameoption_list = [] for modifier in self.modifiers: if modifier.name in self.frameoption_modifiers: frameoption_list.append(modifier.name) output['frameoptions'] = frameoption_list return output def kllify(self): ''' Returns KLL version of the ModifierList In most cases we can just the string representation of the object ''' return "{0}".format(self) class PixelModifier: ''' Pixel modification container class ''' def __init__(self, operator, value): self.operator = operator self.value = value def __repr__(self): if self.operator is None: return "{0}".format(self.value) return "{0}{1}".format(self.operator, self.value) def operator_type(self): ''' Returns operator type ''' types = { None: 'Set', '+': 'Add', '-': 'Subtract', '+:': 'NoRoll_Add', '-:': 'NoRoll_Subtract', '<<': 'LeftShift', '>>': 'RightShift', } return types[self.operator] def kllify(self): ''' Returns KLL version of the PixelModifier In most cases we can just the string representation of the object ''' return "{0}".format(self) class PixelModifierList: ''' Pixel modification container list class Contains a list of modifiers Index 0, corresponds to pixel 0 ''' def __init__(self): self.modifiers = [] def setModifiers(self, modifier_list): ''' Apply modifier to each pixel channel ''' for modifier in modifier_list: self.modifiers.append(PixelModifier(modifier[0], modifier[1])) def strModifiers(self): ''' __repr__ of Position when multiple inheritance is used ''' output = "" for index, modifier in 
enumerate(self.modifiers): if index > 0: output += "," output += "{0}".format(modifier) return output def __repr__(self): return self.strModifiers() def kllify(self): ''' Returns KLL version of the PixelModifierList In most cases we can just the string representation of the object ''' return "{0}".format(self) PKLsskll/common/organization.py#!/usr/bin/env python3 ''' KLL Data Organization ''' # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . ### Imports ### import copy import re import kll.common.expression as expression from kll.common.id import PixelAddressId ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ansi_escape = re.compile(r'\x1b[^m]*m') ### Classes ### class Data: ''' Base class for KLL datastructures ''' # Debug output formatters debug_output = { 'add': "\t\033[1;42;37m++\033[0m\033[1mADD KEY\033[1;42;37m++\033[0m \033[1m<==\033[0m {0}", 'app': "\t\033[1;45;37m**\033[0m\033[1mAPP KEY\033[1;45;37m**\033[0m \033[1m<==\033[0m {0}", 'mod': "\t\033[1;44;37m##\033[0m\033[1mMOD KEY\033[1;44;37m##\033[0m \033[1m<==\033[0m {0}", 'rem': "\t\033[1;41;37m--\033[0m\033[1mREM KEY\033[1;41;37m--\033[0m \033[1m<==\033[0m {0}", 'drp': "\t\033[1;43;37m@@\033[0m\033[1mDRP KEY\033[1;43;37m@@\033[0m \033[1m<==\033[0m {0}", 'dup': "\t\033[1;46;37m!!\033[0m\033[1mDUP KEY\033[1;46;37m!!\033[0m \033[1m<==\033[0m {0}", } def __init__(self, parent): ''' Initialize datastructure @param parent: Parent organization, used to query data from other datastructures ''' self.data = {} self.parent = parent self.connect_id = 0 self.merge_in_log = [] def merge_in_log_prune(self, debug): ''' Prune the merge_in_log Reverse searches the list, if the key already exists, disable the key ''' new_log = [] found = [] # We have to manually reverse, then modify the referenced items # i.e. 
we're still modifying self.merge_in_log # This is done so we still have a proper index, and do the invalidation in the correct order reversed_log = self.merge_in_log[::-1] for index, elem in enumerate(reversed_log): key, expr, enabled = elem # Add to found list if key not in found: found.append(key) new_log.insert(0, elem) # Otherwise mark as disabled else: reversed_log[index] = [ key, expr, False, ] return new_log def merge_in_log_expression(self, key, expression, debug): ''' Logs a given merge_in expressions This is used to determine the order in which the merges occurred Duplicate entries are pruned after the merge @param key: Hash entry for (text) @param expression: Expression object @param debug: Enable debug out ''' # Debug output if debug[0]: output = "{0} Log Add: {1} {2}".format( self.parent.parent.layer_info(), key, expression, ) print(debug[1] and output or ansi_escape.sub('', output)) # Add to log, and enable key self.merge_in_log.append([key, expression, True]) def add_expression(self, expression, debug): ''' Add expression to data structure May have multiple keys to add for a given expression @param expression: KLL Expression (fully tokenized and parsed) @param debug: Enable debug output ''' # Lookup unique keys for expression keys = expression.unique_keys() # Add/Modify expressions in datastructure for key, uniq_expr in keys: # Check which operation we are trying to do, add or modify if debug[0]: if key in self.data.keys(): output = self.debug_output['mod'].format(key) else: output = self.debug_output['add'].format(key) print(debug[1] and output or ansi_escape.sub('', output)) self.data[key] = uniq_expr # Add to log self.merge_in_log_expression(key, uniq_expr, debug) def merge(self, merge_in, map_type, debug): ''' Merge in the given datastructure to this datastructure This datastructure serves as the base. @param merge_in: Data structure from another organization to merge into this one @param map_type: Used fo map specific merges @param debug: Enable debug out ''' # The default case is just to add the expression in directly for key, kll_expression in merge_in.data.items(): # Set ConnectId in expression kll_expression.connect_id = merge_in.connect_id # Display key:expression being merged in if debug[0]: output = merge_in.elem_str(key, True) print(debug[1] and output or ansi_escape.sub('', output), end="") self.add_expression(kll_expression, debug) # Add to log self.merge_in_log_expression(key, kll_expression, debug) def reduction(self, debug=False): ''' Simplifies datastructure Most of the datastructures don't have a reduction. Just do nothing in this case. ''' pass def cleanup(self, debug=False): ''' Post-processing step for merges that may need to remove some data in the organization. Mainly used for dropping BaseMapContext expressions after generating a PartialMapContext. 
''' pass def connectid(self, connect_id): ''' Sets the Data store with a given connect_id By default, this is 0, but may be set prior to an organization merge ''' self.connect_id = connect_id def elem_str(self, key, single=False): ''' Debug output for a single element @param key: Index to datastructure @param single: Setting to True will bold the key ''' if single: return "\033[1;33m{0: <20}\033[0m \033[1;36;41m>\033[0m {1}\n".format(key, self.data[key]) else: return "{0: <20} \033[1;36;41m>\033[0m {1}\n".format(key, self.data[key]) def __repr__(self): output = "" # Display sorted list of keys, along with the internal value for key in sorted(self.data): output += self.elem_str(key) return output class MappingData(Data): ''' KLL datastructure for data mapping ScanCode trigger -> result USBCode trigger -> result Animation trigger -> result ''' def add_expression(self, expression, debug): ''' Add expression to data structure May have multiple keys to add for a given expression Map expressions insert into the datastructure according to their operator. +Operators+ : Add/Modify :+ Append :- Remove :: Lazy Add/Modify i: Add/Modify i:+ Append i:- Remove i:: Lazy Add/Modify The i or isolation operators are stored separately from the main ones. Each key is pre-pended with an i The :: or lazy operators act just like : operators, except that they will be ignore if the evaluation merge cannot resolve a ScanCode. @param expression: KLL Expression (fully tokenized and parsed) @param debug: Enable debug output ''' # Lookup unique keys for expression keys = expression.unique_keys() # Add/Modify expressions in datastructure for ukey, uniq_expr in keys: # Determine which the expression operator operator = expression.operator # Except for the : operator, all others have delayed action # Meaning, they change behaviour depending on how Contexts are merged # This means we can't simplify yet # In addition, :+ and :- are stackable, which means each key has a list of expressions # We append the operator to differentiate between the different types of delayed operations key = "{0}{1}".format(operator, ukey) # Determine if key exists already exists = key in self.data.keys() # Add/Modify if operator in [':', '::', 'i:', 'i::']: debug_tag = exists and 'mod' or 'add' # Append/Remove else: # Check to make sure we haven't already appended expression # Use the string representation to do the comparison (general purpose) if exists and "{0}".format(uniq_expr) in ["{0}".format(elem) for elem in self.data[key]]: debug_tag = 'dup' # Append elif operator in [':+', 'i:+']: debug_tag = 'app' # Remove else: debug_tag = 'rem' # Debug output if debug[0]: output = self.debug_output[debug_tag].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # Don't append if a duplicate if debug_tag == 'dup': continue # Append, rather than replace if operator in [':+', ':-', 'i:+', 'i:-']: if exists: self.data[key].append(uniq_expr) # Create initial list else: self.data[key] = [uniq_expr] else: self.data[key] = [uniq_expr] # Append to log self.merge_in_log_expression(key, uniq_expr, debug) def set_interconnect_id(self, interconnect_id, triggers): ''' Traverses the sequence of combo of identifiers to set the interconnect_id ''' for sequence in triggers: for combo in sequence: for identifier in combo: identifier.interconnect_id = interconnect_id def connectid(self, connect_id): ''' Sets the Data store with a given connect_id By default, this is 0, but may be set prior to an organization merge ''' self.connect_id = connect_id # 
Update dictionary keys using new connect_id for key, value in self.data.items(): if value[0].type == 'ScanCode': # Update connect_id, then regenerate dictionary item value[0].connect_id = connect_id new_key = "{0}{1}".format( value[0].operator, value[0].unique_keys()[0][0], ) # Replace dictionary item self.data[new_key] = self.data.pop(key) def maxscancode(self): ''' Find max scancode per connect id @return: Dictionary of max Scan Codes (keys are the connect id) ''' max_dict = {} for key, value in self.data.items(): connect_id = value[0].connect_id max_uid = value[0].max_trigger_uid() # Initial value if connect_id not in max_dict.keys(): max_dict[connect_id] = 0 # Update if necessary if max_dict[connect_id] < max_uid: max_dict[connect_id] = max_uid return max_dict def merge_lazy_operators(self, debug): ''' Lazy Set :: is not applied as a Set : until after the merge_in_log has been pruned Intended to be called during reduction. ''' # Build dictionary of single ScanCodes first result_code_lookup = {} for key, expr in self.data.items(): if expr[0].elems()[0] == 1 and expr[0].triggers[0][0][0].type == 'ScanCode': result_code_lookup.setdefault(expr[0].result_str(), []).append(key) # Build list of lazy keys from log lazy_keys = {} for key, expr, enabled in reversed(self.merge_in_log): if key[0:2] == '::' or key[0:3] == 'i::': if key not in lazy_keys.keys(): # Debug info if debug: print("\033[1mLazy\033[0m", key, expr) # Determine the target key from the expression target_key = expr.trigger_str() lazy_keys[target_key] = expr # Check if we need to do a lazy replacement if target_key in result_code_lookup.keys(): expr_keys = result_code_lookup[target_key] for target_expr_key in expr_keys: # Calculate new key new_expr = self.data[target_expr_key][0] new_key = "{0}{1}".format( new_expr.operator, new_expr.unique_keys()[0][0] ) # Determine action based on the new_expr.operator orig_expr = self.data[new_key][0] if debug: print("\t\033[1;32mREPLACE\033[0m {0} -> {1}\n\t{2} => {3}".format( target_expr_key, new_key, expr, new_expr )) # Do replacement self.data[new_key] = [expression.MapExpression( orig_expr.triggers, orig_expr.operator, expr.results )] self.data[new_key][0].connect_id = orig_expr.connect_id # Unset basemap on expression self.data[new_key][0].base_map = False def merge(self, merge_in, map_type, debug): ''' Merge in the given datastructure to this datastructure This datastructure serves as the base. Map expressions merge differently than insertions. 
+Operators+ : Add/Modify - Replace :+ Append - Add :- Remove - Remove :: Lazy Add/Modify - Replace if found, otherwise drop i: Add/Modify - Replace i:+ Append - Add i:- Remove - Remove i:: Lazy Add/Modify - Replace if found, otherwise drop @param merge_in: Data structure from another organization to merge into this one @param map_type: Used fo map specific merges @param debug: Enable debug out ''' # Get unique list of ordered keys # We can't just query the keys directly from the as we need them in order of being added # In addition, we need a unique list of keys, where the most recently added is the most important cur_keys = [] for key, expr, enabled in reversed(merge_in.merge_in_log): if key not in cur_keys: cur_keys.insert(0, key) # Lazy Set :: lazy_keys = [key for key in cur_keys if key[0:2] == '::' or key[0:3] == 'i::'] cur_keys = list(set(cur_keys) - set(lazy_keys)) # Append :+ append_keys = [key for key in cur_keys if key[0:2] == ':+' or key[0:3] == 'i:+'] cur_keys = list(set(cur_keys) - set(append_keys)) # Remove :- remove_keys = [key for key in cur_keys if key[0:2] == ':-' or key[0:3] == 'i:-'] cur_keys = list(set(cur_keys) - set(remove_keys)) # Set : # Everything left is just a set set_keys = cur_keys # First process the :: (or lazy) operators # We need to read into this datastructure and apply those first # Otherwise we may get undesired behaviour for key in lazy_keys: # Display key:expression being merged in if debug[0]: output = merge_in.elem_str(key, True) print(debug[1] and output or ansi_escape.sub('', output), end="") # Construct target key # XXX (HaaTa) We now delay lazy operation application till reduction #target_key = key[0] == 'i' and "i{0}".format(key[2:]) or key[1:] target_key = key # Lazy expressions will be dropped later at reduction debug_tag = 'mod' # Debug output if debug[0]: output = self.debug_output[debug_tag].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # Only replace self.data[target_key] = merge_in.data[key] # Unset BaseMapContext tag if not a BaseMapContext if map_type != 'BaseMapContext': self.data[target_key][0].base_map = False # Then apply : assignment operators for key in set_keys: # Display key:expression being merged in if debug[0]: output = merge_in.elem_str(key, True) print(debug[1] and output or ansi_escape.sub('', output), end="") # Construct target key target_key = key # Indicate if add or modify if target_key in self.data.keys(): debug_tag = 'mod' else: debug_tag = 'add' # Debug output if debug[0]: output = self.debug_output[debug_tag].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # Set into new datastructure regardless self.data[target_key] = merge_in.data[key] # Unset BaseMap flag if this is not a BaseMap merge if map_type != 'BaseMapContext': self.data[target_key][0].base_map = False # Now apply append operations for key in append_keys: # Display key:expression being merged in if debug[0]: output = merge_in.elem_str(key, True) print(debug[1] and output or ansi_escape.sub('', output), end="") # Construct target key # XXX (HaaTa) Might not be correct, but seems to work with the merge_in_log #target_key = key[0] == 'i' and "i:{0}".format(key[3:]) or ":{0}".format(key[2:]) target_key = key # Alwyays appending debug_tag = 'app' # Debug output if debug[0]: output = self.debug_output[debug_tag].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # Extend list if it exists if target_key in self.data.keys(): self.data[target_key].extend(merge_in.data[key]) else: 
self.data[target_key] = merge_in.data[key] # Finally apply removal operations to this datastructure # If the target removal doesn't exist, ignore silently (show debug message) for key in remove_keys: # Display key:expression being merged in if debug[0]: output = merge_in.elem_str(key, True) print(debug[1] and output or ansi_escape.sub('', output), end="") # Construct target key # XXX (HaaTa) Might not be correct, but seems to work with the merge_in_log #target_key = key[0] == 'i' and "i:{0}".format(key[3:]) or ":{0}".format(key[2:]) target_key = key # Drop right away if target datastructure doesn't have target key if target_key not in self.data.keys(): debug_tag = 'drp' # Debug output if debug[0]: output = self.debug_output[debug_tag].format(key) print(debug[1] and output or ansi_escape.sub('', output)) continue # Compare expressions to be removed with the current set # Use strings to compare remove_expressions = ["{0}".format(expr) for expr in merge_in.data[key]] current_expressions = [("{0}".format(expr), expr) for expr in self.data[target_key]] for string, expr in current_expressions: debug_tag = 'drp' # Check if an expression matches if string in remove_expressions: debug_tag = 'rem' # Debug output if debug[0]: output = self.debug_output[debug_tag].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # Remove if found if debug_tag == 'rem': self.data[target_key] = [value for value in self.data.values() if value != expr] # Now append the merge_in_log self.merge_in_log.extend(merge_in.merge_in_log) def cleanup(self, debug=False): ''' Post-processing step for merges that may need to remove some data in the organization. Mainly used for dropping BaseMapContext expressions after generating a PartialMapContext. ''' # Using this dictionary, replace all the trigger USB codes # Iterate over a copy so we can modify the dictionary in place for key, expr in self.data.copy().items(): if expr[0].base_map: if debug[0]: output = "\t\033[1;34mDROP\033[0m {0}".format(self.data[key][0]) print(debug[1] and output or ansi_escape.sub('', output)) del self.data[key] elif debug[0]: output = "\t\033[1;32mKEEP\033[0m {0}".format(self.data[key][0]) print(debug[1] and output or ansi_escape.sub('', output)) def reduction(self, debug=False): ''' Simplifies datastructure Used to replace all trigger HIDCode(USBCode)s with ScanCodes NOTE: Make sure to create a new MergeContext before calling this as you lose data and prior context ''' result_code_lookup = {} # Prune merge_in_log merge_in_pruned = self.merge_in_log_prune(debug) # Build dictionary of single ScanCodes first for key, expr in self.data.items(): if expr[0].elems()[0] == 1 and expr[0].triggers[0][0][0].type == 'ScanCode': result_code_lookup[expr[0].result_str()] = expr # Skip if dict is empty if len(self.data.keys()) == 0: return # Instead of using the .data dictionary, use the merge_in_log which maintains the expression application order # Using this list, replace all the trigger USB codes for key, log_expr, active in self.merge_in_log: # Skip if not active if not active: continue # Lookup currently merged expression if key not in self.data.keys(): continue expr = self.data[key] for sub_expr in expr: # 1) Single USB Codes trigger results will replace the original ScanCode result if sub_expr.elems()[0] == 1 and sub_expr.triggers[0][0][0].type in ['USBCode', 'SysCode', 'ConsCode']: # Debug info if debug: print("\033[1mSingle\033[0m", key, expr) # Lookup trigger to see if it exists trigger_str = sub_expr.trigger_str() if trigger_str in 
result_code_lookup.keys(): # Calculate new key new_expr = result_code_lookup[trigger_str][0] new_key = "{0}{1}".format( new_expr.operator, new_expr.unique_keys()[0][0] ) # Determine action based on the new_expr.operator orig_expr = self.data[new_key][0] # Replace expression if sub_expr.operator in [':']: if debug: print("\t\033[1;32mREPLACE\033[0m {0} -> {1}\n\t{2} => {3}".format( key, new_key, sub_expr, new_expr )) # Do replacement self.data[new_key] = [expression.MapExpression( orig_expr.triggers, orig_expr.operator, sub_expr.results )] # Transfer connect_id self.data[new_key][0].connect_id = orig_expr.connect_id # Unset basemap on expression self.data[new_key][0].base_map = False # Add expression elif sub_expr.operator in [':+']: if debug: print("\t\033[1;42mADD\033[0m {0} -> {1}\n\t{2} => {3}".format( key, new_key, sub_expr, new_expr )) # Add expression self.data[new_key].append(expression.MapExpression( orig_expr.triggers, orig_expr.operator, sub_expr.results )) # Unset basemap on sub results for sub_expr in self.data[new_key]: sub_expr.base_map = False # Remove expression elif sub_expr.operator in [':-']: if debug: print("\t\033[1;41mREMOVE\033[0m {0} -> {1}\n\t{2} => {3}".format( key, new_key, sub_expr, new_expr )) # Remove old key if key in self.data.keys(): del self.data[key] # Otherwise drop HID expression else: if debug: print("\t\033[1;34mDROP\033[0m") if key in self.data.keys(): del self.data[key] # 2) Complex triggers are processed to replace out any USB Codes with Scan Codes elif sub_expr.elems()[0] > 1: # Debug info if debug: print("\033[1;4mMulti\033[0m ", key, expr) # Lookup each trigger element and replace # If any trigger element doesn't exist, drop expression # Dive through sequence->combo->identifier (sequence of combos of ids) replace = False drop = False for seq_in, sequence in enumerate(sub_expr.triggers): for com_in, combo in enumerate(sequence): for ident_in, identifier in enumerate(combo): ident_str = "({0})".format(identifier) # Replace identifier if ident_str in result_code_lookup.keys(): match_expr = result_code_lookup[ident_str] sub_expr.triggers[seq_in][com_in][ident_in] = match_expr[0].triggers[0][0][0] replace = True # Ignore non-USB triggers elif identifier.type in ['IndCode', 'GenericTrigger', 'Layer', 'LayerLock', 'LayerShift', 'LayerLatch', 'ScanCode']: pass # Drop everything else else: drop = True # Trigger Identifier was replaced if replace: if debug: print("\t\033[1;32mREPLACE\033[0m", expr) # Trigger Identifier failed (may still occur if there was a replacement) if drop: if debug: print("\t\033[1;34mDROP\033[0m") del self.data[key] # Finally we can merge in the Lazy :: Set operators self.merge_lazy_operators(debug) # Show results of reduction if debug: print(self) class AnimationData(Data): ''' KLL datastructure for Animation configuration Animation -> modifiers ''' class AnimationFrameData(Data): ''' KLL datastructure for Animation Frame configuration Animation -> Pixel Settings ''' class CapabilityData(Data): ''' KLL datastructure for Capability mapping Capability -> C Function/Identifier ''' class DefineData(Data): ''' KLL datastructure for Define mapping Variable -> C Define/Identifier ''' class PixelChannelData(Data): ''' KLL datastructure for Pixel Channel mapping Pixel -> Channels ''' def maxpixelid(self): ''' Find max pixel id per connect id @return: dictionary of connect id to max pixel id ''' max_pixel = {} for key, value in self.data.items(): connect_id = value.connect_id # Make sure this is a PixelAddressId if 
isinstance(value.pixel.uid, PixelAddressId): max_uid = value.pixel.uid.index else: max_uid = value.pixel.uid # Initial value if connect_id not in max_pixel.keys(): max_pixel[connect_id] = 0 # Update if necessary if max_pixel[connect_id] < max_uid: max_pixel[connect_id] = max_uid # TODO REMOVEME #print( key,value, value.__class__, value.pixel.uid.index, value.connect_id ) return max_pixel class PixelPositionData(Data): ''' KLL datastructure for Pixel Position mapping Pixel -> Physical Location ''' def add_expression(self, expression, debug): ''' Add expression to data structure May have multiple keys to add for a given expression @param expression: KLL Expression (fully tokenized and parsed) @param debug: Enable debug output ''' # Lookup unique keys for expression keys = expression.unique_keys() # Add/Modify expressions in datastructure for key, uniq_expr in keys: # Check which operation we are trying to do, add or modify if debug[0]: if key in self.data.keys(): output = self.debug_output['mod'].format(key) else: output = self.debug_output['add'].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # If key already exists, just update if key in self.data.keys(): self.data[key].update(uniq_expr) else: self.data[key] = uniq_expr # Append to log self.merge_in_log_expression(key, uniq_expr, debug) class ScanCodePositionData(Data): ''' KLL datastructure for ScanCode Position mapping ScanCode -> Physical Location ''' def add_expression(self, expression, debug): ''' Add expression to data structure May have multiple keys to add for a given expression @param expression: KLL Expression (fully tokenized and parsed) @param debug: Enable debug output ''' # Lookup unique keys for expression keys = expression.unique_keys() # Add/Modify expressions in datastructure for key, uniq_expr in keys: # Check which operation we are trying to do, add or modify if debug[0]: if key in self.data.keys(): output = self.debug_output['mod'].format(key) else: output = self.debug_output['add'].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # If key already exists, just update if key in self.data.keys(): self.data[key].update(uniq_expr) else: self.data[key] = uniq_expr # Append to log self.merge_in_log_expression(key, uniq_expr, debug) class VariableData(Data): ''' KLL datastructure for Variables and Arrays Variable -> Data Array -> Data ''' def add_expression(self, expression, debug): ''' Add expression to data structure May have multiple keys to add for a given expression In the case of indexed variables, only replaced the specified index @param expression: KLL Expression (fully tokenized and parsed) @param debug: Enable debug output ''' # Lookup unique keys for expression keys = expression.unique_keys() # Add/Modify expressions in datastructure for key, uniq_expr in keys: # Check which operation we are trying to do, add or modify if debug[0]: if key in self.data.keys(): output = self.debug_output['mod'].format(key) else: output = self.debug_output['add'].format(key) print(debug[1] and output or ansi_escape.sub('', output)) # Check to see if we need to cap-off the array (a position parameter is given) if uniq_expr.type == 'Array' and uniq_expr.pos is not None: # Modify existing array if key in self.data.keys(): self.data[key].merge_array(uniq_expr) # Add new array else: uniq_expr.merge_array() self.data[key] = uniq_expr # Otherwise just add/replace expression else: self.data[key] = uniq_expr # Append to log self.merge_in_log_expression(key, uniq_expr, debug) class Organization: ''' 
Container class for KLL datastructures The purpose of these datastructures is to symbolically store at first, and slowly solve/deduplicate expressions. Since the order in which the merges occurs matters, this involves a number of intermediate steps. ''' def __init__(self, parent): ''' Intialize data structure ''' self.parent = parent # Setup each of the internal sub-datastructures self.animation_data = AnimationData(self) self.animation_frame_data = AnimationFrameData(self) self.capability_data = CapabilityData(self) self.define_data = DefineData(self) self.mapping_data = MappingData(self) self.pixel_channel_data = PixelChannelData(self) self.pixel_position_data = PixelPositionData(self) self.scan_code_position_data = ScanCodePositionData(self) self.variable_data = VariableData(self) # Expression to Datastructure mapping self.data_mapping = { 'AssignmentExpression': { 'Array': self.variable_data, 'Variable': self.variable_data, }, 'DataAssociationExpression': { 'Animation': self.animation_data, 'AnimationFrame': self.animation_frame_data, 'PixelPosition': self.pixel_position_data, 'ScanCodePosition': self.scan_code_position_data, }, 'MapExpression': { 'TriggerCode': self.mapping_data, 'PixelChannel': self.pixel_channel_data, }, 'NameAssociationExpression': { 'Capability': self.capability_data, 'Define': self.define_data, }, } def __copy__(self): ''' On organization copy, return a safe object Attempts to only copy the datastructures that may need to diverge ''' new_obj = Organization(self.parent) # Copy only .data from each organization new_obj.animation_data.data = copy.copy(self.animation_data.data) new_obj.animation_frame_data.data = copy.copy(self.animation_frame_data.data) new_obj.capability_data.data = copy.copy(self.capability_data.data) new_obj.define_data.data = copy.copy(self.define_data.data) new_obj.mapping_data.data = copy.copy(self.mapping_data.data) new_obj.pixel_channel_data.data = copy.copy(self.pixel_channel_data.data) new_obj.pixel_position_data.data = copy.copy(self.pixel_position_data.data) new_obj.scan_code_position_data.data = copy.copy(self.scan_code_position_data.data) new_obj.variable_data.data = copy.copy(self.variable_data.data) # Also copy merge_in_log new_obj.animation_data.merge_in_log = copy.copy(self.animation_data.merge_in_log) new_obj.animation_frame_data.merge_in_log = copy.copy(self.animation_frame_data.merge_in_log) new_obj.capability_data.merge_in_log = copy.copy(self.capability_data.merge_in_log) new_obj.define_data.merge_in_log = copy.copy(self.define_data.merge_in_log) new_obj.mapping_data.merge_in_log = copy.copy(self.mapping_data.merge_in_log) new_obj.pixel_channel_data.merge_in_log = copy.copy(self.pixel_channel_data.merge_in_log) new_obj.pixel_position_data.merge_in_log = copy.copy(self.pixel_position_data.merge_in_log) new_obj.scan_code_position_data.merge_in_log = copy.copy(self.scan_code_position_data.merge_in_log) new_obj.variable_data.merge_in_log = copy.copy(self.variable_data.merge_in_log) return new_obj def stores(self): ''' Returns list of sub-datastructures ''' return [ self.animation_data, self.animation_frame_data, self.capability_data, self.define_data, self.mapping_data, self.pixel_channel_data, self.pixel_position_data, self.scan_code_position_data, self.variable_data, ] def add_expression(self, expression, debug): ''' Add expression to datastructure Will automatically determine which type of expression and place in the relevant store @param expression: KLL Expression (fully tokenized and parsed) @param debug: Enable debug output 
''' # Determine type of Expression expression_type = expression.__class__.__name__ # Determine Expression Subtype expression_subtype = expression.type # Locate datastructure data = self.data_mapping[expression_type][expression_subtype] # Debug output if debug[0]: output = "\t\033[4m{0}\033[0m".format(data.__class__.__name__) print(debug[1] and output or ansi_escape.sub('', output)) # Add expression to determined datastructure data.add_expression(expression, debug) def merge(self, merge_in, map_type, debug): ''' Merge in the given organization to this organization This organization serves as the base. @param merge_in: Organization to merge into this one @param map_type: Used for map specific merges @param debug: Enable debug output ''' # Merge each of the sub-datastructures for this, that in zip(self.stores(), merge_in.stores()): this.merge(that, map_type, debug) def cleanup(self, debug=False): ''' Post-processing step for merges that may need to remove some data in the organization. Mainly used for dropping BaseMapContext expressions after generating a PartialMapContext. ''' for store in self.stores(): store.cleanup(debug) def reduction(self, debug=False): ''' Simplifies datastructure NOTE: This will remove data, therefore, context is lost ''' for store in self.stores(): store.reduction(debug) def maxscancode(self): ''' Find max scancode per connect id @return: dictionary of connect id to max scancode ''' return self.mapping_data.maxscancode() def maxpixelid(self): ''' Find max pixel id per connect id @return: dictionary of connect id to max pixel id ''' return self.pixel_channel_data.maxpixelid() def __repr__(self): return "{0}".format(self.stores()) kll/common/parse.py#!/usr/bin/env python3 ''' KLL Parsing Expressions This file contains various parsing rules and processors used by funcparserlib for KLL REMEMBER: When editing parser BNF-like expressions, order matters. Specifically lexer tokens and parser | ''' # Parser doesn't play nice with linters, disable some checks # pylint: disable=no-self-argument, too-many-public-methods, no-self-use, bad-builtin # Copyright (C) 2016-2018 by Jacob Alexander # # This file is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this file. If not, see . 
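# --- Editor's illustrative sketch (not part of the original source) -----------
# The Make.* helpers defined below convert raw KLL token strings into Id objects
# and plain Python values consumed by the funcparserlib rules. A minimal sketch,
# runnable as a standalone script assuming the kll package is installed:
#
#     from kll.common.parse import Make
#
#     print(type(Make.scanCode("S0x10")).__name__)   # expected: ScanCodeId (scan code 16)
#     print(Make.number("0x2A"))                     # expected: 42
#     print(Make.percent("50%"))                     # expected: 0.5
#     print(len(Make.scanCode_range([0x10, 0x12])))  # expected: 3 (S0x10, S0x11, S0x12)
# -------------------------------------------------------------------------------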
### Imports ### from kll.common.id import ( AnimationId, AnimationFrameId, CapArgId, CapArgValue, CapId, HIDId, LayerId, NoneId, PixelAddressId, PixelId, PixelLayerId, ScanCodeId, TriggerId ) from kll.common.modifier import AnimationModifierList from kll.common.schedule import AnalogScheduleParam, ScheduleParam, Time from kll.extern.funcparserlib.lexer import Token from kll.extern.funcparserlib.parser import (some, a, many, oneplus, skip, maybe) ### Decorators ### # Print Decorator Variables ERROR = '\033[5;1;31mERROR\033[0m:' WARNING = '\033[5;1;33mWARNING\033[0m:' ### Classes ### # Parsing Functions class Make: ''' Collection of parse string interpreters ''' def scanCode(token): ''' Converts a raw scan code string into an ScanCodeId /w integer S0x10 -> 16 ''' if isinstance(token, int): return ScanCodeId(token) else: return ScanCodeId(int(token[1:], 0)) def hidCode(type, token): ''' Convert a given raw hid token string to an integer /w a type U"Enter" -> USB, Enter(0x28) ''' # If already converted to a HIDId, just return if isinstance(token, HIDId): return token # If first character is a U or I, strip token_val = token.value if token_val[0] == "U" or token_val[0] == "I": token_val = token_val[1:] # CONS specifier elif 'CONS' in token_val: token_val = token_val[4:] # SYS specifier elif 'SYS' in token_val: token_val = token_val[3:] # Determine locale locale = token.locale # Determine lookup dictionary lookup = None if type == 'USBCode': lookup = locale.dict('from_hid_keyboard', key_caps=True) elif type == 'SysCode': lookup = locale.dict('from_hid_sysctrl', key_caps=True) elif type == 'ConsCode': lookup = locale.dict('from_hid_consumer', key_caps=True) elif type == 'IndCode': lookup = locale.dict('from_hid_led', key_caps=True) # If using string representation of USB Code, do lookup, case-insensitive if '"' in token_val: try: match_name = token_val[1:-1].upper() hid_code = int(lookup[match_name], 0) except LookupError as err: print("{} {} ({}) is an invalid USB HID Code Lookup...".format( ERROR, err, locale )) raise else: # Already tokenized if ( type == 'USBCode' and token_val[0] == 'USB' or type == 'SysCode' and token_val[0] == 'SYS' or type == 'ConsCode' and token_val[0] == 'CONS' or type == 'IndCode' and token_val[0] == 'IND' ): hid_code = token_val[1] # Convert else: hid_code = int(token_val, 0) return HIDId(type, hid_code, locale) def usbCode(token): ''' Convert a given raw USB Keyboard hid token string to an integer /w a type U"Enter" -> USB, Enter(0x28) ''' return Make.hidCode('USBCode', token) def consCode(token): ''' Convert a given raw Consumer Control hid token string to an integer /w a type ''' return Make.hidCode('ConsCode', token) def sysCode(token): ''' Convert a given raw System Control hid token string to an integer /w a type ''' return Make.hidCode('SysCode', token) def indCode(token): ''' Convert a given raw Indicator hid token string to an integer /w a type ''' return Make.hidCode('IndCode', token) def animation(name): ''' Converts a raw animation value into an AnimationId /w name A"myname" -> myname ''' if name[0] == "A": return AnimationId(name[2:-1]) else: return AnimationId(name) def animationTrigger(animation, specifier): ''' Generate an AnimationId ''' trigger_list = [] # AnimationId trigger_list.append(AnimationId(animation)) return trigger_list, specifier def animationAssociation(animation, frame_identifier): ''' Generate an AnimationFrameId ''' trigger_list = [] # AnimationFrameId for index in frame_identifier: trigger_list.append([[AnimationFrameId(animation, 
index)]]) return trigger_list def animationCapability(animation, modifiers): ''' Apply modifiers to AnimationId ''' if modifiers is not None: animation.setModifiers(modifiers) return [animation] def animationModlist(modifiers): ''' Build an AnimationModifierList Only used for animation data association ''' modlist = AnimationModifierList() modlist.setModifiers(modifiers) return modlist def pixelCapability(pixels, modifiers): ''' Apply modifiers to list of pixels/pixellists Results in a combination of pixel capabilities ''' pixelcap_list = [] for pixel in pixels: pixel.setModifiers(modifiers) pixelcap_list.append(pixel) return pixelcap_list def pixel(token): ''' Converts a raw pixel value into a PixelId /w integer P0x3 -> 3 ''' if isinstance(token, int): return PixelId(token) else: return PixelId(int(token[1:], 0)) def pixel_list(pixel_list): ''' Converts a list a numbers into a list of PixelIds ''' pixels = [] for pixel in pixel_list: pixels.append(PixelId(pixel)) return pixels def pixelLayer(token): ''' Converts a raw pixel layer value into a PixelLayerId /w integer PL0x3 -> 3 ''' if isinstance(token, int): return PixelLayerId(token) else: return PixelLayerId(int(token[2:], 0)) def pixelLayer_list(layer_list): ''' Converts a list a numbers into a list of PixelLayerIds ''' layers = [] for layer in layer_list: layers.append(PixelLayerId(layer)) return layers def pixelchan(pixel_list, chans): ''' Apply channels to PixelId Only one pixel at a time can be mapped, hence pixel_list[0] ''' pixel = pixel_list[0] pixel.setChannels(chans) return pixel def pixelmod(pixels, modifiers): ''' Apply modifiers to list of pixels/pixellists Results in a combination of pixel capabilities ''' pixelcap_list = [] for pixel in pixels: # Convert HIDIds into PixelIds if isinstance(pixel, HIDId) or isinstance(pixel, ScanCodeId): pixel = PixelId(pixel) pixel.setModifiers(modifiers) pixelcap_list.append(pixel) return pixelcap_list def pixel_address(elems): ''' Parse pixel positioning for row/column addressing @param elems: index list or (operator, value) 40 c:0 c:30% r:i+30 ''' pixel_address_list = [] # Index list if isinstance(elems, list): # List of integers, possibly a range if isinstance(elems[0], int): for elem in elems: pixel_address_list.append(PixelAddressId(index=elem)) # Already ready to append elif isinstance(elems[0], PixelId): pixel_address_list.append(elems[0]) # No value elif isinstance(elems, Token): # Row if "r:i" in elems.name: pixel_address_list.append(PixelAddressId(relRow=0)) # Column if "c:i" in elems.name: pixel_address_list.append(PixelAddressId(relCol=0)) # Operator with value elif isinstance(elems[0], Token): # Prepare address value value = elems[1] # Positioning if elems[0].type == "ColRowOperator": # Row if elems[0].name == "r:": pixel_address_list.append(PixelAddressId(row=value)) # Column if elems[0].name == "c:": pixel_address_list.append(PixelAddressId(col=value)) # Relative Positioning elif elems[0].type == "RelCROperator": if '-' in elems[0].name: value *= -1 # Row if "r:i" in elems[0].name: pixel_address_list.append(PixelAddressId(relRow=value)) # Column if "c:i" in elems[0].name: pixel_address_list.append(PixelAddressId(relCol=value)) return pixel_address_list def pixel_address_merge(elems): ''' Merge pixel addresses together ''' # Merge is only necessary if there is more than one element if len(elems) > 1: for elem in elems[1:]: elems[0].merge(elem) return [elems[0]] def position(token): ''' Physical position split x:20 -> (x, 20) ''' return token.split(':') def 
usbCode_number(token): ''' USB Keyboard HID Code lookup ''' return HIDId('USBCode', token.value, token.locale) def consCode_number(token): ''' Consumer Control HID Code lookup ''' return HIDId('ConsCode', token.value, token.locale) def sysCode_number(token): ''' System Control HID Code lookup ''' return HIDId('SysCode', token.value, token.locale) def indCode_number(token): ''' Indicator HID Code lookup ''' return HIDId('IndCode', token.value, token.locale) def none(token): ''' Replace key-word with NoneId specifier (which indicates a noneOut capability) ''' return [[[NoneId()]]] def seqString(token, spec='lspec'): ''' Converts sequence string to a sequence of combinations 'Ab' -> U"Shift" + U"A", U"B" 'abb' -> U"A", U"B", U"NoEvent", U"B" @param spec: 'lspec' or 'rspec' ''' # Determine locale locale = token.locale # Compose string using set locale sequence = None if spec == 'lspec': sequence = locale.compose(token.value[1:-1], minimal_clears=True, no_clears=True) else: sequence = locale.compose(token.value[1:-1], minimal_clears=True) # Convert each element in sequence of combos to HIDIds hid_ids = [] for combo in sequence: new_combo = [] for elem in combo: # Lookup uid (usb code) from alias name (used in sequence) new_elem = HIDId('USBCode', int(locale.json()['from_hid_keyboard'][elem], 0), locale) new_combo.append(new_elem) hid_ids.append(new_combo) return hid_ids def seqStringL(token): ''' Converts sequence string to a sequence of combinations lspec side 'Ab' -> U"Shift" + U"A", U"B" 'abb' -> U"A", U"B", U"NoEvent", U"B" ''' return Make.seqString(token, 'lspec') def seqStringR(token): ''' Converts sequence string to a sequence of combinations rspec side 'Ab' -> U"Shift" + U"A", U"B" 'abb' -> U"A", U"B", U"NoEvent", U"B" ''' return Make.seqString(token, 'rspec') def string(token): ''' Converts a raw string to a Python string "this string" -> this string ''' return token[1:-1] def unseqString(token): ''' Converts a raw sequence string to a Python string 'this string' -> this string ''' return token[1:-1] def number(token): ''' Convert string number to Python integer ''' return int(token, 0) def numberToken(token): ''' Convert token value to Python integer ''' try: token.value = int(token.value, 0) except TypeError: pass return token def percent(token): ''' Convert string percent to Python float ''' return int(token[:-1], 0) / 100.0 def timing(token): ''' Convert raw timing parameter to integer time and determine units 1ms -> 1, ms ''' # Find ms, us, or s if 'ms' in token: unit = 'ms' num = token.split('m')[0] elif 'us' in token: unit = 'us' num = token.split('u')[0] elif 'ns' in token: unit = 'ns' num = token.split('n')[0] elif 's' in token: unit = 's' num = token.split('s')[0] else: print("{0} cannot find timing unit in token '{1}'".format(ERROR, token)) return Time(float(num), unit) def specifierTiming(timing): ''' When only timing is given, infer state at a later stage from the context of the mapping ''' return ScheduleParam(None, timing) def specifierState(state, timing=None): ''' Generate a Schedule Parameter Automatically mutates itself into the correct object type ''' return ScheduleParam(state, timing) def specifierAnalog(value): ''' Generate an Analog Schedule Parameter ''' return AnalogScheduleParam(value) def specifierUnroll(identifier, schedule_params): ''' Unroll specifiers into the trigger/result identifier First, combine all Schedule Parameters into a Schedul Then attach Schedule to the identifier If the identifier is a list, then iterate through them and apply the schedule 
to each ''' # Check if this is a list of identifiers if isinstance(identifier, list): for ident in identifier: ident.setSchedule(schedule_params) return identifier else: identifier.setSchedule(schedule_params) return [identifier] def layerTypeIdent(layer_type, inner_list, specifier): ''' Given a layer expression, determine what kind of layer expression Layer LayerShift LayerLatch LayerLock ''' # Determine layer type (remove [) layer_type = layer_type[:-1] # Add layer type to each given layer identifier_list = [] for layer in inner_list: identifier_list.append(LayerId(layer_type, layer)) return identifier_list, specifier def genericTriggerIdent(identifier, code, specifier): ''' Given a generic trigger, create a TriggerId object Generic Triggers don't support ranges ''' trigger_obj = TriggerId(identifier, code) return trigger_obj, specifier # Range can go from high to low or low to high def scanCode_range(rangeVals): ''' Scan Code range expansion S[0x10-0x12] -> S0x10, S0x11, S0x12 ''' start = rangeVals[0] end = rangeVals[1] # Swap start, end if start is greater than end if start > end: start, end = end, start # Iterate from start to end, and generate the range values = list(range(start, end + 1)) # Generate ScanCodeIds return [ScanCodeId(v) for v in values] # Range can go from high to low or low to high # Warn on 0-9 for USBCodes (as this does not do what one would expect) TODO # Lookup USB HID tags and convert to a number def hidCode_range(type, rangeVals): ''' HID Code range expansion U["A"-"C"] -> U"A", U"B", U"C" ''' # Check if already integers if isinstance(rangeVals[0], int): start = rangeVals[0] else: start = Make.hidCode(type, rangeVals[0]).uid if isinstance(rangeVals[1], int): end = rangeVals[1] else: end = Make.hidCode(type, rangeVals[1]).uid # Swap start, end if start is greater than end if start > end: start, end = end, start # Iterate from start to end, and generate the range listRange = list(range(start, end + 1)) # Determine locale locale = rangeVals[0].locale # Convert each item in the list to a tuple for item in range(len(listRange)): listRange[item] = HIDId(type, listRange[item], locale) return listRange def usbCode_range(rangeVals): ''' USB Keyboard HID Code range expansion ''' return Make.hidCode_range('USBCode', rangeVals) def sysCode_range(rangeVals): ''' System Control HID Code range expansion ''' return Make.hidCode_range('SysCode', rangeVals) def consCode_range(rangeVals): ''' Consumer Control HID Code range expansion ''' return Make.hidCode_range('ConsCode', rangeVals) def indCode_range(rangeVals): ''' Indicator HID Code range expansion ''' return Make.hidCode_range('IndCode', rangeVals) def range(start, end): ''' Converts a start and end points of a range to a list of numbers Can go low to high or high to low ''' # High to low if end < start: return list(range(end, start + 1)) # Low to high return list(range(start, end + 1)) def capArg(argument, width=None): ''' Converts a capability argument:width to a CapArgId If no width is specified, it is ignored ''' return CapArgId(argument, width) def capArgValue(tuple_value): ''' Converts a capability argument value to a CapArgValue ''' sign, value = tuple_value if sign is not None: value *= -1 return CapArgValue(value) def capUsage(name, arguments): ''' Converts a capability tuple, argument list to a CapId Usage ''' return CapId(name, 'Capability', arguments) def debug(tokens): ''' Just prints tokens Used for debugging ''' print(tokens) return tokens ### Rules ### # Base Rules def const(x): return lambda _: x def 
unarg(f): return lambda x: f(*x) def flatten(list): return sum(list, []) def tokenValue(x): ''' Return string value of a token @param x: Token @returns: String value of token ''' return x.value def tokenType(t): ''' Returns string of token @param t: Name of token type @returns: String of token ''' return some(lambda x: x.type == t) >> tokenValue def tokenTypeOnly(t): ''' Returns the full token object @param t: Name of token type @return: Token matching ''' return some(lambda x: x.type == t) def operator(s): return a(Token('Operator', s)) >> tokenValue def parenthesis(s): return a(Token('Parenthesis', s)) >> tokenValue def bracket(s): return a(Token('Bracket', s)) >> tokenValue eol = a(Token('EndOfLine', ';')) def maybeFlatten(items): ''' Iterate through top-level lists Flatten, only if the element is also a list [[1,2],3,[[4,5]]] -> [1,2,3,[4,5]] ''' new_list = [] for elem in items: # Flatten only if a list if isinstance(elem, list): new_list.extend(elem) else: new_list.append(elem) return new_list def listElem(item): ''' Convert to a list element ''' return [item] def listToTuple(items): ''' Convert list to a tuple ''' return tuple(items) def oneLayerFlatten(items): ''' Flatten only the top layer (list of lists of ...) ''' mainList = [] for sublist in items: for item in sublist: mainList.append(item) return mainList def optionCompression(sequence): ''' Adds another dimension to a list of lists. This is the inverse operation of optionExpansion, iff there were no expanded ranges @param: sequence: Sequence of combos @returns: Squence of combos of ranges ''' new_list = [] for combo in sequence: new_combo = [] for elem in combo: new_combo.append([elem]) new_list.append(new_combo) return new_list def optionExpansion(sequences): ''' Expand ranges of values in the 3rd dimension of the list, to a list of 2nd lists i.e. [ sequence, [ combo, [ range ] ] ] --> [ [ sequence, [ combo ] ],