# ---- cuppa/__init__.py ----

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

def run( *args, **kwargs ):
    import cuppa.core
    cuppa.core.run( *args, **kwargs )

def add_option( *args, **kwargs ):
    import cuppa.core
    cuppa.core.add_option( *args, **kwargs )

import cuppa.build_with_header_library
from cuppa.build_with_header_library import header_library_dependency


# ---- cuppa/tree.py ----

# Copyright Jamie Allsop 2015-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   Tree
#-------------------------------------------------------------------------------

def get_all_children( node ):
    return node.all_children()


def filter_out( path, ignore_filter ):
    for ignore in ignore_filter:
        if path.startswith( ignore ):
            return True
    return False


# process_callback should have the signature process( node )
def process_tree( root, process_callback, ignore_filter=[], visited=set() ):
    path = str( root )
    children = get_all_children( root )

    if path in visited and children:
        process_callback( root )
        return

    visited.add( path )

    if filter_out( path, ignore_filter ):
        return

    process_callback( root )

    if children:
        for child in children[:-1]:
            process_tree( child, process_callback, ignore_filter, visited )
        process_tree( children[-1], process_callback, ignore_filter, visited )


def print_tree( root, ignore_filter=[], margin=[0], visited=set() ):
    path = str( root )
    children = get_all_children( root )

    def get_margin(m):
        return [" ","| "][m]

    margins = list(map(get_margin, margin[:-1]))

    if path in visited and children:
        print ''.join(margins + ['+-[', path, ']'])
        return

    visited.add( path )

    if filter_out( path, ignore_filter ):
        return

    print ''.join(margins + ['+-', path])

    if children:
        margin.append(1)
        for child in children[:-1]:
            print_tree( child, ignore_filter, margin, visited )
        margin[-1] = 0
        print_tree( children[-1], ignore_filter, margin, visited )
        margin.pop()


# ---- cuppa/colourise.py ----
# (file name inferred from "from cuppa.colourise import ..." elsewhere in the package;
#  the imports and the opening of the colouriser class, including most of its
#  emphasise() method, are truncated in the archive dump)

                # ... 0:
                time_found = True
                emphasised += self.start_highlight('time')
            emphasised += char
        emphasised += colorama.Style.RESET_ALL
        return emphasised

    def highlight( self, meaning, text ):
        if not self.use_colour:
            return text
        else:
            return self.start_highlight( meaning ) + text + colorama.Style.RESET_ALL

    def start_colour( self, meaning ):
        if self.use_colour:
            return self._start_colour( meaning )
        return ''

    def start_highlight( self, meaning ):
        if self.use_colour:
            return self._start_highlight( meaning )
        return ''

    def reset( self ):
        if self.use_colour:
            return self._reset()
        return ''

    def _reset( self ):
        return colorama.Style.RESET_ALL

    ## Make these functions into simple dictionary lookups
    def _start_colour( self, meaning ):
        if meaning == 'error': return colorama.Fore.RED
        elif meaning == 'warning': return colorama.Fore.MAGENTA
        elif meaning == 'summary': return colorama.Fore.BLACK
        elif meaning == 'passed': return colorama.Fore.GREEN
        elif meaning == 'success': return colorama.Fore.GREEN
        elif meaning == 'unexpected_success': return colorama.Fore.GREEN
        elif meaning == 'expected_failure': return colorama.Fore.YELLOW
        elif meaning == 'failure': return colorama.Fore.RED
        elif meaning == 'failed': return colorama.Fore.RED
        elif meaning == 'aborted': return colorama.Fore.RED
        elif meaning == 'skipped': return colorama.Fore.BLACK
        elif meaning == 'notice': return colorama.Fore.YELLOW
        elif meaning == 'time': return colorama.Fore.BLUE
        elif meaning == 'info': return colorama.Fore.BLUE
        elif meaning == 'message': return ''

    def _start_highlight( self, meaning ):
        if meaning == 'error': return colorama.Style.BRIGHT + colorama.Back.RED + colorama.Fore.WHITE
        elif meaning == 'warning': return colorama.Style.BRIGHT + colorama.Back.MAGENTA + colorama.Fore.WHITE
        elif meaning == 'summary': return colorama.Style.BRIGHT + colorama.Back.BLACK + colorama.Fore.WHITE
        elif meaning == 'success': return colorama.Style.BRIGHT + colorama.Back.GREEN + colorama.Fore.WHITE
        elif meaning == 'unexpected_success': return colorama.Style.BRIGHT + colorama.Back.GREEN + colorama.Fore.BLACK
        elif meaning == 'passed': return colorama.Style.BRIGHT + colorama.Back.GREEN + colorama.Fore.WHITE
        elif meaning == 'expected_failure': return colorama.Style.BRIGHT + colorama.Back.YELLOW + colorama.Fore.WHITE
        elif meaning == 'failure': return colorama.Style.BRIGHT + colorama.Back.RED + colorama.Fore.WHITE
        elif meaning == 'failed': return colorama.Style.BRIGHT + colorama.Back.RED + colorama.Fore.WHITE
        elif meaning == 'aborted': return colorama.Style.BRIGHT + colorama.Back.RED + colorama.Fore.BLACK
        elif meaning == 'skipped': return colorama.Style.BRIGHT + colorama.Back.BLACK + colorama.Fore.WHITE
        elif meaning == 'notice': return colorama.Style.BRIGHT + colorama.Back.YELLOW + colorama.Fore.WHITE
        elif meaning == 'time': return colorama.Style.BRIGHT + colorama.Back.BLUE + colorama.Fore.WHITE
        elif meaning == 'info': return colorama.Style.BRIGHT + colorama.Back.BLUE + colorama.Fore.WHITE
        elif meaning == 'message': return ''

    def as_error( self, text ):
        return self.colour( "error", text )

    def as_warning( self, text ):
        return self.colour( "warning", text )

    def as_info( self, text ):
        return self.colour( "info", text )

    def as_message( self, text ):
        return self.colour( "message", text )

    def as_notice( self, text ):
        return self.colour( "notice", text )


def as_emphasised( env, text ):
    return env['colouriser'].emphasise( text )

def as_error( env, text ):
    return env['colouriser'].colour( 'error', text )

def as_warning( env, text ):
    return env['colouriser'].colour( 'warning', text )

def as_info( env, text ):
    return env['colouriser'].colour( 'info', text )

def as_message( env, text ):
    return env['colouriser'].colour( 'message', text )

def as_notice( env, text ):
    return env['colouriser'].colour( 'notice', text )


# ---- cuppa/configure.py ----

# Copyright Jamie Allsop 2013-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Configure #------------------------------------------------------------------------------- # standard library Imports import os import ast # Scons Imports import SCons.Script import cuppa.options SCons.Script.AddOption( '--show-conf', dest='show_conf', action='store_true', help='Show the current values in the configuration file if one exists' ) SCons.Script.AddOption( '--save-conf', dest='save_conf', action='store_true', help='Save the current command-line a configuration file' ) SCons.Script.AddOption( '--update-conf', dest='update_conf', action='store_true', help='Update the configuration file with the current command-line' ) SCons.Script.AddOption( '--remove-settings', type='string', nargs=1, action='callback', callback=cuppa.options.list_parser( 'remove_settings' ), help='Remove the listed settings from the configuration file' ) SCons.Script.AddOption( '--clear-conf', dest='clear_conf', action='store_true', help='Clear the configuration file' ) class never_save(object): pass default_scons_options = { 'debug_explain': False, 'debug_includes': False, 'climb_up': never_save } class Configure(object): def __init__( self, env, conf_path="configure.conf", callback=None ): self._env = env self._conf_path = conf_path self._callback = callback env['configured_options'] = {} self._colouriser = env['colouriser'] self._configured_options = {} def load( self ): self._show = self._env.get_option( 'show_conf' ) self._save = self._env.get_option( 'save_conf' ) self._remove = self._env.get_option( 'remove_settings' ) self._update = self._env.get_option( 'update_conf' ) self._clear = self._env.get_option( 'clear_conf' ) self._configure = self._save or self._remove or self._update self._clean = self._env.get_option( 'clean' ) self._unconfigure = ( self._save and self._clean ) or self._clear if self._unconfigure: self._configure = False print "cuppa: configure - {}".format( self._colouriser.as_notice( "Clear configuration requested..." ) ) if os.path.exists( self._conf_path ): print "cuppa: configure - removing configure file [{}]".format( self._colouriser.as_info( self._conf_path ) ) os.remove( self._conf_path ) else: print "cuppa: configure - configure file [{}] does not exist. Unconfigure not needed".format( self._colouriser.as_info( self._conf_path ) ) return elif self._configure: print "cuppa: configure - {}".format( self._colouriser.as_notice( "Update configuration requested..." 
) ) if not self._save: self._loaded_options = self._load_conf() else: self._loaded_options = {} self._env['configured_options'] = self._loaded_options self._env['default_options'].update( self._loaded_options ) def save( self ): if self._configure and not self._clean: if self._save: self._save_conf() else: if self._update: self._update_conf() if self._remove: self._remove_settings() def handle_conf_only( self ): return self._save or self._update or self._remove or self._clear or self._show def action( self ): if self._save: return "save" elif self._update: return "update" elif self._remove: return "remove" elif self._clear: return "clear" elif self._show: return "show" def configure( self, env ): configure = SCons.Script.Configure( env ) if self._callback: self._callback( configure ) env = configure.Finish() def _load_conf( self ): settings = {} if os.path.exists(self._conf_path): with open(self._conf_path) as config_file: print "cuppa: configure - configure file [{}] exists. Load stored settings...".format( self._colouriser.as_info( self._conf_path ) ) for line in config_file.readlines(): name, value = tuple( l.strip() for l in line.split('=', 1) ) try: value = ast.literal_eval( str(value) ) except: pass self._print_setting( 'loading', name, value ) settings[name] = value if settings: print "cuppa: configure - load complete" else: print "cuppa: configure - no settings to load, skipping configure" return settings def _is_defaulted_scons_option( self, key, value ): if key in default_scons_options: if default_scons_options[key] == value: return True elif default_scons_options[key] == never_save: return True return False def _is_saveable( self, key, value ): return( not key.startswith("__") and not self._is_defaulted_scons_option( key, value ) and not key =='save_conf' and not key =='update_conf' and not key =='remove_settings' and not key =='show_conf' and not key =='clear_conf' ) def _print_setting( self, action, key, value ): print "cuppa: configure - {} [{}] = [{}]".format( action, self._colouriser.colour( 'notice', key ), self._colouriser.colour( 'notice', str(value) ) ) def _save_settings( self ): options = self._loaded_options for key, value in SCons.Script.Main.OptionsParser.values.__dict__.items(): if self._is_saveable( key, value ): try: value = ast.literal_eval( str(value) ) except: pass options[key] = value with open(self._conf_path, "w") as config_file: for key, value in options.items(): self._print_setting( 'saving', key, value ) config_file.write( "{} = {}\n".format( key, value ) ) def _remove_settings( self ): initial_option_count = len(self._loaded_options) print "cuppa: configure - Remove settings requested for the following options {}".format( self._remove ) for setting in self._remove: if setting in self._loaded_options: del self._loaded_options[setting] print "cuppa: configure - removing option [{}] as requested".format( self._colouriser.colour( 'notice', "--" + setting ) ) if initial_option_count != len(self._loaded_options): self._update_conf() def _save_conf( self ): print "cuppa: configure - {}".format( self._colouriser.colour( 'notice', "save current settings..." ) ) self._save_settings() print "cuppa: configure - {}".format( self._colouriser.colour( 'notice', "save complete" ) ) def _update_conf( self ): print "cuppa: configure - {}".format( self._colouriser.colour( 'notice', "updating current settings..." 
            ) )
        self._save_settings()
        print "cuppa: configure - {}".format( self._colouriser.colour( 'notice', "update complete" ) )


# ---- cuppa/build_platform.py ----

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   Build Platform
#-------------------------------------------------------------------------------

import platform

# Custom
import cuppa.modules.registration

from cuppa.platforms import *


class PlatformException(Exception):
    def __init__(self, value):
        self.parameter = value
    def __str__(self):
        return repr(self.parameter)


class Platform(object):

    @classmethod
    def _get_supported_platforms( cls, supported ):
        cuppa.modules.registration.add_to_env( 'platforms', { 'env': supported } )

    @classmethod
    def _create( cls ):
        cls._supported = { 'platforms': {} }
        cls._get_supported_platforms( cls._supported )
        system = platform.system()
        if system not in cls._supported['platforms']:
            raise PlatformException( 'Platform [' + system + '] not supported. Supported platforms are ' + str(cls._supported['platforms']) )
        cls._platform = cls._supported['platforms'][ system ]
        cls._platform.initialise()

    @classmethod
    def supported( cls ):
        if not hasattr(cls, '_supported'):
            cls._create()
        return cls._supported['platforms']

    @classmethod
    def current( cls ):
        if not hasattr(cls, '_platform'):
            cls._create()
        return cls._platform


def name():
    return Platform.current().name()


def constants():
    return Platform.current().constants()


# ---- cuppa/recursive_glob.py ----

# Copyright Jamie Allsop 2012-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   RecursiveGlob
#-------------------------------------------------------------------------------

import fnmatch
import re
import os

import cuppa.utility


def glob( start, file_pattern, exclude_dirs_pattern=None, discard_pattern=None ):

    if cuppa.utility.is_string( file_pattern ):
        file_pattern = re.compile( fnmatch.translate( file_pattern ) )

    if exclude_dirs_pattern:
        if cuppa.utility.is_string( exclude_dirs_pattern ):
            exclude_dirs_pattern = re.compile( fnmatch.translate( exclude_dirs_pattern ) )

    if discard_pattern:
        if cuppa.utility.is_string( discard_pattern ):
            discard_pattern = re.compile( fnmatch.translate( discard_pattern ) )

    matches = []
    subdir = False

    for root, dirnames, filenames in os.walk( start ):

        if exclude_dirs_pattern:
            # remove any directories from the search that match the exclude regex
            dirnames[:] = [ d for d in dirnames if not exclude_dirs_pattern.match(d) ]

        exclude_this_dir = False
        matches_in_this_dir = []

        for filename in filenames:
            if subdir and discard_pattern and discard_pattern.match( filename ):
                # if we are in a subdir and it contains a file that matches the discard_pattern
                # set exclude_this_dir to True so later we can discard any local matches we'd
                # already encountered while walking the directory
                exclude_this_dir = True
                break
            if file_pattern.match( filename ):
                matches_in_this_dir.append( os.path.join( root, filename ) )

        if not exclude_this_dir:
            matches += matches_in_this_dir
        else:
            # We are excluding this directory and therefore all of its subdirs
            dirnames[:] = []

        # After the first pass through the loop we will be in a subdirectory
        subdir = True

    return matches
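# The snippet below is not part of the package sources: it is a minimal usage sketch
# of cuppa.recursive_glob.glob() as defined above. The patterns and directory names
# ("*.sconscript", "_build*", "sconstruct") are illustrative assumptions only.
import cuppa.recursive_glob

# Find every "*.sconscript" below the current directory, pruning any "_build*"
# directories and discarding a subdirectory's matches if it contains its own
# "sconstruct" file (string patterns are translated via fnmatch, as in glob() above).
sconscripts = cuppa.recursive_glob.glob(
        ".",
        "*.sconscript",
        exclude_dirs_pattern="_build*",
        discard_pattern="sconstruct"
)
for path in sconscripts:
    print path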
tDøbbcuppa/options.py # Copyright Jamie Allsop 2014-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Options #------------------------------------------------------------------------------- class list_parser(object): def __init__( self, attribute ): self._attribute = attribute def __call__( self, option, opt, value, parser): setattr( parser.values, self._attribute, value.split(',') ) PK 1: SCons.Script.SetOption( 'num_jobs', job_count ) parallel_mode = "automatically" default_env['job_count'] = job_count default_env['parallel'] = parallel if job_count>1: print "cuppa: Running in {} with option [{}] set {} as [{}]".format( self._as_emphasised("parallel mode"), self._as_info( "jobs" ), self._as_emphasised(parallel_mode), self._as_info( str( SCons.Script.GetOption( 'num_jobs') ) ) ) if not help and self._configure.handle_conf_only(): self._configure.save() if not help and not self._configure.handle_conf_only(): self.build( default_env ) if self._configure.handle_conf_only(): print "cuppa: Handling onfiguration only, so no builds will be attempted." print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:" print "" print "scons -D {}".format( self._command_line_from_settings( default_env['configured_options'] ) ) print "" print "cuppa: Nothing to be done. Exiting." SCons.Script.Exit() def _as_emphasised( self, text ): return self._default_env['colouriser'].emphasise( text ) def _as_error( self, text ): return self._default_env['colouriser'].colour( 'error', text ) def _as_warning( self, text ): return self._default_env['colouriser'].colour( 'warning', text ) def _as_info( self, text ): return self._default_env['colouriser'].colour( 'info', text ) def _as_notice( self, text ): return self._default_env['colouriser'].colour( 'notice', text ) def _command_line_from_settings( self, settings ): commands = [] for key, value in settings.iteritems(): command = self._as_emphasised( "--" + key ) if value != True and value != False: if not isinstance( value, list ): command += "=" + self._as_info( str(value) ) else: command += "=" + self._as_info( ",".join( value ) ) commands.append( command ) commands.sort() return " ".join( commands ) def get_active_actions_for_variant( self, default_env, active_variants, variant ): available_variants = default_env[ self.variants_key ] available_actions = default_env[ self.actions_key ] specified_actions = {} for key, action in available_actions.items(): if default_env.get_option( action.name() ): specified_actions[ action.name() ] = action if not specified_actions: default_variants = active_variants if default_variants: for variant in default_variants: if available_actions.has_key( variant ): specified_actions[ variant ] = available_actions[ variant ] active_actions = {} for key, action in specified_actions.items(): if key not in available_variants: active_actions[ key ] = action elif key == variant.name(): active_actions[ key ] = action return active_actions def create_build_variants( self, toolchain, default_env ): variants = default_env[ self.variants_key ] active_variants = {} for key, variant in variants.items(): if default_env.get_option( variant.name() ): active_variants[ variant.name() ] = variant if not active_variants: default_variants = default_env['default_variants'] or toolchain.default_variants() if default_variants: for variant in 
default_variants: if variants.has_key( variant ): active_variants[ variant ] = variants[ variant ] variant_envs = {} for key, variant in active_variants.items(): variant_envs[ key ] = variant.create( default_env.Clone(), toolchain ) if not default_env['raw_output']: cuppa.output_processor.Processor.install( variant_envs[ key ] ) variant_envs[ key ]['toolchain'] = toolchain variant_envs[ key ]['variant'] = variant variant_envs[ key ]['variant_actions'] = self.get_active_actions_for_variant( default_env, active_variants, variant ) return variant_envs def get_sub_sconscripts( self, path, exclude_dirs ): file_regex = re.compile( r'([^.]+[.])?sconscript$', re.IGNORECASE ) discard_if_subdir_contains_regex = re.compile( r'(SC|Sc|sc)onstruct' ) def up_dir( path ): element = next( e for e in path.split(os.path.sep) if e ) return element == ".." exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ] exclude_dirs = "|".join( exclude_dirs ) exclude_dirs_regex = re.compile( exclude_dirs, re.IGNORECASE ) return cuppa.recursive_glob.glob( path, file_regex, exclude_dirs_pattern= exclude_dirs_regex, discard_pattern=discard_if_subdir_contains_regex ) def colour_items( self, items ): return "'{}'".format( "', '".join( self._as_notice( item ) for item in items ) ) def build( self, default_env ): projects = default_env.get_option( 'projects' ) toolchains = default_env['active_toolchains'] if projects == None: projects = default_env['default_projects'] if projects == None or not default_env['run_from_launch_dir']: sub_sconscripts = self.get_sub_sconscripts( default_env['launch_dir'], [ default_env['build_root'], default_env['download_root'] ] ) if sub_sconscripts: projects = sub_sconscripts print "cuppa: Using sub-sconscripts [{}]".format( self.colour_items( projects ) ) elif projects != None: print "cuppa: Using default_projects [{}]".format( self.colour_items( projects ) ) if projects: sconscripts = [] for project in projects: if os.path.exists( project ) and os.path.isdir( project ): sub_sconscripts = self.get_sub_sconscripts( project, [ default_env['build_root'], default_env['download_root'] ] ) if sub_sconscripts: print "cuppa: Reading project folder [{}] and using sub-sconscripts [{}]".format( project, self.colour_items( sub_sconscripts ) ) sconscripts.extend( sub_sconscripts ) else: sconscripts.append( project ) for toolchain in toolchains: toolchain_env = default_env.Clone() toolchain_env['default_env'] = default_env toolchain.initialise_env( toolchain_env ) variants = self.create_build_variants( toolchain, toolchain_env ) for variant, env in variants.items(): for sconscript in sconscripts: self.call_project_sconscript_files( toolchain, variant, env, sconscript ) else: print "cuppa: No projects to build. 
Nothing to be done" def call_project_sconscript_files( self, toolchain, variant, sconscript_env, project ): sconscript_file = project if not os.path.exists( project ) or os.path.isdir( project ): sconscript_file = sconscript_file + '.sconscript' if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ): print "cuppa: project exists and added to build [{}] using [{},{}]".format( self._as_notice( sconscript_file ), self._as_notice( toolchain.name() ), self._as_notice( variant ) ) path_without_ext = os.path.splitext( sconscript_file )[0] sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file ) name = os.path.splitext( sconscript_name )[0] if name.lower() == "sconscript": path_without_ext = sconstruct_offset_path name = path_without_ext sconscript_env['sconscript_file'] = sconscript_file build_root = sconscript_env['build_root'] cloned_env = sconscript_env.Clone() cloned_env['sconscript_env'] = sconscript_env cloned_env['sconscript_build_dir'] = path_without_ext cloned_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() ) cloned_env['sconscript_dir'] = os.path.join( sconscript_env['base_path'], sconstruct_offset_path ) cloned_env['build_dir'] = os.path.normpath( os.path.join( build_root, path_without_ext, toolchain.name(), variant, 'working', '' ) ) cloned_env['abs_build_dir'] = os.path.abspath( cloned_env['build_dir'] ) cloned_env['offset_dir'] = sconstruct_offset_path cloned_env['final_dir'] = '..' + os.path.sep + 'final' + os.path.sep cloned_env['active_toolchain']= toolchain def abs_final_dir( abs_build_dir, final_dir ): return os.path.isabs( final_dir ) and final_dir or os.path.normpath( os.path.join( abs_build_dir, final_dir ) ) cloned_env['abs_final_dir'] = abs_final_dir( cloned_env['abs_build_dir'], cloned_env['final_dir'] ) cloned_env.AppendUnique( INCPATH = [ cloned_env['offset_dir'] ] ) sconscript_exports = { 'env' : cloned_env, 'sconscript_env' : sconscript_env, 'build_root' : build_root, 'build_dir' : cloned_env['build_dir'], 'abs_build_dir' : cloned_env['abs_build_dir'], 'final_dir' : cloned_env['final_dir'], 'abs_final_dir' : cloned_env['abs_final_dir'], 'common_variant_final_dir': '../../../common/final/', 'common_project_final_dir': build_root + '/common/final/', 'project' : name, } self._configure.configure( sconscript_exports['env'] ) cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports ) SCons.Script.SConscript( [ sconscript_file ], variant_dir = sconscript_exports['build_dir'], duplicate = 0, exports = sconscript_exports ) else: print "cuppa: Skipping non-existent project [{}]".format( self._as_error( sconscript_file ) ) def run( *args, **kwargs ): Construct( *args, **kwargs ) PK-¹ƒF2¨Ô**cuppa/platforms/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) import cuppa.modules __all__ = cuppa.modules.registration.get_module_list( __file__ ) PK-¹ƒFH\]úMMcuppa/platforms/linux.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Linux Platform #------------------------------------------------------------------------------- from subprocess import Popen, PIPE from re import match, search, MULTILINE from os import path import platform import SCons.Script class LinuxException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class Linux: @classmethod def add_to_env( cls, args ): args['env']['platforms']['Linux'] = cls() def __init__( self ): self._toolchain = None self.values = {} def default_toolchain( self ): if not self._toolchain: env = SCons.Script.Environment() self._toolchain = env['CC'] return self._toolchain return self._toolchain def __getitem__( self, key ): return self.values.get( key ) def _bit_depth( self, machine ): if machine == "i386": return '32' elif machine == "i686": return '32' elif machine == "x86_64": return '64' else: return 'unknown' def _libc_version( self, machine, system ): libc_file = "libc.so.6" libc_path = "/lib/" + libc_file if not path.exists( libc_path ): multiarch_lib_path = '-'.join( [ machine, system.lower(), 'gnu' ] ) libc_path = "/lib/" + multiarch_lib_path + "/" + libc_file libc_version = Popen([libc_path], stdout=PIPE).communicate()[0] return 'libc' + search( r'^GNU C Library [()a-zA-Z ]*([0-9][.0-9]+)', libc_version, MULTILINE ).expand(r'\1').replace('.','') def initialise( self ): ( system, node, release, version, machine, processor ) = platform.uname() self.values['system'] = system self.values['node'] = node self.values['release'] = release self.values['version'] = version self.values['machine'] = machine self.values['processor'] = processor self.values['os'] = system self.values['architecture'] = machine self.values['os_version'] = match( r'(\d+\.\d+)', release ).group(0) self.values['bit_width'] = self._bit_depth( machine ) self.values['libc_version'] = self._libc_version( machine, system ) self.values['platform_path'] = self.values['architecture'] + '_' + self.values['os'] + '_' + self.values['os_version'] class Constants(object): CLOCK_REALTIME = 0 # System-wide realtime clock. CLOCK_MONOTONIC = 1 # Monotonic system-wide clock. CLOCK_PROCESS_CPUTIME_ID = 2 # High-resolution timer from the CPU. CLOCK_THREAD_CPUTIME_ID = 3 # Thread-specific CPU-time clock. CLOCK_MONOTONIC_RAW = 4 # Monotonic system-wide clock, not adjusted for frequency scaling. CLOCK_REALTIME_COARSE = 5 # System-wide realtime clock, updated only on ticks. CLOCK_MONOTONIC_COARSE = 6 # Monotonic system-wide clock, updated only on ticks. CLOCK_BOOTTIME = 7 # Monotonic system-wide clock that includes time spent in suspension. @classmethod def constants( cls ): return cls.Constants @classmethod def name( cls ): return cls.__name__ PK-¹ƒF:ï8§ § cuppa/platforms/darwin.py # Copyright Jamie Allsop 2014-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Darwin Platform #------------------------------------------------------------------------------- import platform import SCons.Script class DarwinException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class Darwin: @classmethod def add_to_env( cls, args ): args['env']['platforms']['Darwin'] = cls() def __init__( self ): self._toolchain = "clang" self.values = {} def default_toolchain( self ): if not self._toolchain: env = SCons.Script.Environment() self._toolchain = env['CC'] return self._toolchain return self._toolchain def __getitem__( self, key ): return self.values.get( key ) def _bit_depth( self, machine ): if machine == "i386": return '32' elif machine == "i686": return '32' elif machine == "x86_64": return '64' else: return 'unknown' def initialise( self ): ( system, node, release, version, machine, processor ) = platform.uname() self.values['system'] = system self.values['node'] = node self.values['release'] = release self.values['version'] = version self.values['machine'] = machine self.values['processor'] = processor self.values['os'] = system self.values['architecture'] = machine self.values['os_version'] = release # re.match( r'(\d+\.\d+)', release ).group(0) self.values['bit_width'] = self._bit_depth( machine ) self.values['platform_path'] = self.values['architecture'] + '_' + self.values['os'] + '_' + self.values['os_version'] class Constants(object): CLOCK_REALTIME = 0 # System-wide realtime clock. CLOCK_MONOTONIC = 1 # Monotonic system-wide clock. CLOCK_PROCESS_CPUTIME_ID = 2 # High-resolution timer from the CPU. CLOCK_THREAD_CPUTIME_ID = 3 # Thread-specific CPU-time clock. CLOCK_MONOTONIC_RAW = 4 # Monotonic system-wide clock, not adjusted for frequency scaling. CLOCK_REALTIME_COARSE = 5 # System-wide realtime clock, updated only on ticks. CLOCK_MONOTONIC_COARSE = 6 # Monotonic system-wide clock, updated only on ticks. CLOCK_BOOTTIME = 7 # Monotonic system-wide clock that includes time spent in suspension. @classmethod def constants( cls ): return cls.Constants @classmethod def name( cls ): return cls.__name__ PKÓGE¯·g¡ËËcuppa/platforms/windows.py # Copyright Jamie Allsop 2014-2014 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Windows Platform #------------------------------------------------------------------------------- import platform import SCons.Script class WindowsException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class Windows: @classmethod def add_to_env( cls, args ): args['env']['platforms']['Windows'] = cls() def __init__( self ): self._toolchain = None self.values = {} def default_toolchain( self ): if not self._toolchain: env = SCons.Script.Environment() self._toolchain = env['CC'] return self._toolchain return self._toolchain def __getitem__( self, key ): return self.values.get( key ) def _bit_depth( self, machine ): if machine == "i386": return '32' elif machine == "i686": return '32' elif machine == "x86_64": return '64' else: return 'unknown' def initialise( self ): ( system, node, release, version, machine, processor ) = platform.uname() self.values['system'] = system self.values['node'] = node self.values['release'] = release self.values['version'] = version self.values['machine'] = machine self.values['processor'] = processor self.values['os'] = system self.values['architecture'] = machine self.values['os_version'] = release self.values['bit_width'] = self._bit_depth( machine ) self.values['platform_path'] = self.values['architecture'] + '_' + self.values['os'] + '_' + self.values['os_version'] class Constants(object): pass @classmethod def constants( cls ): return cls.Constants @classmethod def name( cls ): return cls.__name__ PK-¹ƒF2¨Ô**$cuppa/project_generators/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) import cuppa.modules __all__ = cuppa.modules.registration.get_module_list( __file__ ) PKؽôFPÒ©DBDB&cuppa/project_generators/codeblocks.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Codeblocks #------------------------------------------------------------------------------- import os from exceptions import Exception import cuppa.path import cuppa.progress import cuppa.tree import cuppa.options from cuppa.colourise import as_error, as_notice class CodeblocksException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) def ignored_types( env ): return [ env['PROGSUFFIX'], env['LIBSUFFIX'], env['SHLIBSUFFIX'], env['OBJSUFFIX'], env['SHOBJSUFFIX'], '.log' ] class Codeblocks(object): @classmethod def add_options( cls, add_option ): add_option( '--generate-cbs', dest='generate-cbs', action='store_true', help='Tell scons to generate a Codeblocks project', default=False ) add_option( '--generate-cbs-include-thirdparty', dest='generate_cbs_include_thirdparty', action='store_true', help='Include dependencies under the thirdparty directory or in downloaded libraries.', default=False ) add_option( '--generate-cbs-exclude-relative-branches', dest='generate_cbs_exclude_relative_branches', action='store_true', help='Exclude branches outside of the working directory', default=False ) add_option( '--generate-cbs-place-with-sconscript', dest='generate_cbs_place_with_sconscript', action='store_true', help='Exclude branches outside of the working directory', default=False ) add_option( '--generate-cbs-exclude-paths-starting', type='string', nargs=1, action='callback', callback=cuppa.options.list_parser( 'generate_cbs_exclude_paths_starting' ), help='Exclude dependencies starting with the specified paths from the file list for the project' ) @classmethod def add_to_env( cls, env ): try: generate = env.get_option( 'generate-cbs' ) if generate: obj = cls( env, env.get_option( 'generate_cbs_include_thirdparty' ), env.get_option( 'generate_cbs_exclude_relative_branches' ), env.get_option( 'generate_cbs_exclude_paths_starting' ), env.get_option( 'generate_cbs_place_with_sconscript' ) ) env['project_generators']['codeblocks'] = obj except CodeblocksException as error: print as_error( env, "cuppa: error: failed to create CodeBlocks project generator with error [{}]".format( error ) ) def __init__( self, env, include_thirdparty, exclude_branches, excluded_paths_starting, place_cbs_by_sconscript ): self._include_thirdparty = include_thirdparty self._exclude_branches = exclude_branches self._excluded_paths_starting = excluded_paths_starting and excluded_paths_starting or [] self._place_cbs_by_sconscript = place_cbs_by_sconscript self._projects = {} base_include = self._exclude_branches and env['base_path'] or env['branch_root'] base = os.path.realpath( base_include ) download = os.path.realpath( env['download_root'] ) thirdparty = env['thirdparty'] and os.path.realpath( env['thirdparty'] ) or None common, tail1, tail2 = cuppa.path.split_common( base, download ) download_under_base = common and not tail1 thirdparty_under_base = None if thirdparty: common, tail1, tail2 = cuppa.path.split_common( base, thirdparty ) thirdparty_under_base = common and not tail1 self._exclude_paths = self._excluded_paths_starting self._build_root = [ env['build_root'] ] if not self._include_thirdparty: if download_under_base: self._exclude_paths.append( env['download_root'] ) if thirdparty and thirdparty_under_base: self._exclude_paths.append( env['thirdparty'] ) self._include_paths = [ 
base_include ] if self._include_thirdparty: if not download_under_base: self._include_paths.append( env['download_root'] ) if thirdparty and not thirdparty_under_base: self._include_paths.append( env['thirdparty'] ) self._ignored_types = ignored_types( env ) cuppa.progress.NotifyProgress.register_callback( None, self.on_progress ) print "cuppa: project-generator (CodeBlocks): Including Paths Under = {}".format( as_notice( env, str( self._include_paths ) ) ) print "cuppa: project-generator (CodeBlocks): Excluding Paths Starting = {}".format( as_notice( env, str( self._exclude_paths ) ) ) def on_progress( self, progress, sconscript, variant, env, target, source ): if progress == 'begin': self.on_sconscript_begin( env, sconscript ) elif progress == 'started': self.on_variant_started( env, sconscript ) elif progress == 'finished': self.on_variant_finished( env, sconscript, target[0], source ) elif progress == 'end': self.on_sconscript_end( env, sconscript ) elif progress =='sconstruct_end': self.on_sconstruct_end( env ) def on_sconscript_begin( self, env, sconscript ): pass def on_variant_started( self, env, sconscript ): project = sconscript toolchain = env['toolchain'].name() variant = env['variant'].name() build_root = env['build_root'] working_dir = env['build_dir'] final_dir_offset = env['final_dir'] self.update( env, project, toolchain, variant, build_root, working_dir, final_dir_offset ) def on_variant_finished( self, env, sconscript, root_node, source ): project = sconscript tree_processor = ProcessNodes( env, self._projects[project]['path'], self._projects[project]['files'], self._include_paths, self._exclude_paths + self._build_root, self._ignored_types ) cuppa.tree.process_tree( root_node, tree_processor, self._exclude_paths ) self._projects[project]['files'] = tree_processor.file_paths() def on_sconscript_end( self, env, sconscript ): self.write( env, sconscript ) def on_sconstruct_end( self, env ): workspace_dir = os.path.join( env['working_dir'], "cbs" ) workspace_path = os.path.join( workspace_dir, "all.workspace" ) if workspace_dir and not os.path.exists( workspace_dir ): os.makedirs( workspace_dir ) print "cuppa: project-generator (CodeBlocks): write workspace [{}]".format( as_notice( env, workspace_path ) ) with open( workspace_path, "w" ) as workspace_file: workspace_file.write( "\n".join( self.create_workspace( self._projects ) ) ) def update( self, env, project, toolchain, variant, build_root, working_dir, final_dir_offset ): print "cuppa: project-generator (CodeBlocks): update project [{}] for [{}, {}]".format( as_notice( env, project ), as_notice( env, toolchain) , as_notice( env, variant ) ) if project not in self._projects: title = os.path.splitext( project )[0] directory, filename = os.path.split( title ) cbs_file_name = filename if cbs_file_name in [ 'sconscript', 'SConscript', 'Sconscript' ]: cbs_file_name = os.path.split( directory )[1] if cbs_file_name == ".": cbs_file_name = os.path.split( os.path.abspath( env['sconscript_dir'] ) )[1] if not cbs_file_name: cbs_file_name = "sconscript" if not self._place_cbs_by_sconscript: directory = env['working_dir'] directory = os.path.join( directory, "cbs") project_file = directory + os.path.sep + cbs_file_name + ".cbp" execution_dir = '' if directory: execution_dir = os.path.relpath( os.getcwd(), directory ) execution_dir = ( os.path.pardir + os.path.sep + os.path.join( execution_dir, os.path.split( os.path.abspath( os.getcwd() ) )[1] ) ) self._projects[project] = {} self._projects[project]['title'] = title 
self._projects[project]['directory'] = directory self._projects[project]['path'] = os.path.join( os.getcwd(), directory ) self._projects[project]['execution_dir'] = execution_dir self._projects[project]['project_file'] = project_file self._projects[project]['working_dir'] = os.path.join( execution_dir, working_dir ) self._projects[project]['final_dir'] = os.path.normpath( os.path.join( self._projects[project]['working_dir'], final_dir_offset ) ) self._projects[project]['variants'] = set() self._projects[project]['toolchains'] = set() self._projects[project]['files'] = set() self._projects[project]['targets'] = {} self._projects[project]['lines_header'] = [] self._projects[project]['lines_footer'] = [] if not self._projects[project]['lines_header']: self._projects[project]['lines_header'] = self.create_header( self._projects[project]['title'], execution_dir ) if not self._projects[project]['lines_footer']: self._projects[project]['lines_footer'] = self.create_footer() self._projects[project]['variants'].add( variant ) self._projects[project]['toolchains'].add( toolchain ) target = "{}-{}".format( toolchain, variant ) test_actions = [ "", "--test" ] for action in test_actions: target_name = target + action if target_name not in self._projects[project]['targets']: self._projects[project]['targets'][target_name] = self.create_target( target_name, project, toolchain, variant, action, self._projects[project]['working_dir'], self._projects[project]['final_dir'] ) def write( self, env, project ): project_file = self._projects[project]['project_file'] directory = self._projects[project]['directory'] print "cuppa: project-generator (CodeBlocks): write [{}] for [{}]".format( as_notice( env, self._projects[project]['project_file'] ), as_notice( env, project ) ) if directory and not os.path.exists( directory ): os.makedirs( directory ) lines = [] lines += self._projects[project]['lines_header'] for target in sorted( self._projects[project]['targets'].itervalues() ): lines += target lines += [ '\t\t' ] for filepath in sorted( self._projects[project]['files'] ): lines += [ '\t\t' ] lines += self._projects[project]['lines_footer'] with open( project_file, "w" ) as cbs_file: cbs_file.write( "\n".join( lines ) ) def create_header( self, project, execution_dir ): lines = [ '\n' '\n' '\t\n' '\t' ] lines += [ '\t\t\n' '\n' ] return lines def create_target( self, target, project, toolchain, variant, test, working_dir, final_dir ): lines = [ '\t\t\t\n' '\t\t\t\t' ] return lines def create_workspace( self, projects ): lines = [ '\n' '\n' '\t' ] for project in projects.itervalues(): project_file = project['project_file'] base_path = project['path'] project_file = os.path.relpath( os.path.abspath( project_file ), base_path ) lines += [ '\t\t' ] lines += [ '\t\n' '\n' ] return lines class ProcessNodes(object): def __init__( self, env, base_path, files, allowed_paths, excluded_paths, ignored_types ): self._env = env self._base_path = base_path self._files = files self._allowed_paths = allowed_paths self._excluded_paths = excluded_paths self._ignored_types = ignored_types def __call__( self, node ): file_path = str(node) for excluded in self._excluded_paths: if file_path.startswith( excluded ): return path, ext = os.path.splitext( file_path ) if ext and ext in self._ignored_types: return for allowed in self._allowed_paths: prefix = os.path.commonprefix( [ os.path.abspath( file_path ), allowed ] ) # print "cuppa: project-generator (CodeBlocks): str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format( # 
as_notice( self._env, str(node) ), # as_notice( self._env, node.path ), # as_notice( self._env, str(allowed) ), # as_notice( self._env, str(prefix) ) # ) if prefix != allowed: return # print "cuppa: project-generator (CodeBlocks): str(file)=[{}], file.path=[{}], allowed=[{}], prefix=[{}]".format( # as_notice( self._env, str(node) ), # as_notice( self._env, node.path ), # as_notice( self._env, str(allowed) ), # as_notice( self._env, str(prefix) ) # ) file_path = os.path.relpath( os.path.abspath( file_path ), self._base_path ) self._files.add( file_path ) return def file_paths( self ): return self._files PK-¹ƒF2¨Ô**cuppa/profiles/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) import cuppa.modules __all__ = cuppa.modules.registration.get_module_list( __file__ ) PK¾®F/5ÙAAcuppa/methods/coverage.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # CoverageMethod #------------------------------------------------------------------------------- from SCons.Script import Flatten import cuppa.progress class CoverageMethod(object): def __init__( self ): pass def __call__( self, env, program, sources, final_dir=None ): if final_dir == None: final_dir = env['abs_final_dir'] emitter, builder = env['toolchain'].coverage_runner( program, final_dir ) env['BUILDERS']['CoverageBuilder'] = env.Builder( action=builder, emitter=emitter ) coverage = env.CoverageBuilder( [], Flatten( [ sources ] ) ) cuppa.progress.NotifyProgress.add( env, coverage ) return coverage @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "Coverage" ) PK-¹ƒF ^/kkcuppa/methods/build_with.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # BuildWithMethod #------------------------------------------------------------------------------- class BuildWithException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class BuildWithMethod: def __init__( self, env ): self.__build_with = env['BUILD_WITH'] def __call__( self, env, build_with ): if isinstance( build_with, basestring ): build_with = [ build_with ] for name in build_with: if name in env['dependencies']: dependency = env['dependencies'][name] if not dependency: raise BuildWithException( "The sconscript [{}] requires the dependency [{}] but it has not been initialised." .format( env['sconscript_file'], name ) ) env.AppendUnique( BUILD_WITH = name ) dependency( env, env['toolchain'], env['variant'].name() ) @classmethod def add_to_env( cls, env ): env.AddMethod( cls( env ), "BuildWith" ) @classmethod def init_env_for_variant( cls, sconscript_exports ): env = sconscript_exports['env'] if env['default_dependencies']: env.BuildWith( env['default_dependencies'] ) PK-¹ƒFTvBûûcuppa/methods/using.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # UseMethod #------------------------------------------------------------------------------- class UseMethod: def __init__( self, dependencies ): self.__dependencies = dependencies def __call__( self, env, dependency ): if dependency in self.__dependencies: return self.__dependencies[ dependency ] return None @classmethod def add_to_env( cls, env ): env.AddMethod( cls( env['dependencies'] ), "Using" ) PK-¹ƒFÅtööcuppa/methods/toolchain.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # ToolchainMethod #------------------------------------------------------------------------------- class ToolchainMethod: def __init__( self, toolchains ): self.__toolchains = toolchains def __call__( self, env, toolchain ): if toolchain and toolchain in self.__toolchains: return self.__toolchains[ toolchain ] @classmethod def add_to_env( cls, env ): env.AddMethod( cls( env['toolchains'] ), "Toolchain" ) PKVxõFOWÖE22(cuppa/methods/relative_recursive_glob.py # Copyright Jamie Allsop 2012-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # RelativeRecursiveGlob #------------------------------------------------------------------------------- import os import fnmatch import re import cuppa.recursive_glob class RecursiveGlobMethod: default = () def __call__( self, env, pattern, start=default, exclude_dirs=default ): if start == self.default: start = env['sconscript_dir'] start = os.path.expanduser( start ) if not os.path.isabs( start ): start = os.path.join( env['sconscript_dir'], start ) if exclude_dirs == self.default: exclude_dirs = [ env['download_root'], env['build_root' ] ] exclude_dirs_regex = None if exclude_dirs: def up_dir( path ): element = next( e for e in path.split(os.path.sep) if e ) return element == ".." exclude_dirs = [ re.escape(d) for d in exclude_dirs if not os.path.isabs(d) and not up_dir(d) ] exclude_dirs = "|".join( exclude_dirs ) exclude_dirs_regex = re.compile( exclude_dirs ) matches = cuppa.recursive_glob.glob( start, pattern, exclude_dirs_pattern=exclude_dirs_regex ) nodes = [ env.File( os.path.relpath( match, env['sconscript_dir'] ) ) for match in matches ] return nodes @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "RecursiveGlob" ) class GlobFilesMethod: def __call__( self, env, pattern ): filenames = [] for filename in os.listdir(env['sconscript_dir']): if fnmatch.fnmatch( filename, pattern): filenames.append( filename ) nodes = [ env.File(f) for f in filenames ] return nodes @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "GlobFiles" ) PK-¹ƒF2¨Ô**cuppa/methods/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) import cuppa.modules __all__ = cuppa.modules.registration.get_module_list( __file__ ) PK-¹ƒFâª}WWcuppa/methods/remove_flags.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # RemoveFlags method #------------------------------------------------------------------------------- class RemoveFlagsMethod: def __init__( self ): pass def _remove_flags( self, remove, flags ): return [f for f in flags if not f.split('=')[0] in remove] def __call__( self, env, flags ): remove = set( f.split('=')[0] for f in flags ) env.Replace( CCFLAGS = self._remove_flags( remove, env['CCFLAGS'] ) ) env.Replace( CXXFLAGS = self._remove_flags( remove, env['CXXFLAGS'] ) ) env.Replace( CFLAGS = self._remove_flags( remove, env['CFLAGS'] ) ) env.Replace( LINKFLAGS = self._remove_flags( remove, env['LINKFLAGS'] ) ) return None @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "RemoveFlags" ) PK-¹ƒFÉ¿Æcuppa/methods/stdcpp.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # StdCpp method #------------------------------------------------------------------------------- from cuppa.colourise import as_error class StdCppMethod: stdcpp_choices = ( "c++98", "c++03", "c++0x", "c++11", "c++1y", "c++14" ) @classmethod def add_options( cls, add_option ): add_option( '--stdcpp', dest='stdcpp', choices=cls.stdcpp_choices, nargs=1, action='store', help="Use this option to override the default language compliance of your cpp compiler which by dafault is the highest compliance available. Value may be one of {}".format( str(cls.stdcpp_choices) ) ) @classmethod def get_options( cls, env ): env['stdcpp'] = env.get_option( 'stdcpp' ) def __init__( self ): pass def __call__( self, env, standard ): if standard not in self.stdcpp_choices: print as_error( env, "cuppa: stdcpp: error: [{}] not in allowed list {}".format( standard, self.stdcpp_choices ) ) return None env[ 'stdcpp' ] = standard toolchain = env['toolchain'] flag = toolchain.stdcpp_flag_for( standard ) env.ReplaceFlags( [ flag ] ) return None @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "StdCpp" ) PKqüF½U(~~cuppa/methods/test.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # TestMethod #------------------------------------------------------------------------------- import cuppa.progress from SCons.Script import Flatten class TestMethod(object): def __init__( self, default_test_runner=None ): self._default_runner = default_test_runner def __call__( self, env, source, final_dir=None, data=None, runner=None, expected='passed' ): if final_dir == None: final_dir = env['abs_final_dir'] if not runner: runner = self._default_runner test_builder, test_emitter = env['toolchain'].test_runner( runner, final_dir, expected ) env['BUILDERS']['TestBuilder'] = env.Builder( action=test_builder, emitter=test_emitter ) sources = source if data: sources = Flatten( [ source, data ] ) test = env.TestBuilder( [], sources ) cuppa.progress.NotifyProgress.add( env, test ) return test @classmethod def add_to_env( cls, env ): env.AddMethod( cls( env['default_runner'] ), "Test" ) test_runners = set() for toolchain in env['active_toolchains']: for test_runner in toolchain.test_runners(): test_runners.add( test_runner ) for test_runner in test_runners: env.AddMethod( cls( test_runner ), "{}Test".format( test_runner.title() ) ) PKqüFAXû  cuppa/methods/build_test.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # BuildTestMethod #------------------------------------------------------------------------------- from SCons.Script import Flatten class BuildTestMethod: def __init__( self, default_test_runner=None ): self._default_runner = default_test_runner def __call__( self, env, target, source, final_dir=None, data=None, append_variant=None, runner=None, expected='passed', **kwargs ): nodes = [] program = env.Build( target, source, final_dir=final_dir, append_variant=append_variant, **kwargs ) nodes.append( program ) if env['variant_actions'].has_key('test') or env['variant_actions'].has_key('cov'): if not runner: runner = self._default_runner test = env.Test( program, final_dir=final_dir, data=data, runner=runner, expected=expected ) nodes.append( test ) if 'cov' in env['variant_actions']: coverage = env.Coverage( program, source, final_dir=final_dir ) nodes.append( coverage ) return Flatten( nodes ) @classmethod def add_to_env( cls, env ): env.AddMethod( cls( env['default_runner'] ), "BuildTest" ) test_runners = set() for toolchain in env['active_toolchains']: for test_runner in toolchain.test_runners(): test_runners.add( test_runner ) for test_runner in test_runners: env.AddMethod( cls( test_runner ), "Build{}Test".format( test_runner.title() ) ) PKDnôF0‰ R**cuppa/methods/compile.py # Copyright Jamie Allsop 2013-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # CompileMethod #------------------------------------------------------------------------------- import cuppa.progress from SCons.Script import Flatten class CompileMethod: def __call__( self, env, source, **kwargs ): sources = Flatten( [ source ] ) objects = [] if 'CPPPATH' in env: env.AppendUnique( INCPATH = env['CPPPATH'] ) for source in sources: objects.append( env.Object( source = source, CPPPATH = env['SYSINCPATH'] + env['INCPATH'], **kwargs ) ) cuppa.progress.NotifyProgress.add( env, objects ) return objects @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "Compile" ) PKz¬ÜFþÕi`66cuppa/methods/build.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # BuildMethod #------------------------------------------------------------------------------- import cuppa.progress import os.path class BuildMethod: @classmethod def build( cls, env, target, source, final_dir = None, append_variant = False, LIBS=[], DYNAMICLIBS=[], STATICLIBS=[], **kwargs ): if final_dir == None: final_dir = env['abs_final_dir'] exe = os.path.join( final_dir, target ) if append_variant and env['variant'] != 'rel': exe += '_' + env['variant'] env.AppendUnique( DYNAMICLIBS = env['LIBS'] ) if 'CPPPATH' in env: env.AppendUnique( INCPATH = env['CPPPATH'] ) all_libs = env['DYNAMICLIBS'] + env['STATICLIBS'] + LIBS + DYNAMICLIBS + STATICLIBS program = env.Program( exe, source, CPPPATH = env['SYSINCPATH'] + env['INCPATH'], LIBS = all_libs, DYNAMICLIBS = env['DYNAMICLIBS'] + LIBS + DYNAMICLIBS, STATICLIBS = env['STATICLIBS'] + STATICLIBS, **kwargs ) cuppa.progress.NotifyProgress.add( env, program ) return program def __call__( self, env, target, source, final_dir = None, append_variant = False, **kwargs ): return self.build( env, target, source, final_dir=final_dir, append_variant=append_variant, **kwargs ) @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "Build" ) PK-¹ƒF"éޝ¯cuppa/methods/replace_flags.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # ReplaceFlags method #------------------------------------------------------------------------------- class ReplaceFlagsMethod: def __init__( self ): pass def __call__( self, env, flags ): env.RemoveFlags( flags ) env.MergeFlags( flags ) return None @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "ReplaceFlags" ) PK-¹ƒFè< ËLLcuppa/methods/create_version.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. 
cuppa/methods/create_version.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   CreateVersionMethod
#-------------------------------------------------------------------------------

class CreateVersionMethod:

    def __init__( self ):
        pass


    def __call__( self, env, target, source, namespaces, version, location ):

        create_version_file_builder = env['toolchain'].version_file_builder( env, namespaces, version, location )
        create_version_file_emitter = env['toolchain'].version_file_emitter( env, namespaces, version, location )

        env.AppendUnique( BUILDERS = {
            'CreateVersionFile' : env.Builder( action=create_version_file_builder, emitter=create_version_file_emitter )
        } )

        return env.CreateVersionFile( target, source )


    @classmethod
    def add_to_env( cls, env ):
        env.AddMethod( cls(), "CreateVersion" )


cuppa/methods/markdown_to_html.py

# Copyright Jamie Allsop 2015-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   MarkdownToHtmlMethod
#-------------------------------------------------------------------------------

import os.path
import itertools

import grip

import cuppa.progress


class GripRunner(object):

    def __call__( self, target, source, env ):
        for s, t in itertools.izip( source, target ):
            in_file  = str(s)
            out_file = str(t)
            try:
                grip.export( path=in_file, render_wide=True, out_filename=out_file )
            except Exception as error:
                print "cuppa: error: grip.export( path={}, render_wide=True, out_filename={}) failed with error [{}]".format( in_file, out_file, error )
        return None


class GripEmitter(object):

    def __init__( self, output_dir ):
        self._output_dir = output_dir


    def __call__( self, target, source, env ):
        target = []
        for s in source:
            path = os.path.join( self._output_dir, os.path.split( str(s) )[1] )
            t = os.path.splitext(path)[0] + ".html"
            target.append(t)
        return target, source


class MarkdownToHtmlMethod(object):

    def __call__( self, env, source, final_dir=None ):

        if final_dir == None:
            final_dir = env['abs_final_dir']

        env.AppendUnique( BUILDERS = {
            'Grip' : env.Builder( action = GripRunner(), emitter = GripEmitter(final_dir) )
        } )

        html = env.Grip( [], source )
        cuppa.progress.NotifyProgress.add( env, html )

        return html


    @classmethod
    def add_to_env( cls, env ):
        env.AddMethod( cls(), "MarkdownToHtml" )
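# ------------------------------------------------------------------------------
# Usage sketch (a hypothetical sconscript, assumptions only): generating a
# version file and rendering documentation with the methods above. The target
# name, namespaces, version string, location argument and markdown file are
# illustrative guesses at sensible values, not cuppa-documented ones.
#
#   Import( 'env' )
#
#   # Emit a version source file using the toolchain's version-file builder:
#   version_cpp = env.CreateVersion( 'version.cpp', [], [ 'myproject' ], '1.2.3', env['abs_build_dir'] )
#
#   # Render README.md to HTML (via grip) into the final directory:
#   env.MarkdownToHtml( [ 'README.md' ] )
# ------------------------------------------------------------------------------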
cuppa/methods/build_library.py

# Copyright Jamie Allsop 2014-2014
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   BuildLibMethods
#-------------------------------------------------------------------------------

import cuppa.progress
import os.path


class BuildStaticLibMethod:

    def __call__( self, env, target, source, final_dir=None, **kwargs ):
        if final_dir == None:
            final_dir = env['abs_final_dir']
        lib = env.StaticLibrary( os.path.join( final_dir, target ), source, **kwargs )
        cuppa.progress.NotifyProgress.add( env, lib )
        return lib


    @classmethod
    def add_to_env( cls, env ):
        env.AddMethod( cls(), "BuildStaticLib" )


class BuildSharedLibMethod:

    def __call__( self, env, target, source, final_dir=None, **kwargs ):
        if final_dir == None:
            final_dir = env['abs_final_dir']
        lib = env.SharedLibrary( os.path.join( final_dir, target ), source, **kwargs )
        cuppa.progress.NotifyProgress.add( env, lib )
        return lib


    @classmethod
    def add_to_env( cls, env ):
        env.AddMethod( cls(), "BuildSharedLib" )


cuppa/methods/build_profile.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   BuildProfileMethod
#-------------------------------------------------------------------------------

import cuppa.utility


class BuildProfileMethod:

    def __init__( self, env ):
        self.__build_profile = env['BUILD_PROFILE']


    def __call__( self, env, build_profile ):

        for profile in build_profile:

            if cuppa.utility.is_string( profile ):
                name = profile
                if name in env['profiles']:
                    profile = env['profiles'][name]
            else:
                name = str( profile )

            env.AppendUnique( BUILD_PROFILE = name )
            profile( env, env['toolchain'], env['variant'].name() )


    @classmethod
    def add_to_env( cls, env ):
        env.AddMethod( cls( env ), "BuildProfile" )


    @classmethod
    def init_env_for_variant( cls, sconscript_exports ):
        env = sconscript_exports['env']
        if env['default_profiles']:
            env.BuildProfile( env['default_profiles'] )


cuppa/variants/dbg.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   Dbg
#-------------------------------------------------------------------------------

class Dbg:

    @classmethod
    def name( cls ):
        return cls.__name__.lower()


    @classmethod
    def add_options( cls, add_option ):
        add_option( '--dbg', dest=cls.name(), action='store_true',
                    help='Build a debug binary' )


    @classmethod
    def add_to_env( cls, env, add_variant, add_action ):
        add_variant( cls.name(), cls() )


    @classmethod
    def create( cls, env, toolchain ):
        toolchain.update_variant( env, cls.name() )
        return env


cuppa/variants/__init__.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

import cuppa.modules

__all__ = cuppa.modules.registration.get_module_list( __file__ )
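# ------------------------------------------------------------------------------
# Usage sketch (a hypothetical sconscript, assumptions only): building libraries
# and applying a named build profile with the methods above. Library, source and
# profile names are illustrative.
#
#   Import( 'env' )
#
#   # Build static and shared variants of a library into the final directory:
#   static_lib = env.BuildStaticLib( 'mylib', [ 'lib/a.cpp', 'lib/b.cpp' ] )
#   shared_lib = env.BuildSharedLib( 'mylib', [ 'lib/a.cpp', 'lib/b.cpp' ] )
#
#   # Apply a profile registered in env['profiles'] by name, or pass a callable:
#   env.BuildProfile( [ 'my_profile' ] )
# ------------------------------------------------------------------------------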
cuppa/variants/cov.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   Cov
#-------------------------------------------------------------------------------

class Cov:

    @classmethod
    def name( cls ):
        return cls.__name__.lower()


    @classmethod
    def add_options( cls, add_option ):
        add_option( '--cov', dest=cls.name(), action='store_true',
                    help='Build an instrumented binary' )


    @classmethod
    def add_to_env( cls, env, add_variant, add_action ):
        add_variant( cls.name(), cls() )
        add_action( cls.name(), cls() )


    @classmethod
    def create( cls, env, toolchain ):
        toolchain.update_variant( env, cls.name() )
        return env


cuppa/variants/test.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   Test
#-------------------------------------------------------------------------------

class Test:

    @classmethod
    def name( cls ):
        return cls.__name__.lower()


    @classmethod
    def add_options( cls, add_option ):
        add_option( '--test', dest=cls.name(), action='store_true',
                    help='Run the binary as a test' )


    @classmethod
    def add_to_env( cls, env, add_variant, add_action ):
        add_action( cls.name(), cls() )


cuppa/variants/rel.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

#-------------------------------------------------------------------------------
#   Rel
#-------------------------------------------------------------------------------

class Rel:

    @classmethod
    def name( cls ):
        return cls.__name__.lower()


    @classmethod
    def add_options( cls, add_option ):
        add_option( '--rel', dest=cls.name(), action='store_true',
                    help='Build a release (optimised) binary' )


    @classmethod
    def add_to_env( cls, env, add_variant, add_action ):
        add_variant( cls.name(), cls() )


    @classmethod
    def create( cls, env, toolchain ):
        toolchain.update_variant( env, cls.name() )
        return env


cuppa/dependencies/__init__.py

# Copyright Jamie Allsop 2011-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

import cuppa.modules

__all__ = cuppa.modules.registration.get_module_list( __file__ )


cuppa/dependencies/build_with_qt4.py

# Copyright Jamie Allsop 2015-2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Qt4 #------------------------------------------------------------------------------- import subprocess import shlex # Cuppa Imports import cuppa.location import cuppa.output_processor import cuppa.build_platform from cuppa.colourise import as_info, as_warning import SCons.Script class Qt4Exception(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class build_with_qt4(object): _name = "qt4" @classmethod def add_options( cls, add_option ): pass @classmethod def add_to_env( cls, env, add_dependency ): try: add_dependency( cls._name, cls( env ) ) except Qt4Exception: print as_warning( env, "cuppa: warning: Could not create dependency [{}]. Dependency not available.".format( cls._name ) ) def __init__( self, env ): url = "hg+https://bitbucket.org/dirkbaechle/scons_qt4" try: self._location = cuppa.location.Location( env, url, extra_sub_path = "qt4" ) except cuppa.location.LocationException: print as_warning( env, "cuppa: qt4: warning: Could not retrieve url [{}]".format( url ) ) raise Qt4Exception( "Could not retrieve scons_qt4 from [{}]".format( url ) ) self._version = "4" if cuppa.build_platform.name() in ["Darwin", "Linux"]: if not cuppa.output_processor.command_available( "pkg-config" ): return if 'QT4DIR' not in env: self._set_qt4_dir( env ) self._version = self._get_qt4_version() def _set_qt4_dir( self, env ): command = "pkg-config --cflags QtCore" try: cflags = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() if cflags: flags = env.ParseFlags( cflags ) if 'CPPPATH' in flags: shortest_path = flags['CPPPATH'][0] for include in flags['CPPPATH']: if len(include) < len(shortest_path): shortest_path = include env['QT4DIR'] = shortest_path print "cuppa: qt4: Q4DIR detected as [{}]".format( as_info( env, env['QT4DIR'] ) ) except: #TODO: Warning? pass def _get_qt4_version( self ): command = "pkg-config --modversion QtCore" try: return subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() except: #TODO: Warning? return None def __call__( self, env, toolchain, variant ): SCons.Script.Tool( 'qt4', toolpath=[ self._location.base_local() ] )( env ) if cuppa.build_platform.name() in ["Darwin", "Linux"]: env.MergeFlags("-fPIC") def name( self ): return self._name def version( self ): return self._version def repository( self ): return "N/A" def branch( self ): return "N/A" def revisions( self ): return [] PK´¼êF+Ó Ëù(ù('cuppa/dependencies/build_with_quince.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Quince #------------------------------------------------------------------------------- import os.path import subprocess import shlex # Cuppa Imports import cuppa.location import cuppa.output_processor class QuinceLibraryMethod(object): def __init__( self, location, src_path ): self._location = location self._src_path = src_path def __call__( self, env, linktype ): build_dir = os.path.join( self._location, env['build_dir'] ) final_dir = os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) env.BuildWith( 'boost' ) objects = [] for source in env.RecursiveGlob( "*.cpp", start=self._src_path, exclude_dirs=[ env['build_dir'] ] ): rel_path = os.path.relpath( str(source), self._location ) obj_path = os.path.join( build_dir, os.path.splitext( rel_path )[0] ) +env['OBJSUFFIX'] objects.append( env.Object( obj_path, source ) ) if linktype == "static": return env.BuildStaticLib( "quince", objects, final_dir = final_dir ) else: shared_lib = env.BuildSharedLib( "quince", objects, final_dir = final_dir ) return env.Install( env['abs_final_dir'], shared_lib ) class build_with_quince(object): _name = "quince" @classmethod def add_options( cls, add_option ): location_name = cls._name + "-location" add_option( '--' + location_name, dest=location_name, type='string', nargs=1, action='store', help = cls._name + ' location to build against' ) @classmethod def add_to_env( cls, env, add_dependency ): location = env.get_option( cls._name + "-location" ) if not location: print "No location specified for dependency [{}]. Dependency not available.".format( cls._name.title() ) add_dependency( cls._name, cls( env, location ) ) def __init__( self, env, location ): self._location = cuppa.location.Location( env, location ) self._includes = [ os.path.join( self._location.local(), "include" ) ] self._src_path = os.path.join( self._location.local(), "src" ) env.AddMethod( QuinceLibraryMethod( self._location.local(), self._src_path ), "QuinceLibrary" ) def __call__( self, env, toolchain, variant ): env.AppendUnique( INCPATH = self._includes ) env.AppendUnique( STATICLIBS = [ env.QuinceLibrary( 'static' ), env.BoostStaticLibs( [ 'filesystem', 'system', 'thread', ] ), ] ) def name( self ): return self._name def version( self ): return str(self._location.version()) def repository( self ): return self._location.repository() def branch( self ): return self._location.branch() def revisions( self ): return self._location.revisions() class QuincePostgresqlLibraryMethod(object): def __init__( self, location, src_path ): self._location = location self._src_path = src_path def __call__( self, env, linktype ): build_dir = os.path.join( self._location, env['build_dir'] ) final_dir = os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) env.BuildWith( 'boost' ) objects = [] for source in env.RecursiveGlob( "*.cpp", start=self._src_path, exclude_dirs=[ env['build_dir'] ] ): rel_path = os.path.relpath( str(source), self._location ) obj_path = os.path.join( build_dir, os.path.splitext( rel_path )[0] ) +env['OBJSUFFIX'] objects.append( env.Object( obj_path, source ) ) if linktype == "static": return env.BuildStaticLib( "quince-postgresql", objects, final_dir = final_dir ) else: shared_lib = env.BuildSharedLib( "quince-postgresql", objects, final_dir = final_dir ) return env.Install( env['abs_final_dir'], shared_lib ) class quince_postgresql(object): 
_name = "quince-postgresql" @classmethod def add_options( cls, add_option ): location_name = cls._name + "-location" add_option( '--' + location_name, dest=location_name, type='string', nargs=1, action='store', help = cls._name + ' location to build against' ) @classmethod def add_to_env( cls, env, add_dependency ): location = env.get_option( cls._name + "-location" ) if not location: print "No location specified for dependency [{}]. Dependency not available.".format( cls._name.title() ) add_dependency( cls._name, cls( env, location ) ) def __init__( self, env, location ): self._location = cuppa.location.Location( env, location ) self._flags = {} self._flags['INCPATH'] = [ os.path.join( self._location.local(), "include" ) ] if cuppa.output_processor.command_available( "pg_config"): command = "pg_config --includedir" libpq_include = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() self._flags['INCPATH'].append( libpq_include ) command = "pg_config --libdir" libpq_libpath = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() self._flags['LIBPATH'] = [ libpq_libpath ] self._flags['DYNAMICLIBS'] = [ 'pq' ] self._src_path = os.path.join( self._location.local(), "src" ) env.AddMethod( QuincePostgresqlLibraryMethod( self._location.local(), self._src_path ), "QuincePostgresqlLibrary" ) def __call__( self, env, toolchain, variant ): env.AppendUnique( INCPATH = self._flags['INCPATH'] ) env.AppendUnique( LIBPATH = self._flags['LIBPATH'] ) env.AppendUnique( DYNAMICLIBS = self._flags['DYNAMICLIBS'] ) quince_postgresql_lib = env.QuincePostgresqlLibrary('static') quince_lib = env.QuinceLibrary('static') env.Append( STATICLIBS = [ quince_postgresql_lib, quince_lib, env.BoostStaticLibs( [ 'date_time' ] ), ] ) def name( self ): return self._name def version( self ): return str(self._location.version()) def repository( self ): return self._location.repository() def branch( self ): return self._location.branch() def revisions( self ): return self._location.revisions() class QuinceSqliteLibraryMethod(object): def __init__( self, location, src_path ): self._location = location self._src_path = src_path def __call__( self, env, linktype ): build_dir = os.path.join( self._location, env['build_dir'] ) final_dir = os.path.normpath( os.path.join( build_dir, env['final_dir'] ) ) env.BuildWith( 'boost' ) objects = [] for source in env.RecursiveGlob( "*.cpp", start=self._src_path, exclude_dirs=[ env['build_dir'] ] ): rel_path = os.path.relpath( str(source), self._location ) obj_path = os.path.join( build_dir, os.path.splitext( rel_path )[0] ) +env['OBJSUFFIX'] objects.append( env.Object( obj_path, source ) ) if linktype == "static": return env.BuildStaticLib( "quince-sqlite", objects, final_dir = final_dir ) else: shared_lib = env.BuildSharedLib( "quince-sqlite", objects, final_dir = final_dir ) return env.Install( env['abs_final_dir'], shared_lib ) class quince_sqlite(object): _name = "quince-sqlite" @classmethod def add_options( cls, add_option ): location_name = cls._name + "-location" add_option( '--' + location_name, dest=location_name, type='string', nargs=1, action='store', help = cls._name + ' location to build against' ) @classmethod def add_to_env( cls, env, add_dependency ): location = env.get_option( cls._name + "-location" ) if not location: print "No location specified for dependency [{}]. 
Dependency not available.".format( cls._name.title() ) add_dependency( cls._name, cls( env, location ) ) def __init__( self, env, location ): self._location = cuppa.location.Location( env, location ) self._flags = {} if cuppa.output_processor.command_available( "pkg-config"): command = "pkg-config --cflags --libs sqlite3" cflags = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() self._flags = env.ParseFlags( cflags ) if 'CPPPATH' in self._flags: self._flags['SYSINCPATH'] = self._flags['CPPPATH'] del self._flags['CPPPATH'] if 'LIBS' in self._flags: self._flags['DYNAMICLIBS'] = self._flags['LIBS'] del self._flags['LIBS'] if not 'INCPATH' in self._flags: self._flags['INCPATH'] = [] self._flags['INCPATH'].append( os.path.join( self._location.local(), "include" ) ) self._src_path = os.path.join( self._location.local(), "src" ) env.AddMethod( QuinceSqliteLibraryMethod( self._location.local(), self._src_path ), "QuinceSqliteLibrary" ) def __call__( self, env, toolchain, variant ): for name, flags in self._flags.iteritems(): if flags: env.AppendUnique( **{ name: flags } ) quince_sqlite_lib = env.QuinceSqliteLibrary('static') quince_lib = env.QuinceLibrary('static') env.Append( STATICLIBS = [ quince_sqlite_lib, quince_lib, env.BoostStaticLibs( [ 'date_time', 'filesystem', ] ), ] ) def name( self ): return self._name def version( self ): return str(self._location.version()) def repository( self ): return self._location.repository() def branch( self ): return self._location.branch() def revisions( self ): return self._location.revisions() PK\d+)', line ) if match: int_version = int(match.group('version')) major = int_version/100000 minor = int_version/100%1000 patch = int_version%100 full_version = "{}.{}.{}".format( major, minor, patch ) short_version = "{}_{}".format( major, minor ) numeric_version = float(major) + float(minor)/100 return full_version, short_version, numeric_version raise BoostException("Could not determine BoostVersion") def set_home_if_exists( self, path ): if os.path.exists( path ) and os.path.isdir( path ): self.values['home'] = path return True return False def location_from_boost_version( self, location ): if location == "latest" or location == "current": location = determine_latest_boost_verion( self._env ) if location: match = re.match( r'(boost_)?(?P\d[._]\d\d(?P[._]\d)?)', location ) if match: version = match.group('version') if not match.group('minor'): version += "_0" print "cuppa: boost version specified as a location, attempt to download it from SourceForge" extension = ".tar.gz" if cuppa.build_platform.name() == "Windows": extension = ".zip" return "http://sourceforge.net/projects/boost/files/boost/{numeric_version}/boost_{string_version}{extension}/download".format( numeric_version = version.translate( string.maketrans( '._', '..' 
) ), string_version = version.translate( string.maketrans( '._', '__' ) ), extension = extension ) return location def patched_boost_test( self, home ): patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" ) return os.path.exists( patch_applied_path ) def apply_patch_if_needed( self, env, home ): patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" ) diff_file = "boost_test_patch.diff" if os.path.exists( patch_applied_path ): print "cuppa: boost: [{}] already applied".format( as_info( env, diff_file ) ) return diff_path = os.path.join( os.path.split( __file__ )[0], "boost", diff_file ) command = "patch --batch -p1 --input={}".format( diff_path ) print "cuppa: boost: info: Applying [{}] using [{}] in [{}]".format( as_info( env, diff_file ), as_info( env, command ), as_info( env, home ) ) if subprocess.call( shlex.split( command ), cwd=home ) != 0: print as_error( env, "cuppa: boost: error: Could not apply [{}]".format( diff_file ) ) with open( patch_applied_path, "w" ) as patch_applied_file: pass def __init__( self, env, platform, base=None, location=None, version=None, patch_test=False ): print "cuppa: boost: identify boost using base = [{}], location = [{}] and version = [{}]".format( as_info( env, str(base) ), as_info( env, str(location) ), as_info( env, str(version) ) ) if not base and not version and not location: raise BoostException("Cannot construct Boost Object. Invalid parameters") self._env = env self.values = {} self.values['name'] = 'boost' extra_sub_path = 'clean' if patch_test: extra_sub_path = 'patched' if location: location = self.location_from_boost_version( location ) if not location: # use version as a fallback in case both at specified location = self.location_from_boost_version( version ) self._location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path ) elif base: # Find boost locally if not os.path.isabs( base ): base = os.path.abspath( base ) if not version: self.values['home'] = base elif version: search_list = [ os.path.join( base, 'boost', version, 'source' ), os.path.join( base, 'boost', 'boost_' + version ), os.path.join( base, 'boost', version ), os.path.join( base, 'boost_' + version ), ] def exists_in( locations ): for location in locations: if self.set_home_if_exists( location ): return True return False if not exists_in( search_list ): raise BoostException("Cannot construct Boost Object. Home for Version [{}] cannot be found. Seached in [{}]".format(version, str([l for l in search_list]))) else: raise BoostException("Cannot construct Boost Object. 
No Home or Version specified") print "cuppa: boost: using boost found at [{}]".format( as_info( env, self.values['home'] ) ) self._location = cuppa.location.Location( env, self.values['home'], extra_sub_path=extra_sub_path ) else: location = self.location_from_boost_version( version ) self._location = cuppa.location.Location( env, location, extra_sub_path=extra_sub_path ) self.values['home'] = self._location.local() if patch_test: self.apply_patch_if_needed( env, self.values['home'] ) self._patched_test = self.patched_boost_test( self.values['home'] ) self.values['full_version'], self.values['version'], self.values['numeric_version'] = self.get_boost_version( self.values['home'] ) self.values['revisions'] = self._location.revisions() self.values['include'] = [ self.values['home'] ] self.values['lib_base'] = os.path.join( self.values['home'], 'build' ) self.values['location'] = self.values['home'] if self.values['numeric_version'] > 1.39: self.values['library_mt_tag'] = '' else: self.values['library_mt_tag'] = '-' + platform['toolchain_tag'] + '-mt' self.values['defines'] = [ 'BOOST_PARAMETER_MAX_ARITY=20', 'BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG' ] def name( self ): return self.values['name'] def version( self ): return self.values['version'] def repository( self ): return self._location.repository() def branch( self ): return self._location.branch() def revisions( self ): return self._location.revisions() def local( self ): return self._location.local() def __call__( self, env, toolchain, variant ): env.AppendUnique( SYSINCPATH = self.values['include'] ) env.AppendUnique( CPPDEFINES = self.values['defines'] ) def numeric_version( self ): return self.values['numeric_version'] def full_version( self ): return self.values['full_version'] def lib( self, library ): return 'boost_' + library + self.values['library_mt_tag'] class BoostStaticLibraryMethod(object): def __init__( self, add_dependents=False, build_always=False, verbose_build=False, verbose_config=False ): self._add_dependents = add_dependents self._build_always = build_always self._verbose_build = verbose_build self._verbose_config = verbose_config def __call__( self, env, libraries ): if not self._add_dependents: print as_warning( env, "cuppa: boost: warning: BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead" ) libraries = Flatten( [ libraries ] ) if not 'boost' in env['BUILD_WITH']: env.BuildWith( 'boost' ) Boost = env['dependencies']['boost'] library = BoostLibraryBuilder( Boost, add_dependents = self._add_dependents, verbose_build = self._verbose_build, verbose_config = self._verbose_config )( env, None, None, libraries, 'static' ) if self._build_always: return AlwaysBuild( library ) else: return library class BoostSharedLibraryMethod(object): def __init__( self, add_dependents=False, build_always=False, verbose_build=False, verbose_config=False ): self._add_dependents = add_dependents self._build_always = build_always self._verbose_build = verbose_build self._verbose_config = verbose_config def __call__( self, env, libraries ): if not self._add_dependents: print as_warning( env, "cuppa: boost: warning: BoostSharedLibrary() is deprecated, use BoostSharedLibs() or BoostSharedLib() instead" ) libraries = Flatten( [ libraries ] ) if not 'boost' in env['BUILD_WITH']: env.BuildWith( 'boost' ) Boost = env['dependencies']['boost'] for library in libraries: if library.startswith('log'): env.AppendUnique( CPPDEFINES = 'BOOST_LOG_DYN_LINK' ) elif library == 'chrono': env.AppendUnique( CPPDEFINES = 
'BOOST_CHRONO_DYN_LINK' ) elif library == 'filesystem': env.AppendUnique( CPPDEFINES = 'BOOST_FILESYSTEM_DYN_LINK' ) elif library == 'date_time': env.AppendUnique( CPPDEFINES = 'BOOST_DATE_TIME_DYN_LINK' ) elif library == 'system': env.AppendUnique( CPPDEFINES = 'BOOST_SYSTEM_DYN_LINK' ) library = BoostLibraryBuilder( Boost, add_dependents = self._add_dependents, verbose_build = self._verbose_build, verbose_config = self._verbose_config )( env, None, None, libraries, 'shared' ) if self._build_always: return AlwaysBuild( library ) else: return library class ProcessBjamBuild(object): def __call__( self, line ): match = re.search( r'\[COMPILE\] ([\S]+)', line ) if match: self.bjam_exe_path = match.expand( r'\1' ) return line def exe_path( self ): return self.bjam_exe_path class BuildBjam(object): def __init__( self, boost ): self._location = boost.local() self._version = boost.numeric_version() def __call__( self, target, source, env ): build_script_path = self._location + '/tools/build' if self._version < 1.47: build_script_path += '/src/v2/engine' elif self._version > 1.55: build_script_path += '/src/engine' else: build_script_path += '/v2/engine' ## TODO: change build script depending on platform bjam_build_script = './build.sh' print 'Execute ' + bjam_build_script + ' from ' + build_script_path process_bjam_build = ProcessBjamBuild() try: IncrementalSubProcess.Popen( process_bjam_build, [ bjam_build_script ], cwd=build_script_path ) bjam_exe_path = process_bjam_build.exe_path() if not bjam_exe_path: print "Could not determine bjam exe path" return 1 bjam_binary_path = build_script_path + '/' + bjam_exe_path shutil.copy( bjam_binary_path, target[0].path ) except OSError as error: print 'Error building bjam [' + str( error.args ) + ']' return 1 return None def toolset_name_from_toolchain( toolchain ): toolset_name = toolchain.family() if cuppa.build_platform.name() == "Darwin": if toolset_name == "gcc": toolset_name = "darwin" elif toolset_name == "clang": toolset_name = "clang-darwin" return toolset_name def toolset_from_toolchain( toolchain ): toolset_name = toolset_name_from_toolchain( toolchain ) if toolset_name == "clang-darwin": return toolset_name return toolset_name + '-' + toolchain.cxx_version() def build_with_library_name( library ): return library == 'log_setup' and 'log' or library def variant_name( variant ): if variant == 'dbg': return 'debug' else: return 'release' def directory_from_abi_flag( abi_flag ): flag, value = abi_flag.split('=') if value: return value return abi_flag def stage_directory( toolchain, variant, abi_flag ): build_base = "build" abi_dir = directory_from_abi_flag( abi_flag ) if abi_dir: build_base += "." 
+ abi_dir return os.path.join( build_base, toolchain.name(), variant ) def boost_dependency_order(): return [ 'graph', 'regex', 'coroutine', 'context', 'log_setup', 'log', 'date_time', 'filesystem', 'test', 'timer', 'chrono', 'system', 'thread' ] def boost_dependency_set(): return set( boost_dependency_order() ) def boost_libraries_with_no_dependencies(): return set( [ 'context', 'date_time', 'exception', 'graph_parallel', 'iostreams', 'locale', 'math', 'mpi', 'program_options', 'python', 'random', 'regex', 'serialization', 'signals', 'system', 'thread', 'wave' ] ) def add_dependent_libraries( boost, linktype, libraries ): version = boost.numeric_version() patched_test = boost._patched_test required_libraries = set( libraries ) for library in libraries: if library in boost_libraries_with_no_dependencies(): continue elif library == 'chrono': required_libraries.update( ['system'] ) elif library == 'coroutine': required_libraries.update( ['context', 'system'] ) if version > 1.55: required_libraries.update( ['thread'] ) if linktype == 'shared': required_libraries.update( ['chrono'] ) elif library == 'filesystem': required_libraries.update( ['system'] ) elif library == 'graph': required_libraries.update( ['regex'] ) elif library == 'log': required_libraries.update( ['date_time', 'filesystem', 'system', 'thread'] ) elif library == 'log_setup': required_libraries.update( ['log', 'date_time', 'filesystem', 'system', 'thread'] ) elif library == 'test' and patched_test: required_libraries.update( ['timer, chrono'] ) elif library == 'timer': required_libraries.update( ['chrono'] ) libraries = [] for library in boost_dependency_order(): if library in required_libraries: libraries.append( library ) for library in required_libraries: if library not in boost_dependency_set(): libraries.append( library ) return libraries def lazy_update_library_list( env, emitting, libraries, built_libraries, add_dependents, linktype, boost ): if add_dependents: if not emitting: libraries = set( build_with_library_name(l) for l in add_dependent_libraries( boost, linktype, libraries ) ) else: libraries = add_dependent_libraries( boost, linktype, libraries ) # Use the sconscript_file + build_dir to identify this instance of the environment variant_instance = env['sconscript_file'] + env['build_dir'] if not variant_instance in built_libraries: built_libraries[ variant_instance ] = set( libraries ) else: libraries = [ l for l in libraries if l not in built_libraries[ variant_instance ] ] return libraries class BoostLibraryAction(object): _built_libraries = {} def __init__( self, env, libraries, add_dependents, linktype, boost, verbose_build, verbose_config ): self._env = env self._libraries = lazy_update_library_list( env, False, libraries, self._built_libraries, add_dependents, linktype, boost ) self._location = boost.local() self._version = boost.numeric_version() self._full_version = boost.full_version() self._verbose_build = verbose_build self._verbose_config = verbose_config self._linktype = linktype self._variant = variant_name( self._env['variant'].name() ) self._toolchain = env['toolchain'] self._job_count = env['job_count'] self._parallel = env['parallel'] def _toolset_name_from_toolchain( self, toolchain ): toolset_name = toolchain.family() if cuppa.build_platform.name() == "Darwin": if toolset_name == "gcc": toolset_name = "darwin" elif toolset_name == "clang": toolset_name = "clang-darwin" return toolset_name def _toolset_from_toolchain( self, toolchain ): toolset_name = toolset_name_from_toolchain( toolchain ) if 
toolset_name == "clang-darwin": return toolset_name return toolset_name + '-' + toolchain.version() def _build_command( self, env, toolchain, libraries, variant, linktype, stage_dir ): verbose = "" if self._verbose_build: verbose += " -d+2" if self._verbose_config: verbose += " --debug-configuration" jobs = "1" if self._job_count >= 2 and self._parallel: if len(libraries)>4: jobs = str( self._job_count - 1 ) else: jobs = str( self._job_count/4 + 1 ) with_libraries = "" for library in libraries: with_libraries += " --with-" + library build_flags = "" abi_flag = toolchain.abi_flag(env) if abi_flag: build_flags = 'cxxflags="' + abi_flag + '"' build_flags += ' define="BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG"' if linktype == 'shared': build_flags += ' define="BOOST_ALL_DYN_LINK"' build_dir = "bin." + directory_from_abi_flag( abi_flag ) command_line = "./bjam{verbose} -j {jobs}{with_libraries} toolset={toolset} variant={variant} {build_flags} link={linktype} --build-dir=./{build_dir} stage --stagedir=./{stage_dir}".format( verbose = verbose, jobs = jobs, with_libraries = with_libraries, toolset = toolset_from_toolchain( toolchain ), variant = variant, build_flags = build_flags, linktype = linktype, build_dir = build_dir, stage_dir = stage_dir ) print command_line return shlex.split( command_line ) def __call__( self, target, source, env ): if not self._libraries: return None stage_dir = stage_directory( self._toolchain, self._variant, self._toolchain.abi_flag(env) ) args = self._build_command( env, self._toolchain, self._libraries, self._variant, self._linktype, stage_dir ) processor = BjamOutputProcessor( env, self._verbose_build, self._verbose_config, self._toolset_name_from_toolchain( self._toolchain ) ) returncode = IncrementalSubProcess.Popen( processor, args, cwd=self._location ) summary = processor.summary( returncode ) if summary: print summary if returncode: return returncode return None class BjamOutputProcessor(object): def __init__( self, env, verbose_build, verbose_config, toolset_name ): self._verbose_build = verbose_build self._verbose_config = verbose_config self._toolset_filter = toolset_name + '.' self._colouriser = env['colouriser'] self._minimal_output = not self._verbose_build ignore_duplicates = not self._verbose_build self._toolchain_processor = ToolchainProcessor( self._colouriser, env['toolchain'], self._minimal_output, ignore_duplicates ) def __call__( self, line ): if line.startswith( self._toolset_filter ): return line elif not self._verbose_config: if( line.startswith( "Performing configuration" ) or line.startswith( "Component configuration" ) or line.startswith( " - " ) ): return None return self._toolchain_processor( line ) def summary( self, returncode ): summary = self._toolchain_processor.summary( returncode ) if returncode and not self._verbose_build: summary += "\nTry running with {} for more details".format( self._colouriser.emphasise( '--boost-verbose-build' ) ) return summary def static_library_name( env, library ): return env.subst('$LIBPREFIX') + 'boost_' + library + env.subst('$LIBSUFFIX') def shared_library_name( env, library, full_version ): if cuppa.build_platform.name() == "Darwin": return env.subst('$SHLIBPREFIX') + 'boost_' + library + env.subst('$SHLIBSUFFIX') else: return env.subst('$SHLIBPREFIX') + 'boost_' + library + env.subst('$SHLIBSUFFIX') + '.' 
+ full_version class BoostLibraryEmitter(object): _built_libraries = {} def __init__( self, env, libraries, add_dependents, linktype, boost ): self._env = env self._libraries = lazy_update_library_list( env, True, libraries, self._built_libraries, add_dependents, linktype, boost ) self._location = boost.local() self._linktype = linktype self._version = boost.numeric_version() self._full_version = boost.full_version() self._variant = variant_name( self._env['variant'].name() ) self._toolchain = env['toolchain'] def __call__( self, target, source, env ): stage_dir = stage_directory( self._toolchain, self._variant, self._toolchain.abi_flag(env) ) for library in self._libraries: filename = None if self._linktype == 'static': filename = static_library_name( env, library ) else: filename = shared_library_name( env, library, self._full_version ) built_library_path = os.path.join( self._location, stage_dir, 'lib', filename ) node = File( built_library_path ) target.append( node ) return target, source class WriteToolsetConfigJam(object): def _update_project_config_jam( self, project_config_path, current_toolset, toolset_config_line ): config_added = False changed = False temp_path = os.path.splitext( project_config_path )[0] + ".new_jam" if not os.path.exists( project_config_path ): with open( project_config_path, 'w' ) as project_config_jam: project_config_jam.write( "# File created by cuppa:boost\n" ) with open( project_config_path ) as project_config_jam: with open( temp_path, 'w' ) as temp_file: for line in project_config_jam.readlines(): if line.startswith( current_toolset ): if line != toolset_config_line: temp_file.write( toolset_config_line ) changed = True config_added = True else: temp_file.write( line ) if not config_added: temp_file.write( toolset_config_line ) changed = True if changed: os.remove( project_config_path ) shutil.move( temp_path, project_config_path ) else: os.remove( temp_path ) def __call__( self, target, source, env ): path = str(target[0]) if not os.path.exists( path ): toolchain = env['toolchain'] current_toolset = "using {} : {} :".format( toolset_name_from_toolchain( toolchain ), toolchain.cxx_version() ) toolset_config_line = "{} {} ;\n".format( current_toolset, toolchain.binary() ) with open( path, 'w' ) as toolchain_config: print "cuppa: boost: adding toolset config [{}] to dummy toolset config".format( str(toolset_config_line.strip()) ) toolchain_config.write( toolset_config_line ) self._update_project_config_jam( os.path.join( os.path.split( path )[0], "project-config.jam" ), current_toolset, toolset_config_line ) return None class BoostLibraryBuilder(object): _library_targets = {} def __init__( self, boost, add_dependents, verbose_build, verbose_config ): self._boost = boost self._add_dependents = add_dependents self._verbose_build = verbose_build self._verbose_config = verbose_config def __call__( self, env, target, source, libraries, linktype ): library_action = BoostLibraryAction ( env, libraries, self._add_dependents, linktype, self._boost, self._verbose_build, self._verbose_config ) library_emitter = BoostLibraryEmitter( env, libraries, self._add_dependents, linktype, self._boost ) env.AppendUnique( BUILDERS = { 'BoostLibraryBuilder' : env.Builder( action=library_action, emitter=library_emitter ) } ) bjam_target = os.path.join( self._boost.local(), 'bjam' ) bjam = env.Command( bjam_target, [], BuildBjam( self._boost ) ) env.NoClean( bjam ) built_libraries = env.BoostLibraryBuilder( target, source ) prefix = env.subst('$LIBPREFIX') if linktype == 
'shared': prefix = env.subst('$SHLIBPREFIX') built_library_map = {} for library in built_libraries: name = os.path.splitext( os.path.split( str(library) )[1] )[0].replace( prefix + "boost_", "" ) built_library_map[name] = library variant_instance = env['sconscript_file'] + env['build_dir'] if not variant_instance in self._library_targets: self._library_targets[ variant_instance ] = {} required_libraries = add_dependent_libraries( self._boost, linktype, libraries ) for library in required_libraries: if library in self._library_targets[ variant_instance ]: if library not in built_library_map: env.Depends( built_libraries, self._library_targets[ variant_instance ][library] ) else: self._library_targets[ variant_instance ][library] = built_library_map[library] installed_libraries = [] if not built_libraries: return installed_libraries env.Requires( built_libraries, bjam ) if cuppa.build_platform.name() == "Linux": toolset_target = os.path.join( self._boost.local(), env['toolchain'].name() + "._jam" ) toolset_config_jam = env.Command( toolset_target, [], WriteToolsetConfigJam() ) project_config_target = os.path.join( self._boost.local(), "project-config.jam" ) if not os.path.exists( project_config_target ): project_config_jam = env.Requires( project_config_target, env.AlwaysBuild( toolset_config_jam ) ) env.Requires( built_libraries, project_config_jam ) env.Requires( built_libraries, toolset_config_jam ) install_dir = env['abs_build_dir'] library_path = os.path.split( str(built_libraries[0]) )[1] if linktype == 'shared': install_dir = os.path.split( os.path.join( env['abs_final_dir'], library_path ) )[0] for library in required_libraries: installed_libraries.append( env.Install( install_dir, self._library_targets[ variant_instance ][library] ) ) return installed_libraries PKàmôFhµð[ [ $cuppa/dependencies/build_with_qt5.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Qt5 #------------------------------------------------------------------------------- import subprocess import shlex # Cuppa Imports import cuppa.location import cuppa.output_processor import cuppa.build_platform from cuppa.colourise import as_info, as_warning import SCons.Script class Qt5Exception(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class build_with_qt5(object): _name = "qt5" @classmethod def add_options( cls, add_option ): pass @classmethod def add_to_env( cls, env, add_dependency ): try: add_dependency( cls._name, cls( env ) ) except Qt5Exception: print as_warning( env, "cuppa: warning: Could not create dependency [{}]. 
Dependency not available.".format( cls._name ) ) def __init__( self, env ): url = "hg+https://bitbucket.org/dirkbaechle/scons_qt5" try: self._location = cuppa.location.Location( env, url, extra_sub_path = "qt5" ) except cuppa.location.LocationException: print as_warning( env, "cuppa: qt5: warning: Could not retrieve url [{}]".format( url ) ) raise Qt5Exception( "Could not retrieve scons_qt5 from [{}]".format( url ) ) self._version = "5" if cuppa.build_platform.name() in ["Darwin", "Linux"]: if not cuppa.output_processor.command_available( "pkg-config" ): return if 'QT5DIR' not in env: self._set_qt5_dir( env ) self._version = self._get_qt5_version() def _set_qt5_dir( self, env ): command = "pkg-config --cflags Qt5Core" try: cflags = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() if cflags: flags = env.ParseFlags( cflags ) if 'CPPPATH' in flags: shortest_path = flags['CPPPATH'][0] for include in flags['CPPPATH']: if len(include) < len(shortest_path): shortest_path = include env['QT5DIR'] = shortest_path print "cuppa: qt5: Q5DIR detected as [{}]".format( as_info( env, env['QT5DIR'] ) ) except: #TODO: Warning? pass def _get_qt5_version( self ): command = "pkg-config --modversion Qt5Core" try: return subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip() except: #TODO: Warning? return None def __call__( self, env, toolchain, variant ): SCons.Script.Tool( 'qt5', toolpath=[ self._location.base_local() ] )( env ) if cuppa.build_platform.name() in ["Darwin", "Linux"]: env.MergeFlags("-fPIC") def name( self ): return self._name def version( self ): return self._version def repository( self ): return "N/A" def branch( self ): return "N/A" def revisions( self ): return [] PK•c×FC߈4$cuppa/dependencies/boost/__init__.py PKÖ“üFÎHe33.cuppa/dependencies/boost/boost_test_patch.diffdiff -urN a/boost/test/impl/compiler_log_formatter.ipp b/boost/test/impl/compiler_log_formatter.ipp --- a/boost/test/impl/compiler_log_formatter.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/compiler_log_formatter.ipp 2015-06-21 20:57:34.113753873 +0100 @@ -19,8 +19,10 @@ #include #include #include +#include #include #include +#include // Boost #include @@ -52,6 +54,22 @@ : BOOST_TEST_L( "Test setup" ); } +//____________________________________________________________________________// + +void +print_result( std::ostream& ostr, counter_t v, counter_t total, + const_string name, const_string res ) +{ + if( v > 0 ) { + ostr << v << ' ' << name << ( v != 1 ? "s" : "" ); + if( total > 0 ) + ostr << " out of " << total; + + ostr << ' ' << res << ". 
"; + } +} + + } // local namespace //____________________________________________________________________________// @@ -90,25 +108,57 @@ void compiler_log_formatter::test_unit_start( std::ostream& output, test_unit const& tu ) { + print_prefix( output, tu.p_file_name, tu.p_line_num ); output << "Entering test " << tu.p_type_name << " \"" << tu.p_name << "\"" << std::endl; } //____________________________________________________________________________// void -compiler_log_formatter::test_unit_finish( std::ostream& output, test_unit const& tu, unsigned long elapsed ) +compiler_log_formatter::test_unit_finish( std::ostream& output, test_unit const& tu, elapsed_t elapsed ) { output << "Leaving test " << tu.p_type_name << " \"" << tu.p_name << "\""; - if( elapsed > 0 ) { + if( has_time( elapsed ) ) { output << "; testing time: "; - if( elapsed % 1000 == 0 ) - output << elapsed/1000 << "ms"; - else - output << elapsed << "mks"; + output << to_string( elapsed ); + } + + output << ". "; + + test_results const& tr = results_collector.results( tu.p_id ); + + const_string status; + + if( tr.passed() ) + status = "passed"; + else if( tr.p_skipped ) + status = "skipped"; + else if( tr.p_aborted ) + status = "aborted"; + else + status = "failed"; + + output << "Test " << ( tu.p_type == tut_case ? "case" : "suite" ) << ' ' << status << ". "; + + if( tr.p_skipped ) { + output << "due to " << ( tu.check_dependencies() ? "test aborting\n" : "failed dependency\n" ); + return; } - output << std::endl; + counter_t total_assertions = tr.p_assertions_passed + tr.p_assertions_failed; + counter_t total_tc = tr.p_test_cases_passed + tr.p_test_cases_failed + tr.p_test_cases_skipped; + + if( total_assertions > 0 || total_tc > 0 ) { + print_result( output, tr.p_assertions_passed, total_assertions, "assertion", "passed" ); + print_result( output, tr.p_assertions_failed, total_assertions, "assertion", "failed" ); + print_result( output, tr.p_expected_failures, 0 , "failure" , "expected" ); + print_result( output, tr.p_test_cases_passed, total_tc , "test case", "passed" ); + print_result( output, tr.p_test_cases_failed, total_tc , "test case", "failed" ); + print_result( output, tr.p_test_cases_skipped, total_tc , "test case", "skipped" ); + print_result( output, tr.p_test_cases_aborted, total_tc , "test case", "aborted" ); + } + output << "\n"; } //____________________________________________________________________________// diff -urN a/boost/test/impl/framework.ipp b/boost/test/impl/framework.ipp --- a/boost/test/impl/framework.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/framework.ipp 2015-06-21 20:58:16.588222869 +0100 @@ -32,9 +32,10 @@ #include #include +#include // Boost -#include +// none // STL #include @@ -135,6 +136,16 @@ } } + void reset() + { + clear(); + m_master_test_suite = 0; + m_curr_test_case = INV_TEST_UNIT_ID; + m_next_test_case_id = MIN_TEST_CASE_ID; + m_next_test_suite_id = MIN_TEST_SUITE_ID; + m_test_in_progress = false; + } + void set_tu_id( test_unit& tu, test_unit_id id ) { tu.p_id.value = id; } // test_tree_visitor interface implementation @@ -150,12 +161,13 @@ BOOST_TEST_FOREACH( test_observer*, to, m_observers ) to->test_unit_start( tc ); - boost::timer tc_timer; + timer_t tc_timer; test_unit_id bkup = m_curr_test_case; m_curr_test_case = tc.p_id; unit_test_monitor_t::error_level run_result = unit_test_monitor.execute_and_translate( tc ); - unsigned long elapsed = static_cast( tc_timer.elapsed() * 1e6 ); + tc_timer.stop(); + elapsed_t elapsed = tc_timer.elapsed(); if( 
unit_test_monitor.is_critical_error( run_result ) ) { BOOST_TEST_FOREACH( test_observer*, to, m_observers ) @@ -189,7 +201,7 @@ void test_suite_finish( test_suite const& ts ) { BOOST_TEST_FOREACH( test_observer*, to, m_observers ) - to->test_unit_finish( ts, 0 ); + to->test_unit_finish( ts, elapsed_t() ); } ////////////////////////////////////////////////////////////////// @@ -233,8 +245,20 @@ namespace framework { void +reset() +{ + reset_observers(); + s_frk_impl().reset(); +} + +void init( init_unit_test_func init_func, int argc, char* argv[] ) { + if( s_frk_impl().m_is_initialized ) + { + reset(); + } + runtime_config::init( argc, argv ); // set the log level and format diff -urN a/boost/test/impl/progress_monitor.ipp b/boost/test/impl/progress_monitor.ipp --- a/boost/test/impl/progress_monitor.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/progress_monitor.ipp 2015-06-21 20:58:35.818982452 +0100 @@ -20,9 +20,10 @@ #include #include +#include +#include // Boost -#include #include #include @@ -72,7 +73,7 @@ //____________________________________________________________________________// void -progress_monitor_t::test_unit_finish( test_unit const& tu, unsigned long ) +progress_monitor_t::test_unit_finish( test_unit const& tu, elapsed_t ) { if( tu.p_type == tut_case ) ++(*s_pm_impl().m_progress_display); diff -urN a/boost/test/impl/results_collector.ipp b/boost/test/impl/results_collector.ipp --- a/boost/test/impl/results_collector.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/results_collector.ipp 2015-06-21 20:58:49.850807030 +0100 @@ -20,6 +20,7 @@ #include #include #include +#include // Boost #include @@ -201,7 +202,7 @@ //____________________________________________________________________________// void -results_collector_t::test_unit_finish( test_unit const& tu, unsigned long ) +results_collector_t::test_unit_finish( test_unit const& tu, elapsed_t ) { if( tu.p_type == tut_suite ) { results_collect_helper ch( s_rc_impl().m_results_store[tu.p_id], tu ); diff -urN a/boost/test/impl/unit_test_log.ipp b/boost/test/impl/unit_test_log.ipp --- a/boost/test/impl/unit_test_log.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/unit_test_log.ipp 2015-06-21 21:06:43.252888689 +0100 @@ -28,6 +28,8 @@ #include #include +#include + // Boost #include #include @@ -178,7 +180,7 @@ //____________________________________________________________________________// void -unit_test_log_t::test_unit_finish( test_unit const& tu, unsigned long elapsed ) +unit_test_log_t::test_unit_finish( test_unit const& tu, elapsed_t elapsed ) { if( s_log_impl().m_threshold_level > log_test_units ) return; diff -urN a/boost/test/impl/unit_test_parameters.ipp b/boost/test/impl/unit_test_parameters.ipp --- a/boost/test/impl/unit_test_parameters.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/unit_test_parameters.ipp 2015-06-21 21:10:23.607133882 +0100 @@ -154,6 +154,27 @@ namespace { // framework parameters and corresponding command-line arguments +#ifdef BOOST_TEST_USE_QUALIFIED_COMMANDLINE_ARGUMENTS +std::string AUTO_START_DBG = "boost.test.auto_start_dbg"; +std::string BREAK_EXEC_PATH = "boost.test.break_exec_path"; +std::string BUILD_INFO = "boost.test.build_info"; +std::string CATCH_SYS_ERRORS = "boost.test.catch_system_errors"; +std::string DETECT_FP_EXCEPT = "boost.test.detect_fp_exceptions"; +std::string DETECT_MEM_LEAKS = "boost.test.detect_memory_leaks"; +std::string LOG_FORMAT = "boost.test.log_format"; +std::string LOG_LEVEL = "boost.test.log_level"; 
+std::string LOG_SINK = "boost.test.log_sink"; +std::string OUTPUT_FORMAT = "boost.test.output_format"; +std::string RANDOM_SEED = "boost.test.random"; +std::string REPORT_FORMAT = "boost.test.report_format"; +std::string REPORT_LEVEL = "boost.test.report_level"; +std::string REPORT_SINK = "boost.test.report_sink"; +std::string RESULT_CODE = "boost.test.result_code"; +std::string TESTS_TO_RUN = "boost.test.run_test"; +std::string SAVE_TEST_PATTERN = "boost.test.save_pattern"; +std::string SHOW_PROGRESS = "boost.test.show_progress"; +std::string USE_ALT_STACK = "boost.test.use_alt_stack"; +#else std::string AUTO_START_DBG = "auto_start_dbg"; std::string BREAK_EXEC_PATH = "break_exec_path"; std::string BUILD_INFO = "build_info"; @@ -173,6 +194,7 @@ std::string SAVE_TEST_PATTERN = "save_pattern"; std::string SHOW_PROGRESS = "show_progress"; std::string USE_ALT_STACK = "use_alt_stack"; +#endif fixed_mapping parameter_2_env_var( AUTO_START_DBG , "BOOST_TEST_AUTO_START_DBG", @@ -247,6 +269,8 @@ { using namespace cla; + s_cla_parser.reset(); + try { s_cla_parser - cla::ignore_mismatch << cla::dual_name_parameter( AUTO_START_DBG + "|d" ) diff -urN a/boost/test/impl/unit_test_suite.ipp b/boost/test/impl/unit_test_suite.ipp --- a/boost/test/impl/unit_test_suite.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/unit_test_suite.ipp 2015-06-21 22:15:42.079146241 +0100 @@ -23,9 +23,10 @@ #include #include #include +#include // Boost -#include +// none // STL #include @@ -49,9 +50,11 @@ // ************** test_unit ************** // // ************************************************************************** // -test_unit::test_unit( const_string name, test_unit_type t ) +test_unit::test_unit( const_string name, test_unit_type t, const_string fn, std::size_t ln ) : p_type( t ) , p_type_name( t == tut_case ? "case" : "suite" ) +, p_file_name( fn ) +, p_line_num( ln ) , p_id( INV_TEST_UNIT_ID ) , p_name( std::string( name.begin(), name.size() ) ) , p_enabled( true ) @@ -103,8 +106,8 @@ // ************** test_case ************** // // ************************************************************************** // -test_case::test_case( const_string name, callback0<> const& test_func ) -: test_unit( name, static_cast(type) ) +test_case::test_case( const_string name, callback0<> const& test_func, const_string fn, std::size_t ln ) +: test_unit( name, static_cast(type), fn, ln ) , m_test_func( test_func ) { // !! 
weirdest MSVC BUG; try to remove this statement; looks like it eats first token of next statement @@ -122,8 +125,8 @@ //____________________________________________________________________________// -test_suite::test_suite( const_string name ) -: test_unit( name, static_cast(type) ) +test_suite::test_suite( const_string name, const_string fn, std::size_t ln ) +: test_unit( name, static_cast(type), fn, ln ) { framework::register_test_unit( this ); } @@ -276,7 +279,7 @@ //____________________________________________________________________________// -auto_test_unit_registrar::auto_test_unit_registrar( const_string ts_name ) +auto_test_unit_registrar::auto_test_unit_registrar( const_string ts_name, const_string fn, std::size_t ln ) { test_unit_id id = curr_ts_store().back()->get( ts_name ); @@ -287,7 +290,7 @@ BOOST_ASSERT( ts->p_parent_id == curr_ts_store().back()->p_id ); } else { - ts = new test_suite( ts_name ); + ts = new test_suite( ts_name, fn, ln ); curr_ts_store().back()->add( ts ); } diff -urN a/boost/test/impl/xml_log_formatter.ipp b/boost/test/impl/xml_log_formatter.ipp --- a/boost/test/impl/xml_log_formatter.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/impl/xml_log_formatter.ipp 2015-06-21 21:07:13.416511592 +0100 @@ -23,6 +23,8 @@ #include +#include + // Boost #include @@ -82,17 +84,24 @@ void xml_log_formatter::test_unit_start( std::ostream& ostr, test_unit const& tu ) { - ostr << "<" << tu_type_name( tu ) << " name" << attr_value() << tu.p_name.get() << ">"; + ostr << "<" << tu_type_name( tu ) << " name" << attr_value() << tu.p_name.get(); + + if( !tu.p_file_name.get().empty() ) + { + ostr << BOOST_TEST_L( " file" ) << attr_value() << tu.p_file_name + << BOOST_TEST_L( " line" ) << attr_value() << tu.p_line_num; + } + ostr << ">"; } //____________________________________________________________________________// void -xml_log_formatter::test_unit_finish( std::ostream& ostr, test_unit const& tu, unsigned long elapsed ) +xml_log_formatter::test_unit_finish( std::ostream& ostr, test_unit const& tu, elapsed_t elapsed ) { if( tu.p_type == tut_case ) - ostr << "" << elapsed << ""; - + ostr << to_xml( elapsed ); + ostr << ""; } diff -urN a/boost/test/output/compiler_log_formatter.hpp b/boost/test/output/compiler_log_formatter.hpp --- a/boost/test/output/compiler_log_formatter.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/output/compiler_log_formatter.hpp 2015-06-21 21:04:44.776369850 +0100 @@ -21,6 +21,8 @@ #include +#include + //____________________________________________________________________________// namespace boost { @@ -41,7 +43,7 @@ void log_build_info( std::ostream& ); void test_unit_start( std::ostream&, test_unit const& tu ); - void test_unit_finish( std::ostream&, test_unit const& tu, unsigned long elapsed ); + void test_unit_finish( std::ostream&, test_unit const& tu, elapsed_t elapsed ); void test_unit_skipped( std::ostream&, test_unit const& tu ); void log_exception( std::ostream&, log_checkpoint_data const&, execution_exception const& ex ); diff -urN a/boost/test/output/xml_log_formatter.hpp b/boost/test/output/xml_log_formatter.hpp --- a/boost/test/output/xml_log_formatter.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/output/xml_log_formatter.hpp 2015-06-21 21:04:54.543247747 +0100 @@ -19,6 +19,8 @@ #include #include +#include + // STL #include // std::size_t @@ -44,7 +46,7 @@ void log_build_info( std::ostream& ); void test_unit_start( std::ostream&, test_unit const& tu ); - void test_unit_finish( std::ostream&, test_unit const& tu, 
unsigned long elapsed ); + void test_unit_finish( std::ostream&, test_unit const& tu, elapsed_t elapsed ); void test_unit_skipped( std::ostream&, test_unit const& tu ); void log_exception( std::ostream&, log_checkpoint_data const&, execution_exception const& ex ); diff -urN a/boost/test/progress_monitor.hpp b/boost/test/progress_monitor.hpp --- a/boost/test/progress_monitor.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/progress_monitor.hpp 2015-06-21 21:02:33.650009155 +0100 @@ -18,6 +18,7 @@ // Boost.Test #include #include +#include // STL #include // for std::ostream& @@ -42,7 +43,7 @@ void test_aborted(); void test_unit_start( test_unit const& ) {} - void test_unit_finish( test_unit const&, unsigned long ); + void test_unit_finish( test_unit const&, elapsed_t ); void test_unit_skipped( test_unit const& ); void test_unit_aborted( test_unit const& ) {} diff -urN a/boost/test/results_collector.hpp b/boost/test/results_collector.hpp --- a/boost/test/results_collector.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/results_collector.hpp 2015-06-21 21:02:52.590772362 +0100 @@ -25,6 +25,8 @@ #include #include +#include + #include //____________________________________________________________________________// @@ -84,7 +86,7 @@ void test_aborted(); void test_unit_start( test_unit const& ); - void test_unit_finish( test_unit const&, unsigned long elapsed ); + void test_unit_finish( test_unit const&, elapsed_t elapsed ); void test_unit_skipped( test_unit const& ); void test_unit_aborted( test_unit const& ); diff -urN a/boost/test/test_observer.hpp b/boost/test/test_observer.hpp --- a/boost/test/test_observer.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/test_observer.hpp 2015-06-21 20:59:39.020192328 +0100 @@ -22,6 +22,8 @@ #include +#include + //____________________________________________________________________________// namespace boost { @@ -40,7 +42,7 @@ virtual void test_aborted() {} virtual void test_unit_start( test_unit const& ) {} - virtual void test_unit_finish( test_unit const&, unsigned long /* elapsed */ ) {} + virtual void test_unit_finish( test_unit const&, elapsed_t /* elapsed */ ) {} virtual void test_unit_skipped( test_unit const& ) {} virtual void test_unit_aborted( test_unit const& ) {} diff -urN a/boost/test/unit_test_log_formatter.hpp b/boost/test/unit_test_log_formatter.hpp --- a/boost/test/unit_test_log_formatter.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/unit_test_log_formatter.hpp 2015-06-21 21:01:49.045566787 +0100 @@ -22,6 +22,8 @@ #include +#include + // STL #include #include // for std::string @@ -95,7 +97,7 @@ virtual void log_build_info( std::ostream& ) = 0; virtual void test_unit_start( std::ostream&, test_unit const& tu ) = 0; - virtual void test_unit_finish( std::ostream&, test_unit const& tu, unsigned long elapsed ) = 0; + virtual void test_unit_finish( std::ostream&, test_unit const& tu, elapsed_t elapsed ) = 0; virtual void test_unit_skipped( std::ostream&, test_unit const& ) = 0; virtual void log_exception( std::ostream& os, log_checkpoint_data const& cd, execution_exception const& ex ) diff -urN a/boost/test/unit_test_log.hpp b/boost/test/unit_test_log.hpp --- a/boost/test/unit_test_log.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/unit_test_log.hpp 2015-06-21 21:01:36.740720619 +0100 @@ -28,6 +28,8 @@ #include #include +#include + // Boost #include @@ -98,7 +100,7 @@ void test_aborted(); void test_unit_start( test_unit const& ); - void test_unit_finish( test_unit const&, unsigned long elapsed ); + 
void test_unit_finish( test_unit const&, elapsed_t elapsed ); void test_unit_skipped( test_unit const& ); void test_unit_aborted( test_unit const& ); diff -urN a/boost/test/unit_test_suite.hpp b/boost/test/unit_test_suite.hpp --- a/boost/test/unit_test_suite.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/unit_test_suite.hpp 2015-06-21 22:16:12.496765968 +0100 @@ -26,7 +26,7 @@ // ************************************************************************** // #define BOOST_TEST_CASE( test_function ) \ -boost::unit_test::make_test_case( boost::unit_test::callback0<>(test_function), BOOST_TEST_STRINGIZE( test_function ) ) +boost::unit_test::make_test_case( boost::unit_test::callback0<>(test_function), BOOST_TEST_STRINGIZE( test_function ), __FILE__, __LINE__ ) #define BOOST_CLASS_TEST_CASE( test_function, tc_instance ) \ boost::unit_test::make_test_case((test_function), BOOST_TEST_STRINGIZE( test_function ), tc_instance ) @@ -35,7 +35,7 @@ // ************************************************************************** // #define BOOST_TEST_SUITE( testsuite_name ) \ -( new boost::unit_test::test_suite( testsuite_name ) ) +( new boost::unit_test::test_suite( testsuite_name, __FILE__, __LINE__ ) ) // ************************************************************************** // // ************** BOOST_AUTO_TEST_SUITE ************** // @@ -100,7 +100,7 @@ \ BOOST_AUTO_TU_REGISTRAR( test_name )( \ boost::unit_test::make_test_case( \ - &BOOST_AUTO_TC_INVOKER( test_name ), #test_name ), \ + &BOOST_AUTO_TC_INVOKER( test_name ), #test_name, __FILE__, __LINE__ ), \ boost::unit_test::ut_detail::auto_tc_exp_fail< \ BOOST_AUTO_TC_UNIQUE_ID( test_name )>::instance()->value() ); \ \ diff -urN a/boost/test/unit_test_suite_impl.hpp b/boost/test/unit_test_suite_impl.hpp --- a/boost/test/unit_test_suite_impl.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/unit_test_suite_impl.hpp 2015-06-21 22:16:03.388879832 +0100 @@ -54,7 +54,7 @@ enum { type = tut_any }; // Constructor - test_unit( const_string tu_name, test_unit_type t ); + test_unit( const_string tu_name, test_unit_type t, const_string fn, std::size_t ln ); // dependencies management void depends_on( test_unit* tu ); @@ -65,6 +65,8 @@ typedef BOOST_READONLY_PROPERTY(test_unit_id,(test_suite)) parent_id_t; readonly_property p_type; // type for this test unit readonly_property p_type_name; // "case"/"suite" + readonly_property p_file_name; + readonly_property p_line_num; id_t p_id; // unique id for this test unit parent_id_t p_parent_id; // parent test suite id @@ -105,7 +107,7 @@ enum { type = tut_case }; // Constructor - test_case( const_string tc_name, callback0<> const& test_func ); + test_case( const_string tc_name, callback0<> const& test_func, const_string fn, std::size_t ln ); // Access methods callback0<> const& test_func() const { return m_test_func; } @@ -128,7 +130,7 @@ enum { type = tut_suite }; // Constructor - explicit test_suite( const_string ts_name ); + explicit test_suite( const_string ts_name, const_string fn, std::size_t ln ); // test unit list management void add( test_unit* tu, counter_t expected_failures = 0, unsigned timeout = 0 ); @@ -155,7 +157,7 @@ class BOOST_TEST_DECL master_test_suite_t : public test_suite { public: - master_test_suite_t() : test_suite( "Master Test Suite" ) + master_test_suite_t() : test_suite( "Master Test Suite", "", 0 ) , argc( 0 ) , argv( 0 ) {} @@ -250,9 +252,9 @@ //____________________________________________________________________________// inline test_case* -make_test_case( callback0<> 
const& test_func, const_string tc_name ) +make_test_case( callback0<> const& test_func, const_string tc_name, const_string fn, std::size_t ln ) { - return new test_case( ut_detail::normalize_test_case_name( tc_name ), test_func ); + return new test_case( ut_detail::normalize_test_case_name( tc_name ), test_func, fn, ln ); } //____________________________________________________________________________// @@ -279,7 +281,7 @@ { // Constructors auto_test_unit_registrar( test_case* tc, counter_t exp_fail ); - explicit auto_test_unit_registrar( const_string ts_name ); + explicit auto_test_unit_registrar( const_string ts_name, const_string fn, std::size_t ln ); explicit auto_test_unit_registrar( test_unit_generator const& tc_gen ); explicit auto_test_unit_registrar( int ); diff -urN a/boost/test/utils/progress.hpp b/boost/test/utils/progress.hpp --- a/boost/test/utils/progress.hpp 1970-01-01 01:00:00.000000000 +0100 +++ b/boost/test/utils/progress.hpp 2014-10-17 17:40:31.000000000 +0100 @@ -0,0 +1,107 @@ +// boost progress.hpp header file ------------------------------------------// + +// Copyright Beman Dawes 1994-99. Distributed under the Boost +// Software License, Version 1.0. (See accompanying file +// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) + +// See http://www.boost.org/libs/timer for documentation. + +// Revision History +// 1 Dec 01 Add leading progress display strings (suggested by Toon Knapen) +// 20 May 01 Introduce several static_casts<> to eliminate warning messages +// (Fixed by Beman, reported by Herve Bronnimann) +// 12 Jan 01 Change to inline implementation to allow use without library +// builds. See docs for more rationale. (Beman Dawes) +// 22 Jul 99 Name changed to .hpp +// 16 Jul 99 Second beta +// 6 Jul 99 Initial boost version + +#ifndef BOOST_TEST_UTILS_PROGRESS_DISPLAY_HPP +#define BOOST_TEST_UTILS_PROGRESS_DISPLAY_HPP + +#include +#include // for ostream, cout, etc +#include // for string + +namespace boost { + +namespace unit_test { + +// progress_display --------------------------------------------------------// + +// progress_display displays an appropriate indication of +// progress at an appropriate place in an appropriate form. + +// NOTE: (Jan 12, 2001) Tried to change unsigned long to boost::uintmax_t, but +// found some compilers couldn't handle the required conversion to double. +// Reverted to unsigned long until the compilers catch up. + +class progress_display : private noncopyable +{ + public: + explicit progress_display( unsigned long expected_count, + std::ostream & os = std::cout, + const std::string & s1 = "\n", //leading strings + const std::string & s2 = "", + const std::string & s3 = "" ) + // os is hint; implementation may ignore, particularly in embedded systems + : m_os(os), m_s1(s1), m_s2(s2), m_s3(s3) { restart(expected_count); } + + void restart( unsigned long expected_count ) + // Effects: display appropriate scale + // Postconditions: count()==0, expected_count()==expected_count + { + _count = _next_tic_count = _tic = 0; + _expected_count = expected_count; + + m_os << m_s1 << "0% 10 20 30 40 50 60 70 80 90 100%\n" + << m_s2 << "|----|----|----|----|----|----|----|----|----|----|" + << std::endl // endl implies flush, which ensures display + << m_s3; + if ( !_expected_count ) _expected_count = 1; // prevent divide by zero + } // restart + + unsigned long operator+=( unsigned long increment ) + // Effects: Display appropriate progress tic if needed. 
+ // Postconditions: count()== original count() + increment + // Returns: count(). + { + if ( (_count += increment) >= _next_tic_count ) { display_tic(); } + return _count; + } + + unsigned long operator++() { return operator+=( 1 ); } + unsigned long count() const { return _count; } + unsigned long expected_count() const { return _expected_count; } + + private: + std::ostream & m_os; // may not be present in all imps + const std::string m_s1; // string is more general, safer than + const std::string m_s2; // const char *, and efficiency or size are + const std::string m_s3; // not issues + + unsigned long _count, _expected_count, _next_tic_count; + unsigned int _tic; + void display_tic() + { + // use of floating point ensures that both large and small counts + // work correctly. static_cast<>() is also used several places + // to suppress spurious compiler warnings. + unsigned int tics_needed = + static_cast( + (static_cast(_count)/_expected_count)*50.0 ); + do { m_os << '*' << std::flush; } while ( ++_tic < tics_needed ); + _next_tic_count = + static_cast((_tic/50.0)*_expected_count); + if ( _count == _expected_count ) { + if ( _tic < 51 ) m_os << '*'; + m_os << std::endl; + } + } // display_tic +}; + +} // namespace unit_test + +} // namespace boost + +#endif // BOOST_TEST_UTILS_PROGRESS_DISPLAY_HPP diff -urN a/boost/test/utils/runtime/cla/argv_traverser.ipp source/boost/test/utils/runtime/cla/argv_traverser.ipp --- a/boost/test/utils/runtime/cla/argv_traverser.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/utils/runtime/cla/argv_traverser.ipp 2015-06-21 21:11:49.038065848 +0100 @@ -49,6 +49,8 @@ BOOST_RT_PARAM_INLINE void argv_traverser::init( int argc, char_type** argv ) { + m_buffer.clear(); + for( int index = 1; index < argc; ++index ) { m_buffer += argv[index]; if( index != argc-1 ) diff -urN a/boost/test/utils/runtime/cla/parser.hpp b/boost/test/utils/runtime/cla/parser.hpp --- a/boost/test/utils/runtime/cla/parser.hpp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/utils/runtime/cla/parser.hpp 2015-06-21 21:11:49.038065848 +0100 @@ -126,6 +126,8 @@ void usage( out_stream& ostr ); void help( out_stream& ostr ); + void reset( cstring program_name = cstring() ); + private: argument const& valid_argument( cstring string_id ) const; diff -urN a/boost/test/utils/runtime/cla/parser.ipp b/boost/test/utils/runtime/cla/parser.ipp --- a/boost/test/utils/runtime/cla/parser.ipp 2012-12-13 21:32:58.000000000 +0000 +++ b/boost/test/utils/runtime/cla/parser.ipp 2015-06-21 21:12:24.184626456 +0100 @@ -249,6 +249,15 @@ //____________________________________________________________________________// +BOOST_RT_PARAM_INLINE void +parser::reset( cstring program_name ) +{ + assign_op( m_program_name, program_name, 0 ); + m_parameters.clear(); +} + +//____________________________________________________________________________// + } // namespace cla } // namespace BOOST_RT_PARAM_NAMESPACE diff -urN a/boost/test/utils/timer.hpp b/boost/test/utils/timer.hpp --- a/boost/test/utils/timer.hpp 1970-01-01 01:00:00.000000000 +0100 +++ b/boost/test/utils/timer.hpp 2015-06-21 17:27:52.000000000 +0100 @@ -0,0 +1,94 @@ +// (C) Copyright Jamie Allsop 2015. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +// See http://www.boost.org/libs/test for the library home page. 
+// +// Description : timer and elapsed types +// *************************************************************************** + +#ifndef BOOST_TEST_UTILS_TIMER_HPP +#define BOOST_TEST_UTILS_TIMER_HPP + +#ifdef BOOST_TEST_USE_DEPRECATED_TIMER +#include +#else +#include +#endif +#include + +namespace boost { + +namespace unit_test { + +// ************************************************************************** // +// ************** opaque timer and elapsed types ************** // +// ************************************************************************** // + +#ifdef BOOST_TEST_USE_DEPRECATED_TIMER + +typedef boost::timer timer_t; +typedef unsigned long elapsed_t; + +inline std::string to_string( elapsed_t elapsed ) +{ + std::ostringstream output; + if( elapsed % 1000 == 0 ) + { + output << elapsed/1000 << "ms"; + } + else + { + output << elapsed << "mks"; + } + return output.str(); +} + +inline std::string to_xml( elapsed_t elapsed ) +{ + std::ostringstream output; + output << "" << elapsed << ""; + return output.str(); +} + +inline bool has_time( const elapsed_t& elapsed ) +{ + return elapsed; +} + +#else + +typedef boost::timer::cpu_timer timer_t; +typedef boost::timer::cpu_times elapsed_t; + +inline std::string to_string( elapsed_t elapsed ) +{ + return boost::timer::format( elapsed, 9, "%ws wall, %us user + %ss system = %ts CPU (%p%)" ); +} + +inline std::string to_xml( elapsed_t elapsed ) +{ + std::ostringstream output; + output << "" << ( elapsed.user + elapsed.system ) << "" + << "" << elapsed.wall << "" + << "" << elapsed.user << "" + << "" << elapsed.system << ""; + return output.str(); +} + +inline bool has_time( const elapsed_t& elapsed ) +{ + return elapsed.wall != 0 || elapsed.user != 0 || elapsed.system != 0; +} + +#endif + +//____________________________________________________________________________// + +} // namespace unit_test + +} // namespace boost + +#endif // BOOST_TEST_UTILS_TIMER_HPP + diff -urN a/cuppa_test_patch_applied.txt b/cuppa_test_patch_applied.txt --- a/cuppa_test_patch_applied.txt 1970-01-01 01:00:00.000000000 +0100 +++ b/cuppa_test_patch_applied.txt 2015-06-23 09:55:02.225423584 +0100 @@ -0,0 +1 @@ + diff -urN a/libs/test/build/Jamfile.v2 b/libs/test/build/Jamfile.v2 --- a/libs/test/build/Jamfile.v2 2012-12-13 21:32:58.000000000 +0000 +++ b/libs/test/build/Jamfile.v2 2015-06-21 21:36:46.092350094 +0100 @@ -14,11 +14,22 @@ shared,msvc:-wd4275 msvc:-wd4671 msvc:-wd4673 + #gcc:-std=gnu++0x + clang:-Wno-c99-extensions + clang:-Wno-variadic-macros all + + # adding a dependency on boost/timer as the header are needed, and the junction needs + # to be there in order to build the library. + /boost/timer//boost_timer : usage-requirements BOOST_TEST_NO_AUTO_LINK=1 # Disable Warning about boost::noncopyable not being exported shared,msvc:-wd4275 + + # Adding a dependency on boost/timer as the headers need to be there in case of the + # header-only usage variant + /boost/timer//boost_timer ; PRG_EXEC_MON_SOURCES = PKDûFC߈4cuppa/test_report/__init__.py PKqüFš@âiP P +cuppa/test_report/generate_bitten_report.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. 
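# ---------------------------------------------------------------------------
# Illustrative note (not part of the original module): the _write() method
# below emits a Bitten-style XML test report, but the angle-bracketed tags
# have been lost in this dump. The sketch beneath shows the assumed shape of
# the output based on Bitten's <report category="test"> format; the helper
# name and exact tag layout are assumptions, not taken from this archive.
def write_minimal_report( path, test ):
    # Hypothetical helper, for illustration only: write a single test case in
    # the assumed Bitten shape; cgi.escape matches the Python 2 usage below.
    import cgi
    with open( path, "w" ) as report:
        report.write( '<report category="test">\n' )
        report.write( '    <test>\n' )
        for key, value in test.items():
            report.write( '        <%s>%s</%s>\n' % ( key, cgi.escape( str( value ) ), key ) )
        report.write( '    </test>\n' )
        report.write( '</report>\n' )
# ---------------------------------------------------------------------------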
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Generate Bitten Report #------------------------------------------------------------------------------- import json import os import itertools import cgi import cuppa.progress class GenerateReportBuilder(object): def __init__( self, final_dir ): self._final_dir = final_dir def emitter( self, target, source, env ): sources = [] targets = [] try: for s in source: if os.path.splitext( str(s) )[1] == ".json": sources.append( str(s) ) target_report = os.path.splitext( str(s) )[0] + "_bitten.xml" targets.append( target_report ) except StopIteration: pass return targets, sources def GenerateBittenReport( self, target, source, env ): for s, t in itertools.izip( source, target ): test_cases = self._read( str(s) ) self._write( str(t), test_cases ) return None def _read( self, json_report_path ): with open( json_report_path, "r" ) as report: test_cases = json.load( report ) return test_cases def _write( self, destination_path, test_cases ): with open( destination_path, "w" ) as report: report.write( '\n' ) for test in test_cases: report.write( ' \n' ) if not 'file' in test: test['file'] = "" if not 'line' in test: test['line'] = "" if not 'branch_dir' in test: test['branch_dir'] = "" for key, value in test.iteritems(): if key == "cpu_times" or key == "timer": continue report.write( ' <%s>' % key ) if key == 'stdout': value = ( '' + cgi.escape(line) + '
' for line in value ) value = '' else: value = cgi.escape( str( value ) ) report.write( value ) report.write( '\n' % key ) report.write( '
</test>\n' ) report.write( '
\n' ) class GenerateBittenReportMethod(object): def __call__( self, env, source, final_dir=None ): builder = GenerateReportBuilder( final_dir ) env['BUILDERS']['GenerateBittenReport'] = env.Builder( action=builder.GenerateBittenReport, emitter=builder.emitter ) report = env.GenerateBittenReport( [], source ) cuppa.progress.NotifyProgress.add( env, report ) return report @classmethod def add_to_env( cls, env ): env.AddMethod( cls(), "GenerateBittenReport" ) PKýûFN]cuppa/test_report/cuppa_json.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Json Encoder for Cuppa Types #------------------------------------------------------------------------------- import json import cuppa.timer class Encoder( json.JSONEncoder ): def default(self, obj): if isinstance( obj, cuppa.timer.CpuTimes ): return { "wall_time" : obj.wall, "process_time" : obj.process, "system_time" : obj.system, "user_time" : obj.user } elif isinstance( obj, cuppa.timer.Timer ): return { "wall_time" : obj.elapsed().wall, "process_time" : obj.elapsed().process, "system_time" : obj.elapsed().system, "user_time" : obj.elapsed().user } return json.JSONEncoder.default( self, obj ) def write_report( report_path, test_cases ): with open( report_path, "w" ) as report: json.dump( test_cases, report, sort_keys = True, indent = 4, separators = (',', ': '), cls = Encoder ) PK0\g<2>') return gcov_version + '*' def _initialise_toolchain( self, toolchain, stdlib ): self.values['sys_inc_prefix'] = '-isystem' self.values['sys_inc_suffix'] = '' self.values['static_link'] = '-Xlinker -Bstatic' self.values['dynamic_link'] = '-Xlinker -Bdynamic' CommonCxxFlags = [ '-Wall', '-fexceptions' ] CommonCFlags = [ '-Wall' ] if not re.match( 'clang3[2-5]', toolchain ) or not self._suppress_debug_for_auto: CommonCxxFlags += [ "-g" ] CommonCFlags += [ "-g" ] if stdlib: CommonCxxFlags += [ "-stdlib={}".format(stdlib) ] if re.match( 'clang3[2-3]', toolchain ): CommonCxxFlags += [ '-std=c++11' ] elif re.match( 'clang3[4-7]', toolchain ): CommonCxxFlags += [ '-std=c++1y' ] self.values['debug_cxx_flags'] = CommonCxxFlags + [] self.values['release_cxx_flags'] = CommonCxxFlags + [ '-O3', '-DNDEBUG' ] coverage_options = "--coverage -Xclang -coverage-cfg-checksum -Xclang -coverage-no-function-names-in-data -Xclang -coverage-version={}".format( self._gcov_format ) self.values['coverage_flags'] = CommonCxxFlags self.values['coverage_cxx_flags'] = coverage_options.split() self.values['debug_c_flags'] = CommonCFlags + [] self.values['release_c_flags'] = CommonCFlags + [ '-O3', '-DNDEBUG' ] CommonLinkCxxFlags = [] if cuppa.build_platform.name() == "Linux": CommonLinkCxxFlags = ['-rdynamic', '-Wl,-rpath=.' 
] self.values['debug_link_cxx_flags'] = CommonLinkCxxFlags self.values['release_link_cxx_flags'] = CommonLinkCxxFlags self.values['coverage_link_flags'] = CommonLinkCxxFlags + [ '--coverage' ] DynamicLibraries = [] if cuppa.build_platform.name() == "Linux": DynamicLibraries = [ 'pthread', 'rt' ] if stdlib == "libc++": DynamicLibraries += [ 'c++abi', 'c++', 'c++abi', 'm', 'c', 'gcc_s', 'gcc' ] self.values['dynamic_libraries'] = DynamicLibraries def __get_clang_coverage( self, object_dir, source ): # -l = --long-file-names # -p = --preserve-paths # -b = --branch-probabilities return 'gcov -o ' + object_dir \ + ' -l -p -b ' \ + source + ' > ' + source + '_summary.gcov' def abi_flag( self, env ): if env['stdcpp']: return '-std={}'.format(env['stdcpp']) elif re.match( 'clang3[2-3]', self.values['name'] ): return '-std=c++11' elif re.match( 'clang3[4-7]', self.values['name'] ): return '-std=c++1y' def stdcpp_flag_for( self, standard ): return "-std={}".format( standard ) def error_format( self ): return "{}:{}: {}" @classmethod def output_interpretors( cls ): return [ { 'title' : "Compiler Error", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):([0-9]+))(:[ \t](error:[ \t].*))", 'meaning' : 'error', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 5 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Warning", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):([0-9]+))(:[ \t](warning:[ \t].*))", 'meaning' : 'warning', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 5 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Note", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):([0-9]+))(:[ \t](note:[ \t].*))", 'meaning' : 'message', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 5 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Warning", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:\(\.text\+[0-9a-fA-FxX]+\))(:[ \t]([Ww]arning:[ \t].*))", 'meaning' : 'warning', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Error", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):[0-9]+)(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Error 2", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+\(.text\+[0-9A-Za-z]+\):([ \tA-Za-z0-9_:+/\.-]+))(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Error 3", 'regex' : r"(([][{}() \t#%$~\w&_:+/\.-]+):\(\.text\+[0-9a-fA-FxX]+\))(:(.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : 2, 'line' : None, 'column' : None, }, { 'title' : "Linker Error - lib not found", 'regex' : r"(.*(ld.*):[ \t](cannot find.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Linker Error - cannot open output file", 'regex' : r"(.*(ld.*):[ \t](cannot open output file.*))(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Linker Error - unrecognized option", 'regex' : r"(.*(ld.*))(:[ \t](unrecognized option.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 3 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Undefined Reference", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:[ \t](undefined reference.*))", 
'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 2 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "No such File or Directory", 'regex' : r"(.*:(.*))(:[ \t](No such file or directory.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 3 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Compiler Error", 'regex' : r"(error:)([ \t].*)", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 2 ], 'file' : None, 'line' : None, 'column' : None, }, ] PK-¹ƒF2¨Ô**cuppa/toolchains/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) import cuppa.modules __all__ = cuppa.modules.registration.get_module_list( __file__ ) PKl—µFW*cuppa/toolchains/cl.py # Copyright Jamie Allsop 2014-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # CL Toolchain #------------------------------------------------------------------------------- from exceptions import Exception import SCons.Script from cuppa.cpp.create_version_file_cpp import CreateVersionHeaderCpp, CreateVersionFileCpp from cuppa.cpp.run_boost_test import RunBoostTestEmitter, RunBoostTest from cuppa.cpp.run_patched_boost_test import RunPatchedBoostTestEmitter, RunPatchedBoostTest from cuppa.cpp.run_process_test import RunProcessTestEmitter, RunProcessTest from cuppa.output_processor import command_available class ClException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) class Cl(object): @classmethod def default_version( cls ): return 'cl' @classmethod def supported_versions( cls ): return [ "cl" ] @classmethod def available_versions( cls ): if not hasattr( cls, '_available_versions' ): cls._available_versions = [] for version in cls.supported_versions(): command = "cl" if command_available( command ): cls._available_versions = [ "cl" ] return cls._available_versions @classmethod def add_options( cls, add_option ): pass @classmethod def add_to_env( cls, env, add_toolchain, add_to_supported ): for version in cls.supported_versions(): add_to_supported( version ) for version in cls.available_versions(): add_toolchain( version, cls( version ) ) @classmethod def default_variants( cls ): return [ 'dbg', 'rel' ] def __init__( self, version ): self.values = {} self._version = "cl" self.values['name'] = version env = SCons.Script.DefaultEnvironment() self.values['sys_inc_prefix'] = env['INCPREFIX'] self.values['sys_inc_suffix'] = env['INCSUFFIX'] SYSINCPATHS = '${_concat(\"' + self.values['sys_inc_prefix'] + '\", SYSINCPATH, \"'+ self.values['sys_inc_suffix'] + '\", __env__, RDirs, TARGET, SOURCE)}' self.values['_CPPINCFLAGS'] = '$( ' + SYSINCPATHS + ' ${_concat(INCPREFIX, INCPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' self._initialise_toolchain() def __getitem__( self, key ): return self.values.get( key ) def name( self ): return self.values['name'] def family( self ): return "cl" def version( self ): return self._version def cxx_version( self ): return self._version def binary( self ): return self.values['CXX'] def initialise_env( self, env ): env['_CPPINCFLAGS'] = self.values['_CPPINCFLAGS'] env['SYSINCPATH'] = [] env['INCPATH'] = [ '#.', '.' 
] env['LIBPATH'] = [] env['CPPDEFINES'] = [] env['LIBS'] = [] env['STATICLIBS'] = [] def variants( self ): pass def supports_coverage( self ): return False def version_file_builder( self, env, namespace, version, location ): return CreateVersionFileCpp( env, namespace, version, location ) def version_file_emitter( self, env, namespace, version, location ): return CreateVersionHeaderCpp( env, namespace, version, location ) def test_runner( self, tester, final_dir, expected ): if not tester or tester =='process': return RunProcessTest( expected ), RunProcessTestEmitter( final_dir ) elif tester=='boost': return RunBoostTest( expected ), RunBoostTestEmitter( final_dir ) elif tester=='patched_boost': return RunPatchedBoostTest( expected ), RunPatchedBoostTestEmitter( final_dir ) def test_runners( self ): return [ 'process', 'boost', 'patched_boost' ] def coverage_runner( self, program, final_dir ): return None def update_variant( self, env, variant ): if variant == 'dbg': env.AppendUnique( CXXFLAGS = self.values['dbg_cxx_flags'] ) env.AppendUnique( LINKFLAGS = self.values['dbg_link_flags'] ) elif variant == 'rel': env.AppendUnique( CXXFLAGS = self.values['rel_cxx_flags'] ) env.AppendUnique( LINKFLAGS = self.values['rel_link_flags'] ) elif variant == 'cov': pass def _initialise_toolchain( self ): CommonCxxFlags = [ '-W4', '-EHac', '-nologo', '-GR' ] self.values['dbg_cxx_flags'] = CommonCxxFlags + [ '-Zi', '-MDd' ] self.values['rel_cxx_flags'] = CommonCxxFlags + [ '-Ox', '-MD' ] CommonLinkFlags = [ '-OPT:REF'] self.values['dbg_link_flags'] = CommonLinkFlags + [] self.values['rel_link_flags'] = CommonLinkFlags + [] def abi_flag( self, library ): return "" def stdcpp_flag_for( self, standard ): return "" def error_format( self ): return "{}({}): error: {}" @classmethod def output_interpretors( cls ): return [ { 'title' : "Compiler Error", 'regex' : r"([][{}() \t#%$~\w&_:+\\/\.-]+)([(]([0-9]+)[)])([ ]:[ ]error [A-Z0-9]+:.*)", 'meaning' : 'error', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Warning", 'regex' : r"([][{}() \t#%$~\w&_:+\\/\.-]+)([(]([0-9]+)[)])([ ]:[ ]warning [A-Z0-9]+:.*)", 'meaning' : 'warning', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, ] PK(\d))?(?P(\d))?', version ) if not matches: raise GccException("GCC toolchain [{}] is not recognised as supported!.".format( version ) ) major = matches.group('major') minor = matches.group('minor') if not major and not minor: default_ver, default_cxx = cls.default_version() if default_ver: cls._available_versions[version] = {'cxx_version': default_cxx, 'version': default_ver } elif not minor: cxx_version = "-{}".format( major ) cxx = "g++{}".format( cxx_version ) reported_version = cls.version_from_command( cxx ) if reported_version: cls._available_versions[version] = {'cxx_version': cxx_version, 'version': reported_version } cls._available_versions[reported_version] = {'cxx_version': cxx_version, 'version': reported_version } else: cxx_version = "-{}.{}".format( major, minor ) cxx = "g++{}".format( cxx_version ) reported_version = cls.version_from_command( cxx ) if reported_version: if version == reported_version: cls._available_versions[version] = {'cxx_version': cxx_version, 'version': reported_version } else: raise GccException("GCC toolchain [{}] reporting version as [{}].".format( version, reported_version ) ) return cls._available_versions @classmethod def add_options( cls, add_option ): pass 
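# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original toolchain module): the
# available_versions() logic above probes candidate g++ binaries and records
# the version each one reports. A minimal stand-alone probe along the same
# lines might look like this; the helper name and the use of -dumpversion are
# assumptions for the sketch, not cuppa's actual version_from_command().
def _probe_gxx_version( suffix="" ):
    # e.g. suffix "-4.9" probes "g++-4.9"; turn "4.9.2" into a name like "gcc49"
    import re
    import shlex
    import subprocess
    command = "g++{} -dumpversion".format( suffix )
    try:
        reported = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ).strip()
    except ( OSError, subprocess.CalledProcessError ):
        return None
    matches = re.match( r'(?P<major>\d+)\.(?P<minor>\d+)', reported )
    return matches and "gcc{}{}".format( matches.group('major'), matches.group('minor') ) or None
# ---------------------------------------------------------------------------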
@classmethod def add_to_env( cls, env, add_toolchain, add_to_supported ): for version in cls.supported_versions(): add_to_supported( version ) for version, gcc in cls.available_versions().iteritems(): # print "Adding toolchain [{}] reported as [{}] with cxx_version [g++{}]".format( version, gcc['version'], gcc['cxx_version'] ) add_toolchain( version, cls( version, gcc['cxx_version'], gcc['version'] ) ) @classmethod def default_variants( cls ): return [ 'dbg', 'rel' ] def _linux_lib_flags( self, env ): self.values['static_link'] = '-Xlinker -Bstatic' self.values['dynamic_link'] = '-Xlinker -Bdynamic' STATICLIBFLAGS = self.values['static_link'] + ' ' + re.search( r'(.*)(,\s*LIBS\s*,)(.*)', env['_LIBFLAGS'] ).expand( r'\1, STATICLIBS,\3' ) DYNAMICLIBFLAGS = self.values['dynamic_link'] + ' ' + re.search( r'(.*)(,\s*LIBS\s*,)(.*)', env['_LIBFLAGS'] ).expand( r'\1, DYNAMICLIBS,\3' ) return STATICLIBFLAGS + ' ' + DYNAMICLIBFLAGS def __init__( self, available_version, cxx_version, reported_version ): self.values = {} self._version = re.search( r'(\d)(\d)', reported_version ).expand(r'\1.\2') self._cxx_version = cxx_version.lstrip('-') self._name = reported_version self._reported_version = reported_version self._initialise_toolchain( self._reported_version ) self.values['CXX'] = "g++-{}".format( self._cxx_version ) self.values['CC'] = "gcc-{}".format( self._cxx_version ) env = SCons.Script.DefaultEnvironment() SYSINCPATHS = '${_concat(\"' + self.values['sys_inc_prefix'] + '\", SYSINCPATH, \"'+ self.values['sys_inc_suffix'] + '\", __env__, RDirs, TARGET, SOURCE)}' self.values['_CPPINCFLAGS'] = '$( ' + SYSINCPATHS + ' ${_concat(INCPREFIX, INCPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' if cuppa.build_platform.name() == "Linux": self.values['_LIBFLAGS'] = self._linux_lib_flags( env ) else: self.values['_LIBFLAGS'] = env['_LIBFLAGS'] def __getitem__( self, key ): return self.values.get( key ) def name( self ): return self._name def family( self ): return "gcc" def version( self ): return self._version def cxx_version( self ): return self._cxx_version def binary( self ): return self.values['CXX'] def initialise_env( self, env ): env['CXX'] = self.values['CXX'] env['CC'] = self.values['CC'] env['_CPPINCFLAGS'] = self.values['_CPPINCFLAGS'] env['_LIBFLAGS'] = self.values['_LIBFLAGS'] env['SYSINCPATH'] = [] env['INCPATH'] = [ '#.', '.' 
] env['LIBPATH'] = [] env['CPPDEFINES'] = [] env['LIBS'] = [] env['STATICLIBS'] = [] env['DYNAMICLIBS'] = self.values['dynamic_libraries'] def variants( self ): pass def supports_coverage( self ): return 'coverage_cxx_flags' in self.values def version_file_builder( self, env, namespace, version, location ): return CreateVersionFileCpp( env, namespace, version, location ) def version_file_emitter( self, env, namespace, version, location ): return CreateVersionHeaderCpp( env, namespace, version, location ) def test_runner( self, tester, final_dir, expected ): if not tester or tester =='process': return RunProcessTest( expected ), RunProcessTestEmitter( final_dir ) elif tester=='boost': return RunBoostTest( expected ), RunBoostTestEmitter( final_dir ) elif tester=='patched_boost': return RunPatchedBoostTest( expected ), RunPatchedBoostTestEmitter( final_dir ) def test_runners( self ): return [ 'process', 'boost', 'patched_boost' ] def coverage_runner( self, program, final_dir ): return RunGcovCoverageEmitter( program, final_dir ), RunGcovCoverage( program, final_dir ) def update_variant( self, env, variant ): if variant == 'dbg': env.MergeFlags( self.values['debug_cxx_flags'] + self.values['debug_c_flags'] + self.values['debug_link_cxx_flags'] ) elif variant == 'rel': env.MergeFlags( self.values['release_cxx_flags'] + self.values['release_c_flags'] + self.values['release_link_cxx_flags'] ) elif variant == 'cov': env.MergeFlags( self.values['coverage_cxx_flags'] + self.values['coverage_c_flags'] ) env.Append( CXXFLAGS = self.values['coverage_cxx_flags'] ) env.AppendUnique( LINKFLAGS = self.values['coverage_link_cxx_flags'] ) if env['stdcpp']: env.ReplaceFlags( "-std={}".format(env['stdcpp']) ) def _initialise_toolchain( self, toolchain ): if toolchain == 'gcc34': self.values['sys_inc_prefix'] = '-I' else: self.values['sys_inc_prefix'] = '-isystem' self.values['sys_inc_suffix'] = '' CommonCxxFlags = [ '-Wall', '-fexceptions', '-g' ] CommonCFlags = [ '-Wall', '-g' ] if re.match( 'gcc4[3-6]', toolchain ): CommonCxxFlags += [ '-std=c++0x' ] elif re.match( 'gcc47', toolchain ): CommonCxxFlags += [ '-std=c++11' ] elif re.match( 'gcc4[8-9]', toolchain ): CommonCxxFlags += [ '-std=c++1y' ] elif re.match( 'gcc5[0-2]', toolchain ): CommonCxxFlags += [ '-std=c++1y' ] self.values['debug_cxx_flags'] = CommonCxxFlags + [] self.values['release_cxx_flags'] = CommonCxxFlags + [ '-O3', '-DNDEBUG' ] self.values['coverage_cxx_flags'] = CommonCxxFlags + [ '--coverage' ] self.values['debug_c_flags'] = CommonCFlags + [] self.values['release_c_flags'] = CommonCFlags + [ '-O3', '-DNDEBUG' ] self.values['coverage_c_flags'] = CommonCFlags + [ '--coverage' ] CommonLinkCxxFlags = [] if cuppa.build_platform.name() == "Linux": CommonLinkCxxFlags = ['-rdynamic', '-Wl,-rpath=.' 
] self.values['debug_link_cxx_flags'] = CommonLinkCxxFlags self.values['release_link_cxx_flags'] = CommonLinkCxxFlags self.values['coverage_link_cxx_flags'] = CommonLinkCxxFlags + [ '--coverage' ] DynamicLibraries = [] if cuppa.build_platform.name() == "Linux": DynamicLibraries = [ 'pthread', 'rt' ] self.values['dynamic_libraries'] = DynamicLibraries def __get_gcc_coverage( self, object_dir, source ): # -l = --long-file-names # -p = --preserve-paths # -b = --branch-probabilities return 'gcov -o ' + object_dir \ + ' -l -p -b ' \ + source + ' > ' + source + '_summary.gcov' def abi_flag( self, env ): if env['stdcpp']: return '-std={}'.format(env['stdcpp']) elif re.match( 'gcc4[3-6]', self._reported_version ): return '-std=c++0x' elif re.match( 'gcc47', self._reported_version ): return '-std=c++11' elif re.match( 'gcc4[8-9]', self._reported_version ): return '-std=c++1y' elif re.match( 'gcc5[0-2]', self._reported_version ): return '-std=c++1y' def stdcpp_flag_for( self, standard ): return "-std={}".format( standard ) def error_format( self ): return "{}:{}: {}" @classmethod def output_interpretors( cls ): return [ { 'title' : "Fatal Error", 'regex' : r"(FATAL:[ \t]*(.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "In File Included From", 'regex' : r"(In file included\s+|\s+)(from\s+)([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+)(:[0-9]+)?)([,:])", 'meaning' : 'message', 'highlight' : set( [ 1, 3, 4 ] ), 'display' : [ 1, 2, 3, 4, 7 ], 'file' : 3, 'line' : None, 'column' : None, }, { 'title' : "In Function Info", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:[ \t]+([iI]n ([cC]lass|[cC]onstructor|[dD]estructor|[fF]unction|[mM]ember [fF]unction|[sS]tatic [fF]unction|[sS]tatic [mM]ember [fF]unction).*))", 'meaning' : 'message', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 2 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Skipping Instantiation Contexts 2", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):[0-9]+)(:[ \t]+(\[[ \t]+[Ss]kipping [0-9]+ instantiation contexts[, \t]+.*\]))", 'meaning' : 'message', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Skipping Instantiation Contexts", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+))(:[ \t]+(\[[ \t]+[Ss]kipping [0-9]+ instantiation contexts[ \t]+\]))", 'meaning' : 'message', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 2, 'line' : None, 'column' : None, }, { 'title' : "Instantiated From", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+))(:[ \t]+([iI]nstantiated from .*))", 'meaning' : 'message', 'highlight' : set( [ 1, 2] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Instantiation of", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:[ \t]+(In instantiation of .*))", 'meaning' : 'message', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 2 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Required From", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):[0-9]+)(:[ \t]+required from .*)", 'meaning' : 'message', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Warning 2", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):([0-9]+))(:[ \t]([Ww]arning:[ \t].*))", 'meaning' : 'warning', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 5 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Note 2", 'regex' : r"([][{}() 
\t#%$~\w&_:+/\.-]+)(:([0-9]+):[0-9]+)(:[ \t]([Nn]ote:[ \t].*))", 'meaning' : 'message', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Note", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+))(:[ \t]([Nn]ote:[ \t].*))", 'meaning' : 'message', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "General Note", 'regex' : r"([Nn]ote:[ \t].*)", 'meaning' : 'message', 'highlight' : set( [ 1 ] ), 'display' : [ 1 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Compiler Error 2", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):[0-9]+)(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Compiler Warning", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+))(:[ \t]([Ww]arning:[ \t].*))", 'meaning' : 'warning', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Undefined Reference 2", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+\.o:([][{}() \t#%$~\w&_:+/\.-]+):([0-9]+))(:[ \t](undefined reference.*))", 'meaning' : 'warning', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : 2, 'line' : None, 'column' : None, }, { 'title' : "Compiler Error", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+))(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Warning", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:\(\.text\+[0-9a-fA-FxX]+\))(:[ \t]([Ww]arning:[ \t].*))", 'meaning' : 'warning', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Error", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:([0-9]+):[0-9]+)(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1, 2 ] ), 'display' : [ 1, 2, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Error 2", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+\(.text\+[0-9A-Za-z]+\):([ \tA-Za-z0-9_:+/\.-]+))(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : 1, 'line' : None, 'column' : None, }, { 'title' : "Linker Error 3", 'regex' : r"(([][{}() \t#%$~\w&_:+/\.-]+):\(\.text\+[0-9a-fA-FxX]+\))(:(.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : 2, 'line' : None, 'column' : None, }, { 'title' : "Linker Error - lib not found", 'regex' : r"(.*(ld.*):[ \t](cannot find.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Linker Error - cannot open output file", 'regex' : r"(.*(ld.*):[ \t](cannot open output file.*))(:[ \t](.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 4 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Linker Error - unrecognized option", 'regex' : r"(.*(ld.*))(:[ \t](unrecognized option.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 3 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "No such File or Directory", 'regex' : r"(.*:(.*))(:[ \t](No such file or directory.*))", 'meaning' : 'error', 'highlight' : set( [ 1 ] ), 'display' : [ 1, 3 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Undefined Reference", 'regex' : r"([][{}() \t#%$~\w&_:+/\.-]+)(:[ \t](undefined reference.*))", 'meaning' : 'error', 
'highlight' : set( [ 1 ] ), 'display' : [ 1, 2 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "General Warning", 'regex' : r"([Ww]arning:[ \t].*)", 'meaning' : 'warning', 'highlight' : set( [ 1 ] ), 'display' : [ 1 ], 'file' : None, 'line' : None, 'column' : None, }, { 'title' : "Auto-Import Info", 'regex' : r"(([Ii]nfo:[ \t].*)\(auto-import\))", 'meaning' : 'message', 'highlight' : set( [ 1 ] ), 'display' : [ 1 ], 'file' : None, 'line' : None, 'column' : None, }, ] PK-¹ƒFó§H=ÔÔcuppa/modules/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) PK-¹ƒF!t.cuppa/modules/registration.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Registraion #------------------------------------------------------------------------------- def get_module_list( path, base=None ): from os import listdir from re import match from os.path import dirname paths = listdir( dirname( path ) ) def unique( seq ): seen = set() seen_add = seen.add return [ x for x in seq if x not in seen and not seen_add(x) ] return unique( [ base and '.'.join( [ base, f.replace('.py','') ] ) or f.replace('.py','') for f in paths for f in paths if match( '[^_.~].*\.py$', f ) ] ) def add_to_env( module_name, env, *args ): __call_classmethod_for_classes_in_module( 'cuppa', module_name, __package('cuppa'), "add_to_env", env, *args ) def add_options( module_name ): import SCons.Script __call_classmethod_for_classes_in_module( 'cuppa', module_name, __package('cuppa'), "add_options", SCons.Script.AddOption ) def get_options( module_name, env ): __call_classmethod_for_classes_in_module( 'cuppa', module_name, __package('cuppa'), "get_options", env ) def init_env_for_variant( module_name, sconscript_exports ): __call_classmethod_for_classes_in_module( 'cuppa', module_name, __package('cuppa'), "init_env_for_variant", sconscript_exports ) #------------------------------------------------------------------------------- import inspect import imp import sys def __package( name ): package = None try: filehandle, pathname, description = imp.find_module( name, None ) try: try: module = sys.modules[ name ] except KeyError, (e): module = imp.load_module( name, filehandle, pathname, description ) package = pathname finally: if filehandle: filehandle.close() except ImportError, (e): pass return package def __call_classmethod_for_classes_in_module( package, name, path, method, *args, **kwargs ): try: filehandle, pathname, description = imp.find_module( name, path and [ path ] or None ) try: try: qualified_name = package and package + "." + name or name module = sys.modules[ qualified_name ] except KeyError, (e): module = imp.load_module( name, filehandle, pathname, description ) for member_name in dir( module ): member = getattr( module, member_name ) if inspect.ismodule( member ): if package: parent_package = package + "." 
+ name else: parent_package = name __call_classmethod_for_classes_in_module( parent_package, member_name, pathname, method, *args, **kwargs ) elif inspect.isclass( member ): try: function = getattr( member, method ) if callable( function ): function( *args, **kwargs ) except AttributeError, (e): pass finally: if filehandle: filehandle.close() except ImportError, (e): pass #------------------------------------------------------------------------------- PK-¹ƒF3´¾¾cuppa/scms/subversion.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Subversion Source Control Management System #------------------------------------------------------------------------------- import subprocess import shlex import re from exceptions import Exception class SubversionException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) def info( path ): if not path: raise SubversionException("No working copy path specified for calling svnversion with.") url = None repository = None branch = None revision = None try: command = "svn info {}".format( path ) svn_info = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ) url = re.search( r'URL: ([^\s]+)', svn_info ).expand(r'\1') repository = re.search( r'Repository Root: ([^\s]+)', svn_info ).expand(r'\1') branch = re.search( r'Relative URL: \^/([^\s]+)', svn_info ).expand(r'\1') revision = re.search( r'Revision: (\d+)', svn_info ).expand(r'\1') except subprocess.CalledProcessError: raise SubversionException("Not a Subversion working copy") try: command = "svnversion -n {}".format( path ) revision = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT ) except subprocess.CalledProcessError: pass return url, repository, branch, revision PK-¹ƒFITXŸÓÓcuppa/scms/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) PKF©ÜF ê »‡‡cuppa/scms/mercurial.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. 
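# ---------------------------------------------------------------------------
# Illustrative usage (not part of the original scms package): subversion.py
# above, and the mercurial and git modules that follow, each expose an
# info( path ) helper returning the tuple ( url, repository, branch, revision )
# and raising their own exception type when the path is not a working copy.
# The fallback order below is an assumption for the sketch, not cuppa's
# actual dispatch logic.
def _scm_info( path ):
    import cuppa.scms.git
    import cuppa.scms.mercurial
    import cuppa.scms.subversion
    for scm in ( cuppa.scms.git, cuppa.scms.mercurial, cuppa.scms.subversion ):
        try:
            return scm.info( path )  # ( url, repository, branch, revision )
        except Exception:
            # each module raises its own *Exception type for "not a working copy"
            continue
    return None, None, None, None
# ---------------------------------------------------------------------------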
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Mercurial Source Control Management System #------------------------------------------------------------------------------- import subprocess import shlex import os from exceptions import Exception class MercurialException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) def info( path ): if not path: raise MercurialException("No working copy path specified for calling hg commands with.") url = None repository = None branch = None revision = None if not os.path.exists( os.path.join( path, ".hg" ) ): raise MercurialException("Not a Mercurial working copy") try: command = "hg summary" summary = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT, cwd=path ).strip().split('\n') revision = "" branch = "" for line in summary: if not revision and line.startswith( 'parent: ' ): revision = line.replace( 'parent: ', '' ) if branch: break elif not branch and line.startswith( 'branch: ' ): branch = line.replace( 'branch: ', '' ) if revision: break command = "hg path" repository = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT, cwd=path ).strip().split('=')[1] url = repository except subprocess.CalledProcessError: raise MercurialException("Not a Mercurial working copy") return url, repository, branch, revision PK|»ÔFdƒÌ••cuppa/scms/git.py # Copyright Jamie Allsop 2014-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # Git Source Control Management System #------------------------------------------------------------------------------- import subprocess import shlex import os from exceptions import Exception class GitException(Exception): def __init__(self, value): self.parameter = value def __str__(self): return repr(self.parameter) def info( path ): if not path: raise GitException("No working copy path specified for calling git commands with.") url = None repository = None branch = None revision = None if not os.path.exists( os.path.join( path, ".git" ) ): raise GitException("Not a Git working copy") try: command = "git describe --always" revision = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT, cwd=path ).strip() command = "git symbolic-ref HEAD" branch = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT, cwd=path ) branch = branch.replace( "refs/heads/", "" ).strip() command = "git config --get remote.origin.url" repository = subprocess.check_output( shlex.split( command ), stderr=subprocess.STDOUT, cwd=path ).strip() url = repository except subprocess.CalledProcessError: raise GitException("Not a Git working copy") return url, repository, branch, revision PKqüFÒ­¬Ö~W~W#cuppa/cpp/run_patched_boost_test.py # Copyright Jamie Allsop 2011-2015 # Copyright Declan Traynor 2012 # Distributed under the Boost Software License, Version 1.0. 
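# ---------------------------------------------------------------------------
# Illustrative reconstruction (not part of the original module): the
# ProcessStdout parser below matches console lines from a test binary built
# against the patched Boost.Test earlier in this archive. The named groups
# have been stripped from the regexes in this dump; this sketch restores the
# "Entering test case" pattern using the group names referenced by the
# matches.group(...) calls in the code, with the original character classes
# left as-is. The sample line at the end is an assumed example of the format.
import re

_ENTER_TEST_CASE = re.compile(
    r'(?:(?P<file>[a-zA-Z0-9._/\s\-]+)[(](?P<line>[0-9]+)[)]: )?'
    r'Entering test case "(?P<test>[a-zA-Z0-9(){}\[\]:;&_<>\-, =]+)"' )

# A typical line, with the leading file(line) prefix coming from the
# file/line information the patch adds to each test unit:
#   example_test.cpp(42): Entering test case "my_first_test"
# ---------------------------------------------------------------------------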
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # RunPatchedBoostTest #------------------------------------------------------------------------------- import os import sys import shlex import re import cuppa.test_report.cuppa_json import cuppa.build_platform from cuppa.output_processor import IncrementalSubProcess class Notify(object): def __init__( self, scons_env, show_test_output ): self._show_test_output = show_test_output self._toolchain = scons_env['toolchain'] self._colouriser = scons_env['colouriser'] self.master_suite = {} self.master_suite['status'] = 'passed' def enter_suite(self, suite): sys.stdout.write( self._colouriser.emphasise( "\nStarting Test Suite [%s]\n" % suite ) ) def exit_suite(self, suite): sys.stdout.write( self._colouriser.emphasise( "\nTest Suite Finished [%s] " % suite['name'] ) ) label = suite['status'].upper() meaning = suite['status'] store_durations( suite ) sys.stdout.write( self._colouriser.highlight( meaning, " = %s = " % label ) ) self.__write_time( suite ) total_tests = int(suite['total_tests']) passed_tests = int(suite['passed_tests']) failed_tests = int(suite['failed_tests']) expected_failures = int(suite['expected_failures']) skipped_tests = int(suite['skipped_tests']) aborted_tests = int(suite['aborted_tests']) total_assertions = int(suite['total_assertions']) passed_assertions = int(suite['passed_assertions']) failed_assertions = int(suite['failed_assertions']) if total_assertions > 0: if suite['status'] == 'passed': sys.stdout.write( self._colouriser.highlight( meaning, " ( %s of %s Assertions Passed )" % (passed_assertions, total_assertions) ) ) else: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s of %s Assertions Failed )" % (failed_assertions, total_assertions) ) ) else: sys.stdout.write( self._colouriser.colour( 'notice', " ( No Assertions Checked )" ) ) if suite['status'] == 'passed' and passed_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Passed ) " % (passed_tests, passed_tests > 1 and 'Test Cases' or 'Test Case') ) ) elif suite['status'] != 'passed': self.master_suite['status'] = 'failed' if failed_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Failed ) " % (failed_tests, failed_tests > 1 and 'Test Cases' or 'Test Case') ) ) if expected_failures > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Expected ) " % (expected_failures, expected_failures > 1 and 'Failures' or 'Failure') ) ) if skipped_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Skipped ) " % (skipped_tests, skipped_tests > 1 and 'Test Cases' or 'Test Case') ) ) if aborted_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Aborted ) " % (aborted_tests, aborted_tests > 1 and 'Test Cases Were' or 'Test Case Was') ) ) sys.stdout.write('\n\n') def enter_test(self, test_case): pass sys.stdout.write( self._colouriser.emphasise( "\nRunning Test Case [%s] ...\n" % test_case ) ) def exit_test( self, test_case ): label = test_case['status'] meaning = test_case['status'] sys.stdout.write( self._colouriser.highlight( meaning, " = %s = " % label ) ) self.__write_time( test_case ) assertions = int(test_case['total']) passed = int(test_case['passed']) failed = int(test_case['failed']) if test_case['status'] == 'passed' and passed > 0: sys.stdout.write( self._colouriser.colour( meaning, " ( %s of %s Assertions Passed )" % ( 
passed, assertions ) ) ) if failed > 0: sys.stdout.write( self._colouriser.colour( meaning, " ( %s of %s Assertions Failed )" % ( failed, assertions ) ) ) if test_case['total'] == 0: sys.stdout.write( self._colouriser.colour( 'notice'," ( No Assertions )" ) ) sys.stdout.write('\n') def __write_time( self, results ): sys.stdout.write( " Time:" ) if 'wall_duration' in results: sys.stdout.write( " Wall [ %s ]" % self._colouriser.emphasise_time_by_digit( results['wall_duration'] ) ) sys.stdout.write( " CPU [ %s ]" % self._colouriser.emphasise_time_by_digit( results['cpu_duration'] ) ) if 'wall_cpu_percent' in results: wall_cpu_percent = results['wall_cpu_percent'].upper() format = "%6s%%" if wall_cpu_percent == "N/A": format = "%5s " wall_cpu_percent = format % wall_cpu_percent sys.stdout.write( " CPU/Wall [ %s ]" % self._colouriser.colour( 'time', wall_cpu_percent ) ) def message(self, line): sys.stdout.write( line + "\n" ) def store_durations( results ): if 'cpu_time' in results: results['cpu_duration'] = duration_from_elapsed(results['cpu_time']) if 'wall_time' in results: results['wall_duration'] = duration_from_elapsed(results['wall_time']) if 'user_time' in results: results['user_duration'] = duration_from_elapsed(results['user_time']) if 'sys_time' in results: results['sys_duration'] = duration_from_elapsed(results['sys_time']) class State: waiting, test_suite, test_case = range(3) class ProcessStdout: def __init__( self, log, branch_root, notify ): self._log = open( log, "w" ) self._branch_root = branch_root self._notify = notify self._state = State.waiting self._test_case_names = [] self._test_cases = {} self._test_suites = {} self._master_test_suite = 'Master Test Suite' self._test = None def entered_test_suite( self, line ): matches = re.match( r'(?:(?P[a-zA-Z0-9._/\s\-]+)?[(](?P[0-9]+)[)]: )?' 'Entering test suite "(?P[a-zA-Z0-9(){}:&_<>/\-, ]+)"', line.strip() ) if matches and matches.group('suite') != self._master_test_suite: self.suite = matches.group('suite') self._test_suites[self.suite] = {} self._test_suites[self.suite]['name'] = self.suite self._test_suites[self.suite]['cpu_time'] = 0 self._test_suites[self.suite]['wall_time'] = 0 self._test_suites[self.suite]['user_time'] = 0 self._test_suites[self.suite]['sys_time'] = 0 self._test_suites[self.suite]['total_tests'] = 0 self._test_suites[self.suite]['expected_failures'] = 0 self._test_suites[self.suite]['passed_tests'] = 0 self._test_suites[self.suite]['failed_tests'] = 0 self._test_suites[self.suite]['skipped_tests'] = 0 self._test_suites[self.suite]['aborted_tests'] = 0 self._test_suites[self.suite]['total_assertions'] = 0 self._test_suites[self.suite]['passed_assertions'] = 0 self._test_suites[self.suite]['failed_assertions'] = 0 self._notify.enter_suite(self.suite) return True return False def leaving_test_suite( self, line ): matches = re.match( r'Leaving test suite "(?P[a-zA-Z0-9(){}:&_<>/\-, ]+)"' '\. Test suite (?Ppassed|failed)\.' '(?: (?P.*))?', line.strip() ) if matches and matches.group('suite') != self._master_test_suite: suite = self._test_suites[matches.group('suite')] if matches.group('status'): suite['status'] = matches.group('status') if matches.group('results'): self.store_suite_results(suite, matches.group('results')) self._notify.exit_suite(suite) return True else: return False def entered_test_case( self, line ): matches = re.match( r'(?:(?P[a-zA-Z0-9._/\s\-]+)[(](?P[0-9]+)[)]: )?' 
'Entering test case "(?P[a-zA-Z0-9(){}\[\]:;&_<>\-, =]+)"', line.strip() ) if matches: name = matches.group('test') self._test = '[' + self.suite + '] ' + name self._test_cases[ self._test ] = {} self._test_cases[ self._test ]['suite'] = self.suite self._test_cases[ self._test ]['fixture'] = self.suite self._test_cases[ self._test ]['key'] = self._test self._test_cases[ self._test ]['name'] = name self._test_cases[ self._test ]['stdout'] = [] self._test_cases[ self._test ]['file'] = matches.group('file') self._test_cases[ self._test ]['line'] = matches.group('line') self._test_cases[ self._test ]['cpu_time'] = 0 self._test_cases[ self._test ]['branch_dir'] = os.path.relpath( matches.group('file'), self._branch_root ) self._test_cases[ self._test ]['total'] = 0 self._test_cases[ self._test ]['assertions'] = 0 self._test_cases[ self._test ]['passed'] = 0 self._test_cases[ self._test ]['failed'] = 0 self._notify.enter_test(self._test) return True return False def leaving_test_case( self, line ): test = self._test_cases[self._test] matches = re.match( r'Leaving test case "(?:[a-zA-Z0-9(){}\[\]:;&_<>\-, =]+)"' '(?:; testing time: (?P[a-zA-Z0-9.s ,+=()%/]+))?' '\. Test case (?Ppassed|failed|skipped|aborted)\.' '(?: (?P.*))?', line.strip() ) if matches: self.__capture_times( matches.group('testing_time'), test ) if matches.group('status'): test['status'] = matches.group('status') if matches.group('results'): self.store_test_results(test, matches.group('results')) self._test_case_names.append( test['key'] ) self._notify.exit_test(test) return True else: test['stdout'].append( line ) self._notify.message(line) return False def __capture_times( self, time, results ): if time: time = time.strip() test_time = re.match( '(?:(P[0-9]+)(?Pms|mks))', time ) if test_time: multiplier = test_time.group('units') == 'ms' and 1000000 or 1000 subseconds = int(test_time.group('test_time')) total_nanosecs = subseconds * multiplier results['cpu_time'] = total_nanosecs cpu_times = re.match( r'(?P[0-9.]+)s wall, ' '(?P[0-9.]+)s user [+] ' '(?P[0-9.]+)s system [=] ' '(?P[0-9.]+)s CPU [(](?P[nN/aA0-9.]+)%?[)]', time ) if cpu_times: results['wall_time'] = nanosecs_from_time( cpu_times.group('wall_time') ) results['user_time'] = nanosecs_from_time( cpu_times.group('user_time') ) results['sys_time'] = nanosecs_from_time( cpu_times.group('sys_time') ) results['cpu_time'] = nanosecs_from_time( cpu_times.group('cpu_time') ) self._test_suites[results['suite']]['wall_time'] += results['wall_time'] self._test_suites[results['suite']]['user_time'] += results['user_time'] self._test_suites[results['suite']]['sys_time'] += results['sys_time'] results['wall_cpu_percent'] = cpu_times.group('wall_cpu_percent') self._test_suites[results['suite']]['cpu_time'] += results['cpu_time'] store_durations( results ) else: results['cpu_duration'] = duration_from_elapsed(0) ## For backward compatibility - remove later results['elapsed'] = results['cpu_time'] def __call__( self, line ): self._log.write( line + '\n' ) if self._state == State.waiting: if self.entered_test_suite( line ): self._state = State.test_suite elif self.leaving_test_suite( line ): self._state = State.waiting elif self._state == State.test_suite: if self.entered_test_case( line ): self._state = State.test_case elif self.entered_test_suite( line ): self._state = State.test_suite elif self.leaving_test_suite( line ): self._state = State.waiting elif self._state == State.test_case: if self.leaving_test_case( line ): self._state = State.test_suite def __exit__( self, type, value, 
traceback ): if self._log: self._log.close() def tests( self ): return [ self._test_cases[ name ] for name in self._test_case_names ] def store_test_results(self, test, results): matches = [] for result in results.split('.'): matched = re.match( r'(?P[0-9]+) assertions? out of (?P[0-9]+) (?Ppassed|failed)', result.strip() ) if matched: matches.append(matched) for match in matches: count = match.group('count') total = match.group('total') status = match.group('status') test['total'] = total if status == 'passed': test['passed'] = count elif status == 'failed': test['failed'] = count ## For backward compatibility - remove later test['assertions'] = test['total'] def store_suite_results(self, suite, results): matches = [] for result in results.split('.'): matched = re.match( r'(?P[0-9]+) (?Passertions?|test cases?|failures?) ' '((?Pexpected)|(out of (?P[0-9]+) ' '(?Ppassed|failed|skipped|aborted)))', result.strip() ) if matched: matches.append(matched) for match in matches: count = match.group('count') type = match.group('type') expected_failures = match.group('expected') total = match.group('total') status = match.group('status') if not expected_failures: if type.startswith('test case'): suite['total_tests'] = total elif type.startswith('assertion'): suite['total_assertions'] = total else: suite['expected_failures'] = count if status == 'passed': if type.startswith('test case'): suite['passed_tests'] = count elif type.startswith('assertion'): suite['passed_assertions'] = count elif status == 'failed': if type.startswith('test case'): suite['failed_tests'] = count elif type.startswith('assertion'): suite['failed_assertions'] = count elif status == 'skipped': suite['skipped_tests'] = count elif status == 'aborted': suite['aborted_tests'] = count class ProcessStderr: def __init__( self, log, notify ): self._log = open( log, "w" ) def __call__( self, line ): self._log.write( line + '\n' ) def __exit__( self, type, value, traceback ): if self._log: self._log.close() def stdout_file_name_from( program_file ): return program_file + '.stdout.log' def stderr_file_name_from( program_file ): return program_file + '.stderr.log' def report_file_name_from( program_file ): return program_file + '.report.json' def success_file_name_from( program_file ): return program_file + '.success' class RunPatchedBoostTestEmitter: def __init__( self, final_dir ): self._final_dir = final_dir def __call__( self, target, source, env ): program_file = os.path.join( self._final_dir, os.path.split( str( source[0] ) )[1] ) target = [] target.append( stdout_file_name_from( program_file ) ) target.append( stderr_file_name_from( program_file ) ) target.append( report_file_name_from( program_file ) ) target.append( success_file_name_from( program_file ) ) return target, source class RunPatchedBoostTest: def __init__( self, expected ): self._expected = expected def __call__( self, target, source, env ): executable = str( source[0].abspath ) working_dir = os.path.split( executable )[0] program_path = source[0].path notifier = Notify(env, env['show_test_output']) if cuppa.build_platform.name() == "Windows": executable = '"' + executable + '"' test_command = executable + " --boost.test.log_format=hrf --boost.test.log_level=test_suite --boost.test.report_level=no" print "cuppa: RunPatchedBoostTest: [" + test_command + "]" try: return_code, tests = self._run_test( program_path, test_command, working_dir, env['branch_root'], notifier ) cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), tests ) if 
return_code < 0: self._write_file_to_stderr( stderr_file_name_from( program_path ) ) print >> sys.stderr, "cuppa: RunPatchedBoostTest: Test was terminated by signal: ", -return_code elif return_code > 0: self._write_file_to_stderr( stderr_file_name_from( program_path ) ) print >> sys.stderr, "cuppa: RunPatchedBoostTest: Test returned with error code: ", return_code elif notifier.master_suite['status'] != 'passed': print >> sys.stderr, "cuppa: RunPatchedBoostTest: Not all test suites passed. " if return_code: self._remove_success_file( success_file_name_from( program_path ) ) else: self._write_success_file( success_file_name_from( program_path ) ) return None except OSError, e: print >> sys.stderr, "Execution of [", test_command, "] failed with error: ", e return 1 def _run_test( self, program_path, test_command, working_dir,branch_root, notifier ): process_stdout = ProcessStdout( stdout_file_name_from( program_path ), branch_root, notifier ) process_stderr = ProcessStderr( stderr_file_name_from( program_path ), notifier ) return_code = IncrementalSubProcess.Popen2( process_stdout, process_stderr, shlex.split( test_command ), cwd=working_dir ) return return_code, process_stdout.tests() def _write_file_to_stderr( self, file_name ): error_file = open( file_name, "r" ) for line in error_file: print >> sys.stderr, line error_file.close() def _write_success_file( self, file_name ): with open( file_name, "w" ) as success_file: success_file.write( "success" ) def _remove_success_file( self, file_name ): try: os.remove( file_name ) except: pass def nanosecs_from_time( time_in_seconds ): seconds, subseconds = time_in_seconds.split('.') nanoseconds = subseconds decimal_places = len(subseconds) if decimal_places < 9: nanoseconds = int(subseconds) * 10**(9-decimal_places) return int(seconds) * 1000000000 + int(nanoseconds) def duration_from_elapsed( total_nanosecs ): secs, remainder = divmod( total_nanosecs, 1000000000 ) millisecs, remainder = divmod( remainder, 1000000 ) microsecs, nanosecs = divmod( remainder, 1000 ) hours, remainder = divmod( secs, 3600 ) minutes, secs = divmod( remainder, 60 ) duration = "%02d:%02d:%02d.%03d,%03d,%03d" % ( hours, minutes, secs, millisecs, microsecs, nanosecs ) return duration PK-¹ƒFó§H=ÔÔcuppa/cpp/__init__.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) PKý«þFÉ1é3ÿ\ÿ\cuppa/cpp/run_boost_test.py # Copyright Jamie Allsop 2015-2015 # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # RunBoostTest #------------------------------------------------------------------------------- import os import sys import shlex import re import cuppa.timer import cuppa.test_report.cuppa_json import cuppa.build_platform from cuppa.output_processor import IncrementalSubProcess class Notify(object): def __init__( self, scons_env, show_test_output ): self._show_test_output = show_test_output self._toolchain = scons_env['toolchain'] self._colouriser = scons_env['colouriser'] self.master_suite = {} self.master_suite['status'] = 'passed' def enter_suite(self, suite): sys.stdout.write( self._colouriser.emphasise( "\nStarting Test Suite [%s]\n" % suite ) ) def exit_suite(self, suite): sys.stdout.write( self._colouriser.emphasise( "\nTest Suite Finished [%s] " % suite['name'] ) ) label = suite['status'].upper() meaning = suite['status'] sys.stdout.write( self._colouriser.highlight( meaning, " = {} = ".format( suite['status'].upper() ) ) ) sys.stdout.write('\n') sys.stdout.write( self._colouriser.emphasise( "\nSummary\n" ) ) for test in suite['tests']: sys.stdout.write( self._colouriser.emphasise( "\nTest case [{}]".format( test['name'] ) ) + '\n' ) self._write_test_case( test ) sys.stdout.write('\n') sys.stdout.write( self._colouriser.highlight( meaning, " = %s = " % label ) ) cuppa.timer.write_time( suite['total_cpu_times'], self._colouriser ) passed_tests = suite['passed_tests'] failed_tests = suite['failed_tests'] expected_failures = suite['expected_failures'] skipped_tests = suite['skipped_tests'] aborted_tests = suite['aborted_tests'] total_assertions = suite['total_assertions'] passed_assertions = suite['passed_assertions'] failed_assertions = suite['failed_assertions'] if total_assertions > 0: if suite['status'] == 'passed': sys.stdout.write( self._colouriser.highlight( meaning, " ( %s of %s Assertions Passed )" % (passed_assertions, total_assertions) ) ) else: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s of %s Assertions Failed )" % (failed_assertions, total_assertions) ) ) else: sys.stdout.write( self._colouriser.colour( 'notice', " ( No Assertions Checked )" ) ) if suite['status'] == 'passed' and passed_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Passed ) " % (passed_tests, passed_tests > 1 and 'Test Cases' or 'Test Case') ) ) elif suite['status'] != 'passed': self.master_suite['status'] = 'failed' if failed_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Failed ) " % (failed_tests, failed_tests > 1 and 'Test Cases' or 'Test Case') ) ) if expected_failures > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Expected ) " % (expected_failures, expected_failures > 1 and 'Failures' or 'Failure') ) ) if len( skipped_tests ): number = len( skipped_tests ) sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Skipped ) " % (number, number > 1 and 'Test Cases' or 'Test Case') ) ) if aborted_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Aborted ) " % (aborted_tests, aborted_tests > 1 and 'Test Cases Were' or 'Test Case Was') ) ) sys.stdout.write('\n\n') def _write_test_case( self, test_case ): label = test_case['status'] meaning = test_case['status'] sys.stdout.write( self._colouriser.highlight( meaning, " = %s = " % label ) ) cuppa.timer.write_time( test_case['cpu_times'], self._colouriser 
) assertions = test_case['total'] passed = test_case['passed'] failed = test_case['failed'] if test_case['status'] == 'passed' and passed > 0: sys.stdout.write( self._colouriser.colour( meaning, " ( %s of %s Assertions Passed )" % ( passed, assertions ) ) ) if failed > 0: sys.stdout.write( self._colouriser.colour( meaning, " ( %s of %s Assertions Failed )" % ( failed, assertions ) ) ) if test_case['total'] == 0: sys.stdout.write( self._colouriser.colour( 'notice'," ( No Assertions )" ) ) sys.stdout.write('\n') def enter_test_case(self, test_case): sys.stdout.write( self._colouriser.emphasise( "\nRunning Test Case [%s] ...\n" % test_case['key'] ) ) test_case['timer'] = cuppa.timer.Timer() def exit_test_case( self, test ): self._write_test_case( test ) def failed_assertion(self, line ): def as_error( text ): return self._colouriser.as_error( text ) def emphasise( text ): return self._colouriser.emphasise( text ) def start_error(): return self._colouriser.start_colour( "error" ) def reset(): return self._colouriser.reset() matches = re.match( r'(?P[a-zA-Z0-9._/\s\-]+)[(](?P[0-9]+)[)]: ' '(?P[a-zA-Z0-9(){}:&_<>/\-=!," \[\]]+)', line ) if matches: path = matches.group( 'file' ) line = matches.group( 'line' ) message = matches.group( 'message') error = self._toolchain.error_format() sys.stdout.write( error.format( start_error() + emphasise( path ) + start_error(), emphasise( line ) + start_error(), message + reset() ) + "\n" ) else: sys.stdout.write( self._colouriser.colour( "error", line ) + "\n" ) def message(self, line): if self._show_test_output: sys.stdout.write( line + "\n" ) class State: waiting, test_suite, test_case = range(3) class ProcessStdout: def __init__( self, log, branch_root, notify ): self.log = open( log, "w" ) self.branch_root = branch_root self.notify = notify self.state = State.waiting self.test_case_names = [] self.test_cases = {} self.test_suites = {} self.master_test_suite = 'Master Test Suite' def entered_test_suite( self, line ): matches = re.match( r'(?:(?P[a-zA-Z0-9._/\s\-]+)?[(](?P[0-9]+)[)]: )?' 'Entering test suite "(?P[a-zA-Z0-9(){}:&_<>/\-, ]+)"', line.strip() ) if matches and matches.group('suite') != self.master_test_suite: self.suite = matches.group('suite') self.test_suites[self.suite] = {} self.test_suites[self.suite]['name'] = self.suite self.test_suites[self.suite]['total_tests'] = 0 self.test_suites[self.suite]['expected_failures'] = 0 self.test_suites[self.suite]['passed_tests'] = 0 self.test_suites[self.suite]['failed_tests'] = 0 self.test_suites[self.suite]['skipped_tests'] = [] self.test_suites[self.suite]['aborted_tests'] = 0 self.test_suites[self.suite]['total_assertions'] = 0 self.test_suites[self.suite]['passed_assertions'] = 0 self.test_suites[self.suite]['failed_assertions'] = 0 self.test_suites[self.suite]['total_cpu_times'] = cuppa.timer.CpuTimes( 0, 0, 0, 0 ) self.test_suites[self.suite]['tests'] = [] self.notify.enter_suite(self.suite) return True return False def leaving_test_suite( self, line ): matches = re.match( r'Leaving test suite "(?P[a-zA-Z0-9(){}:&_<>/\-, ]+)"' '(\. Test suite (?Ppassed|failed)\.' 
'(?: (?P.*))?)?', line.strip() ) if matches and matches.group('suite') != self.master_test_suite: suite = self.test_suites[matches.group('suite')] if matches.group('status'): suite['status'] = matches.group('status') if matches.group('results'): self.store_suite_results(suite, matches.group('results')) else: self.collate_suite_results(suite) self.notify.exit_suite(suite) return True else: return False def skipped_test_case( self, line ): matches = re.match( r'Test "(?P[a-zA-Z0-9(){}\[\]:;&_<>\-, =]+)" is skipped', line.strip() ) if matches: name = matches.group('test') self.test_suites[self.suite]['skipped_tests'].append( name ) return True return False def entered_test_case( self, line ): matches = re.match( r'Entering test case "(?P[a-zA-Z0-9(){}\[\]:;&_<>\-, =]+)"', line.strip() ) if matches: name = matches.group('test') self.test_suites[self.suite]['tests'].append( {} ) test_case = self.test_suites[self.suite]['tests'][-1] test_case['suite'] = self.suite test_case['fixture'] = self.suite test_case['key'] = '[' + self.suite + '] ' + name test_case['name'] = name test_case['stdout'] = [] test_case['total'] = 0 test_case['assertions'] = 0 test_case['passed'] = 0 test_case['failed'] = 0 test_case['skipped'] = False test_case['aborted'] = 0 self.notify.enter_test_case( test_case ) return True return False def leaving_test_case( self, line ): test_case = self.test_suites[self.suite]['tests'][-1] matches = re.match( r'Leaving test case "(?:[a-zA-Z0-9(){}\[\]:;&_<>\-, =]+)"' '(?:; testing time: (?P[a-zA-Z0-9.s ,+=()%/]+))?' '(\. Test case (?Ppassed|failed|skipped|aborted)\.' '(?: (?P.*))?)?', line.strip() ) if matches: test_case['timer'].stop() test_case['cpu_times'] = test_case['timer'].elapsed() if matches.group('status'): test_case['status'] = matches.group('status') else: test_case['status'] = 'passed' if matches.group('results'): self.store_test_results(test_case, matches.group('results')) else: self.collate_test_case_results( test_case ) self.test_case_names.append( test_case['key'] ) self.notify.exit_test_case(test_case) return True else: test_case['stdout'].append( line ) self.notify.message(line) return False def handle_assertion( self, line ): test_case = self.test_suites[self.suite]['tests'][-1] is_assertion = False write_line = True matches = re.match( r'.*\s(?Ppassed|failed)(\s[\[][^\[\]]+[\]])?$', line.strip() ) if matches: is_assertion = True write_line = False status = matches.group('status') test_case['assertions'] = test_case['assertions'] + 1 test_case[status] = test_case[status] + 1 if status == 'failed': write_line = True self.notify.failed_assertion(line) return is_assertion, write_line def __call__( self, line ): if not self.state == State.test_case: self.log.write( line + '\n' ) if self.state == State.waiting: if self.entered_test_suite( line ): self.state = State.test_suite elif self.leaving_test_suite( line ): self.state = State.waiting elif self.state == State.test_suite: if self.entered_test_case( line ): self.state = State.test_case elif self.skipped_test_case( line ): self.state = State.test_suite elif self.entered_test_suite( line ): self.state = State.test_suite elif self.leaving_test_suite( line ): self.state = State.waiting elif self.state == State.test_case: is_assertion, write_line = self.handle_assertion( line ) if write_line: self.log.write( line + '\n' ) if not is_assertion: if self.leaving_test_case( line ): self.state = State.test_suite def __exit__( self, type, value, traceback ): if self.log: self.log.close() def tests( self ): tests = [] for suite in 
self.test_suites.itervalues(): for test_case in suite['tests']: tests.append( test_case ) return tests def collate_test_case_results( self, test ): test['status'] = ( test['failed'] or test['aborted'] ) and 'failed' or 'passed' test['total'] = test['assertions'] test['cpu_time'] = test['cpu_times'].process test['wall_time'] = test['cpu_times'].wall test['user_time'] = test['cpu_times'].user test['sys_time'] = test['cpu_times'].system test['cpu_duration'] = cuppa.timer.as_duration_string( test['cpu_time'] ) test['wall_duration'] = cuppa.timer.as_duration_string( test['wall_time'] ) test['user_duration'] = cuppa.timer.as_duration_string( test['user_time'] ) test['sys_duration'] = cuppa.timer.as_duration_string( test['sys_time'] ) test['wall_cpu_percent'] = cuppa.timer.as_wall_cpu_percent_string( test['cpu_times'] ) test_suite = self.test_suites[test['suite']] test_suite['passed_tests'] = test_suite['passed_tests'] + ( test['passed'] and 1 or 0 ) test_suite['failed_tests'] = test_suite['failed_tests'] + ( test['failed'] and 1 or 0 ) test_suite['aborted_tests'] = test_suite['aborted_tests'] + ( test['aborted'] and 1 or 0 ) test_suite['total_assertions'] = test_suite['total_assertions'] + test['total'] test_suite['passed_assertions'] = test_suite['passed_assertions'] + test['passed'] + test['skipped'] test_suite['failed_assertions'] = test_suite['failed_assertions'] + test['failed'] + test['aborted'] test_suite['total_cpu_times'] += test['cpu_times'] def store_test_results(self, test, results): matches = [] for result in results.split('.'): matched = re.match( r'(?P[0-9]+) assertions? out of (?P[0-9]+) (?Ppassed|failed)', result.strip() ) if matched: matches.append(matched) for match in matches: count = match.group('count') total = match.group('total') status = match.group('status') test['total'] = total if status == 'passed': test['passed'] = count elif status == 'failed': test['failed'] = count ## For backward compatibility - remove later test['assertions'] = test['total'] def collate_suite_results( self, suite ): suite['status'] = suite['failed_assertions'] and 'failed' or 'passed' suite['cpu_time'] = suite['total_cpu_times'].process suite['wall_time'] = suite['total_cpu_times'].wall suite['user_time'] = suite['total_cpu_times'].user suite['sys_time'] = suite['total_cpu_times'].system suite['cpu_duration'] = cuppa.timer.as_duration_string( suite['cpu_time'] ) suite['wall_duration'] = cuppa.timer.as_duration_string( suite['wall_time'] ) suite['user_duration'] = cuppa.timer.as_duration_string( suite['user_time'] ) suite['sys_duration'] = cuppa.timer.as_duration_string( suite['sys_time'] ) suite['wall_cpu_percent'] = cuppa.timer.as_wall_cpu_percent_string( suite['total_cpu_times'] ) def store_suite_results(self, suite, results): matches = [] for result in results.split('.'): matched = re.match( r'(?P[0-9]+) (?Passertions?|test cases?|failures?) 
' '((?Pexpected)|(out of (?P[0-9]+) ' '(?Ppassed|failed|skipped|aborted)))', result.strip() ) if matched: matches.append(matched) for match in matches: count = match.group('count') type = match.group('type') expected_failures = match.group('expected') total = match.group('total') status = match.group('status') if not expected_failures: if type.startswith('test case'): suite['total_tests'] = total elif type.startswith('assertion'): suite['total_assertions'] = total else: suite['expected_failures'] = count if status == 'passed': if type.startswith('test case'): suite['passed_tests'] = count elif type.startswith('assertion'): suite['passed_assertions'] = count elif status == 'failed': if type.startswith('test case'): suite['failed_tests'] = count elif type.startswith('assertion'): suite['failed_assertions'] = count elif status == 'skipped': suite['skipped_tests'] = count elif status == 'aborted': suite['aborted_tests'] = count class ProcessStderr: def __init__( self, log, notify ): self.log = open( log, "w" ) def __call__( self, line ): self.log.write( line + '\n' ) def __exit__( self, type, value, traceback ): if self.log: self.log.close() def stdout_file_name_from( program_file ): return program_file + '.stdout.log' def stderr_file_name_from( program_file ): return program_file + '.stderr.log' def report_file_name_from( program_file ): return program_file + '.report.json' def success_file_name_from( program_file ): return program_file + '.success' class RunBoostTestEmitter: def __init__( self, final_dir ): self._final_dir = final_dir def __call__( self, target, source, env ): program_file = os.path.join( self._final_dir, os.path.split( str( source[0] ) )[1] ) target = [] target.append( stdout_file_name_from( program_file ) ) target.append( stderr_file_name_from( program_file ) ) target.append( report_file_name_from( program_file ) ) target.append( success_file_name_from( program_file ) ) return target, source class RunBoostTest: def __init__( self, expected ): self._expected = expected def __call__( self, target, source, env ): executable = str( source[0].abspath ) working_dir = os.path.split( executable )[0] program_path = source[0].path notifier = Notify(env, env['show_test_output']) if cuppa.build_platform.name() == "Windows": executable = '"' + executable + '"' test_command = executable + " --log_format=hrf --log_level=all --report_level=no" print "cuppa: RunBoostTest: [" + test_command + "]" try: return_code, tests = self.__run_test( program_path, test_command, working_dir, env['branch_root'], notifier ) cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), tests ) if return_code < 0: self.__write_file_to_stderr( stderr_file_name_from( program_path ) ) print >> sys.stderr, "cuppa: RunBoostTest: Test was terminated by signal: ", -return_code elif return_code > 0: self.__write_file_to_stderr( stderr_file_name_from( program_path ) ) print >> sys.stderr, "cuppa: RunBoostTest: Test returned with error code: ", return_code elif notifier.master_suite['status'] != 'passed': print >> sys.stderr, "cuppa: RunBoostTest: Not all test suites passed. 
" if return_code: self._remove_success_file( success_file_name_from( program_path ) ) else: self._write_success_file( success_file_name_from( program_path ) ) return None except OSError as e: print >> sys.stderr, "Execution of [", test_command, "] failed with error: ", str(e) return 1 def __run_test( self, program_path, test_command, working_dir, branch_root, notifier ): process_stdout = ProcessStdout( stdout_file_name_from( program_path ), branch_root, notifier ) process_stderr = ProcessStderr( stderr_file_name_from( program_path ), notifier ) return_code = IncrementalSubProcess.Popen2( process_stdout, process_stderr, shlex.split( test_command ), cwd=working_dir ) return return_code, process_stdout.tests() def __write_file_to_stderr( self, file_name ): with open( file_name, "r" ) as error_file: for line in error_file: print >> sys.stderr, line def _write_success_file( self, file_name ): with open( file_name, "w" ) as success_file: success_file.write( "success" ) def _remove_success_file( self, file_name ): try: os.remove( file_name ) except: pass PKqüFw`À(ý2ý2cuppa/cpp/run_process_test.py # Copyright Jamie Allsop 2013-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # RunProcessTest #------------------------------------------------------------------------------- import os import sys import shlex import cuppa.timer import cuppa.progress import cuppa.test_report.cuppa_json from cuppa.output_processor import IncrementalSubProcess class TestSuite(object): suites = {} @classmethod def create( cls, name, scons_env ): if not name in cls.suites: cls.suites[name] = TestSuite( name, scons_env ) return cls.suites[name] def __init__( self, name, scons_env ): self._name = name self._scons_env = scons_env self._colouriser = scons_env['colouriser'] sys.stdout.write('\n') sys.stdout.write( self._colouriser.emphasise( "Starting Test Suite [{}]".format( name ) ) ) sys.stdout.write('\n') cuppa.progress.NotifyProgress.register_callback( scons_env, self.on_progress ) self._suite = {} self._suite['total_tests'] = 0 self._suite['passed_tests'] = 0 self._suite['failed_tests'] = 0 self._suite['expected_failures'] = 0 self._suite['skipped_tests'] = 0 self._suite['aborted_tests'] = 0 self._suite['total_cpu_times'] = cuppa.timer.CpuTimes( 0, 0, 0, 0 ) self._tests = [] def on_progress( self, progress, sconscript, variant, env, target, source ): if progress == 'finished': self.exit_suite() suite = env['build_dir'] del self.suites[suite] def enter_test( self, test, expected='passed' ) : sys.stdout.write( self._colouriser.emphasise( "\nTest [%s]..." 
% test ) + '\n' ) self._tests.append( {} ) test_case = self._tests[-1] test_case['name'] = test test_case['expected'] = expected test_case['suite'] = self._name test_case['timer'] = cuppa.timer.Timer() def exit_test( self, test, status='passed' ): test_case = self._tests[-1] test_case['timer'].stop() test_case['status'] = status cpu_times = test_case['timer'].elapsed() del test_case['timer'] test_case['cpu_times'] = cpu_times test_case['cpu_time'] = cpu_times.process test_case['wall_time'] = cpu_times.wall test_case['user_time'] = cpu_times.user test_case['sys_time'] = cpu_times.system test_case['cpu_duration'] = cuppa.timer.as_duration_string( test_case['cpu_time'] ) test_case['wall_duration'] = cuppa.timer.as_duration_string( test_case['wall_time'] ) test_case['user_duration'] = cuppa.timer.as_duration_string( test_case['user_time'] ) test_case['sys_duration'] = cuppa.timer.as_duration_string( test_case['sys_time'] ) test_case['wall_cpu_percent'] = cuppa.timer.as_wall_cpu_percent_string( cpu_times ) self._write_test_case( test_case ) self._suite['total_tests'] += 1 if status == 'passed': self._suite['passed_tests'] += 1 elif status == 'failed': self._suite['failed_tests'] += 1 elif status == 'expected_failure': self._suite['expected_failures'] += 1 elif status == 'aborted': self._suite['aborted_tests'] += 1 elif status == 'skipped': self._suite['skipped_tests'] += 1 self._suite['total_cpu_times'] += test_case['cpu_times'] sys.stdout.write('\n\n') def _write_test_case( self, test_case ): expected = test_case['expected'] == test_case['status'] passed = test_case['status'] == 'passed' meaning = test_case['status'] if not expected and passed: meaning = 'unexpected_success' label = " ".join( meaning.upper().split('_') ) cpu_times = test_case['cpu_times'] sys.stdout.write( self._colouriser.highlight( meaning, " = %s = " % label ) ) cuppa.timer.write_time( cpu_times, self._colouriser ) def exit_suite( self ): suite = self._suite total_tests = suite['total_tests'] passed_tests = suite['passed_tests'] + suite['expected_failures'] + suite['skipped_tests'] failed_tests = suite['failed_tests'] + suite['aborted_tests'] expected_failures = suite['expected_failures'] skipped_tests = suite['skipped_tests'] aborted_tests = suite['aborted_tests'] suite['status'] = 'passed' meaning = 'passed' if total_tests != passed_tests: suite['status'] = 'failed' meaning = 'failed' sys.stdout.write( self._colouriser.emphasise( "\nTest Suite [{}] ".format( self._name ) ) ) sys.stdout.write( self._colouriser.highlight( meaning, " = {} = ".format( suite['status'].upper() ) ) ) sys.stdout.write('\n') sys.stdout.write( self._colouriser.emphasise( "\nSummary\n" ) ) for test in self._tests: sys.stdout.write( self._colouriser.emphasise( "\nTest case [{}]".format( test['name'] ) ) + '\n' ) self._write_test_case( test ) sys.stdout.write('\n\n') if total_tests > 0: if suite['status'] == 'passed': sys.stdout.write( self._colouriser.highlight( meaning, " ( %s of %s Test Cases Passed )" % ( passed_tests, total_tests ) ) ) else: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s of %s Test Cases Failed )" % (failed_tests, total_tests) ) ) else: sys.stdout.write( self._colouriser.colour( 'notice', " ( No Test Cases Checked )" ) ) if passed_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Passed ) " % (passed_tests, passed_tests > 1 and 'Test Cases' or 'Test Case') ) ) if failed_tests > 0: sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Failed ) " % (failed_tests, failed_tests > 1 
and 'Test Cases' or 'Test Case') ) ) if expected_failures > 0: meaning = 'expected_failure' sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Expected ) " % (expected_failures, expected_failures > 1 and 'Failures' or 'Failure') ) ) if skipped_tests > 0: meaning = 'skipped' sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Skipped ) " % (skipped_tests, skipped_tests > 1 and 'Test Cases' or 'Test Case') ) ) if aborted_tests > 0: meaning = 'aborted' sys.stdout.write( self._colouriser.highlight( meaning, " ( %s %s Aborted ) " % (aborted_tests, aborted_tests > 1 and 'Test Cases Were' or 'Test Case Was') ) ) sys.stdout.write('\n') cuppa.timer.write_time( self._suite['total_cpu_times'], self._colouriser, True ) self._tests = [] self._suite = {} sys.stdout.write('\n\n') def message( self, line ): sys.stdout.write( line + "\n" ) def tests( self ): return self._tests def stdout_file_name_from( program_file ): return program_file + '.stdout.log' def stderr_file_name_from( program_file ): return program_file + '.stderr.log' def report_file_name_from( program_file ): return program_file + '.report.json' def success_file_name_from( program_file ): return program_file + '.success' class RunProcessTestEmitter(object): def __init__( self, final_dir ): self._final_dir = final_dir def __call__( self, target, source, env ): program_file = os.path.join( self._final_dir, os.path.split( source[0].path )[1] ) target = [] target.append( stdout_file_name_from( program_file ) ) target.append( stderr_file_name_from( program_file ) ) target.append( report_file_name_from( program_file ) ) target.append( success_file_name_from( program_file ) ) return target, source class ProcessStdout(object): def __init__( self, show_test_output, log ): self._show_test_output = show_test_output self.log = open( log, "w" ) def __call__( self, line ): self.log.write( line + '\n' ) if self._show_test_output: sys.stdout.write( line + '\n' ) def __exit__( self, type, value, traceback ): if self.log: self.log.close() class ProcessStderr(object): def __init__( self, show_test_output, log ): self._show_test_output = show_test_output self.log = open( log, "w" ) def __call__( self, line ): self.log.write( line + '\n' ) if self._show_test_output: sys.stderr.write( line + '\n' ) def __exit__( self, type, value, traceback ): if self.log: self.log.close() class RunProcessTest(object): def __init__( self, expected ): self._expected = expected def __call__( self, target, source, env ): executable = str( source[0].abspath ) working_dir, test = os.path.split( executable ) program_path = source[0].path suite = env['build_dir'] if cuppa.build_platform.name() == "Windows": executable = '"' + executable + '"' test_command = executable test_suite = TestSuite.create( suite, env ) test_suite.enter_test( test, expected=self._expected ) show_test_output = env['show_test_output'] try: return_code = self._run_test( show_test_output, program_path, test_command, working_dir ) if return_code < 0: self.__write_file_to_stderr( stderr_file_name_from( program_path ) ) print >> sys.stderr, "cuppa: ProcessTest: Test was terminated by signal: ", -return_code test_suite.exit_test( test, 'aborted' ) elif return_code > 0: self.__write_file_to_stderr( stderr_file_name_from( program_path ) ) print >> sys.stderr, "cuppa: ProcessTest: Test returned with error code: ", return_code test_suite.exit_test( test, 'failed' ) else: test_suite.exit_test( test, 'passed' ) cuppa.test_report.cuppa_json.write_report( report_file_name_from( program_path ), 
test_suite.tests() ) if return_code: self._remove_success_file( success_file_name_from( program_path ) ) else: self._write_success_file( success_file_name_from( program_path ) ) return None except OSError, e: print >> sys.stderr, "Execution of [", test_command, "] failed with error: ", e return 1 def _write_success_file( self, file_name ): with open( file_name, "w" ) as success_file: success_file.write( "success" ) def _remove_success_file( self, file_name ): try: os.remove( file_name ) except: pass def _run_test( self, show_test_output, program_path, test_command, working_dir ): process_stdout = ProcessStdout( show_test_output, stdout_file_name_from( program_path ) ) process_stderr = ProcessStderr( show_test_output, stderr_file_name_from( program_path ) ) return_code = IncrementalSubProcess.Popen2( process_stdout, process_stderr, shlex.split( test_command ), cwd=working_dir ) return return_code def __write_file_to_stderr( self, file_name ): error_file = open( file_name, "r" ) for line in error_file: print >> sys.stderr, line error_file.close() PKý«þFkY¨ÞÃÃcuppa/cpp/run_gcov_coverage.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # RunGcovCoverage #------------------------------------------------------------------------------- # python standard library imports import os import shlex import re import itertools import glob from SCons.Script import Glob # construct imports import cuppa.progress from cuppa.output_processor import IncrementalSubProcess, command_available def offset_path( path, env ): build_dir = env['build_dir'] offset_dir = env['offset_dir'] path = offset_dir + os.path.sep + os.path.relpath( path, build_dir ) if path.startswith( "." + os.path.sep ): path = path[2:] return path def coverage_base_name( sconscript_file ): if sconscript_file.startswith( "." + os.path.sep ): sconscript_file = sconscript_file[2:] sconscript_file = sconscript_file.replace( ".", '' ) sconscript_file = sconscript_file.replace( "sconscript", '' ) sconscript_file = sconscript_file.replace( os.path.sep, '.' 
) return sconscript_file + ".coverage" class WriteToString(object): def __init__( self ): self._output = [] def __call__( self, line ): self._output.append( line ) def string( self ): return "\n".join( self._output ) def run_command( command, working_dir ): print "cuppa: gcov: executing [{}]".format( command ) process_output = WriteToString() return_code = IncrementalSubProcess.Popen( process_output, shlex.split( command ), cwd=working_dir ) return return_code, process_output.string() class CoverageSuite(object): suites = {} @classmethod def create( cls, name, scons_env, final_dir ): if not name in cls.suites: cls.suites[name] = CoverageSuite( name, scons_env, final_dir ) return cls.suites[name] def __init__( self, name, scons_env, final_dir ): self._name = name self._scons_env = scons_env self._final_dir = final_dir cuppa.progress.NotifyProgress.register_callback( scons_env, self.on_progress ) self._suite = {} def on_progress( self, progress, sconscript, variant, env, target, source ): if progress == 'finished': self.exit_suite() del self.suites[self._name] def exit_suite( self ): env = self._scons_env self._run_gcovr( env['build_dir'], self._final_dir, env['working_dir'], env['sconscript_toolchain_build_dir'] ) def _run_gcovr( self, build_dir, output_dir, working_dir, sconscript_id ): command = 'gcovr -h' if not command_available( command ): print "cuppa: gcov: Skipping gcovr output as not available" return base_name = coverage_base_name( sconscript_id ) index_file = base_name + ".html" regex_filter = re.escape( os.path.join( build_dir, "" ) ) regex_filter = ".*" + regex_filter + ".*\.gcov" command = 'gcovr -g --gcov-filter="{}" -k -r . --html --html-details -o {}'.format( regex_filter, index_file ) return_code, output = run_command( command, working_dir ) new_index_file = os.path.join( output_dir, "coverage.html" ) try: os.rename( index_file, new_index_file ) except OSError as e: print "cuppa: gcov: Failed moving coverage file from [{}] to [{}] with error: {}".format( index_file, new_index_file, str(e) ) coverage_files = Glob( base_name + '*.html' ) for coverage_file in coverage_files: new_coverage_file = os.path.join( output_dir, str( coverage_file ) ) try: os.rename( str( coverage_file ), new_coverage_file ) except OSError as e: print "cuppa: gcov: Failed moving coverage file from [{}] to [{}] with error: {}".format( str( coverage_file ), new_coverage_file, str(e) ) print output class RunGcovCoverageEmitter(object): def __init__( self, program, final_dir ): self._program = program self._final_dir = final_dir self._program_id = '##' + os.path.split(str(program[0]))[1] def __call__( self, target, source, env ): for s in source: source_file = os.path.relpath( s.path, env['build_dir'] ) offset_source = offset_path( s.path, env ) gcov_source_path = offset_source.replace( os.path.sep, '#' ) gcno_file = os.path.splitext( source_file )[0] + '.gcno' gcda_file = os.path.splitext( source_file )[0] + '.gcda' gcov_log = source_file + self._program_id + '_gcov.log' env.Clean( source_file, [gcno_file, gcda_file] ) target.append( gcov_log ) gcov_files = Glob( gcov_source_path + '*' ) env.Clean( source_file, gcov_files ) env.Clean( source_file, os.path.join( self._final_dir, "coverage.html" ) ) base_name = coverage_base_name( env['sconscript_toolchain_build_dir'] ) coverage_files = Glob( os.path.join( self._final_dir, base_name + '*.html' ) ) env.Clean( source_file, coverage_files ) return target, source def iter_grouped( items, step=2, fillvalue=None ): it = iter( items ) return 
itertools.izip_longest( *[it]*step, fillvalue=fillvalue ) class RunGcovCoverage(object): def __init__( self, program, final_dir ): self._program = program self._final_dir = final_dir self._program_id = '##' + os.path.split(str(program[0]))[1] def __call__( self, target, source, env ): for s, t in itertools.izip( source, target ): gcov_path = os.path.splitext( os.path.splitext( t.path )[0] )[0] gcov_log = t.path self._run_gcov( env, s.path, gcov_path, gcov_log ) return None def _run_gcov( self, env, source_path, gcov_path, gcov_log_path ): working_dir = env['working_dir'] build_dir = env['build_dir'] final_dir = self._final_dir if not os.path.isabs( self._final_dir ): final_dir = os.path.normpath( os.path.join( build_dir, self._final_dir ) ) suite_name = working_dir coverage_suite = CoverageSuite.create( suite_name, env, final_dir ) command = 'gcov -o {} -l -p -r -c -b {}'.format( gcov_path, source_path ) return_code, output = run_command( command, working_dir ) if return_code == 0: gcov_source_path = source_path.replace( os.path.sep, '#' ) gcov_files = glob.glob( gcov_source_path + '*gcov' ) for gcov_file in gcov_files: filename, ext = os.path.splitext( str(gcov_file) ) filename = filename + self._program_id + ext new_gcov_file = os.path.join( build_dir, filename ) try: os.rename( str(gcov_file), new_gcov_file ) except OSError as e: print "cuppa: gcov: Failed moving gcov file [{}] to [{}] with error: {}".format( str(gcov_file), new_gcov_file, str(e) ) with open( gcov_log_path, 'w' ) as summary_file: summary_file.write( output ) else: print output os.remove( gcov_log_path ) PK•c×F•Ý«/îBîB$cuppa/cpp/create_version_file_cpp.py # Copyright Jamie Allsop 2011-2015 # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) #------------------------------------------------------------------------------- # CreateVersionFileCpp #------------------------------------------------------------------------------- import os from os.path import splitext, relpath, sep from SCons.Script import File import cuppa.location def offset_path( path, env ): build_dir = env['build_dir'] offset_dir = env['offset_dir'] return offset_dir + sep + relpath( path, build_dir) def hpp_from_cpp( cpp_file ): return splitext( cpp_file )[0] + '.hpp' def txt_from_cpp( cpp_file ): return splitext( cpp_file )[0] + '.txt' class CreateVersionHeaderCpp: def __init__( self, env, namespaces, version, location ): self.__env = env self.__namespace_guard = "_".join( namespaces ) self.__namespaces = namespaces self.__version = version self.__location = location self.__variant = self.__env['variant'].name() self.__working_dir = os.path.join( env['base_path'], env['build_dir'] ) if not os.path.exists( self.__working_dir ): os.makedirs( self.__working_dir ) def __call__( self, target, source, env ): cpp_file = offset_path( target[0].path, env ) hpp_file = hpp_from_cpp( cpp_file ) txt_file = txt_from_cpp( cpp_file ) output_dir = os.path.split( hpp_file )[0] if output_dir: output_dir = os.path.join( self.__working_dir, output_dir ) if not os.path.exists( output_dir ): os.makedirs( output_dir ) version_hpp = open( os.path.join( self.__working_dir, hpp_file ), "w" ) version_hpp.write( get_build_identity_header( self.__namespace_guard, self.__namespaces ) ) version_hpp.close() version_txt = open( os.path.join( self.__working_dir, txt_file ), "w" ) version_txt.write( get_build_identity_txt( self.__version, relpath( env['base_path'], self.__location), 
self.__namespaces ) ) version_txt.close() target[0] = File( cpp_file ) source.append( hpp_file ) source.append( txt_file ) return target, source def get_build_identity_txt( version, location, namespaces ): lines = [] lines += [ '// v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v\n' '// Version File for product version [ ' + version + ' ]\n' '// Location for dependency versions [ ' + location + ' ]\n' '// Namespace [ ' + "::".join( namespaces ) + ' ]\n' '// v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v v\n' ] return "\n".join( lines ) def get_build_identity_header( namespace_guard, namespaces ): lines = [] lines += [ '// G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G\n' '#ifndef INCLUDED_' + namespace_guard.upper() + '_BUILD_GENERATED_VERSION_HPP\n' '#define INCLUDED_' + namespace_guard.upper() + '_BUILD_GENERATED_VERSION_HPP\n' '// G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G\n' '\n' '// I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I\n' '#include \n' '#include \n' '#include \n' '// I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I\n' '\n' '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n' ] for namespace in namespaces: lines += [ 'namespace ' + namespace + ' {' ] lines += [ 'namespace build {\n' '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n\n' '\n' 'class identity\n' '{\n' 'public:\n' '\n' ' typedef std::string string_t;\n' ' typedef std::vector< string_t > revisions_t;\n' '\n' 'private:\n' '\n' ' struct dependency\n' ' {\n' ' dependency()\n' ' {\n' ' }\n' '\n' ' dependency( const string_t& Name,\n' ' const string_t& Version,\n' ' const string_t& Repository,\n' ' const string_t& Branch,\n' ' const revisions_t& Revisions )\n' ' : name ( Name )\n' ' , version ( Version )\n' ' , repository ( Repository )\n' ' , branch ( Branch )\n' ' , revisions ( Revisions )\n' ' {\n' ' }\n' '\n' ' string_t name;\n' ' string_t version;\n' ' string_t repository;\n' ' string_t branch;\n' ' revisions_t revisions;\n' ' };\n' '\n' 'public:\n' '\n' ' typedef dependency dependency_t;\n' ' typedef std::map< string_t, dependency > dependencies_t;\n' '\n' 'public:\n' ] lines += [ function_declaration_from_variable( 'product_version' ) ] lines += [ function_declaration_from_variable( 'product_repository' ) ] lines += [ function_declaration_from_variable( 'product_branch' ) ] lines += [ function_declaration_from_variable( 'product_revision' ) ] lines += [ function_declaration_from_variable( 'build_variant' ) ] lines += [ function_declaration_from_variable( 'build_time' ) ] lines += [ function_declaration_from_variable( 'build_user' ) ] lines += [ function_declaration_from_variable( 'build_host' ) ] lines += [ function_declaration_dependencies() ] lines += [ function_declaration_report() ] lines += [ '\nprivate:\n' ' static const dependencies_t Dependencies_;\n' ' static const string_t Report_;\n' '};\n' '\n' '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n\n' '} //end namespace build' ] for namespace in namespaces: lines += [ '} //end namespace ' + namespace ] lines += [ '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n\n' '\n' '// G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G\n' '#endif\n' '// G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G G\n' '\n' 
] return "\n".join( lines ) def function_declaration_from_variable( name ): lines = [] lines += [ ' static const char* ' + name + '();' ] return "\n".join( lines ) def function_declaration_dependencies(): lines = [] lines += [ ' static const dependencies_t& dependencies();' ] return "\n".join( lines ) def function_declaration_report(): lines = [] lines += [ ' static const char* report();' ] return "\n".join( lines ) class CreateVersionFileCpp: def __init__( self, env, namespaces, version, location ): self.__env = env self.__namespace_guard = "_".join( namespaces ) self.__namespaces = namespaces self.__version = version self.__location = location location = cuppa.location.Location( env, location ) self.__repository = location.repository() self.__branch = location.branch() self.__revision = location.revisions()[0] self.__variant = self.__env['variant'].name() def __call__( self, target, source, env ): cpp_file = target[0].path hpp_file = hpp_from_cpp( cpp_file ) #print "Create CPP Version File at [" + cpp_file + "]" version_cpp = open( cpp_file, "w" ) version_cpp.write( self.get_build_identity_source( env['BUILD_WITH'], hpp_file ) ) version_cpp.close() return None def function_definition_from_variable( self, name, variable ): lines = [] lines += [ '\nconst char* identity::' + name + '()' ] lines += [ '{' ] lines += [ ' return "' + str( variable ) + '";' ] lines += [ '}\n' ] return "\n".join( lines ) def function_definition_dependencies( self ): lines = [] lines += [ '\nconst identity::dependencies_t& identity::dependencies()\n' '{\n' ' return Dependencies_;\n' '}\n' ] return "\n".join( lines ) def initialise_dependencies_definition( self, dependencies ): lines = [] lines += [ '\nidentity::dependencies_t initialise_dependencies()\n' '{\n' ' typedef identity::dependencies_t dependencies_t;\n' ' typedef identity::dependency_t dependency_t;\n' ' typedef identity::revisions_t revisions_t;\n' ' dependencies_t Dependencies;' ] for name in dependencies: if name in self.__env['dependencies']: dependency = self.__env['dependencies'][name] lines += [ ' Dependencies[ "' + name + '" ] = dependency_t( "' + dependency.name() + '", "' + dependency.version() + '", "' + dependency.repository() + '", "' + dependency.branch() + '", revisions_t() );' ] try: if callable( getattr( dependency, 'revisions' ) ): revisions = dependency.revisions() if revisions: for revision in revisions: lines += [ ' Dependencies[ "' + name + '" ].revisions.push_back( "' + revision + '" );' ] except AttributeError: pass lines += [ ' return Dependencies;\n' '}\n' '\n' 'const identity::dependencies_t identity::Dependencies_ = initialise_dependencies();\n' ] return "\n".join( lines ) def function_definition_report( self ): lines = [] lines += [ '\nconst char* identity::report()' ] lines += [ '{' ] lines += [ ' return Report_.c_str();' ] lines += [ '}\n' ] return "\n".join( lines ) def initialise_report_definition( self ): lines = [] lines += [ '\nidentity::string_t initialise_report()\n' '{\n' ' std::ostringstream Report;\n' '\n' ' Report\n' ' << "Product:\\n"\n' ' " |- Version = " << identity::product_version() << "\\n"\n' ' " |- Repository = " << identity::product_repository() << "\\n"\n' ' " |- Branch = " << identity::product_branch() << "\\n"\n' ' " +- Revision = " << identity::product_revision() << "\\n"\n' ' "Build:\\n"\n' ' " |- Variant = " << identity::build_variant() << "\\n"\n' ' " |- Time = " << identity::build_time() << "\\n"\n' ' " |- User = " << identity::build_user() << "\\n"\n' ' " +- Host = " << identity::build_host() << 
"\\n";\n' '\n' ' if( !identity::dependencies().empty() )\n' ' {\n' ' Report << "Dependencies:\\n";\n' ' }\n' '\n' ' identity::dependencies_t::const_iterator Dependency = identity::dependencies().begin();\n' ' identity::dependencies_t::const_iterator End = identity::dependencies().end();\n' '\n' ' for( ; Dependency != End; ++Dependency )\n' ' {\n' ' Report\n' ' << " " << Dependency->second.name << "\\n"\n' ' << " |- Version = " << Dependency->second.version << "\\n"\n' ' << " |- Repository = " << Dependency->second.repository << "\\n"\n' ' << " |- Branch = " << Dependency->second.branch << "\\n";\n' '\n' ' identity::revisions_t::const_iterator Revision = Dependency->second.revisions.begin();\n' ' identity::revisions_t::const_iterator End = Dependency->second.revisions.end();\n' '\n' ' for( ; Revision != End; )\n' ' {\n' ' identity::string_t Value( *Revision );\n' ' if( ++Revision != End )\n' ' {\n' ' Report << " |";\n' ' }\n' ' else\n' ' {\n' ' Report << " +";\n' ' }\n' ' Report << "- Revision = " << Value << "\\n";\n' ' }\n' ' }\n' '\n' ' return Report.str();\n' '}\n' '\n' 'const identity::string_t identity::Report_ = initialise_report();' ] return "\n".join( lines ) def get_build_identity_source( self, dependencies, header_file ): from datetime import datetime from getpass import getuser from socket import gethostname build_time = datetime.utcnow() build_user = getuser() build_host = gethostname() lines = [] lines += [ '// I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I\n' '// Self Include' ] lines += [ '#include "' + header_file + '"' ] lines += [ '' '// C++ Standard Includes\n' '#include \n' '\n' '// I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I I\n' '\n' '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n' ] for namespace in self.__namespaces: lines += [ 'namespace ' + namespace + ' {' ] lines += [ 'namespace build {\n' '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n\n' ] lines += [ self.function_definition_from_variable( 'product_version', self.__version ) ] lines += [ self.function_definition_from_variable( 'product_repository', self.__repository ) ] lines += [ self.function_definition_from_variable( 'product_branch', self.__branch ) ] lines += [ self.function_definition_from_variable( 'product_revision', self.__revision ) ] lines += [ self.function_definition_from_variable( 'build_variant', self.__variant ) ] lines += [ self.function_definition_from_variable( 'build_time', build_time ) ] lines += [ self.function_definition_from_variable( 'build_user', build_user ) ] lines += [ self.function_definition_from_variable( 'build_host', build_host ) ] lines += [ self.initialise_dependencies_definition( dependencies ) ] lines += [ self.function_definition_dependencies() ] lines += [ self.initialise_report_definition() ] lines += [ self.function_definition_report() ] lines += [ '\n// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n\n' '} //end namespace build' ] for namespace in self.__namespaces: lines += [ '} //end namespace ' + namespace ] lines += [ '// n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n n\n' '\n' ] return "\n".join( lines ) PKfZGgž¿¿&cuppa-0.1.53.dist-info/DESCRIPTION.rstCuppa ===== A simple, extensible build system for use with `Scons `__. **Cuppa** is designed to leverage the capabilities of Scons, while allowing developers to focus on the task of describing what needs to be built. 
In general **cuppa** supports ``make``-like usage on the command-line. That is, developers can simply write: .. code:: sh scons -D and have Scons "do the right thing"; building targets for any ``sconscript`` files found in the current directory. **Cuppa** can be installed as a normal Python package or installed locally into a ``site_scons`` directory allowing it to be effortlessly integrated into any Scons setup. Note: ``-D`` tells ``scons`` to look for an ``sconstruct`` file in the current or in parent directories and, if it finds one, executes the ``sconscript`` files as if called from that directory. This ensures everything works as expected. For more details refer to the `Scons documentation `__ Quick Intro ----------- Get **cuppa** ~~~~~~~~~~~~~ The simplest way to get **cuppa** is to ``pip install`` it using: :: pip install cuppa Sample ``sconstruct`` file ~~~~~~~~~~~~~~~~~~~~~~~~~~ Let's look at a minimal ``sconstruct`` that makes use of **cuppa**. It could look like this: .. code:: python # Pull in all the Cuppa goodies.. import cuppa # Call sconscripts to do the work cuppa.run() Calling the ``run`` method in the ``cuppa`` module starts the build process calling ``sconscript`` files. Sample ``sconscript`` file ~~~~~~~~~~~~~~~~~~~~~~~~~~ Here is an example ``sconscript`` file that builds all \*.cpp files in the directory where it resides: .. code:: python Import( 'env' ) # Build all *.cpp source files as executables for Source in env.GlobFiles('*.cpp'): env.Build( Source[:-4], Source ) The ``env.Build()`` method is provided by **cuppa** and does essentially what ``env.Program()`` does, but in addition is both toolchain and variant aware, and can also provide notifications on progress. Note: ``Source[:-4]`` simply strips off the file extension ``.cpp``, that is, the last 4 characters of the file name. If our ``sconscript`` file was for a directory containing \*.cpp files that are actually tests then we could instead write the ``sconscript`` file as: .. code:: python Import( 'env' ) # Build all *.cpp source files as executables to be run as tests for Source in env.GlobFiles('*.cpp'): env.BuildTest( Source[:-4], Source ) The ``env.BuildTest()`` method is provided by **cuppa** and builds the sources just as ``env.Build()`` does. However, in addition, passing ``--test`` on the command-line will also result in the executable produced being run by a **runner**. The default test runner simply treats each executable as a test case and each directory of executables as a test suite. If the process executes cleanly the test passed; if not, it failed. To run this on the command-line we would write: .. code:: sh scons -D --test If we only want to build and test *debug* executables we can instead write this: .. code:: sh scons -D --dbg --test Or for release only pass ``--rel``. **cuppa** also makes it easy to work with dependencies. For example, if `boost `__ was a default dependency for all your ``sconscript`` files you could write your sconstruct file as follows: .. code:: python import cuppa cuppa.run( default_options = { 'boost-home': '' }, default_dependencies = [ 'boost' ] ) This will automatically ensure that the necessary includes and other compile options are set for the boost version that is found at ``boost-home``. If you need to link against specific boost libraries this can also be done in the sconscript file as follows: ..
code:: python Import('env') Test = 'my_complex_test' Sources = [ Test + '.cpp' ] env.AppendUnique( STATICLIBS = [ env.BoostStaticLibrary( 'system' ), env.BoostStaticLibrary( 'log' ), env.BoostStaticLibrary( 'thread' ), env.BoostStaticLibrary( 'timer' ), env.BoostStaticLibrary( 'chrono' ), env.BoostStaticLibrary( 'filesystem' ), ] ) env.BuildTest( Test, Sources ) The ``BoostStaticLibrary()`` method ensures that the library is built in the correct build variant as required. If you prefer to use dynamic linking, that can also be achieved using ``BoostSharedLibrary()``. The point is that the complexities of using `boost `__ as a dependency are encapsulated and managed separately from the sconstruct and sconscript files, allowing developers to focus on intent, not method. Design Principles ----------------- **cuppa** has been written primarily to provide a clean and structured way to leverage the power of Scons without the usual problems of hugely complex ``sconstruct`` files that diverge between projects. Key goals of **cuppa** are: - minimise the need for adding logic into ``sconscript`` files, keeping them as declarative as possible. - allow declarative ``sconscript``\ s that are both much clearer and significantly simpler than the equivalent ``make`` file, without the need to learn a whole new scripting language like ``make`` or ``cmake``. - provide a clear structure for extending the facilities offered by **cuppa** - provide a clear vocabulary for building projects - codify Scons best practices into **cuppa** itself so that users just need to call appropriate methods knowing that **cuppa** will do the right thing with their intent - provide a framework that allows experts to focus on providing facilities for others to use. Write once, use everywhere. For example, one person who knows how best to make `boost `__ available as a dependency can manage that dependency and allow others to use it seamlessly. More Details ------------ For more details refer to the `project homepage `__. Acknowledgements ---------------- This work is based on the build system used in `clearpool.io `__ during development of its next generation exchange platform.
cuppa-0.1.53.dist-info/entry_points.txt [cuppa.method.plugins] cuppa.test_report.generate_bitten_report = cuppa.test_report.generate_bitten_report:GenerateBittenReportMethod cuppa-0.1.53.dist-info/metadata.json {"classifiers": ["Topic :: Software Development :: Build Tools", "Intended Audience :: Developers", "Development Status :: 4 - Beta", "License :: OSI Approved", "Operating System :: OS Independent", "Programming Language :: Python :: 2"], "extensions": {"python.details": {"contacts": [{"name": "ja11sop", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/ja11sop/cuppa"}}, "python.exports": {"cuppa.method.plugins": {"cuppa.test_report.generate_bitten_report": "cuppa.test_report.generate_bitten_report:GenerateBittenReportMethod"}}}, "extras": [], "generator": "bdist_wheel (0.24.0)", "keywords": ["scons", "build", "c", ""], "license": "Boost Software License 1.0 - http://www.boost.org/LICENSE_1_0.txt", "metadata_version": "2.0", "name": "cuppa", "run_requires": [{"requires": ["colorama", "gcovr", "lxml", "grip"]}], "summary": "Cuppa, an extension package to simplify and extend Scons", "version": "0.1.53"} cuppa-0.1.53.dist-info/top_level.txt cuppa cuppa-0.1.53.dist-info/WHEEL Wheel-Version: 1.0 Generator: bdist_wheel (0.24.0) Root-Is-Purelib: true Tag: py2-none-any cuppa-0.1.53.dist-info/METADATA Metadata-Version: 2.0 Name: cuppa Version: 0.1.53 Summary: Cuppa, an extension package to simplify and extend Scons Home-page: https://github.com/ja11sop/cuppa Author: ja11sop Author-email: UNKNOWN License: Boost Software License 1.0 - http://www.boost.org/LICENSE_1_0.txt Keywords: scons,build,c++ Platform: UNKNOWN Classifier: Topic :: Software Development :: Build Tools Classifier: Intended Audience :: Developers Classifier: Development Status :: 4 - Beta Classifier: License :: OSI Approved Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2 Requires-Dist: colorama Requires-Dist: gcovr Requires-Dist: lxml Requires-Dist: grip
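The ``entry_points.txt`` above registers ``GenerateBittenReportMethod`` under the ``cuppa.method.plugins`` entry point group, the standard setuptools mechanism for advertising plugins. A minimal sketch of how such a registration can be discovered and loaded follows; it is illustrative only and is not necessarily how cuppa itself loads its plugin methods:

import pkg_resources

# Enumerate everything advertised under the group named in entry_points.txt
# and import the object each entry points at.
for entry_point in pkg_resources.iter_entry_points( group='cuppa.method.plugins' ):
    plugin = entry_point.load()  # e.g. the GenerateBittenReportMethod class
    print( "loaded [%s] -> [%r]" % ( entry_point.name, plugin ) )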
cuppa-0.1.53.dist-info/RECORD cuppa-0.1.53.dist-info/entry_points.txt,sha256=ZEldvxCauUUxSl8mebeLfj1jDD6r3Umcdam2qHqoqJM,135 cuppa-0.1.53.dist-info/RECORD,, cuppa-0.1.53.dist-info/metadata.json,sha256=bW9NRL6cFsX4iqXPlqSSCoXNP1874fUipUr8_N_5Y_M,983 cuppa-0.1.53.dist-info/WHEEL,sha256=54bVun1KfEBTJ68SHUmbxNPj80VxlQ0sHi4gZdGZXEY,92 cuppa-0.1.53.dist-info/DESCRIPTION.rst,sha256=gIOZuoowWxaVot78GAhcdlulTgEt6YcXJlFzUgG20PE,6335 cuppa-0.1.53.dist-info/METADATA,sha256=TMySR9NoUVMyK8flr8zQummFXSi260nELeqZUWAjk4I,7013 cuppa-0.1.53.dist-info/top_level.txt,sha256=2jJke2-J2Hsbn_aEY5_RR9Zh2A80XyqsQNpMxV07HQU,6 cuppa/__init__.py,sha256=gc-hRMWYAeSFR5j1O3535M8cPZGg1VMomFZZHQZz8FA,516 cuppa/tree.py,sha256=5z98sBZ3Lyez8MHeVm5v1jJuCNl3yxYxRJzUAr6NpLA,1930 cuppa/location.py,sha256=sSj9VkNeBAO_D4csMWCDk2R8Uk6_91-6s2vyE6VWW_I,15170 cuppa/timer.py,sha256=v-JPWTOz-1psSngA3Yui17Z4jz8H16TsIcufstbOtPU,4189 cuppa/build_with_header_library.py,sha256=QtFYqaRtA90ilBKLEdMxqXxazJRaoNIExIIYiWWzT2s,3860 cuppa/path.py,sha256=MFCZ2NXlfbO8SwujNKPM-tLzgObhdm1E6olbNYZkrXc,1145 cuppa/progress.py,sha256=qy8MnfGty1jPjh8uZZunjiyJQcTI3YRhS9TQVvfR92k,3568 cuppa/colourise.py,sha256=_3dGsDfGnvmt_e9PbMOzgEY-YNOwJT8jfK9WlWOOh-E,6981 cuppa/configure.py,sha256=JAS3p1yAQb4IC7RjRqqhTka5LQNgs-JFJZheYqVTpIU,7928 cuppa/build_platform.py,sha256=2GPf3tzur-siU5onvtKCroACXVW7EyTCA7p2ojj3HmE,1704 cuppa/recursive_glob.py,sha256=ZNjITAiUgow67-MUTqL1Qiwo7JsKval1Hgb9YbHm0Kk,2235 cuppa/options.py,sha256=F5YJ7GflfrxXWX_1MT9IFW3fTtqBRRsZ9KyIQ0FHbdM,610 cuppa/VERSION,sha256=-d7zIHYnq78TbGblmJgEQBrFW9SCPndBByKUqezwAKs,7 cuppa/version.py,sha256=j1er-79-d6RAP-GysQzvlNp3fzxPQx3UqVYG43P_t-s,1360 cuppa/output_processor.py,sha256=t8y3XI-K0ZVHSH7AsiU2QIKB30vSOMeSmXrr_FYH_o0,8062 cuppa/utility.py,sha256=7-6A_b_6iTpr9s7Es_TfI_YMXeKQuGuGi8tNu2eQT0o,584 cuppa/core.py,sha256=J8WkPfksNp73xS9qHBwyypY5nS5gIN6pVojyBqVwwmg,29312 cuppa/platforms/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/platforms/linux.py,sha256=uWU5jwK4v7QWZpOmYF6w5ZWzol4v_HACTkbtojnSUls,3661 cuppa/platforms/darwin.py,sha256=jq-Lo1iied-VPlzRtqk4XFR5TQjBdGE-UtciR4MrEfA,2983 cuppa/platforms/windows.py,sha256=WhXOWauscDiHmL0uZJqesUhHPjjAlaGCY2Hx37JNfTg,2251 cuppa/project_generators/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/project_generators/codeblocks.py,sha256=Dmx5S8hfILltpVfJ0xMdGgTrPxJcDS9h_0tYAQvJ12I,16964 cuppa/profiles/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/methods/coverage.py,sha256=cePK2iuZRk4FGpCBzmRLzk9AIDjfi5LY4fB_CExmF7U,1089 cuppa/methods/build_with.py,sha256=4DscZGiWywt2wM4T7r43XdIIp2EZvQ48rXZRynztql4,1643 cuppa/methods/using.py,sha256=XoJodh1X-iH15n2dtO-BrAAodlF3cHGfKtq1d1fFEX8,763
cuppa/methods/toolchain.py,sha256=mcJ-bwd1Ey2Z3QgSC2N5fSxKsqVHtswYySke7sf-dog,758 cuppa/methods/relative_recursive_glob.py,sha256=t4bxQZu63mXiNBYeRDOxhxvAk14kc1_sW_Pf75xsRFw,2098 cuppa/methods/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/methods/remove_flags.py,sha256=0DtlRa_JwMvz5g_eFkoP23blNqEeBzBjbpCxLsmebGs,1111 cuppa/methods/stdcpp.py,sha256=cP3LaDG2UJVf2HJHlUAfy7-zBoA98-sVo1fgBo3rzeA,1562 cuppa/methods/test.py,sha256=wBGSowuT-ohAvxOO0lHi1Fz62OJEWNNt0CPy9i6QAoE,1662 cuppa/methods/build_test.py,sha256=OulfJKXxDxGMIJwFBEA4XX5m1J5sfG6IOssFEE6ypAg,1801 cuppa/methods/compile.py,sha256=NCFl6as_LHY29amwmEMRUewRbpDexTdyhnroWBFVeus,1066 cuppa/methods/build.py,sha256=tgGAcRZ3QNzw_pBPW6L6als7_18HVoX1HdL991oe5J8,1846 cuppa/methods/replace_flags.py,sha256=TSPz2CI2OJuhmbbuffeT_EGrdpF6uIzSxTxow4mPUgk,687 cuppa/methods/create_version.py,sha256=9G_bZJsHJ7jF1VkfixkRgV0A4LpnXF-pzWuo5LUSA10,1100 cuppa/methods/markdown_to_html.py,sha256=Xjk1X4p36lyDmLqGcbvb5NXPnpj67jEscFVscFg6hCw,1922 cuppa/methods/build_library.py,sha256=yVeMVUGvhp4ARg9l1oS8DKLsXBRt886nZmQSDOUuwkM,1311 cuppa/methods/build_profile.py,sha256=sgwgWqeBllqIXDNXV1cjecADuy-L-JUESE1kLhcKvRY,1283 cuppa/variants/dbg.py,sha256=hF7h0C_mN7HycCGwKyElooyBZ7rh8rpn_jBLPSVl19I,904 cuppa/variants/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/variants/cov.py,sha256=qOl_Z8a2AxAJlw4yjV6M_UYfw1Whd25lojtdgZseOrI,951 cuppa/variants/test.py,sha256=sYwaQshL7M4osWAbGSTA3-gexjMwmTnt_qfcOP2509U,782 cuppa/variants/rel.py,sha256=0bX2JSLPpQeeEE1rO9zyZIukGMKbd7INy9H-FUqioRY,917 cuppa/dependencies/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/dependencies/build_with_qt4.py,sha256=wTWLssQYKH6mI_twDNgQ5XFznEocVT10h_IkroFf_l4,3417 cuppa/dependencies/build_with_quince.py,sha256=AT_zcdmmbF7Hx1qNT-TWc_bRy-eOW13LNkq6__ecXmw,10489 cuppa/dependencies/build_with_boost.py,sha256=-ZZYyoAyq4mDi_8q_xRVz18X4g2cmXYYGwmBEMTprNY,35686 cuppa/dependencies/build_with_qt5.py,sha256=XwDQQRY-oTirvbWkHz8MTtfP94tSNgyovTbunnqw2jQ,3419 cuppa/dependencies/boost/__init__.py,sha256=4W8VliAYUP1KY2gLJ_YDy2TmcXYVm-PY7XikQD_bFwA,2 cuppa/dependencies/boost/boost_test_patch.diff,sha256=AKahZU-fEukQU9Akr5_gmATvrzUOOZ48jIckq7_7iAc,36915 cuppa/test_report/__init__.py,sha256=4W8VliAYUP1KY2gLJ_YDy2TmcXYVm-PY7XikQD_bFwA,2 cuppa/test_report/generate_bitten_report.py,sha256=xcnS8fS_PG-fQiBQfgHVCFPWQBLG-RIJ7oaw0yPAY6s,3152 cuppa/test_report/cuppa_json.py,sha256=1oVLEaUN81J-E4r2qeZ84lSFbjNJ9lzWhASN66OmuR0,1409 cuppa/toolchains/clang.py,sha256=vqyOmisY504qd3Qj_nXjihHJu1R1or7WQi3FdYNcqCo,16837 cuppa/toolchains/__init__.py,sha256=OrSC_AV10c5wdwAnQjh3ys-s3KS_2__r9tFMLDC1TRU,298 cuppa/toolchains/cl.py,sha256=tVVB2GR3xGV6tG1wVL8KPVE9kGFjVgH5nDMkNiwvGho,6149 cuppa/toolchains/gcc.py,sha256=vrgy0Z76bf_RFZ8UnfUNvipSJYYA5jfJgbBlB_j09TQ,22777 cuppa/modules/__init__.py,sha256=_J9QQLbV3rEEW2zq_4O8ZDAGiWp-kWwn3_6DN9wdl3g,212 cuppa/modules/registration.py,sha256=vjAdMWNUQowY6P2uzVi-BZrDv1gg4V9JZ8h7uhtRy8Q,3604 cuppa/scms/subversion.py,sha256=RpajLcRpLtd1Cw-5Mv_eqFcDIKQyEcvRkxl1S7bLDYs,1726 cuppa/scms/__init__.py,sha256=2bcw9YCEUlyvy-sFAU8QaDVn--7Bd2VAHwjSAWKGN1k,211 cuppa/scms/mercurial.py,sha256=itzasF_xp0Mh9yJCYvGehDgMJWLyDja_qk3mKI03ets,1927 cuppa/scms/git.py,sha256=6zArRxHL-CbE94o0n-m8FaasUGbQtwCD3rhpZbuQpoU,1685 cuppa/cpp/run_patched_boost_test.py,sha256=mJfno3AiI2HxF7VvRnHfDB9uph12GzdSLMIra9dD3Fg,22398 cuppa/cpp/__init__.py,sha256=_J9QQLbV3rEEW2zq_4O8ZDAGiWp-kWwn3_6DN9wdl3g,212 
cuppa/cpp/run_boost_test.py,sha256=43yRJN6q-olmUbfZ9U0rAZBfB3z9kMCvLFs-rNfTMj8,23807 cuppa/cpp/run_process_test.py,sha256=3kEnsrmzeLTcOFJpD7BVdKM_Myq6YPOTlW4110maWxY,13053 cuppa/cpp/run_gcov_coverage.py,sha256=2GfrDX2uTTPnoDf8Y--6glvToEmVYEZhath-iku7Qzc,7875 cuppa/cpp/create_version_file_cpp.py,sha256=LhY0o3kF9Mh3poqgYiosc-zauSymYzC6wmmxh59Iv9E,17134
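Each entry in the RECORD above pairs a file path with a SHA-256 digest (urlsafe base64, unpadded, as used in wheel RECORD files) and a size in bytes. Below is a minimal sketch of recomputing such a digest for an installed file so it can be compared against its RECORD entry; the path used is just an example taken from the list above:

import base64
import hashlib

def record_digest( path ):
    # Hash the file and encode the digest the way wheel RECORD files do:
    # sha256, urlsafe base64, with the trailing '=' padding stripped.
    with open( path, 'rb' ) as f:
        data = f.read()
    digest = base64.urlsafe_b64encode( hashlib.sha256( data ).digest() ).rstrip( b'=' ).decode( 'ascii' )
    return 'sha256=' + digest, len( data )

# Compare against "cuppa/tree.py,sha256=5z98sBZ3Lyez8MHeVm5v1jJuCNl3yxYxRJzUAr6NpLA,1930"
print( record_digest( 'cuppa/tree.py' ) )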