author | Kenneth Heafield <github@kheafield.com> | 2012-10-22 12:07:20 +0100
committer | Kenneth Heafield <github@kheafield.com> | 2012-10-22 12:07:20 +0100
commit | 5f98fe5c4f2a2090eeb9d30c030305a70a8347d1
tree | 9b6002f850e6dea1e3400c6b19bb31a9cdf3067f /jam-files/boost-build
parent | cf9994131993b40be62e90e213b1e11e6b550143
parent | 21825a09d97c2e0afd20512f306fb25fed55e529
Merge remote branch 'upstream/master'
Conflicts:
Jamroot
bjam
decoder/Jamfile
decoder/cdec.cc
dpmert/Jamfile
jam-files/sanity.jam
klm/lm/Jamfile
klm/util/Jamfile
mira/Jamfile
Diffstat (limited to 'jam-files/boost-build')
185 files changed, 0 insertions, 51677 deletions
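The diff below is limited to jam-files/boost-build and is presumably taken against the first parent listed above. As a rough sketch only — assuming a local clone in which the hashes shown above are reachable — the same path-limited view could be regenerated with plain git:

    # summary corresponding to the diffstat above (first parent vs. merge commit)
    git diff --stat cf99941 5f98fe5 -- jam-files/boost-build

    # full patch text for the same path filter
    git diff cf99941 5f98fe5 -- jam-files/boost-build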
diff --git a/jam-files/boost-build/boost-build.jam b/jam-files/boost-build/boost-build.jam
deleted file mode 100644
index 73db0497..00000000
--- a/jam-files/boost-build/boost-build.jam
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2002 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-boost-build kernel ;
diff --git a/jam-files/boost-build/bootstrap.jam b/jam-files/boost-build/bootstrap.jam
deleted file mode 100644
index af3e8bf5..00000000
--- a/jam-files/boost-build/bootstrap.jam
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2003 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This file handles initial phase of Boost.Build loading.
-# Boost.Jam has already figured out where Boost.Build is
-# and loads this file, which is responsible for initialization
-# of basic facilities such a module system and loading the
-# main Boost.Build module, build-system.jam.
-#
-# Exact operation of this module is not interesting, it makes
-# sense to look at build-system.jam right away.
-
-# Load the kernel/bootstrap.jam, which does all the work.
-.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
-include $(.bootstrap-file) ;
\ No newline at end of file diff --git a/jam-files/boost-build/build-system.jam b/jam-files/boost-build/build-system.jam deleted file mode 100644 index 9f9c884c..00000000 --- a/jam-files/boost-build/build-system.jam +++ /dev/null @@ -1,1008 +0,0 @@ -# Copyright 2003, 2005, 2007 Dave Abrahams -# Copyright 2006, 2007 Rene Rivera -# Copyright 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This file is part of Boost Build version 2. You can think of it as forming the -# main() routine. It is invoked by the bootstrapping code in bootstrap.jam. - -import build-request ; -import builtin ; -import "class" : new ; -import errors ; -import feature ; -import make ; -import modules ; -import os ; -import path ; -import project ; -import property-set ; -import regex ; -import sequence ; -import targets ; -import toolset ; -import utility ; -import version ; -import virtual-target ; -import generators ; -import configure ; - -################################################################################ -# -# Module global data. -# -################################################################################ - -# Shortcut used in this module for accessing used command-line parameters. -.argv = [ modules.peek : ARGV ] ; - -# Flag indicating we should display additional debugging information related to -# locating and loading Boost Build configuration files. -.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ; - -# Legacy option doing too many things, some of which are not even documented. -# Should be phased out. -# * Disables loading site and user configuration files. -# * Disables auto-configuration for toolsets specified explicitly on the -# command-line. -# * Causes --toolset command-line options to be ignored. -# * Prevents the default toolset from being used even if no toolset has been -# configured at all. -.legacy-ignore-config = [ MATCH ^(--ignore-config)$ : $(.argv) ] ; - -# The cleaning is tricky. Say, if user says 'bjam --clean foo' where 'foo' is a -# directory, then we want to clean targets which are in 'foo' as well as those -# in any children Jamfiles under foo but not in any unrelated Jamfiles. To -# achieve this we collect a list of projects under which cleaning is allowed. -.project-targets = ; - -# Virtual targets obtained when building main targets references on the command -# line. When running 'bjam --clean main_target' we want to clean only files -# belonging to that main target so we need to record which targets are produced -# for it. -.results-of-main-targets = ; - -# Was an XML dump requested? -.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ; - -# Default toolset & version to be used in case no other toolset has been used -# explicitly by either the loaded configuration files, the loaded project build -# scripts or an explicit toolset request on the command line. If not specified, -# an arbitrary default will be used based on the current host OS. This value, -# while not strictly necessary, has been added to allow testing Boost-Build's -# default toolset usage functionality. -.default-toolset = ; -.default-toolset-version = ; - - -################################################################################ -# -# Public rules. -# -################################################################################ - -# Returns the property set with the free features from the currently processed -# build request. 
-# -rule command-line-free-features ( ) -{ - return $(.command-line-free-features) ; -} - - -# Returns the location of the build system. The primary use case is building -# Boost where it is sometimes needed to get the location of other components -# (e.g. BoostBook files) and it is convenient to use locations relative to the -# Boost Build path. -# -rule location ( ) -{ - local r = [ modules.binding build-system ] ; - return $(r:P) ; -} - - -# Sets the default toolset & version to be used in case no other toolset has -# been used explicitly by either the loaded configuration files, the loaded -# project build scripts or an explicit toolset request on the command line. For -# more detailed information see the comment related to used global variables. -# -rule set-default-toolset ( toolset : version ? ) -{ - .default-toolset = $(toolset) ; - .default-toolset-version = $(version) ; -} - -rule set-pre-build-hook ( function ) -{ - .pre-build-hook = $(function) ; -} - -rule set-post-build-hook ( function ) -{ - .post-build-hook = $(function) ; -} - -################################################################################ -# -# Local rules. -# -################################################################################ - -# Returns actual Jam targets to be used for executing a clean request. -# -local rule actual-clean-targets ( ) -{ - # Construct a list of projects explicitly detected as targets on this build - # system run. These are the projects under which cleaning is allowed. - for local t in $(targets) - { - if [ class.is-a $(t) : project-target ] - { - .project-targets += [ $(t).project-module ] ; - } - } - - # Construct a list of targets explicitly detected on this build system run - # as a result of building main targets. - local targets-to-clean ; - for local t in $(.results-of-main-targets) - { - # Do not include roots or sources. - targets-to-clean += [ virtual-target.traverse $(t) ] ; - } - targets-to-clean = [ sequence.unique $(targets-to-clean) ] ; - - local to-clean ; - for local t in [ virtual-target.all-targets ] - { - local p = [ $(t).project ] ; - - # Remove only derived targets. - if [ $(t).action ] - { - if $(t) in $(targets-to-clean) || - [ should-clean-project [ $(p).project-module ] ] = true - { - to-clean += $(t) ; - } - } - } - - local to-clean-actual ; - for local t in $(to-clean) - { - to-clean-actual += [ $(t).actualize ] ; - } - return $(to-clean-actual) ; -} - - -# Given a target id, try to find and return the corresponding target. This is -# only invoked when there is no Jamfile in ".". This code somewhat duplicates -# code in project-target.find but we can not reuse that code without a -# project-targets instance. -# -local rule find-target ( target-id ) -{ - local split = [ MATCH (.*)//(.*) : $(target-id) ] ; - - local pm ; - if $(split) - { - pm = [ project.find $(split[1]) : "." ] ; - } - else - { - pm = [ project.find $(target-id) : "." ] ; - } - - local result ; - if $(pm) - { - result = [ project.target $(pm) ] ; - } - - if $(split) - { - result = [ $(result).find $(split[2]) ] ; - } - - return $(result) ; -} - - -# Initializes a new configuration module. -# -local rule initialize-config-module ( module-name : location ? ) -{ - project.initialize $(module-name) : $(location) ; - if USER_MODULE in [ RULENAMES ] - { - USER_MODULE $(module-name) ; - } -} - - -# Helper rule used to load configuration files. Loads the first configuration -# file with the given 'filename' at 'path' into module with name 'module-name'. 
-# Not finding the requested file may or may not be treated as an error depending -# on the must-find parameter. Returns a normalized path to the loaded -# configuration file or nothing if no file was loaded. -# -local rule load-config ( module-name : filename : path + : must-find ? ) -{ - if $(.debug-config) - { - ECHO "notice: Searching" "$(path)" "for" "$(module-name)" - "configuration file" "$(filename)" "." ; - } - local where = [ GLOB $(path) : $(filename) ] ; - if $(where) - { - where = [ NORMALIZE_PATH $(where[1]) ] ; - if $(.debug-config) - { - ECHO "notice: Loading" "$(module-name)" "configuration file" - "$(filename)" "from" $(where) "." ; - } - - # Set source location so that path-constant in config files - # with relative paths work. This is of most importance - # for project-config.jam, but may be used in other - # config files as well. - local attributes = [ project.attributes $(module-name) ] ; - $(attributes).set source-location : $(where:D) : exact ; - modules.load $(module-name) : $(filename) : $(path) ; - project.load-used-projects $(module-name) ; - } - else - { - if $(must-find) - { - errors.user-error "Configuration file" "$(filename)" "not found in" - "$(path)" "." ; - } - if $(.debug-config) - { - ECHO "notice:" "Configuration file" "$(filename)" "not found in" - "$(path)" "." ; - } - } - return $(where) ; -} - - -# Loads all the configuration files used by Boost Build in the following order: -# -# -- test-config -- -# Loaded only if specified on the command-line using the --test-config -# command-line parameter. It is ok for this file not to exist even if specified. -# If this configuration file is loaded, regular site and user configuration -# files will not be. If a relative path is specified, file is searched for in -# the current folder. -# -# -- site-config -- -# Always named site-config.jam. Will only be found if located on the system -# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build -# path, in that order. Not loaded in case the test-config configuration file is -# loaded or either the --ignore-site-config or the --ignore-config command-line -# option is specified. -# -# -- user-config -- -# Named user-config.jam by default or may be named explicitly using the -# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment -# variable. If named explicitly the file is looked for from the current working -# directory and if the default one is used then it is searched for in the -# user's home directory and the Boost Build path, in that order. Not loaded in -# case either the test-config configuration file is loaded, --ignore-config -# command-line option is specified or an empty file name is explicitly -# specified. If the file name has been given explicitly then the file must -# exist. -# -# Test configurations have been added primarily for use by Boost Build's -# internal unit testing system but may be used freely in other places as well. -# -local rule load-configuration-files -{ - # Flag indicating that site configuration should not be loaded. - local ignore-site-config = - [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ; - - if $(.legacy-ignore-config) && $(.debug-config) - { - ECHO "notice: Regular site and user configuration files will be ignored" ; - ECHO "notice: due to the --ignore-config command-line option." 
; - } - - initialize-config-module test-config ; - local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ; - local uq = [ MATCH \"(.*)\" : $(test-config) ] ; - if $(uq) - { - test-config = $(uq) ; - } - if $(test-config) - { - local where = - [ load-config test-config : $(test-config:BS) : $(test-config:D) ] ; - if $(where) - { - if $(.debug-config) && ! $(.legacy-ignore-config) - { - ECHO "notice: Regular site and user configuration files will" ; - ECHO "notice: be ignored due to the test configuration being" - "loaded." ; - } - } - else - { - test-config = ; - } - } - - local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ; - local site-path = /etc $(user-path) ; - if [ os.name ] in NT CYGWIN - { - site-path = [ modules.peek : SystemRoot ] $(user-path) ; - } - - if $(ignore-site-config) && !$(.legacy-ignore-config) - { - ECHO "notice: Site configuration files will be ignored due to the" ; - ECHO "notice: --ignore-site-config command-line option." ; - } - - initialize-config-module site-config ; - if ! $(test-config) && ! $(ignore-site-config) && ! $(.legacy-ignore-config) - { - load-config site-config : site-config.jam : $(site-path) ; - } - - initialize-config-module user-config ; - if ! $(test-config) && ! $(.legacy-ignore-config) - { - local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ; - user-config = $(user-config[-1]) ; - user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ; - # Special handling for the case when the OS does not strip the quotes - # around the file name, as is the case when using Cygwin bash. - user-config = [ utility.unquote $(user-config) ] ; - local explicitly-requested = $(user-config) ; - user-config ?= user-config.jam ; - - if $(user-config) - { - if $(explicitly-requested) - { - # Treat explicitly entered user paths as native OS path - # references and, if non-absolute, root them at the current - # working directory. - user-config = [ path.make $(user-config) ] ; - user-config = [ path.root $(user-config) [ path.pwd ] ] ; - user-config = [ path.native $(user-config) ] ; - - if $(.debug-config) - { - ECHO "notice: Loading explicitly specified user" - "configuration file:" ; - ECHO " $(user-config)" ; - } - - load-config user-config : $(user-config:BS) : $(user-config:D) - : must-exist ; - } - else - { - load-config user-config : $(user-config) : $(user-path) ; - } - } - else if $(.debug-config) - { - ECHO "notice: User configuration file loading explicitly disabled." ; - } - } - - # We look for project-config.jam from "." upward. - # I am not sure this is 100% right decision, we might as well check for - # it only alonside the Jamroot file. However: - # - # - We need to load project-root.jam before Jamroot - # - We probably would need to load project-root.jam even if there's no - # Jamroot - e.g. to implement automake-style out-of-tree builds. - local file = [ path.glob "." : project-config.jam ] ; - if ! $(file) - { - file = [ path.glob-in-parents "." : project-config.jam ] ; - } - if $(file) - { - initialize-config-module project-config : $(file:D) ; - load-config project-config : project-config.jam : $(file:D) ; - } -} - - -# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or -# toolset=xx,yy,...zz in the command line. May return additional properties to -# be processed as if they had been specified by the user. 
-# -local rule process-explicit-toolset-requests -{ - local extra-properties ; - - local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ; - local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ; - - for local t in $(option-toolsets) $(feature-toolsets) - { - # Parse toolset-version/properties. - local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ; - local toolset-version = $((t-v,t,v)[1]) ; - local toolset = $((t-v,t,v)[2]) ; - local version = $((t-v,t,v)[3]) ; - - if $(.debug-config) - { - ECHO notice: [cmdline-cfg] Detected command-line request for - $(toolset-version): "toolset=" $(toolset) "version=" - $(version) ; - } - - # If the toolset is not known, configure it now. - local known ; - if $(toolset) in [ feature.values <toolset> ] - { - known = true ; - } - if $(known) && $(version) && ! [ feature.is-subvalue toolset - : $(toolset) : version : $(version) ] - { - known = ; - } - # TODO: we should do 'using $(toolset)' in case no version has been - # specified and there are no versions defined for the given toolset to - # allow the toolset to configure its default version. For this we need - # to know how to detect whether a given toolset has any versions - # defined. An alternative would be to do this whenever version is not - # specified but that would require that toolsets correctly handle the - # case when their default version is configured multiple times which - # should be checked for all existing toolsets first. - - if ! $(known) - { - if $(.debug-config) - { - ECHO "notice: [cmdline-cfg] toolset $(toolset-version) not" - "previously configured; attempting to auto-configure now" ; - } - toolset.using $(toolset) : $(version) ; - } - else - { - if $(.debug-config) - { - ECHO notice: [cmdline-cfg] toolset $(toolset-version) already - configured ; - } - } - - # Make sure we get an appropriate property into the build request in - # case toolset has been specified using the "--toolset=..." command-line - # option form. - if ! $(t) in $(.argv) && ! $(t) in $(feature-toolsets) - { - if $(.debug-config) - { - ECHO notice: [cmdline-cfg] adding toolset=$(t) to the build - request. ; - } - extra-properties += toolset=$(t) ; - } - } - - return $(extra-properties) ; -} - - -# Returns 'true' if the given 'project' is equal to or is a (possibly indirect) -# child to any of the projects requested to be cleaned in this build system run. -# Returns 'false' otherwise. Expects the .project-targets list to have already -# been constructed. -# -local rule should-clean-project ( project ) -{ - if ! $(.should-clean-project.$(project)) - { - local r = false ; - if $(project) in $(.project-targets) - { - r = true ; - } - else - { - local parent = [ project.attribute $(project) parent-module ] ; - if $(parent) && $(parent) != user-config - { - r = [ should-clean-project $(parent) ] ; - } - } - .should-clean-project.$(project) = $(r) ; - } - - return $(.should-clean-project.$(project)) ; -} - - -################################################################################ -# -# main() -# ------ -# -################################################################################ - -{ - if --version in $(.argv) - { - version.print ; - EXIT ; - } - - version.verify-engine-version ; - - load-configuration-files ; - - local extra-properties ; - # Note that this causes --toolset options to be ignored if --ignore-config - # is specified. - if ! 
$(.legacy-ignore-config) - { - extra-properties = [ process-explicit-toolset-requests ] ; - } - - - # We always load project in "." so that 'use-project' directives have any - # chance of being seen. Otherwise, we would not be able to refer to - # subprojects using target ids. - local current-project ; - if [ project.find "." : "." ] - { - current-project = [ project.target [ project.load "." ] ] ; - } - - - # In case there are no toolsets currently defined makes the build run using - # the default toolset. - if ! $(.legacy-ignore-config) && ! [ feature.values <toolset> ] - { - local default-toolset = $(.default-toolset) ; - local default-toolset-version = ; - if $(default-toolset) - { - default-toolset-version = $(.default-toolset-version) ; - } - else - { - default-toolset = gcc ; - if [ os.name ] = NT - { - default-toolset = msvc ; - } - else if [ os.name ] = MACOSX - { - default-toolset = darwin ; - } - } - - ECHO "warning: No toolsets are configured." ; - ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ; - ECHO "warning: If the default is wrong, your build may not work correctly." ; - ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ; - ECHO "warning: For more configuration options, please consult" ; - ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ; - - toolset.using $(default-toolset) : $(default-toolset-version) ; - } - - - # Parse command line for targets and properties. Note that this requires - # that all project files already be loaded. - local build-request = [ build-request.from-command-line $(.argv) - $(extra-properties) ] ; - local target-ids = [ $(build-request).get-at 1 ] ; - local properties = [ $(build-request).get-at 2 ] ; - - - # Expand properties specified on the command line into multiple property - # sets consisting of all legal property combinations. Each expanded property - # set will be used for a single build run. E.g. if multiple toolsets are - # specified then requested targets will be built with each of them. - if $(properties) - { - expanded = [ build-request.expand-no-defaults $(properties) ] ; - local xexpanded ; - for local e in $(expanded) - { - xexpanded += [ property-set.create [ feature.split $(e) ] ] ; - } - expanded = $(xexpanded) ; - } - else - { - expanded = [ property-set.empty ] ; - } - - - # Check that we actually found something to build. - if ! $(current-project) && ! $(target-ids) - { - errors.user-error "error: no Jamfile in current directory found, and no" - "target references specified." ; - EXIT ; - } - - - # Flags indicating that this build system run has been started in order to - # clean existing instead of create new targets. Note that these are not the - # final flag values as they may get changed later on due to some special - # targets being specified on the command line. - local clean ; if "--clean" in $(.argv) { clean = true ; } - local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; } - - - # List of explicitly requested files to build. Any target references read - # from the command line parameter not recognized as one of the targets - # defined in the loaded Jamfiles will be interpreted as an explicitly - # requested file to build. If any such files are explicitly requested then - # only those files and the targets they depend on will be built and they - # will be searched for among targets that would have been built had there - # been no explicitly requested files. 
- local explicitly-requested-files - - - # List of Boost Build meta-targets, virtual-targets and actual Jam targets - # constructed in this build system run. - local targets ; - local virtual-targets ; - local actual-targets ; - - - # Process each target specified on the command-line and convert it into - # internal Boost Build target objects. Detect special clean target. If no - # main Boost Build targets were explictly requested use the current project - # as the target. - for local id in $(target-ids) - { - if $(id) = clean - { - clean = true ; - } - else - { - local t ; - if $(current-project) - { - t = [ $(current-project).find $(id) : no-error ] ; - } - else - { - t = [ find-target $(id) ] ; - } - - if ! $(t) - { - ECHO "notice: could not find main target" $(id) ; - ECHO "notice: assuming it is a name of file to create." ; - explicitly-requested-files += $(id) ; - } - else - { - targets += $(t) ; - } - } - } - if ! $(targets) - { - targets += [ project.target [ project.module-name "." ] ] ; - } - - if [ option.get dump-generators : : true ] - { - generators.dump ; - } - - # We wish to put config.log in the build directory corresponding - # to Jamroot, so that the location does not differ depending on - # directory where we do build. The amount of indirection necessary - # here is scary. - local first-project = [ $(targets[0]).project ] ; - local first-project-root-location = [ $(first-project).get project-root ] ; - local first-project-root-module = [ project.load $(first-project-root-location) ] ; - local first-project-root = [ project.target $(first-project-root-module) ] ; - local first-build-build-dir = [ $(first-project-root).build-dir ] ; - configure.set-log-file $(first-build-build-dir)/config.log ; - - # Now that we have a set of targets to build and a set of property sets to - # build the targets with, we can start the main build process by using each - # property set to generate virtual targets from all of our listed targets - # and any of their dependants. - for local p in $(expanded) - { - .command-line-free-features = [ property-set.create [ $(p).free ] ] ; - for local t in $(targets) - { - local g = [ $(t).generate $(p) ] ; - if ! [ class.is-a $(t) : project-target ] - { - .results-of-main-targets += $(g[2-]) ; - } - virtual-targets += $(g[2-]) ; - } - } - - - # Convert collected virtual targets into actual raw Jam targets. - for t in $(virtual-targets) - { - actual-targets += [ $(t).actualize ] ; - } - - - # If XML data output has been requested prepare additional rules and targets - # so we can hook into Jam to collect build data while its building and have - # it trigger the final XML report generation after all the planned targets - # have been built. - if $(.out-xml) - { - # Get a qualified virtual target name. - rule full-target-name ( target ) - { - local name = [ $(target).name ] ; - local project = [ $(target).project ] ; - local project-path = [ $(project).get location ] ; - return $(project-path)//$(name) ; - } - - # Generate an XML file containing build statistics for each constituent. - # - rule out-xml ( xml-file : constituents * ) - { - # Prepare valid XML header and footer with some basic info. 
- local nl = " -" ; - local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ; - local timestamp = [ modules.peek : JAMDATE ] ; - local cwd = [ PWD ] ; - local command = $(.argv) ; - local bb-version = [ version.boost-build ] ; - .header on $(xml-file) = - "<?xml version=\"1.0\" encoding=\"utf-8\"?>" - "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">" - "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>" - "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>" - "$(nl) <directory><![CDATA[$(cwd)]]></directory>" - "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>" - ; - .footer on $(xml-file) = - "$(nl)</build>" ; - - # Generate the target dependency graph. - .contents on $(xml-file) += - "$(nl) <targets>" ; - for local t in [ virtual-target.all-targets ] - { - local action = [ $(t).action ] ; - if $(action) - # If a target has no action, it has no dependencies. - { - local name = [ full-target-name $(t) ] ; - local sources = [ $(action).sources ] ; - local dependencies ; - for local s in $(sources) - { - dependencies += [ full-target-name $(s) ] ; - } - - local path = [ $(t).path ] ; - local jam-target = [ $(t).actual-name ] ; - - .contents on $(xml-file) += - "$(nl) <target>" - "$(nl) <name><![CDATA[$(name)]]></name>" - "$(nl) <dependencies>" - "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>" - "$(nl) </dependencies>" - "$(nl) <path><![CDATA[$(path)]]></path>" - "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>" - "$(nl) </target>" - ; - } - } - .contents on $(xml-file) += - "$(nl) </targets>" ; - - # Build $(xml-file) after $(constituents). Do so even if a - # constituent action fails and regenerate the xml on every bjam run. - INCLUDES $(xml-file) : $(constituents) ; - ALWAYS $(xml-file) ; - __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ; - out-xml.generate $(xml-file) ; - } - - # The actual build actions are here; if we did this work in the actions - # clause we would have to form a valid command line containing the - # result of @(...) below (the name of the XML file). - # - rule out-xml.generate-action ( args * : xml-file - : command status start end user system : output ? ) - { - local contents = - [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ; - local f = @($(xml-file):E=$(contents)) ; - } - - # Nothing to do here; the *real* actions happen in - # out-xml.generate-action. - actions quietly out-xml.generate { } - - # Define the out-xml file target, which depends on all the targets so - # that it runs the collection after the targets have run. - out-xml $(.out-xml) : $(actual-targets) ; - - # Set up a global __ACTION_RULE__ that records all the available - # statistics about each actual target in a variable "on" the --out-xml - # target. - # - rule out-xml.collect ( xml-file : target : command status start end user - system : output ? ) - { - local nl = " -" ; - # Open the action with some basic info. - .contents on $(xml-file) += - "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ; - - # If we have an action object we can print out more detailed info. - local action = [ on $(target) return $(.action) ] ; - if $(action) - { - local action-name = [ $(action).action-name ] ; - local action-sources = [ $(action).sources ] ; - local action-props = [ $(action).properties ] ; - - # The qualified name of the action which we created the target. 
- .contents on $(xml-file) += - "$(nl) <name><![CDATA[$(action-name)]]></name>" ; - - # The sources that made up the target. - .contents on $(xml-file) += - "$(nl) <sources>" ; - for local source in $(action-sources) - { - local source-actual = [ $(source).actual-name ] ; - .contents on $(xml-file) += - "$(nl) <source><![CDATA[$(source-actual)]]></source>" ; - } - .contents on $(xml-file) += - "$(nl) </sources>" ; - - # The properties that define the conditions under which the - # target was built. - .contents on $(xml-file) += - "$(nl) <properties>" ; - for local prop in [ $(action-props).raw ] - { - local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ; - .contents on $(xml-file) += - "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ; - } - .contents on $(xml-file) += - "$(nl) </properties>" ; - } - - local locate = [ on $(target) return $(LOCATE) ] ; - locate ?= "" ; - .contents on $(xml-file) += - "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>" - "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>" - "$(nl) <command><![CDATA[$(command)]]></command>" - "$(nl) <output><![CDATA[$(output)]]></output>" ; - .contents on $(xml-file) += - "$(nl) </action>" ; - } - - # When no __ACTION_RULE__ is set "on" a target, the search falls back to - # the global module. - module - { - __ACTION_RULE__ = build-system.out-xml.collect - [ modules.peek build-system : .out-xml ] ; - } - - IMPORT - build-system : - out-xml.collect - out-xml.generate-action - : : - build-system.out-xml.collect - build-system.out-xml.generate-action - ; - } - - local j = [ option.get jobs ] ; - if $(j) - { - modules.poke : PARALLELISM : $(j) ; - } - - local k = [ option.get keep-going : true : true ] ; - if $(k) in "on" "yes" "true" - { - modules.poke : KEEP_GOING : 1 ; - } - else if $(k) in "off" "no" "false" - { - modules.poke : KEEP_GOING : 0 ; - } - else - { - ECHO "error: Invalid value for the --keep-going option" ; - EXIT ; - } - - # The 'all' pseudo target is not strictly needed expect in the case when we - # use it below but people often assume they always have this target - # available and do not declare it themselves before use which may cause - # build failures with an error message about not being able to build the - # 'all' target. - NOTFILE all ; - - # And now that all the actual raw Jam targets and all the dependencies - # between them have been prepared all that is left is to tell Jam to update - # those targets. - if $(explicitly-requested-files) - { - # Note that this case can not be joined with the regular one when only - # exact Boost Build targets are requested as here we do not build those - # requested targets but only use them to construct the dependency tree - # needed to build the explicitly requested files. - UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ; - } - else if $(cleanall) - { - UPDATE clean-all ; - } - else if $(clean) - { - common.Clean clean : [ actual-clean-targets ] ; - UPDATE clean ; - } - else - { - configure.print-configure-checks-summary ; - - if $(.pre-build-hook) - { - $(.pre-build-hook) ; - } - - DEPENDS all : $(actual-targets) ; - if UPDATE_NOW in [ RULENAMES ] - { - local ok = [ UPDATE_NOW all $(.out-xml) ] ; - if $(.post-build-hook) - { - $(.post-build-hook) $(ok) ; - } - # Prevent automatic update of the 'all' target, now that - # we have explicitly updated what we wanted. 
- UPDATE ; - } - else - { - UPDATE all $(.out-xml) ; - } - } -} diff --git a/jam-files/boost-build/build/__init__.py b/jam-files/boost-build/build/__init__.py deleted file mode 100644 index e69de29b..00000000 --- a/jam-files/boost-build/build/__init__.py +++ /dev/null diff --git a/jam-files/boost-build/build/ac.jam b/jam-files/boost-build/build/ac.jam deleted file mode 100644 index 6768f358..00000000 --- a/jam-files/boost-build/build/ac.jam +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright (c) 2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import property-set ; -import path ; -import modules ; -import "class" ; -import errors ; -import configure ; - -rule find-include-path ( variable : properties : header - : provided-path ? ) -{ - # FIXME: document which properties affect this function by - # default. - local target-os = [ $(properties).get <target-os> ] ; - properties = [ property-set.create <target-os>$(toolset) ] ; - if $($(variable)-$(properties)) - { - return $($(variable)-$(properties)) ; - } - else - { - provided-path ?= [ modules.peek : $(variable) ] ; - includes = $(provided-path) ; - includes += [ $(properties).get <include> ] ; - if [ $(properties).get <target-os> ] != windows - { - # FIXME: use sysroot - includes += /usr/include ; - } - - local result ; - while ! $(result) && $(includes) - { - local f = [ path.root $(header) $(includes[1]) ] ; - ECHO "Checking " $(f) ; - if [ path.exists $(f) ] - { - result = $(includes[1]) ; - } - else if $(provided-path) - { - errors.user-error "Could not find header" $(header) - : "in the user-specified directory" $(provided-path) ; - } - includes = $(includes[2-]) ; - } - $(variable)-$(properties) = $(result) ; - return $(result) ; - } -} - -rule find-library ( variable : properties : names + : provided-path ? ) -{ - local target-os = [ $(properties).get <target-os> ] ; - properties = [ property-set.create <target-os>$(toolset) ] ; - if $($(variable)-$(properties)) - { - return $($(variable)-$(properties)) ; - } - else - { - provided-path ?= [ modules.peek : $(variable) ] ; - paths = $(provided-path) ; - paths += [ $(properties).get <library-path> ] ; - if [ $(properties).get <target-os> ] != windows - { - paths += /usr/lib /usr/lib32 /usr/lib64 ; - } - - local result ; - while ! $(result) && $(paths) - { - while ! $(result) && $(names) - { - local f ; - if $(target-os) = windows - { - f = $(paths[1])/$(names[1]).lib ; - if [ path.exists $(f) ] - { - result = $(f) ; - } - } - else - { - # FIXME: check for .a as well, depending on - # the 'link' feature. - f = $(paths[1])/lib$(names[1]).so ; - ECHO "CHECKING $(f) " ; - if [ path.exists $(f) ] - { - result = $(f) ; - } - } - if ! 
$(result) && $(provided-path) - { - errors.user-error "Could not find either of: " $(names) - : "in the user-specified directory" $(provided-path) ; - - } - names = $(names[2-]) ; - } - paths = $(paths[2-]) ; - } - $(variable)-$(properties) = $(result) ; - return $(result) ; - } -} - -class ac-library : basic-target -{ - import errors ; - import indirect ; - import virtual-target ; - import ac ; - import configure ; - - rule __init__ ( name : project : * : * ) - { - basic-target.__init__ $(name) : $(project) : $(sources) - : $(requirements) ; - - reconfigure $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule set-header ( header ) - { - self.header = $(header) ; - } - - rule set-default-names ( names + ) - { - self.default-names = $(names) ; - } - - rule reconfigure ( * : * ) - { - ECHO "XXX" $(1) ; - if ! $(1) - { - # This is 'using xxx ;'. Nothing to configure, really. - } - else - { - for i in 1 2 3 4 5 6 7 8 9 - { - # FIXME: this naming is inconsistent with XXX_INCLUDE/XXX_LIBRARY - if ! ( $($(i)[1]) in root include-path library-path library-name condition ) - { - errors.user-error "Invalid named parameter" $($(i)[1]) ; - } - local name = $($(i)[1]) ; - local value = $($(i)[2-]) ; - if $($(name)) && $($(name)) != $(value) - { - errors.user-error "Attempt to change value of '$(name)'" ; - } - $(name) = $(value) ; - } - - include-path ?= $(root)/include ; - library-path ?= $(root)/lib ; - } - } - - rule construct ( name : sources * : property-set ) - { - # FIXME: log results. - local libnames = $(library-name) ; - if ! $(libnames) && ! $(include-path) && ! $(library-path) - { - libnames = [ modules.peek : $(name:U)_NAME ] ; - # Backward compatibility only. - libnames ?= [ modules.peek : $(name:U)_BINARY ] ; - } - libnames ?= $(self.default-names) ; - - local includes = [ - ac.find-include-path $(name:U)_INCLUDE : $(property-set) : $(self.header) : $(include-path) ] ; - local library = [ ac.find-library $(name:U)_LIBRARY : $(property-set) : $(libnames) : $(library-path) ] ; - if $(includes) && $(library) - { - library = [ virtual-target.from-file $(library) : . : $(self.project) ] ; - configure.log-library-search-result $(name) : "found" ; - return [ property-set.create <include>$(includes) <source>$(library) ] ; - } - else - { - configure.log-library-search-result $(name) : "no found" ; - } - } -} - diff --git a/jam-files/boost-build/build/alias.jam b/jam-files/boost-build/build/alias.jam deleted file mode 100644 index 48019cb9..00000000 --- a/jam-files/boost-build/build/alias.jam +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2003, 2004, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'alias' rule and the associated target class. -# -# Alias is just a main target which returns its source targets without any -# processing. 
For example: -# -# alias bin : hello test_hello ; -# alias lib : helpers xml_parser ; -# -# Another important use of 'alias' is to conveniently group source files: -# -# alias platform-src : win.cpp : <os>NT ; -# alias platform-src : linux.cpp : <os>LINUX ; -# exe main : main.cpp platform-src ; -# -# Lastly, it is possible to create a local alias for some target, with different -# properties: -# -# alias big_lib : : @/external_project/big_lib/<link>static ; -# - -import "class" : new ; -import project ; -import property-set ; -import targets ; - - -class alias-target-class : basic-target -{ - rule __init__ ( name : project : sources * : requirements * - : default-build * : usage-requirements * ) - { - basic-target.__init__ $(name) : $(project) : $(sources) : - $(requirements) : $(default-build) : $(usage-requirements) ; - } - - rule construct ( name : source-targets * : property-set ) - { - return [ property-set.empty ] $(source-targets) ; - } - - rule compute-usage-requirements ( subvariant ) - { - local base = [ basic-target.compute-usage-requirements $(subvariant) ] ; - return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ; - } -} - - -# Declares the 'alias' target. It will process its sources virtual-targets by -# returning them unaltered as its own constructed virtual-targets. -# -rule alias ( name : sources * : requirements * : default-build * : - usage-requirements * ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new alias-target-class $(name) : $(project) - : [ targets.main-target-sources $(sources) : $(name) : no-renaming ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) - ] - : [ targets.main-target-usage-requirements $(usage-requirements) : - $(project) ] - ] ; -} - - -IMPORT $(__name__) : alias : : alias ; diff --git a/jam-files/boost-build/build/alias.py b/jam-files/boost-build/build/alias.py deleted file mode 100644 index 575e5360..00000000 --- a/jam-files/boost-build/build/alias.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2003, 2004, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Status: ported (danielw) -# Base revision: 56043 - -# This module defines the 'alias' rule and associated class. -# -# Alias is just a main target which returns its source targets without any -# processing. For example:: -# -# alias bin : hello test_hello ; -# alias lib : helpers xml_parser ; -# -# Another important use of 'alias' is to conveniently group source files:: -# -# alias platform-src : win.cpp : <os>NT ; -# alias platform-src : linux.cpp : <os>LINUX ; -# exe main : main.cpp platform-src ; -# -# Lastly, it's possible to create local alias for some target, with different -# properties:: -# -# alias big_lib : : @/external_project/big_lib/<link>static ; -# - -import targets -import property_set -from b2.manager import get_manager - -from b2.util import metatarget - -class AliasTarget(targets.BasicTarget): - - def __init__(self, *args): - targets.BasicTarget.__init__(self, *args) - - def construct(self, name, source_targets, properties): - return [property_set.empty(), source_targets] - - def compute_usage_requirements(self, subvariant): - base = targets.BasicTarget.compute_usage_requirements(self, subvariant) - # Add source's usage requirement. If we don't do this, "alias" does not - # look like 100% alias. 
- return base.add(subvariant.sources_usage_requirements()) - -@metatarget -def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]): - - project = get_manager().projects().current() - targets = get_manager().targets() - - targets.main_target_alternative(AliasTarget( - name, project, - targets.main_target_sources(sources, name, no_renaming=True), - targets.main_target_requirements(requirements or [], project), - targets.main_target_default_build(default_build, project), - targets.main_target_usage_requirements(usage_requirements or [], project))) - -# Declares the 'alias' target. It will build sources, and return them unaltered. -get_manager().projects().add_rule("alias", alias) - diff --git a/jam-files/boost-build/build/build-request.jam b/jam-files/boost-build/build/build-request.jam deleted file mode 100644 index 8a1f7b0e..00000000 --- a/jam-files/boost-build/build/build-request.jam +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright 2002 Dave Abrahams -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import sequence ; -import set ; -import regex ; -import feature ; -import property ; -import container ; -import string ; - - -# Transform property-set by applying f to each component property. -# -local rule apply-to-property-set ( f property-set ) -{ - local properties = [ feature.split $(property-set) ] ; - return [ string.join [ $(f) $(properties) ] : / ] ; -} - - -# Expand the given build request by combining all property-sets which do not -# specify conflicting non-free features. Expects all the project files to -# already be loaded. -# -rule expand-no-defaults ( property-sets * ) -{ - # First make all features and subfeatures explicit. - local expanded-property-sets = [ sequence.transform apply-to-property-set - feature.expand-subfeatures : $(property-sets) ] ; - - # Now combine all of the expanded property-sets - local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ; - - return $(product) ; -} - - -# Implementation of x-product, below. Expects all the project files to already -# be loaded. -# -local rule x-product-aux ( property-sets + ) -{ - local result ; - local p = [ feature.split $(property-sets[1]) ] ; - local f = [ set.difference $(p:G) : [ feature.free-features ] ] ; - local seen ; - # No conflict with things used at a higher level? - if ! [ set.intersection $(f) : $(x-product-used) ] - { - local x-product-seen ; - { - # Do not mix in any conflicting features. - local x-product-used = $(x-product-used) $(f) ; - - if $(property-sets[2]) - { - local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ; - result = $(property-sets[1])/$(rest) ; - } - - result ?= $(property-sets[1]) ; - } - - # If we did not encounter a conflicting feature lower down, do not - # recurse again. - if ! [ set.intersection $(f) : $(x-product-seen) ] - { - property-sets = ; - } - - seen = $(x-product-seen) ; - } - - if $(property-sets[2]) - { - result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ; - } - - # Note that we have seen these features so that higher levels will recurse - # again without them set. - x-product-seen += $(f) $(seen) ; - return $(result) ; -} - - -# Return the cross-product of all elements of property-sets, less any that would -# contain conflicting values for single-valued features. Expects all the project -# files to already be loaded. 
-# -local rule x-product ( property-sets * ) -{ - if $(property-sets).non-empty - { - # Prepare some "scoped globals" that can be used by the implementation - # function, x-product-aux. - local x-product-seen x-product-used ; - return [ x-product-aux $(property-sets) : $(feature-space) ] ; - } - # Otherwise return empty. -} - - -# Returns true if either 'v' or the part of 'v' before the first '-' symbol is -# an implicit value. Expects all the project files to already be loaded. -# -local rule looks-like-implicit-value ( v ) -{ - if [ feature.is-implicit-value $(v) ] - { - return true ; - } - else - { - local split = [ regex.split $(v) - ] ; - if [ feature.is-implicit-value $(split[1]) ] - { - return true ; - } - } -} - - -# Takes the command line tokens (such as taken from the ARGV rule) and -# constructs a build request from them. Returns a vector of two vectors (where -# "vector" means container.jam's "vector"). First is the set of targets -# specified in the command line, and second is the set of requested build -# properties. Expects all the project files to already be loaded. -# -rule from-command-line ( command-line * ) -{ - local targets ; - local properties ; - - command-line = $(command-line[2-]) ; - local skip-next = ; - for local e in $(command-line) - { - if $(skip-next) - { - skip-next = ; - } - else if ! [ MATCH "^(-).*" : $(e) ] - { - # Build request spec either has "=" in it or completely consists of - # implicit feature values. - local fs = feature-space ; - if [ MATCH "(.*=.*)" : $(e) ] - || [ looks-like-implicit-value $(e:D=) : $(feature-space) ] - { - properties += [ convert-command-line-element $(e) : - $(feature-space) ] ; - } - else - { - targets += $(e) ; - } - } - else if [ MATCH "^(-[-ldjfsto])$" : $(e) ] - { - skip-next = true ; - } - } - return [ new vector - [ new vector $(targets) ] - [ new vector $(properties) ] ] ; -} - - -# Converts one element of command line build request specification into internal -# form. Expects all the project files to already be loaded. -# -local rule convert-command-line-element ( e ) -{ - local result ; - local parts = [ regex.split $(e) "/" ] ; - while $(parts) - { - local p = $(parts[1]) ; - local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ; - local lresult ; - local feature ; - local values ; - if $(m) - { - feature = $(m[1]) ; - values = [ regex.split $(m[2]) "," ] ; - lresult = <$(feature)>$(values) ; - } - else - { - lresult = [ regex.split $(p) "," ] ; - } - - if $(feature) && free in [ feature.attributes $(feature) ] - { - # If we have free feature, then the value is everything - # until the end of the command line token. Slashes in - # the following string are not taked to mean separation - # of properties. Commas are also not interpreted specially. - values = $(values:J=,) ; - values = $(values) $(parts[2-]) ; - values = $(values:J=/) ; - lresult = <$(feature)>$(values) ; - parts = ; - } - - if ! [ MATCH (.*-.*) : $(p) ] - { - # property.validate cannot handle subfeatures, so we avoid the check - # here. - for local p in $(lresult) - { - property.validate $(p) : $(feature-space) ; - } - } - - if ! 
$(result) - { - result = $(lresult) ; - } - else - { - result = $(result)/$(lresult) ; - } - - parts = $(parts[2-]) ; - } - - return $(result) ; -} - - -rule __test__ ( ) -{ - import assert ; - import feature ; - - feature.prepare-test build-request-test-temp ; - - import build-request ; - import build-request : expand-no-defaults : build-request.expand-no-defaults ; - import errors : try catch ; - import feature : feature subfeature ; - - feature toolset : gcc msvc borland : implicit ; - subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 - 3.0 3.0.1 3.0.2 : optional ; - - feature variant : debug release : implicit composite ; - feature inlining : on off ; - feature "include" : : free ; - - feature stdlib : native stlport : implicit ; - - feature runtime-link : dynamic static : symmetric ; - - # Empty build requests should expand to empty. - assert.result - : build-request.expand-no-defaults ; - - assert.result - <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug - <toolset>msvc/<stdlib>stlport/<variant>debug - <toolset>msvc/<variant>debug - : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ; - - assert.result - <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug - <toolset>msvc/<variant>debug - <variant>debug/<toolset>msvc/<stdlib>stlport - : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ; - - assert.result - <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off - <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off - : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ; - - assert.result - <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z - <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z - <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z - : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ; - - local r ; - - r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; - assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ; - - try ; - { - build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ; - } - catch \"static\" is not a value of an implicit feature ; - - r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ; - assert.equal [ $(r).get-at 1 ] : target ; - assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ; - - r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ; - assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ; - - r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ; - assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic - gcc/<runtime-link>static ; - - r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ; - assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static - borland/<runtime-link>static ; - - r = [ build-request.from-command-line bjam gcc-3.0 ] ; - assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : gcc-3.0 ; - - feature.finish-test build-request-test-temp ; -} diff --git a/jam-files/boost-build/build/build_request.py 
b/jam-files/boost-build/build/build_request.py deleted file mode 100644 index cc9f2400..00000000 --- a/jam-files/boost-build/build/build_request.py +++ /dev/null @@ -1,216 +0,0 @@ -# Status: being ported by Vladimir Prus -# TODO: need to re-compare with mainline of .jam -# Base revision: 40480 -# -# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -import b2.build.feature -feature = b2.build.feature - -from b2.util.utility import * -import b2.build.property_set as property_set - -def expand_no_defaults (property_sets): - """ Expand the given build request by combining all property_sets which don't - specify conflicting non-free features. - """ - # First make all features and subfeatures explicit - expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets] - - # Now combine all of the expanded property_sets - product = __x_product (expanded_property_sets) - - return [property_set.create(p) for p in product] - - -def __x_product (property_sets): - """ Return the cross-product of all elements of property_sets, less any - that would contain conflicting values for single-valued features. - """ - x_product_seen = set() - return __x_product_aux (property_sets, x_product_seen)[0] - -def __x_product_aux (property_sets, seen_features): - """Returns non-conflicting combinations of property sets. - - property_sets is a list of PropertySet instances. seen_features is a set of Property - instances. - - Returns a tuple of: - - list of lists of Property instances, such that within each list, no two Property instance - have the same feature, and no Property is for feature in seen_features. - - set of features we saw in property_sets - """ - if not property_sets: - return ([], set()) - - properties = property_sets[0].all() - - these_features = set() - for p in property_sets[0].non_free(): - these_features.add(p.feature()) - - # Note: the algorithm as implemented here, as in original Jam code, appears to - # detect conflicts based on features, not properties. For example, if command - # line build request say: - # - # <a>1/<b>1 c<1>/<b>1 - # - # It will decide that those two property sets conflict, because they both specify - # a value for 'b' and will not try building "<a>1 <c1> <b1>", but rather two - # different property sets. This is a topic for future fixing, maybe. 
- if these_features & seen_features: - - (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features) - return (inner_result, inner_seen | these_features) - - else: - - result = [] - (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features) - if inner_result: - for inner in inner_result: - result.append(properties + inner) - else: - result.append(properties) - - if inner_seen & these_features: - # Some of elements in property_sets[1:] conflict with elements of property_sets[0], - # Try again, this time omitting elements of property_sets[0] - (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features) - result.extend(inner_result2) - - return (result, inner_seen | these_features) - - - -def looks_like_implicit_value(v): - """Returns true if 'v' is either implicit value, or - the part before the first '-' symbol is implicit value.""" - if feature.is_implicit_value(v): - return 1 - else: - split = v.split("-") - if feature.is_implicit_value(split[0]): - return 1 - - return 0 - -def from_command_line(command_line): - """Takes the command line tokens (such as taken from ARGV rule) - and constructs build request from it. Returns a list of two - lists. First is the set of targets specified in the command line, - and second is the set of requested build properties.""" - - targets = [] - properties = [] - - for e in command_line: - if e[0] != "-": - # Build request spec either has "=" in it, or completely - # consists of implicit feature values. - if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]): - properties += convert_command_line_element(e) - else: - targets.append(e) - - return [targets, properties] - -# Converts one element of command line build request specification into -# internal form. -def convert_command_line_element(e): - - result = None - parts = e.split("/") - for p in parts: - m = p.split("=") - if len(m) > 1: - feature = m[0] - values = m[1].split(",") - lresult = [("<%s>%s" % (feature, v)) for v in values] - else: - lresult = p.split(",") - - if p.find('-') == -1: - # FIXME: first port property.validate - # property.validate cannot handle subfeatures, - # so we avoid the check here. 
- #for p in lresult: - # property.validate(p) - pass - - if not result: - result = lresult - else: - result = [e1 + "/" + e2 for e1 in result for e2 in lresult] - - return [property_set.create(b2.build.feature.split(r)) for r in result] - -### -### rule __test__ ( ) -### { -### import assert feature ; -### -### feature.prepare-test build-request-test-temp ; -### -### import build-request ; -### import build-request : expand_no_defaults : build-request.expand_no_defaults ; -### import errors : try catch ; -### import feature : feature subfeature ; -### -### feature toolset : gcc msvc borland : implicit ; -### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 -### 3.0 3.0.1 3.0.2 : optional ; -### -### feature variant : debug release : implicit composite ; -### feature inlining : on off ; -### feature "include" : : free ; -### -### feature stdlib : native stlport : implicit ; -### -### feature runtime-link : dynamic static : symmetric ; -### -### -### local r ; -### -### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; -### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ; -### -### try ; -### { -### -### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ; -### } -### catch \"static\" is not a value of an implicit feature ; -### -### -### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ; -### assert.equal [ $(r).get-at 1 ] : target ; -### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ; -### -### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ; -### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ; -### -### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ; -### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic -### gcc/<runtime-link>static ; -### -### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ; -### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static -### borland/<runtime-link>static ; -### -### r = [ build-request.from-command-line bjam gcc-3.0 ] ; -### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ; -### -### feature.finish-test build-request-test-temp ; -### } -### -### diff --git a/jam-files/boost-build/build/configure.jam b/jam-files/boost-build/build/configure.jam deleted file mode 100644 index 14c1328a..00000000 --- a/jam-files/boost-build/build/configure.jam +++ /dev/null @@ -1,237 +0,0 @@ -# Copyright (c) 2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# This module defines function to help with two main tasks: -# -# - Discovering build-time configuration for the purposes of adjusting -# build process. -# - Reporting what is built, and how it is configured. - -import targets ; -import errors ; -import targets ; -import sequence ; -import property ; -import property-set ; -import "class" : new ; -import common ; -import path ; - -rule log-summary ( ) -{ - -} - -.width = 30 ; - -rule set-width ( width ) -{ - .width = $(width) ; -} - -# Declare that the components specified by the parameter exist. 
-rule register-components ( components * ) -{ - .components += $(components) ; -} - -# Declare that the components specified by the parameters will -# be build. -rule components-building ( components * ) -{ - .built-components += $(components) ; -} - -# Report something about component configuration that the -# user should better know. -rule log-component-configuration ( component : message ) -{ - # FIXME: implement per-property-set logs - .component-logs.$(component) += $(message) ; -} - - - -rule log-check-result ( result ) -{ - if ! $(.announced-checks) - { - ECHO "Performing configuration checks\n" ; - .announced-checks = 1 ; - } - - ECHO $(result) ; - #.check-results += $(result) ; -} - -rule log-library-search-result ( library : result ) -{ - local x = [ PAD " - $(library) : $(result)" : $(.width) ] ; - log-check-result "$(x)" ; -} - -rule print-component-configuration ( ) -{ - local c = [ sequence.unique $(.components) ] ; - - ECHO "\nComponent configuration:\n" ; - for c in $(.components) - { - local s ; - if $(c) in $(.built-components) - { - s = "building" ; - } - else - { - s = "not building" ; - } - ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ; - for local m in $(.component-logs.$(c)) - { - ECHO " -" $(m) ; - } - } - ECHO ; -} - -rule print-configure-checks-summary ( ) -{ - # FIXME: the problem with that approach is tha - # the user sees checks summary when all checks are - # done, and has no progress reporting while the - # checks are being executed. - if $(.check-results) - { - ECHO "Configuration checks summary\n" ; - - for local r in $(.check-results) - { - ECHO $(r) ; - } - ECHO ; - } -} - -# Attempt to build a metatarget named by 'metatarget-reference' -# in context of 'project' with properties 'ps'. -# Returns non-empty value if build is OK. -rule builds-raw ( metatarget-reference : project : ps : what : retry ? ) -{ - local result ; - - if ! $(retry) && ! $(.$(what)-tested.$(ps)) - { - .$(what)-tested.$(ps) = true ; - - local targets = [ targets.generate-from-reference - $(metatarget-reference) : $(project) : $(ps) ] ; - - local jam-targets ; - for local t in $(targets[2-]) - { - jam-targets += [ $(t).actualize ] ; - } - - if ! UPDATE_NOW in [ RULENAMES ] - { - # Cannot determine. Assume existance. - } - else - { - local x = [ PAD " - $(what)" : $(.width) ] ; - if [ UPDATE_NOW $(jam-targets) : - $(.log-fd) : ignore-minus-n : ignore-minus-q ] - { - .$(what)-supported.$(ps) = yes ; - result = true ; - log-check-result "$(x) : yes" ; - } - else - { - log-check-result "$(x) : no" ; - } - } - return $(result) ; - } - else - { - return $(.$(what)-supported.$(ps)) ; - } -} - -rule builds ( metatarget-reference : properties * : what ? : retry ? ) -{ - what ?= "$(metatarget-reference) builds" ; - - # FIXME: this should not be hardcoded. Other checks might - # want to consider different set of features as relevant. - local toolset = [ property.select <toolset> : $(properties) ] ; - local toolset-version-property = "<toolset-$(toolset:G=):version>" ; - local relevant = [ property.select <target-os> <toolset> $(toolset-version-property) - <address-model> <architecture> - : $(properties) ] ; - local ps = [ property-set.create $(relevant) ] ; - local t = [ targets.current ] ; - local p = [ $(t).project ] ; - - return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : $(retry) ] ; -} - - -# Called by Boost.Build startup code to specify name of a file -# that will receive results of configure checks. This -# should never be called by users. 
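The component-configuration report implemented by the rules above has a direct Python counterpart in configure.py, deleted later in this commit. A minimal sketch with hypothetical component names, assuming it runs inside a Boost.Build session:

    import b2.build.configure as configure

    # Hypothetical components, registered the way a Jamroot or toolset module would.
    configure.register_components(["libfoo", "libbar"])
    configure.components_building(["libfoo"])
    configure.log_component_configuration("libbar", "disabled: required library not found")

    # Prints each component with its building / not building status, followed
    # by any per-component log messages recorded above.
    configure.print_component_configuration()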
-rule set-log-file ( log-file ) -{ - path.makedirs [ path.parent $(log-file) ] ; - - .log-fd = [ FILE_OPEN $(log-file) : "w" ] ; -} - -# Frontend rules - -class check-target-builds-worker -{ - import configure ; - import property-set ; - import targets ; - import property ; - - rule __init__ ( target message ? : true-properties * : false-properties * ) - { - self.target = $(target) ; - self.message = $(message) ; - self.true-properties = $(true-properties) ; - self.false-properties = $(false-properties) ; - } - - rule check ( properties * ) - { - local choosen ; - if [ configure.builds $(self.target) : $(properties) : $(self.message) ] - { - choosen = $(self.true-properties) ; - } - else - { - choosen = $(self.false-properties) ; - } - return [ property.evaluate-conditionals-in-context $(choosen) : $(properties) ] ; - } -} - - -rule check-target-builds ( target message ? : true-properties * : false-properties * ) -{ - local instance = [ new check-target-builds-worker $(target) $(message) : $(true-properties) - : $(false-properties) ] ; - return <conditional>@$(instance).check ; -} - -IMPORT $(__name__) : check-target-builds : : check-target-builds ; - - diff --git a/jam-files/boost-build/build/configure.py b/jam-files/boost-build/build/configure.py deleted file mode 100644 index 0426832c..00000000 --- a/jam-files/boost-build/build/configure.py +++ /dev/null @@ -1,164 +0,0 @@ -# Status: ported. -# Base revison: 64488 -# -# Copyright (c) 2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# This module defines function to help with two main tasks: -# -# - Discovering build-time configuration for the purposes of adjusting -# build process. -# - Reporting what is built, and how it is configured. 
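The check-target-builds frontend defined above yields a <conditional> property that is evaluated against each build request; a minimal sketch of the Python port defined further down in this file, with a hypothetical metatarget and properties, assuming the referenced features are declared and Boost.Build is running:

    from b2.build.configure import check_target_builds

    # Hypothetical check: if config//has_zlib builds, define HAVE_ZLIB,
    # otherwise mark the requesting target as not buildable.
    cond = check_target_builds("config//has_zlib",
                               ["<define>HAVE_ZLIB"],
                               ["<build>no"])
    # 'cond' has the form "<conditional>@<rule-name>"; the generated rule runs
    # the configure check the first time it is evaluated for a given relevant
    # property subset and then returns either the true- or the false-properties.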
- -import b2.build.property as property -import b2.build.property_set as property_set - -import b2.build.targets - -from b2.manager import get_manager -from b2.util.sequence import unique -from b2.util import bjam_signature, value_to_jam - -import bjam -import os - -__width = 30 - -def set_width(width): - global __width - __width = 30 - -__components = [] -__built_components = [] -__component_logs = {} -__announced_checks = False - -__log_file = None -__log_fd = -1 - -def register_components(components): - """Declare that the components specified by the parameter exist.""" - __components.extend(components) - -def components_building(components): - """Declare that the components specified by the parameters will be build.""" - __built_components.extend(components) - -def log_component_configuration(component, message): - """Report something about component configuration that the user should better know.""" - __component_logs.setdefault(component, []).append(message) - -def log_check_result(result): - global __announced_checks - if not __announced_checks: - print "Performing configuration checks" - __announced_checks = True - - print result - -def log_library_search_result(library, result): - log_check_result((" - %(library)s : %(result)s" % locals()).rjust(width)) - - -def print_component_configuration(): - - print "\nComponent configuration:" - for c in __components: - if c in __built_components: - s = "building" - else: - s = "not building" - message = " - %s)" % c - message = message.rjust(__width) - message += " : " + s - for m in __component_logs.get(c, []): - print " -" + m - print "" - -__builds_cache = {} - -def builds(metatarget_reference, project, ps, what): - # Attempt to build a metatarget named by 'metatarget-reference' - # in context of 'project' with properties 'ps'. - # Returns non-empty value if build is OK. - - result = [] - - existing = __builds_cache.get((what, ps), None) - if existing is None: - - result = False - __builds_cache[(what, ps)] = False - - targets = b2.build.targets.generate_from_reference( - metatarget_reference, project, ps).targets() - jam_targets = [] - for t in targets: - jam_targets.append(t.actualize()) - - x = (" - %s" % what).rjust(__width) - if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"): - __builds_cache[(what, ps)] = True - result = True - log_check_result("%s: yes" % x) - else: - log_check_result("%s: no" % x) - - return result - else: - return existing - -def set_log_file(log_file_name): - # Called by Boost.Build startup code to specify name of a file - # that will receive results of configure checks. This - # should never be called by users. - global __log_file, __log_fd - dirname = os.path.dirname(log_file_name) - if not os.path.exists(dirname): - os.makedirs(dirname) - # Make sure to keep the file around, so that it's not - # garbage-collected and closed - __log_file = open(log_file_name, "w") - __log_fd = __log_file.fileno() - -# Frontend rules - -class CheckTargetBuildsWorker: - - def __init__(self, target, true_properties, false_properties): - self.target = target - self.true_properties = property.create_from_strings(true_properties, True) - self.false_properties = property.create_from_strings(false_properties, True) - - def check(self, ps): - - # FIXME: this should not be hardcoded. Other checks might - # want to consider different set of features as relevant. 
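        # Note (not in the original source): the relevant subset gathered below
        # -- target OS, toolset and its version, address model, architecture --
        # is what keys the builds() cache, so changes to unrelated properties
        # reuse the cached check result.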
- toolset = ps.get('toolset')[0] - toolset_version_property = "<toolset-" + toolset + ":version>" ; - relevant = ps.get_properties('target-os') + \ - ps.get_properties("toolset") + \ - ps.get_properties(toolset_version_property) + \ - ps.get_properties("address-model") + \ - ps.get_properties("architecture") - rps = property_set.create(relevant) - t = get_manager().targets().current() - p = t.project() - if builds(self.target, p, rps, "%s builds" % self.target): - choosen = self.true_properties - else: - choosen = self.false_properties - return property.evaluate_conditionals_in_context(choosen, ps) - -@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"])) -def check_target_builds(target, true_properties, false_properties): - worker = CheckTargetBuildsWorker(target, true_properties, false_properties) - value = value_to_jam(worker.check) - return "<conditional>" + value - -get_manager().projects().add_rule("check-target-builds", check_target_builds) - - diff --git a/jam-files/boost-build/build/engine.py b/jam-files/boost-build/build/engine.py deleted file mode 100644 index be9736e0..00000000 --- a/jam-files/boost-build/build/engine.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright Pedro Ferreira 2005. -# Copyright Vladimir Prus 2007. -# Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -bjam_interface = __import__('bjam') - -import operator -import re - -import b2.build.property_set as property_set -import b2.util - -class BjamAction: - """Class representing bjam action defined from Python.""" - - def __init__(self, action_name, function): - self.action_name = action_name - self.function = function - - def __call__(self, targets, sources, property_set): - if self.function: - self.function(targets, sources, property_set) - - # Bjam actions defined from Python have only the command - # to execute, and no associated jam procedural code. So - # passing 'property_set' to it is not necessary. - bjam_interface.call("set-update-action", self.action_name, - targets, sources, []) - -class BjamNativeAction: - """Class representing bjam action defined by Jam code. - - We still allow to associate a Python callable that will - be called when this action is installed on any target. - """ - - def __init__(self, action_name, function): - self.action_name = action_name - self.function = function - - def __call__(self, targets, sources, property_set): - if self.function: - self.function(targets, sources, property_set) - - p = [] - if property_set: - p = property_set.raw() - - b2.util.set_jam_action(self.action_name, targets, sources, p) - -action_modifiers = {"updated": 0x01, - "together": 0x02, - "ignore": 0x04, - "quietly": 0x08, - "piecemeal": 0x10, - "existing": 0x20} - -class Engine: - """ The abstract interface to a build engine. - - For now, the naming of targets, and special handling of some - target variables like SEARCH and LOCATE make this class coupled - to bjam engine. - """ - def __init__ (self): - self.actions = {} - - def add_dependency (self, targets, sources): - """Adds a dependency from 'targets' to 'sources' - - Both 'targets' and 'sources' can be either list - of target names, or a single target name. 
- """ - if isinstance (targets, str): - targets = [targets] - if isinstance (sources, str): - sources = [sources] - - for target in targets: - for source in sources: - self.do_add_dependency (target, source) - - def set_target_variable (self, targets, variable, value, append=0): - """ Sets a target variable. - - The 'variable' will be available to bjam when it decides - where to generate targets, and will also be available to - updating rule for that 'taret'. - """ - if isinstance (targets, str): - targets = [targets] - - for target in targets: - self.do_set_target_variable (target, variable, value, append) - - def set_update_action (self, action_name, targets, sources, properties=property_set.empty()): - """ Binds a target to the corresponding update action. - If target needs to be updated, the action registered - with action_name will be used. - The 'action_name' must be previously registered by - either 'register_action' or 'register_bjam_action' - method. - """ - assert(isinstance(properties, property_set.PropertySet)) - if isinstance (targets, str): - targets = [targets] - self.do_set_update_action (action_name, targets, sources, properties) - - def register_action (self, action_name, command, bound_list = [], flags = [], - function = None): - """Creates a new build engine action. - - Creates on bjam side an action named 'action_name', with - 'command' as the command to be executed, 'bound_variables' - naming the list of variables bound when the command is executed - and specified flag. - If 'function' is not None, it should be a callable taking three - parameters: - - targets - - sources - - instance of the property_set class - This function will be called by set_update_action, and can - set additional target variables. - """ - if self.actions.has_key(action_name): - raise "Bjam action %s is already defined" % action_name - - assert(isinstance(flags, list)) - - bjam_flags = reduce(operator.or_, - (action_modifiers[flag] for flag in flags), 0) - - bjam_interface.define_action(action_name, command, bound_list, bjam_flags) - - self.actions[action_name] = BjamAction(action_name, function) - - def register_bjam_action (self, action_name, function=None): - """Informs self that 'action_name' is declared in bjam. - - From this point, 'action_name' is a valid argument to the - set_update_action method. The action_name should be callable - in the global module of bjam. - """ - - # We allow duplicate calls to this rule for the same - # action name. This way, jamfile rules that take action names - # can just register them without specially checking if - # action is already registered. 
- if not self.actions.has_key(action_name): - self.actions[action_name] = BjamNativeAction(action_name, function) - - # Overridables - - - def do_set_update_action (self, action_name, targets, sources, property_set): - action = self.actions.get(action_name) - if not action: - raise Exception("No action %s was registered" % action_name) - action(targets, sources, property_set) - - def do_set_target_variable (self, target, variable, value, append): - if append: - bjam_interface.call("set-target-variable", target, variable, value, "true") - else: - bjam_interface.call("set-target-variable", target, variable, value) - - def do_add_dependency (self, target, source): - bjam_interface.call("DEPENDS", target, source) - - diff --git a/jam-files/boost-build/build/errors.py b/jam-files/boost-build/build/errors.py deleted file mode 100644 index d9dceefe..00000000 --- a/jam-files/boost-build/build/errors.py +++ /dev/null @@ -1,127 +0,0 @@ -# Status: being written afresh by Vladimir Prus - -# Copyright 2007 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This file is supposed to implement error reporting for Boost.Build. -# Experience with jam version has shown that printing full backtrace -# on each error is buffling. Further, for errors printed after parsing -- -# during target building, the stacktrace does not even mention what -# target is being built. - -# This module implements explicit contexts -- where other code can -# communicate which projects/targets are being built, and error -# messages will show those contexts. For programming errors, -# Python assertions are to be used. - -import bjam -import traceback -import sys - -def format(message, prefix=""): - parts = str(message).split("\n") - return "\n".join(prefix+p for p in parts) - - -class Context: - - def __init__(self, message, nested=None): - self.message_ = message - self.nested_ = nested - - def report(self, indent=""): - print indent + " -", self.message_ - if self.nested_: - print indent + " declared at:" - for n in self.nested_: - n.report(indent + " ") - -class JamfileContext: - - def __init__(self): - raw = bjam.backtrace() - self.raw_ = raw - - def report(self, indent=""): - for r in self.raw_: - print indent + " - %s:%s" % (r[0], r[1]) - -class ExceptionWithUserContext(Exception): - - def __init__(self, message, context, - original_exception=None, original_tb=None, stack=None): - Exception.__init__(self, message) - self.context_ = context - self.original_exception_ = original_exception - self.original_tb_ = original_tb - self.stack_ = stack - - def report(self): - print "error:", self.args[0] - if self.original_exception_: - print format(str(self.original_exception_), " ") - print - print " error context (most recent first):" - for c in self.context_[::-1]: - c.report() - print - if "--stacktrace" in bjam.variable("ARGV"): - if self.original_tb_: - traceback.print_tb(self.original_tb_) - elif self.stack_: - for l in traceback.format_list(self.stack_): - print l, - else: - print " use the '--stacktrace' option to get Python stacktrace" - print - -def user_error_checkpoint(callable): - def wrapper(self, *args): - errors = self.manager().errors() - try: - return callable(self, *args) - except ExceptionWithUserContext, e: - raise - except Exception, e: - errors.handle_stray_exception(e) - finally: - errors.pop_user_context() - - return wrapper - -class Errors: - - def __init__(self): - self.contexts_ = [] - self._count 
= 0 - - def count(self): - return self._count - - def push_user_context(self, message, nested=None): - self.contexts_.append(Context(message, nested)) - - def pop_user_context(self): - del self.contexts_[-1] - - def push_jamfile_context(self): - self.contexts_.append(JamfileContext()) - - def pop_jamfile_context(self): - del self.contexts_[-1] - - def capture_user_context(self): - return self.contexts_[:] - - def handle_stray_exception(self, e): - raise ExceptionWithUserContext("unexpected exception", self.contexts_[:], - e, sys.exc_info()[2]) - def __call__(self, message): - self._count = self._count + 1 - raise ExceptionWithUserContext(message, self.contexts_[:], - stack=traceback.extract_stack()) - - - - diff --git a/jam-files/boost-build/build/feature.jam b/jam-files/boost-build/build/feature.jam deleted file mode 100644 index 6f54adef..00000000 --- a/jam-files/boost-build/build/feature.jam +++ /dev/null @@ -1,1335 +0,0 @@ -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2002, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import assert : * ; -import "class" : * ; -import errors : lol->list ; -import indirect ; -import modules ; -import regex ; -import sequence ; -import set ; -import utility ; - - -local rule setup ( ) -{ - .all-attributes = - implicit - composite - optional - symmetric - free - incidental - path - dependency - propagated - link-incompatible - subfeature - order-sensitive - ; - - .all-features = ; - .all-subfeatures = ; - .all-top-features = ; # non-subfeatures - .all-implicit-values = ; -} -setup ; - - -# Prepare a fresh space to test in by moving all global variable settings into -# the given temporary module and erasing them here. -# -rule prepare-test ( temp-module ) -{ - DELETE_MODULE $(temp-module) ; - - # Transfer globals to temp-module. - for local v in [ VARNAMES feature ] - { - if [ MATCH (\\.) : $(v) ] - { - modules.poke $(temp-module) : $(v) : $($(v)) ; - $(v) = ; - } - } - setup ; -} - - -# Clear out all global variables and recover all variables from the given -# temporary module. -# -rule finish-test ( temp-module ) -{ - # Clear globals. - for local v in [ VARNAMES feature ] - { - if [ MATCH (\\.) : $(v) ] - { - $(v) = ; - } - } - - for local v in [ VARNAMES $(temp-module) ] - { - $(v) = [ modules.peek $(temp-module) : $(v) ] ; - } - DELETE_MODULE $(temp-module) ; -} - - -# Transform features by bracketing any elements which are not already bracketed -# by "<>". -# -local rule grist ( features * ) -{ - local empty = "" ; - return $(empty:G=$(features)) ; -} - - -# Declare a new feature with the given name, values, and attributes. -# -rule feature ( - name # Feature name. - : values * # Allowable values - may be extended later using feature.extend. - : attributes * # Feature attributes (e.g. implicit, free, propagated...). -) -{ - name = [ grist $(name) ] ; - - local error ; - - # Check for any unknown attributes. - if ! 
( $(attributes) in $(.all-attributes) ) - { - error = unknown attributes: - [ set.difference $(attributes) : $(.all-attributes) ] ; - } - else if $(name) in $(.all-features) - { - error = feature already defined: ; - } - else if implicit in $(attributes) && free in $(attributes) - { - error = free features cannot also be implicit ; - } - else if free in $(attributes) && propagated in $(attributes) - { - error = free features cannot be propagated ; - } - else - { - local m = [ MATCH (.*=.*) : $(values) ] ; - if $(m[1]) - { - error = "feature value may not contain '='" ; - } - } - - if $(error) - { - errors.error $(error) - : "in" feature declaration: - : feature [ lol->list $(1) : $(2) : $(3) ] ; - } - - $(name).values ?= ; - $(name).attributes = $(attributes) ; - $(name).subfeatures ?= ; - $(attributes).features += $(name) ; - - .all-features += $(name) ; - if subfeature in $(attributes) - { - .all-subfeatures += $(name) ; - } - else - { - .all-top-features += $(name) ; - } - extend $(name) : $(values) ; -} - - -# Sets the default value of the given feature, overriding any previous default. -# -rule set-default ( feature : value ) -{ - local f = [ grist $(feature) ] ; - local a = $($(f).attributes) ; - local bad-attribute = ; - if free in $(a) - { - bad-attribute = free ; - } - else if optional in $(a) - { - bad-attribute = optional ; - } - if $(bad-attribute) - { - errors.error "$(bad-attribute) property $(f) cannot have a default." ; - } - if ! $(value) in $($(f).values) - { - errors.error "The specified default value, '$(value)' is invalid" - : "allowed values are: " $($(f).values) ; - } - $(f).default = $(value) ; -} - - -# Returns the default property values for the given features. -# -rule defaults ( features * ) -{ - local result ; - for local f in $(features) - { - local gf = $(:E=:G=$(f)) ; - local a = $($(gf).attributes) ; - if ( free in $(a) ) || ( optional in $(a) ) - { - } - else - { - result += $(gf)$($(gf).default) ; - } - } - return $(result) ; -} - - -# Returns true iff all 'names' elements are valid features. -# -rule valid ( names + ) -{ - if $(names) in $(.all-features) - { - return true ; - } -} - - -# Returns the attibutes of the given feature. -# -rule attributes ( feature ) -{ - return $($(:E=:G=$(feature)).attributes) ; -} - - -# Returns the values of the given feature. -# -rule values ( feature ) -{ - return $($(:E=:G=$(feature)).values) ; -} - - -# Returns true iff 'value-string' is a value-string of an implicit feature. -# -rule is-implicit-value ( value-string ) -{ - local v = [ regex.split $(value-string) - ] ; - local failed ; - if ! $(v[1]) in $(.all-implicit-values) - { - failed = true ; - } - else - { - local feature = $($(v[1]).implicit-feature) ; - for local subvalue in $(v[2-]) - { - if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ] - { - failed = true ; - } - } - } - - if ! $(failed) - { - return true ; - } -} - - -# Returns the implicit feature associated with the given implicit value. -# -rule implied-feature ( implicit-value ) -{ - local components = [ regex.split $(implicit-value) "-" ] ; - - local feature = $($(components[1]).implicit-feature) ; - if ! $(feature) - { - errors.error \"$(implicit-value)\" is not a value of an implicit feature ; - feature = "" ; # Keep testing happy; it expects a result. - } - return $(feature) ; -} - - -local rule find-implied-subfeature ( feature subvalue : value-string ? ) -{ - # Feature should be of the form <feature-name>. 
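The Python port of this module (feature.py, deleted further down in this commit) exposes the same declaration and default-handling operations as the rules above; a minimal sketch with a hypothetical feature name:

    import b2.build.feature as feature

    # Declare a feature; the first value becomes the default until overridden.
    feature.feature("example-link", ["dynamic", "static"], ["symmetric"])
    feature.set_default("example-link", "static")

    assert feature.values("example-link") == ["dynamic", "static"]
    assert feature.get("example-link").default() == "static"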
- if $(feature) != $(feature:G) - { - errors.error invalid feature $(feature) ; - } - - return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ; -} - - -# Given a feature and a value of one of its subfeatures, find the name of the -# subfeature. If value-string is supplied, looks for implied subfeatures that -# are specific to that value of feature -# -rule implied-subfeature ( - feature # The main feature name. - subvalue # The value of one of its subfeatures. - : value-string ? # The value of the main feature. -) -{ - local subfeature = [ find-implied-subfeature $(feature) $(subvalue) - : $(value-string) ] ; - if ! $(subfeature) - { - value-string ?= "" ; - errors.error \"$(subvalue)\" is not a known subfeature value of - $(feature)$(value-string) ; - } - return $(subfeature) ; -} - - -# Generate an error if the feature is unknown. -# -local rule validate-feature ( feature ) -{ - if ! $(feature) in $(.all-features) - { - errors.error unknown feature \"$(feature)\" ; - } -} - - -# Given a feature and its value or just a value corresponding to an implicit -# feature, returns a property set consisting of all component subfeatures and -# their values. For example all the following calls: -# -# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86 -# expand-subfeatures-aux gcc-2.95.2-linux-x86 -# -# return: -# -# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86 -# -local rule expand-subfeatures-aux ( - feature ? # Feature name or empty if value corresponds to an - # implicit property. - : value # Feature value. - : dont-validate ? # If set, no value string validation will be done. -) -{ - if $(feature) - { - feature = $(feature) ; - } - - if ! $(feature) - { - feature = [ implied-feature $(value) ] ; - } - else - { - validate-feature $(feature) ; - } - if ! $(dont-validate) - { - validate-value-string $(feature) $(value) ; - } - - local components = [ regex.split $(value) "-" ] ; - - # Get the top-level feature's value. - local value = $(components[1]:G=) ; - - local result = $(components[1]:G=$(feature)) ; - - local subvalues = $(components[2-]) ; - while $(subvalues) - { - local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues. - subvalues = $(subvalues[2-]) ; - - local subfeature = [ find-implied-subfeature $(feature) $(subvalue) : - $(value) ] ; - - # If no subfeature was found reconstitute the value string and use that. - if ! $(subfeature) - { - result = $(components:J=-) ; - result = $(result:G=$(feature)) ; - subvalues = ; # Stop looping. - } - else - { - local f = [ MATCH ^<(.*)>$ : $(feature) ] ; - result += $(subvalue:G=$(f)-$(subfeature)) ; - } - } - - return $(result) ; -} - - -# Make all elements of properties corresponding to implicit features explicit, -# and express all subfeature values as separate properties in their own right. -# For example, all of the following properties -# -# gcc-2.95.2-linux-x86 -# <toolset>gcc-2.95.2-linux-x86 -# -# might expand to -# -# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86 -# -rule expand-subfeatures ( - properties * # Property set with elements of the form - # <feature>value-string or just value-string in the case - # of implicit features. - : dont-validate ? -) -{ - local result ; - for local p in $(properties) - { - # Don't expand subfeatures in subfeatures - if ! [ MATCH "(:)" : $(p:G) ] - { - result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ; - } - else - { - result += $(p) ; - } - } - return $(result) ; -} - - -# Helper for extend, below. 
Handles the feature case. -# -local rule extend-feature ( feature : values * ) -{ - feature = [ grist $(feature) ] ; - validate-feature $(feature) ; - if implicit in $($(feature).attributes) - { - for local v in $(values) - { - if $($(v).implicit-feature) - { - errors.error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ; - } - $(v).implicit-feature = $(feature) ; - } - - .all-implicit-values += $(values) ; - } - if ! $($(feature).values) - { - # This is the first value specified for this feature so make it be the - # default. - $(feature).default = $(values[1]) ; - } - $(feature).values += $(values) ; -} - - -# Checks that value-string is a valid value-string for the given feature. -# -rule validate-value-string ( feature value-string ) -{ - if ! ( - free in $($(feature).attributes) - || ( $(value-string) in $(feature).values ) - ) - { - local values = $(value-string) ; - - if $($(feature).subfeatures) - { - if ! ( $(value-string) in $($(feature).values) ) - && ! ( $(value-string) in $($(feature).subfeatures) ) - { - values = [ regex.split $(value-string) - ] ; - } - } - - if ! ( $(values[1]) in $($(feature).values) ) && - - # An empty value is allowed for optional features. - ( $(values[1]) || ! ( optional in $($(feature).attributes) ) ) - { - errors.error \"$(values[1])\" is not a known value of feature $(feature) - : legal values: \"$($(feature).values)\" ; - } - - for local v in $(values[2-]) - { - # This will validate any subfeature values in value-string. - implied-subfeature $(feature) $(v) : $(values[1]) ; - } - } -} - - -# A helper that computes: -# * name(s) of module-local variable(s) used to record the correspondence -# between subvalue(s) and a subfeature -# * value of that variable when such a subfeature/subvalue has been defined and -# returns a list consisting of the latter followed by the former. -# -local rule subvalue-var ( - feature # Main feature name. - value-string ? # If supplied, specifies a specific value of the main - # feature for which the subfeature values are valid. - : subfeature # Subfeature name. - : subvalues * # Subfeature values. -) -{ - feature = [ grist $(feature) ] ; - validate-feature $(feature) ; - if $(value-string) - { - validate-value-string $(feature) $(value-string) ; - } - - local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ; - - return $(subfeature-name) - $(feature)$(value-string:E="")<>$(subvalues).subfeature ; -} - - -# Extends the given subfeature with the subvalues. If the optional value-string -# is provided, the subvalues are only valid for the given value of the feature. -# Thus, you could say that <target-platform>mingw is specific to -# <toolset>gcc-2.95.2 as follows: -# -# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ; -# -rule extend-subfeature ( - feature # The feature whose subfeature is being extended. - - value-string ? # If supplied, specifies a specific value of the main - # feature for which the new subfeature values are valid. - - : subfeature # Subfeature name. - : subvalues * # Additional subfeature values. -) -{ - local subfeature-vars = [ subvalue-var $(feature) $(value-string) - : $(subfeature) : $(subvalues) ] ; - - local f = [ utility.ungrist [ grist $(feature) ] ] ; - extend $(f)-$(subfeature-vars[1]) : $(subvalues) ; - - # Provide a way to get from the given feature or property and subfeature - # value to the subfeature name. 
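The feature-extension rules above (and the extend dispatcher below) also exist in the Python port, which calls extend() internally from feature(); a minimal sketch of growing an implicit feature after declaration, assuming extend() accepts an ungristed name the way the Jam rule does:

    import b2.build.feature as feature

    feature.feature("example-toolset", ["gcc"], ["implicit"])
    # Add another allowed value to the already-declared implicit feature.
    feature.extend("example-toolset", ["msvc"])

    assert feature.is_implicit_value("msvc")
    assert str(feature.implied_feature("msvc")) == "example-toolset"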
- $(subfeature-vars[2-]) = $(subfeature-vars[1]) ; -} - - -# Returns true iff the subvalues are valid for the feature. When the optional -# value-string is provided, returns true iff the subvalues are valid for the -# given value of the feature. -# -rule is-subvalue ( feature : value-string ? : subfeature : subvalue ) -{ - local subfeature-vars = [ subvalue-var $(feature) $(value-string) - : $(subfeature) : $(subvalue) ] ; - - if $($(subfeature-vars[2])) = $(subfeature-vars[1]) - { - return true ; - } -} - - -# Can be called three ways: -# -# 1. extend feature : values * -# 2. extend <feature> subfeature : values * -# 3. extend <feature>value-string subfeature : values * -# -# * Form 1 adds the given values to the given feature. -# * Forms 2 and 3 add subfeature values to the given feature. -# * Form 3 adds the subfeature values as specific to the given property -# value-string. -# -rule extend ( feature-or-property subfeature ? : values * ) -{ - local feature ; # If a property was specified this is its feature. - local value-string ; # E.g., the gcc-2.95-2 part of <toolset>gcc-2.95.2. - - # If a property was specified. - if $(feature-or-property:G) && $(feature-or-property:G=) - { - # Extract the feature and value-string, if any. - feature = $(feature-or-property:G) ; - value-string = $(feature-or-property:G=) ; - } - else - { - feature = [ grist $(feature-or-property) ] ; - } - - # Dispatch to the appropriate handler. - if $(subfeature) - { - extend-subfeature $(feature) $(value-string) : $(subfeature) - : $(values) ; - } - else - { - # If no subfeature was specified, we do not expect to see a - # value-string. - if $(value-string) - { - errors.error can only specify a property as the first argument when - extending a subfeature - : usage: - : " extend" feature ":" values... - : " | extend" <feature>value-string subfeature ":" values... - ; - } - - extend-feature $(feature) : $(values) ; - } -} - - -local rule get-subfeature-name ( subfeature value-string ? ) -{ - local prefix = $(value-string): ; - return $(prefix:E="")$(subfeature) ; -} - - -# Declares a subfeature. -# -rule subfeature ( - feature # Root feature that is not a subfeature. - value-string ? # A value-string specifying which feature or subfeature - # values this subfeature is specific to, if any. - : subfeature # The name of the subfeature being declared. - : subvalues * # The allowed values of this subfeature. - : attributes * # The attributes of the subfeature. -) -{ - feature = [ grist $(feature) ] ; - validate-feature $(feature) ; - - # Add grist to the subfeature name if a value-string was supplied. - local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ; - - if $(subfeature-name) in $($(feature).subfeatures) - { - errors.error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\" - "specific to "$(value-string) ; - } - $(feature).subfeatures += $(subfeature-name) ; - - # First declare the subfeature as a feature in its own right. - local f = [ utility.ungrist $(feature) ] ; - feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ; - - # Now make sure the subfeature values are known. - extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ; -} - - -# Set components of the given composite property. -# -rule compose ( composite-property : component-properties * ) -{ - local feature = $(composite-property:G) ; - if ! 
( composite in [ attributes $(feature) ] ) - { - errors.error "$(feature)" is not a composite feature ; - } - - $(composite-property).components ?= ; - if $($(composite-property).components) - { - errors.error components of "$(composite-property)" already set: - $($(composite-property).components) ; - } - - if $(composite-property) in $(component-properties) - { - errors.error composite property "$(composite-property)" cannot have itself as a component ; - } - $(composite-property).components = $(component-properties) ; -} - - -local rule expand-composite ( property ) -{ - return $(property) - [ sequence.transform expand-composite : $($(property).components) ] ; -} - - -# Return all values of the given feature specified by the given property set. -# -rule get-values ( feature : properties * ) -{ - local result ; - - feature = $(:E=:G=$(feature)) ; # Add <> if necessary. - for local p in $(properties) - { - if $(p:G) = $(feature) - { - # Use MATCH instead of :G= to get the value, in order to preserve - # the value intact instead of having bjam treat it as a decomposable - # path. - result += [ MATCH ">(.*)" : $(p) ] ; - } - } - return $(result) ; -} - - -rule free-features ( ) -{ - return $(free.features) ; -} - - -# Expand all composite properties in the set so that all components are -# explicitly expressed. -# -rule expand-composites ( properties * ) -{ - local explicit-features = $(properties:G) ; - local result ; - - # Now expand composite features. - for local p in $(properties) - { - local expanded = [ expand-composite $(p) ] ; - - for local x in $(expanded) - { - if ! $(x) in $(result) - { - local f = $(x:G) ; - - if $(f) in $(free.features) - { - result += $(x) ; - } - else if ! $(x) in $(properties) # x is the result of expansion - { - if ! $(f) in $(explicit-features) # not explicitly-specified - { - if $(f) in $(result:G) - { - errors.error expansions of composite features result - in conflicting values for $(f) - : values: [ get-values $(f) : $(result) ] $(x:G=) - : one contributing composite property was $(p) ; - } - else - { - result += $(x) ; - } - } - } - else if $(f) in $(result:G) - { - errors.error explicitly-specified values of non-free feature - $(f) conflict : - "existing values:" [ get-values $(f) : $(properties) ] : - "value from expanding " $(p) ":" $(x:G=) ; - } - else - { - result += $(x) ; - } - } - } - } - return $(result) ; -} - - -# Return true iff f is an ordinary subfeature of the parent-property's feature, -# or if f is a subfeature of the parent-property's feature specific to the -# parent-property's value. -# -local rule is-subfeature-of ( parent-property f ) -{ - if subfeature in $($(f).attributes) - { - local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ; - if $(specific-subfeature) - { - # The feature has the form <topfeature-topvalue:subfeature>, e.g. - # <toolset-msvc:version>. - local feature-value = [ split-top-feature $(specific-subfeature[1]) - ] ; - if <$(feature-value[1])>$(feature-value[2]) = $(parent-property) - { - return true ; - } - } - else - { - # The feature has the form <topfeature-subfeature>, e.g. - # <toolset-version> - local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ; - if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G) - { - return true ; - } - } - } -} - - -# As for is-subfeature-of but for subproperties. 
-# -local rule is-subproperty-of ( parent-property p ) -{ - return [ is-subfeature-of $(parent-property) $(p:G) ] ; -} - - -# Given a property, return the subset of features consisting of all ordinary -# subfeatures of the property's feature, and all specific subfeatures of the -# property's feature which are conditional on the property's value. -# -local rule select-subfeatures ( parent-property : features * ) -{ - return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ; -} - - -# As for select-subfeatures but for subproperties. -# -local rule select-subproperties ( parent-property : properties * ) -{ - return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ; -} - - -# Given a property set which may consist of composite and implicit properties -# and combined subfeature values, returns an expanded, normalized property set -# with all implicit features expressed explicitly, all subfeature values -# individually expressed, and all components of composite properties expanded. -# Non-free features directly expressed in the input properties cause any values -# of those features due to composite feature expansion to be dropped. If two -# values of a given non-free feature are directly expressed in the input, an -# error is issued. -# -rule expand ( properties * ) -{ - local expanded = [ expand-subfeatures $(properties) ] ; - return [ expand-composites $(expanded) ] ; -} - - -# Helper rule for minimize. Returns true iff property's feature is present in -# the contents of the variable named by feature-set-var. -# -local rule in-features ( feature-set-var property ) -{ - if $(property:G) in $($(feature-set-var)) - { - return true ; - } -} - - -# Helper rule for minimize. Returns the list with the same properties, but with -# all subfeatures moved to the end of the list. -# -local rule move-subfeatures-to-the-end ( properties * ) -{ - local x1 ; - local x2 ; - for local p in $(properties) - { - if subfeature in $($(p:G).attributes) - { - x2 += $(p) ; - } - else - { - x1 += $(p) ; - } - } - return $(x1) $(x2) ; -} - - -# Given an expanded property set, eliminate all redundancy: properties that are -# elements of other (composite) properties in the set will be eliminated. -# Non-symmetric properties equal to default values will be eliminated unless -# they override a value from some composite property. Implicit properties will -# be expressed without feature grist, and sub-property values will be expressed -# as elements joined to the corresponding main property. -# -rule minimize ( properties * ) -{ - # Precondition checking - local implicits = [ set.intersection $(p:G=) : $(p:G) ] ; - if $(implicits) - { - errors.error minimize requires an expanded property set, but - \"$(implicits[1])\" appears to be the value of an un-expanded - implicit feature ; - } - - # Remove properties implied by composite features. - local components = $($(properties).components) ; - local x = [ set.difference $(properties) : $(components) ] ; - - # Handle subfeatures and implicit features. - x = [ move-subfeatures-to-the-end $(x) ] ; - local result ; - while $(x) - { - local p fullp = $(x[1]) ; - local f = $(p:G) ; - local v = $(p:G=) ; - - # Eliminate features in implicit properties. - if implicit in [ attributes $(f) ] - { - p = $(v) ; - } - - # Locate all subproperties of $(x[1]) in the property set. - local subproperties = [ select-subproperties $(fullp) : $(x) ] ; - if $(subproperties) - { - # Reconstitute the joined property name. 
- local sorted = [ sequence.insertion-sort $(subproperties) ] ; - result += $(p)-$(sorted:G="":J=-) ; - - x = [ set.difference $(x[2-]) : $(subproperties) ] ; - } - else - { - # Eliminate properties whose value is equal to feature's default, - # which are not symmetric and which do not contradict values implied - # by composite properties. - - # Since all component properties of composites in the set have been - # eliminated, any remaining property whose feature is the same as a - # component of a composite in the set must have a non-redundant - # value. - if $(fullp) != [ defaults $(f) ] - || symmetric in [ attributes $(f) ] - || $(fullp:G) in $(components:G) - { - result += $(p) ; - } - - x = $(x[2-]) ; - } - } - return $(result) ; -} - - -# Combine all subproperties into their parent properties -# -# Requires: for every subproperty, there is a parent property. All features are -# explicitly expressed. -# -# This rule probably should not be needed, but build-request.expand-no-defaults -# is being abused for unintended purposes and it needs help. -# -rule compress-subproperties ( properties * ) -{ - local all-subs ; - local matched-subs ; - local result ; - - for local p in $(properties) - { - if ! $(p:G) - { - # Expecting fully-gristed properties. - assert.variable-not-empty p:G ; - } - - if ! subfeature in $($(p:G).attributes) - { - local subs = [ sequence.insertion-sort - [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ; - - matched-subs += $(subs) ; - - local subvalues = -$(subs:G=:J=-) ; - subvalues ?= "" ; - result += $(p)$(subvalues) ; - } - else - { - all-subs += $(p) ; - } - } - assert.result true : set.equal $(all-subs) : $(matched-subs) ; - return $(result) ; -} - - -# Given an ungristed string, finds the longest prefix which is a top-level -# feature name followed by a dash, and return a pair consisting of the parts -# before and after that dash. More interesting than a simple split because -# feature names may contain dashes. -# -local rule split-top-feature ( feature-plus ) -{ - local e = [ regex.split $(feature-plus) - ] ; - local f = $(e[1]) ; - local v ; - while $(e) - { - if <$(f)> in $(.all-top-features) - { - v = $(f) $(e[2-]:J=-) ; - } - e = $(e[2-]) ; - f = $(f)-$(e[1]) ; - } - return $(v) ; -} - - -# Given a set of properties, add default values for features not represented in -# the set. -# -# Note: if there's an ordinary feature F1 and a composite feature F2 which -# includes some value for F1 and both feature have default values then the -# default value of F1 will be added (as opposed to the value in F2). This might -# not be the right idea, e.g. consider: -# -# feature variant : debug ... ; -# <variant>debug : .... <runtime-debugging>on -# feature <runtime-debugging> : off on ; -# -# Here, when adding default for an empty property set, we'll get -# -# <variant>debug <runtime_debugging>off -# -# and that's kind of strange. -# -rule add-defaults ( properties * ) -{ - for local v in $(properties:G=) - { - if $(v) in $(properties) - { - errors.error add-defaults requires explicitly specified features, - but \"$(v)\" appears to be the value of an un-expanded implicit - feature ; - } - } - # We don't add default for elements with ":" inside. This catches: - # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG - # to be takes as specified value for <variant> - # 2. Free properties with ":" in values. We don't care, since free - # properties don't have defaults. 
- local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ; - local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ; - local more = [ defaults $(missing-top) ] ; - properties += $(more) ; - xproperties += $(more) ; - - # Add defaults for subfeatures of features which are present. - for local p in $(xproperties) - { - local s = $($(p:G).subfeatures) ; - local f = [ utility.ungrist $(p:G) ] ; - local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ; - properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ; - } - - return $(properties) ; -} - - -# Given a property-set of the form -# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM -# -# Returns -# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM -# -# Note that vN...vM may contain slashes. This needs to be resilient to the -# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps -# slash direction on NT. -# -rule split ( property-set ) -{ - local pieces = [ regex.split $(property-set) [\\/] ] ; - local result ; - - for local x in $(pieces) - { - if ( ! $(x:G) ) && $(result[-1]:G) - { - result = $(result[1--2]) $(result[-1])/$(x) ; - } - else - { - result += $(x) ; - } - } - - return $(result) ; -} - - -# Tests of module feature. -# -rule __test__ ( ) -{ - # Use a fresh copy of the feature module. - prepare-test feature-test-temp ; - - import assert ; - import errors : try catch ; - - # These are local rules and so must be explicitly reimported into the - # testing module. - import feature : extend-feature validate-feature select-subfeatures ; - - feature toolset : gcc : implicit ; - feature define : : free ; - feature runtime-link : dynamic static : symmetric ; - feature optimization : on off ; - feature variant : debug release profile : implicit composite symmetric ; - feature stdlib : native stlport ; - feature magic : : free ; - - compose <variant>debug : <define>_DEBUG <optimization>off ; - compose <variant>release : <define>NDEBUG <optimization>on ; - - assert.result dynamic static : values <runtime-link> ; - assert.result dynamic static : values runtime-link ; - - try ; - { - compose <variant>profile : <variant>profile ; - } - catch composite property <variant>profile cannot have itself as a component ; - - extend-feature toolset : msvc metrowerks ; - subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ; - - assert.true is-subvalue toolset : gcc : version : 2.95.3 ; - assert.false is-subvalue toolset : gcc : version : 1.1 ; - - assert.false is-subvalue toolset : msvc : version : 2.95.3 ; - assert.false is-subvalue toolset : : version : yabba ; - - feature yabba ; - subfeature yabba : version : dabba ; - assert.true is-subvalue yabba : : version : dabba ; - - subfeature toolset gcc : platform : linux cygwin : optional ; - - assert.result <toolset-gcc:version> - : select-subfeatures <toolset>gcc - : <toolset-gcc:version> - <toolset-msvc:version> - <toolset-version> - <stdlib> ; - - subfeature stdlib : version : 3 4 : optional ; - - assert.result <stdlib-version> - : select-subfeatures <stdlib>native - : <toolset-gcc:version> - <toolset-msvc:version> - <toolset-version> - <stdlib-version> ; - - assert.result <toolset>gcc <toolset-gcc:version>3.0.1 - : expand-subfeatures <toolset>gcc-3.0.1 ; - - assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux - : expand-subfeatures <toolset>gcc-3.0.1-linux ; - - assert.result <toolset>gcc <toolset-gcc:version>3.0.1 - : expand <toolset>gcc <toolset-gcc:version>3.0.1 ; - - 
assert.result <define>foo=x-y - : expand-subfeatures <define>foo=x-y ; - - assert.result <toolset>gcc <toolset-gcc:version>3.0.1 - : expand-subfeatures gcc-3.0.1 ; - - assert.result a c e - : get-values <x> : <x>a <y>b <x>c <y>d <x>e ; - - assert.result <toolset>gcc <toolset-gcc:version>3.0.1 - <variant>debug <define>_DEBUG <optimization>on - : expand gcc-3.0.1 debug <optimization>on ; - - assert.result <variant>debug <define>_DEBUG <optimization>on - : expand debug <optimization>on ; - - assert.result <optimization>on <variant>debug <define>_DEBUG - : expand <optimization>on debug ; - - assert.result <runtime-link>dynamic <optimization>on - : defaults <runtime-link> <define> <optimization> ; - - # Make sure defaults is resilient to missing grist. - assert.result <runtime-link>dynamic <optimization>on - : defaults runtime-link define optimization ; - - feature dummy : dummy1 dummy2 ; - subfeature dummy : subdummy : x y z : optional ; - - feature fu : fu1 fu2 : optional ; - subfeature fu : subfu : x y z : optional ; - subfeature fu : subfu2 : q r s ; - - assert.result optional : attributes <fu> ; - assert.result optional : attributes fu ; - - assert.result <runtime-link>static <define>foobar <optimization>on - <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native - <dummy>dummy1 <toolset-gcc:version>2.95.2 - : add-defaults <runtime-link>static <define>foobar <optimization>on - <toolset>gcc:<define>FOO ; - - assert.result <runtime-link>static <define>foobar <optimization>on - <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug - <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2 - : add-defaults <runtime-link>static <define>foobar <optimization>on - <toolset>gcc:<define>FOO <fu>fu1 ; - - set-default <runtime-link> : static ; - assert.result <runtime-link>static : defaults <runtime-link> ; - - assert.result gcc-3.0.1 debug <optimization>on - : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ; - - assert.result gcc-3.0.1 debug <runtime-link>dynamic - : minimize - [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ; - - assert.result gcc-3.0.1 debug - : minimize [ expand gcc-3.0.1 debug <optimization>off ] ; - - assert.result debug <optimization>on - : minimize [ expand debug <optimization>on ] ; - - assert.result gcc-3.0 - : minimize <toolset>gcc <toolset-gcc:version>3.0 ; - - assert.result gcc-3.0 - : minimize <toolset-gcc:version>3.0 <toolset>gcc ; - - assert.result <x>y/z <a>b/c <d>e/f - : split <x>y/z/<a>b/c/<d>e/f ; - - assert.result <x>y/z <a>b/c <d>e/f - : split <x>y\\z\\<a>b\\c\\<d>e\\f ; - - assert.result a b c <d>e/f/g <h>i/j/k - : split a/b/c/<d>e/f/g/<h>i/j/k ; - - assert.result a b c <d>e/f/g <h>i/j/k - : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ; - - # Test error checking. 
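The error paths exercised just below also exist in the Python port; for example, the "lackluster" case corresponds to feature.implied_feature() raising InvalidValue (both defined in feature.py later in this commit). A minimal sketch:

    import b2.build.feature as feature
    from b2.exceptions import InvalidValue

    try:
        feature.implied_feature("lackluster")
    except InvalidValue, e:
        # e.g. "'lackluster' is not a value of an implicit feature"
        print "caught:", e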
- - try ; - { - expand release <optimization>off <optimization>on ; - } - catch explicitly-specified values of non-free feature <optimization> conflict ; - - try ; - { - validate-feature <foobar> ; - } - catch unknown feature ; - - validate-value-string <toolset> gcc ; - validate-value-string <toolset> gcc-3.0.1 ; - - try ; - { - validate-value-string <toolset> digital_mars ; - } - catch \"digital_mars\" is not a known value of <toolset> ; - - try ; - { - feature foobar : : baz ; - } - catch unknown attributes: baz ; - - feature feature1 ; - try ; - { - feature feature1 ; - } - catch feature already defined: ; - - try ; - { - feature feature2 : : free implicit ; - } - catch free features cannot also be implicit ; - - try ; - { - feature feature3 : : free propagated ; - } - catch free features cannot be propagated ; - - try ; - { - implied-feature lackluster ; - } - catch \"lackluster\" is not a value of an implicit feature ; - - try ; - { - implied-subfeature <toolset> 3.0.1 ; - } - catch \"3.0.1\" is not a known subfeature value of <toolset> ; - - try ; - { - implied-subfeature <toolset> not-a-version : gcc ; - } - catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ; - - # Leave a clean copy of the features module behind. - finish-test feature-test-temp ; -} diff --git a/jam-files/boost-build/build/feature.py b/jam-files/boost-build/build/feature.py deleted file mode 100644 index 315a18e9..00000000 --- a/jam-files/boost-build/build/feature.py +++ /dev/null @@ -1,905 +0,0 @@ -# Status: ported, except for unit tests. -# Base revision: 64488 -# -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2002, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import re - -from b2.util import utility, bjam_signature -import b2.util.set -from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq -from b2.exceptions import * - -__re_split_subfeatures = re.compile ('<(.*):(.*)>') -__re_no_hyphen = re.compile ('^([^:]+)$') -__re_slash_or_backslash = re.compile (r'[\\/]') - -class Feature(object): - - # Map from string attribute names to integers bit flags. - # This will be initialized after declaration of the class. - _attribute_name_to_integer = {} - - def __init__(self, name, values, attributes): - self._name = name - self._values = values - self._default = None - self._attributes = 0 - for a in attributes: - self._attributes = self._attributes | Feature._attribute_name_to_integer[a] - self._attributes_string_list = attributes - self._subfeatures = [] - self._parent = None - - def name(self): - return self._name - - def values(self): - return self._values - - def add_values(self, values): - self._values.extend(values) - - def attributes(self): - return self._attributes - - def set_default(self, value): - self._default = value - - def default(self): - return self._default - - # FIXME: remove when we fully move to using classes for features/properties - def attributes_string_list(self): - return self._attributes_string_list - - def subfeatures(self): - return self._subfeatures - - def add_subfeature(self, name): - self._subfeatures.append(name) - - def parent(self): - """For subfeatures, return pair of (parent_feature, value). - - Value may be None if this subfeature is not specific to any - value of the parent feature. 
- """ - return self._parent - - def set_parent(self, feature, value): - self._parent = (feature, value) - - def __str__(self): - return self._name - - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __all_attributes, __all_features, __implicit_features, __composite_properties - global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features - global __all_subfeatures - - # The list with all attribute names. - __all_attributes = [ 'implicit', - 'composite', - 'optional', - 'symmetric', - 'free', - 'incidental', - 'path', - 'dependency', - 'propagated', - 'link-incompatible', - 'subfeature', - 'order-sensitive' - ] - i = 1 - for a in __all_attributes: - setattr(Feature, a.upper(), i) - Feature._attribute_name_to_integer[a] = i - def probe(self, flag=i): - return getattr(self, "_attributes") & flag - setattr(Feature, a.replace("-", "_"), probe) - i = i << 1 - - # A map containing all features. The key is the feature name. - # The value is an instance of Feature class. - __all_features = {} - - # All non-subfeatures. - __all_top_features = [] - - # Maps valus to the corresponding implicit feature - __implicit_features = {} - - # A map containing all composite properties. The key is a Property instance, - # and the value is a list of Property instances - __composite_properties = {} - - __features_with_attributes = {} - for attribute in __all_attributes: - __features_with_attributes [attribute] = [] - - # Maps a value to the corresponding subfeature name. - __subfeature_from_value = {} - - # All free features - __free_features = [] - - __all_subfeatures = [] - -reset () - -def enumerate (): - """ Returns an iterator to the features map. - """ - return __all_features.iteritems () - -def get(name): - """Return the Feature instance for the specified name. - - Throws if no feature by such name exists - """ - return __all_features[name] - -# FIXME: prepare-test/finish-test? - -@bjam_signature((["name"], ["values", "*"], ["attributes", "*"])) -def feature (name, values, attributes = []): - """ Declares a new feature with the given name, values, and attributes. - name: the feature name - values: a sequence of the allowable values - may be extended later with feature.extend - attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...) - """ - __validate_feature_attributes (name, attributes) - - feature = Feature(name, [], attributes) - __all_features[name] = feature - # Temporary measure while we have not fully moved from 'gristed strings' - __all_features["<" + name + ">"] = feature - - for attribute in attributes: - __features_with_attributes [attribute].append (name) - - name = add_grist(name) - - if 'subfeature' in attributes: - __all_subfeatures.append(name) - else: - __all_top_features.append(feature) - - extend (name, values) - - # FIXME: why his is needed. - if 'free' in attributes: - __free_features.append (name) - - return feature - -@bjam_signature((["feature"], ["value"])) -def set_default (feature, value): - """ Sets the default value of the given feature, overriding any previous default. 
- feature: the name of the feature - value: the default value to assign - """ - f = __all_features[feature] - attributes = f.attributes() - bad_attribute = None - - if attributes & Feature.FREE: - bad_attribute = "free" - elif attributes & Feature.OPTIONAL: - bad_attribute = "optional" - - if bad_attribute: - raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, feature.name())) - - if not value in f.values(): - raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % values) - - f.set_default(value) - -def defaults(features): - """ Returns the default property values for the given features. - """ - # FIXME: should merge feature and property modules. - import property - - result = [] - for f in features: - if not f.free() and not f.optional() and f.default(): - result.append(property.Property(f, f.default())) - - return result - -def valid (names): - """ Returns true iff all elements of names are valid features. - """ - def valid_one (name): return __all_features.has_key (name) - - if isinstance (names, str): - return valid_one (names) - else: - return [ valid_one (name) for name in names ] - -def attributes (feature): - """ Returns the attributes of the given feature. - """ - return __all_features[feature].attributes_string_list() - -def values (feature): - """ Return the values of the given feature. - """ - validate_feature (feature) - return __all_features[feature].values() - -def is_implicit_value (value_string): - """ Returns true iff 'value_string' is a value_string - of an implicit feature. - """ - - if __implicit_features.has_key(value_string): - return __implicit_features[value_string] - - v = value_string.split('-') - - if not __implicit_features.has_key(v[0]): - return False - - feature = __implicit_features[v[0]] - - for subvalue in (v[1:]): - if not __find_implied_subfeature(feature, subvalue, v[0]): - return False - - return True - -def implied_feature (implicit_value): - """ Returns the implicit feature associated with the given implicit value. - """ - components = implicit_value.split('-') - - if not __implicit_features.has_key(components[0]): - raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value) - - return __implicit_features[components[0]] - -def __find_implied_subfeature (feature, subvalue, value_string): - - #if value_string == None: value_string = '' - - if not __subfeature_from_value.has_key(feature) \ - or not __subfeature_from_value[feature].has_key(value_string) \ - or not __subfeature_from_value[feature][value_string].has_key (subvalue): - return None - - return __subfeature_from_value[feature][value_string][subvalue] - -# Given a feature and a value of one of its subfeatures, find the name -# of the subfeature. If value-string is supplied, looks for implied -# subfeatures that are specific to that value of feature -# feature # The main feature name -# subvalue # The value of one of its subfeatures -# value-string # The value of the main feature - -def implied_subfeature (feature, subvalue, value_string): - result = __find_implied_subfeature (feature, subvalue, value_string) - if not result: - raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string)) - - return result - -def validate_feature (name): - """ Checks if all name is a valid feature. Otherwise, raises an exception. 
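implied_subfeature and __find_implied_subfeature above resolve a bare subvalue such as 3.0.1 relative to a feature and, optionally, a specific value of that feature. A simplified standalone sketch of the nested lookup they perform, with a hypothetical registration table (the real table is populated by extend_subfeature):

    # feature -> value-string -> subvalue -> subfeature name; '' means "any value".
    subfeature_from_value = {
        "toolset": {"gcc": {"3.0.1": "toolset-gcc:version"}},
    }

    def find_implied_subfeature(feature, subvalue, value_string):
        # Returns the subfeature name, or None when the subvalue is unknown,
        # in which case implied_subfeature raises InvalidValue.
        by_value = subfeature_from_value.get(feature, {})
        return by_value.get(value_string or "", {}).get(subvalue)

    print(find_implied_subfeature("toolset", "3.0.1", "gcc"))    # toolset-gcc:version
    print(find_implied_subfeature("toolset", "3.0.1", "msvc"))   # None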
- """ - if not __all_features.has_key(name): - raise InvalidFeature ("'%s' is not a valid feature name" % name) - else: - return __all_features[name] - -def valid (names): - """ Returns true iff all elements of names are valid features. - """ - def valid_one (name): return __all_features.has_key (name) - - if isinstance (names, str): - return valid_one (names) - else: - return [ valid_one (name) for name in names ] - -# Uses Property -def __expand_subfeatures_aux (property, dont_validate = False): - """ Helper for expand_subfeatures. - Given a feature and value, or just a value corresponding to an - implicit feature, returns a property set consisting of all component - subfeatures and their values. For example: - - expand_subfeatures <toolset>gcc-2.95.2-linux-x86 - -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86 - equivalent to: - expand_subfeatures gcc-2.95.2-linux-x86 - - feature: The name of the feature, or empty if value corresponds to an implicit property - value: The value of the feature. - dont_validate: If True, no validation of value string will be done. - """ - f = property.feature() - v = property.value() - if not dont_validate: - validate_value_string(f, v) - - components = v.split ("-") - - v = components[0] - - import property - - result = [property.Property(f, components[0])] - - subvalues = components[1:] - - while len(subvalues) > 0: - subvalue = subvalues [0] # pop the head off of subvalues - subvalues = subvalues [1:] - - subfeature = __find_implied_subfeature (f, subvalue, v) - - # If no subfeature was found, reconstitute the value string and use that - if not subfeature: - return [property.Property(f, '-'.join(components))] - - result.append(property.Property(subfeature, subvalue)) - - return result - -def expand_subfeatures(properties, dont_validate = False): - """ - Make all elements of properties corresponding to implicit features - explicit, and express all subfeature values as separate properties - in their own right. For example, the property - - gcc-2.95.2-linux-x86 - - might expand to - - <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86 - - properties: A sequence with elements of the form - <feature>value-string or just value-string in the - case of implicit features. - : dont_validate: If True, no validation of value string will be done. - """ - result = [] - for p in properties: - # Don't expand subfeatures in subfeatures - if p.feature().subfeature(): - result.append (p) - else: - result.extend(__expand_subfeatures_aux (p, dont_validate)) - - return result - - - -# rule extend was defined as below: - # Can be called three ways: - # - # 1. extend feature : values * - # 2. extend <feature> subfeature : values * - # 3. extend <feature>value-string subfeature : values * - # - # * Form 1 adds the given values to the given feature - # * Forms 2 and 3 add subfeature values to the given feature - # * Form 3 adds the subfeature values as specific to the given - # property value-string. - # - #rule extend ( feature-or-property subfeature ? : values * ) -# -# Now, the specific rule must be called, depending on the desired operation: -# extend_feature -# extend_subfeature - -def extend (name, values): - """ Adds the given values to the given feature. 
- """ - name = add_grist (name) - __validate_feature (name) - feature = __all_features [name] - - if feature.implicit(): - for v in values: - if __implicit_features.has_key(v): - raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v])) - - __implicit_features[v] = feature - - if len (feature.values()) == 0 and len (values) > 0: - # This is the first value specified for this feature, - # take it as default value - feature.set_default(values[0]) - - feature.add_values(values) - -def validate_value_string (f, value_string): - """ Checks that value-string is a valid value-string for the given feature. - """ - if f.free() or value_string in f.values(): - return - - values = [value_string] - - if f.subfeatures(): - if not value_string in f.values() and \ - not value_string in f.subfeatures(): - values = value_string.split('-') - - # An empty value is allowed for optional features - if not values[0] in f.values() and \ - (values[0] or not f.optional()): - raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], feature, f.values())) - - for v in values [1:]: - # this will validate any subfeature values in value-string - implied_subfeature(f, v, values[0]) - - -""" Extends the given subfeature with the subvalues. If the optional - value-string is provided, the subvalues are only valid for the given - value of the feature. Thus, you could say that - <target-platform>mingw is specifc to <toolset>gcc-2.95.2 as follows: - - extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ; - - feature: The feature whose subfeature is being extended. - - value-string: If supplied, specifies a specific value of the - main feature for which the new subfeature values - are valid. - - subfeature: The name of the subfeature. - - subvalues: The additional values of the subfeature being defined. -""" -def extend_subfeature (feature_name, value_string, subfeature_name, subvalues): - - feature = validate_feature(feature_name) - - if value_string: - validate_value_string(feature, value_string) - - subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string) - - extend(subfeature_name, subvalues) ; - subfeature = __all_features[subfeature_name] - - if value_string == None: value_string = '' - - if not __subfeature_from_value.has_key(feature): - __subfeature_from_value [feature] = {} - - if not __subfeature_from_value[feature].has_key(value_string): - __subfeature_from_value [feature][value_string] = {} - - for subvalue in subvalues: - __subfeature_from_value [feature][value_string][subvalue] = subfeature - -@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"], - ["subvalues", "*"], ["attributes", "*"])) -def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []): - """ Declares a subfeature. - feature_name: Root feature that is not a subfeature. - value_string: An optional value-string specifying which feature or - subfeature values this subfeature is specific to, - if any. - subfeature: The name of the subfeature being declared. - subvalues: The allowed values of this subfeature. - attributes: The attributes of the subfeature. 
- """ - parent_feature = validate_feature (feature_name) - - # Add grist to the subfeature name if a value-string was supplied - subfeature_name = __get_subfeature_name (subfeature, value_string) - - if subfeature_name in __all_features[feature_name].subfeatures(): - message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name) - message += " specific to '%s'" % value_string - raise BaseException (message) - - # First declare the subfeature as a feature in its own right - f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature']) - f.set_parent(parent_feature, value_string) - - parent_feature.add_subfeature(f) - - # Now make sure the subfeature values are known. - extend_subfeature (feature_name, value_string, subfeature, subvalues) - - -@bjam_signature((["composite_property_s"], ["component_properties_s", "*"])) -def compose (composite_property_s, component_properties_s): - """ Sets the components of the given composite property. - - All paremeters are <feature>value strings - """ - import property - - component_properties_s = to_seq (component_properties_s) - composite_property = property.create_from_string(composite_property_s) - f = composite_property.feature() - - if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property): - component_properties = component_properties_s - else: - component_properties = [property.create_from_string(p) for p in component_properties_s] - - if not f.composite(): - raise BaseException ("'%s' is not a composite feature" % f) - - if __composite_properties.has_key(property): - raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property]))) - - if composite_property in component_properties: - raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property) - - __composite_properties[composite_property] = component_properties - - -def expand_composite(property): - result = [ property ] - if __composite_properties.has_key(property): - for p in __composite_properties[property]: - result.extend(expand_composite(p)) - return result - - -def get_values (feature, properties): - """ Returns all values of the given feature specified by the given property set. - """ - result = [] - for p in properties: - if get_grist (p) == feature: - result.append (replace_grist (p, '')) - - return result - -def free_features (): - """ Returns all free features. - """ - return __free_features - -def expand_composites (properties): - """ Expand all composite properties in the set so that all components - are explicitly expressed. 
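compose and expand_composite above record and expand composite properties: a property such as <variant>debug stands for a bundle of component properties, and expansion is recursive so that components which are themselves composite are expanded too. A small standalone sketch under a hypothetical composite table (the actual components of <variant>debug are whatever the build configuration declared):

    # Hypothetical composite table: (feature, value) -> component properties.
    COMPOSITES = {("variant", "debug"): [("define", "_DEBUG"), ("optimization", "off")]}

    def expand_composite(prop):
        # Depth-first expansion, keeping the composite property itself in the result.
        result = [prop]
        for component in COMPOSITES.get(prop, []):
            result.extend(expand_composite(component))
        return result

    print(expand_composite(("variant", "debug")))
    # [('variant', 'debug'), ('define', '_DEBUG'), ('optimization', 'off')]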
- """ - explicit_features = set(p.feature() for p in properties) - - result = [] - - # now expand composite features - for p in properties: - expanded = expand_composite(p) - - for x in expanded: - if not x in result: - f = x.feature() - - if f.free(): - result.append (x) - elif not x in properties: # x is the result of expansion - if not f in explicit_features: # not explicitly-specified - if any(r.feature() == f for r in result): - raise FeatureConflict( - "expansions of composite features result in " - "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" % - (f.name(), [r.value() for r in result if r.feature() == f] + [x.value()], p)) - else: - result.append (x) - elif any(r.feature() == f for r in result): - raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n" - "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f, - [r.value() for r in result if r.feature() == f], p, x.value())) - else: - result.append (x) - - return result - -# Uses Property -def is_subfeature_of (parent_property, f): - """ Return true iff f is an ordinary subfeature of the parent_property's - feature, or if f is a subfeature of the parent_property's feature - specific to the parent_property's value. - """ - if not f.subfeature(): - return False - - p = f.parent() - if not p: - return False - - parent_feature = p[0] - parent_value = p[1] - - if parent_feature != parent_property.feature(): - return False - - if parent_value and parent_value != parent_property.value(): - return False - - return True - -def __is_subproperty_of (parent_property, p): - """ As is_subfeature_of, for subproperties. - """ - return is_subfeature_of (parent_property, p.feature()) - - -# Returns true iff the subvalue is valid for the feature. When the -# optional value-string is provided, returns true iff the subvalues -# are valid for the given value of the feature. -def is_subvalue(feature, value_string, subfeature, subvalue): - - if not value_string: - value_string = '' - - if not __subfeature_from_value.has_key(feature): - return False - - if not __subfeature_from_value[feature].has_key(value_string): - return False - - if not __subfeature_from_value[feature][value_string].has_key(subvalue): - return False - - if __subfeature_from_value[feature][value_string][subvalue]\ - != subfeature: - return False - - return True - -def implied_subfeature (feature, subvalue, value_string): - result = __find_implied_subfeature (feature, subvalue, value_string) - if not result: - raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string)) - - return result - - -# Uses Property -def expand (properties): - """ Given a property set which may consist of composite and implicit - properties and combined subfeature values, returns an expanded, - normalized property set with all implicit features expressed - explicitly, all subfeature values individually expressed, and all - components of composite properties expanded. Non-free features - directly expressed in the input properties cause any values of - those features due to composite feature expansion to be dropped. If - two values of a given non-free feature are directly expressed in the - input, an error is issued. - """ - expanded = expand_subfeatures(properties) - return expand_composites (expanded) - -# Accepts list of Property objects -def add_defaults (properties): - """ Given a set of properties, add default values for features not - represented in the set. 
- Note: if there's there's ordinary feature F1 and composite feature - F2, which includes some value for F1, and both feature have default values, - then the default value of F1 will be added, not the value in F2. This might - not be right idea: consider - - feature variant : debug ... ; - <variant>debug : .... <runtime-debugging>on - feature <runtime-debugging> : off on ; - - Here, when adding default for an empty property set, we'll get - - <variant>debug <runtime_debugging>off - - and that's kind of strange. - """ - result = [x for x in properties] - - handled_features = set() - for p in properties: - # We don't add default for conditional properties. We don't want - # <variant>debug:<define>DEBUG to be takes as specified value for <variant> - if not p.condition(): - handled_features.add(p.feature()) - - missing_top = [f for f in __all_top_features if not f in handled_features] - more = defaults(missing_top) - result.extend(more) - for p in more: - handled_features.add(p.feature()) - - # Add defaults for subfeatures of features which are present - for p in result[:]: - s = p.feature().subfeatures() - more = defaults([s for s in p.feature().subfeatures() if not s in handled_features]) - for p in more: - handled_features.add(p.feature()) - result.extend(more) - - return result - -def minimize (properties): - """ Given an expanded property set, eliminate all redundancy: properties - which are elements of other (composite) properties in the set will - be eliminated. Non-symmetric properties equal to default values will be - eliminated, unless the override a value from some composite property. - Implicit properties will be expressed without feature - grist, and sub-property values will be expressed as elements joined - to the corresponding main property. - """ - - # remove properties implied by composite features - components = [] - for property in properties: - if __composite_properties.has_key (property): - components.extend(__composite_properties[property]) - properties = b2.util.set.difference (properties, components) - - # handle subfeatures and implicit features - - # move subfeatures to the end of the list - properties = [p for p in properties if not p.feature().subfeature()] +\ - [p for p in properties if p.feature().subfeature()] - - result = [] - while properties: - p = properties[0] - f = p.feature() - - # locate all subproperties of $(x[1]) in the property set - subproperties = __select_subproperties (p, properties) - - if subproperties: - # reconstitute the joined property name - subproperties.sort () - joined = b2.build.property.Property(p.feature(), p.value() + '-' + '-'.join ([sp.value() for sp in subproperties])) - result.append(joined) - - properties = b2.util.set.difference(properties[1:], subproperties) - - else: - # eliminate properties whose value is equal to feature's - # default and which are not symmetric and which do not - # contradict values implied by composite properties. - - # since all component properties of composites in the set - # have been eliminated, any remaining property whose - # feature is the same as a component of a composite in the - # set must have a non-redundant value. - if p.value() != f.default() or f.symmetric(): - result.append (p) - #\ - #or get_grist (fullp) in get_grist (components): - # FIXME: restore above - - - properties = properties[1:] - - return result - - -def split (properties): - """ Given a property-set of the form - v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM - - Returns - v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... 
<fM>vM - - Note that vN...vM may contain slashes. This is resilient to the - substitution of backslashes for slashes, since Jam, unbidden, - sometimes swaps slash direction on NT. - """ - - def split_one (properties): - pieces = re.split (__re_slash_or_backslash, properties) - result = [] - - for x in pieces: - if not get_grist (x) and len (result) > 0 and get_grist (result [-1]): - result = result [0:-1] + [ result [-1] + '/' + x ] - else: - result.append (x) - - return result - - if isinstance (properties, str): - return split_one (properties) - - result = [] - for p in properties: - result += split_one (p) - return result - - -def compress_subproperties (properties): - """ Combine all subproperties into their parent properties - - Requires: for every subproperty, there is a parent property. All - features are explicitly expressed. - - This rule probably shouldn't be needed, but - build-request.expand-no-defaults is being abused for unintended - purposes and it needs help - """ - result = [] - matched_subs = set() - all_subs = set() - for p in properties: - f = p.feature() - - if not f.subfeature(): - subs = __select_subproperties (p, properties) - if subs: - - matched_subs.update(subs) - - subvalues = '-'.join (sub.value() for sub in subs) - result.append(b2.build.property.Property( - p.feature(), p.value() + '-' + subvalues, - p.condition())) - else: - result.append(p) - - else: - all_subs.add(p) - - # TODO: this variables are used just for debugging. What's the overhead? - assert all_subs == matched_subs - - return result - -###################################################################################### -# Private methods - -def __select_subproperties (parent_property, properties): - return [ x for x in properties if __is_subproperty_of (parent_property, x) ] - -def __get_subfeature_name (subfeature, value_string): - if value_string == None: - prefix = '' - else: - prefix = value_string + ':' - - return prefix + subfeature - - -def __validate_feature_attributes (name, attributes): - for attribute in attributes: - if not attribute in __all_attributes: - raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name)) - - if name in __all_features: - raise AlreadyDefined ("feature '%s' already defined" % name) - elif 'implicit' in attributes and 'free' in attributes: - raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name) - elif 'free' in attributes and 'propagated' in attributes: - raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name) - - -def __validate_feature (feature): - """ Generates an error if the feature is unknown. - """ - if not __all_features.has_key (feature): - raise BaseException ('unknown feature "%s"' % feature) - - -def __select_subfeatures (parent_property, features): - """ Given a property, return the subset of features consisting of all - ordinary subfeatures of the property's feature, and all specific - subfeatures of the property's feature which are conditional on the - property's value. - """ - return [f for f in features if is_subfeature_of (parent_property, f)] - -# FIXME: copy over tests. diff --git a/jam-files/boost-build/build/generators.jam b/jam-files/boost-build/build/generators.jam deleted file mode 100644 index 1515525f..00000000 --- a/jam-files/boost-build/build/generators.jam +++ /dev/null @@ -1,1408 +0,0 @@ -# Copyright Vladimir Prus 2002. 
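split and its helper split_one above break a path-like property string such as <x>y/z/<a>b/c into individual gristed properties, and they tolerate backslashes because bjam sometimes flips slash direction on NT. A standalone Python sketch of the same splitting rule (illustration only; the real code also accepts lists of strings):

    import re

    def split_properties(s):
        # Split on / or \ and re-attach ungristed pieces to the preceding
        # gristed element, so slashes inside a value are preserved.
        pieces = re.split(r"[\\/]", s)
        result = []
        for x in pieces:
            if not x.startswith("<") and result and result[-1].startswith("<"):
                result[-1] += "/" + x
            else:
                result.append(x)
        return result

    print(split_properties("<x>y/z/<a>b/c/<d>e/f"))   # ['<x>y/z', '<a>b/c', '<d>e/f']
    print(split_properties(r"a\b\c\<d>e\f\g"))        # ['a', 'b', 'c', '<d>e/f/g']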
-# Copyright Rene Rivera 2006. -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Manages 'generators' --- objects which can do transformation between different -# target types and contain algorithm for finding transformation from sources to -# targets. -# -# The main entry point to this module is generators.construct rule. It is given -# a list of source targets, desired target type and a set of properties. It -# starts by selecting 'viable generators', which have any chances of producing -# the desired target type with the required properties. Generators are ranked -# and a set of the most specific ones is selected. -# -# The most specific generators have their 'run' methods called, with the -# properties and list of sources. Each one selects a target which can be -# directly consumed, and tries to convert the remaining ones to the types it can -# consume. This is done by recursively calling 'construct' with all consumable -# types. -# -# If the generator has collected all the targets it needs, it creates targets -# corresponding to result, and returns it. When all generators have been run, -# results of one of them are selected and returned as a result. -# -# It is quite possible for 'construct' to return more targets that it was asked -# for. For example, if it were asked to generate a target of type EXE, but the -# only found generator produces both EXE and TDS (file with debug) information. -# The extra target will be returned. -# -# Likewise, when generator tries to convert sources to consumable types, it can -# get more targets that it was asked for. The question is what to do with extra -# targets. Boost.Build attempts to convert them to requested types, and attempts -# that as early as possible. Specifically, this is done after invoking each -# generator. TODO: An example is needed to document the rationale for trying -# extra target conversion at that point. -# -# In order for the system to be able to use a specific generator instance 'when -# needed', the instance needs to be registered with the system using -# generators.register() or one of its related rules. Unregistered generators may -# only be run explicitly and will not be considered by Boost.Build when when -# converting between given target types. - -import "class" : new ; -import errors ; -import property-set ; -import sequence ; -import set ; -import type ; -import utility ; -import virtual-target ; - - -if "--debug-generators" in [ modules.peek : ARGV ] -{ - .debug = true ; -} - - -# Updated cached viable source target type information as needed after a new -# target type gets defined. This is needed because if a target type is a viable -# source target type for some generator then all of the target type's derived -# target types should automatically be considered as viable source target types -# for the same generator as well. Does nothing if a non-derived target type is -# passed to it. -# -rule update-cached-information-with-a-new-type ( type ) -{ - local base-type = [ type.base $(type) ] ; - if $(base-type) - { - for local g in $(.vstg-cached-generators) - { - if $(base-type) in $(.vstg.$(g)) - { - .vstg.$(g) += $(type) ; - } - } - - for local t in $(.vst-cached-types) - { - if $(base-type) in $(.vst.$(t)) - { - .vst.$(t) += $(type) ; - } - } - } -} - - -# Clears cached viable source target type information except for target types -# and generators with all source types listed as viable. 
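The overview comment above describes generators.construct as a recursive search: viable generators for the requested target type are tried, and each one in turn asks construct to convert the remaining sources into the types it can consume. A heavily simplified standalone sketch of that idea, with a hypothetical registry mapping target types to generators (real generators also carry properties, ranking and composing behaviour):

    # Hypothetical registry: target type -> list of (generator id, consumed source types).
    GENERATORS = {
        "OBJ": [("gcc.compile.c++", ["CPP"])],
        "EXE": [("gcc.link", ["OBJ", "LIB"])],
    }

    def find_chain(target_type, source_type, seen=()):
        # Return a list of generator ids converting source_type into target_type.
        if target_type == source_type:
            return []
        for gen_id, consumes in GENERATORS.get(target_type, []):
            for s in consumes:
                if s in seen:
                    continue
                tail = find_chain(s, source_type, seen + (target_type,))
                if tail is not None:
                    return tail + [gen_id]
        return None

    print(find_chain("EXE", "CPP"))   # ['gcc.compile.c++', 'gcc.link']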
Should be called when -# something invalidates those cached values by possibly causing some new source -# types to become viable. -# -local rule invalidate-extendable-viable-source-target-type-cache ( ) -{ - local generators-with-cached-source-types = $(.vstg-cached-generators) ; - .vstg-cached-generators = ; - for local g in $(generators-with-cached-source-types) - { - if $(.vstg.$(g)) = * - { - .vstg-cached-generators += $(g) ; - } - else - { - .vstg.$(g) = ; - } - } - - local types-with-cached-source-types = $(.vst-cached-types) ; - .vst-cached-types = ; - for local t in $(types-with-cached-source-types) - { - if $(.vst.$(t)) = * - { - .vst-cached-types += $(t) ; - } - else - { - .vst.$(t) = ; - } - } -} - - -# Outputs a debug message if generators debugging is on. Each element of -# 'message' is checked to see if it is a class instance. If so, instead of the -# value, the result of 'str' call is output. -# -local rule generators.dout ( message * ) -{ - if $(.debug) - { - ECHO [ sequence.transform utility.str : $(message) ] ; - } -} - - -local rule indent ( ) -{ - return $(.indent:J="") ; -} - - -local rule increase-indent ( ) -{ - .indent += " " ; -} - - -local rule decrease-indent ( ) -{ - .indent = $(.indent[2-]) ; -} - - -# Models a generator. -# -class generator -{ - import generators : indent increase-indent decrease-indent generators.dout ; - import set ; - import utility ; - import feature ; - import errors ; - import sequence ; - import type ; - import virtual-target ; - import "class" : new ; - import property ; - import path ; - - EXPORT class@generator : indent increase-indent decrease-indent - generators.dout ; - - rule __init__ ( - id # Identifies the generator - should be name - # of the rule which sets up the build - # actions. - - composing ? # Whether generator processes each source - # target in turn, converting it to required - # types. Ordinary generators pass all - # sources together to the recursive - # generators.construct-types call. - - : source-types * # Types that this generator can handle. If - # empty, the generator can consume anything. - - : target-types-and-names + # Types the generator will create and, - # optionally, names for created targets. - # Each element should have the form - # type["(" name-pattern ")"], for example, - # obj(%_x). Generated target name will be - # found by replacing % with the name of - # source, provided an explicit name was not - # specified. - - : requirements * - ) - { - self.id = $(id) ; - self.rule-name = $(id) ; - self.composing = $(composing) ; - self.source-types = $(source-types) ; - self.target-types-and-names = $(target-types-and-names) ; - self.requirements = $(requirements) ; - - for local e in $(target-types-and-names) - { - # Create three parallel lists: one with the list of target types, - # and two other with prefixes and postfixes to be added to target - # name. We use parallel lists for prefix and postfix (as opposed to - # mapping), because given target type might occur several times, for - # example "H H(%_symbols)". - local m = [ MATCH ([^\\(]*)(\\((.*)%(.*)\\))? : $(e) ] ; - self.target-types += $(m[1]) ; - self.name-prefix += $(m[3]:E="") ; - self.name-postfix += $(m[4]:E="") ; - } - - # Note that 'transform' here, is the same as 'for_each'. 
- sequence.transform type.validate : $(self.source-types) ; - sequence.transform type.validate : $(self.target-types) ; - } - - ################# End of constructor ################# - - rule id ( ) - { - return $(self.id) ; - } - - # Returns the list of target type the generator accepts. - # - rule source-types ( ) - { - return $(self.source-types) ; - } - - # Returns the list of target types that this generator produces. It is - # assumed to be always the same -- i.e. it can not change depending on some - # provided list of sources. - # - rule target-types ( ) - { - return $(self.target-types) ; - } - - # Returns the required properties for this generator. Properties in returned - # set must be present in build properties if this generator is to be used. - # If result has grist-only element, that build properties must include some - # value of that feature. - # - # XXX: remove this method? - # - rule requirements ( ) - { - return $(self.requirements) ; - } - - rule set-rule-name ( rule-name ) - { - self.rule-name = $(rule-name) ; - } - - rule rule-name ( ) - { - return $(self.rule-name) ; - } - - # Returns a true value if the generator can be run with the specified - # properties. - # - rule match-rank ( property-set-to-match ) - { - # See if generator requirements are satisfied by 'properties'. Treat a - # feature name in requirements (i.e. grist-only element), as matching - # any value of the feature. - local all-requirements = [ requirements ] ; - - local property-requirements feature-requirements ; - for local r in $(all-requirements) - { - if $(r:G=) - { - property-requirements += $(r) ; - } - else - { - feature-requirements += $(r) ; - } - } - - local properties-to-match = [ $(property-set-to-match).raw ] ; - if $(property-requirements) in $(properties-to-match) && - $(feature-requirements) in $(properties-to-match:G) - { - return true ; - } - else - { - return ; - } - } - - # Returns another generator which differs from $(self) in - # - id - # - value to <toolset> feature in properties - # - rule clone ( new-id : new-toolset-properties + ) - { - local g = [ new $(__class__) $(new-id) $(self.composing) : - $(self.source-types) : $(self.target-types-and-names) : - # Note: this does not remove any subfeatures of <toolset> which - # might cause problems. - [ property.change $(self.requirements) : <toolset> ] - $(new-toolset-properties) ] ; - return $(g) ; - } - - # Creates another generator that is the same as $(self), except that if - # 'base' is in target types of $(self), 'type' will in target types of the - # new generator. - # - rule clone-and-change-target-type ( base : type ) - { - local target-types ; - for local t in $(self.target-types-and-names) - { - local m = [ MATCH ([^\\(]*)(\\(.*\\))? : $(t) ] ; - if $(m) = $(base) - { - target-types += $(type)$(m[2]:E="") ; - } - else - { - target-types += $(t) ; - } - } - - local g = [ new $(__class__) $(self.id) $(self.composing) : - $(self.source-types) : $(target-types) : $(self.requirements) ] ; - if $(self.rule-name) - { - $(g).set-rule-name $(self.rule-name) ; - } - return $(g) ; - } - - # Tries to invoke this generator on the given sources. Returns a list of - # generated targets (instances of 'virtual-target') and optionally a set of - # properties to be added to the usage-requirements for all the generated - # targets. Returning nothing from run indicates that the generator was - # unable to create the target. - # - rule run - ( - project # Project for which the targets are generated. - name ? 
# Used when determining the 'name' attribute for all - # generated targets. See the 'generated-targets' method. - : property-set # Desired properties for generated targets. - : sources + # Source targets. - ) - { - generators.dout [ indent ] " ** generator" $(self.id) ; - generators.dout [ indent ] " composing:" $(self.composing) ; - - if ! $(self.composing) && $(sources[2]) && $(self.source-types[2]) - { - errors.error "Unsupported source/source-type combination" ; - } - - # We do not run composing generators if no name is specified. The reason - # is that composing generator combines several targets, which can have - # different names, and it cannot decide which name to give for produced - # target. Therefore, the name must be passed. - # - # This in effect, means that composing generators are runnable only at - # the top-level of a transformation graph, or if their name is passed - # explicitly. Thus, we dissallow composing generators in the middle. For - # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE - # will not be allowed as the OBJ -> STATIC_LIB generator is composing. - if ! $(self.composing) || $(name) - { - run-really $(project) $(name) : $(property-set) : $(sources) ; - } - } - - rule run-really ( project name ? : property-set : sources + ) - { - # Targets that this generator will consume directly. - local consumed = ; - # Targets that can not be consumed and will be returned as-is. - local bypassed = ; - - if $(self.composing) - { - convert-multiple-sources-to-consumable-types $(project) - : $(property-set) : $(sources) : consumed bypassed ; - } - else - { - convert-to-consumable-types $(project) $(name) : $(property-set) - : $(sources) : : consumed bypassed ; - } - - local result ; - if $(consumed) - { - result = [ construct-result $(consumed) : $(project) $(name) : - $(property-set) ] ; - } - - if $(result) - { - generators.dout [ indent ] " SUCCESS: " $(result) ; - } - else - { - generators.dout [ indent ] " FAILURE" ; - } - generators.dout ; - return $(result) ; - } - - # Constructs the dependency graph to be returned by this generator. - # - rule construct-result - ( - consumed + # Already prepared list of consumable targets. - # Composing generators may receive multiple sources - # all of which will have types matching those in - # $(self.source-types). Non-composing generators with - # multiple $(self.source-types) will receive exactly - # len $(self.source-types) sources with types matching - # those in $(self.source-types). And non-composing - # generators with only a single source type may - # receive multiple sources with all of them of the - # type listed in $(self.source-types). - : project name ? - : property-set # Properties to be used for all actions created here. - ) - { - local result ; - # If this is 1->1 transformation, apply it to all consumed targets in - # order. - if ! $(self.source-types[2]) && ! $(self.composing) - { - for local r in $(consumed) - { - result += [ generated-targets $(r) : $(property-set) : - $(project) $(name) ] ; - } - } - else if $(consumed) - { - result += [ generated-targets $(consumed) : $(property-set) : - $(project) $(name) ] ; - } - return $(result) ; - } - - # Determine target name from fullname (maybe including path components) - # Place optional prefix and postfix around basename - # - rule determine-target-name ( fullname : prefix ? : postfix ? ) - { - # See if we need to add directory to the target name. 
- local dir = $(fullname:D) ; - local name = $(fullname:B) ; - - name = $(prefix:E=)$(name) ; - name = $(name)$(postfix:E=) ; - - if $(dir) && - # Never append '..' to target path. - ! [ MATCH .*(\\.\\.).* : $(dir) ] - && - ! [ path.is-rooted $(dir) ] - { - # Relative path is always relative to the source - # directory. Retain it, so that users can have files - # with the same in two different subdirectories. - name = $(dir)/$(name) ; - } - return $(name) ; - } - - # Determine the name of the produced target from the names of the sources. - # - rule determine-output-name ( sources + ) - { - # The simple case if when a name of source has single dot. Then, we take - # the part before dot. Several dots can be caused by: - # - using source file like a.host.cpp, or - # - a type whose suffix has a dot. Say, we can type 'host_cpp' with - # extension 'host.cpp'. - # In the first case, we want to take the part up to the last dot. In the - # second case -- not sure, but for now take the part up to the last dot - # too. - name = [ utility.basename [ $(sources[1]).name ] ] ; - - for local s in $(sources[2]) - { - local n2 = [ utility.basename [ $(s).name ] ] ; - if $(n2) != $(name) - { - errors.error "$(self.id): source targets have different names: cannot determine target name" ; - } - } - name = [ determine-target-name [ $(sources[1]).name ] ] ; - return $(name) ; - } - - # Constructs targets that are created after consuming 'sources'. The result - # will be the list of virtual-target, which has the same length as the - # 'target-types' attribute and with corresponding types. - # - # When 'name' is empty, all source targets must have the same 'name' - # attribute value, which will be used instead of the 'name' argument. - # - # The 'name' attribute value for each generated target will be equal to - # the 'name' parameter if there is no name pattern for this type. Otherwise, - # the '%' symbol in the name pattern will be replaced with the 'name' - # parameter to obtain the 'name' attribute. - # - # For example, if targets types are T1 and T2 (with name pattern "%_x"), - # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created - # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually - # determines the basename of a file. - # - # Note that this pattern mechanism has nothing to do with implicit patterns - # in make. It is a way to produce a target whose name is different than the - # name of its source. - # - rule generated-targets ( sources + : property-set : project name ? ) - { - if ! $(name) - { - name = [ determine-output-name $(sources) ] ; - } - - # Assign an action for each target. - local action = [ action-class ] ; - local a = [ class.new $(action) $(sources) : $(self.rule-name) : - $(property-set) ] ; - - # Create generated target for each target type. - local targets ; - local pre = $(self.name-prefix) ; - local post = $(self.name-postfix) ; - for local t in $(self.target-types) - { - local generated-name = $(pre[1])$(name:BS)$(post[1]) ; - generated-name = $(generated-name:R=$(name:D)) ; - pre = $(pre[2-]) ; - post = $(post[2-]) ; - - targets += [ class.new file-target $(generated-name) : $(t) : - $(project) : $(a) ] ; - } - - return [ sequence.transform virtual-target.register : $(targets) ] ; - } - - # Attempts to convert 'sources' to targets of types that this generator can - # handle. The intention is to produce the set of targets that can be used - # when the generator is run. - # - rule convert-to-consumable-types - ( - project name ? 
- : property-set - : sources + - : only-one ? # Convert 'source' to only one of the source types. If - # there is more that one possibility, report an error. - : consumed-var # Name of the variable which receives all targets which - # can be consumed. - bypassed-var # Name of the variable which receives all targets which - # can not be consumed. - ) - { - # We are likely to be passed 'consumed' and 'bypassed' var names. Use - # '_' to avoid name conflicts. - local _consumed ; - local _bypassed ; - local missing-types ; - - if $(sources[2]) - { - # Do not know how to handle several sources yet. Just try to pass - # the request to other generator. - missing-types = $(self.source-types) ; - } - else - { - consume-directly $(sources) : _consumed : missing-types ; - } - - # No need to search for transformation if some source type has consumed - # source and no more source types are needed. - if $(only-one) && $(_consumed) - { - missing-types = ; - } - - # TODO: we should check that only one source type if create of - # 'only-one' is true. - # TODO: consider if consumed/bypassed separation should be done by - # 'construct-types'. - - if $(missing-types) - { - local transformed = [ generators.construct-types $(project) $(name) - : $(missing-types) : $(property-set) : $(sources) ] ; - - # Add targets of right type to 'consumed'. Add others to 'bypassed'. - # The 'generators.construct' rule has done its best to convert - # everything to the required type. There is no need to rerun it on - # targets of different types. - - # NOTE: ignoring usage requirements. - for local t in $(transformed[2-]) - { - if [ $(t).type ] in $(missing-types) - { - _consumed += $(t) ; - } - else - { - _bypassed += $(t) ; - } - } - } - - _consumed = [ sequence.unique $(_consumed) ] ; - _bypassed = [ sequence.unique $(_bypassed) ] ; - - # Remove elements of '_bypassed' that are in '_consumed'. - - # Suppose the target type of current generator, X is produced from X_1 - # and X_2, which are produced from Y by one generator. When creating X_1 - # from Y, X_2 will be added to 'bypassed'. Likewise, when creating X_2 - # from Y, X_1 will be added to 'bypassed', but they are also in - # 'consumed'. We have to remove them from bypassed, so that generators - # up the call stack do not try to convert them. - - # In this particular case, X_1 instance in 'consumed' and X_1 instance - # in 'bypassed' will be the same: because they have the same source and - # action name, and 'virtual-target.register' will not allow two - # different instances. Therefore, it is OK to use 'set.difference'. - - _bypassed = [ set.difference $(_bypassed) : $(_consumed) ] ; - - $(consumed-var) += $(_consumed) ; - $(bypassed-var) += $(_bypassed) ; - } - - # Converts several files to consumable types. Called for composing - # generators only. - # - rule convert-multiple-sources-to-consumable-types ( project : property-set : - sources * : consumed-var bypassed-var ) - { - # We process each source one-by-one, trying to convert it to a usable - # type. - for local source in $(sources) - { - local _c ; - local _b ; - # TODO: need to check for failure on each source. - convert-to-consumable-types $(project) : $(property-set) : $(source) - : true : _c _b ; - if ! 
$(_c) - { - generators.dout [ indent ] " failed to convert " $(source) ; - } - $(consumed-var) += $(_c) ; - $(bypassed-var) += $(_b) ; - } - } - - rule consume-directly ( source : consumed-var : missing-types-var ) - { - local real-source-type = [ $(source).type ] ; - - # If there are no source types, we can consume anything. - local source-types = $(self.source-types) ; - source-types ?= $(real-source-type) ; - - for local st in $(source-types) - { - # The 'source' if of the right type already. - if $(real-source-type) = $(st) || [ type.is-derived - $(real-source-type) $(st) ] - { - $(consumed-var) += $(source) ; - } - else - { - $(missing-types-var) += $(st) ; - } - } - } - - # Returns the class to be used to actions. Default implementation returns - # "action". - # - rule action-class ( ) - { - return "action" ; - } -} - - -# Registers a new generator instance 'g'. -# -rule register ( g ) -{ - .all-generators += $(g) ; - - # A generator can produce several targets of the same type. We want unique - # occurrence of that generator in .generators.$(t) in that case, otherwise, - # it will be tried twice and we will get a false ambiguity. - for local t in [ sequence.unique [ $(g).target-types ] ] - { - .generators.$(t) += $(g) ; - } - - # Update the set of generators for toolset. - - # TODO: should we check that generator with this id is not already - # registered. For example, the fop.jam module intentionally declared two - # generators with the same id, so such check will break it. - local id = [ $(g).id ] ; - - # Some generators have multiple periods in their name, so a simple $(id:S=) - # will not generate the right toolset name. E.g. if id = gcc.compile.c++, - # then .generators-for-toolset.$(id:S=) will append to - # .generators-for-toolset.gcc.compile, which is a separate value from - # .generators-for-toolset.gcc. Correcting this makes generator inheritance - # work properly. See also inherit-generators in the toolset module. - local base = $(id) ; - while $(base:S) - { - base = $(base:B) ; - } - .generators-for-toolset.$(base) += $(g) ; - - - # After adding a new generator that can construct new target types, we need - # to clear the related cached viable source target type information for - # constructing a specific target type or using a specific generator. Cached - # viable source target type lists affected by this are those containing any - # of the target types constructed by the new generator or any of their base - # target types. - # - # A more advanced alternative to clearing that cached viable source target - # type information would be to expand it with additional source types or - # even better - mark it as needing to be expanded on next use. - # - # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077 - # mailing list thread for an even more advanced idea of how we could convert - # Boost Build's Jamfile processing, target selection and generator selection - # into separate steps which would prevent these caches from ever being - # invalidated. - # - # For now we just clear all the cached viable source target type information - # that does not simply state 'all types' and may implement a more detailed - # algorithm later on if it becomes needed. - - invalidate-extendable-viable-source-target-type-cache ; -} - - -# Creates a new non-composing 'generator' class instance and registers it. -# Returns the created instance. 
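register above derives the owning toolset from a generator id by repeatedly stripping the suffix, so that gcc.compile.c++ is recorded under .generators-for-toolset.gcc rather than under gcc.compile. A small Python sketch equivalent in spirit to the while $(base:S) loop (the ids are examples only):

    def toolset_of(generator_id):
        # Strip trailing ".something" components until no suffix remains.
        base = generator_id
        while "." in base:
            base = base.rsplit(".", 1)[0]
        return base

    print(toolset_of("gcc.compile.c++"))   # gcc
    print(toolset_of("msvc.link"))         # msvc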
Rationale: the instance is returned so that it -# is possible to first register a generator and then call its 'run' method, -# bypassing the whole generator selection process. -# -rule register-standard ( id : source-types * : target-types + : requirements * ) -{ - local g = [ new generator $(id) : $(source-types) : $(target-types) : - $(requirements) ] ; - register $(g) ; - return $(g) ; -} - - -# Creates a new composing 'generator' class instance and registers it. -# -rule register-composing ( id : source-types * : target-types + : requirements * - ) -{ - local g = [ new generator $(id) true : $(source-types) : $(target-types) : - $(requirements) ] ; - register $(g) ; - return $(g) ; -} - - -# Returns all generators belonging to the given 'toolset', i.e. whose ids are -# '$(toolset).<something>'. -# -rule generators-for-toolset ( toolset ) -{ - return $(.generators-for-toolset.$(toolset)) ; -} - - -# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when -# searching for generators that could produce a target of a certain type, both -# those generators are among viable generators, the overridden generator is -# immediately discarded. -# -# The overridden generators are discarded immediately after computing the list -# of viable generators but before running any of them. -# -rule override ( overrider-id : overridee-id ) -{ - .override.$(overrider-id) += $(overridee-id) ; -} - - -# Returns a list of source type which can possibly be converted to 'target-type' -# by some chain of generator invocation. -# -# More formally, takes all generators for 'target-type' and returns a union of -# source types for those generators and result of calling itself recursively on -# source types. -# -# Returns '*' in case any type should be considered a viable source type for the -# given type. -# -local rule viable-source-types-real ( target-type ) -{ - local result ; - - # 't0' is the initial list of target types we need to process to get a list - # of their viable source target types. New target types will not be added to - # this list. - local t0 = [ type.all-bases $(target-type) ] ; - - # 't' is the list of target types which have not yet been processed to get a - # list of their viable source target types. This list will get expanded as - # we locate more target types to process. - local t = $(t0) ; - - while $(t) - { - # Find all generators for the current type. Unlike - # 'find-viable-generators' we do not care about the property-set. - local generators = $(.generators.$(t[1])) ; - t = $(t[2-]) ; - - while $(generators) - { - local g = $(generators[1]) ; - generators = $(generators[2-]) ; - - if ! [ $(g).source-types ] - { - # Empty source types -- everything can be accepted. - result = * ; - # This will terminate this loop. - generators = ; - # This will terminate the outer loop. - t = ; - } - - for local source-type in [ $(g).source-types ] - { - if ! $(source-type) in $(result) - { - # If a generator accepts a 'source-type' it will also - # happily accept any type derived from it. - for local n in [ type.all-derived $(source-type) ] - { - if ! $(n) in $(result) - { - # Here there is no point in adding target types to - # the list of types to process in case they are or - # have already been on that list. We optimize this - # check by realizing that we only need to avoid the - # original target type's base types. 
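viable-source-types-real can legitimately produce an empty result, so the caching wrappers that follow (viable-source-types and viable-source-types-for-generator) store the sentinel value none to distinguish "computed and empty" from "not computed yet". A sketch of that convention in Python (names are illustrative):

    cache = {}

    def cached_viable_source_types(target_type, compute):
        # Store 'none' for an empty result so the computation is not repeated.
        if target_type not in cache:
            cache[target_type] = compute(target_type) or "none"
        value = cache[target_type]
        return [] if value == "none" else value

    print(cached_viable_source_types("EXE", lambda t: ["OBJ", "LIB"]))   # ['OBJ', 'LIB']
    print(cached_viable_source_types("RAW", lambda t: []))               # []
    print(cache["RAW"])                                                  # none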
Other target - # types that are or have been on the list of target - # types to process have been added to the 'result' - # list as well and have thus already been eliminated - # by the previous if. - if ! $(n) in $(t0) - { - t += $(n) ; - } - result += $(n) ; - } - } - } - } - } - } - - return $(result) ; -} - - -# Helper rule, caches the result of 'viable-source-types-real'. -# -rule viable-source-types ( target-type ) -{ - local key = .vst.$(target-type) ; - if ! $($(key)) - { - .vst-cached-types += $(target-type) ; - local v = [ viable-source-types-real $(target-type) ] ; - if ! $(v) - { - v = none ; - } - $(key) = $(v) ; - } - - if $($(key)) != none - { - return $($(key)) ; - } -} - - -# Returns the list of source types, which, when passed to 'run' method of -# 'generator', has some change of being eventually used (probably after -# conversion by other generators). -# -# Returns '*' in case any type should be considered a viable source type for the -# given generator. -# -rule viable-source-types-for-generator-real ( generator ) -{ - local source-types = [ $(generator).source-types ] ; - if ! $(source-types) - { - # If generator does not specify any source types, it might be a special - # generator like builtin.lib-generator which just relays to other - # generators. Return '*' to indicate that any source type is possibly - # OK, since we do not know for sure. - return * ; - } - else - { - local result ; - while $(source-types) - { - local s = $(source-types[1]) ; - source-types = $(source-types[2-]) ; - local viable-sources = [ generators.viable-source-types $(s) ] ; - if $(viable-sources) = * - { - result = * ; - source-types = ; # Terminate the loop. - } - else - { - result += [ type.all-derived $(s) ] $(viable-sources) ; - } - } - return [ sequence.unique $(result) ] ; - } -} - - -# Helper rule, caches the result of 'viable-source-types-for-generator'. -# -local rule viable-source-types-for-generator ( generator ) -{ - local key = .vstg.$(generator) ; - if ! $($(key)) - { - .vstg-cached-generators += $(generator) ; - local v = [ viable-source-types-for-generator-real $(generator) ] ; - if ! $(v) - { - v = none ; - } - $(key) = $(v) ; - } - - if $($(key)) != none - { - return $($(key)) ; - } -} - - -# Returns usage requirements + list of created targets. -# -local rule try-one-generator-really ( project name ? : generator : target-type - : property-set : sources * ) -{ - local targets = - [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ; - - local usage-requirements ; - local success ; - - generators.dout [ indent ] returned $(targets) ; - - if $(targets) - { - success = true ; - - if [ class.is-a $(targets[1]) : property-set ] - { - usage-requirements = $(targets[1]) ; - targets = $(targets[2-]) ; - } - else - { - usage-requirements = [ property-set.empty ] ; - } - } - - generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ; - generators.dout [ indent ] " " $(targets) ; - if $(usage-requirements) - { - generators.dout [ indent ] " with usage requirements:" $(x) ; - } - - if $(success) - { - return $(usage-requirements) $(targets) ; - } -} - - -# Checks if generator invocation can be pruned, because it is guaranteed to -# fail. If so, quickly returns an empty list. Otherwise, calls -# try-one-generator-really. -# -local rule try-one-generator ( project name ? 
: generator : target-type - : property-set : sources * ) -{ - local source-types ; - for local s in $(sources) - { - source-types += [ $(s).type ] ; - } - local viable-source-types = [ viable-source-types-for-generator $(generator) - ] ; - - if $(source-types) && $(viable-source-types) != * && - ! [ set.intersection $(source-types) : $(viable-source-types) ] - { - local id = [ $(generator).id ] ; - generators.dout [ indent ] " ** generator '$(id)' pruned" ; - #generators.dout [ indent ] "source-types" '$(source-types)' ; - #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ; - } - else - { - return [ try-one-generator-really $(project) $(name) : $(generator) : - $(target-type) : $(property-set) : $(sources) ] ; - } -} - - -rule construct-types ( project name ? : target-types + : property-set - : sources + ) -{ - local result ; - local matched-types ; - local usage-requirements = [ property-set.empty ] ; - for local t in $(target-types) - { - local r = [ construct $(project) $(name) : $(t) : $(property-set) : - $(sources) ] ; - if $(r) - { - usage-requirements = [ $(usage-requirements).add $(r[1]) ] ; - result += $(r[2-]) ; - matched-types += $(t) ; - } - } - # TODO: have to introduce parameter controlling if several types can be - # matched and add appropriate checks. - - # TODO: need to review the documentation for 'construct' to see if it should - # return $(source) even if nothing can be done with it. Currents docs seem - # to imply that, contrary to the behaviour. - if $(result) - { - return $(usage-requirements) $(result) ; - } - else - { - return $(usage-requirements) $(sources) ; - } -} - - -# Ensures all 'targets' have their type. If this is not so, exists with error. -# -local rule ensure-type ( targets * ) -{ - for local t in $(targets) - { - if ! [ $(t).type ] - { - errors.error "target" [ $(t).str ] "has no type" ; - } - } -} - - -# Returns generators which can be used to construct target of specified type -# with specified properties. Uses the following algorithm: -# - iterates over requested target-type and all its bases (in the order returned -# by type.all-bases). -# - for each type find all generators that generate that type and whose -# requirements are satisfied by properties. -# - if the set of generators is not empty, returns that set. -# -# Note: this algorithm explicitly ignores generators for base classes if there -# is at least one generator for the requested target-type. -# -local rule find-viable-generators-aux ( target-type : property-set ) -{ - # Select generators that can create the required target type. - local viable-generators = ; - local generator-rank = ; - - import type ; - local t = [ type.all-bases $(target-type) ] ; - - generators.dout [ indent ] find-viable-generators target-type= $(target-type) - property-set= [ $(property-set).as-path ] ; - - # Get the list of generators for the requested type. If no generator is - # registered, try base type, and so on. - local generators ; - while $(t[1]) - { - generators.dout [ indent ] "trying type" $(t[1]) ; - if $(.generators.$(t[1])) - { - generators.dout [ indent ] "there are generators for this type" ; - generators = $(.generators.$(t[1])) ; - - if $(t[1]) != $(target-type) - { - # We are here because there were no generators found for - # target-type but there are some generators for its base type. - # We will try to use them, but they will produce targets of - # base type, not of 'target-type'. So, we clone the generators - # and modify the list of target types. 
- local generators2 ; - for local g in $(generators) - { - # generators.register adds a generator to the list of - # generators for toolsets, which is a bit strange, but - # should work. That list is only used when inheriting a - # toolset, which should have been done before running - # generators. - generators2 += [ $(g).clone-and-change-target-type $(t[1]) : - $(target-type) ] ; - generators.register $(generators2[-1]) ; - } - generators = $(generators2) ; - } - t = ; - } - t = $(t[2-]) ; - } - - for local g in $(generators) - { - generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ; - - local m = [ $(g).match-rank $(property-set) ] ; - if $(m) - { - generators.dout [ indent ] " is viable" ; - viable-generators += $(g) ; - } - } - - return $(viable-generators) ; -} - - -rule find-viable-generators ( target-type : property-set ) -{ - local key = $(target-type).$(property-set) ; - local l = $(.fv.$(key)) ; - if ! $(l) - { - l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ; - if ! $(l) - { - l = none ; - } - .fv.$(key) = $(l) ; - } - - if $(l) = none - { - l = ; - } - - local viable-generators ; - for local g in $(l) - { - # Avoid trying the same generator twice on different levels. - if ! $(g) in $(.active-generators) - { - viable-generators += $(g) ; - } - else - { - generators.dout [ indent ] " generator " [ $(g).id ] "is active, discaring" ; - } - } - - # Generators which override 'all'. - local all-overrides ; - # Generators which are overriden. - local overriden-ids ; - for local g in $(viable-generators) - { - local id = [ $(g).id ] ; - local this-overrides = $(.override.$(id)) ; - overriden-ids += $(this-overrides) ; - if all in $(this-overrides) - { - all-overrides += $(g) ; - } - } - if $(all-overrides) - { - viable-generators = $(all-overrides) ; - } - local result ; - for local g in $(viable-generators) - { - if ! [ $(g).id ] in $(overriden-ids) - { - result += $(g) ; - } - } - - return $(result) ; -} - - -.construct-stack = ; - - -# Attempts to construct a target by finding viable generators, running them and -# selecting the dependency graph. -# -local rule construct-really ( project name ? : target-type : property-set : - sources * ) -{ - viable-generators = [ find-viable-generators $(target-type) : - $(property-set) ] ; - - generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ] - " viable generators" ; - - local result ; - local generators-that-succeeded ; - for local g in $(viable-generators) - { - # This variable will be restored on exit from this scope. - local .active-generators = $(g) $(.active-generators) ; - - local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type) - : $(property-set) : $(sources) ] ; - - if $(r) - { - generators-that-succeeded += $(g) ; - if $(result) - { - ECHO "Error: ambiguity found when searching for best transformation" ; - ECHO "Trying to produce type '$(target-type)' from: " ; - for local s in $(sources) - { - ECHO " - " [ $(s).str ] ; - } - ECHO "Generators that succeeded:" ; - for local g in $(generators-that-succeeded) - { - ECHO " - " [ $(g).id ] ; - } - ECHO "First generator produced: " ; - for local t in $(result[2-]) - { - ECHO " - " [ $(t).str ] ; - } - ECHO "Second generator produced: " ; - for local t in $(r[2-]) - { - ECHO " - " [ $(t).str ] ; - } - EXIT ; - } - else - { - result = $(r) ; - } - } - } - - return $(result) ; -} - - -# Attempts to create a target of 'target-type' with 'properties' from 'sources'. 
-# The 'sources' are treated as a collection of *possible* ingridients, i.e. -# there is no obligation to consume them all. -# -# Returns a list of targets. When this invocation is first instance of -# 'construct' in stack, returns only targets of requested 'target-type', -# otherwise, returns also unused sources and additionally generated targets. -# -# If 'top-level' is set, does not suppress generators that are already -# used in the stack. This may be useful in cases where a generator -# has to build a metatargets -- for example a target corresponding to -# built tool. -# -rule construct ( project name ? : target-type : property-set * : sources * : top-level ? ) -{ - local saved-stack ; - if $(top-level) - { - saved-active = $(.active-generators) ; - .active-generators = ; - } - - if (.construct-stack) - { - ensure-type $(sources) ; - } - - .construct-stack += 1 ; - - increase-indent ; - - if $(.debug) - { - generators.dout [ indent ] "*** construct" $(target-type) ; - - for local s in $(sources) - { - generators.dout [ indent ] " from" $(s) ; - } - generators.dout [ indent ] " properties:" [ $(property-set).raw ] ; - } - - local result = [ construct-really $(project) $(name) : $(target-type) : - $(property-set) : $(sources) ] ; - - decrease-indent ; - - .construct-stack = $(.construct-stack[2-]) ; - - if $(top-level) - { - .active-generators = $(saved-active) ; - } - - return $(result) ; -} - -# Given 'result', obtained from some generator or generators.construct, adds -# 'raw-properties' as usage requirements to it. If result already contains usage -# requirements -- that is the first element of result of an instance of the -# property-set class, the existing usage requirements and 'raw-properties' are -# combined. -# -rule add-usage-requirements ( result * : raw-properties * ) -{ - if $(result) - { - if [ class.is-a $(result[1]) : property-set ] - { - return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ; - } - else - { - return [ property-set.create $(raw-properties) ] $(result) ; - } - } -} - -rule dump ( ) -{ - for local g in $(.all-generators) - { - ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ; - } -} - diff --git a/jam-files/boost-build/build/generators.py b/jam-files/boost-build/build/generators.py deleted file mode 100644 index 2c59f7ca..00000000 --- a/jam-files/boost-build/build/generators.py +++ /dev/null @@ -1,1089 +0,0 @@ -# Status: being ported by Vladimir Prus -# Base revision: 48649 -# TODO: replace the logging with dout - -# Copyright Vladimir Prus 2002. -# Copyright Rene Rivera 2006. -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Manages 'generators' --- objects which can do transformation between different -# target types and contain algorithm for finding transformation from sources -# to targets. -# -# The main entry point to this module is generators.construct rule. It is given -# a list of source targets, desired target type and a set of properties. -# It starts by selecting 'viable generators', which have any chances of producing -# the desired target type with the required properties. Generators are ranked and -# a set of most specific ones is selected. -# -# The most specific generators have their 'run' methods called, with the properties -# and list of sources. Each one selects target which can be directly consumed, and -# tries to convert the remaining ones to the types it can consume. 
This is done -# by recursively calling 'construct' with all consumable types. -# -# If the generator has collected all the targets it needs, it creates targets -# corresponding to result, and returns it. When all generators have been run, -# results of one of them are selected and returned as result. -# -# It's quite possible that 'construct' returns more targets that it was asked for. -# For example, it was asked to target type EXE, but the only found generators produces -# both EXE and TDS (file with debug) information. The extra target will be returned. -# -# Likewise, when generator tries to convert sources to consumable types, it can get -# more targets that it was asked for. The question is what to do with extra targets. -# Boost.Build attempts to convert them to requested types, and attempts as early as -# possible. Specifically, this is done after invoking each generator. (Later I'll -# document the rationale for trying extra target conversion at that point). -# -# That early conversion is not always desirable. Suppose a generator got a source of -# type Y and must consume one target of type X_1 and one target of type X_2. -# When converting Y to X_1 extra target of type Y_2 is created. We should not try to -# convert it to type X_1, because if we do so, the generator will get two targets -# of type X_1, and will be at loss as to which one to use. Because of that, the -# 'construct' rule has a parameter, telling if multiple targets can be returned. If -# the parameter is false, conversion of extra targets is not performed. - - -import re -import cStringIO -import os.path - -from virtual_target import Subvariant -import virtual_target, type, property_set, property -from b2.util.logger import * -from b2.util.utility import * -from b2.util import set -from b2.util.sequence import unique -import b2.util.sequence as sequence -from b2.manager import get_manager -import b2.build.type - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __generators, __type_to_generators, __generators_for_toolset, __construct_stack - global __overrides, __active_generators - global __viable_generators_cache, __viable_source_types_cache - global __vstg_cached_generators, __vst_cached_types - - __generators = {} - __type_to_generators = {} - __generators_for_toolset = {} - __overrides = {} - - # TODO: can these be global? - __construct_stack = [] - __viable_generators_cache = {} - __viable_source_types_cache = {} - __active_generators = [] - - __vstg_cached_generators = [] - __vst_cached_types = [] - -reset () - -_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?') -_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?') - - -__debug = None -__indent = "" - -def debug(): - global __debug - if __debug is None: - __debug = "--debug-generators" in bjam.variable("ARGV") - return __debug - -def increase_indent(): - global __indent - __indent += " " - -def decrease_indent(): - global __indent - __indent = __indent[0:-4] - - -# Updated cached viable source target type information as needed after a new -# derived target type gets added. This is needed because if a target type is a -# viable source target type for some generator then all of the target type's -# derived target types are automatically viable as source target types for the -# same generator. Does nothing if a non-derived target type is passed to it. 
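To make the cache-extension idea in the comment above concrete, here is a minimal standalone Python sketch. All names (viable_cache, bases, register_derived_type) are invented for illustration; this is not the Boost.Build API.

    # Sketch: if BASE is already a viable source type in some cached entry,
    # any newly registered type derived from BASE becomes viable there too.
    viable_cache = {"EXE": ["CPP", "OBJ"]}   # cached viable source types per target type
    bases = {"MOC": "CPP"}                   # newly added type -> its base type

    def register_derived_type(new_type):
        base = bases.get(new_type)
        if base is None:
            return                           # non-derived types leave the cache untouched
        for types in viable_cache.values():
            if base in types and new_type not in types:
                types.append(new_type)       # derived type is viable wherever its base is

    register_derived_type("MOC")
    print(viable_cache)                      # {'EXE': ['CPP', 'OBJ', 'MOC']}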
-# -def update_cached_information_with_a_new_type(type): - - base_type = b2.build.type.base(type) - - if base_type: - for g in __vstg_cached_generators: - if base_type in __viable_source_types_cache.get(g, []): - __viable_source_types_cache[g].append(type) - - for t in __vst_cached_types: - if base_type in __viable_source_types_cache.get(t, []): - __viable_source_types_cache[t].append(type) - -# Clears cached viable source target type information except for target types -# and generators with all source types listed as viable. Should be called when -# something invalidates those cached values by possibly causing some new source -# types to become viable. -# -def invalidate_extendable_viable_source_target_type_cache(): - - global __vstg_cached_generators - generators_with_cached_source_types = __vstg_cached_generators - __vstg_cached_generators = [] - - for g in generators_with_cached_source_types: - if __viable_source_types_cache.has_key(g): - if __viable_source_types_cache[g] == ["*"]: - __vstg_cached_generators.append(g) - else: - del __viable_source_types_cache[g] - - global __vst_cached_types - types_with_cached_sources_types = __vst_cached_types - __vst_cached_types = [] - for t in types_with_cached_sources_types: - if __viable_source_types_cache.has_key(t): - if __viable_source_types_cache[t] == ["*"]: - __vst_cached_types.append(t) - else: - del __viable_source_types_cache[t] - -def dout(message): - if debug(): - print __indent + message - -class Generator: - """ Creates a generator. - manager: the build manager. - id: identifies the generator - - rule: the rule which sets up build actions. - - composing: whether generator processes each source target in - turn, converting it to required types. - Ordinary generators pass all sources together to - recusrive generators.construct_types call. - - source_types (optional): types that this generator can handle - - target_types_and_names: types the generator will create and, optionally, names for - created targets. Each element should have the form - type["(" name-pattern ")"] - for example, obj(%_x). Name of generated target will be found - by replacing % with the name of source, provided explicit name - was not specified. - - requirements (optional) - - NOTE: all subclasses must have a similar signature for clone to work! - """ - def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []): - assert(not isinstance(source_types, str)) - assert(not isinstance(target_types_and_names, str)) - self.id_ = id - self.composing_ = composing - self.source_types_ = source_types - self.target_types_and_names_ = target_types_and_names - self.requirements_ = requirements - - self.target_types_ = [] - self.name_prefix_ = [] - self.name_postfix_ = [] - - for e in target_types_and_names: - # Create three parallel lists: one with the list of target types, - # and two other with prefixes and postfixes to be added to target - # name. We use parallel lists for prefix and postfix (as opposed - # to mapping), because given target type might occur several times, - # for example "H H(%_symbols)". 
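The parallel prefix/postfix lists mentioned above come from splitting declarations such as "H(%_symbols)" with the _re_separate_types_prefix_and_postfix pattern defined earlier in this file. Run outside bjam, the same pattern behaves like this (the "RC(resource_%)" declaration is invented for illustration):

    import re

    # Same pattern as _re_separate_types_prefix_and_postfix: a target type,
    # optionally followed by "(<prefix>%<postfix>)".
    pattern = re.compile('([^\\(]*)(\\((.*)%(.*)\\))?')

    for decl in ["OBJ", "H(%_symbols)", "RC(resource_%)"]:
        m = pattern.match(decl)
        target_type = m.group(1) or ''
        prefix = m.group(3) or ''
        postfix = m.group(4) or ''
        print(decl, (target_type, prefix, postfix))
    # OBJ ('OBJ', '', '')
    # H(%_symbols) ('H', '', '_symbols')
    # RC(resource_%) ('RC', 'resource_', '')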
- m = _re_separate_types_prefix_and_postfix.match (e) - - if not m: - raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id)) - - target_type = m.group (1) - if not target_type: target_type = '' - prefix = m.group (3) - if not prefix: prefix = '' - postfix = m.group (4) - if not postfix: postfix = '' - - self.target_types_.append (target_type) - self.name_prefix_.append (prefix) - self.name_postfix_.append (postfix) - - for x in self.source_types_: - type.validate (x) - - for x in self.target_types_: - type.validate (x) - - def clone (self, new_id, new_toolset_properties): - """ Returns another generator which differers from $(self) in - - id - - value to <toolset> feature in properties - """ - return self.__class__ (new_id, - self.composing_, - self.source_types_, - self.target_types_and_names_, - # Note: this does not remove any subfeatures of <toolset> - # which might cause problems - property.change (self.requirements_, '<toolset>') + new_toolset_properties) - - def clone_and_change_target_type(self, base, type): - """Creates another generator that is the same as $(self), except that - if 'base' is in target types of $(self), 'type' will in target types - of the new generator.""" - target_types = [] - for t in self.target_types_and_names_: - m = _re_match_type.match(t) - assert m - - if m.group(1) == base: - if m.group(2): - target_types.append(type + m.group(2)) - else: - target_types.append(type) - else: - target_types.append(t) - - return self.__class__(self.id_, self.composing_, - self.source_types_, - target_types, - self.requirements_) - - - def id(self): - return self.id_ - - def source_types (self): - """ Returns the list of target type the generator accepts. - """ - return self.source_types_ - - def target_types (self): - """ Returns the list of target types that this generator produces. - It is assumed to be always the same -- i.e. it cannot change depending - list of sources. - """ - return self.target_types_ - - def requirements (self): - """ Returns the required properties for this generator. Properties - in returned set must be present in build properties if this - generator is to be used. If result has grist-only element, - that build properties must include some value of that feature. - """ - return self.requirements_ - - def match_rank (self, ps): - """ Returns true if the generator can be run with the specified - properties. - """ - # See if generator's requirements are satisfied by - # 'properties'. Treat a feature name in requirements - # (i.e. grist-only element), as matching any value of the - # feature. - all_requirements = self.requirements () - - property_requirements = [] - feature_requirements = [] - # This uses strings because genenator requirements allow - # the '<feature>' syntax without value and regular validation - # is not happy about that. - for r in all_requirements: - if get_value (r): - property_requirements.append (r) - - else: - feature_requirements.append (r) - - return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \ - and all(ps.get(get_grist(s)) for s in feature_requirements) - - def run (self, project, name, prop_set, sources): - """ Tries to invoke this generator on the given sources. Returns a - list of generated targets (instances of 'virtual-target'). - - project: Project for which the targets are generated. - - name: Determines the name of 'name' attribute for - all generated targets. See 'generated_targets' method. - - prop_set: Desired properties for generated targets. 
- - sources: Source targets. - """ - - if project.manager ().logger ().on (): - project.manager ().logger ().log (__name__, " generator '%s'" % self.id_) - project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_) - - if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1: - raise BaseException ("Unsupported source/source_type combination") - - # We don't run composing generators if no name is specified. The reason - # is that composing generator combines several targets, which can have - # different names, and it cannot decide which name to give for produced - # target. Therefore, the name must be passed. - # - # This in effect, means that composing generators are runnable only - # at top-level of transofrmation graph, or if name is passed explicitly. - # Thus, we dissallow composing generators in the middle. For example, the - # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed - # (the OBJ -> STATIC_LIB generator is composing) - if not self.composing_ or name: - return self.run_really (project, name, prop_set, sources) - else: - return [] - - def run_really (self, project, name, prop_set, sources): - - # consumed: Targets that this generator will consume directly. - # bypassed: Targets that can't be consumed and will be returned as-is. - - if self.composing_: - (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources) - else: - (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources) - - result = [] - if consumed: - result = self.construct_result (consumed, project, name, prop_set) - result.extend (bypassed) - - if result: - if project.manager ().logger ().on (): - project.manager ().logger ().log (__name__, " SUCCESS: ", result) - - else: - project.manager ().logger ().log (__name__, " FAILURE") - - return result - - def construct_result (self, consumed, project, name, prop_set): - """ Constructs the dependency graph that will be returned by this - generator. - consumed: Already prepared list of consumable targets - If generator requires several source files will contain - exactly len $(self.source_types_) targets with matching types - Otherwise, might contain several targets with the type of - self.source_types_ [0] - project: - name: - prop_set: Properties to be used for all actions create here - """ - result = [] - # If this is 1->1 transformation, apply it to all consumed targets in order. - if len (self.source_types_) < 2 and not self.composing_: - - for r in consumed: - result.extend (self.generated_targets ([r], prop_set, project, name)) - - else: - - if consumed: - result.extend (self.generated_targets (consumed, prop_set, project, name)) - - return result - - def determine_target_name(self, fullname): - # Determine target name from fullname (maybe including path components) - # Place optional prefix and postfix around basename - - dir = os.path.dirname(fullname) - name = os.path.basename(fullname) - - if dir and not ".." in dir and not os.path.isabs(dir): - # Relative path is always relative to the source - # directory. Retain it, so that users can have files - # with the same in two different subdirectories. - name = dir + "/" + name - - return name - - def determine_output_name(self, sources): - """Determine the name of the produced target from the - names of the sources.""" - - # The simple case if when a name - # of source has single dot. Then, we take the part before - # dot. 
Several dots can be caused by: - # - Using source file like a.host.cpp - # - A type which suffix has a dot. Say, we can - # type 'host_cpp' with extension 'host.cpp'. - # In the first case, we want to take the part till the last - # dot. In the second case -- no sure, but for now take - # the part till the last dot too. - name = os.path.splitext(sources[0].name())[0] - - for s in sources[1:]: - n2 = os.path.splitext(s.name()) - if n2 != name: - get_manager().errors()( - "%s: source targets have different names: cannot determine target name" - % (self.id_)) - - # Names of sources might include directory. We should strip it. - return self.determine_target_name(sources[0].name()) - - - def generated_targets (self, sources, prop_set, project, name): - """ Constructs targets that are created after consuming 'sources'. - The result will be the list of virtual-target, which the same length - as 'target_types' attribute and with corresponding types. - - When 'name' is empty, all source targets must have the same value of - the 'name' attribute, which will be used instead of the 'name' argument. - - The value of 'name' attribute for each generated target will be equal to - the 'name' parameter if there's no name pattern for this type. Otherwise, - the '%' symbol in the name pattern will be replaced with the 'name' parameter - to obtain the 'name' attribute. - - For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes - for T1 and T2 are .t1 and t2, and source if foo.z, then created files would - be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the - basename of a file. - - Note that this pattern mechanism has nothing to do with implicit patterns - in make. It's a way to produce target which name is different for name of - source. - """ - if not name: - name = self.determine_output_name(sources) - - # Assign an action for each target - action = self.action_class() - a = action(project.manager(), sources, self.id_, prop_set) - - # Create generated target for each target type. - targets = [] - pre = self.name_prefix_ - post = self.name_postfix_ - for t in self.target_types_: - basename = os.path.basename(name) - idx = basename.find(".") - if idx != -1: - basename = basename[:idx] - generated_name = pre[0] + basename + post[0] - generated_name = os.path.join(os.path.dirname(name), generated_name) - pre = pre[1:] - post = post[1:] - - targets.append(virtual_target.FileTarget(generated_name, t, project, a)) - - return [ project.manager().virtual_targets().register(t) for t in targets ] - - def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False): - """ Attempts to convert 'source' to the types that this generator can - handle. The intention is to produce the set of targets can should be - used when generator is run. - only_one: convert 'source' to only one of source types - if there's more that one possibility, report an - error. - - Returns a pair: - consumed: all targets that can be consumed. - bypassed: all targets that cannot be consumed. - """ - consumed = [] - bypassed = [] - missing_types = [] - - if len (sources) > 1: - # Don't know how to handle several sources yet. Just try - # to pass the request to other generator - missing_types = self.source_types_ - - else: - (c, m) = self.consume_directly (sources [0]) - consumed += c - missing_types += m - - # No need to search for transformation if - # some source type has consumed source and - # no more source types are needed. 
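The consumed/missing-types split that drives the logic in this method can be sketched in isolation. The toy type hierarchy and the helper names (derived_from, is_derived) are invented for illustration; they stand in for type.is_derived and the real source targets.

    # A source is consumed directly when its type equals, or derives from, one
    # of the generator's declared source types; every other declared source
    # type is reported as missing and would trigger a recursive construct_types
    # call to convert the remaining sources.
    derived_from = {"MOC": "CPP"}            # MOC derives from CPP

    def is_derived(t, base):
        while t is not None:
            if t == base:
                return True
            t = derived_from.get(t)
        return False

    def consume_directly(source_type, generator_source_types):
        consumed, missing = [], []
        for st in generator_source_types or [source_type]:
            if is_derived(source_type, st):
                consumed.append(source_type)
            else:
                missing.append(st)
        return consumed, missing

    print(consume_directly("MOC", ["CPP", "RC"]))   # (['MOC'], ['RC'])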
- if only_one and consumed: - missing_types = [] - - #TODO: we should check that only one source type - #if create of 'only_one' is true. - # TODO: consider if consuned/bypassed separation should - # be done by 'construct_types'. - - if missing_types: - transformed = construct_types (project, name, missing_types, prop_set, sources) - - # Add targets of right type to 'consumed'. Add others to - # 'bypassed'. The 'generators.construct' rule has done - # its best to convert everything to the required type. - # There's no need to rerun it on targets of different types. - - # NOTE: ignoring usage requirements - for t in transformed[1]: - if t.type() in missing_types: - consumed.append(t) - - else: - bypassed.append(t) - - consumed = unique(consumed) - bypassed = unique(bypassed) - - # remove elements of 'bypassed' that are in 'consumed' - - # Suppose the target type of current generator, X is produced from - # X_1 and X_2, which are produced from Y by one generator. - # When creating X_1 from Y, X_2 will be added to 'bypassed' - # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed' - # But they are also in 'consumed'. We have to remove them from - # bypassed, so that generators up the call stack don't try to convert - # them. - - # In this particular case, X_1 instance in 'consumed' and X_1 instance - # in 'bypassed' will be the same: because they have the same source and - # action name, and 'virtual-target.register' won't allow two different - # instances. Therefore, it's OK to use 'set.difference'. - - bypassed = set.difference(bypassed, consumed) - - return (consumed, bypassed) - - - def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources): - """ Converts several files to consumable types. - """ - consumed = [] - bypassed = [] - - # We process each source one-by-one, trying to convert it to - # a usable type. - for s in sources: - # TODO: need to check for failure on each source. - (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], True) - if not c: - project.manager ().logger ().log (__name__, " failed to convert ", s) - - consumed.extend (c) - bypassed.extend (b) - - return (consumed, bypassed) - - def consume_directly (self, source): - real_source_type = source.type () - - # If there are no source types, we can consume anything - source_types = self.source_types() - if not source_types: - source_types = [real_source_type] - - consumed = [] - missing_types = [] - for st in source_types: - # The 'source' if of right type already) - if real_source_type == st or type.is_derived (real_source_type, st): - consumed.append (source) - - else: - missing_types.append (st) - - return (consumed, missing_types) - - def action_class (self): - """ Returns the class to be used to actions. Default implementation - returns "action". - """ - return virtual_target.Action - - -def find (id): - """ Finds the generator with id. Returns None if not found. - """ - return __generators.get (id, None) - -def register (g): - """ Registers new generator instance 'g'. - """ - id = g.id() - - __generators [id] = g - - # A generator can produce several targets of the - # same type. We want unique occurence of that generator - # in .generators.$(t) in that case, otherwise, it will - # be tried twice and we'll get false ambiguity. - for t in sequence.unique(g.target_types()): - __type_to_generators.setdefault(t, []).append(g) - - # Update the set of generators for toolset - - # TODO: should we check that generator with this id - # is not already registered. 
For example, the fop.jam - # module intentionally declared two generators with the - # same id, so such check will break it. - - # Some generators have multiple periods in their name, so the - # normal $(id:S=) won't generate the right toolset name. - # e.g. if id = gcc.compile.c++, then - # .generators-for-toolset.$(id:S=) will append to - # .generators-for-toolset.gcc.compile, which is a separate - # value from .generators-for-toolset.gcc. Correcting this - # makes generator inheritance work properly. - # See also inherit-generators in module toolset - base = id.split ('.', 100) [0] - - __generators_for_toolset.setdefault(base, []).append(g) - - # After adding a new generator that can construct new target types, we need - # to clear the related cached viable source target type information for - # constructing a specific target type or using a specific generator. Cached - # viable source target type lists affected by this are those containing any - # of the target types constructed by the new generator or any of their base - # target types. - # - # A more advanced alternative to clearing that cached viable source target - # type information would be to expand it with additional source types or - # even better - mark it as needing to be expanded on next use. - # - # For now we just clear all the cached viable source target type information - # that does not simply state 'all types' and may implement a more detailed - # algorithm later on if it becomes needed. - - invalidate_extendable_viable_source_target_type_cache() - - -def register_standard (id, source_types, target_types, requirements = []): - """ Creates new instance of the 'generator' class and registers it. - Returns the creates instance. - Rationale: the instance is returned so that it's possible to first register - a generator and then call 'run' method on that generator, bypassing all - generator selection. - """ - g = Generator (id, False, source_types, target_types, requirements) - register (g) - return g - -def register_composing (id, source_types, target_types, requirements = []): - g = Generator (id, True, source_types, target_types, requirements) - register (g) - return g - -def generators_for_toolset (toolset): - """ Returns all generators which belong to 'toolset'. - """ - return __generators_for_toolset.get(toolset, []) - -def override (overrider_id, overridee_id): - """Make generator 'overrider-id' be preferred to - 'overridee-id'. If, when searching for generators - that could produce a target of certain type, - both those generators are amoung viable generators, - the overridden generator is immediately discarded. - - The overridden generators are discarded immediately - after computing the list of viable generators, before - running any of them.""" - - __overrides.get(overrider_id, []).append(overridee_id) - -def __viable_source_types_real (target_type): - """ Returns a list of source type which can possibly be converted - to 'target_type' by some chain of generator invocation. - - More formally, takes all generators for 'target_type' and - returns union of source types for those generators and result - of calling itself recusrively on source types. - """ - generators = [] - - # 't0' is the initial list of target types we need to process to get a list - # of their viable source target types. New target types will not be added to - # this list. - t0 = type.all_bases (target_type) - - - # 't' is the list of target types which have not yet been processed to get a - # list of their viable source target types. 
This list will get expanded as - # we locate more target types to process. - t = t0 - - result = [] - while t: - # Find all generators for current type. - # Unlike 'find_viable_generators' we don't care about prop_set. - generators = __type_to_generators.get (t [0], []) - t = t[1:] - - for g in generators: - if not g.source_types(): - # Empty source types -- everything can be accepted - result = "*" - # This will terminate outer loop. - t = None - break - - for source_type in g.source_types (): - if not source_type in result: - # If generator accepts 'source_type' it - # will happily accept any type derived from it - all = type.all_derived (source_type) - for n in all: - if not n in result: - - # Here there is no point in adding target types to - # the list of types to process in case they are or - # have already been on that list. We optimize this - # check by realizing that we only need to avoid the - # original target type's base types. Other target - # types that are or have been on the list of target - # types to process have been added to the 'result' - # list as well and have thus already been eliminated - # by the previous if. - if not n in t0: - t.append (n) - result.append (n) - - return result - - -def viable_source_types (target_type): - """ Helper rule, caches the result of '__viable_source_types_real'. - """ - if not __viable_source_types_cache.has_key(target_type): - __vst_cached_types.append(target_type) - __viable_source_types_cache [target_type] = __viable_source_types_real (target_type) - return __viable_source_types_cache [target_type] - -def viable_source_types_for_generator_real (generator): - """ Returns the list of source types, which, when passed to 'run' - method of 'generator', has some change of being eventually used - (probably after conversion by other generators) - """ - source_types = generator.source_types () - - if not source_types: - # If generator does not specify any source types, - # it might be special generator like builtin.lib-generator - # which just relays to other generators. Return '*' to - # indicate that any source type is possibly OK, since we don't - # know for sure. - return ['*'] - - else: - result = [] - for s in source_types: - viable_sources = viable_source_types(s) - if viable_sources == "*": - result = ["*"] - break - else: - result.extend(type.all_derived(s) + viable_sources) - return unique(result) - -def viable_source_types_for_generator (generator): - """ Caches the result of 'viable_source_types_for_generator'. - """ - if not __viable_source_types_cache.has_key(generator): - __vstg_cached_generators.append(generator) - __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator) - - return __viable_source_types_cache[generator] - -def try_one_generator_really (project, name, generator, target_type, properties, sources): - """ Returns usage requirements + list of created targets. 
- """ - targets = generator.run (project, name, properties, sources) - - usage_requirements = [] - success = False - - dout("returned " + str(targets)) - - if targets: - success = True; - - if isinstance (targets[0], property_set.PropertySet): - usage_requirements = targets [0] - targets = targets [1] - - else: - usage_requirements = property_set.empty () - - dout( " generator" + generator.id() + " spawned ") - # generators.dout [ indent ] " " $(targets) ; -# if $(usage-requirements) -# { -# generators.dout [ indent ] " with usage requirements:" $(x) ; -# } - - if success: - return (usage_requirements, targets) - else: - return None - -def try_one_generator (project, name, generator, target_type, properties, sources): - """ Checks if generator invocation can be pruned, because it's guaranteed - to fail. If so, quickly returns empty list. Otherwise, calls - try_one_generator_really. - """ - source_types = [] - - for s in sources: - source_types.append (s.type ()) - - viable_source_types = viable_source_types_for_generator (generator) - - if source_types and viable_source_types != ['*'] and\ - not set.intersection (source_types, viable_source_types): - if project.manager ().logger ().on (): - id = generator.id () - project.manager ().logger ().log (__name__, "generator '%s' pruned" % id) - project.manager ().logger ().log (__name__, "source_types" '%s' % source_types) - project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types) - - return [] - - else: - return try_one_generator_really (project, name, generator, target_type, properties, sources) - - -def construct_types (project, name, target_types, prop_set, sources): - - result = [] - usage_requirements = property_set.empty() - - for t in target_types: - r = construct (project, name, t, prop_set, sources) - - if r: - (ur, targets) = r - usage_requirements = usage_requirements.add(ur) - result.extend(targets) - - # TODO: have to introduce parameter controlling if - # several types can be matched and add appropriate - # checks - - # TODO: need to review the documentation for - # 'construct' to see if it should return $(source) even - # if nothing can be done with it. Currents docs seem to - # imply that, contrary to the behaviour. - if result: - return (usage_requirements, result) - - else: - return (usage_requirements, sources) - -def __ensure_type (targets): - """ Ensures all 'targets' have types. If this is not so, exists with - error. - """ - for t in targets: - if not t.type (): - get_manager().errors()("target '%s' has no type" % str (t)) - -def find_viable_generators_aux (target_type, prop_set): - """ Returns generators which can be used to construct target of specified type - with specified properties. Uses the following algorithm: - - iterates over requested target_type and all it's bases (in the order returned bt - type.all-bases. - - for each type find all generators that generate that type and which requirements - are satisfied by properties. - - if the set of generators is not empty, returns that set. - - Note: this algorithm explicitly ignores generators for base classes if there's - at least one generator for requested target_type. - """ - # Select generators that can create the required target type. - viable_generators = [] - initial_generators = [] - - import type - - # Try all-type generators first. Assume they have - # quite specific requirements. 
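The base-type fallback used in this function can be sketched on its own, assuming invented names (type_to_generators, all_bases, generators_for) rather than the real Boost.Build registries: walk the requested type followed by its bases and keep the generators registered for the first type that has any.

    type_to_generators = {"LIB": ["builtin.lib-generator"]}
    all_bases = {"SHARED_LIB": ["SHARED_LIB", "LIB"], "LIB": ["LIB"]}

    def generators_for(target_type):
        for t in all_bases.get(target_type, [target_type]):
            generators = type_to_generators.get(t, [])
            if generators:
                # The real code additionally clones these generators so that
                # they advertise 'target_type' rather than the base type 't'.
                return generators
        return []

    print(generators_for("SHARED_LIB"))   # ['builtin.lib-generator']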
- all_bases = type.all_bases(target_type) - - for t in all_bases: - - initial_generators = __type_to_generators.get(t, []) - - if initial_generators: - dout("there are generators for this type") - if t != target_type: - # We're here, when no generators for target-type are found, - # but there are some generators for a base type. - # We'll try to use them, but they will produce targets of - # base type, not of 'target-type'. So, we clone the generators - # and modify the list of target types. - generators2 = [] - for g in initial_generators[:]: - # generators.register adds generator to the list of generators - # for toolsets, which is a bit strange, but should work. - # That list is only used when inheriting toolset, which - # should have being done before generators are run. - ng = g.clone_and_change_target_type(t, target_type) - generators2.append(ng) - register(ng) - - initial_generators = generators2 - break - - for g in initial_generators: - dout("trying generator " + g.id() - + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")") - - m = g.match_rank(prop_set) - if m: - dout(" is viable") - viable_generators.append(g) - - return viable_generators - -def find_viable_generators (target_type, prop_set): - key = target_type + '.' + str (prop_set) - - l = __viable_generators_cache.get (key, None) - if not l: - l = [] - - if not l: - l = find_viable_generators_aux (target_type, prop_set) - - __viable_generators_cache [key] = l - - viable_generators = [] - for g in l: - # Avoid trying the same generator twice on different levels. - # TODO: is this really used? - if not g in __active_generators: - viable_generators.append (g) - else: - dout(" generator %s is active, discarding" % g.id()) - - # Generators which override 'all'. - all_overrides = [] - - # Generators which are overriden - overriden_ids = [] - - for g in viable_generators: - id = g.id () - - this_overrides = __overrides.get (id, []) - - if this_overrides: - overriden_ids.extend (this_overrides) - if 'all' in this_overrides: - all_overrides.append (g) - - if all_overrides: - viable_generators = all_overrides - - result = [] - for g in viable_generators: - if not g.id () in overriden_ids: - result.append (g) - - - return result - -def __construct_really (project, name, target_type, prop_set, sources): - """ Attempts to construct target by finding viable generators, running them - and selecting the dependency graph. 
- """ - viable_generators = find_viable_generators (target_type, prop_set) - - result = [] - - project.manager ().logger ().log (__name__, "*** %d viable generators" % len (viable_generators)) - - generators_that_succeeded = [] - - for g in viable_generators: - __active_generators.append(g) - r = try_one_generator (project, name, g, target_type, prop_set, sources) - del __active_generators[-1] - - if r: - generators_that_succeeded.append(g) - if result: - output = cStringIO.StringIO() - print >>output, "ambiguity found when searching for best transformation" - print >>output, "Trying to produce type '%s' from: " % (target_type) - for s in sources: - print >>output, " - " + s.str() - print >>output, "Generators that succeeded:" - for g in generators_that_succeeded: - print >>output, " - " + g.id() - print >>output, "First generator produced: " - for t in result[1:]: - print >>output, " - " + str(t) - print >>output, "Second generator produced:" - for t in r[1:]: - print >>output, " - " + str(t) - get_manager().errors()(output.getvalue()) - else: - result = r; - - return result; - - -def construct (project, name, target_type, prop_set, sources, top_level=False): - """ Attempts to create target of 'target-type' with 'properties' - from 'sources'. The 'sources' are treated as a collection of - *possible* ingridients -- i.e. it is not required to consume - them all. If 'multiple' is true, the rule is allowed to return - several targets of 'target-type'. - - Returns a list of target. When this invocation is first instance of - 'construct' in stack, returns only targets of requested 'target-type', - otherwise, returns also unused sources and additionally generated - targets. - - If 'top-level' is set, does not suppress generators that are already - used in the stack. This may be useful in cases where a generator - has to build a metatarget -- for example a target corresponding to - built tool. - """ - - global __active_generators - if top_level: - saved_active = __active_generators - __active_generators = [] - - global __construct_stack - if not __construct_stack: - __ensure_type (sources) - - __construct_stack.append (1) - - if project.manager().logger().on(): - increase_indent () - - dout( "*** construct " + target_type) - - for s in sources: - dout(" from " + str(s)) - - project.manager().logger().log (__name__, " properties: ", prop_set.raw ()) - - result = __construct_really(project, name, target_type, prop_set, sources) - - project.manager().logger().decrease_indent() - - __construct_stack = __construct_stack [1:] - - if top_level: - __active_generators = saved_active - - return result - diff --git a/jam-files/boost-build/build/modifiers.jam b/jam-files/boost-build/build/modifiers.jam deleted file mode 100644 index 6b009343..00000000 --- a/jam-files/boost-build/build/modifiers.jam +++ /dev/null @@ -1,232 +0,0 @@ -# Copyright 2003 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Modifiers are generalized generators that mutate targets in specific ways. -# This structure allows for grouping a variety of functionality in an -# orthogonal way to the functionality in toolsets, and without specifying -# more target variations. In turn the modifiers can be used as building -# blocks to implement simple requests, like the <version> feature. 
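As a plain-Python illustration of the pattern the 'modifier' class below implements in Jam (adjust the request, delegate to the normal construction path, post-process the result, and guard against re-entering the same target), consider this sketch. construct(), the before/after hooks and the guard set are invented stand-ins for generators.construct, the modify-*-before / modify-target-after rules and self.targets-in-progress.

    targets_in_progress = set()

    def construct(name):
        return "built:" + name               # stand-in for generators.construct

    def run_modifier(name, before=lambda n: n, after=lambda t: t):
        if name in targets_in_progress:
            return None                      # already being modified: do nothing
        targets_in_progress.add(name)
        try:
            return after(construct(before(name)))
        finally:
            targets_in_progress.discard(name)

    print(run_modifier("foo", before=str.upper, after=lambda t: t + " (modified)"))
    # built:FOO (modified)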
- -import modules ; -import feature ; -import errors ; -import type ; -import "class" : new ; -import generators ; -import property ; -import virtual-target ; -import numbers ; -import sequence ; -import symlink ; -import property-set ; - -# Base generator for creating targets that are modifications of existing -# targets. -# -class modifier : generator -{ - rule __init__ ( - id - composing ? - : source-types * - : target-types-and-names + - : requirements * - ) - { - generator.__init__ $(id) $(composing) - : $(source-types) - : $(target-types-and-names) - : $(requirements) ; - - self.targets-in-progress = ; - } - - # Wraps the generation of the target to call before and after rules to - # affect the real target. - # - rule run ( project name ? : property-set : sources + ) - { - local result ; - local current-target = $(project)^$(name) ; - if ! $(current-target) in $(self.targets-in-progress) - { - # Before modifications... - local project_ = - [ modify-project-before - $(project) $(name) : $(property-set) : $(sources) ] ; - local name_ = - [ modify-name-before - $(project) $(name) : $(property-set) : $(sources) ] ; - local property-set_ = - [ modify-properties-before - $(project) $(name) : $(property-set) : $(sources) ] ; - local sources_ = - [ modify-sources-before - $(project) $(name) : $(property-set) : $(sources) ] ; - project = $(project_) ; - name = $(name_) ; - property-set = $(property-set_) ; - sources = $(sources_) ; - - # Generate the real target... - local target-type-p = - [ property.select <main-target-type> : [ $(property-set).raw ] ] ; - self.targets-in-progress += $(current-target) ; - result = - [ generators.construct $(project) $(name) - : $(target-type-p:G=) - : $(property-set) - : $(sources) ] ; - self.targets-in-progress = $(self.targets-in-progress[1--2]) ; - - # After modifications... - result = - [ modify-target-after $(result) - : $(project) $(name) - : $(property-set) - : $(sources) ] ; - } - return $(result) ; - } - - rule modify-project-before ( project name ? : property-set : sources + ) - { - return $(project) ; - } - - rule modify-name-before ( project name ? : property-set : sources + ) - { - return $(name) ; - } - - rule modify-properties-before ( project name ? : property-set : sources + ) - { - return $(property-set) ; - } - - rule modify-sources-before ( project name ? : property-set : sources + ) - { - return $(sources) ; - } - - rule modify-target-after ( target : project name ? : property-set : sources + ) - { - return $(target) ; - } - - # Utility, clones a file-target with optional changes to the name, type and - # project of the target. - # NOTE: This functionality should be moved, and generalized, to - # virtual-targets. - # - rule clone-file-target ( target : new-name ? : new-type ? : new-project ? ) - { - # Need a MUTCH better way to clone a target... 
- new-name ?= [ $(target).name ] ; - new-type ?= [ $(target).type ] ; - new-project ?= [ $(target).project ] ; - local result = [ new file-target $(new-name) : $(new-type) : $(new-project) ] ; - - if [ $(target).dependencies ] { $(result).depends [ $(target).dependencies ] ; } - $(result).root [ $(target).root ] ; - $(result).set-usage-requirements [ $(target).usage-requirements ] ; - - local action = [ $(target).action ] ; - local action-class = [ modules.peek $(action) : __class__ ] ; - - local ps = [ $(action).properties ] ; - local cloned-action = [ new $(action-class) $(result) : - [ $(action).sources ] : [ $(action).action-name ] : $(ps) ] ; - $(result).action $(cloned-action) ; - - return $(result) ; - } -} - - -# A modifier that changes the name of a target, after it's generated, given a -# regular expression to split the name, and a set of token to insert between the -# split tokens of the name. This also exposes the target for other uses with a -# symlink to the original name (optionally). -# -class name-modifier : modifier -{ - rule __init__ ( ) - { - # Apply ourselves to EXE targets, for now. - modifier.__init__ name.modifier : : EXE LIB : <name-modify>yes ; - } - - # Modifies the name, by cloning the target with the new name. - # - rule modify-target-after ( target : project name ? : property-set : sources + ) - { - local result = $(target) ; - - local name-mod-p = [ property.select <name-modifier> : [ $(property-set).raw ] ] ; - if $(name-mod-p) - { - local new-name = [ modify-name [ $(target).name ] : $(name-mod-p:G=) ] ; - if $(new-name) != [ $(target).name ] - { - result = [ clone-file-target $(target) : $(new-name) ] ; - } - local expose-original-as-symlink = [ MATCH "<symlink>(.*)" : $(name-mod-p) ] ; - if $(expose-original-as-symlink) - { - local symlink-t = [ new symlink-targets $(project) : $(name) : [ $(result).name ] ] ; - result = [ $(symlink-t).construct $(result) - : [ property-set.create [ $(property-set).raw ] <symlink-location>build-relative ] ] ; - } - } - - return $(result) ; - } - - # Do the transformation of the name. - # - rule modify-name ( name : modifier-spec + ) - { - local match = [ MATCH "<match>(.*)" : $(modifier-spec) ] ; - local name-parts = [ MATCH $(match) : $(name) ] ; - local insertions = [ sequence.insertion-sort [ MATCH "(<[0123456789]+>.*)" : $(modifier-spec) ] ] ; - local new-name-parts ; - local insert-position = 1 ; - while $(insertions) - { - local insertion = [ MATCH "<$(insert-position)>(.*)" : $(insertions[1]) ] ; - if $(insertion) - { - new-name-parts += $(insertion) ; - insertions = $(insertions[2-]) ; - } - new-name-parts += $(name-parts[1]) ; - name-parts = $(name-parts[2-]) ; - insert-position = [ numbers.increment $(insert-position) ] ; - } - new-name-parts += $(name-parts) ; - return [ sequence.join $(new-name-parts) ] ; - } - - rule optional-properties ( ) - { - return <name-modify>yes ; - } -} -feature.feature name-modifier : : free ; -feature.feature name-modify : no yes : incidental optional ; -generators.register [ new name-modifier ] ; - -# Translates <version> property to a set of modification properties -# that are applied by the name-modifier, and symlink-modifier. 
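The effect of this translation on a concrete name can be sketched directly: with the match pattern ^([^.]*)(.*) and an insertion of ".<version>" at position 2 (see the version-to-modifier rule that follows), a hypothetical target named libfoo.so would be rewritten as shown below. This is a simplified, single-insertion rendering of modify-name, not the general rule.

    import re

    def modify_name(name, version):
        head, tail = re.match(r'^([^.]*)(.*)', name).groups()
        return head + '.' + version + tail   # the insertion becomes the second token

    print(modify_name('libfoo.so', '1.2.3'))  # libfoo.1.2.3.so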
-# -rule version-to-modifier ( property : properties * ) -{ - return - <name-modify>yes - <name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.$(property:G=) - <name-modifier><symlink>yes - ; -} -feature.action <version> : version-to-modifier ; diff --git a/jam-files/boost-build/build/project.ann.py b/jam-files/boost-build/build/project.ann.py deleted file mode 100644 index 349f5495..00000000 --- a/jam-files/boost-build/build/project.ann.py +++ /dev/null @@ -1,996 +0,0 @@ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 1) # Status: being ported by Vladimir Prus -ddc17f01 (vladimir_prus 2007-10-26 14:57:56 +0000 2) # Base revision: 40480 -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 3) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 4) # Copyright 2002, 2003 Dave Abrahams -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 5) # Copyright 2002, 2005, 2006 Rene Rivera -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 6) # Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 7) # Distributed under the Boost Software License, Version 1.0. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 8) # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 9) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 10) # Implements project representation and loading. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 11) # Each project is represented by -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 12) # - a module where all the Jamfile content live. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 13) # - an instance of 'project-attributes' class. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 14) # (given module name, can be obtained by 'attributes' rule) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 15) # - an instance of 'project-target' class (from targets.jam) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 16) # (given a module name, can be obtained by 'target' rule) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 17) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 18) # Typically, projects are created as result of loading Jamfile, which is -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 19) # do by rules 'load' and 'initialize', below. First, module for Jamfile -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 20) # is loaded and new project-attributes instance is created. Some rules -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 21) # necessary for project are added to the module (see 'project-rules' module) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 22) # at the bottom of this file. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 23) # Default project attributes are set (inheriting attributes of parent project, if -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 24) # it exists). After that, Jamfile is read. It can declare its own attributes, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 25) # via 'project' rule, which will be combined with already set attributes. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 26) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 27) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 28) # The 'project' rule can also declare project id, which will be associated with -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 29) # the project module. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 30) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 31) # There can also be 'standalone' projects. They are created by calling 'initialize' -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 32) # on arbitrary module, and not specifying location. After the call, the module can -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 33) # call 'project' rule, declare main target and behave as regular projects. However, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 34) # since it's not associated with any location, it's better declare only prebuilt -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 35) # targets. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 36) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 37) # The list of all loaded Jamfile is stored in variable .project-locations. It's possible -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 38) # to obtain module name for a location using 'module-name' rule. The standalone projects -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 39) # are not recorded, the only way to use them is by project id. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 40) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 41) import b2.util.path -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 42) from b2.build import property_set, property -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 43) from b2.build.errors import ExceptionWithUserContext -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 44) import b2.build.targets -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 45) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 46) import bjam -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 47) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 48) import re -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 49) import sys -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 50) import os -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 51) import string -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 52) import imp -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 53) import traceback -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 54) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 55) class ProjectRegistry: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 56) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 57) def __init__(self, manager, global_build_dir): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 58) self.manager = manager -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 59) self.global_build_dir = None -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 60) self.project_rules_ = ProjectRules(self) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 61) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 62) # The target corresponding to the project being loaded now -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 63) self.current_project = None -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 64) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 65) # The set of names of loaded project modules -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 66) self.jamfile_modules = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 67) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 68) # Mapping from location to module name -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 69) self.location2module = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 70) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 71) # Mapping from project id to project module 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 72) self.id2module = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 73) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 74) # Map from Jamfile directory to parent Jamfile/Jamroot -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 75) # location. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 76) self.dir2parent_jamfile = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 77) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 78) # Map from directory to the name of Jamfile in -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 79) # that directory (or None). -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 80) self.dir2jamfile = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 81) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 82) # Map from project module to attributes object. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 83) self.module2attributes = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 84) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 85) # Map from project module to target for the project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 86) self.module2target = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 87) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 88) # Map from names to Python modules, for modules loaded -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 89) # via 'using' and 'import' rules in Jamfiles. -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 90) self.loaded_tool_modules_ = {} -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 91) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 92) # Map from project target to the list of -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 93) # (id,location) pairs corresponding to all 'use-project' -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 94) # invocations. -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 95) # TODO: should not have a global map, keep this -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 96) # in ProjectTarget. -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 97) self.used_projects = {} -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 98) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 99) self.saved_current_project = [] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 100) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 101) self.JAMROOT = self.manager.getenv("JAMROOT"); -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 102) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 103) # Note the use of character groups, as opposed to listing -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 104) # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 105) # matches on windows and would have to eliminate duplicates. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 106) if not self.JAMROOT: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 107) self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 108) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 109) # Default patterns to search for the Jamfiles to use for build -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 110) # declarations. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 111) self.JAMFILE = self.manager.getenv("JAMFILE") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 112) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 113) if not self.JAMFILE: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 114) self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile", -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 115) "[Jj]amfile.jam"] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 116) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 117) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 118) def load (self, jamfile_location): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 119) """Loads the Jamfile at the given location. After loading, the project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 120) global file and any Jamfiles needed by the loaded one will be loaded recursively. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 121) If the Jamfile at that location is already loaded, does nothing. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 122) Returns the project module for the Jamfile.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 123) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 124) absolute = os.path.join(os.getcwd(), jamfile_location) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 125) absolute = os.path.normpath(absolute) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 126) jamfile_location = b2.util.path.relpath(os.getcwd(), absolute) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 127) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 128) if "--debug-loading" in self.manager.argv(): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 129) print "Loading Jamfile at '%s'" % jamfile_location -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 130) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 131) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 132) mname = self.module_name(jamfile_location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 133) # If Jamfile is already loaded, don't try again. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 134) if not mname in self.jamfile_modules: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 135) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 136) self.load_jamfile(jamfile_location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 137) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 138) # We want to make sure that child projects are loaded only -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 139) # after parent projects. In particular, because parent projects -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 140) # define attributes which are inherited by children, and we don't -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 141) # want children to be loaded before parents have defined everything. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 142) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 143) # While "build-project" and "use-project" can potentially refer -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 144) # to child projects from parent projects, we don't immediately -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 145) # load child projects when seeing those attributes. Instead, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 146) # we record the minimal information that will be used only later.
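The load() docstring above boils down to a load-once pattern: the incoming location is normalized, mapped to a module key, and the real work happens only the first time that key is seen. A minimal self-contained sketch of that idea in Python, reusing the "Jamfile<absolute-path>" naming that module_name introduces further down; canonical_key, LOADED and load_file are illustrative names, not part of the b2 API:

    import os

    LOADED = {}  # canonical module key -> True; plays the role of self.jamfile_modules

    def canonical_key(jamfile_location):
        # Root the path so that '.' and '../../exe/program1' compare predictably,
        # then use the "Jamfile<absolute-path>" naming scheme.
        absolute = os.path.realpath(os.path.join(os.getcwd(), jamfile_location))
        return "Jamfile<%s>" % absolute

    def load_once(jamfile_location, load_file):
        key = canonical_key(jamfile_location)
        if key not in LOADED:
            LOADED[key] = True            # remember the module before loading
            load_file(jamfile_location)   # the actual parse happens only once
        return key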
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 147) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 148) self.load_used_projects(mname) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 149) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 150) return mname -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 151) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 152) def load_used_projects(self, module_name): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 153) # local used = [ modules.peek $(module-name) : .used-projects ] ; -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 154) used = self.used_projects[module_name] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 155) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 156) location = self.attribute(module_name, "location") -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 157) for u in used: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 158) id = u[0] -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 159) where = u[1] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 160) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 161) self.use(id, os.path.join(location, where)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 162) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 163) def load_parent(self, location): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 164) """Loads parent of Jamfile at 'location'. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 165) Issues an error if nothing is found.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 166) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 167) found = b2.util.path.glob_in_parents( -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 168) location, self.JAMROOT + self.JAMFILE) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 169) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 170) if not found: -1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 171) print "error: Could not find parent for project at '%s'" % location -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 172) print "error: Did not find Jamfile or project-root.jam in any parent directory." -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 173) sys.exit(1) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 174) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 175) return self.load(os.path.dirname(found[0])) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 176) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 177) def act_as_jamfile(self, module, location): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 178) """Makes the specified 'module' act as if it were a regularly loaded Jamfile -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 179) at 'location'. If Jamfile is already located for that location, it's an -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 180) error.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 181) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 182) if self.module_name(location) in self.jamfile_modules: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 183) self.manager.errors()( -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 184) "Jamfile was already loaded for '%s'" % location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 185) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 186) # Set up non-default mapping from location to module. 
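load_parent above delegates the actual search to b2.util.path.glob_in_parents, i.e. walking up the directory tree until one of the JAMROOT or JAMFILE patterns matches. A rough stand-alone sketch of that upward search, assuming fnmatch-style matching (which the default "[Jj]amroot"-style patterns happen to fit); find_in_parents is an illustrative name, not the real b2.util.path rule:

    import fnmatch
    import os

    def find_in_parents(start_dir, patterns):
        # Walk from start_dir upwards; return the matching names from the first
        # directory that contains any match, or [] once the filesystem root is hit.
        here = os.path.abspath(start_dir)
        while True:
            names = os.listdir(here) if os.path.isdir(here) else []
            hits = [os.path.join(here, n) for n in sorted(names)
                    if any(fnmatch.fnmatch(n, p) for p in patterns)]
            if hits:
                return hits
            parent = os.path.dirname(here)
            if parent == here:
                return []
            here = parent

    # e.g. find_in_parents("src/lib", ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"])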
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 187) self.location2module[location] = module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 188) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 189) # Add the location to the list of project locations -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 190) # so that we don't try to load Jamfile in future -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 191) self.jamfile_modules.append(location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 192) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 193) self.initialize(module, location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 194) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 195) def find(self, name, current_location): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 196) """Given 'name' which can be project-id or plain directory name, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 197) return project module corresponding to that id or directory. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 198) Returns nothing of project is not found.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 199) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 200) project_module = None -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 201) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 202) # Try interpreting name as project id. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 203) if name[0] == '/': -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 204) project_module = self.id2module.get(name) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 205) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 206) if not project_module: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 207) location = os.path.join(current_location, name) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 208) # If no project is registered for the given location, try to -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 209) # load it. First see if we have Jamfile. If not we might have project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 210) # root, willing to act as Jamfile. In that case, project-root -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 211) # must be placed in the directory referred by id. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 212) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 213) project_module = self.module_name(location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 214) if not project_module in self.jamfile_modules and \ -49c03622 (jhunold 2008-07-23 09:57:41 +0000 215) b2.util.path.glob([location], self.JAMROOT + self.JAMFILE): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 216) project_module = self.load(location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 217) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 218) return project_module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 219) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 220) def module_name(self, jamfile_location): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 221) """Returns the name of module corresponding to 'jamfile-location'. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 222) If no module corresponds to location yet, associates default -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 223) module name with that location.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 224) module = self.location2module.get(jamfile_location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 225) if not module: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 226) # Root the path, so that locations are always umbiguious. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 227) # Without this, we can't decide if '../../exe/program1' and '.' -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 228) # are the same paths, or not. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 229) jamfile_location = os.path.realpath( -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 230) os.path.join(os.getcwd(), jamfile_location)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 231) module = "Jamfile<%s>" % jamfile_location -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 232) self.location2module[jamfile_location] = module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 233) return module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 234) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 235) def find_jamfile (self, dir, parent_root=0, no_errors=0): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 236) """Find the Jamfile at the given location. This returns the -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 237) exact names of all the Jamfiles in the given directory. The optional -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 238) parent-root argument causes this to search not the given directory -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 239) but the ones above it up to the directory given in it.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 240) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 241) # Glob for all the possible Jamfiles according to the match pattern. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 242) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 243) jamfile_glob = None -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 244) if parent_root: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 245) parent = self.dir2parent_jamfile.get(dir) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 246) if not parent: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 247) parent = b2.util.path.glob_in_parents(dir, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 248) self.JAMFILE) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 249) self.dir2parent_jamfile[dir] = parent -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 250) jamfile_glob = parent -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 251) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 252) jamfile = self.dir2jamfile.get(dir) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 253) if not jamfile: -49c03622 (jhunold 2008-07-23 09:57:41 +0000 254) jamfile = b2.util.path.glob([dir], self.JAMFILE) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 255) self.dir2jamfile[dir] = jamfile -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 256) jamfile_glob = jamfile -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 257) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 258) if len(jamfile_glob): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 259) # Multiple Jamfiles found in the same place. Warn about this. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 260) # And ensure we use only one of them. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 261) # As a temporary convenience measure, if there's Jamfile.v2 amount -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 262) # found files, suppress the warning and use it. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 263) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 264) pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 265) v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 266) if len(v2_jamfiles) == 1: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 267) jamfile_glob = v2_jamfiles -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 268) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 269) print """warning: Found multiple Jamfiles at '%s'! -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 270) Loading the first one: '%s'.""" % (dir, jamfile_glob[0]) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 271) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 272) # Could not find it, error. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 273) if not no_errors and not jamfile_glob: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 274) self.manager.errors()( -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 275) """Unable to load Jamfile. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 276) Could not find a Jamfile in directory '%s' -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 277) Attempted to find it with pattern '%s'. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 278) Please consult the documentation at 'http://boost.org/b2.'.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 279) % (dir, string.join(self.JAMFILE))) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 280) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 281) return jamfile_glob[0] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 282) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 283) def load_jamfile(self, dir): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 284) """Load a Jamfile at the given directory. Returns nothing. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 285) Will attempt to load the file as indicated by the JAMFILE patterns. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 286) Effect of calling this rule twice with the same 'dir' is underfined.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 287) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 288) # See if the Jamfile is where it should be. -49c03622 (jhunold 2008-07-23 09:57:41 +0000 289) jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 290) if not jamfile_to_load: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 291) jamfile_to_load = self.find_jamfile(dir) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 292) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 293) jamfile_to_load = jamfile_to_load[0] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 294) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 295) # The module of the jamfile. 
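The disambiguation step in find_jamfile above (prefer a single Jamfile.v2 or Build.jam among several candidates, otherwise warn and take the first one) can be read in isolation as follows; pick_jamfile is an illustrative helper reflecting the intent of that code, not the actual rule:

    import re

    V2_PATTERN = r"(.*[Jj]amfile\.v2)|(.*[Bb]uild\.jam)"

    def pick_jamfile(candidates):
        # 'candidates' is the (non-empty) glob result for a single directory.
        if len(candidates) == 1:
            return candidates[0]
        preferred = [c for c in candidates if re.match(V2_PATTERN, c)]
        if len(preferred) == 1:
            return preferred[0]   # a single modern name wins silently
        print("warning: Found multiple Jamfiles: %s; loading '%s'"
              % (", ".join(candidates), candidates[0]))
        return candidates[0]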
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 296) dir = os.path.realpath(os.path.dirname(jamfile_to_load)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 297) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 298) jamfile_module = self.module_name (dir) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 299) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 300) # Initialize the jamfile module before loading. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 301) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 302) self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 303) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 304) saved_project = self.current_project -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 305) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 306) self.used_projects[jamfile_module] = [] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 307) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 308) # Now load the Jamfile in it's own context. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 309) # Initialization might have load parent Jamfiles, which might have -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 310) # loaded the current Jamfile with use-project. Do a final check to make -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 311) # sure it's not loaded already. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 312) if not jamfile_module in self.jamfile_modules: -49c03622 (jhunold 2008-07-23 09:57:41 +0000 313) self.jamfile_modules[jamfile_module] = True -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 314) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 315) # FIXME: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 316) # mark-as-user $(jamfile-module) ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 317) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 318) bjam.call("load", jamfile_module, jamfile_to_load) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 319) basename = os.path.basename(jamfile_to_load) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 320) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 321) # Now do some checks -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 322) if self.current_project != saved_project: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 323) self.manager.errors()( -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 324) """The value of the .current-project variable -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 325) has magically changed after loading a Jamfile. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 326) This means some of the targets might be defined a the wrong project. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 327) after loading %s -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 328) expected value %s -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 329) actual value %s""" % (jamfile_module, saved_project, self.current_project)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 330) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 331) if self.global_build_dir: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 332) id = self.attribute(jamfile_module, "id") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 333) project_root = self.attribute(jamfile_module, "project-root") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 334) location = self.attribute(jamfile_module, "location") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 335) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 336) if location and project_root == dir: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 337) # This is Jamroot -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 338) if not id: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 339) # FIXME: go via errors module, so that contexts are -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 340) # shown? -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 341) print "warning: the --build-dir option was specified" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 342) print "warning: but Jamroot at '%s'" % dir -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 343) print "warning: specified no project id" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 344) print "warning: the --build-dir option will be ignored" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 345) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 346) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 347) def load_standalone(self, jamfile_module, file): -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 348) """Loads 'file' as standalone project that has no location -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 349) associated with it. This is mostly useful for user-config.jam, -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 350) which should be able to define targets, but although it has -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 351) some location in filesystem, we don't want any build to -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 352) happen in user's HOME, for example. -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 353) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 354) The caller is required to never call this method twice on -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 355) the same file. 
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 356) """ -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 357) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 358) self.initialize(jamfile_module) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 359) self.used_projects[jamfile_module] = [] -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 360) bjam.call("load", jamfile_module, file) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 361) self.load_used_projects(jamfile_module) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 362) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 363) def is_jamroot(self, basename): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 364) match = [ pat for pat in self.JAMROOT if re.match(pat, basename)] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 365) if match: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 366) return 1 -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 367) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 368) return 0 -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 369) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 370) def initialize(self, module_name, location=None, basename=None): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 371) """Initialize the module for a project. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 372) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 373) module-name is the name of the project module. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 374) location is the location (directory) of the project to initialize. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 375) If not specified, stanalone project will be initialized -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 376) """ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 377) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 378) if "--debug-loading" in self.manager.argv(): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 379) print "Initializing project '%s'" % module_name -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 380) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 381) # TODO: need to consider if standalone projects can do anything but defining -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 382) # prebuilt targets. If so, we need to give more sensible "location", so that -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 383) # source paths are correct. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 384) if not location: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 385) location = "" -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 386) else: -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 387) location = b2.util.path.relpath(os.getcwd(), location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 388) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 389) attributes = ProjectAttributes(self.manager, location, module_name) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 390) self.module2attributes[module_name] = attributes -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 391) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 392) if location: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 393) attributes.set("source-location", location, exact=1) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 394) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 395) attributes.set("source-location", "", exact=1) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 396) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 397) attributes.set("requirements", property_set.empty(), exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 398) attributes.set("usage-requirements", property_set.empty(), exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 399) attributes.set("default-build", [], exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 400) attributes.set("projects-to-build", [], exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 401) attributes.set("project-root", None, exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 402) attributes.set("build-dir", None, exact=True) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 403) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 404) self.project_rules_.init_project(module_name) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 405) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 406) jamroot = False -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 407) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 408) parent_module = None; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 409) if module_name == "site-config": -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 410) # No parent -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 411) pass -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 412) elif module_name == "user-config": -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 413) parent_module = "site-config" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 414) elif location and not self.is_jamroot(basename): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 415) # We search for parent/project-root only if jamfile was specified -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 416) # --- i.e -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 417) # if the project is not standalone. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 418) parent_module = self.load_parent(location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 419) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 420) # It's either jamroot, or standalone project. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 421) # If it's jamroot, inherit from user-config. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 422) if location: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 423) parent_module = "user-config" ; -49c03622 (jhunold 2008-07-23 09:57:41 +0000 424) jamroot = True ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 425) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 426) if parent_module: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 427) self.inherit_attributes(module_name, parent_module) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 428) attributes.set("parent-module", parent_module, exact=1) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 429) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 430) if jamroot: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 431) attributes.set("project-root", location, exact=1) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 432) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 433) parent = None -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 434) if parent_module: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 435) parent = self.target(parent_module) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 436) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 437) if not self.module2target.has_key(module_name): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 438) target = b2.build.targets.ProjectTarget(self.manager, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 439) module_name, module_name, parent, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 440) self.attribute(module_name,"requirements"), -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 441) # FIXME: why we need to pass this? It's not -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 442) # passed in jam code. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 443) self.attribute(module_name, "default-build")) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 444) self.module2target[module_name] = target -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 445) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 446) self.current_project = self.target(module_name) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 447) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 448) def inherit_attributes(self, project_module, parent_module): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 449) """Make 'project-module' inherit attributes of project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 450) root and parent module.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 451) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 452) attributes = self.module2attributes[project_module] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 453) pattributes = self.module2attributes[parent_module] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 454) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 455) # Parent module might be locationless user-config. 
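The parent-selection logic in initialize above establishes the configuration chain: site-config has no parent, user-config inherits from site-config, a Jamroot inherits from user-config, and an ordinary Jamfile inherits from whatever project load_parent finds above it. A compact restatement of that decision; parent_module_for and find_parent_project are illustrative stand-ins, not b2 API:

    def parent_module_for(module_name, location, is_jamroot, find_parent_project):
        # Mirrors the if/elif chain in ProjectRegistry.initialize.
        if module_name == "site-config":
            return None                            # top of the chain
        if module_name == "user-config":
            return "site-config"
        if location and not is_jamroot:
            return find_parent_project(location)   # ordinary Jamfile: search upwards
        if location:
            return "user-config"                   # Jamroot inherits the user configuration
        return None                                # standalone project: no parent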
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 456) # FIXME: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 457) #if [ modules.binding $(parent-module) ] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 458) #{ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 459) # $(attributes).set parent : [ path.parent -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 460) # [ path.make [ modules.binding $(parent-module) ] ] ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 461) # } -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 462) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 463) attributes.set("project-root", pattributes.get("project-root"), exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 464) attributes.set("default-build", pattributes.get("default-build"), exact=True) -49c03622 (jhunold 2008-07-23 09:57:41 +0000 465) attributes.set("requirements", pattributes.get("requirements"), exact=True) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 466) attributes.set("usage-requirements", -cde6f09a (vladimir_prus 2007-10-19 23:12:33 +0000 467) pattributes.get("usage-requirements"), exact=1) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 468) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 469) parent_build_dir = pattributes.get("build-dir") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 470) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 471) if parent_build_dir: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 472) # Have to compute relative path from parent dir to our dir -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 473) # Convert both paths to absolute, since we cannot -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 474) # find relative path from ".." to "." -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 475) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 476) location = attributes.get("location") -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 477) parent_location = pattributes.get("location") -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 478) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 479) our_dir = os.path.join(os.getcwd(), location) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 480) parent_dir = os.path.join(os.getcwd(), parent_location) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 481) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 482) build_dir = os.path.join(parent_build_dir, -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 483) b2.util.path.relpath(parent_dir, -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 484) our_dir)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 485) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 486) def register_id(self, id, module): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 487) """Associate the given id with the given project module.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 488) self.id2module[id] = module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 489) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 490) def current(self): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 491) """Returns the project which is currently being loaded.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 492) return self.current_project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 493) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 494) def push_current(self, project): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 495) """Temporary changes the current project to 'project'. 
Should -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 496) be followed by 'pop-current'.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 497) self.saved_current_project.append(self.current_project) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 498) self.current_project = project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 499) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 500) def pop_current(self): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 501) self.current_project = self.saved_current_project[-1] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 502) del self.saved_current_project[-1] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 503) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 504) def attributes(self, project): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 505) """Returns the project-attribute instance for the -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 506) specified jamfile module.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 507) return self.module2attributes[project] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 508) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 509) def attribute(self, project, attribute): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 510) """Returns the value of the specified attribute in the -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 511) specified jamfile module.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 512) return self.module2attributes[project].get(attribute) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 513) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 514) def target(self, project_module): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 515) """Returns the project target corresponding to the 'project-module'.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 516) if not self.module2target[project_module]: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 517) self.module2target[project_module] = \ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 518) ProjectTarget(project_module, project_module, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 519) self.attribute(project_module, "requirements")) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 520) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 521) return self.module2target[project_module] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 522) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 523) def use(self, id, location): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 524) # Use/load a project. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 525) saved_project = self.current_project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 526) project_module = self.load(location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 527) declared_id = self.attribute(project_module, "id") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 528) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 529) if not declared_id or declared_id != id: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 530) # The project at 'location' either have no id or -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 531) # that id is not equal to the 'id' parameter. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 532) if self.id2module[id] and self.id2module[id] != project_module: -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 533) self.manager.errors()( -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 534) """Attempt to redeclare already existing project id '%s'""" % id) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 535) self.id2module[id] = project_module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 536) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 537) self.current_module = saved_project -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 538) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 539) def add_rule(self, name, callable): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 540) """Makes rule 'name' available to all subsequently loaded Jamfiles. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 541) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 542) Calling that rule wil relay to 'callable'.""" -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 543) self.project_rules_.add_rule(name, callable) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 544) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 545) def project_rules(self): -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 546) return self.project_rules_ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 547) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 548) def glob_internal(self, project, wildcards, excludes, rule_name): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 549) location = project.get("source-location") -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 550) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 551) result = [] -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 552) callable = b2.util.path.__dict__[rule_name] -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 553) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 554) paths = callable(location, wildcards, excludes) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 555) has_dir = 0 -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 556) for w in wildcards: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 557) if os.path.dirname(w): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 558) has_dir = 1 -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 559) break -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 560) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 561) if has_dir or rule_name != "glob": -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 562) # The paths we've found are relative to current directory, -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 563) # but the names specified in sources list are assumed to -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 564) # be relative to source directory of the corresponding -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 565) # prject. So, just make the name absolute. -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 566) result = [os.path.join(os.getcwd(), p) for p in paths] -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 567) else: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 568) # There were not directory in wildcard, so the files are all -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 569) # in the source directory of the project. Just drop the -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 570) # directory, instead of making paths absolute. 
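The post-processing step of glob_internal above can be isolated: when any wildcard contains a directory part, or a glob variant other than plain "glob" was used, results are made absolute; otherwise only the bare file names are kept, because they are interpreted relative to the project's source directory. A sketch with an illustrative name (adjust_glob_results is not the real rule):

    import os

    def adjust_glob_results(paths, wildcards, rule_name="glob"):
        # If any wildcard carries a directory part, or an exclusion-aware glob
        # variant was used, keep absolute paths so the result is unambiguous.
        if any(os.path.dirname(w) for w in wildcards) or rule_name != "glob":
            return [os.path.join(os.getcwd(), p) for p in paths]
        # Otherwise only the file names matter: they are interpreted relative
        # to the project's source directory.
        return [os.path.basename(p) for p in paths]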
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 571) result = [os.path.basename(p) for p in paths] -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 572) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 573) return result -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 574) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 575) def load_module(self, name, extra_path=None): -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 576) """Classic Boost.Build 'modules' are in fact global variables. -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 577) Therefore, try to find an already loaded Python module called 'name' in sys.modules. -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 578) If the module ist not loaded, find it Boost.Build search -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 579) path and load it. The new module is not entered in sys.modules. -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 580) The motivation here is to have disjoint namespace of modules -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 581) loaded via 'import/using' in Jamfile, and ordinary Python -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 582) modules. We don't want 'using foo' in Jamfile to load ordinary -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 583) Python module 'foo' which is going to not work. And we -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 584) also don't want 'import foo' in regular Python module to -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 585) accidentally grab module named foo that is internal to -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 586) Boost.Build and intended to provide interface to Jamfiles.""" -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 587) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 588) existing = self.loaded_tool_modules_.get(name) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 589) if existing: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 590) return existing -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 591) -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 592) modules = sys.modules -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 593) for class_name in modules: -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 594) if name in class_name: -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 595) module = modules[class_name] -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 596) self.loaded_tool_modules_[name] = module -53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 597) return module -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 598) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 599) path = extra_path -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 600) if not path: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 601) path = [] -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 602) path.extend(self.manager.b2.path()) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 603) location = None -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 604) for p in path: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 605) l = os.path.join(p, name + ".py") -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 606) if os.path.exists(l): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 607) location = l -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 608) break -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 609) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 610) if not location: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 611) self.manager.errors()("Cannot find module '%s'" % name) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 
612) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 613) mname = "__build_build_temporary__" -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 614) file = open(location) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 615) try: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 616) # TODO: this means we'll never make use of .pyc module, -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 617) # which might be a problem, or not. -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 618) module = imp.load_module(mname, file, os.path.basename(location), -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 619) (".py", "r", imp.PY_SOURCE)) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 620) del sys.modules[mname] -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 621) self.loaded_tool_modules_[name] = module -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 622) return module -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 623) finally: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 624) file.close() -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 625) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 626) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 627) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 628) # FIXME: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 629) # Defines a Boost.Build extension project. Such extensions usually -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 630) # contain library targets and features that can be used by many people. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 631) # Even though extensions are really projects, they can be initialize as -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 632) # a module would be with the "using" (project.project-rules.using) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 633) # mechanism. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 634) #rule extension ( id : options * : * ) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 635) #{ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 636) # # The caller is a standalone module for the extension. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 637) # local mod = [ CALLER_MODULE ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 638) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 639) # # We need to do the rest within the extension module. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 640) # module $(mod) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 641) # { -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 642) # import path ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 643) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 644) # # Find the root project. 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 645) # local root-project = [ project.current ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 646) # root-project = [ $(root-project).project-module ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 647) # while -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 648) # [ project.attribute $(root-project) parent-module ] && -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 649) # [ project.attribute $(root-project) parent-module ] != user-config -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 650) # { -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 651) # root-project = [ project.attribute $(root-project) parent-module ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 652) # } -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 653) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 654) # # Create the project data, and bring in the project rules -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 655) # # into the module. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 656) # project.initialize $(__name__) : -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 657) # [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 658) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 659) # # Create the project itself, i.e. the attributes. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 660) # # All extensions are created in the "/ext" project space. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 661) # project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 662) # local attributes = [ project.attributes $(__name__) ] ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 663) # -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 664) # # Inherit from the root project of whomever is defining us. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 665) # project.inherit-attributes $(__name__) : $(root-project) ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 666) # $(attributes).set parent-module : $(root-project) : exact ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 667) # } -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 668) #} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 669) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 670) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 671) class ProjectAttributes: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 672) """Class keeping all the attributes of a project. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 673) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 674) The standard attributes are 'id', "location", "project-root", "parent" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 675) "requirements", "default-build", "source-location" and "projects-to-build". 
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 676) """ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 677) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 678) def __init__(self, manager, location, project_module): -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 679) self.manager = manager -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 680) self.location = location -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 681) self.project_module = project_module -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 682) self.attributes = {} -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 683) self.usage_requirements = None -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 684) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 685) def set(self, attribute, specification, exact): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 686) """Set the named attribute from the specification given by the user. -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 687) The value actually set may be different.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 688) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 689) if exact: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 690) self.__dict__[attribute] = specification -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 691) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 692) elif attribute == "requirements": -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 693) self.requirements = property_set.refine_from_user_input( -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 694) self.requirements, specification, -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 695) self.project_module, self.location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 696) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 697) elif attribute == "usage-requirements": -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 698) unconditional = [] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 699) for p in specification: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 700) split = property.split_conditional(p) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 701) if split: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 702) unconditional.append(split[1]) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 703) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 704) unconditional.append(p) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 705) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 706) non_free = property.remove("free", unconditional) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 707) if non_free: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 708) pass -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 709) # FIXME: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 710) #errors.error "usage-requirements" $(specification) "have non-free properties" $(non-free) ; -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 711) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 712) t = property.translate_paths(specification, self.location) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 713) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 714) existing = self.__dict__.get("usage-requirements") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 715) if existing: -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 716) new = property_set.create(existing.raw() + t) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 717) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 718) new = property_set.create(t) -f049766b 
(vladimir_prus 2007-10-10 09:31:06 +0000 719) self.__dict__["usage-requirements"] = new -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 720) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 721) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 722) elif attribute == "default-build": -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 723) self.__dict__["default-build"] = property_set.create(specification) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 724) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 725) elif attribute == "source-location": -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 726) source_location = [] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 727) for path in specification: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 728) source_location += os.path.join(self.location, path) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 729) self.__dict__["source-location"] = source_location -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 730) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 731) elif attribute == "build-dir": -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 732) self.__dict__["build-dir"] = os.path.join(self.location, specification) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 733) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 734) elif not attribute in ["id", "default-build", "location", -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 735) "source-location", "parent", -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 736) "projects-to-build", "project-root"]: -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 737) self.manager.errors()( -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 738) """Invalid project attribute '%s' specified -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 739) for project at '%s'""" % (attribute, self.location)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 740) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 741) self.__dict__[attribute] = specification -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 742) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 743) def get(self, attribute): -cde6f09a (vladimir_prus 2007-10-19 23:12:33 +0000 744) return self.__dict__[attribute] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 745) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 746) def dump(self): -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 747) """Prints the project attributes.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 748) id = self.get("id") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 749) if not id: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 750) id = "(none)" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 751) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 752) id = id[0] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 753) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 754) parent = self.get("parent") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 755) if not parent: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 756) parent = "(none)" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 757) else: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 758) parent = parent[0] -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 759) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 760) print "'%s'" % id -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 761) print "Parent project:%s", parent -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 762) print "Requirements:%s", self.get("requirements") -f049766b 
(vladimir_prus 2007-10-10 09:31:06 +0000 763) print "Default build:%s", string.join(self.get("debuild-build")) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 764) print "Source location:%s", string.join(self.get("source-location")) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 765) print "Projects to build:%s", string.join(self.get("projects-to-build").sort()); -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 766) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 767) class ProjectRules: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 768) """Class keeping all rules that are made available to Jamfile.""" -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 769) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 770) def __init__(self, registry): -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 771) self.registry = registry -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 772) self.manager_ = registry.manager -38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 773) self.rules = {} -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 774) self.local_names = [x for x in self.__class__.__dict__ -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 775) if x not in ["__init__", "init_project", "add_rule", -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 776) "error_reporting_wrapper", "add_rule_for_type"]] -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 777) self.all_names_ = [x for x in self.local_names] -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 778) -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 779) def add_rule_for_type(self, type): -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 780) rule_name = type.lower(); -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 781) -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 782) def xpto (name, sources, requirements = [], default_build = None, usage_requirements = []): -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 783) return self.manager_.targets().create_typed_target( -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 784) type, self.registry.current(), name[0], sources, -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 785) requirements, default_build, usage_requirements) -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 786) -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 787) self.add_rule(type.lower(), xpto) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 788) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 789) def add_rule(self, name, callable): -38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 790) self.rules[name] = callable -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 791) self.all_names_.append(name) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 792) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 793) def all_names(self): -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 794) return self.all_names_ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 795) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 796) def call_and_report_errors(self, callable, *args): -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 797) result = None -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 798) try: -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 799) self.manager_.errors().push_jamfile_context() -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 800) result = callable(*args) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 801) except ExceptionWithUserContext, e: -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 802) e.report() -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 
803) except Exception, e: -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 804) try: -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 805) self.manager_.errors().handle_stray_exception (e) -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 806) except ExceptionWithUserContext, e: -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 807) e.report() -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 808) finally: -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 809) self.manager_.errors().pop_jamfile_context() -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 810) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 811) return result -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 812) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 813) def make_wrapper(self, callable): -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 814) """Given a free-standing function 'callable', return a new -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 815) callable that will call 'callable' and report all exceptions, -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 816) using 'call_and_report_errors'.""" -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 817) def wrapper(*args): -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 818) self.call_and_report_errors(callable, *args) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 819) return wrapper -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 820) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 821) def init_project(self, project_module): -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 822) -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 823) for n in self.local_names: -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 824) # Using 'getattr' here gives us a bound method, -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 825) # while using self.__dict__[r] would give unbound one. 
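For orientation, a minimal Jamfile sketch of the typed-target rules that add_rule_for_type generates from registered type names (EXE becomes 'exe', LIB becomes 'lib'); the target and file names here are illustrative only:

    exe hello : hello.cpp ;
    lib util : util.cpp : <link>static ;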
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 826) v = getattr(self, n) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 827) if callable(v): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 828) if n == "import_": -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 829) n = "import" -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 830) else: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 831) n = string.replace(n, "_", "-") -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 832) -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 833) bjam.import_rule(project_module, n, -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 834) self.make_wrapper(v)) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 835) -38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 836) for n in self.rules: -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 837) bjam.import_rule(project_module, n, -0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 838) self.make_wrapper(self.rules[n])) -38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 839) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 840) def project(self, *args): -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 841) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 842) jamfile_module = self.registry.current().project_module() -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 843) attributes = self.registry.attributes(jamfile_module) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 844) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 845) id = None -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 846) if args and args[0]: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 847) id = args[0][0] -092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 848) args = args[1:] -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 849) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 850) if id: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 851) if id[0] != '/': -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 852) id = '/' + id -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 853) self.registry.register_id (id, jamfile_module) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 854) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 855) explicit_build_dir = None -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 856) for a in args: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 857) if a: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 858) attributes.set(a[0], a[1:], exact=0) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 859) if a[0] == "build-dir": -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 860) explicit_build_dir = a[1] -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 861) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 862) # If '--build-dir' is specified, change the build dir for the project. -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 863) if self.registry.global_build_dir: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 864) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 865) location = attributes.get("location") -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 866) # Project with empty location is 'standalone' project, like -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 867) # user-config, or qt. It has no build dir. -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 868) # If we try to set build dir for user-config, we'll then -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 869) # try to inherit it, with either weird, or wrong consequences. 
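A hedged sketch of the 'project' declaration whose id and attributes are processed above; the project name and properties are made up:

    project myapp
        : requirements <threading>multi <warnings>all
        : default-build release
        ;

The id is rooted with a leading '/' automatically, so the project can later be referred to as /myapp.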
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 870) if location and location == attributes.get("project-root"): -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 871) # This is Jamroot. -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 872) if id: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 873) if explicit_build_dir and os.path.isabs(explicit_build_dir): -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 874) self.register.manager.errors()( -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 875) """Absolute directory specified via 'build-dir' project attribute -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 876) Don't know how to combine that with the --build-dir option.""") -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 877) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 878) rid = id -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 879) if rid[0] == '/': -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 880) rid = rid[1:] -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 881) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 882) p = os.path.join(self.registry.global_build_dir, -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 883) rid, explicit_build_dir) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 884) attributes.set("build-dir", p, exact=1) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 885) elif explicit_build_dir: -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 886) self.registry.manager.errors()( -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 887) """When --build-dir is specified, the 'build-project' -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 888) attribute is allowed only for top-level 'project' invocations""") -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 889) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 890) def constant(self, name, value): -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 891) """Declare and set a project global constant. -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 892) Project global constants are normal variables but should -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 893) not be changed. They are applied to every child Jamfile.""" -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 894) m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>" -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 895) self.registry.current().add_constant(name[0], value) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 896) -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 897) def path_constant(self, name, value): -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 898) """Declare and set a project global constant, whose value is a path. The -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 899) path is adjusted to be relative to the invocation directory. The given -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 900) value path is taken to be either absolute, or relative to this project -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 901) root.""" -0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 902) self.registry.current().add_constant(name[0], value, path=1) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 903) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 904) def use_project(self, id, where): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 905) # See comment in 'load' for explanation why we record the -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 906) # parameters as opposed to loading the project now. 
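A small illustrative sketch of the 'constant' and 'path-constant' rules defined above (names and paths are hypothetical); both constants are inherited by child Jamfiles:

    path-constant TOP : . ;        # project root as a path, usable from any child Jamfile
    constant VERSION : 1.2.3 ;
    exe app : app.cpp : <include>$(TOP)/include ;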
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 907) m = self.registry.current().project_module(); -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 908) self.registry.used_projects[m].append((id, where)) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 909) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 910) def build_project(self, dir): -1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 911) assert(isinstance(dir, list)) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 912) jamfile_module = self.registry.current().project_module() -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 913) attributes = self.registry.attributes(jamfile_module) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 914) now = attributes.get("projects-to-build") -1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 915) attributes.set("projects-to-build", now + dir, exact=True) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 916) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 917) def explicit(self, target_names): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 918) t = self.registry.current() -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 919) for n in target_names: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 920) t.mark_target_as_explicit(n) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 921) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 922) def glob(self, wildcards, excludes=None): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 923) return self.registry.glob_internal(self.registry.current(), -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 924) wildcards, excludes, "glob") -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 925) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 926) def glob_tree(self, wildcards, excludes=None): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 927) bad = 0 -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 928) for p in wildcards: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 929) if os.path.dirname(p): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 930) bad = 1 -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 931) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 932) if excludes: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 933) for p in excludes: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 934) if os.path.dirname(p): -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 935) bad = 1 -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 936) -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 937) if bad: -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 938) self.registry.manager().errors()( -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 939) "The patterns to 'glob-tree' may not include directory") -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 940) return self.registry.glob_internal(self.registry.current(), -2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 941) wildcards, excludes, "glob_tree") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 942) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 943) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 944) def using(self, toolset, *args): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 945) # The module referred by 'using' can be placed in -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 946) # the same directory as Jamfile, and the user -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 947) # will expect the module to be found even though -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 948) # the directory is not in BOOST_BUILD_PATH. 
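For the 'glob' and 'explicit' rules above, a typical Jamfile fragment might look like the following (file names are illustrative):

    lib core : [ glob src/*.cpp : src/*_test.cpp ] ;
    exe bench : bench.cpp core ;
    explicit bench ;    # built only when requested by name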
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 949) # So temporary change the search path. -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 950) jamfile_module = self.registry.current().project_module() -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 951) attributes = self.registry.attributes(jamfile_module) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 952) location = attributes.get("location") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 953) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 954) m = self.registry.load_module(toolset[0], [location]) -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 955) if not m.__dict__.has_key("init"): -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 956) self.registry.manager.errors()( -7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 957) "Tool module '%s' does not define the 'init' method" % toolset[0]) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 958) m.init(*args) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 959) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 960) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 961) def import_(self, name, names_to_import=None, local_names=None): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 962) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 963) name = name[0] -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 964) jamfile_module = self.registry.current().project_module() -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 965) attributes = self.registry.attributes(jamfile_module) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 966) location = attributes.get("location") -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 967) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 968) m = self.registry.load_module(name, [location]) -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 969) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 970) for f in m.__dict__: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 971) v = m.__dict__[f] -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 972) if callable(v): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 973) bjam.import_rule(jamfile_module, name + "." + f, v) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 974) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 975) if names_to_import: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 976) if not local_names: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 977) local_names = names_to_import -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 978) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 979) if len(names_to_import) != len(local_names): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 980) self.registry.manager.errors()( -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 981) """The number of names to import and local names do not match.""") -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 982) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 983) for n, l in zip(names_to_import, local_names): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 984) bjam.import_rule(jamfile_module, l, m.__dict__[n]) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 985) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 986) def conditional(self, condition, requirements): -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 987) """Calculates conditional requirements for multiple requirements -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 988) at once. 
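The 'using' rule above initializes a tool module, temporarily extending the search path so the module may live next to the Jamfile; a common invocation, with version and command chosen purely for illustration:

    using gcc : 4.8 : g++-4.8 ;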
This is a shorthand to be reduce duplication and to -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 989) keep an inline declarative syntax. For example: -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 990) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 991) lib x : x.cpp : [ conditional <toolset>gcc <variant>debug : -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 992) <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ; -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 993) """ -f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 994) -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 995) c = string.join(condition, ",") -f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 996) return [c + ":" + r for r in requirements] diff --git a/jam-files/boost-build/build/project.jam b/jam-files/boost-build/build/project.jam deleted file mode 100644 index c9967613..00000000 --- a/jam-files/boost-build/build/project.jam +++ /dev/null @@ -1,1110 +0,0 @@ -# Copyright 2002, 2003 Dave Abrahams -# Copyright 2002, 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Implements project representation and loading. Each project is represented by: -# - a module where all the Jamfile content live. -# - an instance of 'project-attributes' class. -# (given a module name, can be obtained using the 'attributes' rule) -# - an instance of 'project-target' class (from targets.jam) -# (given a module name, can be obtained using the 'target' rule) -# -# Typically, projects are created as result of loading a Jamfile, which is done -# by rules 'load' and 'initialize', below. First, module for Jamfile is loaded -# and new project-attributes instance is created. Some rules necessary for -# project are added to the module (see 'project-rules' module) at the bottom of -# this file. Default project attributes are set (inheriting attributes of parent -# project, if it exists). After that the Jamfile is read. It can declare its own -# attributes using the 'project' rule which will be combined with any already -# set attributes. -# -# The 'project' rule can also declare a project id which will be associated with -# the project module. -# -# There can also be 'standalone' projects. They are created by calling -# 'initialize' on an arbitrary module and not specifying their location. After -# the call, the module can call the 'project' rule, declare main targets and -# behave as a regular project except that, since it is not associated with any -# location, it should not declare targets that are not prebuilt. -# -# The list of all loaded Jamfile is stored in the .project-locations variable. -# It is possible to obtain a module name for a location using the 'module-name' -# rule. Standalone projects are not recorded and can only be referenced using -# their project id. - -import "class" : new ; -import errors ; -import modules ; -import path ; -import print ; -import property-set ; -import sequence ; - - -# Loads the Jamfile at the given location. After loading, project global file -# and Jamfiles needed by the requested one will be loaded recursively. If the -# Jamfile at that location is loaded already, does nothing. Returns the project -# module for the Jamfile. 
-# -rule load ( jamfile-location ) -{ - if --debug-loading in [ modules.peek : ARGV ] - { - ECHO "Loading Jamfile at" '$(jamfile-location)' ; - } - - local module-name = [ module-name $(jamfile-location) ] ; - # If Jamfile is already loaded, don't try again. - if ! $(module-name) in $(.jamfile-modules) - { - load-jamfile $(jamfile-location) : $(module-name) ; - - # We want to make sure that child project are loaded only after parent - # projects. In particular, because parent projects define attributes - # which are inherited by children, and we don't want children to be - # loaded before parent has defined everything. - # - # While "build-project" and "use-project" can potentially refer to child - # projects from parent projects, we don't immediately load child - # projects when seeing those attributes. Instead, we record the minimal - # information to be used only later. - load-used-projects $(module-name) ; - } - return $(module-name) ; -} - - -rule load-used-projects ( module-name ) -{ - local used = [ modules.peek $(module-name) : .used-projects ] ; - local location = [ attribute $(module-name) location ] ; - import project ; - while $(used) - { - local id = $(used[1]) ; - local where = $(used[2]) ; - - project.use $(id) : [ path.root [ path.make $(where) ] $(location) ] ; - used = $(used[3-]) ; - } -} - - -# Note the use of character groups, as opposed to listing 'Jamroot' and -# 'jamroot'. With the latter, we would get duplicate matches on Windows and -# would have to eliminate duplicates. -JAMROOT ?= [ modules.peek : JAMROOT ] ; -JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ; - - -# Loads parent of Jamfile at 'location'. Issues an error if nothing is found. -# -rule load-parent ( location ) -{ - local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ; - - if ! $(found) - { - ECHO error: Could not find parent for project at '$(location)' ; - EXIT error: Did not find Jamfile.jam or Jamroot.jam in any parent - directory. ; - } - - return [ load $(found[1]:D) ] ; -} - - -# Makes the specified 'module' act as if it were a regularly loaded Jamfile at -# 'location'. Reports an error if a Jamfile has already been loaded for that -# location. -# -rule act-as-jamfile ( module : location ) -{ - if [ module-name $(location) ] in $(.jamfile-modules) - { - errors.error "Jamfile was already loaded for '$(location)'" ; - } - # Set up non-default mapping from location to module. - .module.$(location) = $(module) ; - - # Add the location to the list of project locations so that we don't try to - # reload the same Jamfile in the future. - .jamfile-modules += [ module-name $(location) ] ; - - initialize $(module) : $(location) ; -} - - -# Returns the project module corresponding to the given project-id or plain -# directory name. Returns nothing if such a project can not be found. -# -rule find ( name : current-location ) -{ - local project-module ; - - # Try interpreting name as project id. - if [ path.is-rooted $(name) ] - { - project-module = $($(name).jamfile-module) ; - } - - if ! $(project-module) - { - local location = [ path.root [ path.make $(name) ] $(current-location) ] - ; - - # If no project is registered for the given location, try to load it. - # First see if we have a Jamfile. If not, then see if we might have a - # project root willing to act as a Jamfile. In that case, project root - # must be placed in the directory referred by id. - - project-module = [ module-name $(location) ] ; - if ! 
$(project-module) in $(.jamfile-modules) - { - if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ] - { - project-module = [ load $(location) ] ; - } - else - { - project-module = ; - } - } - } - - return $(project-module) ; -} - - -# Returns the name of the module corresponding to 'jamfile-location'. If no -# module corresponds to that location yet, associates the default module name -# with that location. -# -rule module-name ( jamfile-location ) -{ - if ! $(.module.$(jamfile-location)) - { - # Root the path, so that locations are always unambiguous. Without this, - # we can't decide if '../../exe/program1' and '.' are the same paths. - jamfile-location = [ path.root $(jamfile-location) [ path.pwd ] ] ; - .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ; - } - return $(.module.$(jamfile-location)) ; -} - - -# Default patterns to search for the Jamfiles to use for build declarations. -# -JAMFILE = [ modules.peek : JAMFILE ] ; -JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ; - - -# Find the Jamfile at the given location. This returns the exact names of all -# the Jamfiles in the given directory. The optional parent-root argument causes -# this to search not the given directory but the ones above it up to the -# directory given in it. -# -rule find-jamfile ( - dir # The directory(s) to look for a Jamfile. - parent-root ? # Optional flag indicating to search for the parent Jamfile. - : no-errors ? - ) -{ - # Glob for all the possible Jamfiles according to the match pattern. - # - local jamfile-glob = ; - if $(parent-root) - { - if ! $(.parent-jamfile.$(dir)) - { - .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE) - ] ; - } - jamfile-glob = $(.parent-jamfile.$(dir)) ; - } - else - { - if ! $(.jamfile.$(dir)) - { - .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ; - } - jamfile-glob = $(.jamfile.$(dir)) ; - - } - - local jamfile-to-load = $(jamfile-glob) ; - # Multiple Jamfiles found in the same place. Warn about this and ensure we - # use only one of them. As a temporary convenience measure, if there is - # Jamfile.v2 among found files, suppress the warning and use it. - # - if $(jamfile-to-load[2-]) - { - local v2-jamfiles = [ MATCH (.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam) : $(jamfile-to-load) ] ; - - if $(v2-jamfiles) && ! $(v2-jamfiles[2]) - { - jamfile-to-load = $(v2-jamfiles) ; - } - else - { - local jamfile = [ path.basename $(jamfile-to-load[1]) ] ; - ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!" - "Loading the first one: '$(jamfile)'." ; - } - - jamfile-to-load = $(jamfile-to-load[1]) ; - } - - # Could not find it, error. - # - if ! $(no-errors) && ! $(jamfile-to-load) - { - errors.error Unable to load Jamfile. - : Could not find a Jamfile in directory '$(dir)'. - : Attempted to find it with pattern '"$(JAMFILE:J=" ")"'. - : Please consult the documentation at 'http://www.boost.org'. ; - } - - return $(jamfile-to-load) ; -} - - -# Load a Jamfile at the given directory. Returns nothing. Will attempt to load -# the file as indicated by the JAMFILE patterns. Effect of calling this rule -# twice with the same 'dir' is undefined. -# -local rule load-jamfile ( - dir # The directory of the project Jamfile. - : jamfile-module - ) -{ - # See if the Jamfile is where it should be. - # - local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ; - if ! 
$(jamfile-to-load) - { - jamfile-to-load = [ find-jamfile $(dir) ] ; - } - - if $(jamfile-to-load[2]) - { - errors.error "Multiple Jamfiles found at '$(dir)'" - : "Filenames are: " $(jamfile-to-load:D=) ; - } - - # Now load the Jamfile in it's own context. - # The call to 'initialize' may load parent Jamfile, which might have - # 'use-project' statement that causes a second attempt to load the - # same project we're loading now. Checking inside .jamfile-modules - # prevents that second attempt from messing up. - if ! $(jamfile-module) in $(.jamfile-modules) - { - .jamfile-modules += $(jamfile-module) ; - - # Initialize the Jamfile module before loading. - # - initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ] - : $(jamfile-to-load:BS) ; - - local saved-project = $(.current-project) ; - - mark-as-user $(jamfile-module) ; - modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ; - if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ] - { - jamfile = [ find-jamfile $(dir) : no-errors ] ; - if $(jamfile) - { - load-aux $(jamfile-module) : [ path.native $(jamfile) ] ; - } - } - - # Now do some checks. - if $(.current-project) != $(saved-project) - { - errors.error "The value of the .current-project variable has magically" - : "changed after loading a Jamfile. This means some of the targets" - : "might be defined in the wrong project." - : "after loading" $(jamfile-module) - : "expected value" $(saved-project) - : "actual value" $(.current-project) ; - } - - if $(.global-build-dir) - { - local id = [ attribute $(jamfile-module) id ] ; - local project-root = [ attribute $(jamfile-module) project-root ] ; - local location = [ attribute $(jamfile-module) location ] ; - - if $(location) && $(project-root) = $(dir) - { - # This is Jamroot. - if ! $(id) - { - ECHO "warning: the --build-dir option was specified" ; - ECHO "warning: but Jamroot at '$(dir)'" ; - ECHO "warning: specified no project id" ; - ECHO "warning: the --build-dir option will be ignored" ; - } - } - } - } -} - - -rule mark-as-user ( module-name ) -{ - if USER_MODULE in [ RULENAMES ] - { - USER_MODULE $(module-name) ; - } -} - - -rule load-aux ( module-name : file ) -{ - mark-as-user $(module-name) ; - - module $(module-name) - { - include $(2) ; - local rules = [ RULENAMES $(1) ] ; - IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ; - } -} - - -.global-build-dir = [ MATCH --build-dir=(.*) : [ modules.peek : ARGV ] ] ; -if $(.global-build-dir) -{ - # If the option is specified several times, take the last value. - .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ; -} - - -# Initialize the module for a project. -# -rule initialize ( - module-name # The name of the project module. - : location ? # The location (directory) of the project to initialize. If - # not specified, a standalone project will be initialized. - : basename ? - ) -{ - if --debug-loading in [ modules.peek : ARGV ] - { - ECHO "Initializing project '$(module-name)'" ; - } - - # TODO: need to consider if standalone projects can do anything but define - # prebuilt targets. If so, we need to give it a more sensible "location", so - # that source paths are correct. - location ?= "" ; - # Create the module for the Jamfile first. - module $(module-name) - { - } - $(module-name).attributes = [ new project-attributes $(location) - $(module-name) ] ; - local attributes = $($(module-name).attributes) ; - - if $(location) - { - $(attributes).set source-location : [ path.make $(location) ] : exact ; - } - else if ! 
$(module-name) in test-config site-config user-config project-config - { - # This is a standalone project with known location. Set source location - # so that it can declare targets. This is intended so that you can put - # a .jam file in your sources and use it via 'using'. Standard modules - # (in 'tools' subdir) may not assume source dir is set. - local s = [ modules.binding $(module-name) ] ; - if ! $(s) - { - errors.error "Could not determine project location $(module-name)" ; - } - $(attributes).set source-location : $(s:D) : exact ; - } - - $(attributes).set requirements : [ property-set.empty ] : exact ; - $(attributes).set usage-requirements : [ property-set.empty ] : exact ; - - # Import rules common to all project modules from project-rules module, - # defined at the end of this file. - local rules = [ RULENAMES project-rules ] ; - IMPORT project-rules : $(rules) : $(module-name) : $(rules) ; - - local jamroot ; - - local parent-module ; - if $(module-name) = test-config - { - # No parent. - } - else if $(module-name) = site-config - { - parent-module = test-config ; - } - else if $(module-name) = user-config - { - parent-module = site-config ; - } - else if $(module-name) = project-config - { - parent-module = user-config ; - } - else - { - # We search for parent/project-root only if Jamfile was specified, i.e. - # if the project is not standalone. - if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ] - { - parent-module = [ load-parent $(location) ] ; - } - else - { - # It's either jamroot or standalone project. If it's jamroot, - # inherit from user-config. - if $(location) - { - # If project-config module exist, inherit from it. - if $(project-config.attributes) - { - parent-module = project-config ; - } - else - { - parent-module = user-config ; - } - jamroot = true ; - } - } - } - - if $(parent-module) - { - inherit-attributes $(module-name) : $(parent-module) ; - $(attributes).set parent-module : $(parent-module) : exact ; - } - - if $(jamroot) - { - $(attributes).set project-root : $(location) : exact ; - } - - local parent ; - if $(parent-module) - { - parent = [ target $(parent-module) ] ; - } - - if ! $(.target.$(module-name)) - { - .target.$(module-name) = [ new project-target $(module-name) - : $(module-name) $(parent) - : [ attribute $(module-name) requirements ] ] ; - - if --debug-loading in [ modules.peek : ARGV ] - { - ECHO "Assigned project target" $(.target.$(module-name)) - "to '$(module-name)'" ; - } - } - - .current-project = [ target $(module-name) ] ; -} - - -# Make 'project-module' inherit attributes of project root and parent module. -# -rule inherit-attributes ( project-module : parent-module ) -{ - local attributes = $($(project-module).attributes) ; - local pattributes = [ attributes $(parent-module) ] ; - # Parent module might be locationless configuration module. - if [ modules.binding $(parent-module) ] - { - $(attributes).set parent : [ path.parent - [ path.make [ modules.binding $(parent-module) ] ] ] ; - } - local v = [ $(pattributes).get project-root ] ; - $(attributes).set project-root : $(v) : exact ; - $(attributes).set default-build - : [ $(pattributes).get default-build ] ; - $(attributes).set requirements - : [ $(pattributes).get requirements ] : exact ; - $(attributes).set usage-requirements - : [ $(pattributes).get usage-requirements ] : exact ; - - local parent-build-dir = [ $(pattributes).get build-dir ] ; - if $(parent-build-dir) - { - # Have to compute relative path from parent dir to our dir. 
Convert both - # paths to absolute, since we cannot find relative path from ".." to - # ".". - - local location = [ attribute $(project-module) location ] ; - local parent-location = [ attribute $(parent-module) location ] ; - - local pwd = [ path.pwd ] ; - local parent-dir = [ path.root $(parent-location) $(pwd) ] ; - local our-dir = [ path.root $(location) $(pwd) ] ; - $(attributes).set build-dir : [ path.join $(parent-build-dir) - [ path.relative $(our-dir) $(parent-dir) ] ] : exact ; - } -} - - -# Associate the given id with the given project module. -# -rule register-id ( id : module ) -{ - $(id).jamfile-module = $(module) ; -} - - -# Class keeping all the attributes of a project. -# -# The standard attributes are "id", "location", "project-root", "parent" -# "requirements", "default-build", "source-location" and "projects-to-build". -# -class project-attributes -{ - import property ; - import property-set ; - import errors ; - import path ; - import print ; - import sequence ; - import project ; - - rule __init__ ( location project-module ) - { - self.location = $(location) ; - self.project-module = $(project-module) ; - } - - # Set the named attribute from the specification given by the user. The - # value actually set may be different. - # - rule set ( attribute : specification * - : exact ? # Sets value from 'specification' without any processing. - ) - { - if $(exact) - { - self.$(attribute) = $(specification) ; - } - else if $(attribute) = "requirements" - { - local result = [ property-set.refine-from-user-input - $(self.requirements) : $(specification) - : $(self.project-module) : $(self.location) ] ; - - if $(result[1]) = "@error" - { - errors.error Requirements for project at '$(self.location)' - conflict with parent's. : Explanation: $(result[2-]) ; - } - else - { - self.requirements = $(result) ; - } - } - else if $(attribute) = "usage-requirements" - { - local unconditional ; - for local p in $(specification) - { - local split = [ property.split-conditional $(p) ] ; - split ?= nothing $(p) ; - unconditional += $(split[2]) ; - } - - local non-free = [ property.remove free : $(unconditional) ] ; - if $(non-free) - { - errors.error usage-requirements $(specification) have non-free - properties $(non-free) ; - } - local t = [ property.translate-paths $(specification) - : $(self.location) ] ; - if $(self.usage-requirements) - { - self.usage-requirements = [ property-set.create - [ $(self.usage-requirements).raw ] $(t) ] ; - } - else - { - self.usage-requirements = [ property-set.create $(t) ] ; - } - } - else if $(attribute) = "default-build" - { - self.default-build = [ property.make $(specification) ] ; - } - else if $(attribute) = "source-location" - { - self.source-location = ; - for local src-path in $(specification) - { - self.source-location += [ path.root [ path.make $(src-path) ] - $(self.location) ] ; - } - } - else if $(attribute) = "build-dir" - { - self.build-dir = [ path.root - [ path.make $(specification) ] $(self.location) ] ; - } - else if $(attribute) = "id" - { - id = [ path.root $(specification) / ] ; - project.register-id $(id) : $(self.project-module) ; - self.id = $(id) ; - } - else if ! $(attribute) in "default-build" "location" "parent" - "projects-to-build" "project-root" "source-location" - { - errors.error Invalid project attribute '$(attribute)' specified for - project at '$(self.location)' ; - } - else - { - self.$(attribute) = $(specification) ; - } - } - - # Returns the value of the given attribute. 
- # - rule get ( attribute ) - { - return $(self.$(attribute)) ; - } - - # Prints the project attributes. - # - rule print ( ) - { - local id = $(self.id) ; id ?= (none) ; - local parent = $(self.parent) ; parent ?= (none) ; - print.section "'"$(id)"'" ; - print.list-start ; - print.list-item "Parent project:" $(parent) ; - print.list-item "Requirements:" [ $(self.requirements).raw ] ; - print.list-item "Default build:" $(self.default-build) ; - print.list-item "Source location:" $(self.source-location) ; - print.list-item "Projects to build:" - [ sequence.insertion-sort $(self.projects-to-build) ] ; - print.list-end ; - } -} - - -# Returns the project which is currently being loaded. -# -rule current ( ) -{ - return $(.current-project) ; -} - - -# Temporarily changes the current project to 'project'. Should be followed by -# 'pop-current'. -# -rule push-current ( project ) -{ - .saved-current-project += $(.current-project) ; - .current-project = $(project) ; -} - - -rule pop-current ( ) -{ - .current-project = $(.saved-current-project[-1]) ; - .saved-current-project = $(.saved-current-project[1--2]) ; -} - - -# Returns the project-attribute instance for the specified Jamfile module. -# -rule attributes ( project ) -{ - return $($(project).attributes) ; -} - - -# Returns the value of the specified attribute in the specified Jamfile module. -# -rule attribute ( project attribute ) -{ - return [ $($(project).attributes).get $(attribute) ] ; -} - - -# Returns the project target corresponding to the 'project-module'. -# -rule target ( project-module ) -{ - if ! $(.target.$(project-module)) - { - .target.$(project-module) = [ new project-target $(project-module) - : $(project-module) - : [ attribute $(project-module) requirements ] ] ; - } - return $(.target.$(project-module)) ; -} - - -# Use/load a project. -# -rule use ( id : location ) -{ - local saved-project = $(.current-project) ; - local project-module = [ project.load $(location) ] ; - local declared-id = [ project.attribute $(project-module) id ] ; - - if ! $(declared-id) || $(declared-id) != $(id) - { - # The project at 'location' either has no id or that id is not equal to - # the 'id' parameter. - if $($(id).jamfile-module) && ( $($(id).jamfile-module) != - $(project-module) ) - { - errors.user-error Attempt to redeclare already existing project id - '$(id)' - location '$(location)' ; - } - $(id).jamfile-module = $(project-module) ; - } - .current-project = $(saved-project) ; -} - - -# Defines a Boost.Build extension project. Such extensions usually contain -# library targets and features that can be used by many people. Even though -# extensions are really projects, they can be initialized as a module would be -# with the "using" (project.project-rules.using) mechanism. -# -rule extension ( id : options * : * ) -{ - # The caller is a standalone module for the extension. - local mod = [ CALLER_MODULE ] ; - - # We need to do the rest within the extension module. - module $(mod) - { - import path ; - - # Find the root project. - local root-project = [ project.current ] ; - root-project = [ $(root-project).project-module ] ; - while - [ project.attribute $(root-project) parent-module ] && - [ project.attribute $(root-project) parent-module ] != user-config - { - root-project = [ project.attribute $(root-project) parent-module ] ; - } - - # Create the project data, and bring in the project rules into the - # module. 
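As a sketch of how a project id registered via 'register-id' and 'use' is consumed from another Jamfile, with /mylib entirely hypothetical:

    use-project /mylib : ../mylib ;
    exe app : app.cpp /mylib//mylib ;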
- project.initialize $(__name__) : [ path.join [ project.attribute - $(root-project) location ] ext $(1:L) ] ; - - # Create the project itself, i.e. the attributes. All extensions are - # created in the "/ext" project space. - project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : - $(9) ; - local attributes = [ project.attributes $(__name__) ] ; - - # Inherit from the root project of whomever is defining us. - project.inherit-attributes $(__name__) : $(root-project) ; - $(attributes).set parent-module : $(root-project) : exact ; - } -} - - -rule glob-internal ( project : wildcards + : excludes * : rule-name ) -{ - local location = [ $(project).get source-location ] ; - - local result ; - local paths = [ path.$(rule-name) $(location) : - [ sequence.transform path.make : $(wildcards) ] : - [ sequence.transform path.make : $(excludes) ] ] ; - if $(wildcards:D) || $(rule-name) != glob - { - # The paths we have found are relative to the current directory, but the - # names specified in the sources list are assumed to be relative to the - # source directory of the corresponding project. So, just make the names - # absolute. - for local p in $(paths) - { - # If the path is below source location, use relative path. - # Otherwise, use full path just to avoid any ambiguities. - local rel = [ path.relative $(p) $(location) : no-error ] ; - if $(rel) = not-a-child - { - result += [ path.root $(p) [ path.pwd ] ] ; - } - else - { - result += $(rel) ; - } - } - } - else - { - # There were no wildcards in the directory path, so the files are all in - # the source directory of the project. Just drop the directory, instead - # of making paths absolute. - result = $(paths:D="") ; - } - - return $(result) ; -} - - -# This module defines rules common to all projects. -# -module project-rules -{ - rule using ( toolset-module : * ) - { - import toolset ; - import modules ; - import project ; - - # Temporarily change the search path so the module referred to by - # 'using' can be placed in the same directory as Jamfile. User will - # expect the module to be found even though the directory is not in - # BOOST_BUILD_PATH. - local x = [ modules.peek : BOOST_BUILD_PATH ] ; - local caller = [ CALLER_MODULE ] ; - local caller-location = [ modules.binding $(caller) ] ; - modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ; - toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - modules.poke : BOOST_BUILD_PATH : $(x) ; - - # The above might have clobbered .current-project. Restore the correct - # value. - modules.poke project : .current-project - : [ project.target $(caller) ] ; - } - - import modules ; - - rule import ( * : * : * ) - { - modules.import project ; - - local caller = [ CALLER_MODULE ] ; - local saved = [ modules.peek project : .current-project ] ; - module $(caller) - { - modules.import $(1) : $(2) : $(3) ; - } - modules.poke project : .current-project : $(saved) ; - } - - rule project ( id ? : options * : * ) - { - import errors ; - import path ; - import project ; - - local caller = [ CALLER_MODULE ] ; - local attributes = [ project.attributes $(caller) ] ; - if $(id) - { - $(attributes).set id : $(id) ; - } - - local explicit-build-dir ; - - for n in 2 3 4 5 6 7 8 9 - { - local option = $($(n)) ; - if $(option) - { - $(attributes).set $(option[1]) : $(option[2-]) ; - } - if $(option[1]) = "build-dir" - { - explicit-build-dir = [ path.make $(option[2-]) ] ; - } - } - - # If '--build-dir' is specified, change the build dir for the project. 
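The project-rules 'import' above pulls a Boost.Build module into the Jamfile's own module; a short sketch defining a custom feature this way (the feature name is made up):

    import feature ;
    feature.feature myapp-instrumentation : off on : propagated ;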
- local global-build-dir = - [ modules.peek project : .global-build-dir ] ; - - if $(global-build-dir) - { - local location = [ $(attributes).get location ] ; - # Project with an empty location is a 'standalone' project such as - # user-config or qt. It has no build dir. If we try to set build dir - # for user-config, we shall then try to inherit it, with either - # weird or wrong consequences. - if $(location) && $(location) = [ $(attributes).get project-root ] - { - # Re-read the project id, since it might have been changed in - # the project's attributes. - id = [ $(attributes).get id ] ; - # This is Jamroot. - if $(id) - { - if $(explicit-build-dir) && - [ path.is-rooted $(explicit-build-dir) ] - { - errors.user-error Absolute directory specified via - 'build-dir' project attribute : Do not know how to - combine that with the --build-dir option. ; - } - # Strip the leading slash from id. - local rid = [ MATCH /(.*) : $(id) ] ; - local p = [ path.join - $(global-build-dir) $(rid) $(explicit-build-dir) ] ; - - $(attributes).set build-dir : $(p) : exact ; - } - } - else - { - # Not Jamroot. - if $(explicit-build-dir) - { - errors.user-error When --build-dir is specified, the - 'build-dir' project : attribute is allowed only for - top-level 'project' invocations ; - } - } - } - } - - # Declare and set a project global constant. Project global constants are - # normal variables but should not be changed. They are applied to every - # child Jamfile. - # - rule constant ( - name # Variable name of the constant. - : value + # Value of the constant. - ) - { - import project ; - local caller = [ CALLER_MODULE ] ; - local p = [ project.target $(caller) ] ; - $(p).add-constant $(name) : $(value) ; - } - - # Declare and set a project global constant, whose value is a path. The path - # is adjusted to be relative to the invocation directory. The given value - # path is taken to be either absolute, or relative to this project root. - # - rule path-constant ( - name # Variable name of the constant. - : value + # Value of the constant. - ) - { - import project ; - local caller = [ CALLER_MODULE ] ; - local p = [ project.target $(caller) ] ; - $(p).add-constant $(name) : $(value) : path ; - } - - rule use-project ( id : where ) - { - import modules ; - # See comment in 'load' for explanation. - local caller = [ CALLER_MODULE ] ; - modules.poke $(caller) : .used-projects : - [ modules.peek $(caller) : .used-projects ] - $(id) $(where) ; - } - - rule build-project ( dir ) - { - import project ; - local caller = [ CALLER_MODULE ] ; - local attributes = [ project.attributes $(caller) ] ; - - local now = [ $(attributes).get projects-to-build ] ; - $(attributes).set projects-to-build : $(now) $(dir) ; - } - - rule explicit ( target-names * ) - { - import project ; - # If 'explicit' is used in a helper rule defined in Jamroot and - # inherited by children, then most of the time we want 'explicit' to - # operate on the Jamfile where the helper rule is invoked. 
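To illustrate the build-dir combination computed above (project name and directories are hypothetical), a Jamroot declaring

    project myproj : build-dir bin ;
    build-project src ;

and invoked as 'bjam --build-dir=/tmp/out' places build products under /tmp/out/myproj/bin; an absolute 'build-dir' attribute combined with --build-dir is rejected, as the error branch above shows.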
- local t = [ project.current ] ; - for local n in $(target-names) - { - $(t).mark-target-as-explicit $(n) ; - } - } - - rule always ( target-names * ) - { - import project ; - local t = [ project.current ] ; - for local n in $(target-names) - { - $(t).mark-target-as-always $(n) ; - } - } - - rule glob ( wildcards + : excludes * ) - { - import project ; - return [ project.glob-internal [ project.current ] : $(wildcards) : - $(excludes) : glob ] ; - } - - rule glob-tree ( wildcards + : excludes * ) - { - import project ; - - if $(wildcards:D) || $(excludes:D) - { - errors.user-error The patterns to 'glob-tree' may not include - directory ; - } - return [ project.glob-internal [ project.current ] : $(wildcards) : - $(excludes) : glob-tree ] ; - } - - # Calculates conditional requirements for multiple requirements at once. - # This is a shorthand to reduce duplication and to keep an inline - # declarative syntax. For example: - # - # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug : - # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ; - # - rule conditional ( condition + : requirements * ) - { - local condition = $(condition:J=,) ; - if [ MATCH (:) : $(condition) ] - { - return $(condition)$(requirements) ; - } - else - { - return $(condition):$(requirements) ; - } - } - - rule option ( name : value ) - { - if $(__name__) != site-config && $(__name__) != user-config && $(__name__) != project-config - { - import errors ; - errors.error "The 'option' rule may be used only in site-config or user-config" ; - } - import option ; - option.set $(name) : $(value) ; - } -} diff --git a/jam-files/boost-build/build/project.py b/jam-files/boost-build/build/project.py deleted file mode 100644 index 1e1e16fa..00000000 --- a/jam-files/boost-build/build/project.py +++ /dev/null @@ -1,1120 +0,0 @@ -# Status: ported. -# Base revision: 64488 - -# Copyright 2002, 2003 Dave Abrahams -# Copyright 2002, 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Implements project representation and loading. -# Each project is represented by -# - a module where all the Jamfile content live. -# - an instance of 'project-attributes' class. -# (given module name, can be obtained by 'attributes' rule) -# - an instance of 'project-target' class (from targets.jam) -# (given a module name, can be obtained by 'target' rule) -# -# Typically, projects are created as result of loading Jamfile, which is -# do by rules 'load' and 'initialize', below. First, module for Jamfile -# is loaded and new project-attributes instance is created. Some rules -# necessary for project are added to the module (see 'project-rules' module) -# at the bottom of this file. -# Default project attributes are set (inheriting attributes of parent project, if -# it exists). After that, Jamfile is read. It can declare its own attributes, -# via 'project' rule, which will be combined with already set attributes. -# -# -# The 'project' rule can also declare project id, which will be associated with -# the project module. -# -# There can also be 'standalone' projects. They are created by calling 'initialize' -# on arbitrary module, and not specifying location. After the call, the module can -# call 'project' rule, declare main target and behave as regular projects. However, -# since it's not associated with any location, it's better declare only prebuilt -# targets. 
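Since a standalone project such as user-config has no location, it is effectively limited to toolset initialization and prebuilt targets; a minimal illustrative user-config.jam under that assumption:

    using gcc ;
    lib z : : <name>z <search>/usr/lib ;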
-# -# The list of all loaded Jamfile is stored in variable .project-locations. It's possible -# to obtain module name for a location using 'module-name' rule. The standalone projects -# are not recorded, the only way to use them is by project id. - -import b2.util.path -from b2.build import property_set, property -from b2.build.errors import ExceptionWithUserContext -import b2.build.targets - -import bjam - -import re -import sys -import os -import string -import imp -import traceback -import b2.util.option as option - -from b2.util import record_jam_to_value_mapping, qualify_jam_action - -class ProjectRegistry: - - def __init__(self, manager, global_build_dir): - self.manager = manager - self.global_build_dir = global_build_dir - self.project_rules_ = ProjectRules(self) - - # The target corresponding to the project being loaded now - self.current_project = None - - # The set of names of loaded project modules - self.jamfile_modules = {} - - # Mapping from location to module name - self.location2module = {} - - # Mapping from project id to project module - self.id2module = {} - - # Map from Jamfile directory to parent Jamfile/Jamroot - # location. - self.dir2parent_jamfile = {} - - # Map from directory to the name of Jamfile in - # that directory (or None). - self.dir2jamfile = {} - - # Map from project module to attributes object. - self.module2attributes = {} - - # Map from project module to target for the project - self.module2target = {} - - # Map from names to Python modules, for modules loaded - # via 'using' and 'import' rules in Jamfiles. - self.loaded_tool_modules_ = {} - - self.loaded_tool_module_path_ = {} - - # Map from project target to the list of - # (id,location) pairs corresponding to all 'use-project' - # invocations. - # TODO: should not have a global map, keep this - # in ProjectTarget. - self.used_projects = {} - - self.saved_current_project = [] - - self.JAMROOT = self.manager.getenv("JAMROOT"); - - # Note the use of character groups, as opposed to listing - # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate - # matches on windows and would have to eliminate duplicates. - if not self.JAMROOT: - self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"] - - # Default patterns to search for the Jamfiles to use for build - # declarations. - self.JAMFILE = self.manager.getenv("JAMFILE") - - if not self.JAMFILE: - self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile", - "[Jj]amfile.jam"] - - - def load (self, jamfile_location): - """Loads jamfile at the given location. After loading, project global - file and jamfile needed by the loaded one will be loaded recursively. - If the jamfile at that location is loaded already, does nothing. - Returns the project module for the Jamfile.""" - - absolute = os.path.join(os.getcwd(), jamfile_location) - absolute = os.path.normpath(absolute) - jamfile_location = b2.util.path.relpath(os.getcwd(), absolute) - - if "--debug-loading" in self.manager.argv(): - print "Loading Jamfile at '%s'" % jamfile_location - - - mname = self.module_name(jamfile_location) - # If Jamfile is already loaded, don't try again. - if not mname in self.jamfile_modules: - - self.load_jamfile(jamfile_location, mname) - - # We want to make sure that child project are loaded only - # after parent projects. In particular, because parent projects - # define attributes whch are inherited by children, and we don't - # want children to be loaded before parents has defined everything. 
- # - # While "build-project" and "use-project" can potentially refer - # to child projects from parent projects, we don't immediately - # load child projects when seing those attributes. Instead, - # we record the minimal information that will be used only later. - - self.load_used_projects(mname) - - return mname - - def load_used_projects(self, module_name): - # local used = [ modules.peek $(module-name) : .used-projects ] ; - used = self.used_projects[module_name] - - location = self.attribute(module_name, "location") - for u in used: - id = u[0] - where = u[1] - - self.use(id, os.path.join(location, where)) - - def load_parent(self, location): - """Loads parent of Jamfile at 'location'. - Issues an error if nothing is found.""" - - found = b2.util.path.glob_in_parents( - location, self.JAMROOT + self.JAMFILE) - - if not found: - print "error: Could not find parent for project at '%s'" % location - print "error: Did not find Jamfile or project-root.jam in any parent directory." - sys.exit(1) - - return self.load(os.path.dirname(found[0])) - - def act_as_jamfile(self, module, location): - """Makes the specified 'module' act as if it were a regularly loaded Jamfile - at 'location'. If Jamfile is already located for that location, it's an - error.""" - - if self.module_name(location) in self.jamfile_modules: - self.manager.errors()( - "Jamfile was already loaded for '%s'" % location) - - # Set up non-default mapping from location to module. - self.location2module[location] = module - - # Add the location to the list of project locations - # so that we don't try to load Jamfile in future - self.jamfile_modules.append(location) - - self.initialize(module, location) - - def find(self, name, current_location): - """Given 'name' which can be project-id or plain directory name, - return project module corresponding to that id or directory. - Returns nothing of project is not found.""" - - project_module = None - - # Try interpreting name as project id. - if name[0] == '/': - project_module = self.id2module.get(name) - - if not project_module: - location = os.path.join(current_location, name) - # If no project is registered for the given location, try to - # load it. First see if we have Jamfile. If not we might have project - # root, willing to act as Jamfile. In that case, project-root - # must be placed in the directory referred by id. - - project_module = self.module_name(location) - if not project_module in self.jamfile_modules: - if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE): - project_module = self.load(location) - else: - project_module = None - - return project_module - - def module_name(self, jamfile_location): - """Returns the name of module corresponding to 'jamfile-location'. - If no module corresponds to location yet, associates default - module name with that location.""" - module = self.location2module.get(jamfile_location) - if not module: - # Root the path, so that locations are always umbiguious. - # Without this, we can't decide if '../../exe/program1' and '.' - # are the same paths, or not. - jamfile_location = os.path.realpath( - os.path.join(os.getcwd(), jamfile_location)) - module = "Jamfile<%s>" % jamfile_location - self.location2module[jamfile_location] = module - return module - - def find_jamfile (self, dir, parent_root=0, no_errors=0): - """Find the Jamfile at the given location. This returns the - exact names of all the Jamfiles in the given directory. 
The optional - parent-root argument causes this to search not the given directory - but the ones above it up to the directory given in it.""" - - # Glob for all the possible Jamfiles according to the match pattern. - # - jamfile_glob = None - if parent_root: - parent = self.dir2parent_jamfile.get(dir) - if not parent: - parent = b2.util.path.glob_in_parents(dir, - self.JAMFILE) - self.dir2parent_jamfile[dir] = parent - jamfile_glob = parent - else: - jamfile = self.dir2jamfile.get(dir) - if not jamfile: - jamfile = b2.util.path.glob([dir], self.JAMFILE) - self.dir2jamfile[dir] = jamfile - jamfile_glob = jamfile - - if len(jamfile_glob) > 1: - # Multiple Jamfiles found in the same place. Warn about this. - # And ensure we use only one of them. - # As a temporary convenience measure, if there's Jamfile.v2 amount - # found files, suppress the warning and use it. - # - pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)" - v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)] - if len(v2_jamfiles) == 1: - jamfile_glob = v2_jamfiles - else: - print """warning: Found multiple Jamfiles at '%s'!""" % (dir) - for j in jamfile_glob: - print " -", j - print "Loading the first one" - - # Could not find it, error. - if not no_errors and not jamfile_glob: - self.manager.errors()( - """Unable to load Jamfile. -Could not find a Jamfile in directory '%s' -Attempted to find it with pattern '%s'. -Please consult the documentation at 'http://boost.org/boost-build2'.""" - % (dir, string.join(self.JAMFILE))) - - if jamfile_glob: - return jamfile_glob[0] - - def load_jamfile(self, dir, jamfile_module): - """Load a Jamfile at the given directory. Returns nothing. - Will attempt to load the file as indicated by the JAMFILE patterns. - Effect of calling this rule twice with the same 'dir' is underfined.""" - - # See if the Jamfile is where it should be. - is_jamroot = False - jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT) - if not jamfile_to_load: - jamfile_to_load = self.find_jamfile(dir) - else: - if len(jamfile_to_load) > 1: - get_manager().errors()("Multiple Jamfiles found at '%s'\n" +\ - "Filenames are: %s" - % (dir, [os.path.basename(j) for j in jamfile_to_load])) - - is_jamroot = True - jamfile_to_load = jamfile_to_load[0] - - dir = os.path.dirname(jamfile_to_load) - if not dir: - dir = "." - - self.used_projects[jamfile_module] = [] - - # Now load the Jamfile in it's own context. - # The call to 'initialize' may load parent Jamfile, which might have - # 'use-project' statement that causes a second attempt to load the - # same project we're loading now. Checking inside .jamfile-modules - # prevents that second attempt from messing up. - if not jamfile_module in self.jamfile_modules: - self.jamfile_modules[jamfile_module] = True - - # Initialize the jamfile module before loading. - # - self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load)) - - saved_project = self.current_project - - bjam.call("load", jamfile_module, jamfile_to_load) - basename = os.path.basename(jamfile_to_load) - - if is_jamroot: - jamfile = self.find_jamfile(dir, no_errors=True) - if jamfile: - bjam.call("load", jamfile_module, jamfile) - - # Now do some checks - if self.current_project != saved_project: - self.manager.errors()( -"""The value of the .current-project variable -has magically changed after loading a Jamfile. -This means some of the targets might be defined a the wrong project. 
-after loading %s -expected value %s -actual value %s""" % (jamfile_module, saved_project, self.current_project)) - - if self.global_build_dir: - id = self.attributeDefault(jamfile_module, "id", None) - project_root = self.attribute(jamfile_module, "project-root") - location = self.attribute(jamfile_module, "location") - - if location and project_root == dir: - # This is Jamroot - if not id: - # FIXME: go via errors module, so that contexts are - # shown? - print "warning: the --build-dir option was specified" - print "warning: but Jamroot at '%s'" % dir - print "warning: specified no project id" - print "warning: the --build-dir option will be ignored" - - - def load_standalone(self, jamfile_module, file): - """Loads 'file' as standalone project that has no location - associated with it. This is mostly useful for user-config.jam, - which should be able to define targets, but although it has - some location in filesystem, we don't want any build to - happen in user's HOME, for example. - - The caller is required to never call this method twice on - the same file. - """ - - self.used_projects[jamfile_module] = [] - bjam.call("load", jamfile_module, file) - self.load_used_projects(jamfile_module) - - def is_jamroot(self, basename): - match = [ pat for pat in self.JAMROOT if re.match(pat, basename)] - if match: - return 1 - else: - return 0 - - def initialize(self, module_name, location=None, basename=None): - """Initialize the module for a project. - - module-name is the name of the project module. - location is the location (directory) of the project to initialize. - If not specified, stanalone project will be initialized - """ - - if "--debug-loading" in self.manager.argv(): - print "Initializing project '%s'" % module_name - - # TODO: need to consider if standalone projects can do anything but defining - # prebuilt targets. If so, we need to give more sensible "location", so that - # source paths are correct. - if not location: - location = "" - - attributes = ProjectAttributes(self.manager, location, module_name) - self.module2attributes[module_name] = attributes - - python_standalone = False - if location: - attributes.set("source-location", [location], exact=1) - elif not module_name in ["test-config", "site-config", "user-config", "project-config"]: - # This is a standalone project with known location. Set source location - # so that it can declare targets. This is intended so that you can put - # a .jam file in your sources and use it via 'using'. Standard modules - # (in 'tools' subdir) may not assume source dir is set. 
- module = sys.modules[module_name] - attributes.set("source-location", self.loaded_tool_module_path_[module_name], exact=1) - python_standalone = True - - attributes.set("requirements", property_set.empty(), exact=True) - attributes.set("usage-requirements", property_set.empty(), exact=True) - attributes.set("default-build", property_set.empty(), exact=True) - attributes.set("projects-to-build", [], exact=True) - attributes.set("project-root", None, exact=True) - attributes.set("build-dir", None, exact=True) - - self.project_rules_.init_project(module_name, python_standalone) - - jamroot = False - - parent_module = None; - if module_name == "test-config": - # No parent - pass - elif module_name == "site-config": - parent_module = "test-config" - elif module_name == "user-config": - parent_module = "site-config" - elif module_name == "project-config": - parent_module = "user-config" - elif location and not self.is_jamroot(basename): - # We search for parent/project-root only if jamfile was specified - # --- i.e - # if the project is not standalone. - parent_module = self.load_parent(location) - else: - # It's either jamroot, or standalone project. - # If it's jamroot, inherit from user-config. - if location: - # If project-config module exist, inherit from it. - if self.module2attributes.has_key("project-config"): - parent_module = "project-config" - else: - parent_module = "user-config" ; - - jamroot = True ; - - if parent_module: - self.inherit_attributes(module_name, parent_module) - attributes.set("parent-module", parent_module, exact=1) - - if jamroot: - attributes.set("project-root", location, exact=1) - - parent = None - if parent_module: - parent = self.target(parent_module) - - if not self.module2target.has_key(module_name): - target = b2.build.targets.ProjectTarget(self.manager, - module_name, module_name, parent, - self.attribute(module_name,"requirements"), - # FIXME: why we need to pass this? It's not - # passed in jam code. - self.attribute(module_name, "default-build")) - self.module2target[module_name] = target - - self.current_project = self.target(module_name) - - def inherit_attributes(self, project_module, parent_module): - """Make 'project-module' inherit attributes of project - root and parent module.""" - - attributes = self.module2attributes[project_module] - pattributes = self.module2attributes[parent_module] - - # Parent module might be locationless user-config. - # FIXME: - #if [ modules.binding $(parent-module) ] - #{ - # $(attributes).set parent : [ path.parent - # [ path.make [ modules.binding $(parent-module) ] ] ] ; - # } - - attributes.set("project-root", pattributes.get("project-root"), exact=True) - attributes.set("default-build", pattributes.get("default-build"), exact=True) - attributes.set("requirements", pattributes.get("requirements"), exact=True) - attributes.set("usage-requirements", - pattributes.get("usage-requirements"), exact=1) - - parent_build_dir = pattributes.get("build-dir") - - if parent_build_dir: - # Have to compute relative path from parent dir to our dir - # Convert both paths to absolute, since we cannot - # find relative path from ".." to "." 
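# Illustrative sketch (not from the patch) of the parent chain initialize()
# sets up above for the configuration modules: each one inherits attributes
# from its predecessor, and a Jamroot inherits from project-config when that
# module exists, otherwise from user-config. Ordinary Jamfiles instead load
# and inherit from their parent project, which is simplified away here.
CONFIG_PARENTS = {
    "test-config": None,
    "site-config": "test-config",
    "user-config": "site-config",
    "project-config": "user-config",
}

def parent_of(module_name, have_project_config=True):
    if module_name in CONFIG_PARENTS:
        return CONFIG_PARENTS[module_name]
    # Simplified: treat anything else as a Jamroot falling back to the chain.
    return "project-config" if have_project_config else "user-config"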
- - location = attributes.get("location") - parent_location = pattributes.get("location") - - our_dir = os.path.join(os.getcwd(), location) - parent_dir = os.path.join(os.getcwd(), parent_location) - - build_dir = os.path.join(parent_build_dir, - os.path.relpath(our_dir, parent_dir)) - attributes.set("build-dir", build_dir, exact=True) - - def register_id(self, id, module): - """Associate the given id with the given project module.""" - self.id2module[id] = module - - def current(self): - """Returns the project which is currently being loaded.""" - return self.current_project - - def set_current(self, c): - self.current_project = c - - def push_current(self, project): - """Temporary changes the current project to 'project'. Should - be followed by 'pop-current'.""" - self.saved_current_project.append(self.current_project) - self.current_project = project - - def pop_current(self): - self.current_project = self.saved_current_project[-1] - del self.saved_current_project[-1] - - def attributes(self, project): - """Returns the project-attribute instance for the - specified jamfile module.""" - return self.module2attributes[project] - - def attribute(self, project, attribute): - """Returns the value of the specified attribute in the - specified jamfile module.""" - return self.module2attributes[project].get(attribute) - try: - return self.module2attributes[project].get(attribute) - except: - raise BaseException("No attribute '%s' for project" % (attribute, project)) - - def attributeDefault(self, project, attribute, default): - """Returns the value of the specified attribute in the - specified jamfile module.""" - return self.module2attributes[project].getDefault(attribute, default) - - def target(self, project_module): - """Returns the project target corresponding to the 'project-module'.""" - if not self.module2target.has_key(project_module): - self.module2target[project_module] = \ - b2.build.targets.ProjectTarget(project_module, project_module, - self.attribute(project_module, "requirements")) - - return self.module2target[project_module] - - def use(self, id, location): - # Use/load a project. - saved_project = self.current_project - project_module = self.load(location) - declared_id = self.attributeDefault(project_module, "id", "") - - if not declared_id or declared_id != id: - # The project at 'location' either have no id or - # that id is not equal to the 'id' parameter. - if self.id2module.has_key(id) and self.id2module[id] != project_module: - self.manager.errors()( -"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location)) - self.id2module[id] = project_module - - self.current_module = saved_project - - def add_rule(self, name, callable): - """Makes rule 'name' available to all subsequently loaded Jamfiles. - - Calling that rule wil relay to 'callable'.""" - self.project_rules_.add_rule(name, callable) - - def project_rules(self): - return self.project_rules_ - - def glob_internal(self, project, wildcards, excludes, rule_name): - location = project.get("source-location")[0] - - result = [] - callable = b2.util.path.__dict__[rule_name] - - paths = callable([location], wildcards, excludes) - has_dir = 0 - for w in wildcards: - if os.path.dirname(w): - has_dir = 1 - break - - if has_dir or rule_name != "glob": - result = [] - # The paths we've found are relative to current directory, - # but the names specified in sources list are assumed to - # be relative to source directory of the corresponding - # prject. Either translate them or make absolute. 
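# Illustrative sketch (not from the patch) of the build-dir inheritance done
# in inherit_attributes above: the child's build dir is the parent's build dir
# plus the child's path relative to the parent. The paths below are hypothetical.
import os

parent_build_dir = "bin.v2"
parent_dir = "/src/project"
our_dir = "/src/project/libs/net"

build_dir = os.path.join(parent_build_dir, os.path.relpath(our_dir, parent_dir))
# build_dir == 'bin.v2/libs/net'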
- - for p in paths: - rel = os.path.relpath(p, location) - # If the path is below source location, use relative path. - if not ".." in rel: - result.append(rel) - else: - # Otherwise, use full path just to avoid any ambiguities. - result.append(os.path.abspath(p)) - - else: - # There were not directory in wildcard, so the files are all - # in the source directory of the project. Just drop the - # directory, instead of making paths absolute. - result = [os.path.basename(p) for p in paths] - - return result - - def load_module(self, name, extra_path=None): - """Load a Python module that should be useable from Jamfiles. - - There are generally two types of modules Jamfiles might want to - use: - - Core Boost.Build. Those are imported using plain names, e.g. - 'toolset', so this function checks if we have module named - b2.package.module already. - - Python modules in the same directory as Jamfile. We don't - want to even temporary add Jamfile's directory to sys.path, - since then we might get naming conflicts between standard - Python modules and those. - """ - - # See if we loaded module of this name already - existing = self.loaded_tool_modules_.get(name) - if existing: - return existing - - # See if we have a module b2.whatever.<name>, where <name> - # is what is passed to this function - modules = sys.modules - for class_name in modules: - parts = class_name.split('.') - if name is class_name or parts[0] == "b2" \ - and parts[-1] == name.replace("-", "_"): - module = modules[class_name] - self.loaded_tool_modules_[name] = module - return module - - # Lookup a module in BOOST_BUILD_PATH - path = extra_path - if not path: - path = [] - path.extend(self.manager.boost_build_path()) - location = None - for p in path: - l = os.path.join(p, name + ".py") - if os.path.exists(l): - location = l - break - - if not location: - self.manager.errors()("Cannot find module '%s'" % name) - - mname = name + "__for_jamfile" - file = open(location) - try: - # TODO: this means we'll never make use of .pyc module, - # which might be a problem, or not. - self.loaded_tool_module_path_[mname] = location - module = imp.load_module(mname, file, os.path.basename(location), - (".py", "r", imp.PY_SOURCE)) - self.loaded_tool_modules_[name] = module - return module - finally: - file.close() - - - -# FIXME: -# Defines a Boost.Build extension project. Such extensions usually -# contain library targets and features that can be used by many people. -# Even though extensions are really projects, they can be initialize as -# a module would be with the "using" (project.project-rules.using) -# mechanism. -#rule extension ( id : options * : * ) -#{ -# # The caller is a standalone module for the extension. -# local mod = [ CALLER_MODULE ] ; -# -# # We need to do the rest within the extension module. -# module $(mod) -# { -# import path ; -# -# # Find the root project. -# local root-project = [ project.current ] ; -# root-project = [ $(root-project).project-module ] ; -# while -# [ project.attribute $(root-project) parent-module ] && -# [ project.attribute $(root-project) parent-module ] != user-config -# { -# root-project = [ project.attribute $(root-project) parent-module ] ; -# } -# -# # Create the project data, and bring in the project rules -# # into the module. -# project.initialize $(__name__) : -# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ; -# -# # Create the project itself, i.e. the attributes. -# # All extensions are created in the "/ext" project space. 
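# Illustrative sketch (not from the patch) of the path translation performed
# by glob_internal above: a match below the project's source location is
# reported as a relative name, anything else is made absolute to avoid
# ambiguity. The paths below are made up.
import os

location = "/src/project"
paths = ["/src/project/a.cpp", "/tmp/generated/b.cpp"]

result = []
for p in paths:
    rel = os.path.relpath(p, location)
    result.append(rel if ".." not in rel else os.path.abspath(p))
# result == ['a.cpp', '/tmp/generated/b.cpp']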
-# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -# local attributes = [ project.attributes $(__name__) ] ; -# -# # Inherit from the root project of whomever is defining us. -# project.inherit-attributes $(__name__) : $(root-project) ; -# $(attributes).set parent-module : $(root-project) : exact ; -# } -#} - - -class ProjectAttributes: - """Class keeping all the attributes of a project. - - The standard attributes are 'id', "location", "project-root", "parent" - "requirements", "default-build", "source-location" and "projects-to-build". - """ - - def __init__(self, manager, location, project_module): - self.manager = manager - self.location = location - self.project_module = project_module - self.attributes = {} - self.usage_requirements = None - - def set(self, attribute, specification, exact=False): - """Set the named attribute from the specification given by the user. - The value actually set may be different.""" - - if exact: - self.__dict__[attribute] = specification - - elif attribute == "requirements": - self.requirements = property_set.refine_from_user_input( - self.requirements, specification, - self.project_module, self.location) - - elif attribute == "usage-requirements": - unconditional = [] - for p in specification: - split = property.split_conditional(p) - if split: - unconditional.append(split[1]) - else: - unconditional.append(p) - - non_free = property.remove("free", unconditional) - if non_free: - get_manager().errors()("usage-requirements %s have non-free properties %s" \ - % (specification, non_free)) - - t = property.translate_paths( - property.create_from_strings(specification, allow_condition=True), - self.location) - - existing = self.__dict__.get("usage-requirements") - if existing: - new = property_set.create(existing.all() + t) - else: - new = property_set.create(t) - self.__dict__["usage-requirements"] = new - - - elif attribute == "default-build": - self.__dict__["default-build"] = property_set.create(specification) - - elif attribute == "source-location": - source_location = [] - for path in specification: - source_location.append(os.path.join(self.location, path)) - self.__dict__["source-location"] = source_location - - elif attribute == "build-dir": - self.__dict__["build-dir"] = os.path.join(self.location, specification[0]) - - elif attribute == "id": - id = specification[0] - if id[0] != '/': - id = "/" + id - self.manager.projects().register_id(id, self.project_module) - self.__dict__["id"] = id - - elif not attribute in ["default-build", "location", - "source-location", "parent", - "projects-to-build", "project-root"]: - self.manager.errors()( -"""Invalid project attribute '%s' specified -for project at '%s'""" % (attribute, self.location)) - else: - self.__dict__[attribute] = specification - - def get(self, attribute): - return self.__dict__[attribute] - - def getDefault(self, attribute, default): - return self.__dict__.get(attribute, default) - - def dump(self): - """Prints the project attributes.""" - id = self.get("id") - if not id: - id = "(none)" - else: - id = id[0] - - parent = self.get("parent") - if not parent: - parent = "(none)" - else: - parent = parent[0] - - print "'%s'" % id - print "Parent project:%s", parent - print "Requirements:%s", self.get("requirements") - print "Default build:%s", string.join(self.get("debuild-build")) - print "Source location:%s", string.join(self.get("source-location")) - print "Projects to build:%s", string.join(self.get("projects-to-build").sort()); - -class ProjectRules: - 
"""Class keeping all rules that are made available to Jamfile.""" - - def __init__(self, registry): - self.registry = registry - self.manager_ = registry.manager - self.rules = {} - self.local_names = [x for x in self.__class__.__dict__ - if x not in ["__init__", "init_project", "add_rule", - "error_reporting_wrapper", "add_rule_for_type", "reverse"]] - self.all_names_ = [x for x in self.local_names] - - def _import_rule(self, bjam_module, name, callable): - if hasattr(callable, "bjam_signature"): - bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature) - else: - bjam.import_rule(bjam_module, name, self.make_wrapper(callable)) - - - def add_rule_for_type(self, type): - rule_name = type.lower().replace("_", "-") - - def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []): - return self.manager_.targets().create_typed_target( - type, self.registry.current(), name[0], sources, - requirements, default_build, usage_requirements) - - self.add_rule(rule_name, xpto) - - def add_rule(self, name, callable): - self.rules[name] = callable - self.all_names_.append(name) - - # Add new rule at global bjam scope. This might not be ideal, - # added because if a jamroot does 'import foo' where foo calls - # add_rule, we need to import new rule to jamroot scope, and - # I'm lazy to do this now. - self._import_rule("", name, callable) - - def all_names(self): - return self.all_names_ - - def call_and_report_errors(self, callable, *args, **kw): - result = None - try: - self.manager_.errors().push_jamfile_context() - result = callable(*args, **kw) - except ExceptionWithUserContext, e: - e.report() - except Exception, e: - try: - self.manager_.errors().handle_stray_exception (e) - except ExceptionWithUserContext, e: - e.report() - finally: - self.manager_.errors().pop_jamfile_context() - - return result - - def make_wrapper(self, callable): - """Given a free-standing function 'callable', return a new - callable that will call 'callable' and report all exceptins, - using 'call_and_report_errors'.""" - def wrapper(*args, **kw): - return self.call_and_report_errors(callable, *args, **kw) - return wrapper - - def init_project(self, project_module, python_standalone=False): - - if python_standalone: - m = sys.modules[project_module] - - for n in self.local_names: - if n != "import_": - setattr(m, n, getattr(self, n)) - - for n in self.rules: - setattr(m, n, self.rules[n]) - - return - - for n in self.local_names: - # Using 'getattr' here gives us a bound method, - # while using self.__dict__[r] would give unbound one. - v = getattr(self, n) - if callable(v): - if n == "import_": - n = "import" - else: - n = string.replace(n, "_", "-") - - self._import_rule(project_module, n, v) - - for n in self.rules: - self._import_rule(project_module, n, self.rules[n]) - - def project(self, *args): - - jamfile_module = self.registry.current().project_module() - attributes = self.registry.attributes(jamfile_module) - - id = None - if args and args[0]: - id = args[0][0] - args = args[1:] - - if id: - attributes.set('id', [id]) - - explicit_build_dir = None - for a in args: - if a: - attributes.set(a[0], a[1:], exact=0) - if a[0] == "build-dir": - explicit_build_dir = a[1] - - # If '--build-dir' is specified, change the build dir for the project. - if self.registry.global_build_dir: - - location = attributes.get("location") - # Project with empty location is 'standalone' project, like - # user-config, or qt. It has no build dir. 
- # If we try to set build dir for user-config, we'll then - # try to inherit it, with either weird, or wrong consequences. - if location and location == attributes.get("project-root"): - # Re-read the project id, since it might have been changed in - # the project's attributes. - id = attributes.get('id') - - # This is Jamroot. - if id: - if explicit_build_dir and os.path.isabs(explicit_build_dir): - self.registry.manager.errors()( -"""Absolute directory specified via 'build-dir' project attribute -Don't know how to combine that with the --build-dir option.""") - - rid = id - if rid[0] == '/': - rid = rid[1:] - - p = os.path.join(self.registry.global_build_dir, rid) - if explicit_build_dir: - p = os.path.join(p, explicit_build_dir) - attributes.set("build-dir", p, exact=1) - elif explicit_build_dir: - self.registry.manager.errors()( -"""When --build-dir is specified, the 'build-dir' -attribute is allowed only for top-level 'project' invocations""") - - def constant(self, name, value): - """Declare and set a project global constant. - Project global constants are normal variables but should - not be changed. They are applied to every child Jamfile.""" - m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>" - self.registry.current().add_constant(name[0], value) - - def path_constant(self, name, value): - """Declare and set a project global constant, whose value is a path. The - path is adjusted to be relative to the invocation directory. The given - value path is taken to be either absolute, or relative to this project - root.""" - if len(value) > 1: - self.registry.manager.error()("path constant should have one element") - self.registry.current().add_constant(name[0], value[0], path=1) - - def use_project(self, id, where): - # See comment in 'load' for explanation why we record the - # parameters as opposed to loading the project now. - m = self.registry.current().project_module(); - self.registry.used_projects[m].append((id[0], where[0])) - - def build_project(self, dir): - assert(isinstance(dir, list)) - jamfile_module = self.registry.current().project_module() - attributes = self.registry.attributes(jamfile_module) - now = attributes.get("projects-to-build") - attributes.set("projects-to-build", now + dir, exact=True) - - def explicit(self, target_names): - self.registry.current().mark_targets_as_explicit(target_names) - - def always(self, target_names): - self.registry.current().mark_targets_as_alays(target_names) - - def glob(self, wildcards, excludes=None): - return self.registry.glob_internal(self.registry.current(), - wildcards, excludes, "glob") - - def glob_tree(self, wildcards, excludes=None): - bad = 0 - for p in wildcards: - if os.path.dirname(p): - bad = 1 - - if excludes: - for p in excludes: - if os.path.dirname(p): - bad = 1 - - if bad: - self.registry.manager.errors()( -"The patterns to 'glob-tree' may not include directory") - return self.registry.glob_internal(self.registry.current(), - wildcards, excludes, "glob_tree") - - - def using(self, toolset, *args): - # The module referred by 'using' can be placed in - # the same directory as Jamfile, and the user - # will expect the module to be found even though - # the directory is not in BOOST_BUILD_PATH. - # So temporary change the search path. 
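# Illustrative sketch (not from the patch) of how project() above combines a
# global --build-dir with the Jamroot's project id and an optional relative
# 'build-dir' attribute. The values are hypothetical.
import os

global_build_dir = "/tmp/build"   # from --build-dir
project_id = "/mylib"             # Jamroot project id
explicit_build_dir = "bin"        # relative 'build-dir' attribute, if any

p = os.path.join(global_build_dir, project_id.lstrip("/"))
if explicit_build_dir:
    p = os.path.join(p, explicit_build_dir)
# p == '/tmp/build/mylib/bin'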
- current = self.registry.current() - location = current.get('location') - - m = self.registry.load_module(toolset[0], [location]) - if not m.__dict__.has_key("init"): - self.registry.manager.errors()( - "Tool module '%s' does not define the 'init' method" % toolset[0]) - m.init(*args) - - # The above might have clobbered .current-project. Restore the correct - # value. - self.registry.set_current(current) - - def import_(self, name, names_to_import=None, local_names=None): - - name = name[0] - py_name = name - if py_name == "os": - py_name = "os_j" - jamfile_module = self.registry.current().project_module() - attributes = self.registry.attributes(jamfile_module) - location = attributes.get("location") - - saved = self.registry.current() - - m = self.registry.load_module(py_name, [location]) - - for f in m.__dict__: - v = m.__dict__[f] - f = f.replace("_", "-") - if callable(v): - qn = name + "." + f - self._import_rule(jamfile_module, qn, v) - record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v) - - - if names_to_import: - if not local_names: - local_names = names_to_import - - if len(names_to_import) != len(local_names): - self.registry.manager.errors()( -"""The number of names to import and local names do not match.""") - - for n, l in zip(names_to_import, local_names): - self._import_rule(jamfile_module, l, m.__dict__[n]) - - self.registry.set_current(saved) - - def conditional(self, condition, requirements): - """Calculates conditional requirements for multiple requirements - at once. This is a shorthand to be reduce duplication and to - keep an inline declarative syntax. For example: - - lib x : x.cpp : [ conditional <toolset>gcc <variant>debug : - <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ; - """ - - c = string.join(condition, ",") - if c.find(":") != -1: - return [c + r for r in requirements] - else: - return [c + ":" + r for r in requirements] - - def option(self, name, value): - name = name[0] - if not name in ["site-config", "user-config", "project-config"]: - get_manager().errors()("The 'option' rule may be used only in site-config or user-config") - - option.set(name, value[0]) diff --git a/jam-files/boost-build/build/property-set.jam b/jam-files/boost-build/build/property-set.jam deleted file mode 100644 index 70fd90cd..00000000 --- a/jam-files/boost-build/build/property-set.jam +++ /dev/null @@ -1,481 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import feature ; -import path ; -import project ; -import property ; -import sequence ; -import set ; -import option ; - -# Class for storing a set of properties. -# -# There is 1<->1 correspondence between identity and value. No two instances -# of the class are equal. To maintain this property, the 'property-set.create' -# rule should be used to create new instances. Instances are immutable. -# -# Each property is classified with regard to its effect on build results. -# Incidental properties have no effect on build results, from Boost.Build's -# point of view. Others are either free, or non-free and we refer to non-free -# ones as 'base'. Each property belongs to exactly one of those categories. -# -# It is possible to get a list of properties belonging to each category as -# well as a list of properties with a specific attribute. 
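# Illustrative Python sketch (not from the patch) of the classification
# described above: each property is filed as incidental, free, or base,
# with incidental winning when a feature is both incidental and free.
# 'attributes' stands in for feature.attributes and the data is made up.
def classify(raw_properties, attributes):
    incidental, free, base = [], [], []
    for p in raw_properties:
        att = attributes(p)
        if "incidental" in att:
            incidental.append(p)
        elif "free" in att:
            free.append(p)
        else:
            base.append(p)
    return incidental, free, base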
-# -# Several operations, like and refine and as-path are provided. They all use -# caching whenever possible. -# -class property-set -{ - import errors ; - import feature ; - import path ; - import property ; - import property-set ; - import set ; - - rule __init__ ( raw-properties * ) - { - self.raw = $(raw-properties) ; - - for local p in $(raw-properties) - { - if ! $(p:G) - { - errors.error "Invalid property: '$(p)'" ; - } - - local att = [ feature.attributes $(p:G) ] ; - # A feature can be both incidental and free, in which case we add it - # to incidental. - if incidental in $(att) - { - self.incidental += $(p) ; - } - else if free in $(att) - { - self.free += $(p) ; - } - else - { - self.base += $(p) ; - } - - if dependency in $(att) - { - self.dependency += $(p) ; - } - else - { - self.non-dependency += $(p) ; - } - - if [ MATCH (:) : $(p:G=) ] - { - self.conditional += $(p) ; - } - else - { - self.non-conditional += $(p) ; - } - - if propagated in $(att) - { - self.propagated += $(p) ; - } - if link-incompatible in $(att) - { - self.link-incompatible += $(p) ; - } - } - } - - # Returns Jam list of stored properties. - # - rule raw ( ) - { - return $(self.raw) ; - } - - rule str ( ) - { - return "[" $(self.raw) "]" ; - } - - # Returns properties that are neither incidental nor free. - # - rule base ( ) - { - return $(self.base) ; - } - - # Returns free properties which are not incidental. - # - rule free ( ) - { - return $(self.free) ; - } - - # Returns dependency properties. - # - rule dependency ( ) - { - return $(self.dependency) ; - } - - rule non-dependency ( ) - { - return $(self.non-dependency) ; - } - - rule conditional ( ) - { - return $(self.conditional) ; - } - - rule non-conditional ( ) - { - return $(self.non-conditional) ; - } - - # Returns incidental properties. - # - rule incidental ( ) - { - return $(self.incidental) ; - } - - rule refine ( ps ) - { - if ! $(self.refined.$(ps)) - { - local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ; - if $(r[1]) != "@error" - { - self.refined.$(ps) = [ property-set.create $(r) ] ; - } - else - { - self.refined.$(ps) = $(r) ; - } - } - return $(self.refined.$(ps)) ; - } - - rule expand ( ) - { - if ! $(self.expanded) - { - self.expanded = [ property-set.create [ feature.expand $(self.raw) ] ] ; - } - return $(self.expanded) ; - } - - rule expand-composites ( ) - { - if ! $(self.composites) - { - self.composites = [ property-set.create - [ feature.expand-composites $(self.raw) ] ] ; - } - return $(self.composites) ; - } - - rule evaluate-conditionals ( context ? ) - { - context ?= $(__name__) ; - if ! $(self.evaluated.$(context)) - { - self.evaluated.$(context) = [ property-set.create - [ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ; - } - return $(self.evaluated.$(context)) ; - } - - rule propagated ( ) - { - if ! $(self.propagated-ps) - { - self.propagated-ps = [ property-set.create $(self.propagated) ] ; - } - return $(self.propagated-ps) ; - } - - rule link-incompatible ( ) - { - if ! $(self.link-incompatible-ps) - { - self.link-incompatible-ps = - [ property-set.create $(self.link-incompatible) ] ; - } - return $(self.link-incompatible-ps) ; - } - - rule run-actions ( ) - { - if ! $(self.run) - { - self.run = [ property-set.create [ feature.run-actions $(self.raw) ] ] ; - } - return $(self.run) ; - } - - rule add-defaults ( ) - { - if ! 
$(self.defaults) - { - self.defaults = [ property-set.create - [ feature.add-defaults $(self.raw) ] ] ; - } - return $(self.defaults) ; - } - - rule as-path ( ) - { - if ! $(self.as-path) - { - self.as-path = [ property.as-path $(self.base) ] ; - } - return $(self.as-path) ; - } - - # Computes the path to be used for a target with the given properties. - # Returns a list of - # - the computed path - # - if the path is relative to the build directory, a value of 'true'. - # - rule target-path ( ) - { - if ! $(self.target-path) - { - # The <location> feature can be used to explicitly change the - # location of generated targets. - local l = [ get <location> ] ; - if $(l) - { - self.target-path = $(l) ; - } - else - { - local p = [ as-path ] ; - p = [ property-set.hash-maybe $(p) ] ; - - # A real ugly hack. Boost regression test system requires - # specific target paths, and it seems that changing it to handle - # other directory layout is really hard. For that reason, we - # teach V2 to do the things regression system requires. The - # value of '<location-prefix>' is prepended to the path. - local prefix = [ get <location-prefix> ] ; - if $(prefix) - { - self.target-path = [ path.join $(prefix) $(p) ] ; - } - else - { - self.target-path = $(p) ; - } - if ! $(self.target-path) - { - self.target-path = . ; - } - # The path is relative to build dir. - self.target-path += true ; - } - } - return $(self.target-path) ; - } - - rule add ( ps ) - { - if ! $(self.added.$(ps)) - { - self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ; - } - return $(self.added.$(ps)) ; - } - - rule add-raw ( properties * ) - { - return [ add [ property-set.create $(properties) ] ] ; - } - - rule link-incompatible-with ( ps ) - { - if ! $(.li.$(ps)) - { - local li1 = [ $(__name__).link-incompatible ] ; - local li2 = [ $(ps).link-incompatible ] ; - if [ set.equal $(li1) : $(li2) ] - { - .li.$(ps) = false ; - } - else - { - .li.$(ps) = true ; - } - } - if $(.li.$(ps)) = true - { - return true ; - } - else - { - return ; - } - } - - # Returns all values of 'feature'. - # - rule get ( feature ) - { - if ! $(self.map-built) - { - # For each feature, create a member var and assign all values to it. - # Since all regular member vars start with 'self', there will be no - # conflicts between names. - self.map-built = true ; - for local v in $(self.raw) - { - $(v:G) += $(v:G=) ; - } - } - return $($(feature)) ; - } -} - - -# Creates a new 'property-set' instance for the given raw properties or returns -# an already existing ones. -# -rule create ( raw-properties * ) -{ - raw-properties = [ sequence.unique - [ sequence.insertion-sort $(raw-properties) ] ] ; - - local key = $(raw-properties:J=-:E=) ; - - if ! $(.ps.$(key)) - { - .ps.$(key) = [ new property-set $(raw-properties) ] ; - } - return $(.ps.$(key)) ; -} -NATIVE_RULE property-set : create ; - - -# Creates a new 'property-set' instance after checking that all properties are -# valid and converting incidental properties into gristed form. -# -rule create-with-validation ( raw-properties * ) -{ - property.validate $(raw-properties) ; - return [ create [ property.make $(raw-properties) ] ] ; -} - - -# Creates a property-set from the input given by the user, in the context of -# 'jamfile-module' at 'location'. 
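# Illustrative sketch (not from the patch) of the caching 'create' rule above:
# raw properties are sorted and de-duplicated into a key, and the same
# property-set instance is returned for equal keys, which gives the 1<->1
# identity/value correspondence described earlier. 'PropertySet' is a
# stand-in for the Jam class.
_cache = {}

def create(raw_properties, PropertySet=tuple):
    key = tuple(sorted(set(raw_properties)))
    if key not in _cache:
        _cache[key] = PropertySet(key)
    return _cache[key]

# create(["<rtti>off", "<toolset>gcc"]) is create(["<toolset>gcc", "<rtti>off"])
# -> True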
-# -rule create-from-user-input ( raw-properties * : jamfile-module location ) -{ - local specification = [ property.translate-paths $(raw-properties) - : $(location) ] ; - specification = [ property.translate-indirect $(specification) - : $(jamfile-module) ] ; - local project-id = [ project.attribute $(jamfile-module) id ] ; - project-id ?= [ path.root $(location) [ path.pwd ] ] ; - specification = [ property.translate-dependencies - $(specification) : $(project-id) : $(location) ] ; - specification = - [ property.expand-subfeatures-in-conditions $(specification) ] ; - specification = [ property.make $(specification) ] ; - return [ property-set.create $(specification) ] ; -} - - -# Refines requirements with requirements provided by the user. Specially handles -# "-<property>value" syntax in specification to remove given requirements. -# - parent-requirements -- property-set object with requirements to refine. -# - specification -- string list of requirements provided by the user. -# - project-module -- module to which context indirect features will be -# bound. -# - location -- path to which path features are relative. -# -rule refine-from-user-input ( parent-requirements : specification * : - project-module : location ) -{ - if ! $(specification) - { - return $(parent-requirements) ; - } - else - { - local add-requirements ; - local remove-requirements ; - - for local r in $(specification) - { - local m = [ MATCH "^-(.*)" : $(r) ] ; - if $(m) - { - remove-requirements += $(m) ; - } - else - { - add-requirements += $(r) ; - } - } - - if $(remove-requirements) - { - # Need to create a property set, so that path features and indirect - # features are translated just like they are in project - # requirements. - local ps = [ property-set.create-from-user-input - $(remove-requirements) : $(project-module) $(location) ] ; - - parent-requirements = [ property-set.create - [ set.difference [ $(parent-requirements).raw ] - : [ $(ps).raw ] ] ] ; - specification = $(add-requirements) ; - } - - local requirements = [ property-set.create-from-user-input - $(specification) : $(project-module) $(location) ] ; - - return [ $(parent-requirements).refine $(requirements) ] ; - } -} - - -# Returns a property-set with an empty set of properties. -# -rule empty ( ) -{ - if ! $(.empty) - { - .empty = [ create ] ; - } - return $(.empty) ; -} - -if [ option.get hash : : yes ] = yes -{ - rule hash-maybe ( path ? ) - { - path ?= "" ; - return [ MD5 $(path) ] ; - } -} -else -{ - rule hash-maybe ( path ? ) - { - return $(path) ; - } -} - diff --git a/jam-files/boost-build/build/property.jam b/jam-files/boost-build/build/property.jam deleted file mode 100644 index a2ad5226..00000000 --- a/jam-files/boost-build/build/property.jam +++ /dev/null @@ -1,788 +0,0 @@ -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import errors ; -import feature ; -import indirect ; -import path ; -import regex ; -import string ; -import sequence ; -import set ; -import utility ; - - -# Refines 'properties' by overriding any non-free and non-conditional properties -# for which a different value is specified in 'requirements'. Returns the -# resulting list of properties. 
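# Illustrative sketch (not from the patch) of the "-<property>value" handling
# in refine-from-user-input above: a leading '-' removes an inherited
# requirement, everything else is refined in. The real code builds property
# sets and refines them; plain set operations are used here for brevity.
def refine_from_user_input(parent_requirements, specification):
    add, remove = [], []
    for r in specification:
        if r.startswith("-"):
            remove.append(r[1:])
        else:
            add.append(r)
    return sorted((set(parent_requirements) - set(remove)) | set(add))

# refine_from_user_input(["<define>OLD", "<threading>multi"],
#                        ["-<define>OLD", "<define>NEW"])
# -> ['<define>NEW', '<threading>multi']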
-# -rule refine ( properties * : requirements * ) -{ - local result ; - local error ; - - # All the 'requirements' elements should be present in the result. Record - # them so that we can handle 'properties'. - for local r in $(requirements) - { - # Do not consider conditional requirements. - if ! [ MATCH (:) : $(r:G=) ] - { - # Note: cannot use a local variable here, so use an ugly name. - __require__$(r:G) = $(r:G=) ; - } - } - - for local p in $(properties) - { - if [ MATCH (:) : $(p:G=) ] - { - # Do not modify conditional properties. - result += $(p) ; - } - else if free in [ feature.attributes $(p:G) ] - { - # Do not modify free properties. - result += $(p) ; - } - else - { - local required-value = $(__require__$(p:G)) ; - if $(required-value) - { - if $(p:G=) != $(required-value) - { - result += $(p:G)$(required-value) ; - } - else - { - result += $(p) ; - } - } - else - { - result += $(p) ; - } - } - } - - # Unset our ugly map. - for local r in $(requirements) - { - __require__$(r:G) = ; - } - - if $(error) - { - return $(error) ; - } - else - { - return [ sequence.unique $(result) $(requirements) ] ; - } -} - - -# Removes all conditional properties whose conditions are not met. For those -# with met conditions, removes the condition. Properties in conditions are -# looked up in 'context'. -# -rule evaluate-conditionals-in-context ( properties * : context * ) -{ - local base ; - local conditionals ; - for local p in $(properties) - { - if [ MATCH (:<) : $(p) ] - { - conditionals += $(p) ; - } - else - { - base += $(p) ; - } - } - - local result = $(base) ; - for local p in $(conditionals) - { - # Separate condition and property. - local s = [ MATCH (.*):(<.*) : $(p) ] ; - # Split condition into individual properties. - local condition = [ regex.split $(s[1]) "," ] ; - # Evaluate condition. - if ! [ MATCH (!).* : $(condition:G=) ] - { - # Only positive checks - if $(condition) in $(context) - { - result += $(s[2]) ; - } - } - else - { - # Have negative checks - local fail ; - while $(condition) - { - local c = $(condition[1]) ; - local m = [ MATCH !(.*) : $(c) ] ; - if $(m) - { - local p = $(m:G=$(c:G)) ; - if $(p) in $(context) - { - fail = true ; - c = ; - } - } - else - { - if ! $(c) in $(context) - { - fail = true ; - c = ; - } - } - condition = $(condition[2-]) ; - } - if ! $(fail) - { - result += $(s[2]) ; - } - } - } - return $(result) ; -} - - -rule expand-subfeatures-in-conditions ( properties * ) -{ - local result ; - for local p in $(properties) - { - local s = [ MATCH (.*):(<.*) : $(p) ] ; - if ! $(s) - { - result += $(p) ; - } - else - { - local condition = $(s[1]) ; - local value = $(s[2]) ; - # Condition might include several elements. - condition = [ regex.split $(condition) "," ] ; - local e ; - for local c in $(condition) - { - # It is common for a condition to include a toolset or - # subfeatures that have not been defined. In that case we want - # the condition to simply 'never be satisfied' and validation - # would only produce a spurious error so we prevent it by - # passing 'true' as the second parameter. - e += [ feature.expand-subfeatures $(c) : true ] ; - } - if $(e) = $(condition) - { - # (todo) - # This is just an optimization and possibly a premature one at - # that. - # (todo) (12.07.2008.) (Jurko) - result += $(p) ; - } - else - { - result += $(e:J=,):$(value) ; - } - } - } - return $(result) ; -} - - -# Helper for as-path, below. Orders properties with the implicit ones first, and -# within the two sections in alphabetical order of feature name. 
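# Illustrative sketch (not from the patch) of evaluate-conditionals-in-context
# above: a conditional property survives, with its condition stripped, only
# when every positive condition element is present in the context and no
# negated ('!') element is. Negation handling is simplified here.
def evaluate_conditionals(properties, context):
    result = []
    for p in properties:
        head, sep, tail = p.rpartition(":<")
        if not sep:
            result.append(p)            # unconditional property
            continue
        value = "<" + tail
        ok = True
        for c in head.split(","):
            if "!" in c:
                if c.replace("!", "", 1) in context:
                    ok = False          # negated element present -> fail
            elif c not in context:
                ok = False              # positive element missing -> fail
        if ok:
            result.append(value)
    return result

# evaluate_conditionals(["<variant>release,<rtti>off:<define>MY_RELEASE"],
#                       ["<toolset>gcc", "<variant>release", "<rtti>off"])
# -> ['<define>MY_RELEASE']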
-# -local rule path-order ( x y ) -{ - if $(y:G) && ! $(x:G) - { - return true ; - } - else if $(x:G) && ! $(y:G) - { - return ; - } - else - { - if ! $(x:G) - { - x = [ feature.expand-subfeatures $(x) ] ; - y = [ feature.expand-subfeatures $(y) ] ; - } - - if $(x[1]) < $(y[1]) - { - return true ; - } - } -} - - -local rule abbreviate-dashed ( string ) -{ - local r ; - for local part in [ regex.split $(string) - ] - { - r += [ string.abbreviate $(part) ] ; - } - return $(r:J=-) ; -} - - -local rule identity ( string ) -{ - return $(string) ; -} - - -if --abbreviate-paths in [ modules.peek : ARGV ] -{ - .abbrev = abbreviate-dashed ; -} -else -{ - .abbrev = identity ; -} - - -# Returns a path representing the given expanded property set. -# -rule as-path ( properties * ) -{ - local entry = .result.$(properties:J=-) ; - - if ! $($(entry)) - { - # Trim redundancy. - properties = [ feature.minimize $(properties) ] ; - - # Sort according to path-order. - properties = [ sequence.insertion-sort $(properties) : path-order ] ; - - local components ; - for local p in $(properties) - { - if $(p:G) - { - local f = [ utility.ungrist $(p:G) ] ; - p = $(f)-$(p:G=) ; - } - components += [ $(.abbrev) $(p) ] ; - } - - $(entry) = $(components:J=/) ; - } - - return $($(entry)) ; -} - - -# Exit with error if property is not valid. -# -local rule validate1 ( property ) -{ - local msg ; - if $(property:G) - { - local feature = $(property:G) ; - local value = $(property:G=) ; - - if ! [ feature.valid $(feature) ] - { - # Ungrist for better error messages. - feature = [ utility.ungrist $(property:G) ] ; - msg = "unknown feature '$(feature)'" ; - } - else if $(value) && ! free in [ feature.attributes $(feature) ] - { - feature.validate-value-string $(feature) $(value) ; - } - else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) ) - { - # Ungrist for better error messages. - feature = [ utility.ungrist $(property:G) ] ; - msg = "No value specified for feature '$(feature)'" ; - } - } - else - { - local feature = [ feature.implied-feature $(property) ] ; - feature.validate-value-string $(feature) $(property) ; - } - if $(msg) - { - errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ; - } -} - - -rule validate ( properties * ) -{ - for local p in $(properties) - { - validate1 $(p) ; - } -} - - -rule validate-property-sets ( property-sets * ) -{ - for local s in $(property-sets) - { - validate [ feature.split $(s) ] ; - } -} - - -# Expands any implicit property values in the given property 'specification' so -# they explicitly state their feature. -# -rule make ( specification * ) -{ - local result ; - for local e in $(specification) - { - if $(e:G) - { - result += $(e) ; - } - else if [ feature.is-implicit-value $(e) ] - { - local feature = [ feature.implied-feature $(e) ] ; - result += $(feature)$(e) ; - } - else - { - errors.error "'$(e)' is not a valid property specification" ; - } - } - return $(result) ; -} - - -# Returns a property set containing all the elements in 'properties' that do not -# have their attributes listed in 'attributes'. -# -rule remove ( attributes + : properties * ) -{ - local result ; - for local e in $(properties) - { - if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ] - { - result += $(e) ; - } - } - return $(result) ; -} - - -# Returns a property set containing all the elements in 'properties' that have -# their attributes listed in 'attributes'. 
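# Illustrative sketch (not from the patch) of the path built by as-path above
# for an already-minimized property list: implicit values are used as-is,
# gristed properties become 'feature-value', and components are joined with
# '/'. Feature minimization, ordering, and --abbreviate-paths are left out.
def as_path(properties):
    components = []
    for p in properties:
        if p.startswith("<"):
            feature, value = p[1:].split(">", 1)
            components.append(feature + "-" + value)
        else:
            components.append(p)        # implicit value such as 'gcc' or 'debug'
    return "/".join(components)

# as_path(["gcc", "debug", "<rtti>off"]) -> 'gcc/debug/rtti-off'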
-# -rule take ( attributes + : properties * ) -{ - local result ; - for local e in $(properties) - { - if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ] - { - result += $(e) ; - } - } - return $(result) ; -} - - -# Selects properties corresponding to any of the given features. -# -rule select ( features * : properties * ) -{ - local result ; - - # Add any missing angle brackets. - local empty = "" ; - features = $(empty:G=$(features)) ; - - for local p in $(properties) - { - if $(p:G) in $(features) - { - result += $(p) ; - } - } - return $(result) ; -} - - -# Returns a modified version of properties with all values of the given feature -# replaced by the given value. If 'value' is empty the feature will be removed. -# -rule change ( properties * : feature value ? ) -{ - local result ; - for local p in $(properties) - { - if $(p:G) = $(feature) - { - result += $(value:G=$(feature)) ; - } - else - { - result += $(p) ; - } - } - return $(result) ; -} - - -# If 'property' is a conditional property, returns the condition and the -# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become -# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty -# string. -# -rule split-conditional ( property ) -{ - local m = [ MATCH "(.+):<(.+)" : $(property) ] ; - if $(m) - { - return $(m[1]) <$(m[2]) ; - } -} - - -# Interpret all path properties in 'properties' as relative to 'path'. The -# property values are assumed to be in system-specific form, and will be -# translated into normalized form. -# -rule translate-paths ( properties * : path ) -{ - local result ; - for local p in $(properties) - { - local split = [ split-conditional $(p) ] ; - local condition = "" ; - if $(split) - { - condition = $(split[1]): ; - p = $(split[2]) ; - } - - if path in [ feature.attributes $(p:G) ] - { - local values = [ regex.split $(p:TG=) "&&" ] ; - local t ; - for local v in $(values) - { - t += [ path.root [ path.make $(v) ] $(path) ] ; - } - t = $(t:J="&&") ; - result += $(condition)$(t:TG=$(p:G)) ; - } - else - { - result += $(condition)$(p) ; - } - } - return $(result) ; -} - - -# Assumes that all feature values that start with '@' are names of rules, used -# in 'context-module'. Such rules can be either local to the module or global. -# Converts such values into 'indirect-rule' format (see indirect.jam), so they -# can be called from other modules. Does nothing for such values that are -# already in the 'indirect-rule' format. -# -rule translate-indirect ( specification * : context-module ) -{ - local result ; - for local p in $(specification) - { - local m = [ MATCH ^@(.+) : $(p:G=) ] ; - if $(m) - { - local v ; - if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ] - { - # Rule is already in the 'indirect-rule' format. - v = $(m) ; - } - else - { - if ! [ MATCH ".*([.]).*" : $(m) ] - { - # This is an unqualified rule name. The user might want to - # set flags on this rule name and toolset.flag - # auto-qualifies it. Need to do the same here so flag - # setting works. We can arrange for toolset.flag to *not* - # auto-qualify the argument but then two rules defined in - # two Jamfiles would conflict. - m = $(context-module).$(m) ; - } - v = [ indirect.make $(m) : $(context-module) ] ; - } - - v = @$(v) ; - result += $(v:G=$(p:G)) ; - } - else - { - result += $(p) ; - } - } - return $(result) ; -} - - -# Binds all dependency properties in a list relative to the given project. 
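# Illustrative sketch (not from the patch) of what translate-paths above does
# to a single path property value: each '&&'-separated element is rooted at
# the Jamfile location unless it is already absolute. The values are made up.
import os

def translate_path_value(value, location):
    parts = value.split("&&")
    rooted = [p if os.path.isabs(p) else os.path.join(location, p) for p in parts]
    return "&&".join(rooted)

# translate_path_value("include&&/usr/include", "libs/net")
# -> 'libs/net/include&&/usr/include'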
-# Targets with absolute paths will be left unchanged and targets which have a -# project specified will have the path to the project interpreted relative to -# the specified location. -# -rule translate-dependencies ( specification * : project-id : location ) -{ - local result ; - for local p in $(specification) - { - local split = [ split-conditional $(p) ] ; - local condition = "" ; - if $(split) - { - condition = $(split[1]): ; - p = $(split[2]) ; - } - if dependency in [ feature.attributes $(p:G) ] - { - local split-target = [ regex.match (.*)//(.*) : $(p:G=) ] ; - if $(split-target) - { - local rooted = [ path.root [ path.make $(split-target[1]) ] - [ path.root $(location) [ path.pwd ] ] ] ; - result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ; - } - else if [ path.is-rooted $(p:G=) ] - { - result += $(condition)$(p) ; - } - else - { - result += $(condition)$(p:G)$(project-id)//$(p:G=) ; - } - } - else - { - result += $(condition)$(p) ; - } - } - return $(result) ; -} - - -# Class maintaining a property set -> string mapping. -# -class property-map -{ - import errors ; - import numbers ; - import sequence ; - - rule __init__ ( ) - { - self.next-flag = 1 ; - } - - # Associate 'value' with 'properties'. - # - rule insert ( properties + : value ) - { - self.all-flags += $(self.next-flag) ; - self.properties.$(self.next-flag) = $(properties) ; - self.value.$(self.next-flag) = $(value) ; - - self.next-flag = [ numbers.increment $(self.next-flag) ] ; - } - - # Returns the value associated with 'properties' or any subset of it. If - # more than one subset has a value assigned to it, returns the value for the - # longest subset, if it is unique. - # - rule find ( properties + ) - { - return [ find-replace $(properties) ] ; - } - - # Returns the value associated with 'properties'. If 'value' parameter is - # given, replaces the found value. - # - rule find-replace ( properties + : value ? ) - { - # First find all matches. - local matches ; - local match-ranks ; - for local i in $(self.all-flags) - { - if $(self.properties.$(i)) in $(properties) - { - matches += $(i) ; - match-ranks += [ sequence.length $(self.properties.$(i)) ] ; - } - } - local best = [ sequence.select-highest-ranked $(matches) - : $(match-ranks) ] ; - if $(best[2]) - { - errors.error "Ambiguous key $(properties:J= :E=)" ; - } - local original = $(self.value.$(best)) ; - if $(value) - { - self.value.$(best) = $(value) ; - } - return $(original) ; - } -} - - -rule __test__ ( ) -{ - import assert ; - import "class" : new ; - import errors : try catch ; - import feature ; - - # Local rules must be explicitly re-imported. 
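# Illustrative sketch (not from the patch) of the lookup property-map.find /
# find-replace above performs: among entries whose key properties are all
# contained in the query, the longest key wins, and a tie between two longest
# keys is an ambiguity error. The entries below are hypothetical.
def find(entries, properties):
    query = set(properties)
    matches = [(len(key), value) for key, value in entries if set(key) <= query]
    if not matches:
        return None
    matches.sort(key=lambda m: m[0], reverse=True)
    if len(matches) > 1 and matches[0][0] == matches[1][0]:
        raise ValueError("Ambiguous key")
    return matches[0][1]

entries = [(["<toolset>gcc"], "o"), (["<toolset>gcc", "<os>NT"], "obj")]
# find(entries, ["<toolset>gcc", "<os>NT"]) -> 'obj'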
- import property : path-order abbreviate-dashed ; - - feature.prepare-test property-test-temp ; - - feature.feature toolset : gcc : implicit symmetric ; - feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 - 3.0.2 : optional ; - feature.feature define : : free ; - feature.feature runtime-link : dynamic static : symmetric link-incompatible ; - feature.feature optimization : on off ; - feature.feature variant : debug release : implicit composite symmetric ; - feature.feature rtti : on off : link-incompatible ; - - feature.compose <variant>debug : <define>_DEBUG <optimization>off ; - feature.compose <variant>release : <define>NDEBUG <optimization>on ; - - validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ; - - assert.true path-order $(test-space) debug <define>foo ; - assert.false path-order $(test-space) <define>foo debug ; - assert.true path-order $(test-space) gcc debug ; - assert.false path-order $(test-space) debug gcc ; - assert.true path-order $(test-space) <optimization>on <rtti>on ; - assert.false path-order $(test-space) <rtti>on <optimization>on ; - - assert.result-set-equal <toolset>gcc <rtti>off <define>FOO - : refine <toolset>gcc <rtti>off - : <define>FOO - : $(test-space) ; - - assert.result-set-equal <toolset>gcc <optimization>on - : refine <toolset>gcc <optimization>off - : <optimization>on - : $(test-space) ; - - assert.result-set-equal <toolset>gcc <rtti>off - : refine <toolset>gcc : <rtti>off : $(test-space) ; - - assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO - : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO - : $(test-space) ; - - assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar - : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar - : $(test-space) ; - - assert.result <define>MY_RELEASE - : evaluate-conditionals-in-context - <variant>release,<rtti>off:<define>MY_RELEASE - : <toolset>gcc <variant>release <rtti>off ; - - assert.result debug - : as-path <optimization>off <variant>debug - : $(test-space) ; - - assert.result gcc/debug/rtti-off - : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug - : $(test-space) ; - - assert.result optmz-off : abbreviate-dashed optimization-off ; - assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ; - - try ; - validate <feature>value : $(test-space) ; - catch "Invalid property '<feature>value': unknown feature 'feature'." ; - - try ; - validate <rtti>default : $(test-space) ; - catch \"default\" is not a known value of feature <rtti> ; - - validate <define>WHATEVER : $(test-space) ; - - try ; - validate <rtti> : $(test-space) ; - catch "Invalid property '<rtti>': No value specified for feature 'rtti'." 
; - - try ; - validate value : $(test-space) ; - catch "value" is not a value of an implicit feature ; - - assert.result-set-equal <rtti>on - : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ; - - assert.result-set-equal <include>a - : select include : <include>a <toolset>gcc ; - - assert.result-set-equal <include>a - : select include bar : <include>a <toolset>gcc ; - - assert.result-set-equal <include>a <toolset>gcc - : select include <bar> <toolset> : <include>a <toolset>gcc ; - - assert.result-set-equal <toolset>kylix <include>a - : change <toolset>gcc <include>a : <toolset> kylix ; - - pm = [ new property-map ] ; - $(pm).insert <toolset>gcc : o ; - $(pm).insert <toolset>gcc <os>NT : obj ; - $(pm).insert <toolset>gcc <os>CYGWIN : obj ; - - assert.equal o : [ $(pm).find <toolset>gcc ] ; - - assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ; - - try ; - $(pm).find <toolset>gcc <os>NT <os>CYGWIN ; - catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ; - - # Test ordinary properties. - assert.result : split-conditional <toolset>gcc ; - - # Test properties with ":". - assert.result : split-conditional <define>FOO=A::B ; - - # Test conditional feature. - assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO - : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ; - - feature.finish-test property-test-temp ; -} diff --git a/jam-files/boost-build/build/property.py b/jam-files/boost-build/build/property.py deleted file mode 100644 index c4b13dbc..00000000 --- a/jam-files/boost-build/build/property.py +++ /dev/null @@ -1,593 +0,0 @@ -# Status: ported, except for tests and --abbreviate-paths. -# Base revision: 64070 -# -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import re -from b2.util.utility import * -from b2.build import feature -from b2.util import sequence, qualify_jam_action -import b2.util.set -from b2.manager import get_manager - -__re_two_ampersands = re.compile ('&&') -__re_comma = re.compile (',') -__re_split_condition = re.compile ('(.*):(<.*)') -__re_split_conditional = re.compile (r'(.+):<(.+)') -__re_colon = re.compile (':') -__re_has_condition = re.compile (r':<') -__re_separate_condition_and_property = re.compile (r'(.*):(<.*)') - -class Property(object): - - __slots__ = ('_feature', '_value', '_condition') - - def __init__(self, f, value, condition = []): - if type(f) == type(""): - f = feature.get(f) - # At present, single property has a single value. 
- assert type(value) != type([]) - assert(f.free() or value.find(':') == -1) - self._feature = f - self._value = value - self._condition = condition - - def feature(self): - return self._feature - - def value(self): - return self._value - - def condition(self): - return self._condition - - def to_raw(self): - result = "<" + self._feature.name() + ">" + str(self._value) - if self._condition: - result = ",".join(str(p) for p in self._condition) + ':' + result - return result - - def __str__(self): - return self.to_raw() - - def __hash__(self): - # FIXME: consider if this class should be value-is-identity one - return hash((self._feature, self._value, tuple(self._condition))) - - def __cmp__(self, other): - return cmp((self._feature, self._value, self._condition), - (other._feature, other._value, other._condition)) - - -def create_from_string(s, allow_condition=False): - - condition = [] - import types - if not isinstance(s, types.StringType): - print type(s) - if __re_has_condition.search(s): - - if not allow_condition: - raise BaseException("Conditional property is not allowed in this context") - - m = __re_separate_condition_and_property.match(s) - condition = m.group(1) - s = m.group(2) - - # FIXME: break dependency cycle - from b2.manager import get_manager - - feature_name = get_grist(s) - if not feature_name: - if feature.is_implicit_value(s): - f = feature.implied_feature(s) - value = s - else: - raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s) - else: - f = feature.get(feature_name) - - value = get_value(s) - if not value: - get_manager().errors()("Invalid property '%s' -- no value specified" % s) - - - if condition: - condition = [create_from_string(x) for x in condition.split(',')] - - return Property(f, value, condition) - -def create_from_strings(string_list, allow_condition=False): - - return [create_from_string(s, allow_condition) for s in string_list] - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __results - - # A cache of results from as_path - __results = {} - -reset () - - -def path_order (x, y): - """ Helper for as_path, below. Orders properties with the implicit ones - first, and within the two sections in alphabetical order of feature - name. - """ - if x == y: - return 0 - - xg = get_grist (x) - yg = get_grist (y) - - if yg and not xg: - return -1 - - elif xg and not yg: - return 1 - - else: - if not xg: - x = feature.expand_subfeatures([x]) - y = feature.expand_subfeatures([y]) - - if x < y: - return -1 - elif x > y: - return 1 - else: - return 0 - -def identify(string): - return string - -# Uses Property -def refine (properties, requirements): - """ Refines 'properties' by overriding any non-free properties - for which a different value is specified in 'requirements'. - Conditional requirements are just added without modification. - Returns the resulting list of properties. - """ - # The result has no duplicates, so we store it in a set - result = set() - - # Records all requirements. - required = {} - - # All the elements of requirements should be present in the result - # Record them so that we can handle 'properties'. - for r in requirements: - # Don't consider conditional requirements. 
- if not r.condition(): - required[r.feature()] = r - - for p in properties: - # Skip conditional properties - if p.condition(): - result.add(p) - # No processing for free properties - elif p.feature().free(): - result.add(p) - else: - if required.has_key(p.feature()): - result.add(required[p.feature()]) - else: - result.add(p) - - return sequence.unique(list(result) + requirements) - -def translate_paths (properties, path): - """ Interpret all path properties in 'properties' as relative to 'path' - The property values are assumed to be in system-specific form, and - will be translated into normalized form. - """ - result = [] - - for p in properties: - - if p.feature().path(): - values = __re_two_ampersands.split(p.value()) - - new_value = "&&".join(os.path.join(path, v) for v in values) - - if new_value != p.value(): - result.append(Property(p.feature(), new_value, p.condition())) - else: - result.append(p) - - else: - result.append (p) - - return result - -def translate_indirect(properties, context_module): - """Assumes that all feature values that start with '@' are - names of rules, used in 'context-module'. Such rules can be - either local to the module or global. Qualified local rules - with the name of the module.""" - result = [] - for p in properties: - if p.value()[0] == '@': - q = qualify_jam_action(p.value()[1:], context_module) - get_manager().engine().register_bjam_action(q) - result.append(Property(p.feature(), '@' + q, p.condition())) - else: - result.append(p) - - return result - -def validate (properties): - """ Exit with error if any of the properties is not valid. - properties may be a single property or a sequence of properties. - """ - - if isinstance (properties, str): - __validate1 (properties) - else: - for p in properties: - __validate1 (p) - -def expand_subfeatures_in_conditions (properties): - - result = [] - for p in properties: - - if not p.condition(): - result.append(p) - else: - expanded = [] - for c in p.condition(): - - if c.feature().name().startswith("toolset") or c.feature().name() == "os": - # It common that condition includes a toolset which - # was never defined, or mentiones subfeatures which - # were never defined. In that case, validation will - # only produce an spirious error, so don't validate. - expanded.extend(feature.expand_subfeatures ([c], True)) - else: - expanded.extend(feature.expand_subfeatures([c])) - - result.append(Property(p.feature(), p.value(), expanded)) - - return result - -# FIXME: this should go -def split_conditional (property): - """ If 'property' is conditional property, returns - condition and the property, e.g - <variant>debug,<toolset>gcc:<inlining>full will become - <variant>debug,<toolset>gcc <inlining>full. - Otherwise, returns empty string. - """ - m = __re_split_conditional.match (property) - - if m: - return (m.group (1), '<' + m.group (2)) - - return None - - -def select (features, properties): - """ Selects properties which correspond to any of the given features. - """ - result = [] - - # add any missing angle brackets - features = add_grist (features) - - return [p for p in properties if get_grist(p) in features] - -def validate_property_sets (sets): - for s in sets: - validate(s.all()) - -def evaluate_conditionals_in_context (properties, context): - """ Removes all conditional properties which conditions are not met - For those with met conditions, removes the condition. 
Properies - in conditions are looked up in 'context' - """ - base = [] - conditional = [] - - for p in properties: - if p.condition(): - conditional.append (p) - else: - base.append (p) - - result = base[:] - for p in conditional: - - # Evaluate condition - # FIXME: probably inefficient - if all(x in context for x in p.condition()): - result.append(Property(p.feature(), p.value())) - - return result - - -def change (properties, feature, value = None): - """ Returns a modified version of properties with all values of the - given feature replaced by the given value. - If 'value' is None the feature will be removed. - """ - result = [] - - feature = add_grist (feature) - - for p in properties: - if get_grist (p) == feature: - if value: - result.append (replace_grist (value, feature)) - - else: - result.append (p) - - return result - - -################################################################ -# Private functions - -def __validate1 (property): - """ Exit with error if property is not valid. - """ - msg = None - - if not property.feature().free(): - feature.validate_value_string (property.feature(), property.value()) - - -################################################################### -# Still to port. -# Original lines are prefixed with "# " -# -# -# import utility : ungrist ; -# import sequence : unique ; -# import errors : error ; -# import feature ; -# import regex ; -# import sequence ; -# import set ; -# import path ; -# import assert ; -# -# - - -# rule validate-property-sets ( property-sets * ) -# { -# for local s in $(property-sets) -# { -# validate [ feature.split $(s) ] ; -# } -# } -# - -def remove(attributes, properties): - """Returns a property sets which include all the elements - in 'properties' that do not have attributes listed in 'attributes'.""" - - result = [] - for e in properties: - attributes_new = feature.attributes(get_grist(e)) - has_common_features = 0 - for a in attributes_new: - if a in attributes: - has_common_features = 1 - break - - if not has_common_features: - result += e - - return result - - -def take(attributes, properties): - """Returns a property set which include all - properties in 'properties' that have any of 'attributes'.""" - result = [] - for e in properties: - if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))): - result.append(e) - return result - -def translate_dependencies(properties, project_id, location): - - result = [] - for p in properties: - - if not p.feature().dependency(): - result.append(p) - else: - v = p.value() - m = re.match("(.*)//(.*)", v) - if m: - rooted = m.group(1) - if rooted[0] == '/': - # Either project id or absolute Linux path, do nothing. - pass - else: - rooted = os.path.join(os.getcwd(), location, rooted) - - result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition())) - - elif os.path.isabs(v): - result.append(p) - else: - result.append(Property(p.feature(), project_id + "//" + v, p.condition())) - - return result - - -class PropertyMap: - """ Class which maintains a property set -> string mapping. - """ - def __init__ (self): - self.__properties = [] - self.__values = [] - - def insert (self, properties, value): - """ Associate value with properties. - """ - self.__properties.append(properties) - self.__values.append(value) - - def find (self, properties): - """ Return the value associated with properties - or any subset of it. If more than one - subset has value assigned to it, return the - value for the longest subset, if it's unique. 
- """ - return self.find_replace (properties) - - def find_replace(self, properties, value=None): - matches = [] - match_ranks = [] - - for i in range(0, len(self.__properties)): - p = self.__properties[i] - - if b2.util.set.contains (p, properties): - matches.append (i) - match_ranks.append(len(p)) - - best = sequence.select_highest_ranked (matches, match_ranks) - - if not best: - return None - - if len (best) > 1: - raise NoBestMatchingAlternative () - - best = best [0] - - original = self.__values[best] - - if value: - self.__values[best] = value - - return original - -# local rule __test__ ( ) -# { -# import errors : try catch ; -# import feature ; -# import feature : feature subfeature compose ; -# -# # local rules must be explicitly re-imported -# import property : path-order ; -# -# feature.prepare-test property-test-temp ; -# -# feature toolset : gcc : implicit symmetric ; -# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 -# 3.0 3.0.1 3.0.2 : optional ; -# feature define : : free ; -# feature runtime-link : dynamic static : symmetric link-incompatible ; -# feature optimization : on off ; -# feature variant : debug release : implicit composite symmetric ; -# feature rtti : on off : link-incompatible ; -# -# compose <variant>debug : <define>_DEBUG <optimization>off ; -# compose <variant>release : <define>NDEBUG <optimization>on ; -# -# import assert ; -# import "class" : new ; -# -# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ; -# -# assert.result <toolset>gcc <rtti>off <define>FOO -# : refine <toolset>gcc <rtti>off -# : <define>FOO -# : $(test-space) -# ; -# -# assert.result <toolset>gcc <optimization>on -# : refine <toolset>gcc <optimization>off -# : <optimization>on -# : $(test-space) -# ; -# -# assert.result <toolset>gcc <rtti>off -# : refine <toolset>gcc : <rtti>off : $(test-space) -# ; -# -# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO -# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO -# : $(test-space) -# ; -# -# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar -# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar -# : $(test-space) -# ; -# -# assert.result <define>MY_RELEASE -# : evaluate-conditionals-in-context -# <variant>release,<rtti>off:<define>MY_RELEASE -# : <toolset>gcc <variant>release <rtti>off -# -# ; -# -# try ; -# validate <feature>value : $(test-space) ; -# catch "Invalid property '<feature>value': unknown feature 'feature'." ; -# -# try ; -# validate <rtti>default : $(test-space) ; -# catch \"default\" is not a known value of feature <rtti> ; -# -# validate <define>WHATEVER : $(test-space) ; -# -# try ; -# validate <rtti> : $(test-space) ; -# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." 
; -# -# try ; -# validate value : $(test-space) ; -# catch "value" is not a value of an implicit feature ; -# -# -# assert.result <rtti>on -# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ; -# -# assert.result <include>a -# : select include : <include>a <toolset>gcc ; -# -# assert.result <include>a -# : select include bar : <include>a <toolset>gcc ; -# -# assert.result <include>a <toolset>gcc -# : select include <bar> <toolset> : <include>a <toolset>gcc ; -# -# assert.result <toolset>kylix <include>a -# : change <toolset>gcc <include>a : <toolset> kylix ; -# -# # Test ordinary properties -# assert.result -# : split-conditional <toolset>gcc -# ; -# -# # Test properties with ":" -# assert.result -# : split-conditional <define>FOO=A::B -# ; -# -# # Test conditional feature -# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO -# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO -# ; -# -# feature.finish-test property-test-temp ; -# } -# - diff --git a/jam-files/boost-build/build/property_set.py b/jam-files/boost-build/build/property_set.py deleted file mode 100644 index f12eb90c..00000000 --- a/jam-files/boost-build/build/property_set.py +++ /dev/null @@ -1,449 +0,0 @@ -# Status: ported. -# Base revision: 40480 - -# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -from b2.util.utility import * -import property, feature, string -import b2.build.feature -from b2.exceptions import * -from b2.util.sequence import unique -from b2.util.set import difference -from b2.util import cached - -from b2.manager import get_manager - - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __cache - - # A cache of property sets - # TODO: use a map of weak refs? - __cache = {} - -reset () - - -def create (raw_properties = []): - """ Creates a new 'PropertySet' instance for the given raw properties, - or returns an already existing one. - """ - # FIXME: propagate to callers. - if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property): - x = raw_properties - else: - x = [property.create_from_string(ps) for ps in raw_properties] - x.sort() - x = unique (x) - - # FIXME: can we do better, e.g. by directly computing - # has value of the list? - key = tuple(x) - - if not __cache.has_key (key): - __cache [key] = PropertySet(x) - - return __cache [key] - -def create_with_validation (raw_properties): - """ Creates new 'PropertySet' instances after checking - that all properties are valid and converting incidental - properties into gristed form. - """ - properties = [property.create_from_string(s) for s in raw_properties] - property.validate(properties) - - return create(properties) - -def empty (): - """ Returns PropertySet with empty set of properties. 
- """ - return create () - -def create_from_user_input(raw_properties, jamfile_module, location): - """Creates a property-set from the input given by the user, in the - context of 'jamfile-module' at 'location'""" - - properties = property.create_from_strings(raw_properties, True) - properties = property.translate_paths(properties, location) - properties = property.translate_indirect(properties, jamfile_module) - - project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None) - if not project_id: - project_id = os.path.abspath(location) - properties = property.translate_dependencies(properties, project_id, location) - properties = property.expand_subfeatures_in_conditions(properties) - return create(properties) - - -def refine_from_user_input(parent_requirements, specification, jamfile_module, - location): - """Refines requirements with requirements provided by the user. - Specially handles "-<property>value" syntax in specification - to remove given requirements. - - parent-requirements -- property-set object with requirements - to refine - - specification -- string list of requirements provided by the use - - project-module -- the module to which context indirect features - will be bound. - - location -- the path to which path features are relative.""" - - - if not specification: - return parent_requirements - - - add_requirements = [] - remove_requirements = [] - - for r in specification: - if r[0] == '-': - remove_requirements.append(r[1:]) - else: - add_requirements.append(r) - - if remove_requirements: - # Need to create property set, so that path features - # and indirect features are translated just like they - # are in project requirements. - ps = create_from_user_input(remove_requirements, - jamfile_module, location) - - parent_requirements = create(difference(parent_requirements.all(), - ps.all())) - specification = add_requirements - - requirements = create_from_user_input(specification, - jamfile_module, location) - - return parent_requirements.refine(requirements) - -class PropertySet: - """ Class for storing a set of properties. - - there's 1<->1 correspondence between identity and value. No - two instances of the class are equal. To maintain this property, - the 'PropertySet.create' rule should be used to create new instances. - Instances are immutable. - - - each property is classified with regard to it's effect on build - results. Incidental properties have no effect on build results, from - Boost.Build point of view. Others are either free, or non-free, which we - call 'base'. Each property belong to exactly one of those categories and - it's possible to get list of properties in each category. - - In addition, it's possible to get list of properties with specific - attribute. - - - several operations, like and refine and as_path are provided. They all use - caching whenever possible. - """ - def __init__ (self, properties = []): - - - raw_properties = [] - for p in properties: - raw_properties.append(p.to_raw()) - - self.all_ = properties - self.all_raw_ = raw_properties - self.all_set_ = set(properties) - - self.incidental_ = [] - self.free_ = [] - self.base_ = [] - self.dependency_ = [] - self.non_dependency_ = [] - self.conditional_ = [] - self.non_conditional_ = [] - self.propagated_ = [] - self.link_incompatible = [] - - # A cache of refined properties. - self.refined_ = {} - - # A cache of property sets created by adding properties to this one. - self.added_ = {} - - # Cache for the default properties. 
- self.defaults_ = None - - # Cache for the expanded properties. - self.expanded_ = None - - # Cache for the expanded composite properties - self.composites_ = None - - # Cache for property set with expanded subfeatures - self.subfeatures_ = None - - # Cache for the property set containing propagated properties. - self.propagated_ps_ = None - - # A map of features to its values. - self.feature_map_ = None - - # A tuple (target path, is relative to build directory) - self.target_path_ = None - - self.as_path_ = None - - # A cache for already evaluated sets. - self.evaluated_ = {} - - for p in raw_properties: - if not get_grist (p): - raise BaseException ("Invalid property: '%s'" % p) - - att = feature.attributes (get_grist (p)) - - if 'propagated' in att: - self.propagated_.append (p) - - if 'link_incompatible' in att: - self.link_incompatible.append (p) - - for p in properties: - - # A feature can be both incidental and free, - # in which case we add it to incidental. - if p.feature().incidental(): - self.incidental_.append(p) - elif p.feature().free(): - self.free_.append(p) - else: - self.base_.append(p) - - if p.condition(): - self.conditional_.append(p) - else: - self.non_conditional_.append(p) - - if p.feature().dependency(): - self.dependency_.append (p) - else: - self.non_dependency_.append (p) - - - def all(self): - return self.all_ - - def raw (self): - """ Returns the list of stored properties. - """ - return self.all_raw_ - - def __str__(self): - return ' '.join(str(p) for p in self.all_) - - def base (self): - """ Returns properties that are neither incidental nor free. - """ - return self.base_ - - def free (self): - """ Returns free properties which are not dependency properties. - """ - return self.free_ - - def non_free(self): - return self.base_ + self.incidental_ - - def dependency (self): - """ Returns dependency properties. - """ - return self.dependency_ - - def non_dependency (self): - """ Returns properties that are not dependencies. - """ - return self.non_dependency_ - - def conditional (self): - """ Returns conditional properties. - """ - return self.conditional_ - - def non_conditional (self): - """ Returns properties that are not conditional. - """ - return self.non_conditional_ - - def incidental (self): - """ Returns incidental properties. - """ - return self.incidental_ - - def refine (self, requirements): - """ Refines this set's properties using the requirements passed as an argument. 
- """ - assert isinstance(requirements, PropertySet) - if not self.refined_.has_key (requirements): - r = property.refine(self.all_, requirements.all_) - - self.refined_[requirements] = create(r) - - return self.refined_[requirements] - - def expand (self): - if not self.expanded_: - expanded = feature.expand(self.all_) - self.expanded_ = create(expanded) - return self.expanded_ - - def expand_subfeatures(self): - if not self.subfeatures_: - self.subfeatures_ = create(feature.expand_subfeatures(self.all_)) - return self.subfeatures_ - - def evaluate_conditionals(self, context=None): - if not context: - context = self - - if not self.evaluated_.has_key(context): - # FIXME: figure why the call messes up first parameter - self.evaluated_[context] = create( - property.evaluate_conditionals_in_context(self.all(), context)) - - return self.evaluated_[context] - - def propagated (self): - if not self.propagated_ps_: - self.propagated_ps_ = create (self.propagated_) - return self.propagated_ps_ - - def add_defaults (self): - # FIXME: this caching is invalidated when new features - # are declare inside non-root Jamfiles. - if not self.defaults_: - expanded = feature.add_defaults(self.all_) - self.defaults_ = create(expanded) - return self.defaults_ - - def as_path (self): - if not self.as_path_: - - def path_order (p1, p2): - - i1 = p1.feature().implicit() - i2 = p2.feature().implicit() - - if i1 != i2: - return i2 - i1 - else: - return cmp(p1.feature().name(), p2.feature().name()) - - # trim redundancy - properties = feature.minimize(self.base_) - - # sort according to path_order - properties.sort (path_order) - - components = [] - for p in properties: - if p.feature().implicit(): - components.append(p.value()) - else: - components.append(p.feature().name() + "-" + p.value()) - - self.as_path_ = '/'.join (components) - - return self.as_path_ - - def target_path (self): - """ Computes the target path that should be used for - target with these properties. - Returns a tuple of - - the computed path - - if the path is relative to build directory, a value of - 'true'. - """ - if not self.target_path_: - # The <location> feature can be used to explicitly - # change the location of generated targets - l = self.get ('<location>') - if l: - computed = l[0] - is_relative = False - - else: - p = self.as_path () - - # Really, an ugly hack. Boost regression test system requires - # specific target paths, and it seems that changing it to handle - # other directory layout is really hard. For that reason, - # we teach V2 to do the things regression system requires. - # The value o '<location-prefix>' is predended to the path. - prefix = self.get ('<location-prefix>') - - if prefix: - if len (prefix) > 1: - raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix) - - computed = os.path.join(prefix[0], p) - - else: - computed = p - - if not computed: - computed = "." - - is_relative = True - - self.target_path_ = (computed, is_relative) - - return self.target_path_ - - def add (self, ps): - """ Creates a new property set containing the properties in this one, - plus the ones of the property set passed as argument. - """ - if not self.added_.has_key(ps): - self.added_[ps] = create(self.all_ + ps.all()) - return self.added_[ps] - - def add_raw (self, properties): - """ Creates a new property set containing the properties in this one, - plus the ones passed as argument. - """ - return self.add (create (properties)) - - - def get (self, feature): - """ Returns all values of 'feature'. 
- """ - if type(feature) == type([]): - feature = feature[0] - if not isinstance(feature, b2.build.feature.Feature): - feature = b2.build.feature.get(feature) - - if not self.feature_map_: - self.feature_map_ = {} - - for v in self.all_: - if not self.feature_map_.has_key(v.feature()): - self.feature_map_[v.feature()] = [] - self.feature_map_[v.feature()].append(v.value()) - - return self.feature_map_.get(feature, []) - - @cached - def get_properties(self, feature): - """Returns all contained properties associated with 'feature'""" - - if not isinstance(feature, b2.build.feature.Feature): - feature = b2.build.feature.get(feature) - - result = [] - for p in self.all_: - if p.feature() == feature: - result.append(p) - return result - - def __contains__(self, item): - return item in self.all_set_ - diff --git a/jam-files/boost-build/build/readme.txt b/jam-files/boost-build/build/readme.txt deleted file mode 100644 index c3dddd8d..00000000 --- a/jam-files/boost-build/build/readme.txt +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2001, 2002 Dave Abrahams -Copyright 2002 Vladimir Prus -Distributed under the Boost Software License, Version 1.0. -(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -Development code for new build system. To run unit tests for jam code, execute: - - bjam --debug --build-system=test - -Comprehensive tests require Python. See ../test/readme.txt - - - diff --git a/jam-files/boost-build/build/scanner.jam b/jam-files/boost-build/build/scanner.jam deleted file mode 100644 index d6042ea2..00000000 --- a/jam-files/boost-build/build/scanner.jam +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Implements scanners: objects that compute implicit dependencies for -# files, such as includes in C++. -# -# Scanner has a regular expression used to find dependencies, some -# data needed to interpret those dependencies (for example, include -# paths), and a code which actually established needed relationship -# between actual jam targets. -# -# Scanner objects are created by actions, when they try to actualize -# virtual targets, passed to 'virtual-target.actualize' method and are -# then associated with actual targets. It is possible to use -# several scanners for a virtual-target. For example, a single source -# might be used by to compile actions, with different include paths. -# In this case, two different actual targets will be created, each -# having scanner of its own. -# -# Typically, scanners are created from target type and action's -# properties, using the rule 'get' in this module. Directly creating -# scanners is not recommended, because it might create many equvivalent -# but different instances, and lead in unneeded duplication of -# actual targets. However, actions can also create scanners in a special -# way, instead of relying on just target type. - -import "class" : new ; -import property virtual-target property-set ; -import errors : error ; - -# Base scanner class. -class scanner -{ - rule __init__ ( ) - { - } - - # Returns a pattern to use for scanning - rule pattern ( ) - { - error "method must be overriden" ; - } - - # Establish necessary relationship between targets, - # given actual target beeing scanned, and a list of - # pattern matches in that file. 
- rule process ( target : matches * ) - { - error "method must be overriden" ; - } -} - -# Registers a new generator class, specifying a set of -# properties relevant to this scanner. Ctor for that class -# should have one parameter: list of properties. -rule register ( scanner-class : relevant-properties * ) -{ - .registered += $(scanner-class) ; - .relevant-properties.$(scanner-class) = $(relevant-properties) ; -} - -# Common scanner class, which can be used when there's only one -# kind of includes (unlike C, where "" and <> includes have different -# search paths). -class common-scanner : scanner -{ - import scanner ; - rule __init__ ( includes * ) - { - scanner.__init__ ; - self.includes = $(includes) ; - } - - rule process ( target : matches * : binding ) - { - local target_path = [ NORMALIZE_PATH $(binding:D) ] ; - - NOCARE $(matches) ; - INCLUDES $(target) : $(matches) ; - SEARCH on $(matches) = $(target_path) $(self.includes:G=) ; - ISFILE $(matches) ; - - scanner.propagate $(__name__) : $(matches) : $(target) ; - } -} - - -# Returns an instance of previously registered scanner, -# with the specified properties. -rule get ( scanner-class : property-set ) -{ - if ! $(scanner-class) in $(.registered) - { - error "attempt to get unregisted scanner" ; - } - - local r = $(.rv-cache.$(property-set)) ; - if ! $(r) - { - r = [ property-set.create - [ property.select $(.relevant-properties.$(scanner-class)) : - [ $(property-set).raw ] ] ] ; - .rv-cache.$(property-set) = $(r) ; - } - - if ! $(scanner.$(scanner-class).$(r:J=-)) - { - scanner.$(scanner-class).$(r:J=-) = [ new $(scanner-class) [ $(r).raw ] ] ; - } - return $(scanner.$(scanner-class).$(r:J=-)) ; -} - - -# Installs the specified scanner on actual target 'target'. -rule install ( scanner : target - vtarget # virtual target from which 'target' was actualized -) -{ - HDRSCAN on $(target) = [ $(scanner).pattern ] ; - SCANNER on $(target) = $(scanner) ; - HDRRULE on $(target) = scanner.hdrrule ; - - # scanner reflects difference in properties affecting - # binding of 'target', which will be known when processing - # includes for it, will give information on how to - # interpret quoted includes. - HDRGRIST on $(target) = $(scanner) ; -} - -# Propagate scanner setting from 'including-target' to 'targets'. -rule propagate ( scanner : targets * : including-target ) -{ - HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ; - SCANNER on $(targets) = $(scanner) ; - HDRRULE on $(targets) = scanner.hdrrule ; - HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ; -} - - -rule hdrrule ( target : matches * : binding ) -{ - local scanner = [ on $(target) return $(SCANNER) ] ; - $(scanner).process $(target) : $(matches) : $(binding) ; -} -# hdrrule must be available at global scope so that it can be invoked -# by header scanning -IMPORT scanner : hdrrule : : scanner.hdrrule ; - - - - diff --git a/jam-files/boost-build/build/scanner.py b/jam-files/boost-build/build/scanner.py deleted file mode 100644 index 19f1431d..00000000 --- a/jam-files/boost-build/build/scanner.py +++ /dev/null @@ -1,158 +0,0 @@ -# Status: ported. -# Base revision: 45462 -# -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Implements scanners: objects that compute implicit dependencies for -# files, such as includes in C++. 
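Both scanner.jam above and the scanner.py port whose header begins here describe the same model: a scanner exposes a regular expression used to find dependencies, a process step that records the matches as implicit dependencies of the scanned file, and a cache keyed on the properties relevant to the scanner so that equivalent scanners are shared rather than duplicated. The following standalone Python sketch only illustrates that shape; the IncludeScanner class, the cache key, and the resolution logic are invented for the example and are not part of the Boost.Build API.

    import os
    import re

    class IncludeScanner:
        """Toy header scanner: finds #include "..." dependencies in C/C++ sources."""

        def __init__(self, include_paths):
            self.include_paths = list(include_paths)

        def pattern(self):
            # The regular expression used to find dependencies.
            return re.compile(r'#\s*include\s+"([^"]+)"')

        def process(self, source_path):
            # Resolve every match against the directory of the scanned file and
            # then the configured include paths, roughly like giving the matches
            # a SEARCH path in the real module.
            text = open(source_path).read()
            deps = []
            for name in self.pattern().findall(text):
                for d in [os.path.dirname(source_path)] + self.include_paths:
                    candidate = os.path.join(d, name)
                    if os.path.exists(candidate):
                        deps.append(candidate)
                        break
            return deps

    # Scanners are cached by class name plus the relevant properties, so two
    # actions with the same include paths share a single scanner instance.
    _scanner_cache = {}

    def get_scanner(include_paths):
        key = ("IncludeScanner", tuple(sorted(include_paths)))
        if key not in _scanner_cache:
            _scanner_cache[key] = IncludeScanner(include_paths)
        return _scanner_cache[key]

Sharing instances this way matters because each distinct scanner attached to a virtual target produces a distinct actual target, so uncontrolled scanner creation would needlessly duplicate build work.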
-# -# Scanner has a regular expression used to find dependencies, some -# data needed to interpret those dependencies (for example, include -# paths), and a code which actually established needed relationship -# between actual jam targets. -# -# Scanner objects are created by actions, when they try to actualize -# virtual targets, passed to 'virtual-target.actualize' method and are -# then associated with actual targets. It is possible to use -# several scanners for a virtual-target. For example, a single source -# might be used by to compile actions, with different include paths. -# In this case, two different actual targets will be created, each -# having scanner of its own. -# -# Typically, scanners are created from target type and action's -# properties, using the rule 'get' in this module. Directly creating -# scanners is not recommended, because it might create many equvivalent -# but different instances, and lead in unneeded duplication of -# actual targets. However, actions can also create scanners in a special -# way, instead of relying on just target type. - -import property -import bjam -import os -from b2.exceptions import * -from b2.manager import get_manager - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __scanners, __rv_cache, __scanner_cache - - # Maps registered scanner classes to relevant properties - __scanners = {} - - # A cache of scanners. - # The key is: class_name.properties_tag, where properties_tag is the concatenation - # of all relevant properties, separated by '-' - __scanner_cache = {} - -reset () - - -def register(scanner_class, relevant_properties): - """ Registers a new generator class, specifying a set of - properties relevant to this scanner. Ctor for that class - should have one parameter: list of properties. - """ - __scanners[str(scanner_class)] = relevant_properties - -def registered(scanner_class): - """ Returns true iff a scanner of that class is registered - """ - return __scanners.has_key(str(scanner_class)) - -def get(scanner_class, properties): - """ Returns an instance of previously registered scanner - with the specified properties. - """ - scanner_name = str(scanner_class) - - if not registered(scanner_name): - raise BaseException ("attempt to get unregisted scanner: %s" % scanner_name) - - relevant_properties = __scanners[scanner_name] - r = property.select(relevant_properties, properties) - - scanner_id = scanner_name + '.' + '-'.join(r) - - if not __scanner_cache.has_key(scanner_name): - __scanner_cache[scanner_name] = scanner_class(r) - - return __scanner_cache[scanner_name] - -class Scanner: - """ Base scanner class. - """ - def __init__ (self): - pass - - def pattern (self): - """ Returns a pattern to use for scanning. - """ - raise BaseException ("method must be overriden") - - def process (self, target, matches): - """ Establish necessary relationship between targets, - given actual target beeing scanned, and a list of - pattern matches in that file. - """ - raise BaseException ("method must be overriden") - - -# Common scanner class, which can be used when there's only one -# kind of includes (unlike C, where "" and <> includes have different -# search paths). 
-class CommonScanner(Scanner): - - def __init__ (self, includes): - Scanner.__init__(self) - self.includes = includes - - def process(self, target, matches, binding): - - target_path = os.path.normpath(os.path.dirname(binding[0])) - bjam.call("mark-included", target, matches) - - get_manager().engine().set_target_variable(matches, "SEARCH", - [target_path] + self.includes) - get_manager().scanners().propagate(self, matches) - -class ScannerRegistry: - - def __init__ (self, manager): - self.manager_ = manager - self.count_ = 0 - self.exported_scanners_ = {} - - def install (self, scanner, target, vtarget): - """ Installs the specified scanner on actual target 'target'. - vtarget: virtual target from which 'target' was actualized. - """ - engine = self.manager_.engine() - engine.set_target_variable(target, "HDRSCAN", scanner.pattern()) - if not self.exported_scanners_.has_key(scanner): - exported_name = "scanner_" + str(self.count_) - self.count_ = self.count_ + 1 - self.exported_scanners_[scanner] = exported_name - bjam.import_rule("", exported_name, scanner.process) - else: - exported_name = self.exported_scanners_[scanner] - - engine.set_target_variable(target, "HDRRULE", exported_name) - - # scanner reflects difference in properties affecting - # binding of 'target', which will be known when processing - # includes for it, will give information on how to - # interpret quoted includes. - engine.set_target_variable(target, "HDRGRIST", str(id(scanner))) - pass - - def propagate(self, scanner, targets): - engine = self.manager_.engine() - engine.set_target_variable(targets, "HDRSCAN", scanner.pattern()) - engine.set_target_variable(targets, "HDRRULE", - self.exported_scanners_[scanner]) - engine.set_target_variable(targets, "HDRGRIST", str(id(scanner))) - diff --git a/jam-files/boost-build/build/targets.jam b/jam-files/boost-build/build/targets.jam deleted file mode 100644 index a70532ce..00000000 --- a/jam-files/boost-build/build/targets.jam +++ /dev/null @@ -1,1659 +0,0 @@ -# Copyright Vladimir Prus 2002. -# Copyright Rene Rivera 2006. -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Supports 'abstract' targets, which are targets explicitly defined in a -# Jamfile. -# -# Abstract targets are represented by classes derived from 'abstract-target' -# class. The first abstract target is 'project-target', which is created for -# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module -# (see project.jam). -# -# Project targets keep a list of 'main-target' instances. A main target is what -# the user explicitly defines in a Jamfile. It is possible to have several -# definitions for a main target, for example to have different lists of sources -# for different platforms. So, main targets keep a list of alternatives. -# -# Each alternative is an instance of 'abstract-target'. When a main target -# subvariant is defined by some rule, that rule will decide what class to use, -# create an instance of that class and add it to the list of alternatives for -# the main target. -# -# Rules supplied by the build system will use only targets derived from -# 'basic-target' class, which will provide some default behaviour. There will be -# different classes derived from it such as 'make-target', created by the 'make' -# rule, and 'typed-target', created by rules such as 'exe' and 'lib'. 
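The targets.jam header above describes a small hierarchy: an abstract-target base that knows its name and project, a project-target created per Jamfile that owns the main targets declared in it, and main targets that keep a list of alternative definitions. The diagram that follows shows the same structure; the Python sketch below is a loose, illustrative rendering of it, with class and attribute names invented for the sketch rather than taken from the Boost.Build sources.

    class AbstractTarget:
        """Base for all targets declared in a Jamfile: knows its name and project."""

        def __init__(self, name, project):
            self.name = name
            self.project = project

        def generate(self, properties):
            # Derived classes produce virtual targets for the given properties.
            raise NotImplementedError

    class MainTarget(AbstractTarget):
        """A user-visible target; keeps a list of alternative definitions."""

        def __init__(self, name, project):
            super().__init__(name, project)
            self.alternatives = []

        def add_alternative(self, target):
            self.alternatives.append(target)

    class ProjectTarget(AbstractTarget):
        """Created per Jamfile; owns the main targets declared in it."""

        def __init__(self, name):
            super().__init__(name, project=None)
            self.main_targets = {}

        def main_target(self, name):
            # Main targets are created lazily, the first time they are asked for.
            if name not in self.main_targets:
                self.main_targets[name] = MainTarget(name, self)
            return self.main_targets[name]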
- -# -# +------------------------+ -# |abstract-target | -# +========================+ -# |name | -# |project | -# | | -# |generate(properties) = 0| -# +-----------+------------+ -# | -# ^ -# / \ -# +-+-+ -# | -# | -# +------------------------+------+------------------------------+ -# | | | -# | | | -# +----------+-----------+ +------+------+ +------+-------+ -# | project-target | | main-target | | basic-target | -# +======================+ 1 * +=============+ alternatives +==============+ -# | generate(properties) |o-----------+ generate |<>------------->| generate | -# | main-target | +-------------+ | construct = 0| -# +----------------------+ +--------------+ -# | -# ^ -# / \ -# +-+-+ -# | -# | -# ...--+----------------+------------------+----------------+---+ -# | | | | -# | | | | -# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+ -# | | typed-target | | make-target | | stage-target | -# . +==============+ +=============+ +==============+ -# . | construct | | construct | | construct | -# +--------------+ +-------------+ +--------------+ - -import assert ; -import "class" : new ; -import errors ; -import feature ; -import indirect ; -import path ; -import property ; -import property-set ; -import sequence ; -import set ; -import toolset ; -import build-request ; - - -# Base class for all abstract targets. -# -class abstract-target -{ - import project ; - import assert ; - import "class" ; - import errors ; - - rule __init__ ( name # Name of the target in Jamfile. - : project-target # The project target to which this one belongs. - ) - { - # Note: it might seem that we don't need either name or project at all. - # However, there are places where we really need it. One example is - # error messages which should name problematic targets. Another is - # setting correct paths for sources and generated files. - - self.name = $(name) ; - self.project = $(project-target) ; - self.location = [ errors.nearest-user-location ] ; - } - - # Returns the name of this target. - rule name ( ) - { - return $(self.name) ; - } - - # Returns the project for this target. - rule project ( ) - { - return $(self.project) ; - } - - # Return the location where the target was declared. - rule location ( ) - { - return $(self.location) ; - } - - # Returns a user-readable name for this target. - rule full-name ( ) - { - local location = [ $(self.project).get location ] ; - return $(location)/$(self.name) ; - } - - # Generates virtual targets for this abstract target using the specified - # properties, unless a different value of some feature is required by the - # target. - # On success, returns: - # - a property-set with the usage requirements to be applied to dependants - # - a list of produced virtual targets, which may be empty. - # If 'property-set' is empty, performs the default build of this target, in - # a way specific to the derived class. - # - rule generate ( property-set ) - { - errors.error "method should be defined in derived classes" ; - } - - rule rename ( new-name ) - { - self.name = $(new-name) ; - } -} - - -if --debug-building in [ modules.peek : ARGV ] -{ - modules.poke : .debug-building : true ; -} - - -rule indent ( ) -{ - return $(.indent:J="") ; -} - - -rule increase-indent ( ) -{ - .indent += " " ; -} - - -rule decrease-indent ( ) -{ - .indent = $(.indent[2-]) ; -} - - -# Project target class (derived from 'abstract-target'). -# -# This class has the following responsibilities: -# - Maintaining a list of main targets in this project and building them. 
-# -# Main targets are constructed in two stages: -# - When Jamfile is read, a number of calls to 'add-alternative' is made. At -# that time, alternatives can also be renamed to account for inline targets. -# - The first time 'main-target' or 'has-main-target' rule is called, all -# alternatives are enumerated and main targets are created. -# -class project-target : abstract-target -{ - import project ; - import targets ; - import path ; - import print ; - import property-set ; - import set ; - import sequence ; - import "class" : new ; - import errors ; - - rule __init__ ( name : project-module parent-project ? - : requirements * : default-build * ) - { - abstract-target.__init__ $(name) : $(__name__) ; - - self.project-module = $(project-module) ; - self.location = [ project.attribute $(project-module) location ] ; - self.requirements = $(requirements) ; - self.default-build = $(default-build) ; - - if $(parent-project) - { - inherit $(parent-project) ; - } - } - - # This is needed only by the 'make' rule. Need to find the way to make - # 'make' work without this method. - # - rule project-module ( ) - { - return $(self.project-module) ; - } - - rule get ( attribute ) - { - return [ project.attribute $(self.project-module) $(attribute) ] ; - } - - rule build-dir ( ) - { - if ! $(self.build-dir) - { - self.build-dir = [ get build-dir ] ; - if ! $(self.build-dir) - { - self.build-dir = [ path.join [ $(self.project).get location ] - bin ] ; - } - } - return $(self.build-dir) ; - } - - # Generates all possible targets contained in this project. - # - rule generate ( property-set * ) - { - if [ modules.peek : .debug-building ] - { - ECHO [ targets.indent ] "building project" [ name ] " ('$(__name__)') with" [ $(property-set).raw ] ; - targets.increase-indent ; - } - - local usage-requirements = [ property-set.empty ] ; - local targets ; - - for local t in [ targets-to-build ] - { - local g = [ $(t).generate $(property-set) ] ; - usage-requirements = [ $(usage-requirements).add $(g[1]) ] ; - targets += $(g[2-]) ; - } - targets.decrease-indent ; - return $(usage-requirements) [ sequence.unique $(targets) ] ; - } - - # Computes and returns a list of abstract-target instances which must be - # built when this project is built. - # - rule targets-to-build ( ) - { - local result ; - - if ! $(self.built-main-targets) - { - build-main-targets ; - } - - # Collect all main targets here, except for "explicit" ones. - for local t in $(self.main-targets) - { - if ! [ $(t).name ] in $(self.explicit-targets) - { - result += $(t) ; - } - } - - # Collect all projects referenced via "projects-to-build" attribute. - local self-location = [ get location ] ; - for local pn in [ get projects-to-build ] - { - result += [ find $(pn)/ ] ; - } - - return $(result) ; - } - - # Add 'target' to the list of targets in this project that should be build - # only by explicit request - # - rule mark-target-as-explicit ( target-name * ) - { - # Record the name of the target, not instance, since this rule is called - # before main target instances are created. - self.explicit-targets += $(target-name) ; - } - - rule mark-target-as-always ( target-name * ) - { - # Record the name of the target, not instance, since this rule is called - # before main target instances are created. - self.always-targets += $(target-name) ; - } - - # Add new target alternative - # - rule add-alternative ( target-instance ) - { - if $(self.built-main-targets) - { - errors.error add-alternative called when main targets are already - created. 
: in project [ full-name ] ; - } - self.alternatives += $(target-instance) ; - } - - # Returns a 'main-target' class instance corresponding to 'name'. - # - rule main-target ( name ) - { - if ! $(self.built-main-targets) - { - build-main-targets ; - } - return $(self.main-target.$(name)) ; - } - - # Returns whether a main target with the specified name exists. - # - rule has-main-target ( name ) - { - if ! $(self.built-main-targets) - { - build-main-targets ; - } - - if $(self.main-target.$(name)) - { - return true ; - } - } - - # Worker function for the find rule not implementing any caching and simply - # returning nothing in case the target can not be found. - # - rule find-really ( id ) - { - local result ; - local current-location = [ get location ] ; - - local split = [ MATCH (.*)//(.*) : $(id) ] ; - local project-part = $(split[1]) ; - local target-part = $(split[2]) ; - - local extra-error-message ; - if $(project-part) - { - # There is an explicitly specified project part in id. Looks up the - # project and passes the request to it. - local pm = [ project.find $(project-part) : $(current-location) ] ; - if $(pm) - { - project-target = [ project.target $(pm) ] ; - result = [ $(project-target).find $(target-part) : no-error ] ; - } - else - { - # TODO: This extra error message will not get displayed most - # likely due to some buggy refactoring. Refactor the code so the - # message gets diplayed again. - extra-error-message = error: could not find project - '$(project-part)' ; - } - } - else - { - # Interpret target-name as name of main target. Need to do this - # before checking for file. Consider the following scenario with a - # toolset not modifying its executable's names, e.g. gcc on - # Unix-like platforms: - # - # exe test : test.cpp ; - # install s : test : <location>. ; - # - # After the first build we would have a target named 'test' in the - # Jamfile and a file named 'test' on the disk. We need the target to - # override the file. - result = [ main-target $(id) ] ; - - # Interpret id as an existing file reference. - if ! $(result) - { - result = [ new file-reference [ path.make $(id) ] : - $(self.project) ] ; - if ! [ $(result).exists ] - { - result = ; - } - } - - # Interpret id as project-id. - if ! $(result) - { - local project-module = [ project.find $(id) : - $(current-location) ] ; - if $(project-module) - { - result = [ project.target $(project-module) ] ; - } - } - } - - return $(result) ; - } - - # Find and return the target with the specified id, treated relative to - # self. Id may specify either a target or a file name with the target taking - # priority. May report an error or return nothing if the target is not found - # depending on the 'no-error' parameter. - # - rule find ( id : no-error ? ) - { - local v = $(.id.$(id)) ; - if ! $(v) - { - v = [ find-really $(id) ] ; - if ! $(v) - { - v = none ; - } - .id.$(id) = $(v) ; - } - - if $(v) != none - { - return $(v) ; - } - else - { - if ! $(no-error) - { - local current-location = [ get location ] ; - ECHO "error: Unable to find file or target named" ; - ECHO "error: '$(id)'" ; - ECHO "error: referred from project at" ; - ECHO "error: '$(current-location)'" ; - ECHO $(extra-error-message) ; - EXIT ; - } - } - } - - rule build-main-targets ( ) - { - self.built-main-targets = true ; - for local a in $(self.alternatives) - { - local name = [ $(a).name ] ; - local target = $(self.main-target.$(name)) ; - if ! 
$(target) - { - local t = [ new main-target $(name) : $(self.project) ] ; - self.main-target.$(name) = $(t) ; - self.main-targets += $(t) ; - target = $(self.main-target.$(name)) ; - } - - if $(name) in $(self.always-targets) - { - $(a).always ; - } - - $(target).add-alternative $(a) ; - } - } - - # Accessor, add a constant. - # - rule add-constant ( - name # Variable name of the constant. - : value + # Value of the constant. - : type ? # Optional type of value. - ) - { - switch $(type) - { - case path : - local r ; - for local v in $(value) - { - local l = $(self.location) ; - if ! $(l) - { - # Project corresponding to config files do not have - # 'location' attribute, but do have source location. - # It might be more reasonable to make every project have - # a location and use some other approach to prevent buildable - # targets in config files, but that's for later. - l = [ get source-location ] ; - } - v = [ path.root [ path.make $(v) ] $(l) ] ; - # Now make the value absolute path. - v = [ path.root $(v) [ path.pwd ] ] ; - # Constants should be in platform-native form. - v = [ path.native $(v) ] ; - r += $(v) ; - } - value = $(r) ; - } - if ! $(name) in $(self.constants) - { - self.constants += $(name) ; - } - self.constant.$(name) = $(value) ; - # Inject the constant in the scope of the Jamroot module. - modules.poke $(self.project-module) : $(name) : $(value) ; - } - - rule inherit ( parent ) - { - for local c in [ modules.peek $(parent) : self.constants ] - { - # No need to pass the type. Path constants were converted to - # absolute paths already by parent. - add-constant $(c) - : [ modules.peek $(parent) : self.constant.$(c) ] ; - } - - # Import rules from parent. - local this-module = [ project-module ] ; - local parent-module = [ $(parent).project-module ] ; - # Do not import rules coming from 'project-rules' as they must be - # imported localized. - local user-rules = [ set.difference - [ RULENAMES $(parent-module) ] : - [ RULENAMES project-rules ] ] ; - IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ; - EXPORT $(this-module) : $(user-rules) ; - } -} - - -# Helper rules to detect cycles in main target references. -# -local rule start-building ( main-target-instance ) -{ - if $(main-target-instance) in $(.targets-being-built) - { - local names ; - for local t in $(.targets-being-built) $(main-target-instance) - { - names += [ $(t).full-name ] ; - } - - errors.error "Recursion in main target references" - : "the following target are being built currently:" - : $(names) ; - } - .targets-being-built += $(main-target-instance) ; -} - - -local rule end-building ( main-target-instance ) -{ - .targets-being-built = $(.targets-being-built[1--2]) ; -} - - -# A named top-level target in Jamfile. 
-# -class main-target : abstract-target -{ - import assert ; - import errors ; - import feature ; - import print ; - import property-set ; - import sequence ; - import targets : start-building end-building ; - - rule __init__ ( name : project ) - { - abstract-target.__init__ $(name) : $(project) ; - } - - # Add a new alternative for this target - rule add-alternative ( target ) - { - local d = [ $(target).default-build ] ; - if $(self.alternatives) && ( $(self.default-build) != $(d) ) - { - errors.error "default build must be identical in all alternatives" - : "main target is" [ full-name ] - : "with" [ $(d).raw ] - : "differing from previous default build" [ $(self.default-build).raw ] ; - } - else - { - self.default-build = $(d) ; - } - self.alternatives += $(target) ; - } - - # Returns the best viable alternative for this property-set. See the - # documentation for selection rules. - # - local rule select-alternatives ( property-set debug ? ) - { - # When selecting alternatives we have to consider defaults, for example: - # lib l : l.cpp : <variant>debug ; - # lib l : l_opt.cpp : <variant>release ; - # won't work unless we add default value <variant>debug. - property-set = [ $(p).add-defaults ] ; - - # The algorithm: we keep the current best viable alternative. When we've - # got a new best viable alternative, we compare it with the current one. - - local best ; - local best-properties ; - - if $(self.alternatives[2-]) - { - local bad ; - local worklist = $(self.alternatives) ; - while $(worklist) && ! $(bad) - { - local v = $(worklist[1]) ; - local properties = [ $(v).match $(property-set) $(debug) ] ; - - if $(properties) != no-match - { - if ! $(best) - { - best = $(v) ; - best-properties = $(properties) ; - } - else - { - if $(properties) = $(best-properties) - { - bad = true ; - } - else if $(properties) in $(best-properties) - { - # Do nothing, this alternative is worse - } - else if $(best-properties) in $(properties) - { - best = $(v) ; - best-properties = $(properties) ; - } - else - { - bad = true ; - } - } - } - worklist = $(worklist[2-]) ; - } - if ! $(bad) - { - return $(best) ; - } - } - else - { - return $(self.alternatives) ; - } - } - - rule apply-default-build ( property-set ) - { - return [ targets.apply-default-build $(property-set) - : $(self.default-build) ] ; - } - - # Select an alternative for this main target, by finding all alternatives - # which requirements are satisfied by 'properties' and picking the one with - # the longest requirements set. Returns the result of calling 'generate' on - # that alternative. - # - rule generate ( property-set ) - { - start-building $(__name__) ; - - # We want composite properties in build request act as if all the - # properties it expands too are explicitly specified. - property-set = [ $(property-set).expand ] ; - - local all-property-sets = [ apply-default-build $(property-set) ] ; - local usage-requirements = [ property-set.empty ] ; - local result ; - for local p in $(all-property-sets) - { - local r = [ generate-really $(p) ] ; - if $(r) - { - usage-requirements = [ $(usage-requirements).add $(r[1]) ] ; - result += $(r[2-]) ; - } - } - end-building $(__name__) ; - return $(usage-requirements) [ sequence.unique $(result) ] ; - } - - # Generates the main target with the given property set and returns a list - # which first element is property-set object containing usage-requirements - # of generated target and with generated virtual target in other elements. - # It is possible that no targets are generated. 
- # - local rule generate-really ( property-set ) - { - local best-alternatives = [ select-alternatives $(property-set) ] ; - if ! $(best-alternatives) - { - ECHO "error: No best alternative for" [ full-name ] ; - select-alternatives $(property-set) debug ; - return [ property-set.empty ] ; - } - else - { - # Now return virtual targets for the only alternative. - return [ $(best-alternatives).generate $(property-set) ] ; - } - } - - rule rename ( new-name ) - { - abstract-target.rename $(new-name) ; - for local a in $(self.alternatives) - { - $(a).rename $(new-name) ; - } - } -} - - -# Abstract target refering to a source file. This is an artificial entity -# allowing sources to a target to be represented using a list of abstract target -# instances. -# -class file-reference : abstract-target -{ - import virtual-target ; - import property-set ; - import path ; - - rule __init__ ( file : project ) - { - abstract-target.__init__ $(file) : $(project) ; - } - - rule generate ( properties ) - { - return [ property-set.empty ] [ virtual-target.from-file $(self.name) : - [ location ] : $(self.project) ] ; - } - - # Returns true if the referred file really exists. - rule exists ( ) - { - location ; - return $(self.file-path) ; - } - - # Returns the location of target. Needed by 'testing.jam'. - rule location ( ) - { - if ! $(self.file-location) - { - local source-location = [ $(self.project).get source-location ] ; - for local src-dir in $(source-location) - { - if ! $(self.file-location) - { - local location = [ path.root $(self.name) $(src-dir) ] ; - if [ CHECK_IF_FILE [ path.native $(location) ] ] - { - self.file-location = $(src-dir) ; - self.file-path = $(location) ; - } - } - } - } - return $(self.file-location) ; - } -} - - -# Given a target-reference, made in context of 'project', returns the -# abstract-target instance that is referred to, as well as properties explicitly -# specified for this reference. -# -rule resolve-reference ( target-reference : project ) -{ - # Separate target name from properties override. - local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ; - local id = $(split[1]) ; - local sproperties = ; - if $(split[3]) - { - sproperties = [ property.make [ feature.split $(split[3]) ] ] ; - sproperties = [ feature.expand-composites $(sproperties) ] ; - } - - # Find the target. - local target = [ $(project).find $(id) ] ; - - return $(target) [ property-set.create $(sproperties) ] ; -} - - -# Attempts to generate the target given by target reference, which can refer -# both to a main target or to a file. Returns a list consisting of -# - usage requirements -# - generated virtual targets, if any -# -rule generate-from-reference ( - target-reference # Target reference. - : project # Project where the reference is made. - : property-set # Properties of the main target that makes the reference. -) -{ - local r = [ resolve-reference $(target-reference) : $(project) ] ; - local target = $(r[1]) ; - local sproperties = $(r[2]) ; - - # Take properties which should be propagated and refine them with - # source-specific requirements. - local propagated = [ $(property-set).propagated ] ; - local rproperties = [ $(propagated).refine $(sproperties) ] ; - if $(rproperties[1]) = "@error" - { - errors.error - "When building" [ full-name ] " with properties " $(properties) : - "Invalid properties specified for " $(source) ":" - $(rproperties[2-]) ; - } - return [ $(target).generate $(rproperties) ] ; -} - -rule apply-default-build ( property-set : default-build ) -{ - # 1. 
First, see what properties from default-build are already present - # in property-set. - - local raw = [ $(property-set).raw ] ; - local specified-features = $(raw:G) ; - - local defaults-to-apply ; - for local d in [ $(default-build).raw ] - { - if ! $(d:G) in $(specified-features) - { - defaults-to-apply += $(d) ; - } - } - - # 2. If there are any defaults to be applied, form a new build request. - # Pass it through to 'expand-no-defaults' since default-build might - # contain "release debug" resulting in two property-sets. - local result ; - if $(defaults-to-apply) - { - properties = [ - build-request.expand-no-defaults - - # We have to compress subproperties here to prevent property - # lists like: - # - # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi - # - # from being expanded into: - # - # <toolset-msvc:version>7.1/<threading>multi - # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi - # - # due to a cross-product property combination. That may be an - # indication that build-request.expand-no-defaults is the wrong - # rule to use here. - [ feature.compress-subproperties $(raw) ] - $(defaults-to-apply) - ] ; - - if $(properties) - { - for local p in $(properties) - { - result += [ property-set.create - [ feature.expand [ feature.split $(p) ] ] ] ; - } - } - else - { - result = [ property-set.empty ] ; - } - } - else - { - result = $(property-set) ; - } - return $(result) ; -} - - -# Given a build request and requirements, return properties common to dependency -# build request and target requirements. -# -# TODO: Document exactly what 'common properties' are, whether they should -# include default property values, whether they should contain any conditional -# properties or should those be already processed, etc. See whether there are -# any differences between use cases with empty and non-empty build-request as -# well as with requirements containing and those not containing any non-free -# features. -# -rule common-properties ( build-request requirements ) -{ - # For optimization, we add free requirements directly, without using a - # complex algorithm. This gives the complex algorithm a better chance of - # caching results. - local free = [ $(requirements).free ] ; - local non-free = [ property-set.create [ $(requirements).base ] - [ $(requirements).incidental ] ] ; - - local key = .rp.$(build-request)-$(non-free) ; - if ! $($(key)) - { - $(key) = [ common-properties2 $(build-request) $(non-free) ] ; - } - result = [ $($(key)).add-raw $(free) ] ; -} - - -# Given a 'context' -- a set of already present properties, and 'requirements', -# decide which extra properties should be applied to 'context'. For conditional -# requirements, this means evaluating the condition. For indirect conditional -# requirements, this means calling a rule. Ordinary requirements are always -# applied. -# -# Handles the situation where evaluating one conditional requirement affects -# conditions of another conditional requirements, such as: -# <toolset>gcc:<variant>release <variant>release:<define>RELEASE -# -# If 'what' is 'refined' returns context refined with new requirements. If -# 'what' is 'added' returns just the requirements to be applied. -# -rule evaluate-requirements ( requirements : context : what ) -{ - # Apply non-conditional requirements. It is possible that further - # conditional requirement change a value set by non-conditional - # requirements. 
For example: - # - # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ; - # - # I am not sure if this should be an error, or not, especially given that - # - # <threading>single - # - # might come from project's requirements. - - local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] ; - - local raw = [ $(context).raw ] ; - raw = [ property.refine $(raw) : $(unconditional) ] ; - - # We have collected properties that surely must be present in common - # properties. We now try to figure out what other properties should be added - # in order to satisfy rules (4)-(6) from the docs. - - local conditionals = [ $(requirements).conditional ] ; - # The 'count' variable has one element for each conditional feature and for - # each occurrence of '<indirect-conditional>' feature. It is used as a loop - # counter: for each iteration of the loop before we remove one element and - # the property set should stabilize before we are done. It is assumed that - # #conditionals iterations should be enough for properties to propagate - # along conditions in any direction. - local count = $(conditionals) - [ $(requirements).get <conditional> ] - and-once-more ; - - local added-requirements ; - - local current = $(raw) ; - - # It is assumed that ordinary conditional requirements can not add - # <conditional> properties (a.k.a. indirect conditional properties), and - # that rules referred to by <conditional> properties can not add new - # <conditional> properties. So the list of indirect conditionals does not - # change. - local indirect = [ $(requirements).get <conditional> ] ; - indirect = [ MATCH ^@(.*) : $(indirect) ] ; - - local ok ; - while $(count) - { - # Evaluate conditionals in context of current properties. - local e = [ property.evaluate-conditionals-in-context $(conditionals) - : $(current) ] ; - - # Evaluate indirect conditionals. - for local i in $(indirect) - { - e += [ indirect.call $(i) $(current) ] ; - } - - if $(e) = $(added-requirements) - { - # If we got the same result, we have found the final properties. - count = ; - ok = true ; - } - else - { - # Oops, conditional evaluation results have changed. Also 'current' - # contains leftovers from a previous evaluation. Recompute 'current' - # using initial properties and conditional requirements. - added-requirements = $(e) ; - current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ; - } - count = $(count[2-]) ; - } - if ! $(ok) - { - errors.error "Can not evaluate conditional properties " $(conditionals) ; - } - - if $(what) = added - { - return [ property-set.create $(unconditional) $(added-requirements) ] ; - } - else if $(what) = refined - { - return [ property-set.create $(current) ] ; - } - else - { - errors.error "Invalid value of the 'what' parameter." ; - } -} - - -rule common-properties2 ( build-request requirements ) -{ - # This guarantees that default properties are present in the result, unless - # they are overriden by some requirement. FIXME: There is possibility that - # we have added <foo>bar, which is composite and expands to <foo2>bar2, but - # default value of <foo2> is not bar2, in which case it is not clear what to - # do. - # - build-request = [ $(build-request).add-defaults ] ; - # Features added by 'add-default' can be composite and expand to features - # without default values -- so they are not added yet. It could be clearer/ - # /faster to expand only newly added properties but that is not critical. 
- build-request = [ $(build-request).expand ] ; - - return [ evaluate-requirements $(requirements) : $(build-request) : - refined ] ; -} - -rule push-target ( target ) -{ - .targets = $(target) $(.targets) ; -} - -rule pop-target ( ) -{ - .targets = $(.targets[2-]) ; -} - -# Return the metatarget that is currently being generated. -rule current ( ) -{ - return $(.targets[1]) ; -} - - -# Implements the most standard way of constructing main target alternative from -# sources. Allows sources to be either file or other main target and handles -# generation of those dependency targets. -# -class basic-target : abstract-target -{ - import build-request ; - import build-system ; - import "class" : new ; - import errors ; - import feature ; - import property ; - import property-set ; - import sequence ; - import set ; - import targets ; - import virtual-target ; - - rule __init__ ( name : project : sources * : requirements * - : default-build * : usage-requirements * ) - { - abstract-target.__init__ $(name) : $(project) ; - - self.sources = $(sources) ; - if ! $(requirements) { - requirements = [ property-set.empty ] ; - } - self.requirements = $(requirements) ; - if ! $(default-build) - { - default-build = [ property-set.empty ] ; - } - self.default-build = $(default-build) ; - if ! $(usage-requirements) - { - usage-requirements = [ property-set.empty ] ; - } - self.usage-requirements = $(usage-requirements) ; - - if $(sources:G) - { - errors.user-error properties found in the 'sources' parameter for - [ full-name ] ; - } - } - - rule always ( ) - { - self.always = 1 ; - } - - # Returns the list of abstract-targets which are used as sources. The extra - # properties specified for sources are not represented. The only user for - # this rule at the moment is the "--dump-tests" feature of the test system. - # - rule sources ( ) - { - if ! $(self.source-targets) - { - for local s in $(self.sources) - { - self.source-targets += - [ targets.resolve-reference $(s) : $(self.project) ] ; - } - } - return $(self.source-targets) ; - } - - rule requirements ( ) - { - return $(self.requirements) ; - } - - rule default-build ( ) - { - return $(self.default-build) ; - } - - # Returns the alternative condition for this alternative, if the condition - # is satisfied by 'property-set'. - # - rule match ( property-set debug ? ) - { - # The condition is composed of all base non-conditional properties. It - # is not clear if we should expand 'self.requirements' or not. For one - # thing, it would be nice to be able to put - # <toolset>msvc-6.0 - # in requirements. On the other hand, if we have <variant>release as a - # condition it does not make sense to require <optimization>full to be - # in the build request just to select this variant. - local bcondition = [ $(self.requirements).base ] ; - local ccondition = [ $(self.requirements).conditional ] ; - local condition = [ set.difference $(bcondition) : $(ccondition) ] ; - if $(debug) - { - ECHO " next alternative: required properties:" $(condition:E=(empty)) ; - } - - if $(condition) in [ $(property-set).raw ] - { - if $(debug) - { - ECHO " matched" ; - } - return $(condition) ; - } - else - { - if $(debug) - { - ECHO " not matched" ; - } - return no-match ; - } - } - - # Takes a target reference, which might be either target id or a dependency - # property, and generates that target using 'property-set' as build request. - # - # The results are added to the variable called 'result-var'. 
Usage - # requirements are added to the variable called 'usage-requirements-var'. - # - rule generate-dependencies ( dependencies * : property-set - : result-var usage-requirements-var ) - { - for local dependency in $(dependencies) - { - local grist = $(dependency:G) ; - local id = $(dependency:G=) ; - - local result = [ targets.generate-from-reference $(id) : - $(self.project) : $(property-set) ] ; - - $(result-var) += $(result[2-]:G=$(grist)) ; - $(usage-requirements-var) += [ $(result[1]).raw ] ; - } - } - - # Determines final build properties, generates sources, and calls - # 'construct'. This method should not be overridden. - # - rule generate ( property-set ) - { - if [ modules.peek : .debug-building ] - { - ECHO ; - local fn = [ full-name ] ; - ECHO [ targets.indent ] "Building target '$(fn)'" ; - targets.increase-indent ; - ECHO [ targets.indent ] "Build request: " $(property-set) [ $(property-set).raw ] ; - local cf = [ build-system.command-line-free-features ] ; - ECHO [ targets.indent ] "Command line free features: " [ $(cf).raw ] ; - ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ; - } - targets.push-target $(__name__) ; - - if ! $(self.generated.$(property-set)) - { - # Apply free features from the command line. If user said - # define=FOO - # he most likely wants this define to be set for all compiles. - property-set = [ $(property-set).refine - [ build-system.command-line-free-features ] ] ; - local rproperties = [ targets.common-properties $(property-set) - $(self.requirements) ] ; - - if [ modules.peek : .debug-building ] - { - ECHO ; - ECHO [ targets.indent ] "Common properties: " [ $(rproperties).raw ] ; - } - - if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get - <build> ] != no ) - { - local source-targets ; - local properties = [ $(rproperties).non-dependency ] ; - local usage-requirements ; - - generate-dependencies [ $(rproperties).dependency ] : - $(rproperties) : properties usage-requirements ; - - generate-dependencies $(self.sources) : $(rproperties) : - source-targets usage-requirements ; - - if [ modules.peek : .debug-building ] - { - ECHO ; - ECHO [ targets.indent ] "Usage requirements for" - $(self.name)": " $(usage-requirements) ; - } - - rproperties = [ property-set.create $(properties) - $(usage-requirements) ] ; - usage-requirements = [ property-set.create $(usage-requirements) ] ; - - if [ modules.peek : .debug-building ] - { - ECHO [ targets.indent ] "Build properties: " - [ $(rproperties).raw ] ; - } - - local extra = [ $(rproperties).get <source> ] ; - source-targets += $(extra:G=) ; - # We might get duplicate sources, for example if we link to two - # libraries having the same <library> usage requirement. - # Use stable sort, since for some targets the order is - # important. E.g. RUN_PY target need python source to come - # first. 
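
The 'sequence.unique ... : stable' call that follows removes such duplicate sources while preserving the first occurrence of each one, which is what keeps, for example, the Python source first for RUN_PY targets. A rough Python equivalent of that order-preserving de-duplication (illustrative only):

    def stable_unique(items):
        # Drop later duplicates but keep the original relative order of first occurrences.
        seen = set()
        result = []
        for item in items:
            if item not in seen:
                seen.add(item)
                result.append(item)
        return result

    # stable_unique(["test.py", "libfoo", "libfoo", "util.cpp"])
    # -> ["test.py", "libfoo", "util.cpp"]
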
- source-targets = [ sequence.unique $(source-targets) : stable ] ; - - local result = [ construct $(self.name) : $(source-targets) : - $(rproperties) ] ; - - if $(result) - { - local gur = $(result[1]) ; - result = $(result[2-]) ; - - if $(self.always) - { - for local t in $(result) - { - $(t).always ; - } - } - - local s = [ create-subvariant $(result) - : [ virtual-target.recent-targets ] - : $(property-set) : $(source-targets) - : $(rproperties) : $(usage-requirements) ] ; - virtual-target.clear-recent-targets ; - - local ur = [ compute-usage-requirements $(s) ] ; - ur = [ $(ur).add $(gur) ] ; - $(s).set-usage-requirements $(ur) ; - if [ modules.peek : .debug-building ] - { - ECHO [ targets.indent ] "Usage requirements from" - $(self.name)": " [ $(ur).raw ] ; - } - - self.generated.$(property-set) = $(ur) $(result) ; - } - } - else - { - if $(rproperties[1]) = "@error" - { - ECHO [ targets.indent ] "Skipping build of:" [ full-name ] - "cannot compute common properties" ; - } - else if [ $(rproperties).get <build> ] = no - { - # If we just see <build>no, we cannot produce any reasonable - # diagnostics. The code that adds this property is expected - # to explain why a target is not built, for example using - # the configure.log-component-configuration function. - } - else - { - ECHO [ targets.indent ] "Skipping build of: " [ full-name ] - " unknown reason" ; - } - - # We are here either because there has been an error computing - # properties or there is <build>no in properties. In the latter - # case we do not want any diagnostic. In the former case, we - # need diagnostics. FIXME - - # If this target fails to build, add <build>no to properties to - # cause any parent target to fail to build. Except that it - # - does not work now, since we check for <build>no only in - # common properties, but not in properties that came from - # dependencies - # - it is not clear if that is a good idea anyway. The alias - # target, for example, should not fail to build if a - # dependency fails. - self.generated.$(property-set) = [ property-set.create <build>no ] ; - } - } - else - { - if [ modules.peek : .debug-building ] - { - ECHO [ targets.indent ] "Already built" ; - local ur = $(self.generated.$(property-set)) ; - ur = $(ur[0]) ; - targets.increase-indent ; - ECHO [ targets.indent ] "Usage requirements from" - $(self.name)": " [ $(ur).raw ] ; - targets.decrease-indent ; - } - } - - targets.pop-target ; - targets.decrease-indent ; - return $(self.generated.$(property-set)) ; - } - - # Given the set of generated targets, and refined build properties, - # determines and sets appropriate usage requirements on those targets. - # - rule compute-usage-requirements ( subvariant ) - { - local rproperties = [ $(subvariant).build-properties ] ; - xusage-requirements = [ targets.evaluate-requirements - $(self.usage-requirements) : $(rproperties) : added ] ; - - # We generate all dependency properties and add them, as well as their - # usage requirements, to the result. - local extra ; - generate-dependencies [ $(xusage-requirements).dependency ] : - $(rproperties) : extra extra ; - - local result = [ property-set.create - [ $(xusage-requirements).non-dependency ] $(extra) ] ; - - # Propagate usage requirements we got from sources, except for the - # <pch-header> and <pch-file> features. - # - # That feature specifies which pch file to use, and should apply only to - # direct dependents. Consider: - # - # pch pch1 : ... - # lib lib1 : ..... 
pch1 ; - # pch pch2 : - # lib lib2 : pch2 lib1 ; - # - # Here, lib2 should not get <pch-header> property from pch1. - # - # Essentially, when those two features are in usage requirements, they - # are propagated only to direct dependents. We might need a more general - # mechanism, but for now, only those two features are special. - # - # TODO - Actually there are more possible candidates like for instance - # when listing static library X as a source for another static library. - # Then static library X will be added as a <source> property to the - # second library's usage requirements but those requirements should last - # only up to the first executable or shared library that actually links - # to it. - local raw = [ $(subvariant).sources-usage-requirements ] ; - raw = [ $(raw).raw ] ; - raw = [ property.change $(raw) : <pch-header> ] ; - raw = [ property.change $(raw) : <pch-file> ] ; - return [ $(result).add [ property-set.create $(raw) ] ] ; - } - - # Creates new subvariant instances for 'targets'. - # 'root-targets' - virtual targets to be returned to dependants - # 'all-targets' - virtual targets created while building this main target - # 'build-request' - property-set instance with requested build properties - # - local rule create-subvariant ( root-targets * : all-targets * : - build-request : sources * : rproperties : usage-requirements ) - { - for local e in $(root-targets) - { - $(e).root true ; - } - - # Process all virtual targets that will be created if this main target - # is created. - local s = [ new subvariant $(__name__) : $(build-request) : $(sources) : - $(rproperties) : $(usage-requirements) : $(all-targets) ] ; - for local v in $(all-targets) - { - if ! [ $(v).creating-subvariant ] - { - $(v).creating-subvariant $(s) ; - } - } - return $(s) ; - } - - # Constructs virtual targets for this abstract target and the dependency - # graph. Returns a usage-requirements property-set and a list of virtual - # targets. Should be overriden in derived classes. - # - rule construct ( name : source-targets * : properties * ) - { - errors.error "method should be defined in derived classes" ; - } -} - - -class typed-target : basic-target -{ - import generators ; - - rule __init__ ( name : project : type : sources * : requirements * : - default-build * : usage-requirements * ) - { - basic-target.__init__ $(name) : $(project) : $(sources) : - $(requirements) : $(default-build) : $(usage-requirements) ; - - self.type = $(type) ; - } - - rule type ( ) - { - return $(self.type) ; - } - - rule construct ( name : source-targets * : property-set ) - { - local r = [ generators.construct $(self.project) $(name:S=) : $(self.type) - : [ property-set.create [ $(property-set).raw ] - <main-target-type>$(self.type) ] - : $(source-targets) : true ] ; - if ! $(r) - { - ECHO "warn: Unable to construct" [ full-name ] ; - - # Are there any top-level generators for this type/property set. - if ! [ generators.find-viable-generators $(self.type) - : $(property-set) ] - { - ECHO "error: no generators were found for type '$(self.type)'" ; - ECHO "error: and the requested properties" ; - ECHO "error: make sure you've configured the needed tools" ; - ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ; - ECHO "To debug this problem, try the --debug-generators option." ; - EXIT ; - } - } - return $(r) ; - } -} - - -# Return the list of sources to use, if main target rule is invoked with -# 'sources'. 
If there are any objects in 'sources', they are treated as main -# target instances, and the name of such targets are adjusted to be -# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if -# a non-empty value is passed as the 'no-renaming' parameter. -# -rule main-target-sources ( sources * : main-target-name : no-renaming ? ) -{ - local result ; - for local t in $(sources) - { - if [ class.is-instance $(t) ] - { - local name = [ $(t).name ] ; - if ! $(no-renaming) - { - name = $(main-target-name)__$(name) ; - $(t).rename $(name) ; - } - # Inline targets are not built by default. - local p = [ $(t).project ] ; - $(p).mark-target-as-explicit $(name) ; - result += $(name) ; - } - else - { - result += $(t) ; - } - } - return $(result) ; -} - - -# Returns the requirements to use when declaring a main target, obtained by -# translating all specified property paths and refining project requirements -# with the ones specified for the target. -# -rule main-target-requirements ( - specification * # Properties explicitly specified for the main target. - : project # Project where the main target is to be declared. -) -{ - specification += [ toolset.requirements ] ; - - local requirements = [ property-set.refine-from-user-input - [ $(project).get requirements ] : $(specification) : - [ $(project).project-module ] : [ $(project).get location ] ] ; - if $(requirements[1]) = "@error" - { - errors.error "Conflicting requirements for target:" $(requirements) ; - } - return $(requirements) ; -} - - -# Returns the usage requirements to use when declaring a main target, which are -# obtained by translating all specified property paths and adding project's -# usage requirements. -# -rule main-target-usage-requirements ( - specification * # Use-properties explicitly specified for a main target. - : project # Project where the main target is to be declared. -) -{ - local project-usage-requirements = [ $(project).get usage-requirements ] ; - - # We do not use 'refine-from-user-input' because: - # - I am not sure if removing parent's usage requirements makes sense - # - refining usage requirements is not needed, since usage requirements are - # always free. - local usage-requirements = [ property-set.create-from-user-input - $(specification) - : [ $(project).project-module ] [ $(project).get location ] ] ; - - return [ $(project-usage-requirements).add $(usage-requirements) ] ; -} - - -# Return the default build value to use when declaring a main target, which is -# obtained by using the specified value if not empty and parent's default build -# attribute otherwise. -# -rule main-target-default-build ( - specification * # Default build explicitly specified for a main target. - : project # Project where the main target is to be declared. -) -{ - local result ; - if $(specification) - { - result = $(specification) ; - } - else - { - result = [ $(project).get default-build ] ; - } - return [ property-set.create-with-validation $(result) ] ; -} - - -# Registers the specified target as a main target alternative and returns it. -# -rule main-target-alternative ( target ) -{ - local ptarget = [ $(target).project ] ; - $(ptarget).add-alternative $(target) ; - return $(target) ; -} - -# Creates a new metargets with the specified properties, using 'klass' as -# the class. The 'name', 'sources', -# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in -# the form specified by the user in Jamfile corresponding to 'project'. 
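
For intuition, the refinement performed by main-target-requirements above, where the target's own requirements override the project requirements feature by feature, can be sketched in Python. The helper below is a toy stand-in, not the property-set.refine-from-user-input implementation (free features, conditionals and property paths are ignored):

    def refine(project_requirements, target_requirements):
        # A target requirement replaces the project requirement for the same feature;
        # all other project requirements are kept. Properties are "<feature>value" strings.
        def feature(prop):
            return prop.split(">", 1)[0]
        overridden = set(feature(p) for p in target_requirements)
        kept = [p for p in project_requirements if feature(p) not in overridden]
        return kept + list(target_requirements)

    # refine(["<variant>debug", "<warnings>all"], ["<variant>release"])
    # -> ["<warnings>all", "<variant>release"]
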
-# -rule create-metatarget ( klass : project : name : sources * : requirements * : - default-build * : usage-requirements * ) -{ - return [ - targets.main-target-alternative - [ new $(klass) $(name) : $(project) - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] - ] ] ; -} - -# Creates a typed-target with the specified properties. The 'name', 'sources', -# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in -# the form specified by the user in Jamfile corresponding to 'project'. -# -rule create-typed-target ( type : project : name : sources * : requirements * : - default-build * : usage-requirements * ) -{ - return [ - targets.main-target-alternative - [ new typed-target $(name) : $(project) : $(type) - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] - ] ] ; -} diff --git a/jam-files/boost-build/build/targets.py b/jam-files/boost-build/build/targets.py deleted file mode 100644 index a35612ce..00000000 --- a/jam-files/boost-build/build/targets.py +++ /dev/null @@ -1,1401 +0,0 @@ -# Status: ported. -# Base revision: 64488 - -# Copyright Vladimir Prus 2002-2007. -# Copyright Rene Rivera 2006. -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Supports 'abstract' targets, which are targets explicitly defined in Jamfile. -# -# Abstract targets are represented by classes derived from 'AbstractTarget' class. -# The first abstract target is 'project_target', which is created for each -# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module. -# (see project.jam). -# -# Project targets keep a list of 'MainTarget' instances. -# A main target is what the user explicitly defines in a Jamfile. It is -# possible to have several definitions for a main target, for example to have -# different lists of sources for different platforms. So, main targets -# keep a list of alternatives. -# -# Each alternative is an instance of 'AbstractTarget'. When a main target -# subvariant is defined by some rule, that rule will decide what class to -# use, create an instance of that class and add it to the list of alternatives -# for the main target. -# -# Rules supplied by the build system will use only targets derived -# from 'BasicTarget' class, which will provide some default behaviour. -# There will be two classes derived from it, 'make-target', created by the -# 'make' rule, and 'TypedTarget', created by rules such as 'exe' and 'dll'. 
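
The ASCII diagram that follows shows this inheritance structure; as a minimal Python sketch of the same shape (heavily simplified, the real classes take many more parameters):

    class AbstractTarget(object):
        def __init__(self, name, project):
            self.name, self.project = name, project
        def generate(self, properties):
            raise NotImplementedError   # every concrete target must implement this

    class BasicTarget(AbstractTarget):
        def generate(self, properties):
            # Compute final build properties, generate sources, then delegate.
            return self.construct(self.name, sources=[], properties=properties)
        def construct(self, name, sources, properties):
            raise NotImplementedError   # filled in by TypedTarget, make-target, etc.

    class TypedTarget(BasicTarget):
        def __init__(self, name, project, type):
            BasicTarget.__init__(self, name, project)
            self.type = type
        def construct(self, name, sources, properties):
            return []                   # the real class hands off to the generators framework
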
- -# -# +------------------------+ -# |AbstractTarget | -# +========================+ -# |name | -# |project | -# | | -# |generate(properties) = 0| -# +-----------+------------+ -# | -# ^ -# / \ -# +-+-+ -# | -# | -# +------------------------+------+------------------------------+ -# | | | -# | | | -# +----------+-----------+ +------+------+ +------+-------+ -# | project_target | | MainTarget | | BasicTarget | -# +======================+ 1 * +=============+ alternatives +==============+ -# | generate(properties) |o-----------+ generate |<>------------->| generate | -# | main-target | +-------------+ | construct = 0| -# +----------------------+ +--------------+ -# | -# ^ -# / \ -# +-+-+ -# | -# | -# ...--+----------------+------------------+----------------+---+ -# | | | | -# | | | | -# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+ -# | | TypedTarget | | make-target | | stage-target | -# . +==============+ +=============+ +==============+ -# . | construct | | construct | | construct | -# +--------------+ +-------------+ +--------------+ - -import re -import os.path -import sys - -from b2.manager import get_manager - -from b2.util.utility import * -import property, project, virtual_target, property_set, feature, generators, toolset -from virtual_target import Subvariant -from b2.exceptions import * -from b2.util.sequence import unique -from b2.util import path, bjam_signature -from b2.build.errors import user_error_checkpoint - -import b2.build.build_request as build_request - -import b2.util.set -_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$') - -class TargetRegistry: - - def __init__ (self): - # All targets that are currently being built. - # Only the key is id (target), the value is the actual object. - self.targets_being_built_ = {} - - # Current indent for debugging messages - self.indent_ = "" - - self.debug_building_ = "--debug-building" in bjam.variable("ARGV") - - self.targets_ = [] - - def main_target_alternative (self, target): - """ Registers the specified target as a main target alternatives. - Returns 'target'. - """ - target.project ().add_alternative (target) - return target - - def main_target_sources (self, sources, main_target_name, no_renaming=0): - """Return the list of sources to use, if main target rule is invoked - with 'sources'. If there are any objects in 'sources', they are treated - as main target instances, and the name of such targets are adjusted to - be '<name_of_this_target>__<name_of_source_target>'. Such renaming - is disabled is non-empty value is passed for 'no-renaming' parameter.""" - result = [] - - for t in sources: - - t = b2.util.jam_to_value_maybe(t) - - if isinstance (t, AbstractTarget): - name = t.name () - - if not no_renaming: - name = main_target_name + '__' + name - t.rename (name) - - # Inline targets are not built by default. 
- p = t.project() - p.mark_targets_as_explicit([name]) - result.append(name) - - else: - result.append (t) - - return result - - - def main_target_requirements(self, specification, project): - """Returns the requirement to use when declaring a main target, - which are obtained by - - translating all specified property paths, and - - refining project requirements with the one specified for the target - - 'specification' are the properties xplicitly specified for a - main target - 'project' is the project where the main taret is to be declared.""" - - specification.extend(toolset.requirements()) - - requirements = property_set.refine_from_user_input( - project.get("requirements"), specification, - project.project_module(), project.get("location")) - - return requirements - - def main_target_usage_requirements (self, specification, project): - """ Returns the use requirement to use when declaraing a main target, - which are obtained by - - translating all specified property paths, and - - adding project's usage requirements - specification: Use-properties explicitly specified for a main target - project: Project where the main target is to be declared - """ - project_usage_requirements = project.get ('usage-requirements') - - # We don't use 'refine-from-user-input' because I'm not sure if: - # - removing of parent's usage requirements makes sense - # - refining of usage requirements is not needed, since usage requirements - # are always free. - usage_requirements = property_set.create_from_user_input( - specification, project.project_module(), project.get("location")) - - return project_usage_requirements.add (usage_requirements) - - def main_target_default_build (self, specification, project): - """ Return the default build value to use when declaring a main target, - which is obtained by using specified value if not empty and parent's - default build attribute otherwise. - specification: Default build explicitly specified for a main target - project: Project where the main target is to be declared - """ - if specification: - return property_set.create_with_validation(specification) - else: - return project.get ('default-build') - - def start_building (self, main_target_instance): - """ Helper rules to detect cycles in main target references. - """ - if self.targets_being_built_.has_key(id(main_target_instance)): - names = [] - for t in self.targets_being_built_.values() + [main_target_instance]: - names.append (t.full_name()) - - get_manager().errors()("Recursion in main target references\n") - - self.targets_being_built_[id(main_target_instance)] = main_target_instance - - def end_building (self, main_target_instance): - assert (self.targets_being_built_.has_key (id (main_target_instance))) - del self.targets_being_built_ [id (main_target_instance)] - - def create_typed_target (self, type, project, name, sources, requirements, default_build, usage_requirements): - """ Creates a TypedTarget with the specified properties. - The 'name', 'sources', 'requirements', 'default_build' and - 'usage_requirements' are assumed to be in the form specified - by the user in Jamfile corresponding to 'project'. 
- """ - return self.main_target_alternative (TypedTarget (name, project, type, - self.main_target_sources (sources, name), - self.main_target_requirements (requirements, project), - self.main_target_default_build (default_build, project), - self.main_target_usage_requirements (usage_requirements, project))) - - def increase_indent(self): - self.indent_ += " " - - def decrease_indent(self): - self.indent_ = self.indent_[0:-4] - - def logging(self): - return self.debug_building_ - - def log(self, message): - if self.debug_building_: - print self.indent_ + message - - def push_target(self, target): - self.targets_.append(target) - - def pop_target(self): - self.targets_ = self.targets_[:-1] - - def current(self): - return self.targets_[0] - - -class GenerateResult: - - def __init__ (self, ur=None, targets=None): - if not targets: - targets = [] - - self.__usage_requirements = ur - self.__targets = targets - assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets) - - if not self.__usage_requirements: - self.__usage_requirements = property_set.empty () - - def usage_requirements (self): - return self.__usage_requirements - - def targets (self): - return self.__targets - - def extend (self, other): - assert (isinstance (other, GenerateResult)) - - self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ()) - self.__targets.extend (other.targets ()) - -class AbstractTarget: - """ Base class for all abstract targets. - """ - def __init__ (self, name, project, manager = None): - """ manager: the Manager object - name: name of the target - project: the project target to which this one belongs - manager:the manager object. If none, uses project.manager () - """ - assert (isinstance (project, ProjectTarget)) - # Note: it might seem that we don't need either name or project at all. - # However, there are places where we really need it. One example is error - # messages which should name problematic targets. Another is setting correct - # paths for sources and generated files. - - # Why allow manager to be specified? Because otherwise project target could not derive - # from this class. - if manager: - self.manager_ = manager - else: - self.manager_ = project.manager () - - self.name_ = name - self.project_ = project - - def manager (self): - return self.manager_ - - def name (self): - """ Returns the name of this target. - """ - return self.name_ - - def project (self): - """ Returns the project for this target. - """ - return self.project_ - - def location (self): - """ Return the location where the target was declared. - """ - return self.location_ - - def full_name (self): - """ Returns a user-readable name for this target. - """ - location = self.project ().get ('location') - return location + '/' + self.name_ - - def generate (self, property_set): - """ Takes a property set. Generates virtual targets for this abstract - target, using the specified properties, unless a different value of some - feature is required by the target. - On success, returns a GenerateResult instance with: - - a property_set with the usage requirements to be - applied to dependents - - a list of produced virtual targets, which may be - empty. - If 'property_set' is empty, performs default build of this - target, in a way specific to derived class. 
- """ - raise BaseException ("method should be defined in derived classes") - - def rename (self, new_name): - self.name_ = new_name - -class ProjectTarget (AbstractTarget): - """ Project target class (derived from 'AbstractTarget') - - This class these responsibilities: - - maintaining a list of main target in this project and - building it - - Main targets are constructed in two stages: - - When Jamfile is read, a number of calls to 'add_alternative' is made. - At that time, alternatives can also be renamed to account for inline - targets. - - The first time 'main-target' or 'has-main-target' rule is called, - all alternatives are enumerated an main targets are created. - """ - def __init__ (self, manager, name, project_module, parent_project, requirements, default_build): - AbstractTarget.__init__ (self, name, self, manager) - - self.project_module_ = project_module - self.location_ = manager.projects().attribute (project_module, 'location') - self.requirements_ = requirements - self.default_build_ = default_build - - self.build_dir_ = None - - # A cache of IDs - self.ids_cache_ = {} - - # True is main targets have already been built. - self.built_main_targets_ = False - - # A list of the registered alternatives for this project. - self.alternatives_ = [] - - # A map from main target name to the target corresponding - # to it. - self.main_target_ = {} - - # Targets marked as explicit. - self.explicit_targets_ = set() - - # Targets marked as always - self.always_targets_ = set() - - # The constants defined for this project. - self.constants_ = {} - - # Whether targets for all main target are already created. - self.built_main_targets_ = 0 - - if parent_project: - self.inherit (parent_project) - - - # TODO: This is needed only by the 'make' rule. Need to find the - # way to make 'make' work without this method. - def project_module (self): - return self.project_module_ - - def get (self, attribute): - return self.manager().projects().attribute( - self.project_module_, attribute) - - def build_dir (self): - if not self.build_dir_: - self.build_dir_ = self.get ('build-dir') - if not self.build_dir_: - self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin') - - return self.build_dir_ - - def generate (self, ps): - """ Generates all possible targets contained in this project. - """ - self.manager_.targets().log( - "Building project '%s' with '%s'" % (self.name (), str(ps))) - self.manager_.targets().increase_indent () - - result = GenerateResult () - - for t in self.targets_to_build (): - g = t.generate (ps) - result.extend (g) - - self.manager_.targets().decrease_indent () - return result - - def targets_to_build (self): - """ Computes and returns a list of AbstractTarget instances which - must be built when this project is built. - """ - result = [] - - if not self.built_main_targets_: - self.build_main_targets () - - # Collect all main targets here, except for "explicit" ones. - for n, t in self.main_target_.iteritems (): - if not t.name () in self.explicit_targets_: - result.append (t) - - # Collect all projects referenced via "projects-to-build" attribute. - self_location = self.get ('location') - for pn in self.get ('projects-to-build'): - result.append (self.find(pn + "/")) - - return result - - def mark_targets_as_explicit (self, target_names): - """Add 'target' to the list of targets in this project - that should be build only by explicit request.""" - - # Record the name of the target, not instance, since this - # rule is called before main target instaces are created. 
- self.explicit_targets_.update(target_names) - - def mark_targets_as_always(self, target_names): - self.always_targets_.update(target_names) - - def add_alternative (self, target_instance): - """ Add new target alternative. - """ - if self.built_main_targets_: - raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ()) - - self.alternatives_.append (target_instance) - - def main_target (self, name): - if not self.built_main_targets_: - self.build_main_targets() - - return self.main_target_[name] - - def has_main_target (self, name): - """Tells if a main target with the specified name exists.""" - if not self.built_main_targets_: - self.build_main_targets() - - return self.main_target_.has_key(name) - - def create_main_target (self, name): - """ Returns a 'MainTarget' class instance corresponding to the 'name'. - """ - if not self.built_main_targets_: - self.build_main_targets () - - return self.main_targets_.get (name, None) - - - def find_really(self, id): - """ Find and return the target with the specified id, treated - relative to self. - """ - result = None - current_location = self.get ('location') - - __re_split_project_target = re.compile (r'(.*)//(.*)') - split = __re_split_project_target.match (id) - - project_part = None - target_part = None - - if split: - project_part = split.group (1) - target_part = split.group (2) - - project_registry = self.project_.manager ().projects () - - extra_error_message = '' - if project_part: - # There's explicit project part in id. Looks up the - # project and pass the request to it. - pm = project_registry.find (project_part, current_location) - - if pm: - project_target = project_registry.target (pm) - result = project_target.find (target_part, no_error=1) - - else: - extra_error_message = "error: could not find project '$(project_part)'" - - else: - # Interpret target-name as name of main target - # Need to do this before checking for file. Consider this: - # - # exe test : test.cpp ; - # install s : test : <location>. ; - # - # After first build we'll have target 'test' in Jamfile and file - # 'test' on the disk. We need target to override the file. - - result = None - if self.has_main_target(id): - result = self.main_target(id) - - if not result: - result = FileReference (self.manager_, id, self.project_) - if not result.exists (): - # File actually does not exist. - # Reset 'target' so that an error is issued. - result = None - - - if not result: - # Interpret id as project-id - project_module = project_registry.find (id, current_location) - if project_module: - result = project_registry.target (project_module) - - return result - - def find (self, id, no_error = False): - v = self.ids_cache_.get (id, None) - - if not v: - v = self.find_really (id) - self.ids_cache_ [id] = v - - if v or no_error: - return v - - raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location'))) - - - def build_main_targets (self): - self.built_main_targets_ = True - - for a in self.alternatives_: - name = a.name () - if not self.main_target_.has_key (name): - t = MainTarget (name, self.project_) - self.main_target_ [name] = t - - if name in self.always_targets_: - a.always() - - self.main_target_ [name].add_alternative (a) - - def add_constant(self, name, value, path=0): - """Adds a new constant for this project. - - The constant will be available for use in Jamfile - module for this project. 
If 'path' is true, - the constant will be interpreted relatively - to the location of project. - """ - - if path: - l = self.location_ - if not l: - # Project corresponding to config files do not have - # 'location' attribute, but do have source location. - # It might be more reasonable to make every project have - # a location and use some other approach to prevent buildable - # targets in config files, but that's for later. - l = get('source-location') - - value = os.path.join(l, value) - # Now make the value absolute path - value = os.path.join(os.getcwd(), value) - - self.constants_[name] = value - bjam.call("set-variable", self.project_module(), name, value) - - def inherit(self, parent_project): - for c in parent_project.constants_: - # No need to pass the type. Path constants were converted to - # absolute paths already by parent. - self.add_constant(c, parent_project.constants_[c]) - - # Import rules from parent - this_module = self.project_module() - parent_module = parent_project.project_module() - - rules = bjam.call("RULENAMES", parent_module) - if not rules: - rules = [] - user_rules = [x for x in rules - if x not in self.manager().projects().project_rules().all_names()] - if user_rules: - bjam.call("import-rules-from-parent", parent_module, this_module, user_rules) - -class MainTarget (AbstractTarget): - """ A named top-level target in Jamfile. - """ - def __init__ (self, name, project): - AbstractTarget.__init__ (self, name, project) - self.alternatives_ = [] - self.default_build_ = property_set.empty () - - def add_alternative (self, target): - """ Add a new alternative for this target. - """ - d = target.default_build () - - if self.alternatives_ and self.default_build_ != d: - get_manager().errors()("default build must be identical in all alternatives\n" - "main target is '%s'\n" - "with '%s'\n" - "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ())) - - else: - self.default_build_ = d - - self.alternatives_.append (target) - - def __select_alternatives (self, property_set, debug): - """ Returns the best viable alternative for this property_set - See the documentation for selection rules. - # TODO: shouldn't this be 'alternative' (singular)? - """ - # When selecting alternatives we have to consider defaults, - # for example: - # lib l : l.cpp : <variant>debug ; - # lib l : l_opt.cpp : <variant>release ; - # won't work unless we add default value <variant>debug. - property_set = property_set.add_defaults () - - # The algorithm: we keep the current best viable alternative. - # When we've got new best viable alternative, we compare it - # with the current one. 
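
Stripped of defaults handling and debug output, that comparison is a containment test between matched property sets; a condensed sketch of the selection loop (plain sets of property strings stand in for property-set objects):

    def pick_best(matches):
        # matches: list of (alternative, matched_properties-as-a-set) pairs.
        # Returns the single best alternative, or None when the choice is ambiguous.
        best, best_props = None, None
        for alt, props in matches:
            if best is None:
                best, best_props = alt, props
            elif props == best_props:
                return None                    # two alternatives match equally well
            elif props < best_props:
                pass                           # strictly weaker condition: keep current best
            elif best_props < props:
                best, best_props = alt, props  # strictly stronger condition wins
            else:
                return None                    # incomparable conditions: ambiguous
        return best
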
- best = None - best_properties = None - - if len (self.alternatives_) == 0: - return None - - if len (self.alternatives_) == 1: - return self.alternatives_ [0] - - if debug: - print "Property set for selection:", property_set - - for v in self.alternatives_: - properties = v.match (property_set, debug) - - if properties is not None: - if not best: - best = v - best_properties = properties - - else: - if b2.util.set.equal (properties, best_properties): - return None - - elif b2.util.set.contains (properties, best_properties): - # Do nothing, this alternative is worse - pass - - elif b2.util.set.contains (best_properties, properties): - best = v - best_properties = properties - - else: - return None - - return best - - def apply_default_build (self, property_set): - return apply_default_build(property_set, self.default_build_) - - def generate (self, ps): - """ Select an alternative for this main target, by finding all alternatives - which requirements are satisfied by 'properties' and picking the one with - longest requirements set. - Returns the result of calling 'generate' on that alternative. - """ - self.manager_.targets ().start_building (self) - - # We want composite properties in build request act as if - # all the properties it expands too are explicitly specified. - ps = ps.expand () - - all_property_sets = self.apply_default_build (ps) - - result = GenerateResult () - - for p in all_property_sets: - result.extend (self.__generate_really (p)) - - self.manager_.targets ().end_building (self) - - return result - - def __generate_really (self, prop_set): - """ Generates the main target with the given property set - and returns a list which first element is property_set object - containing usage_requirements of generated target and with - generated virtual target in other elements. It's possible - that no targets are generated. - """ - best_alternative = self.__select_alternatives (prop_set, debug=0) - - if not best_alternative: - # FIXME: revive. - # self.__select_alternatives(prop_set, debug=1) - self.manager_.errors()( - "No best alternative for '%s'.\n" - % (self.full_name(),)) - - result = best_alternative.generate (prop_set) - - # Now return virtual targets for the only alternative - return result - - def rename(self, new_name): - AbstractTarget.rename(self, new_name) - for a in self.alternatives_: - a.rename(new_name) - -class FileReference (AbstractTarget): - """ Abstract target which refers to a source file. - This is artificial creature; it's usefull so that sources to - a target can be represented as list of abstract target instances. - """ - def __init__ (self, manager, file, project): - AbstractTarget.__init__ (self, file, project) - self.file_location_ = None - - def generate (self, properties): - return GenerateResult (None, [ - self.manager_.virtual_targets ().from_file ( - self.name_, self.location(), self.project_) ]) - - def exists (self): - """ Returns true if the referred file really exists. - """ - if self.location (): - return True - else: - return False - - def location (self): - # Returns the location of target. 
Needed by 'testing.jam' - if not self.file_location_: - source_location = self.project_.get('source-location') - - for src_dir in source_location: - location = os.path.join(src_dir, self.name()) - if os.path.isfile(location): - self.file_location_ = src_dir - self.file_path = location - break - - return self.file_location_ - -def resolve_reference(target_reference, project): - """ Given a target_reference, made in context of 'project', - returns the AbstractTarget instance that is referred to, as well - as properties explicitly specified for this reference. - """ - # Separate target name from properties override - split = _re_separate_target_from_properties.match (target_reference) - if not split: - raise BaseException ("Invalid reference: '%s'" % target_reference) - - id = split.group (1) - - sproperties = [] - - if split.group (3): - sproperties = property.create_from_strings(feature.split(split.group(3))) - sproperties = feature.expand_composites(sproperties) - - # Find the target - target = project.find (id) - - return (target, property_set.create(sproperties)) - -def generate_from_reference(target_reference, project, property_set): - """ Attempts to generate the target given by target reference, which - can refer both to a main target or to a file. - Returns a list consisting of - - usage requirements - - generated virtual targets, if any - target_reference: Target reference - project: Project where the reference is made - property_set: Properties of the main target that makes the reference - """ - target, sproperties = resolve_reference(target_reference, project) - - # Take properties which should be propagated and refine them - # with source-specific requirements. - propagated = property_set.propagated() - rproperties = propagated.refine(sproperties) - - return target.generate(rproperties) - - - -class BasicTarget (AbstractTarget): - """ Implements the most standard way of constructing main target - alternative from sources. Allows sources to be either file or - other main target and handles generation of those dependency - targets. - """ - def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None): - AbstractTarget.__init__ (self, name, project) - - for s in sources: - if get_grist (s): - raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name)) - - self.sources_ = sources - - if not requirements: requirements = property_set.empty () - self.requirements_ = requirements - - if not default_build: default_build = property_set.empty () - self.default_build_ = default_build - - if not usage_requirements: usage_requirements = property_set.empty () - self.usage_requirements_ = usage_requirements - - # A cache for resolved references - self.source_targets_ = None - - # A cache for generated targets - self.generated_ = {} - - # A cache for build requests - self.request_cache = {} - - # Result of 'capture_user_context' has everything. For example, if this - # target is declare as result of loading Jamfile which was loaded when - # building target B which was requested from A, then we'll have A, B and - # Jamroot location in context. We only care about Jamroot location, most - # of the times. - self.user_context_ = self.manager_.errors().capture_user_context()[-1:] - - self.always_ = False - - def always(self): - self.always_ = True - - def sources (self): - """ Returns the list of AbstractTargets which are used as sources. - The extra properties specified for sources are not represented. 
- The only used of this rule at the moment is the '--dump-tests' - feature of the test system. - """ - if self.source_targets_ == None: - self.source_targets_ = [] - for s in self.sources_: - self.source_targets_.append(resolve_reference(s, self.project_)[0]) - - return self.source_targets_ - - def requirements (self): - return self.requirements_ - - def default_build (self): - return self.default_build_ - - def common_properties (self, build_request, requirements): - """ Given build request and requirements, return properties - common to dependency build request and target build - properties. - """ - # For optimization, we add free unconditional requirements directly, - # without using complex algorithsm. - # This gives the complex algorithm better chance of caching results. - # The exact effect of this "optimization" is no longer clear - free_unconditional = [] - other = [] - for p in requirements.all(): - if p.feature().free() and not p.condition() and p.feature().name() != 'conditional': - free_unconditional.append(p) - else: - other.append(p) - other = property_set.create(other) - - key = (build_request, other) - if not self.request_cache.has_key(key): - self.request_cache[key] = self.__common_properties2 (build_request, other) - - return self.request_cache[key].add_raw(free_unconditional) - - # Given 'context' -- a set of already present properties, and 'requirements', - # decide which extra properties should be applied to 'context'. - # For conditional requirements, this means evaluating condition. For - # indirect conditional requirements, this means calling a rule. Ordinary - # requirements are always applied. - # - # Handles situation where evaluating one conditional requirements affects - # condition of another conditional requirements, for example: - # - # <toolset>gcc:<variant>release <variant>release:<define>RELEASE - # - # If 'what' is 'refined' returns context refined with new requirements. - # If 'what' is 'added' returns just the requirements that must be applied. - def evaluate_requirements(self, requirements, context, what): - # Apply non-conditional requirements. - # It's possible that that further conditional requirement change - # a value set by non-conditional requirements. For example: - # - # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ; - # - # I'm not sure if this should be an error, or not, especially given that - # - # <threading>single - # - # might come from project's requirements. - unconditional = feature.expand(requirements.non_conditional()) - - context = context.refine(property_set.create(unconditional)) - - # We've collected properties that surely must be present in common - # properties. We now try to figure out what other properties - # should be added in order to satisfy rules (4)-(6) from the docs. - - conditionals = property_set.create(requirements.conditional()) - - # It's supposed that #conditionals iterations - # should be enough for properties to propagate along conditions in any - # direction. - max_iterations = len(conditionals.all()) +\ - len(requirements.get("<conditional>")) + 1 - - added_requirements = [] - current = context - - # It's assumed that ordinary conditional requirements can't add - # <indirect-conditional> properties, and that rules referred - # by <indirect-conditional> properties can't add new - # <indirect-conditional> properties. So the list of indirect conditionals - # does not change. 
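
The loop that follows applies exactly that scheme; as a compact illustration of the fixed-point idea, with toy condition/property pairs standing in for real conditional requirements such as <toolset>gcc:<variant>release:

    def evaluate_conditionals(base_properties, rules, max_iterations):
        # rules: list of (condition, added_property) pairs, a stand-in for
        # "<toolset>gcc:<variant>release  <variant>release:<define>RELEASE".
        added = set()
        current = set(base_properties)
        for _ in range(max_iterations):
            triggered = set(value for cond, value in rules if cond in current)
            if triggered == added:
                return current                      # nothing new: fixed point reached
            added = triggered
            current = set(base_properties) | added  # recompute from the initial properties
        raise RuntimeError("conditional properties did not stabilise")

    rules = [("<toolset>gcc", "<variant>release"), ("<variant>release", "<define>RELEASE")]
    # evaluate_conditionals(["<toolset>gcc"], rules, len(rules) + 1)
    # -> set(["<toolset>gcc", "<variant>release", "<define>RELEASE"])
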
- indirect = requirements.get("<conditional>") - - ok = 0 - for i in range(0, max_iterations): - - e = conditionals.evaluate_conditionals(current).all()[:] - - # Evaluate indirect conditionals. - for i in indirect: - i = b2.util.jam_to_value_maybe(i) - if callable(i): - # This is Python callable, yeah. - e.extend(i(current)) - else: - # Name of bjam function. Because bjam is unable to handle - # list of Property, pass list of strings. - br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()]) - if br: - e.extend(property.create_from_strings(br)) - - if e == added_requirements: - # If we got the same result, we've found final properties. - ok = 1 - break - else: - # Oops, results of evaluation of conditionals has changed. - # Also 'current' contains leftover from previous evaluation. - # Recompute 'current' using initial properties and conditional - # requirements. - added_requirements = e - current = context.refine(property_set.create(feature.expand(e))) - - if not ok: - self.manager().errors()("Can't evaluate conditional properties " - + str(conditionals)) - - - if what == "added": - return property_set.create(unconditional + added_requirements) - elif what == "refined": - return current - else: - self.manager().errors("Invalid value of the 'what' parameter") - - def __common_properties2(self, build_request, requirements): - # This guarantees that default properties are present - # in result, unless they are overrided by some requirement. - # TODO: There is possibility that we've added <foo>bar, which is composite - # and expands to <foo2>bar2, but default value of <foo2> is not bar2, - # in which case it's not clear what to do. - # - build_request = build_request.add_defaults() - # Featured added by 'add-default' can be composite and expand - # to features without default values -- so they are not added yet. - # It could be clearer/faster to expand only newly added properties - # but that's not critical. - build_request = build_request.expand() - - return self.evaluate_requirements(requirements, build_request, - "refined") - - def match (self, property_set, debug): - """ Returns the alternative condition for this alternative, if - the condition is satisfied by 'property_set'. - """ - # The condition is composed of all base non-conditional properties. - # It's not clear if we should expand 'self.requirements_' or not. - # For one thing, it would be nice to be able to put - # <toolset>msvc-6.0 - # in requirements. - # On the other hand, if we have <variant>release in condition it - # does not make sense to require <optimization>full to be in - # build request just to select this variant. 
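For illustration only, the containment test described above can be written as a few lines of standalone Python (property sets reduced to plain strings, helper name invented): an alternative is selected when all of its non-conditional base properties appear in the build request.

    def match(alternative_requirements, build_request):
        condition = set(alternative_requirements)
        if condition <= set(build_request):   # b2.util.set.contains, in effect
            return condition
        return None

    request = {"<toolset>gcc", "<variant>release", "<link>static"}
    print(match(["<link>static"], request))                # {'<link>static'}: selected
    print(match(["<link>static", "<rtti>off"], request))   # None: alternative skipped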
- bcondition = self.requirements_.base () - ccondition = self.requirements_.conditional () - condition = b2.util.set.difference (bcondition, ccondition) - - if debug: - print " next alternative: required properties:", [str(p) for p in condition] - - if b2.util.set.contains (condition, property_set.all()): - - if debug: - print " matched" - - return condition - - else: - return None - - - def generate_dependency_targets (self, target_ids, property_set): - targets = [] - usage_requirements = [] - for id in target_ids: - - result = generate_from_reference(id, self.project_, property_set) - targets += result.targets() - usage_requirements += result.usage_requirements().all() - - return (targets, usage_requirements) - - def generate_dependency_properties(self, properties, ps): - """ Takes a target reference, which might be either target id - or a dependency property, and generates that target using - 'property_set' as build request. - - Returns a tuple (result, usage_requirements). - """ - result_properties = [] - usage_requirements = [] - for p in properties: - - result = generate_from_reference(p.value(), self.project_, ps) - - for t in result.targets(): - result_properties.append(property.Property(p.feature(), t)) - - usage_requirements += result.usage_requirements().all() - - return (result_properties, usage_requirements) - - - - - @user_error_checkpoint - def generate (self, ps): - """ Determines final build properties, generates sources, - and calls 'construct'. This method should not be - overridden. - """ - self.manager_.errors().push_user_context( - "Generating target " + self.full_name(), self.user_context_) - - if self.manager().targets().logging(): - self.manager().targets().log( - "Building target '%s'" % self.name_) - self.manager().targets().increase_indent () - self.manager().targets().log( - "Build request: '%s'" % str (ps.raw ())) - cf = self.manager().command_line_free_features() - self.manager().targets().log( - "Command line free features: '%s'" % str (cf.raw ())) - self.manager().targets().log( - "Target requirements: %s'" % str (self.requirements().raw ())) - - self.manager().targets().push_target(self) - - if not self.generated_.has_key(ps): - - # Apply free features form the command line. If user - # said - # define=FOO - # he most likely want this define to be set for all compiles. - ps = ps.refine(self.manager().command_line_free_features()) - rproperties = self.common_properties (ps, self.requirements_) - - self.manager().targets().log( - "Common properties are '%s'" % str (rproperties)) - - if rproperties.get("<build>") != ["no"]: - - result = GenerateResult () - - properties = rproperties.non_dependency () - - (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties) - properties += p - assert all(isinstance(p, property.Property) for p in properties) - usage_requirements = u - - (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties) - usage_requirements += u - - self.manager_.targets().log( - "Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements)) - - # FIXME: - - rproperties = property_set.create(properties + usage_requirements) - usage_requirements = property_set.create (usage_requirements) - - self.manager_.targets().log( - "Build properties: '%s'" % str(rproperties)) - - source_targets += rproperties.get('<source>') - - # We might get duplicate sources, for example if - # we link to two library which have the same <library> in - # usage requirements. 
- # Use stable sort, since for some targets the order is - # important. E.g. RUN_PY target need python source to come - # first. - source_targets = unique(source_targets, stable=True) - - # FIXME: figure why this call messes up source_targets in-place - result = self.construct (self.name_, source_targets[:], rproperties) - - if result: - assert len(result) == 2 - gur = result [0] - result = result [1] - - if self.always_: - for t in result: - t.always() - - s = self.create_subvariant ( - result, - self.manager().virtual_targets().recent_targets(), ps, - source_targets, rproperties, usage_requirements) - self.manager().virtual_targets().clear_recent_targets() - - ur = self.compute_usage_requirements (s) - ur = ur.add (gur) - s.set_usage_requirements (ur) - - self.manager_.targets().log ( - "Usage requirements from '%s' are '%s'" % - (self.name(), str(rproperties))) - - self.generated_[ps] = GenerateResult (ur, result) - else: - self.generated_[ps] = GenerateResult (property_set.empty(), []) - else: - # If we just see <build>no, we cannot produce any reasonable - # diagnostics. The code that adds this property is expected - # to explain why a target is not built, for example using - # the configure.log-component-configuration function. - - # If this target fails to build, add <build>no to properties - # to cause any parent target to fail to build. Except that it - # - does not work now, since we check for <build>no only in - # common properties, but not in properties that came from - # dependencies - # - it's not clear if that's a good idea anyway. The alias - # target, for example, should not fail to build if a dependency - # fails. - self.generated_[ps] = GenerateResult( - property_set.create(["<build>no"]), []) - else: - self.manager().targets().log ("Already built") - - self.manager().targets().pop_target() - self.manager().targets().decrease_indent() - - return self.generated_[ps] - - def compute_usage_requirements (self, subvariant): - """ Given the set of generated targets, and refined build - properties, determines and sets appripriate usage requirements - on those targets. - """ - rproperties = subvariant.build_properties () - xusage_requirements =self.evaluate_requirements( - self.usage_requirements_, rproperties, "added") - - # We generate all dependency properties and add them, - # as well as their usage requirements, to result. - (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties) - extra = r1 + r2 - - result = property_set.create (xusage_requirements.non_dependency () + extra) - - # Propagate usage requirements we've got from sources, except - # for the <pch-header> and <pch-file> features. - # - # That feature specifies which pch file to use, and should apply - # only to direct dependents. Consider: - # - # pch pch1 : ... - # lib lib1 : ..... pch1 ; - # pch pch2 : - # lib lib2 : pch2 lib1 ; - # - # Here, lib2 should not get <pch-header> property from pch1. - # - # Essentially, when those two features are in usage requirements, - # they are propagated only to direct dependents. We might need - # a more general mechanism, but for now, only those two - # features are special. 
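A tiny standalone sketch of that filtering step, with feature names treated as plain string prefixes, the helper name invented, and the example values made up: usage requirements coming from sources are forwarded minus the two pch features, so only direct dependents such as lib1 ever see <pch-header> from pch1.

    def strip_pch(usage_requirements):
        blocked = ("<pch-header>", "<pch-file>")
        return [p for p in usage_requirements if not p.startswith(blocked)]

    ur_from_pch1 = ["<pch-header>pch1.hpp", "<define>USES_PCH", "<include>common"]
    print(strip_pch(ur_from_pch1))
    # ['<define>USES_PCH', '<include>common'] is what would propagate past lib1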
- raw = subvariant.sources_usage_requirements().raw() - raw = property.change(raw, "<pch-header>", None); - raw = property.change(raw, "<pch-file>", None); - result = result.add(property_set.create(raw)) - - return result - - def create_subvariant (self, root_targets, all_targets, - build_request, sources, - rproperties, usage_requirements): - """Creates a new subvariant-dg instances for 'targets' - - 'root-targets' the virtual targets will be returned to dependents - - 'all-targets' all virtual - targets created while building this main target - - 'build-request' is property-set instance with - requested build properties""" - - for e in root_targets: - e.root (True) - - s = Subvariant (self, build_request, sources, - rproperties, usage_requirements, all_targets) - - for v in all_targets: - if not v.creating_subvariant(): - v.creating_subvariant(s) - - return s - - def construct (self, name, source_targets, properties): - """ Constructs the virtual targets for this abstract targets and - the dependecy graph. Returns a tuple consisting of the properties and the list of virtual targets. - Should be overrided in derived classes. - """ - raise BaseException ("method should be defined in derived classes") - - -class TypedTarget (BasicTarget): - import generators - - def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements): - BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements) - self.type_ = type - - def __jam_repr__(self): - return b2.util.value_to_jam(self) - - def type (self): - return self.type_ - - def construct (self, name, source_targets, prop_set): - - r = generators.construct (self.project_, name, self.type_, - prop_set.add_raw(['<main-target-type>' + self.type_]), - source_targets, True) - - if not r: - print "warning: Unable to construct '%s'" % self.full_name () - - # Are there any top-level generators for this type/property set. - if not generators.find_viable_generators (self.type_, prop_set): - print "error: no generators were found for type '" + self.type_ + "'" - print "error: and the requested properties" - print "error: make sure you've configured the needed tools" - print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" - - print "To debug this problem, try the --debug-generators option." - sys.exit(1) - - return r - -def apply_default_build(property_set, default_build): - # 1. First, see what properties from default_build - # are already present in property_set. - - specified_features = set(p.feature() for p in property_set.all()) - - defaults_to_apply = [] - for d in default_build.all(): - if not d.feature() in specified_features: - defaults_to_apply.append(d) - - # 2. If there's any defaults to be applied, form the new - # build request. Pass it throw 'expand-no-defaults', since - # default_build might contain "release debug", which will - # result in two property_sets. - result = [] - if defaults_to_apply: - - # We have to compress subproperties here to prevent - # property lists like: - # - # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi - # - # from being expanded into: - # - # <toolset-msvc:version>7.1/<threading>multi - # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi - # - # due to cross-product property combination. That may - # be an indication that - # build_request.expand-no-defaults is the wrong rule - # to use here. 
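The first step of apply_default_build described above ("see what properties from default_build are already present") amounts to a per-feature membership test. A minimal standalone model, with the grist-parsing helper invented for the example:

    def defaults_to_apply(build_request, default_build):
        def feature(prop):                 # '<variant>debug' -> '<variant>'
            return prop.split(">", 1)[0] + ">"
        specified = {feature(p) for p in build_request}
        return [d for d in default_build if feature(d) not in specified]

    print(defaults_to_apply(["<toolset>gcc"], ["<variant>debug", "<link>shared"]))
    # ['<variant>debug', '<link>shared']  (both defaults still apply)
    print(defaults_to_apply(["<variant>release"], ["<variant>debug", "<link>shared"]))
    # ['<link>shared']  (<variant> is already pinned by the request)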
- compressed = feature.compress_subproperties(property_set.all()) - - result = build_request.expand_no_defaults( - b2.build.property_set.create([p]) for p in (compressed + defaults_to_apply)) - - else: - result.append (property_set) - - return result - - -def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements): - - from b2.manager import get_manager - t = get_manager().targets() - - project = get_manager().projects().current() - - return t.main_target_alternative( - TypedTarget(name, project, type, - t.main_target_sources(sources, name), - t.main_target_requirements(requirements, project), - t.main_target_default_build(default_build, project), - t.main_target_usage_requirements(usage_requirements, project))) - - -def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]): - from b2.manager import get_manager - t = get_manager().targets() - - project = get_manager().projects().current() - - return t.main_target_alternative( - klass(name, project, - t.main_target_sources(sources, name), - t.main_target_requirements(requirements, project), - t.main_target_default_build(default_build, project), - t.main_target_usage_requirements(usage_requirements, project))) - -def metatarget_function_for_class(class_): - - @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"], - ["default_build", "*"], ["usage_requirements", "*"])) - def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []): - - from b2.manager import get_manager - t = get_manager().targets() - - project = get_manager().projects().current() - - return t.main_target_alternative( - class_(name, project, - t.main_target_sources(sources, name), - t.main_target_requirements(requirements, project), - t.main_target_default_build(default_build, project), - t.main_target_usage_requirements(usage_requirements, project))) - - return create_metatarget diff --git a/jam-files/boost-build/build/toolset.jam b/jam-files/boost-build/build/toolset.jam deleted file mode 100644 index f2036d99..00000000 --- a/jam-files/boost-build/build/toolset.jam +++ /dev/null @@ -1,502 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2005 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Support for toolset definition. - -import errors ; -import feature ; -import generators ; -import numbers ; -import path ; -import property ; -import regex ; -import sequence ; -import set ; - - -.flag-no = 1 ; - -.ignore-requirements = ; - -# This is used only for testing, to make sure we do not get random extra -# elements in paths. -if --ignore-toolset-requirements in [ modules.peek : ARGV ] -{ - .ignore-requirements = 1 ; -} - - -# Initializes an additional toolset-like module. First load the 'toolset-module' -# and then calls its 'init' rule with trailing arguments. -# -rule using ( toolset-module : * ) -{ - import $(toolset-module) ; - $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -} - - -# Expands subfeatures in each property sets, e.g. '<toolset>gcc-3.2' will be -# converted to '<toolset>gcc/<toolset-version>3.2'. 
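A rough string-level model of that subfeature expansion; the real rule consults the declared subfeatures, while this toy function only handles the single-dash case shown in the comment and its name is invented:

    def expand_subfeature(prop):
        feature, value = prop[1:].split(">", 1)     # '<toolset>gcc-3.2' -> ('toolset', 'gcc-3.2')
        if "-" not in value:
            return prop
        main, sub = value.split("-", 1)
        return "<%s>%s/<%s-version>%s" % (feature, main, feature, sub)

    print(expand_subfeature("<toolset>gcc-3.2"))    # <toolset>gcc/<toolset-version>3.2
    print(expand_subfeature("<variant>debug"))      # unchanged, no subfeature present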
-# -local rule normalize-condition ( property-sets * ) -{ - local result ; - for local p in $(property-sets) - { - local split = [ feature.split $(p) ] ; - local expanded = [ feature.expand-subfeatures [ feature.split $(p) ] ] ; - result += $(expanded:J=/) ; - } - return $(result) ; -} - - -# Specifies if the 'flags' rule should check that the invoking module is the -# same as the module we are setting the flag for. 'v' can be either 'checked' or -# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore -# the setting that was in effect before calling this rule. -# -rule push-checking-for-flags-module ( v ) -{ - .flags-module-checking = $(v) $(.flags-module-checking) ; -} - -rule pop-checking-for-flags-module ( ) -{ - .flags-module-checking = $(.flags-module-checking[2-]) ; -} - - -# Specifies the flags (variables) that must be set on targets under certain -# conditions, described by arguments. -# -rule flags ( - rule-or-module # If contains a dot, should be a rule name. The flags will - # be applied when that rule is used to set up build - # actions. - # - # If does not contain dot, should be a module name. The - # flag will be applied for all rules in that module. If - # module for rule is different from the calling module, an - # error is issued. - - variable-name # Variable that should be set on target. - condition * : # A condition when this flag should be applied. Should be a - # set of property sets. If one of those property sets is - # contained in the build properties, the flag will be used. - # Implied values are not allowed: "<toolset>gcc" should be - # used, not just "gcc". Subfeatures, like in - # "<toolset>gcc-3.2" are allowed. If left empty, the flag - # will be used unconditionally. - # - # Propery sets may use value-less properties ('<a>' vs. - # '<a>value') to match absent properties. This allows to - # separately match: - # - # <architecture>/<address-model>64 - # <architecture>ia64/<address-model> - # - # Where both features are optional. Without this syntax - # we would be forced to define "default" values. - - values * : # The value to add to variable. If <feature> is specified, - # then the value of 'feature' will be added. - unchecked ? # If value 'unchecked' is passed, will not test that flags - # are set for the calling module. - : hack-hack ? # For - # flags rule OPTIONS <cxx-abi> : -model ansi - # Treat <cxx-abi> as condition - # FIXME: ugly hack. -) -{ - local caller = [ CALLER_MODULE ] ; - if ! [ MATCH ".*([.]).*" : $(rule-or-module) ] - && [ MATCH "(Jamfile<.*)" : $(caller) ] - { - # Unqualified rule name, used inside Jamfile. Most likely used with - # 'make' or 'notfile' rules. This prevents setting flags on the entire - # Jamfile module (this will be considered as rule), but who cares? - # Probably, 'flags' rule should be split into 'flags' and - # 'flags-on-module'. - rule-or-module = $(caller).$(rule-or-module) ; - } - else - { - local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ; - if $(unchecked) != unchecked - && $(.flags-module-checking[1]) != unchecked - && $(module_) != $(caller) - { - errors.error "Module $(caller) attempted to set flags for module $(module_)" ; - } - } - - if $(condition) && ! $(condition:G=) && ! $(hack-hack) - { - # We have condition in the form '<feature>', that is, without value. 
- # That is an older syntax: - # flags gcc.link RPATH <dll-path> ; - # for compatibility, convert it to - # flags gcc.link RPATH : <dll-path> ; - values = $(condition) ; - condition = ; - } - - if $(condition) - { - property.validate-property-sets $(condition) ; - condition = [ normalize-condition $(condition) ] ; - } - - add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ; -} - - -# Adds a new flag setting with the specified values. Does no checking. -# -local rule add-flag ( rule-or-module : variable-name : condition * : values * ) -{ - .$(rule-or-module).flags += $(.flag-no) ; - - # Store all flags for a module. - local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ; - .module-flags.$(module_) += $(.flag-no) ; - # Store flag-no -> rule-or-module mapping. - .rule-or-module.$(.flag-no) = $(rule-or-module) ; - - .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ; - .$(rule-or-module).values.$(.flag-no) += $(values) ; - .$(rule-or-module).condition.$(.flag-no) += $(condition) ; - - .flag-no = [ numbers.increment $(.flag-no) ] ; -} - - -# Returns the first element of 'property-sets' which is a subset of -# 'properties' or an empty list if no such element exists. -# -rule find-property-subset ( property-sets * : properties * ) -{ - # Cut property values off. - local prop-keys = $(properties:G) ; - - local result ; - for local s in $(property-sets) - { - if ! $(result) - { - # Handle value-less properties like '<architecture>' (compare with - # '<architecture>x86'). - - local set = [ feature.split $(s) ] ; - - # Find the set of features that - # - have no property specified in required property set - # - are omitted in the build property set. - local default-props ; - for local i in $(set) - { - # If $(i) is a value-less property it should match default value - # of an optional property. See the first line in the example - # below: - # - # property set properties result - # <a> <b>foo <b>foo match - # <a> <b>foo <a>foo <b>foo no match - # <a>foo <b>foo <b>foo no match - # <a>foo <b>foo <a>foo <b>foo match - if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) ) - { - default-props += $(i) ; - } - } - - if $(set) in $(properties) $(default-props) - { - result = $(s) ; - } - } - } - return $(result) ; -} - - -# Returns a value to be added to some flag for some target based on the flag's -# value definition and the given target's property set. -# -rule handle-flag-value ( value * : properties * ) -{ - local result ; - if $(value:G) - { - local matches = [ property.select $(value) : $(properties) ] ; - for local p in $(matches) - { - local att = [ feature.attributes $(p:G) ] ; - if dependency in $(att) - { - # The value of a dependency feature is a target and needs to be - # actualized. - result += [ $(p:G=).actualize ] ; - } - else if path in $(att) || free in $(att) - { - local values ; - # Treat features with && in the value specially -- each - # &&-separated element is considered a separate value. This is - # needed to handle searched libraries or include paths, which - # may need to be in a specific order. - if ! 
[ MATCH (&&) : $(p:G=) ] - { - values = $(p:G=) ; - } - else - { - values = [ regex.split $(p:G=) "&&" ] ; - } - if path in $(att) - { - result += [ sequence.transform path.native : $(values) ] ; - } - else - { - result += $(values) ; - } - } - else - { - result += $(p:G=) ; - } - } - } - else - { - result += $(value) ; - } - return $(result) ; -} - - -# Given a rule name and a property set, returns a list of interleaved variables -# names and values which must be set on targets for that rule/property-set -# combination. -# -rule set-target-variables-aux ( rule-or-module : property-set ) -{ - local result ; - properties = [ $(property-set).raw ] ; - for local f in $(.$(rule-or-module).flags) - { - local variable = $(.$(rule-or-module).variable.$(f)) ; - local condition = $(.$(rule-or-module).condition.$(f)) ; - local values = $(.$(rule-or-module).values.$(f)) ; - - if ! $(condition) || - [ find-property-subset $(condition) : $(properties) ] - { - local processed ; - for local v in $(values) - { - # The value might be <feature-name> so needs special treatment. - processed += [ handle-flag-value $(v) : $(properties) ] ; - } - for local r in $(processed) - { - result += $(variable) $(r) ; - } - } - } - - # Strip away last dot separated part and recurse. - local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ; - if $(next) - { - result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ; - } - return $(result) ; -} - - -rule set-target-variables ( rule-or-module targets + : property-set ) -{ - properties = [ $(property-set).raw ] ; - local key = $(rule-or-module).$(property-set) ; - local settings = $(.stv.$(key)) ; - if ! $(settings) - { - settings = [ set-target-variables-aux $(rule-or-module) : - $(property-set) ] ; - - if ! $(settings) - { - settings = none ; - } - .stv.$(key) = $(settings) ; - } - - if $(settings) != none - { - local var-name = ; - for local name-or-value in $(settings) - { - if $(var-name) - { - $(var-name) on $(targets) += $(name-or-value) ; - var-name = ; - } - else - { - var-name = $(name-or-value) ; - } - } - } -} - - -# Make toolset 'toolset', defined in a module of the same name, inherit from -# 'base'. -# 1. The 'init' rule from 'base' is imported into 'toolset' with full name. -# Another 'init' is called, which forwards to the base one. -# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset> -# property in requires is adjusted too. -# 3. All flags are inherited. -# 4. All rules are imported. -# -rule inherit ( toolset : base ) -{ - import $(base) ; - inherit-generators $(toolset) : $(base) ; - inherit-flags $(toolset) : $(base) ; - inherit-rules $(toolset) : $(base) ; -} - - -rule inherit-generators ( toolset properties * : base : generators-to-ignore * ) -{ - properties ?= <toolset>$(toolset) ; - local base-generators = [ generators.generators-for-toolset $(base) ] ; - for local g in $(base-generators) - { - local id = [ $(g).id ] ; - - if ! $(id) in $(generators-to-ignore) - { - # Some generator names have multiple periods in their name, so - # $(id:B=$(toolset)) does not generate the right new-id name. E.g. - # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is - # not what we want. Manually parse the base and suffix. If there is - # a better way to do this, I would love to see it. See also the - # register() rule in the generators module. 
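Written out as standalone Python (function name invented), the id rewriting described in that comment keeps everything after the first dot and swaps only the leading toolset name, avoiding the gcc.compile.c++ to darwin.c++ pitfall:

    def retarget_generator_id(generator_id, new_toolset):
        parts = generator_id.split(".", 1)          # ['gcc', 'compile.c++']
        suffix = "." + parts[1] if len(parts) > 1 else ""
        return new_toolset + suffix

    print(retarget_generator_id("gcc.compile.c++", "darwin"))   # darwin.compile.c++
    print(retarget_generator_id("gcc.link", "darwin"))          # darwin.link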
- local base = $(id) ; - local suffix = "" ; - while $(base:S) - { - suffix = $(base:S)$(suffix) ; - base = $(base:B) ; - } - local new-id = $(toolset)$(suffix) ; - - generators.register [ $(g).clone $(new-id) : $(properties) ] ; - } - } -} - - -# Brings all flag definitions from the 'base' toolset into the 'toolset' -# toolset. Flag definitions whose conditions make use of properties in -# 'prohibited-properties' are ignored. Do not confuse property and feature, for -# example <debug-symbols>on and <debug-symbols>off, so blocking one of them does -# not block the other one. -# -# The flag conditions are not altered at all, so if a condition includes a name, -# or version of a base toolset, it will not ever match the inheriting toolset. -# When such flag settings must be inherited, define a rule in base toolset -# module and call it as needed. -# -rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * ) -{ - for local f in $(.module-flags.$(base)) - { - local rule-or-module = $(.rule-or-module.$(f)) ; - if ( [ set.difference - $(.$(rule-or-module).condition.$(f)) : - $(prohibited-properties) ] - || ! $(.$(rule-or-module).condition.$(f)) - ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) ) - { - local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ; - local new-rule-or-module ; - if $(rule_) - { - new-rule-or-module = $(toolset).$(rule_) ; - } - else - { - new-rule-or-module = $(toolset) ; - } - - add-flag - $(new-rule-or-module) - : $(.$(rule-or-module).variable.$(f)) - : $(.$(rule-or-module).condition.$(f)) - : $(.$(rule-or-module).values.$(f)) ; - } - } -} - - -rule inherit-rules ( toolset : base : localize ? ) -{ - # It appears that "action" creates a local rule. - local base-generators = [ generators.generators-for-toolset $(base) ] ; - local rules ; - for local g in $(base-generators) - { - rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ; - } - rules = [ sequence.unique $(rules) ] ; - IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ; - IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ; -} - - -# Return the list of global 'toolset requirements'. Those requirements will be -# automatically added to the requirements of any main target. -# -rule requirements ( ) -{ - return $(.requirements) ; -} - - -# Adds elements to the list of global 'toolset requirements'. The requirements -# will be automatically added to the requirements for all main targets, as if -# they were specified literally. For best results, all requirements added should -# be conditional or indirect conditional. -# -rule add-requirements ( requirements * ) -{ - if ! 
$(.ignore-requirements) - { - .requirements += $(requirements) ; - } -} - - -rule __test__ ( ) -{ - import assert ; - local p = <b>0 <c>1 <d>2 <e>3 <f>4 ; - assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ; - assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ; - - local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ; - assert.result <a>/<b> : find-property-subset $(p-set) : ; - assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ; - assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ; - assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ; -} diff --git a/jam-files/boost-build/build/toolset.py b/jam-files/boost-build/build/toolset.py deleted file mode 100644 index b4267987..00000000 --- a/jam-files/boost-build/build/toolset.py +++ /dev/null @@ -1,398 +0,0 @@ -# Status: being ported by Vladimir Prus -# Base revision: 40958 -# -# Copyright 2003 Dave Abrahams -# Copyright 2005 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -""" Support for toolset definition. -""" - -import feature, property, generators, property_set -import b2.util.set -from b2.util import cached, qualify_jam_action -from b2.util.utility import * -from b2.util import bjam_signature -from b2.manager import get_manager - -__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*') -__re_two_ampersands = re.compile ('(&&)') -__re_first_segment = re.compile ('([^.]*).*') -__re_first_group = re.compile (r'[^.]*\.(.*)') - -# Flag is a mechanism to set a value -# A single toolset flag. Specifies that when certain -# properties are in build property set, certain values -# should be appended to some variable. -# -# A flag applies to a specific action in specific module. -# The list of all flags for a module is stored, and each -# flag further contains the name of the rule it applies -# for, -class Flag: - - def __init__(self, variable_name, values, condition, rule = None): - self.variable_name = variable_name - self.values = values - self.condition = condition - self.rule = rule - - def __str__(self): - return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\ - ", " + str(self.condition) + ", " + str(self.rule) + ")") - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __module_flags, __flags, __stv - - # Mapping from module name to a list of all flags that apply - # to either that module directly, or to any rule in that module. - # Each element of the list is Flag instance. - # So, for module named xxx this might contain flags for 'xxx', - # for 'xxx.compile', for 'xxx.compile.c++', etc. - __module_flags = {} - - # Mapping from specific rule or module name to a list of Flag instances - # that apply to that name. - # Say, it might contain flags for 'xxx.compile.c++'. If there are - # entries for module name 'xxx', they are flags for 'xxx' itself, - # not including any rules in that module. - __flags = {} - - # A cache for varaible settings. The key is generated from the rule name and the properties. - __stv = {} - -reset () - -# FIXME: --ignore-toolset-requirements -# FIXME: using - -# FIXME push-checking-for-flags-module .... -# FIXME: investigate existing uses of 'hack-hack' parameter -# in jam code. 
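A condensed standalone model of what these Flag records are used for. The condition is simplified to a single conjunction of required properties (the real code stores a list of alternative property sets), and the gcc-style values are only illustrative:

    class Flag(object):
        def __init__(self, variable_name, values, condition):
            self.variable_name = variable_name
            self.values = values
            self.condition = condition        # [] means "always applies"

    def variable_settings(flags, properties):
        settings = {}
        for f in flags:
            if all(c in properties for c in f.condition):
                settings.setdefault(f.variable_name, []).extend(f.values)
        return settings

    flags = [Flag("OPTIONS", ["-O3"], ["<optimization>speed"]),
             Flag("OPTIONS", ["-g"], ["<debug-symbols>on"]),
             Flag("DEFINES", ["NDEBUG"], ["<variant>release"])]
    print(variable_settings(flags, ["<optimization>speed", "<variant>release"]))
    # {'OPTIONS': ['-O3'], 'DEFINES': ['NDEBUG']}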
- -@bjam_signature((["rule_or_module", "variable_name", "condition", "*"], - ["values", "*"])) -def flags(rule_or_module, variable_name, condition, values = []): - """ Specifies the flags (variables) that must be set on targets under certain - conditions, described by arguments. - rule_or_module: If contains dot, should be a rule name. - The flags will be applied when that rule is - used to set up build actions. - - If does not contain dot, should be a module name. - The flags will be applied for all rules in that - module. - If module for rule is different from the calling - module, an error is issued. - - variable_name: Variable that should be set on target - - condition A condition when this flag should be applied. - Should be set of property sets. If one of - those property sets is contained in build - properties, the flag will be used. - Implied values are not allowed: - "<toolset>gcc" should be used, not just - "gcc". Subfeatures, like in "<toolset>gcc-3.2" - are allowed. If left empty, the flag will - always used. - - Propery sets may use value-less properties - ('<a>' vs. '<a>value') to match absent - properties. This allows to separately match - - <architecture>/<address-model>64 - <architecture>ia64/<address-model> - - Where both features are optional. Without this - syntax we'd be forced to define "default" value. - - values: The value to add to variable. If <feature> - is specified, then the value of 'feature' - will be added. - """ - caller = bjam.caller()[:-1] - if not '.' in rule_or_module and caller.startswith("Jamfile"): - # Unqualified rule name, used inside Jamfile. Most likely used with - # 'make' or 'notfile' rules. This prevents setting flags on the entire - # Jamfile module (this will be considered as rule), but who cares? - # Probably, 'flags' rule should be split into 'flags' and - # 'flags-on-module'. - rule_or_module = qualify_jam_action(rule_or_module, caller) - else: - # FIXME: revive checking that we don't set flags for a different - # module unintentionally - pass - - if condition and not replace_grist (condition, ''): - # We have condition in the form '<feature>', that is, without - # value. That's a previous syntax: - # - # flags gcc.link RPATH <dll-path> ; - # for compatibility, convert it to - # flags gcc.link RPATH : <dll-path> ; - values = [ condition ] - condition = None - - if condition: - transformed = [] - for c in condition: - # FIXME: 'split' might be a too raw tool here. - pl = [property.create_from_string(s) for s in c.split('/')] - pl = feature.expand_subfeatures(pl); - transformed.append(property_set.create(pl)) - condition = transformed - - property.validate_property_sets(condition) - - __add_flag (rule_or_module, variable_name, condition, values) - -def set_target_variables (manager, rule_or_module, targets, ps): - """ - """ - settings = __set_target_variables_aux(manager, rule_or_module, ps) - - if settings: - for s in settings: - for target in targets: - manager.engine ().set_target_variable (target, s [0], s[1], True) - -def find_satisfied_condition(conditions, ps): - """Returns the first element of 'property-sets' which is a subset of - 'properties', or an empty list if no such element exists.""" - - features = set(p.feature() for p in ps.all()) - - for condition in conditions: - - found_all = True - for i in condition.all(): - - found = False - if i.value(): - found = i.value() in ps.get(i.feature()) - else: - # Handle value-less properties like '<architecture>' (compare with - # '<architecture>x86'). 
- # If $(i) is a value-less property it should match default - # value of an optional property. See the first line in the - # example below: - # - # property set properties result - # <a> <b>foo <b>foo match - # <a> <b>foo <a>foo <b>foo no match - # <a>foo <b>foo <b>foo no match - # <a>foo <b>foo <a>foo <b>foo match - found = not i.feature() in features - - found_all = found_all and found - - if found_all: - return condition - - return None - - -def register (toolset): - """ Registers a new toolset. - """ - feature.extend('toolset', [toolset]) - -def inherit_generators (toolset, properties, base, generators_to_ignore = []): - if not properties: - properties = [replace_grist (toolset, '<toolset>')] - - base_generators = generators.generators_for_toolset(base) - - for g in base_generators: - id = g.id() - - if not id in generators_to_ignore: - # Some generator names have multiple periods in their name, so - # $(id:B=$(toolset)) doesn't generate the right new_id name. - # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++, - # which is not what we want. Manually parse the base and suffix - # (if there's a better way to do this, I'd love to see it.) - # See also register in module generators. - (base, suffix) = split_action_id(id) - - new_id = toolset + '.' + suffix - - generators.register(g.clone(new_id, properties)) - -def inherit_flags(toolset, base, prohibited_properties = []): - """Brings all flag definitions from the 'base' toolset into the 'toolset' - toolset. Flag definitions whose conditions make use of properties in - 'prohibited-properties' are ignored. Don't confuse property and feature, for - example <debug-symbols>on and <debug-symbols>off, so blocking one of them does - not block the other one. - - The flag conditions are not altered at all, so if a condition includes a name, - or version of a base toolset, it won't ever match the inheriting toolset. When - such flag settings must be inherited, define a rule in base toolset module and - call it as needed.""" - for f in __module_flags.get(base, []): - - if not f.condition or b2.util.set.difference(f.condition, prohibited_properties): - match = __re_first_group.match(f.rule) - rule_ = None - if match: - rule_ = match.group(1) - - new_rule_or_module = '' - - if rule_: - new_rule_or_module = toolset + '.' + rule_ - else: - new_rule_or_module = toolset - - __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values) - -def inherit_rules (toolset, base): - pass - # FIXME: do something about this. -# base_generators = generators.generators_for_toolset (base) - -# import action - -# ids = [] -# for g in base_generators: -# (old_toolset, id) = split_action_id (g.id ()) -# ids.append (id) ; - -# new_actions = [] - -# engine = get_manager().engine() - # FIXME: do this! -# for action in engine.action.values(): -# pass -# (old_toolset, id) = split_action_id(action.action_name) -# -# if old_toolset == base: -# new_actions.append ((id, value [0], value [1])) -# -# for a in new_actions: -# action.register (toolset + '.' + a [0], a [1], a [2]) - - # TODO: how to deal with this? 
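The match table shown above for find_satisfied_condition can be reproduced with a few lines of standalone Python (helper name invented): a value-less property such as '<a>' matches only when the build properties do not set that feature at all.

    def satisfies(condition, properties):
        set_features = {p.split(">", 1)[0] + ">" for p in properties}
        for req in condition:
            feature, _, value = req.partition(">")
            if value:                              # '<a>foo' must be present literally
                if req not in properties:
                    return False
            elif feature + ">" in set_features:    # '<a>' requires the feature to be absent
                return False
        return True

    print(satisfies(["<a>", "<b>foo"], ["<b>foo"]))                 # True
    print(satisfies(["<a>", "<b>foo"], ["<a>foo", "<b>foo"]))       # False
    print(satisfies(["<a>foo", "<b>foo"], ["<b>foo"]))              # False
    print(satisfies(["<a>foo", "<b>foo"], ["<a>foo", "<b>foo"]))    # True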
-# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ; -# # Import the rules to the global scope -# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ; -# } -# - -###################################################################################### -# Private functions - -@cached -def __set_target_variables_aux (manager, rule_or_module, ps): - """ Given a rule name and a property set, returns a list of tuples of - variables names and values, which must be set on targets for that - rule/properties combination. - """ - result = [] - - for f in __flags.get(rule_or_module, []): - - if not f.condition or find_satisfied_condition (f.condition, ps): - processed = [] - for v in f.values: - # The value might be <feature-name> so needs special - # treatment. - processed += __handle_flag_value (manager, v, ps) - - for r in processed: - result.append ((f.variable_name, r)) - - # strip away last dot separated part and recurse. - next = __re_split_last_segment.match(rule_or_module) - - if next: - result.extend(__set_target_variables_aux( - manager, next.group(1), ps)) - - return result - -def __handle_flag_value (manager, value, ps): - result = [] - - if get_grist (value): - f = feature.get(value) - values = ps.get(f) - - for value in values: - - if f.dependency(): - # the value of a dependency feature is a target - # and must be actualized - result.append(value.actualize()) - - elif f.path() or f.free(): - - # Treat features with && in the value - # specially -- each &&-separated element is considered - # separate value. This is needed to handle searched - # libraries, which must be in specific order. - if not __re_two_ampersands.search(value): - result.append(value) - - else: - result.extend(value.split ('&&')) - else: - result.append (ungristed) - else: - result.append (value) - - return result - -def __add_flag (rule_or_module, variable_name, condition, values): - """ Adds a new flag setting with the specified values. - Does no checking. - """ - f = Flag(variable_name, values, condition, rule_or_module) - - # Grab the name of the module - m = __re_first_segment.match (rule_or_module) - assert m - module = m.group(1) - - __module_flags.setdefault(m, []).append(f) - __flags.setdefault(rule_or_module, []).append(f) - -__requirements = [] - -def requirements(): - """Return the list of global 'toolset requirements'. - Those requirements will be automatically added to the requirements of any main target.""" - return __requirements - -def add_requirements(requirements): - """Adds elements to the list of global 'toolset requirements'. The requirements - will be automatically added to the requirements for all main targets, as if - they were specified literally. For best results, all requirements added should - be conditional or indirect conditional.""" - - #if ! $(.ignore-requirements) - #{ - print "XXXX", requirements - __requirements.extend(requirements) - #} - -# Make toolset 'toolset', defined in a module of the same name, -# inherit from 'base' -# 1. The 'init' rule from 'base' is imported into 'toolset' with full -# name. Another 'init' is called, which forwards to the base one. -# 2. All generators from 'base' are cloned. The ids are adjusted and -# <toolset> property in requires is adjusted too -# 3. All flags are inherited -# 4. All rules are imported. 
-def inherit(toolset, base): - get_manager().projects().load_module(base, []); - - inherit_generators(toolset, [], base) - inherit_flags(toolset, base) - inherit_rules(toolset, base) diff --git a/jam-files/boost-build/build/type.jam b/jam-files/boost-build/build/type.jam deleted file mode 100644 index 1a7a5782..00000000 --- a/jam-files/boost-build/build/type.jam +++ /dev/null @@ -1,425 +0,0 @@ -# Copyright 2002, 2003 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Deals with target type declaration and defines target class which supports -# typed targets. - -import "class" : new ; -import errors ; -import feature ; -import generators : * ; -import project ; -import property ; -import scanner ; -import os ; - -# The following import would create a circular dependency: -# project -> project-root -> builtin -> type -> targets -> project -# import targets ; - -# The feature is optional so it would never get added implicitly. It is used -# only for internal purposes and in all cases we want to use it explicitly. -feature.feature target-type : : composite optional ; - -feature.feature main-target-type : : optional incidental ; -feature.feature base-target-type : : composite optional free ; - - -# Registers a target type, possible derived from a 'base-type'. Providing a list -# of 'suffixes' here is a shortcut for separately calling the register-suffixes -# rule with the given suffixes and the set-generated-target-suffix rule with the -# first given suffix. -# -rule register ( type : suffixes * : base-type ? ) -{ - # Type names cannot contain hyphens, because when used as feature-values - # they would be interpreted as composite features which need to be - # decomposed. - switch $(type) - { - case *-* : errors.error "type name \"$(type)\" contains a hyphen" ; - } - - if $(type) in $(.types) - { - errors.error "Type $(type) is already registered." ; - } - else - { - .types += $(type) ; - .base.$(type) = $(base-type) ; - .derived.$(base-type) += $(type) ; - - if $(suffixes)-is-not-empty - { - # Specify mapping from suffixes to type. - register-suffixes $(suffixes) : $(type) ; - # By default generated targets of 'type' will use the first of - #'suffixes'. This may be overriden. - set-generated-target-suffix $(type) : : $(suffixes[1]) ; - } - - feature.extend target-type : $(type) ; - feature.extend main-target-type : $(type) ; - feature.extend base-target-type : $(type) ; - - feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ; - feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ; - - # We used to declare the main target rule only when a 'main' parameter - # has been specified. However, it is hard to decide that a type will - # *never* need a main target rule and so from time to time we needed to - # make yet another type 'main'. So now a main target rule is defined for - # each type. - main-rule-name = [ type-to-rule-name $(type) ] ; - .main-target-type.$(main-rule-name) = $(type) ; - IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ; - - # Adding a new derived type affects generator selection so we need to - # make the generator selection module update any of its cached - # information related to a new derived type being defined. 
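A reduced standalone model of the registry the register rule maintains, using dictionaries instead of module globals; the SHARED_LIB/LIB names and suffixes are only an example. Each type remembers its base, and a suffix may be claimed by at most one type.

    types = {}            # type name -> base type (or None)
    suffix_to_type = {}   # 'so' -> 'SHARED_LIB'

    def register(type_name, suffixes=(), base=None):
        if type_name in types:
            raise ValueError("Type %s is already registered." % type_name)
        types[type_name] = base
        for s in suffixes:
            if suffix_to_type.setdefault(s, type_name) != type_name:
                raise ValueError("suffix %s already mapped to %s" % (s, suffix_to_type[s]))

    def all_bases(type_name):
        chain = []
        while type_name:
            chain.append(type_name)
            type_name = types[type_name]
        return chain

    register("LIB")
    register("SHARED_LIB", ["so", "dll"], base="LIB")
    print(all_bases("SHARED_LIB"))    # ['SHARED_LIB', 'LIB']
    print(suffix_to_type["dll"])      # SHARED_LIB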
- generators.update-cached-information-with-a-new-type $(type) ; - } -} - - -# Given a type, returns the name of the main target rule which creates targets -# of that type. -# -rule type-to-rule-name ( type ) -{ - # Lowercase everything. Convert underscores to dashes. - import regex ; - local n = [ regex.split $(type:L) "_" ] ; - return $(n:J=-) ; -} - - -# Given a main target rule name, returns the type for which it creates targets. -# -rule type-from-rule-name ( rule-name ) -{ - return $(.main-target-type.$(rule-name)) ; -} - - -# Specifies that files with suffix from 'suffixes' be recognized as targets of -# type 'type'. Issues an error if a different type is already specified for any -# of the suffixes. -# -rule register-suffixes ( suffixes + : type ) -{ - for local s in $(suffixes) - { - if ! $(.type.$(s)) - { - .type.$(s) = $(type) ; - } - else if $(.type.$(s)) != $(type) - { - errors.error Attempting to specify multiple types for suffix - \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ; - } - } -} - - -# Returns true iff type has been registered. -# -rule registered ( type ) -{ - if $(type) in $(.types) - { - return true ; - } -} - - -# Issues an error if 'type' is unknown. -# -rule validate ( type ) -{ - if ! [ registered $(type) ] - { - errors.error "Unknown target type $(type)" ; - } -} - - -# Sets a scanner class that will be used for this 'type'. -# -rule set-scanner ( type : scanner ) -{ - validate $(type) ; - .scanner.$(type) = $(scanner) ; -} - - -# Returns a scanner instance appropriate to 'type' and 'properties'. -# -rule get-scanner ( type : property-set ) -{ - if $(.scanner.$(type)) - { - return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ; - } -} - - -# Returns a base type for the given type or nothing in case the given type is -# not derived. -# -rule base ( type ) -{ - return $(.base.$(type)) ; -} - - -# Returns the given type and all of its base types in order of their distance -# from type. -# -rule all-bases ( type ) -{ - local result = $(type) ; - while $(type) - { - type = [ base $(type) ] ; - result += $(type) ; - } - return $(result) ; -} - - -# Returns the given type and all of its derived types in order of their distance -# from type. -# -rule all-derived ( type ) -{ - local result = $(type) ; - for local d in $(.derived.$(type)) - { - result += [ all-derived $(d) ] ; - } - return $(result) ; -} - - -# Returns true if 'type' is equal to 'base' or has 'base' as its direct or -# indirect base. -# -rule is-derived ( type base ) -{ - if $(base) in [ all-bases $(type) ] - { - return true ; - } -} - -# Returns true if 'type' is either derived from or is equal to 'base'. -# -# TODO: It might be that is-derived and is-subtype were meant to be different -# rules - one returning true for type = base and one not, but as currently -# implemented they are actually the same. Clean this up. -# -rule is-subtype ( type base ) -{ - return [ is-derived $(type) $(base) ] ; -} - - -# Store suffixes for generated targets. -.suffixes = [ new property-map ] ; - -# Store prefixes for generated targets (e.g. "lib" for library). -.prefixes = [ new property-map ] ; - - -# Sets a file suffix to be used when generating a target of 'type' with the -# specified properties. Can be called with no properties if no suffix has -# already been specified for the 'type'. The 'suffix' parameter can be an empty -# string ("") to indicate that no suffix should be used. -# -# Note that this does not cause files with 'suffix' to be automatically -# recognized as being of 'type'. 
Two different types can use the same suffix for -# their generated files but only one type can be auto-detected for a file with -# that suffix. User should explicitly specify which one using the -# register-suffixes rule. -# -rule set-generated-target-suffix ( type : properties * : suffix ) -{ - set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ; -} - - -# Change the suffix previously registered for this type/properties combination. -# If suffix is not yet specified, sets it. -# -rule change-generated-target-suffix ( type : properties * : suffix ) -{ - change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ; -} - - -# Returns the suffix used when generating a file of 'type' with the given -# properties. -# -rule generated-target-suffix ( type : property-set ) -{ - return [ generated-target-ps suffix : $(type) : $(property-set) ] ; -} - - -# Sets a target prefix that should be used when generating targets of 'type' -# with the specified properties. Can be called with empty properties if no -# prefix for 'type' has been specified yet. -# -# The 'prefix' parameter can be empty string ("") to indicate that no prefix -# should be used. -# -# Usage example: library names use the "lib" prefix on unix. -# -rule set-generated-target-prefix ( type : properties * : prefix ) -{ - set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ; -} - - -# Change the prefix previously registered for this type/properties combination. -# If prefix is not yet specified, sets it. -# -rule change-generated-target-prefix ( type : properties * : prefix ) -{ - change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ; -} - - -rule generated-target-prefix ( type : property-set ) -{ - return [ generated-target-ps prefix : $(type) : $(property-set) ] ; -} - - -# Common rules for prefix/suffix provisioning follow. - -local rule set-generated-target-ps ( ps : type : properties * : psval ) -{ - properties = <target-type>$(type) $(properties) ; - $(.$(ps)es).insert $(properties) : $(psval) ; -} - - -local rule change-generated-target-ps ( ps : type : properties * : psval ) -{ - properties = <target-type>$(type) $(properties) ; - local prev = [ $(.$(ps)es).find-replace $(properties) : $(psval) ] ; - if ! $(prev) - { - set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ; - } -} - - -# Returns either prefix or suffix (as indicated by 'ps') that should be used -# when generating a target of 'type' with the specified properties. Parameter -# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for -# 'type', returns prefix/suffix for base type, if any. -# -local rule generated-target-ps-real ( ps : type : properties * ) -{ - local result ; - local found ; - while $(type) && ! $(found) - { - result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ; - # If the prefix/suffix is explicitly set to an empty string, we consider - # prefix/suffix to be found. If we were not to compare with "", there - # would be no way to specify an empty prefix/suffix. - if $(result)-is-not-empty - { - found = true ; - } - type = $(.base.$(type)) ; - } - if $(result) = "" - { - result = ; - } - return $(result) ; -} - - -local rule generated-target-ps ( ps : type : property-set ) -{ - local key = .$(ps).$(type).$(property-set) ; - local v = $($(key)) ; - if ! $(v) - { - v = [ generated-target-ps-real $(ps) : $(type) : [ $(property-set).raw ] - ] ; - if ! 
$(v) - { - v = none ; - } - $(key) = $(v) ; - } - - if $(v) != none - { - return $(v) ; - } -} - - -# Returns file type given its name. If there are several dots in filename, tries -# each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so" will -# be tried. -# -rule type ( filename ) -{ - if [ os.name ] in NT CYGWIN - { - filename = $(filename:L) ; - } - local type ; - while ! $(type) && $(filename:S) - { - local suffix = $(filename:S) ; - type = $(.type$(suffix)) ; - filename = $(filename:S=) ; - } - return $(type) ; -} - - -# Rule used to construct all main targets. Note that this rule gets imported -# into the global namespace under different alias names and the exact target -# type to construct is selected based on the alias used to actually invoke this -# rule. -# -rule main-target-rule ( name : sources * : requirements * : default-build * : - usage-requirements * ) -{ - # First discover the required target type based on the exact alias used to - # invoke this rule. - local bt = [ BACKTRACE 1 ] ; - local rulename = $(bt[4]) ; - local target-type = [ type-from-rule-name $(rulename) ] ; - - # This is a circular module dependency and so must be imported here. - import targets ; - - return [ targets.create-typed-target $(target-type) : [ project.current ] : - $(name) : $(sources) : $(requirements) : $(default-build) : - $(usage-requirements) ] ; -} - - -rule __test__ ( ) -{ - import assert ; - - # TODO: Add tests for all the is-derived, is-base & related type relation - # checking rules. -} diff --git a/jam-files/boost-build/build/type.py b/jam-files/boost-build/build/type.py deleted file mode 100644 index ddb7ba09..00000000 --- a/jam-files/boost-build/build/type.py +++ /dev/null @@ -1,313 +0,0 @@ -# Status: ported. -# Base revision: 45462. - -# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - - - -import re -import os -import os.path -from b2.util.utility import replace_grist, os_name -from b2.exceptions import * -from b2.build import feature, property, scanner -from b2.util import bjam_signature - - -__re_hyphen = re.compile ('-') - -def __register_features (): - """ Register features need by this module. - """ - # The feature is optional so that it is never implicitly added. - # It's used only for internal purposes, and in all cases we - # want to explicitly use it. - feature.feature ('target-type', [], ['composite', 'optional']) - feature.feature ('main-target-type', [], ['optional', 'incidental']) - feature.feature ('base-target-type', [], ['composite', 'optional', 'free']) - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - Note that this must be called _after_ resetting the module 'feature'. - """ - global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache - - __register_features () - - # Stores suffixes for generated targets. 
- __prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()] - - # Maps suffixes to types - __suffixes_to_types = {} - - # A map with all the registered types, indexed by the type name - # Each entry is a dictionary with following values: - # 'base': the name of base type or None if type has no base - # 'derived': a list of names of type which derive from this one - # 'scanner': the scanner class registered for this type, if any - __types = {} - - # Caches suffixes for targets with certain properties. - __target_suffixes_cache = {} - -reset () - -@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"])) -def register (type, suffixes = [], base_type = None): - """ Registers a target type, possibly derived from a 'base-type'. - If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'. - Also, the first element gives the suffix to be used when constructing and object of - 'type'. - type: a string - suffixes: None or a sequence of strings - base_type: None or a string - """ - # Type names cannot contain hyphens, because when used as - # feature-values they will be interpreted as composite features - # which need to be decomposed. - if __re_hyphen.search (type): - raise BaseException ('type name "%s" contains a hyphen' % type) - - if __types.has_key (type): - raise BaseException ('Type "%s" is already registered.' % type) - - entry = {} - entry ['base'] = base_type - entry ['derived'] = [] - entry ['scanner'] = None - __types [type] = entry - - if base_type: - __types [base_type]['derived'].append (type) - - if len (suffixes) > 0: - # Generated targets of 'type' will use the first of 'suffixes' - # (this may be overriden) - set_generated_target_suffix (type, [], suffixes [0]) - - # Specify mapping from suffixes to type - register_suffixes (suffixes, type) - - feature.extend('target-type', [type]) - feature.extend('main-target-type', [type]) - feature.extend('base-target-type', [type]) - - if base_type: - feature.compose ('<target-type>' + type, replace_grist (base_type, '<base-target-type>')) - feature.compose ('<base-target-type>' + type, '<base-target-type>' + base_type) - - import b2.build.generators as generators - # Adding a new derived type affects generator selection so we need to - # make the generator selection module update any of its cached - # information related to a new derived type being defined. - generators.update_cached_information_with_a_new_type(type) - - # FIXME: resolving recursive dependency. - from b2.manager import get_manager - get_manager().projects().project_rules().add_rule_for_type(type) - -# FIXME: quick hack. -def type_from_rule_name(rule_name): - return rule_name.upper().replace("-", "_") - - -def register_suffixes (suffixes, type): - """ Specifies that targets with suffix from 'suffixes' have the type 'type'. - If a different type is already specified for any of syffixes, issues an error. - """ - for s in suffixes: - if __suffixes_to_types.has_key (s): - old_type = __suffixes_to_types [s] - if old_type != type: - raise BaseException ('Attempting to specify type for suffix "%s"\nOld type: "%s", New type "%s"' % (s, old_type, type)) - else: - __suffixes_to_types [s] = type - -def registered (type): - """ Returns true iff type has been registered. - """ - return __types.has_key (type) - -def validate (type): - """ Issues an error if 'type' is unknown. 
- """ - if not registered (type): - raise BaseException ("Unknown target type '%s'" % type) - -def set_scanner (type, scanner): - """ Sets a scanner class that will be used for this 'type'. - """ - validate (type) - __types [type]['scanner'] = scanner - -def get_scanner (type, prop_set): - """ Returns a scanner instance appropriate to 'type' and 'property_set'. - """ - if registered (type): - scanner_type = __types [type]['scanner'] - if scanner_type: - return scanner.get (scanner_type, prop_set.raw ()) - pass - - return None - -def base(type): - """Returns a base type for the given type or nothing in case the given type is - not derived.""" - - return __types[type]['base'] - -def all_bases (type): - """ Returns type and all of its bases, in the order of their distance from type. - """ - result = [] - while type: - result.append (type) - type = __types [type]['base'] - - return result - -def all_derived (type): - """ Returns type and all classes that derive from it, in the order of their distance from type. - """ - result = [type] - for d in __types [type]['derived']: - result.extend (all_derived (d)) - - return result - -def is_derived (type, base): - """ Returns true if 'type' is 'base' or has 'base' as its direct or indirect base. - """ - # TODO: this isn't very efficient, especially for bases close to type - if base in all_bases (type): - return True - else: - return False - -def is_subtype (type, base): - """ Same as is_derived. Should be removed. - """ - # TODO: remove this method - return is_derived (type, base) - -@bjam_signature((["type"], ["properties", "*"], ["suffix"])) -def set_generated_target_suffix (type, properties, suffix): - """ Sets a target suffix that should be used when generating target - of 'type' with the specified properties. Can be called with - empty properties if no suffix for 'type' was specified yet. - This does not automatically specify that files 'suffix' have - 'type' --- two different types can use the same suffix for - generating, but only one type should be auto-detected for - a file with that suffix. User should explicitly specify which - one. - - The 'suffix' parameter can be empty string ("") to indicate that - no suffix should be used. - """ - set_generated_target_ps(1, type, properties, suffix) - - - -def change_generated_target_suffix (type, properties, suffix): - """ Change the suffix previously registered for this type/properties - combination. If suffix is not yet specified, sets it. - """ - change_generated_target_ps(1, type, properties, suffix) - -def generated_target_suffix(type, properties): - return generated_target_ps(1, type, properties) - -# Sets a target prefix that should be used when generating targets of 'type' -# with the specified properties. Can be called with empty properties if no -# prefix for 'type' has been specified yet. -# -# The 'prefix' parameter can be empty string ("") to indicate that no prefix -# should be used. -# -# Usage example: library names use the "lib" prefix on unix. -@bjam_signature((["type"], ["properties", "*"], ["suffix"])) -def set_generated_target_prefix(type, properties, prefix): - set_generated_target_ps(0, type, properties, prefix) - -# Change the prefix previously registered for this type/properties combination. -# If prefix is not yet specified, sets it. 
-def change_generated_target_prefix(type, properties, prefix): - change_generated_target_ps(0, type, properties, prefix) - -def generated_target_prefix(type, properties): - return generated_target_ps(0, type, properties) - -def set_generated_target_ps(is_suffix, type, properties, val): - properties.append ('<target-type>' + type) - __prefixes_suffixes[is_suffix].insert (properties, val) - -def change_generated_target_ps(is_suffix, type, properties, val): - properties.append ('<target-type>' + type) - prev = __prefixes_suffixes[is_suffix].find_replace(properties, val) - if not prev: - set_generated_target_ps(is_suffix, type, properties, val) - -# Returns either prefix or suffix (as indicated by 'is_suffix') that should be used -# when generating a target of 'type' with the specified properties. -# If no prefix/suffix is specified for 'type', returns prefix/suffix for -# base type, if any. -def generated_target_ps_real(is_suffix, type, properties): - - result = '' - found = False - while type and not found: - result = __prefixes_suffixes[is_suffix].find (['<target-type>' + type] + properties) - - # Note that if the string is empty (""), but not null, we consider - # suffix found. Setting prefix or suffix to empty string is fine. - if result is not None: - found = True - - type = __types [type]['base'] - - if not result: - result = '' - return result - -def generated_target_ps(is_suffix, type, prop_set): - """ Returns suffix that should be used when generating target of 'type', - with the specified properties. If not suffix were specified for - 'type', returns suffix for base type, if any. - """ - key = (is_suffix, type, prop_set) - v = __target_suffixes_cache.get(key, None) - - if not v: - v = generated_target_ps_real(is_suffix, type, prop_set.raw()) - __target_suffixes_cache [key] = v - - return v - -def type(filename): - """ Returns file type given it's name. If there are several dots in filename, - tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and - "so" will be tried. - """ - while 1: - filename, suffix = os.path.splitext (filename) - if not suffix: return None - suffix = suffix[1:] - - if __suffixes_to_types.has_key(suffix): - return __suffixes_to_types[suffix] - -# NOTE: moved from tools/types/register -def register_type (type, suffixes, base_type = None, os = []): - """ Register the given type on the specified OSes, or on remaining OSes - if os is not specified. This rule is injected into each of the type - modules for the sake of convenience. - """ - if registered (type): - return - - if not os or os_name () in os: - register (type, suffixes, base_type) diff --git a/jam-files/boost-build/build/version.jam b/jam-files/boost-build/build/version.jam deleted file mode 100644 index 7626ddda..00000000 --- a/jam-files/boost-build/build/version.jam +++ /dev/null @@ -1,161 +0,0 @@ -# Copyright 2002, 2003, 2004, 2006 Vladimir Prus -# Copyright 2008 Jurko Gospodnetic -# Distributed under the Boost Software License, Version 1.0. 
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import errors ; -import numbers ; - -major = "2011" ; -minor = "04" ; - -rule boost-build ( ) -{ - return "$(major).$(minor)-svn" ; -} - -rule print ( ) -{ - if [ verify-engine-version ] - { - ECHO "Boost.Build" [ boost-build ] ; - } -} - -rule verify-engine-version ( ) -{ - local v = [ modules.peek : JAM_VERSION ] ; - - if $(v[1]) != $(major) || $(v[2]) != $(minor) - { - local argv = [ modules.peek : ARGV ] ; - local e = $(argv[1]) ; - local l = [ modules.binding version ] ; - l = $(l:D) ; - l = $(l:D) ; - ECHO "warning: mismatched versions of Boost.Build engine and core" ; - ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ; - ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ; - } - else - { - return true ; - } -} - - - -# Utility rule for testing whether all elements in a sequence are equal to 0. -# -local rule is-all-zeroes ( sequence * ) -{ - local result = "true" ; - for local e in $(sequence) - { - if $(e) != "0" - { - result = "" ; - } - } - return $(result) ; -} - - -# Returns "true" if the first version is less than the second one. -# -rule version-less ( lhs + : rhs + ) -{ - numbers.check $(lhs) ; - numbers.check $(rhs) ; - - local done ; - local result ; - - while ! $(done) && $(lhs) && $(rhs) - { - if [ numbers.less $(lhs[1]) $(rhs[1]) ] - { - done = "true" ; - result = "true" ; - } - else if [ numbers.less $(rhs[1]) $(lhs[1]) ] - { - done = "true" ; - } - else - { - lhs = $(lhs[2-]) ; - rhs = $(rhs[2-]) ; - } - } - if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] ) - { - result = "true" ; - } - - return $(result) ; -} - - -# Returns "true" if the current JAM version version is at least the given -# version. -# -rule check-jam-version ( version + ) -{ - local version-tag = $(version:J=.) ; - if ! $(version-tag) - { - errors.error Invalid version specifier: : $(version:E="(undefined)") ; - } - - if ! $(.jam-version-check.$(version-tag))-is-not-empty - { - local jam-version = [ modules.peek : JAM_VERSION ] ; - if ! $(jam-version) - { - errors.error "Unable to deduce Boost Jam version. Your Boost Jam" - "installation is most likely terribly outdated." 
; - } - .jam-version-check.$(version-tag) = "true" ; - if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ] - { - .jam-version-check.$(version-tag) = "" ; - } - } - return $(.jam-version-check.$(version-tag)) ; -} - - -rule __test__ ( ) -{ - import assert ; - - local jam-version = [ modules.peek : JAM_VERSION ] ; - local future-version = $(jam-version) ; - future-version += "1" ; - - assert.true check-jam-version $(jam-version) ; - assert.false check-jam-version $(future-version) ; - - assert.true version-less 0 : 1 ; - assert.false version-less 0 : 0 ; - assert.true version-less 1 : 2 ; - assert.false version-less 1 : 1 ; - assert.false version-less 2 : 1 ; - assert.true version-less 3 1 20 : 3 4 10 ; - assert.false version-less 3 1 10 : 3 1 10 ; - assert.false version-less 3 4 10 : 3 1 20 ; - assert.true version-less 3 1 20 5 1 : 3 4 10 ; - assert.false version-less 3 1 10 5 1 : 3 1 10 ; - assert.false version-less 3 4 10 5 1 : 3 1 20 ; - assert.true version-less 3 1 20 : 3 4 10 5 1 ; - assert.true version-less 3 1 10 : 3 1 10 5 1 ; - assert.false version-less 3 4 10 : 3 1 20 5 1 ; - assert.false version-less 3 1 10 : 3 1 10 0 0 ; - assert.false version-less 3 1 10 0 0 : 3 1 10 ; - assert.false version-less 3 1 10 0 : 3 1 10 0 0 ; - assert.false version-less 3 1 10 0 : 03 1 10 0 0 ; - assert.false version-less 03 1 10 0 : 3 1 10 0 0 ; - - # TODO: Add tests for invalid input data being sent to version-less. -} diff --git a/jam-files/boost-build/build/virtual-target.jam b/jam-files/boost-build/build/virtual-target.jam deleted file mode 100644 index 2e8446bc..00000000 --- a/jam-files/boost-build/build/virtual-target.jam +++ /dev/null @@ -1,1317 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Implements virtual targets, which correspond to actual files created during a -# build, but are not yet targets in Jam sense. They are needed, for example, -# when searching for possible transformation sequences, when it is not yet known -# whether a particular target should be created at all. 
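The header above describes the key idea of this module: a virtual target is a candidate build product that costs nothing until it is selected, and that becomes a real Jam target only when actualized. As a rough illustration (Python is used for brevity even though the deleted file is Jam, and the class and names below are invented, not Boost.Build API), the two properties the comment relies on are that nothing reaches the build engine before actualize() is called and that repeated calls are harmless:

class CandidateTarget:
    """A potential build product; it can be discarded freely until actualized."""

    def __init__(self, name, declare):
        self.name = name
        self._declare = declare        # callback that registers a real engine target
        self._actual_name = None       # memoized result of the first actualization

    def actualize(self):
        # Only the first call touches the build engine; later calls are no-ops,
        # so a generator may create many candidates and actualize only the chosen ones.
        if self._actual_name is None:
            self._actual_name = self._declare(self.name)
        return self._actual_name

declared = []
t = CandidateTarget("hello.o", lambda n: (declared.append(n), n)[1])
t.actualize()
t.actualize()
assert declared == ["hello.o"]         # declared exactly once, despite two calls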
- -import "class" : new ; -import errors ; -import path ; -import sequence ; -import set ; -import type ; -import utility ; - - -# +--------------------------+ -# | virtual-target | -# +==========================+ -# | actualize | -# +--------------------------+ -# | actualize-action() = 0 | -# | actualize-location() = 0 | -# +----------------+---------+ -# | -# ^ -# / \ -# +-+-+ -# | -# +---------------------+ +-------+--------------+ -# | action | | abstract-file-target | -# +=====================| * +======================+ -# | action-name | +--+ action | -# | properties | | +----------------------+ -# +---------------------+--+ | actualize-action() | -# | actualize() |0..1 +-----------+----------+ -# | path() | | -# | adjust-properties() | sources | -# | actualize-sources() | targets | -# +------+--------------+ ^ -# | / \ -# ^ +-+-+ -# / \ | -# +-+-+ +-------------+-------------+ -# | | | -# | +------+---------------+ +--------+-------------+ -# | | file-target | | searched-lib-target | -# | +======================+ +======================+ -# | | actualize-location() | | actualize-location() | -# | +----------------------+ +----------------------+ -# | -# +-+------------------------------+ -# | | -# +----+----------------+ +---------+-----------+ -# | compile-action | | link-action | -# +=====================+ +=====================+ -# | adjust-properties() | | adjust-properties() | -# +---------------------+ | actualize-sources() | -# +---------------------+ -# -# The 'compile-action' and 'link-action' classes are not defined here but in -# builtin.jam modules. They are shown in the diagram to give the big picture. - - -# Models a potential target. It can be converted into a Jam target and used in -# building, if needed. However, it can be also dropped, which allows us to -# search for different transformations and select only one. -# -class virtual-target -{ - import scanner ; - import sequence ; - import utility ; - import virtual-target ; - - rule __init__ ( - name # Target/project name. - : project # Project to which this target belongs. - ) - { - self.name = $(name) ; - self.project = $(project) ; - self.dependencies = ; - } - - # Name of this target. - # - rule name ( ) - { - return $(self.name) ; - } - - # Project of this target. - # - rule project ( ) - { - return $(self.project) ; - } - - # Adds additional 'virtual-target' instances this one depends on. - # - rule depends ( d + ) - { - self.dependencies = [ sequence.merge $(self.dependencies) : - [ sequence.insertion-sort $(d) ] ] ; - } - - rule dependencies ( ) - { - return $(self.dependencies) ; - } - - rule always ( ) - { - .always = 1 ; - } - - # Generates all the actual targets and sets up build actions for this - # target. - # - # If 'scanner' is specified, creates an additional target with the same - # location as the actual target, which will depend on the actual target and - # be associated with a 'scanner'. That additional target is returned. See - # the docs (#dependency_scanning) for rationale. Target must correspond to a - # file if 'scanner' is specified. - # - # If scanner is not specified then the actual target is returned. - # - rule actualize ( scanner ? ) - { - local actual-name = [ actualize-no-scanner ] ; - - if $(.always) - { - ALWAYS $(actual-name) ; - } - - if ! $(scanner) - { - return $(actual-name) ; - } - else - { - # Add the scanner instance to the grist for name. 
- local g = [ sequence.join - [ utility.ungrist $(actual-name:G) ] $(scanner) : - ] ; - local name = $(actual-name:G=$(g)) ; - - if ! $(self.made.$(name)) - { - self.made.$(name) = true ; - - DEPENDS $(name) : $(actual-name) ; - - actualize-location $(name) ; - - scanner.install $(scanner) : $(name) $(__name__) ; - } - return $(name) ; - } - } - -# private: (overridables) - - # Sets up build actions for 'target'. Should call appropriate rules and set - # target variables. - # - rule actualize-action ( target ) - { - errors.error "method should be defined in derived classes" ; - } - - # Sets up variables on 'target' which specify its location. - # - rule actualize-location ( target ) - { - errors.error "method should be defined in derived classes" ; - } - - # If the target is a generated one, returns the path where it will be - # generated. Otherwise, returns an empty list. - # - rule path ( ) - { - errors.error "method should be defined in derived classes" ; - } - - # Returns the actual target name to be used in case when no scanner is - # involved. - # - rule actual-name ( ) - { - errors.error "method should be defined in derived classes" ; - } - -# implementation - rule actualize-no-scanner ( ) - { - # In fact, we just need to merge virtual-target with - # abstract-file-target as the latter is the only class derived from the - # former. But that has been left for later. - - errors.error "method should be defined in derived classes" ; - } -} - - -# Target corresponding to a file. The exact mapping for file is not yet -# specified in this class. (TODO: Actually, the class name could be better...) -# -# May be a source file (when no action is specified) or a derived file -# (otherwise). -# -# The target's grist is a concatenation of its project's location, action -# properties (for derived targets) and, optionally, value identifying the main -# target. -# -class abstract-file-target : virtual-target -{ - import project ; - import regex ; - import sequence ; - import path ; - import type ; - import property-set ; - import indirect ; - - rule __init__ ( - name # Target's name. - exact ? # If non-empty, the name is exactly the name created file - # should have. Otherwise, the '__init__' method will add a - # suffix obtained from 'type' by calling - # 'type.generated-target-suffix'. - : type ? # Target's type. - : project - : action ? - ) - { - virtual-target.__init__ $(name) : $(project) ; - - self.type = $(type) ; - self.action = $(action) ; - if $(action) - { - $(action).add-targets $(__name__) ; - - if $(self.type) && ! $(exact) - { - _adjust-name $(name) ; - } - } - } - - rule type ( ) - { - return $(self.type) ; - } - - # Sets the path. When generating target name, it will override any path - # computation from properties. - # - rule set-path ( path ) - { - self.path = [ path.native $(path) ] ; - } - - # Returns the currently set action. - # - rule action ( ) - { - return $(self.action) ; - } - - # Sets/gets the 'root' flag. Target is root if it directly corresponds to - # some variant of a main target. - # - rule root ( set ? ) - { - if $(set) - { - self.root = true ; - } - return $(self.root) ; - } - - # Gets or sets the subvariant which created this target. Subvariant is set - # when target is brought into existance and is never changed after that. In - # particular, if a target is shared by a subvariant, only the first is - # stored. - # - rule creating-subvariant ( s ? # If specified, specifies the value to set, - # which should be a 'subvariant' class - # instance. 
- ) - { - if $(s) && ! $(self.creating-subvariant) - { - self.creating-subvariant = $(s) ; - } - return $(self.creating-subvariant) ; - } - - rule actualize-action ( target ) - { - if $(self.action) - { - $(self.action).actualize ; - } - } - - # Return a human-readable representation of this target. If this target has - # an action, that is: - # - # { <action-name>-<self.name>.<self.type> <action-sources>... } - # - # otherwise, it is: - # - # { <self.name>.<self.type> } - # - rule str ( ) - { - local action = [ action ] ; - local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ; - - if $(action) - { - local sources = [ $(action).sources ] ; - local action-name = [ $(action).action-name ] ; - - local ss ; - for local s in $(sources) - { - ss += [ $(s).str ] ; - } - - return "{" $(action-name)-$(name-dot-type) $(ss) "}" ; - } - else - { - return "{" $(name-dot-type) "}" ; - } - } - - rule less ( a ) - { - if [ str ] < [ $(a).str ] - { - return true ; - } - } - - rule equal ( a ) - { - if [ str ] = [ $(a).str ] - { - return true ; - } - } - -# private: - rule actual-name ( ) - { - if ! $(self.actual-name) - { - local grist = [ grist ] ; - local basename = [ path.native $(self.name) ] ; - self.actual-name = <$(grist)>$(basename) ; - } - return $(self.actual-name) ; - } - - # Helper to 'actual-name', above. Computes a unique prefix used to - # distinguish this target from other targets with the same name creating - # different files. - # - rule grist ( ) - { - # Depending on target, there may be different approaches to generating - # unique prefixes. We generate prefixes in the form: - # <one letter approach code> <the actual prefix> - local path = [ path ] ; - if $(path) - { - # The target will be generated to a known path. Just use the path - # for identification, since path is as unique as it can get. - return p$(path) ; - } - else - { - # File is either source, which will be searched for, or is not a - # file at all. Use the location of project for distinguishing. - local project-location = [ $(self.project).get location ] ; - local location-grist = [ sequence.join [ regex.split - $(project-location) "/" ] : "!" ] ; - - if $(self.action) - { - local ps = [ $(self.action).properties ] ; - local property-grist = [ $(ps).as-path ] ; - # 'property-grist' can be empty when 'ps' is an empty property - # set. - if $(property-grist) - { - location-grist = $(location-grist)/$(property-grist) ; - } - } - - return l$(location-grist) ; - } - } - - # Given the target name specified in constructor, returns the name which - # should be really used, by looking at the <tag> properties. Tag properties - # need to be specified as <tag>@rule-name. This makes Boost Build call the - # specified rule with the target name, type and properties to get the new - # name. If no <tag> property is specified or the rule specified by <tag> - # returns nothing, returns the result of calling - # virtual-target.add-prefix-and-suffix. - # - rule _adjust-name ( specified-name ) - { - local ps ; - if $(self.action) - { - ps = [ $(self.action).properties ] ; - } - else - { - ps = [ property-set.empty ] ; - } - - # We add ourselves to the properties so that any tag rule can get more - # direct information about the target than just that available through - # the properties. This is useful in implementing name changes based on - # the sources of the target. For example to make unique names of object - # files based on the source file. 
--grafik - ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ; - - local tag = [ $(ps).get <tag> ] ; - - if $(tag) - { - local rule-name = [ MATCH ^@(.*) : $(tag) ] ; - if $(rule-name) - { - if $(tag[2]) - { - errors.error "<tag>@rulename is present but is not the only" - "<tag> feature" ; - } - - self.name = [ indirect.call $(rule-name) $(specified-name) - : $(self.type) : $(ps) ] ; - } - else - { - errors.error - "The value of the <tag> feature must be '@rule-name'" ; - } - } - - # If there is no tag or the tag rule returned nothing. - if ! $(tag) || ! $(self.name) - { - self.name = [ virtual-target.add-prefix-and-suffix $(specified-name) - : $(self.type) : $(ps) ] ; - } - } - - rule actualize-no-scanner ( ) - { - local name = [ actual-name ] ; - - # Do anything only on the first invocation. - if ! $(self.made.$(name)) - { - self.made.$(name) = true ; - - if $(self.action) - { - # For non-derived target, we do not care if there are several - # virtual targets that refer to the same name. One case when - # this is unavoidable is when the file name is main.cpp and two - # targets have types CPP (for compiling) and MOCCABLE_CPP (for - # conversion to H via Qt tools). - virtual-target.register-actual-name $(name) : $(__name__) ; - } - - for local i in $(self.dependencies) - { - DEPENDS $(name) : [ $(i).actualize ] ; - } - - actualize-location $(name) ; - actualize-action $(name) ; - } - return $(name) ; - } -} - - -# Appends the suffix appropriate to 'type/property-set' combination to the -# specified name and returns the result. -# -rule add-prefix-and-suffix ( specified-name : type ? : property-set ) -{ - local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ; - - # Handle suffixes for which no leading dot is desired. Those are specified - # by enclosing them in <...>. Needed by python so it can create "_d.so" - # extensions, for example. - if $(suffix:G) - { - suffix = [ utility.ungrist $(suffix) ] ; - } - else - { - suffix = .$(suffix) ; - } - - local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ; - - if [ MATCH ^($(prefix)) : $(specified-name) ] - { - prefix = ; - } - return $(prefix:E="")$(specified-name)$(suffix:E="") ; -} - - -# File targets with explicitly known location. -# -# The file path is determined as -# * Value passed to the 'set-path' method, if any. -# * For derived files, project's build dir, joined with components that -# describe action properties. If free properties are not equal to the -# project's reference properties an element with the name of the main -# target is added. -# * For source files, project's source dir. -# -# The file suffix is determined as: -# * The value passed to the 'suffix' method, if any. -# * The suffix corresponding to the target's type. -# -class file-target : abstract-file-target -{ - import "class" : new ; - import common ; - import errors ; - - rule __init__ ( - name exact ? - : type ? # Optional type for this target. - : project - : action ? - : path ? - ) - { - abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) : - $(action) ; - - self.path = $(path) ; - } - - rule clone-with-different-type ( new-type ) - { - return [ new file-target $(self.name) exact : $(new-type) : - $(self.project) : $(self.action) : $(self.path) ] ; - } - - rule actualize-location ( target ) - { - if $(self.action) - { - # This is a derived file. - local path = [ path ] ; - LOCATE on $(target) = $(path) ; - - # Make sure the path exists. 
- DEPENDS $(target) : $(path) ; - common.MkDir $(path) ; - - # It is possible that the target name includes a directory too, for - # example when installing headers. Create that directory. - if $(target:D) - { - local d = $(target:D) ; - d = $(d:R=$(path)) ; - DEPENDS $(target) : $(d) ; - common.MkDir $(d) ; - } - - # For a real file target, we create a fake target depending on the - # real target. This allows us to run - # - # bjam hello.o - # - # without trying to guess the name of the real target. Note that the - # target has no directory name and uses a special <e> grist. - # - # First, that means that "bjam hello.o" will build all known hello.o - # targets. Second, the <e> grist makes sure this target will not be - # confused with other targets, for example, if we have subdir 'test' - # with target 'test' in it that includes a 'test.o' file, then the - # target for directory will be just 'test' the target for test.o - # will be <ptest/bin/gcc/debug>test.o and the target we create below - # will be <e>test.o - DEPENDS $(target:G=e) : $(target) ; - # Allow bjam <path-to-file>/<file> to work. This will not catch all - # possible ways to refer to the path (relative/absolute, extra ".", - # various "..", but should help in obvious cases. - DEPENDS $(target:G=e:R=$(path)) : $(target) ; - } - else - { - SEARCH on $(target) = [ path.native $(self.path) ] ; - } - } - - # Returns the directory for this target. - # - rule path ( ) - { - if ! $(self.path) - { - if $(self.action) - { - local p = [ $(self.action).properties ] ; - local path,relative-to-build-dir = [ $(p).target-path ] ; - local path = $(path,relative-to-build-dir[1]) ; - local relative-to-build-dir = $(path,relative-to-build-dir[2]) ; - - if $(relative-to-build-dir) - { - path = [ path.join [ $(self.project).build-dir ] $(path) ] ; - } - - self.path = [ path.native $(path) ] ; - } - } - return $(self.path) ; - } -} - - -class notfile-target : abstract-file-target -{ - rule __init__ ( name : project : action ? ) - { - abstract-file-target.__init__ $(name) : : $(project) : $(action) ; - } - - # Returns nothing to indicate that the target's path is not known. - # - rule path ( ) - { - return ; - } - - rule actualize-location ( target ) - { - NOTFILE $(target) ; - ALWAYS $(target) ; - # TEMPORARY $(target) ; - NOUPDATE $(target) ; - } -} - - -# Class representing an action. Both 'targets' and 'sources' should list -# instances of 'virtual-target'. Action name should name a rule with this -# prototype: -# rule action-name ( targets + : sources * : properties * ) -# Targets and sources are passed as actual Jam targets. The rule may not -# establish additional dependency relationships. -# -class action -{ - import "class" ; - import errors ; - import type ; - import toolset ; - import property-set ; - import indirect ; - import path ; - import set : difference ; - - rule __init__ ( sources * : action-name + : property-set ? ) - { - self.sources = $(sources) ; - - self.action-name = [ indirect.make-qualified $(action-name) ] ; - - if ! $(property-set) - { - property-set = [ property-set.empty ] ; - } - - if ! 
[ class.is-instance $(property-set) ] - { - errors.error "Property set instance required" ; - } - - self.properties = $(property-set) ; - } - - rule add-targets ( targets * ) - { - self.targets += $(targets) ; - } - - rule replace-targets ( old-targets * : new-targets * ) - { - self.targets = [ set.difference $(self.targets) : $(old-targets) ] ; - self.targets += $(new-targets) ; - } - - rule targets ( ) - { - return $(self.targets) ; - } - - rule sources ( ) - { - return $(self.sources) ; - } - - rule action-name ( ) - { - return $(self.action-name) ; - } - - rule properties ( ) - { - return $(self.properties) ; - } - - # Generates actual build instructions. - # - rule actualize ( ) - { - if ! $(self.actualized) - { - self.actualized = true ; - - local ps = [ properties ] ; - local properties = [ adjust-properties $(ps) ] ; - - local actual-targets ; - for local i in [ targets ] - { - actual-targets += [ $(i).actualize ] ; - } - - actualize-sources [ sources ] : $(properties) ; - - DEPENDS $(actual-targets) : $(self.actual-sources) - $(self.dependency-only-sources) ; - - # This works around a bug with -j and actions that - # produce multiple target, where: - # - dependency on the first output is found, and - # the action is started - # - dependency on the second output is found, and - # bjam noticed that command is already running - # - instead of waiting for the command, dependents - # of the second targets are immediately updated. - if $(actual-targets[2]) - { - INCLUDES $(actual-targets) : $(actual-targets) ; - } - - # Action name can include additional argument to rule, which should - # not be passed to 'set-target-variables' - toolset.set-target-variables - [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets) - : $(properties) ; - - # Reflect ourselves in a variable for the target. This allows - # looking up additional info for the action given the raw target. - # For example to debug or output action information from action - # rules. - .action on $(actual-targets) = $(__name__) ; - - indirect.call $(self.action-name) $(actual-targets) - : $(self.actual-sources) : [ $(properties).raw ] ; - - # Since we set up the creating action here, we set up the action for - # cleaning up as well. - common.Clean clean-all : $(actual-targets) ; - } - } - - # Helper for 'actualize-sources'. For each passed source, actualizes it with - # the appropriate scanner. Returns the actualized virtual targets. - # - rule actualize-source-type ( sources * : property-set ) - { - local result = ; - for local i in $(sources) - { - local scanner ; - if [ $(i).type ] - { - scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ; - } - result += [ $(i).actualize $(scanner) ] ; - } - return $(result) ; - } - - # Creates actual Jam targets for sources. Initializes the following member - # variables: - # 'self.actual-sources' -- sources passed to the updating action. - # 'self.dependency-only-sources' -- sources marked as dependencies, but - # are not used otherwise. - # - # New values will be *appended* to the variables. They may be non-empty if - # caller wants it. - # - rule actualize-sources ( sources * : property-set ) - { - local dependencies = [ $(self.properties).get <dependency> ] ; - - self.dependency-only-sources += - [ actualize-source-type $(dependencies) : $(property-set) ] ; - self.actual-sources += - [ actualize-source-type $(sources) : $(property-set) ] ; - - # This is used to help bjam find dependencies in generated headers and - # other main targets, e.g. 
in: - # - # make a.h : ....... ; - # exe hello : hello.cpp : <implicit-dependency>a.h ; - # - # For bjam to find the dependency the generated target must be - # actualized (i.e. have its Jam target constructed). In the above case, - # if we are building just hello ("bjam hello"), 'a.h' will not be - # actualized unless we do it here. - local implicit = [ $(self.properties).get <implicit-dependency> ] ; - for local i in $(implicit) - { - $(i:G=).actualize ; - } - } - - # Determines real properties when trying to build with 'properties'. This is - # the last chance to fix properties, for example to adjust includes to get - # generated headers correctly. Default implementation simply returns its - # argument. - # - rule adjust-properties ( property-set ) - { - return $(property-set) ; - } -} - - -# Action class which does nothing --- it produces the targets with specific -# properties out of nowhere. It is needed to distinguish virtual targets with -# different properties that are known to exist and have no actions which create -# them. -# -class null-action : action -{ - rule __init__ ( property-set ? ) - { - action.__init__ : .no-action : $(property-set) ; - } - - rule actualize ( ) - { - if ! $(self.actualized) - { - self.actualized = true ; - for local i in [ targets ] - { - $(i).actualize ; - } - } - } -} - - -# Class which acts exactly like 'action', except that its sources are not -# scanned for dependencies. -# -class non-scanning-action : action -{ - rule __init__ ( sources * : action-name + : property-set ? ) - { - action.__init__ $(sources) : $(action-name) : $(property-set) ; - } - - rule actualize-source-type ( sources * : property-set ) - { - local result ; - for local i in $(sources) - { - result += [ $(i).actualize ] ; - } - return $(result) ; - } -} - - -# Creates a virtual target with an appropriate name and type from 'file'. If a -# target with that name in that project already exists, returns that already -# created target. -# -# FIXME: a more correct way would be to compute the path to the file, based on -# name and source location for the project, and use that path to determine if -# the target has already been created. This logic should be shared with how we -# usually find targets identified by a specific target id. It should also be -# updated to work correctly when the file is specified using both relative and -# absolute paths. -# -# TODO: passing a project with all virtual targets is starting to be annoying. -# -rule from-file ( file : file-loc : project ) -{ - import type ; # Had to do this here to break a circular dependency. - - # Check whether we already created a target corresponding to this file. - local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ; - - if $(.files.$(path)) - { - return $(.files.$(path)) ; - } - else - { - local name = [ path.make $(file) ] ; - local type = [ type.type $(file) ] ; - local result ; - - result = [ new file-target $(file) : $(type) : $(project) : : - $(file-loc) ] ; - - .files.$(path) = $(result) ; - return $(result) ; - } -} - - -# Registers a new virtual target. Checks if there is already a registered target -# with the same name, type, project and subvariant properties as well as the -# same sources and equal action. If such target is found it is returned and a -# new 'target' is not registered. Otherwise, 'target' is registered and -# returned. 
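A rough model of the de-duplication just described: the cache is keyed on path plus name, and an entry is reused only when the action name, the sources and the relevant properties all match. The sketch below is a simplification with invented names; in particular it reduces the property comparison to a frozenset, whereas the rule compares base, free and dependency-minus-incidental properties of full property sets.

_cache = {}   # (path, name) -> list of previously registered target descriptions

def register_once(path, name, action_name=None, sources=(), properties=()):
    # A registered target is summarized as a comparable tuple for this example.
    candidate = (action_name, tuple(sources), frozenset(properties))
    bucket = _cache.setdefault((path, name), [])
    for existing in bucket:
        if existing == candidate:
            return existing            # same action, sources and properties: reuse it
    bucket.append(candidate)
    return candidate

a = register_once("bin/gcc", "hello.o", "gcc.compile", ["hello.cpp"], ["<optimization>speed"])
b = register_once("bin/gcc", "hello.o", "gcc.compile", ["hello.cpp"], ["<optimization>speed"])
c = register_once("bin/gcc", "hello.o", "gcc.compile", ["hello.cpp"], ["<optimization>off"])
assert a is b and a is not c           # equal requests collapse, different ones do not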
-# -rule register ( target ) -{ - local signature = [ sequence.join - [ $(target).path ] [ $(target).name ] : - ] ; - - local result ; - for local t in $(.cache.$(signature)) - { - local a1 = [ $(t).action ] ; - local a2 = [ $(target).action ] ; - - if ! $(result) - { - if ! $(a1) && ! $(a2) - { - result = $(t) ; - } - else - { - if $(a1) && $(a2) && - ( [ $(a1).action-name ] = [ $(a2).action-name ] ) && - ( [ $(a1).sources ] = [ $(a2).sources ] ) - { - local ps1 = [ $(a1).properties ] ; - local ps2 = [ $(a2).properties ] ; - local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference - [ $(ps1).dependency ] : [ $(ps1).incidental ] ] ; - local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference - [ $(ps2).dependency ] : [ $(ps2).incidental ] ] ; - if $(p1) = $(p2) - { - result = $(t) ; - } - } - } - } - } - - if ! $(result) - { - .cache.$(signature) += $(target) ; - result = $(target) ; - } - - .recent-targets += $(result) ; - .all-targets += $(result) ; - - return $(result) ; -} - - -# Each target returned by 'register' is added to the .recent-targets list, -# returned by this function. This allows us to find all virtual targets created -# when building a specific main target, even those constructed only as -# intermediate targets. -# -rule recent-targets ( ) -{ - return $(.recent-targets) ; -} - - -rule clear-recent-targets ( ) -{ - .recent-targets = ; -} - - -# Returns all virtual targets ever created. -# -rule all-targets ( ) -{ - return $(.all-targets) ; -} - - -# Returns all targets from 'targets' with types equal to 'type' or derived from -# it. -# -rule select-by-type ( type : targets * ) -{ - local result ; - for local t in $(targets) - { - if [ type.is-subtype [ $(t).type ] $(type) ] - { - result += $(t) ; - } - } - return $(result) ; -} - - -rule register-actual-name ( actual-name : virtual-target ) -{ - if $(.actual.$(actual-name)) - { - local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ; - local cs2 = [ $(virtual-target).creating-subvariant ] ; - local cmt1 = [ $(cs1).main-target ] ; - local cmt2 = [ $(cs2).main-target ] ; - - local action1 = [ $(.actual.$(actual-name)).action ] ; - local action2 = [ $(virtual-target).action ] ; - local properties-added ; - local properties-removed ; - if $(action1) && $(action2) - { - local p1 = [ $(action1).properties ] ; - p1 = [ $(p1).raw ] ; - local p2 = [ $(action2).properties ] ; - p2 = [ $(p2).raw ] ; - properties-removed = [ set.difference $(p1) : $(p2) ] ; - properties-removed ?= "none" ; - properties-added = [ set.difference $(p2) : $(p1) ] ; - properties-added ?= "none" ; - } - errors.error "Duplicate name of actual target:" $(actual-name) - : "previous virtual target" [ $(.actual.$(actual-name)).str ] - : "created from" [ $(cmt1).full-name ] - : "another virtual target" [ $(virtual-target).str ] - : "created from" [ $(cmt2).full-name ] - : "added properties:" $(properties-added) - : "removed properties:" $(properties-removed) ; - } - else - { - .actual.$(actual-name) = $(virtual-target) ; - } -} - - -# Traverses the dependency graph of 'target' and return all targets that will be -# created before this one is created. If the root of some dependency graph is -# found during traversal, it is either included or not, depending on the -# 'include-roots' value. In either case traversal stops at root targets, i.e. -# root target sources are not traversed. -# -rule traverse ( target : include-roots ? : include-sources ? 
) -{ - local result ; - if [ $(target).action ] - { - local action = [ $(target).action ] ; - # This includes the 'target' as well. - result += [ $(action).targets ] ; - - for local t in [ $(action).sources ] - { - if ! [ $(t).root ] - { - result += [ traverse $(t) : $(include-roots) : $(include-sources) ] ; - } - else if $(include-roots) - { - result += $(t) ; - } - } - } - else if $(include-sources) - { - result = $(target) ; - } - return $(result) ; -} - - -# Takes an 'action' instance and creates a new instance of it and all targets -# produced by the action. The rule-name and properties are set to -# 'new-rule-name' and 'new-properties', if those are specified. Returns the -# cloned action. -# -rule clone-action ( action : new-project : new-action-name ? : new-properties ? ) -{ - if ! $(new-action-name) - { - new-action-name = [ $(action).action-name ] ; - } - if ! $(new-properties) - { - new-properties = [ $(action).properties ] ; - } - - local action-class = [ modules.peek $(action) : __class__ ] ; - local cloned-action = [ class.new $(action-class) - [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ; - - local cloned-targets ; - for local target in [ $(action).targets ] - { - local n = [ $(target).name ] ; - # Do not modify produced target names. - local cloned-target = [ class.new file-target $(n) exact : - [ $(target).type ] : $(new-project) : $(cloned-action) ] ; - local d = [ $(target).dependencies ] ; - if $(d) - { - $(cloned-target).depends $(d) ; - } - $(cloned-target).root [ $(target).root ] ; - $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ; - - cloned-targets += $(cloned-target) ; - } - - return $(cloned-action) ; -} - - -class subvariant -{ - import sequence ; - import type ; - - rule __init__ ( main-target # The instance of main-target class. - : property-set # Properties requested for this target. - : sources * - : build-properties # Actually used properties. - : sources-usage-requirements # Properties propagated from sources. - : created-targets * ) # Top-level created targets. - { - self.main-target = $(main-target) ; - self.properties = $(property-set) ; - self.sources = $(sources) ; - self.build-properties = $(build-properties) ; - self.sources-usage-requirements = $(sources-usage-requirements) ; - self.created-targets = $(created-targets) ; - - # Pre-compose a list of other dependency graphs this one depends on. - local deps = [ $(build-properties).get <implicit-dependency> ] ; - for local d in $(deps) - { - self.other-dg += [ $(d:G=).creating-subvariant ] ; - } - - self.other-dg = [ sequence.unique $(self.other-dg) ] ; - } - - rule main-target ( ) - { - return $(self.main-target) ; - } - - rule created-targets ( ) - { - return $(self.created-targets) ; - } - - rule requested-properties ( ) - { - return $(self.properties) ; - } - - rule build-properties ( ) - { - return $(self.build-properties) ; - } - - rule sources-usage-requirements ( ) - { - return $(self.sources-usage-requirements) ; - } - - rule set-usage-requirements ( usage-requirements ) - { - self.usage-requirements = $(usage-requirements) ; - } - - rule usage-requirements ( ) - { - return $(self.usage-requirements) ; - } - - # Returns all targets referenced by this subvariant, either directly or - # indirectly, and either as sources, or as dependency properties. Targets - # referred to using the dependency property are returned as properties, not - # targets. - # - rule all-referenced-targets ( theset ) - { - # Find directly referenced targets. 
- local deps = [ $(self.build-properties).dependency ] ; - local all-targets = $(self.sources) $(deps) ; - - # Find other subvariants. - local r ; - for local t in $(all-targets) - { - if ! [ $(theset).contains $(t) ] - { - $(theset).add $(t) ; - r += [ $(t:G=).creating-subvariant ] ; - } - } - r = [ sequence.unique $(r) ] ; - for local s in $(r) - { - if $(s) != $(__name__) - { - $(s).all-referenced-targets $(theset) ; - } - } - } - - # Returns the properties specifying implicit include paths to generated - # headers. This traverses all targets in this subvariant and subvariants - # referred by <implcit-dependecy> properties. For all targets of type - # 'target-type' (or for all targets, if 'target-type' is not specified), the - # result will contain <$(feature)>path-to-that-target. - # - rule implicit-includes ( feature : target-type ? ) - { - local key = ii$(feature)-$(target-type:E="") ; - if ! $($(key))-is-not-empty - { - local target-paths = [ all-target-directories $(target-type) ] ; - target-paths = [ sequence.unique $(target-paths) ] ; - local result = $(target-paths:G=$(feature)) ; - if ! $(result) - { - result = "" ; - } - $(key) = $(result) ; - } - if $($(key)) = "" - { - return ; - } - else - { - return $($(key)) ; - } - } - - rule all-target-directories ( target-type ? ) - { - if ! $(self.target-directories) - { - compute-target-directories $(target-type) ; - } - return $(self.target-directories) ; - } - - rule compute-target-directories ( target-type ? ) - { - local result ; - for local t in $(self.created-targets) - { - # Skip targets of the wrong type. - if ! $(target-type) || - [ type.is-derived [ $(t).type ] $(target-type) ] - { - result = [ sequence.merge $(result) : [ $(t).path ] ] ; - } - } - for local d in $(self.other-dg) - { - result += [ $(d).all-target-directories $(target-type) ] ; - } - self.target-directories = $(result) ; - } -} diff --git a/jam-files/boost-build/build/virtual_target.py b/jam-files/boost-build/build/virtual_target.py deleted file mode 100644 index 51dff037..00000000 --- a/jam-files/boost-build/build/virtual_target.py +++ /dev/null @@ -1,1118 +0,0 @@ -# Status: ported. -# Base revision: 64488. -# -# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -# Implements virtual targets, which correspond to actual files created during -# build, but are not yet targets in Jam sense. They are needed, for example, -# when searching for possible transormation sequences, when it's not known -# if particular target should be created at all. 
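The class diagram that follows mirrors the Jam original earlier in this commit. As a loose illustration of the central relationship it depicts, an action that owns its sources and the targets produced from them and that emits its build instruction at most once, consider the sketch below; the names are invented and the real classes carry far more state (property sets, scanners, dependency bookkeeping).

class ToyAction:
    """Ties a list of source names to the target names it produces."""

    def __init__(self, sources, action_name):
        self.sources = list(sources)
        self.action_name = action_name
        self.targets = []              # filled in by the targets that use this action
        self._actualized = False

    def add_target(self, target_name):
        self.targets.append(target_name)

    def actualize(self, engine_log):
        # Emit the build instruction only once, no matter how many of this
        # action's targets get actualized.
        if not self._actualized:
            self._actualized = True
            engine_log.append((self.action_name, tuple(self.targets), tuple(self.sources)))

log = []
link = ToyAction(["hello.o", "main.o"], "gcc.link")
link.add_target("hello")
link.actualize(log)
link.actualize(log)                    # second call does nothing
assert log == [("gcc.link", ("hello",), ("hello.o", "main.o"))]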
-# -# -# +--------------------------+ -# | VirtualTarget | -# +==========================+ -# | actualize | -# +--------------------------+ -# | actualize_action() = 0 | -# | actualize_location() = 0 | -# +----------------+---------+ -# | -# ^ -# / \ -# +-+-+ -# | -# +---------------------+ +-------+--------------+ -# | Action | | AbstractFileTarget | -# +=====================| * +======================+ -# | action_name | +--+ action | -# | properties | | +----------------------+ -# +---------------------+--+ | actualize_action() | -# | actualize() |0..1 +-----------+----------+ -# | path() | | -# | adjust_properties() | sources | -# | actualize_sources() | targets | -# +------+--------------+ ^ -# | / \ -# ^ +-+-+ -# / \ | -# +-+-+ +-------------+-------------+ -# | | | -# | +------+---------------+ +--------+-------------+ -# | | FileTarget | | SearchedLibTarget | -# | +======================+ +======================+ -# | | actualize-location() | | actualize-location() | -# | +----------------------+ +----------------------+ -# | -# +-+------------------------------+ -# | | -# +----+----------------+ +---------+-----------+ -# | CompileAction | | LinkAction | -# +=====================+ +=====================+ -# | adjust_properties() | | adjust_properties() | -# +---------------------+ | actualize_sources() | -# +---------------------+ -# -# The 'CompileAction' and 'LinkAction' classes are defined not here, -# but in builtin.jam modules. They are shown in the diagram to give -# the big picture. - -import bjam - -import re -import os.path -import string -import types - -from b2.util import path, utility, set -from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value -from b2.util.sequence import unique -from b2.tools import common -from b2.exceptions import * -import b2.build.type -import b2.build.property_set as property_set - -import b2.build.property as property - -from b2.manager import get_manager -from b2.util import bjam_signature - -__re_starts_with_at = re.compile ('^@(.*)') - -class VirtualTargetRegistry: - def __init__ (self, manager): - self.manager_ = manager - - # A cache for FileTargets - self.files_ = {} - - # A cache for targets. - self.cache_ = {} - - # A map of actual names to virtual targets. - # Used to make sure we don't associate same - # actual target to two virtual targets. - self.actual_ = {} - - self.recent_targets_ = [] - - # All targets ever registed - self.all_targets_ = [] - - self.next_id_ = 0 - - def register (self, target): - """ Registers a new virtual target. Checks if there's already registered target, with the same - name, type, project and subvariant properties, and also with the same sources - and equal action. If such target is found it is retured and 'target' is not registered. - Otherwise, 'target' is registered and returned. - """ - if target.path(): - signature = target.path() + "-" + target.name() - else: - signature = "-" + target.name() - - result = None - if not self.cache_.has_key (signature): - self.cache_ [signature] = [] - - for t in self.cache_ [signature]: - a1 = t.action () - a2 = target.action () - - # TODO: why are we checking for not result? 
- if not result: - if not a1 and not a2: - result = t - else: - if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources (): - ps1 = a1.properties () - ps2 = a2.properties () - p1 = ps1.base () + ps1.free () +\ - b2.util.set.difference(ps1.dependency(), ps1.incidental()) - p2 = ps2.base () + ps2.free () +\ - b2.util.set.difference(ps2.dependency(), ps2.incidental()) - if p1 == p2: - result = t - - if not result: - self.cache_ [signature].append (target) - result = target - - # TODO: Don't append if we found pre-existing target? - self.recent_targets_.append(result) - self.all_targets_.append(result) - - return result - - def from_file (self, file, file_location, project): - """ Creates a virtual target with appropriate name and type from 'file'. - If a target with that name in that project was already created, returns that already - created target. - TODO: more correct way would be to compute path to the file, based on name and source location - for the project, and use that path to determine if the target was already created. - TODO: passing project with all virtual targets starts to be annoying. - """ - # Check if we've created a target corresponding to this file. - path = os.path.join(os.getcwd(), file_location, file) - path = os.path.normpath(path) - - if self.files_.has_key (path): - return self.files_ [path] - - file_type = b2.build.type.type (file) - - result = FileTarget (file, file_type, project, - None, file_location) - self.files_ [path] = result - - return result - - def recent_targets(self): - """Each target returned by 'register' is added to a list of - 'recent-target', returned by this function. So, this allows - us to find all targets created when building a given main - target, even if the target.""" - - return self.recent_targets_ - - def clear_recent_targets(self): - self.recent_targets_ = [] - - def all_targets(self): - # Returns all virtual targets ever created - return self.all_targets_ - - # Returns all targets from 'targets' with types - # equal to 'type' or derived from it. - def select_by_type(self, type, targets): - return [t for t in targets if b2.build.type.is_sybtype(t.type(), type)] - - def register_actual_name (self, actual_name, virtual_target): - if self.actual_.has_key (actual_name): - cs1 = self.actual_ [actual_name].creating_subvariant () - cs2 = virtual_target.creating_subvariant () - cmt1 = cs1.main_target () - cmt2 = cs2.main_target () - - action1 = self.actual_ [actual_name].action () - action2 = virtual_target.action () - - properties_added = [] - properties_removed = [] - if action1 and action2: - p1 = action1.properties () - p1 = p1.raw () - p2 = action2.properties () - p2 = p2.raw () - - properties_removed = set.difference (p1, p2) - if not properties_removed: properties_removed = "none" - - properties_added = set.difference (p2, p1) - if not properties_added: properties_added = "none" - - # FIXME: Revive printing of real location. 
- get_manager().errors()( - "Duplicate name of actual target: '%s'\n" - "previous virtual target '%s'\n" - "created from '%s'\n" - "another virtual target '%s'\n" - "created from '%s'\n" - "added properties: '%s'\n" - "removed properties: '%s'\n" - % (actual_name, - self.actual_ [actual_name], "loc", #cmt1.location (), - virtual_target, - "loc", #cmt2.location (), - properties_added, properties_removed)) - - else: - self.actual_ [actual_name] = virtual_target - - - def add_suffix (self, specified_name, file_type, prop_set): - """ Appends the suffix appropriate to 'type/property_set' combination - to the specified name and returns the result. - """ - suffix = b2.build.type.generated_target_suffix (file_type, prop_set) - - if suffix: - return specified_name + '.' + suffix - - else: - return specified_name - -class VirtualTarget: - """ Potential target. It can be converted into jam target and used in - building, if needed. However, it can be also dropped, which allows - to search for different transformation and select only one. - name: name of this target. - project: project to which this target belongs. - """ - def __init__ (self, name, project): - self.name_ = name - self.project_ = project - self.dependencies_ = [] - self.always_ = False - - # Caches if dapendencies for scanners have already been set. - self.made_ = {} - - def manager(self): - return self.project_.manager() - - def virtual_targets(self): - return self.manager().virtual_targets() - - def name (self): - """ Name of this target. - """ - return self.name_ - - def project (self): - """ Project of this target. - """ - return self.project_ - - def depends (self, d): - """ Adds additional instances of 'VirtualTarget' that this - one depends on. - """ - self.dependencies_ = unique (self.dependencies_ + d).sort () - - def dependencies (self): - return self.dependencies_ - - def always(self): - self.always_ = True - - def actualize (self, scanner = None): - """ Generates all the actual targets and sets up build actions for - this target. - - If 'scanner' is specified, creates an additional target - with the same location as actual target, which will depend on the - actual target and be associated with 'scanner'. That additional - target is returned. See the docs (#dependency_scanning) for rationale. - Target must correspond to a file if 'scanner' is specified. - - If scanner is not specified, then actual target is returned. - """ - actual_name = self.actualize_no_scanner () - - if self.always_: - bjam.call("ALWAYS", actual_name) - - if not scanner: - return actual_name - - else: - # Add the scanner instance to the grist for name. - g = '-'.join ([ungrist(get_grist(actual_name)), str(id(scanner))]) - - name = replace_grist (actual_name, '<' + g + '>') - - if not self.made_.has_key (name): - self.made_ [name] = True - - self.project_.manager ().engine ().add_dependency (name, actual_name) - - self.actualize_location (name) - - self.project_.manager ().scanners ().install (scanner, name, str (self)) - - return name - -# private: (overridables) - - def actualize_action (self, target): - """ Sets up build actions for 'target'. Should call appropriate rules - and set target variables. - """ - raise BaseException ("method should be defined in derived classes") - - def actualize_location (self, target): - """ Sets up variables on 'target' which specify its location. 
- """ - raise BaseException ("method should be defined in derived classes") - - def path (self): - """ If the target is generated one, returns the path where it will be - generated. Otherwise, returns empty list. - """ - raise BaseException ("method should be defined in derived classes") - - def actual_name (self): - """ Return that actual target name that should be used - (for the case where no scanner is involved) - """ - raise BaseException ("method should be defined in derived classes") - - -class AbstractFileTarget (VirtualTarget): - """ Target which correspond to a file. The exact mapping for file - is not yet specified in this class. (TODO: Actually, the class name - could be better...) - - May be a source file (when no action is specified), or - derived file (otherwise). - - The target's grist is concatenation of project's location, - properties of action (for derived files), and, optionally, - value identifying the main target. - - exact: If non-empty, the name is exactly the name - created file should have. Otherwise, the '__init__' - method will add suffix obtained from 'type' by - calling 'type.generated-target-suffix'. - - type: optional type of this target. - """ - def __init__ (self, name, type, project, action = None, exact=False): - VirtualTarget.__init__ (self, name, project) - - self.type_ = type - - self.action_ = action - self.exact_ = exact - - if action: - action.add_targets ([self]) - - if self.type and not exact: - self.__adjust_name (name) - - - self.actual_name_ = None - self.path_ = None - self.intermediate_ = False - self.creating_subvariant_ = None - - # True if this is a root target. - self.root_ = False - - def type (self): - return self.type_ - - def set_path (self, path): - """ Sets the path. When generating target name, it will override any path - computation from properties. - """ - self.path_ = path - - def action (self): - """ Returns the action. - """ - return self.action_ - - def root (self, set = None): - """ Sets/gets the 'root' flag. Target is root is it directly correspods to some - variant of a main target. - """ - if set: - self.root_ = True - return self.root_ - - def creating_subvariant (self, s = None): - """ Gets or sets the subvariant which created this target. Subvariant - is set when target is brought into existance, and is never changed - after that. In particual, if target is shared by subvariant, only - the first is stored. - s: If specified, specified the value to set, - which should be instance of 'subvariant' class. - """ - if s and not self.creating_subvariant (): - if self.creating_subvariant (): - raise BaseException ("Attempt to change 'dg'") - - else: - self.creating_subvariant_ = s - - return self.creating_subvariant_ - - def actualize_action (self, target): - if self.action_: - self.action_.actualize () - - # Return a human-readable representation of this target - # - # If this target has an action, that's: - # - # { <action-name>-<self.name>.<self.type> <action-sources>... } - # - # otherwise, it's: - # - # { <self.name>.<self.type> } - # - def str(self): - a = self.action() - - name_dot_type = self.name_ + "." 
+ self.type_ - - if a: - action_name = a.action_name() - ss = [ s.str() for s in a.sources()] - - return "{ %s-%s %s}" % (action_name, name_dot_type, str(ss)) - else: - return "{ " + name_dot_type + " }" - -# private: - - def actual_name (self): - if not self.actual_name_: - self.actual_name_ = '<' + self.grist() + '>' + self.name_ - - return self.actual_name_ - - def grist (self): - """Helper to 'actual_name', above. Compute unique prefix used to distinguish - this target from other targets with the same name which create different - file. - """ - # Depending on target, there may be different approaches to generating - # unique prefixes. We'll generate prefixes in the form - # <one letter approach code> <the actual prefix> - path = self.path () - - if path: - # The target will be generated to a known path. Just use the path - # for identification, since path is as unique as it can get. - return 'p' + path - - else: - # File is either source, which will be searched for, or is not a file at - # all. Use the location of project for distinguishing. - project_location = self.project_.get ('location') - path_components = b2.util.path.split(project_location) - location_grist = '!'.join (path_components) - - if self.action_: - ps = self.action_.properties () - property_grist = ps.as_path () - # 'property_grist' can be empty when 'ps' is an empty - # property set. - if property_grist: - location_grist = location_grist + '/' + property_grist - - return 'l' + location_grist - - def __adjust_name(self, specified_name): - """Given the target name specified in constructor, returns the - name which should be really used, by looking at the <tag> properties. - The tag properties come in two flavour: - - <tag>value, - - <tag>@rule-name - In the first case, value is just added to name - In the second case, the specified rule is called with specified name, - target type and properties and should return the new name. - If not <tag> property is specified, or the rule specified by - <tag> returns nothing, returns the result of calling - virtual-target.add-suffix""" - - if self.action_: - ps = self.action_.properties() - else: - ps = property_set.empty() - - # FIXME: I'm not sure how this is used, need to check with - # Rene to figure out how to implement - #~ We add ourselves to the properties so that any tag rule can get - #~ more direct information about the target than just that available - #~ through the properties. This is useful in implementing - #~ name changes based on the sources of the target. For example to - #~ make unique names of object files based on the source file. - #~ --grafik - #ps = property_set.create(ps.raw() + ["<target>%s" % "XXXX"]) - #ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ; - - tag = ps.get("<tag>") - - if tag: - - if len(tag) > 1: - get_manager().errors()( - """<tag>@rulename is present but is not the only <tag> feature""") - - tag = tag[0] - if callable(tag): - self.name_ = tag(specified_name, self.type_, ps) - else: - if not tag[0] == '@': - self.manager_.errors()("""The value of the <tag> feature must be '@rule-nane'""") - - exported_ps = b2.util.value_to_jam(ps, methods=True) - self.name_ = b2.util.call_jam_function( - tag[1:], specified_name, self.type_, exported_ps) - if self.name_: - self.name_ = self.name_[0] - - # If there's no tag or the tag rule returned nothing. 
- if not tag or not self.name_: - self.name_ = add_prefix_and_suffix(specified_name, self.type_, ps) - - def actualize_no_scanner(self): - name = self.actual_name() - - # Do anything only on the first invocation - if not self.made_: - self.made_[name] = True - - if self.action_: - # For non-derived target, we don't care if there - # are several virtual targets that refer to the same name. - # One case when this is unavoidable is when file name is - # main.cpp and two targets have types CPP (for compiling) - # and MOCCABLE_CPP (for convertion to H via Qt tools). - self.virtual_targets().register_actual_name(name, self) - - for i in self.dependencies_: - self.manager_.engine().add_dependency(name, i.actualize()) - - self.actualize_location(name) - self.actualize_action(name) - - return name - -@bjam_signature((["specified_name"], ["type"], ["property_set"])) -def add_prefix_and_suffix(specified_name, type, property_set): - """Appends the suffix appropriate to 'type/property-set' combination - to the specified name and returns the result.""" - - property_set = b2.util.jam_to_value_maybe(property_set) - - suffix = "" - if type: - suffix = b2.build.type.generated_target_suffix(type, property_set) - - # Handle suffixes for which no leading dot is desired. Those are - # specified by enclosing them in <...>. Needed by python so it - # can create "_d.so" extensions, for example. - if get_grist(suffix): - suffix = ungrist(suffix) - elif suffix: - suffix = "." + suffix - - prefix = "" - if type: - prefix = b2.build.type.generated_target_prefix(type, property_set) - - if specified_name.startswith(prefix): - prefix = "" - - if not prefix: - prefix = "" - if not suffix: - suffix = "" - return prefix + specified_name + suffix - - -class FileTarget (AbstractFileTarget): - """ File target with explicitly known location. - - The file path is determined as - - value passed to the 'set_path' method, if any - - for derived files, project's build dir, joined with components - that describe action's properties. If the free properties - are not equal to the project's reference properties - an element with name of main target is added. - - for source files, project's source dir - - The file suffix is - - the value passed to the 'suffix' method, if any, or - - the suffix which correspond to the target's type. - """ - def __init__ (self, name, type, project, action = None, path=None, exact=False): - AbstractFileTarget.__init__ (self, name, type, project, action, exact) - - self.path_ = path - - def __str__(self): - if self.type_: - return self.name_ + "." + self.type_ - else: - return self.name_ - - def clone_with_different_type(self, new_type): - return FileTarget(self.name_, new_type, self.project_, - self.action_, self.path_, exact=True) - - def actualize_location (self, target): - engine = self.project_.manager_.engine () - - if self.action_: - # This is a derived file. - path = self.path () - engine.set_target_variable (target, 'LOCATE', path) - - # Make sure the path exists. - engine.add_dependency (target, path) - common.mkdir(engine, path) - - # It's possible that the target name includes a directory - # too, for example when installing headers. Create that - # directory. - d = os.path.dirname(get_value(target)) - if d: - d = os.path.join(path, d) - engine.add_dependency(target, d) - common.mkdir(engine, d) - - # For real file target, we create a fake target that - # depends on the real target. This allows to run - # - # bjam hello.o - # - # without trying to guess the name of the real target. 
- # Note the that target has no directory name, and a special - # grist <e>. - # - # First, that means that "bjam hello.o" will build all - # known hello.o targets. - # Second, the <e> grist makes sure this target won't be confused - # with other targets, for example, if we have subdir 'test' - # with target 'test' in it that includes 'test.o' file, - # then the target for directory will be just 'test' the target - # for test.o will be <ptest/bin/gcc/debug>test.o and the target - # we create below will be <e>test.o - engine.add_dependency("<e>%s" % get_value(target), target) - - # Allow bjam <path-to-file>/<file> to work. This won't catch all - # possible ways to refer to the path (relative/absolute, extra ".", - # various "..", but should help in obvious cases. - engine.add_dependency("<e>%s" % (os.path.join(path, get_value(target))), target) - - else: - # This is a source file. - engine.set_target_variable (target, 'SEARCH', self.project_.get ('source-location')) - - - def path (self): - """ Returns the directory for this target. - """ - if not self.path_: - if self.action_: - p = self.action_.properties () - (target_path, relative_to_build_dir) = p.target_path () - - if relative_to_build_dir: - # Indicates that the path is relative to - # build dir. - target_path = os.path.join (self.project_.build_dir (), target_path) - - # Store the computed path, so that it's not recomputed - # any more - self.path_ = target_path - - return self.path_ - - -class NotFileTarget(AbstractFileTarget): - - def __init__(self, name, project, action): - AbstractFileTarget.__init__(self, name, None, project, action) - - def path(self): - """Returns nothing, to indicate that target path is not known.""" - return None - - def actualize_location(self, target): - bjam.call("NOTFILE", target) - bjam.call("ALWAYS", target) - bjam.call("NOUPDATE", target) - - -class Action: - """ Class which represents an action. - Both 'targets' and 'sources' should list instances of 'VirtualTarget'. - Action name should name a rule with this prototype - rule action_name ( targets + : sources * : properties * ) - Targets and sources are passed as actual jam targets. The rule may - not establish dependency relationship, but should do everything else. - """ - def __init__ (self, manager, sources, action_name, prop_set): - assert(isinstance(prop_set, property_set.PropertySet)) - assert type(sources) == types.ListType - self.sources_ = sources - self.action_name_ = action_name - if not prop_set: - prop_set = property_set.empty() - self.properties_ = prop_set - if not all(isinstance(v, VirtualTarget) for v in prop_set.get('implicit-dependency')): - import pdb - pdb.set_trace() - - self.manager_ = manager - self.engine_ = self.manager_.engine () - self.targets_ = [] - - # Indicates whether this has been actualized or not. - self.actualized_ = False - - self.dependency_only_sources_ = [] - self.actual_sources_ = [] - - - def add_targets (self, targets): - self.targets_ += targets - - - def replace_targets (old_targets, new_targets): - self.targets_ = [t for t in targets if not t in old_targets] + new_targets - - def targets (self): - return self.targets_ - - def sources (self): - return self.sources_ - - def action_name (self): - return self.action_name_ - - def properties (self): - return self.properties_ - - def actualize (self): - """ Generates actual build instructions. 
- """ - if self.actualized_: - return - - self.actualized_ = True - - ps = self.properties () - properties = self.adjust_properties (ps) - - - actual_targets = [] - - for i in self.targets (): - actual_targets.append (i.actualize ()) - - self.actualize_sources (self.sources (), properties) - - self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_) - - # This works around a bug with -j and actions that - # produce multiple target, where: - # - dependency on the first output is found, and - # the action is started - # - dependency on the second output is found, and - # bjam noticed that command is already running - # - instead of waiting for the command, dependents - # of the second targets are immediately updated. - if len(actual_targets) > 1: - bjam.call("INCLUDES", actual_targets, actual_targets) - - # FIXME: check the comment below. Was self.action_name_ [1] - # Action name can include additional argument to rule, which should not - # be passed to 'set-target-variables' - # FIXME: breaking circular dependency - import toolset - toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties) - - engine = self.manager_.engine () - - # FIXME: this is supposed to help --out-xml option, but we don't - # implement that now, and anyway, we should handle it in Python, - # not but putting variables on bjam-level targets. - bjam.call("set-target-variable", actual_targets, ".action", repr(self)) - - self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_, - properties) - - # Since we set up creating action here, we also set up - # action for cleaning up - self.manager_.engine ().set_update_action ('common.Clean', 'clean-all', - actual_targets) - - return actual_targets - - def actualize_source_type (self, sources, prop_set): - """ Helper for 'actualize_sources'. - For each passed source, actualizes it with the appropriate scanner. - Returns the actualized virtual targets. - """ - result = [] - for i in sources: - scanner = None - -# FIXME: what's this? -# if isinstance (i, str): -# i = self.manager_.get_object (i) - - if i.type (): - scanner = b2.build.type.get_scanner (i.type (), prop_set) - - r = i.actualize (scanner) - result.append (r) - - return result - - def actualize_sources (self, sources, prop_set): - """ Creates actual jam targets for sources. Initializes two member - variables: - 'self.actual_sources_' -- sources which are passed to updating action - 'self.dependency_only_sources_' -- sources which are made dependencies, but - are not used otherwise. - - New values will be *appended* to the variables. They may be non-empty, - if caller wants it. - """ - dependencies = self.properties_.get ('<dependency>') - - self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set) - self.actual_sources_ += self.actualize_source_type (sources, prop_set) - - # This is used to help bjam find dependencies in generated headers - # in other main targets. - # Say: - # - # make a.h : ....... ; - # exe hello : hello.cpp : <implicit-dependency>a.h ; - # - # However, for bjam to find the dependency the generated target must - # be actualized (i.e. have the jam target). In the above case, - # if we're building just hello ("bjam hello"), 'a.h' won't be - # actualized unless we do it here. 
- implicit = self.properties_.get("<implicit-dependency>") - - for i in implicit: - i.actualize() - - def adjust_properties (self, prop_set): - """ Determines real properties when trying building with 'properties'. - This is last chance to fix properties, for example to adjust includes - to get generated headers correctly. Default implementation returns - its argument. - """ - return prop_set - - -class NullAction (Action): - """ Action class which does nothing --- it produces the targets with - specific properties out of nowhere. It's needed to distinguish virtual - targets with different properties that are known to exist, and have no - actions which create them. - """ - def __init__ (self, manager, prop_set): - Action.__init__ (self, manager, [], None, prop_set) - - def actualize (self): - if not self.actualized_: - self.actualized_ = True - - for i in self.targets (): - i.actualize () - -class NonScanningAction(Action): - """Class which acts exactly like 'action', except that the sources - are not scanned for dependencies.""" - - def __init__(self, sources, action_name, property_set): - #FIXME: should the manager parameter of Action.__init__ - #be removed? -- Steven Watanabe - Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set) - - def actualize_source_type(self, sources, property_set): - - result = [] - for s in sources: - result.append(s.actualize()) - return result - -def traverse (target, include_roots = False, include_sources = False): - """ Traverses the dependency graph of 'target' and return all targets that will - be created before this one is created. If root of some dependency graph is - found during traversal, it's either included or not, dependencing of the - value of 'include_roots'. In either case, sources of root are not traversed. - """ - result = [] - - if target.action (): - action = target.action () - - # This includes 'target' as well - result += action.targets () - - for t in action.sources (): - - # FIXME: - # TODO: see comment in Manager.register_object () - #if not isinstance (t, VirtualTarget): - # t = target.project_.manager_.get_object (t) - - if not t.root (): - result += traverse (t, include_roots, include_sources) - - elif include_roots: - result.append (t) - - elif include_sources: - result.append (target) - - return result - -def clone_action (action, new_project, new_action_name, new_properties): - """Takes an 'action' instances and creates new instance of it - and all produced target. The rule-name and properties are set - to 'new-rule-name' and 'new-properties', if those are specified. - Returns the cloned action.""" - - if not new_action_name: - new_action_name = action.action_name() - - if not new_properties: - new_properties = action.properties() - - cloned_action = action.__class__(action.manager_, action.sources(), new_action_name, - new_properties) - - cloned_targets = [] - for target in action.targets(): - - n = target.name() - # Don't modify the name of the produced targets. 
Strip the directory f - cloned_target = FileTarget(n, target.type(), new_project, - cloned_action, exact=True) - - d = target.dependencies() - if d: - cloned_target.depends(d) - cloned_target.root(target.root()) - cloned_target.creating_subvariant(target.creating_subvariant()) - - cloned_targets.append(cloned_target) - - return cloned_action - -class Subvariant: - - def __init__ (self, main_target, prop_set, sources, build_properties, sources_usage_requirements, created_targets): - """ - main_target: The instance of MainTarget class - prop_set: Properties requested for this target - sources: - build_properties: Actually used properties - sources_usage_requirements: Properties propagated from sources - created_targets: Top-level created targets - """ - self.main_target_ = main_target - self.properties_ = prop_set - self.sources_ = sources - self.build_properties_ = build_properties - self.sources_usage_requirements_ = sources_usage_requirements - self.created_targets_ = created_targets - - self.usage_requirements_ = None - - # Pre-compose the list of other dependency graphs, on which this one - # depends - deps = build_properties.get('<implicit-dependency>') - - self.other_dg_ = [] - for d in deps: - self.other_dg_.append(d.creating_subvariant ()) - - self.other_dg_ = unique (self.other_dg_) - - self.implicit_includes_cache_ = {} - self.target_directories_ = None - - def main_target (self): - return self.main_target_ - - def created_targets (self): - return self.created_targets_ - - def requested_properties (self): - return self.properties_ - - def build_properties (self): - return self.build_properties_ - - def sources_usage_requirements (self): - return self.sources_usage_requirements_ - - def set_usage_requirements (self, usage_requirements): - self.usage_requirements_ = usage_requirements - - def usage_requirements (self): - return self.usage_requirements_ - - def all_referenced_targets(self, result): - """Returns all targets referenced by this subvariant, - either directly or indirectly, and either as sources, - or as dependency properties. Targets referred with - dependency property are returned a properties, not targets.""" - - # Find directly referenced targets. - deps = self.build_properties().dependency() - all_targets = self.sources_ + deps - - # Find other subvariants. - r = [] - for e in all_targets: - if not e in result: - result.add(e) - if isinstance(e, property.Property): - t = e.value() - else: - t = e - - # FIXME: how can this be? - cs = t.creating_subvariant() - if cs: - r.append(cs) - r = unique(r) - for s in r: - if s != self: - s.all_referenced_targets(result) - - - def implicit_includes (self, feature, target_type): - """ Returns the properties which specify implicit include paths to - generated headers. This traverses all targets in this subvariant, - and subvariants referred by <implcit-dependecy>properties. - For all targets which are of type 'target-type' (or for all targets, - if 'target_type' is not specified), the result will contain - <$(feature)>path-to-that-target. 
- """ - - if not target_type: - key = feature - else: - key = feature + "-" + target_type - - - result = self.implicit_includes_cache_.get(key) - if not result: - target_paths = self.all_target_directories(target_type) - target_paths = unique(target_paths) - result = ["<%s>%s" % (feature, p) for p in target_paths] - self.implicit_includes_cache_[key] = result - - return result - - def all_target_directories(self, target_type = None): - # TODO: does not appear to use target_type in deciding - # if we've computed this already. - if not self.target_directories_: - self.target_directories_ = self.compute_target_directories(target_type) - return self.target_directories_ - - def compute_target_directories(self, target_type=None): - result = [] - for t in self.created_targets(): - if not target_type or b2.build.type.is_derived(t.type(), target_type): - result.append(t.path()) - - for d in self.other_dg_: - result.extend(d.all_target_directories(target_type)) - - result = unique(result) - return result diff --git a/jam-files/boost-build/kernel/boost-build.jam b/jam-files/boost-build/kernel/boost-build.jam deleted file mode 100644 index 377f6ec0..00000000 --- a/jam-files/boost-build/kernel/boost-build.jam +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -boost-build . ; diff --git a/jam-files/boost-build/kernel/bootstrap.jam b/jam-files/boost-build/kernel/bootstrap.jam deleted file mode 100644 index 89048af9..00000000 --- a/jam-files/boost-build/kernel/bootstrap.jam +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003, 2005, 2006 Rene Rivera -# Copyright 2003, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# First of all, check the jam version - -if $(JAM_VERSION:J="") < 030112 -{ - ECHO "error: Boost.Jam version 3.1.12 or later required" ; - EXIT ; -} - -local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ; - -for local r in $(required-rules) -{ - if ! $(r) in [ RULENAMES ] - { - ECHO "error: builtin rule '$(r)' is not present" ; - ECHO "error: your version of bjam is likely out of date" ; - ECHO "error: please get a fresh version from SVN." ; - EXIT ; - } -} - -local native = - regex transform 2 - ; -while $(native) -{ - if ! [ HAS_NATIVE_RULE $(native[1]) : - $(native[2]) : - $(native[3]) ] - { - ECHO "error: missing native rule '$(native[1]).$(native[2])'" ; - ECHO "error: or interface version of that rule is too low" ; - ECHO "error: your version of bjam is likely out of date" ; - ECHO "error: please get a fresh version from SVN." ; - EXIT ; - } - native = $(native[4-]) ; -} - -# Check that the builtin .ENVIRON module is present. We don't have a -# builtin to check that a module is present, so we assume that the PATH -# environment variable is always set and verify that the .ENVIRON module -# has non-empty value of that variable. -module .ENVIRON -{ - local p = $(PATH) $(Path) $(path) ; - if ! $(p) - { - ECHO "error: no builtin module .ENVIRON is found" ; - ECHO "error: your version of bjam is likely out of date" ; - ECHO "error: please get a fresh version from SVN." ; - EXIT ; - } -} - -# Check that @() functionality is present. Similarly to modules, -# we don't have a way to test that directly. 
Instead we check that -# $(TMPNAME) functionality is present which was added at roughly -# the same time (more precisely it was added just before). -{ - if ! $(TMPNAME) - { - ECHO "error: no @() functionality found" ; - ECHO "error: your version of bjam is likely out of date" ; - ECHO "error: please get a fresh version from SVN." ; - EXIT ; - } -} - -# Make sure that \n escape is avaiable. -if "\n" = "n" -{ - if $(OS) = CYGWIN - { - ECHO "warning: escape sequences are not supported" ; - ECHO "warning: this will cause major misbehaviour on cygwin" ; - ECHO "warning: your version of bjam is likely out of date" ; - ECHO "warning: please get a fresh version from SVN." ; - } -} - -# Bootstrap the module system. Then bring the import rule into the global module. -# -SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ; -module modules { include <module@>modules.jam ; } -IMPORT modules : import : : import ; - -{ - # Add module subdirectories to the BOOST_BUILD_PATH, which allows - # us to make an incremental refactoring step by moving modules to - # the appropriate subdirectories, thereby achieving some physical - # separation of different layers without changing all of our code - # to specify subdirectories in import statements or use an extra - # level of qualification on imported names. - - local subdirs = - kernel # only the most-intrinsic modules: modules, errors - util # low-level substrate: string/number handling, etc. - build # essential elements of the build system architecture - tools # toolsets for handling specific build jobs and targets. - contrib # user contributed (unreviewed) modules - . # build-system.jam lives here - ; - local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ; - BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ; - - modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ; - - modules.poke : EXTRA_PYTHONPATH : $(whereami) ; -} - -# Reload the modules, to clean up things. The modules module can tolerate -# being included twice. -# -import modules ; - -# Process option plugins first to alow them to prevent loading -# the rest of the build system. -# -import option ; -local dont-build = [ option.process ] ; - -# Should we skip building, i.e. loading the build system, according -# to the options processed? -# -if ! $(dont-build) -{ - if ! --python in $(ARGV) - { - # Allow users to override the build system file from the - # command-line (mostly for testing) - local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ; - build-system ?= build-system ; - - # Use last element in case of multiple command-line options - import $(build-system[-1]) ; - } - else - { - ECHO "Boost.Build V2 Python port (experimental)" ; - - # Define additional interface that is exposed to Python code. Python code will - # also have access to select bjam builtins in the 'bjam' module, but some - # things are easier to define outside C. - module python_interface - { - rule load ( module-name : location ) - { - USER_MODULE $(module-name) ; - # Make all rules in the loaded module available in - # the global namespace, so that we don't have - # to bother specifying "right" module when calling - # from Python. - module $(module-name) - { - __name__ = $(1) ; - include $(2) ; - local rules = [ RULENAMES $(1) ] ; - IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ; - } - } - - rule peek ( module-name ? 
: variables + ) - { - module $(<) - { - return $($(>)) ; - } - } - - rule set-variable ( module-name : name : value * ) - { - module $(<) - { - $(>) = $(3) ; - } - } - - rule set-top-level-targets ( targets * ) - { - DEPENDS all : $(targets) ; - } - - rule call-in-module ( m : rulename : * ) - { - module $(m) - { - return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; - } - } - - - rule set-update-action ( action : targets * : sources * : properties * ) - { - $(action) $(targets) : $(sources) : $(properties) ; - } - - rule set-update-action-in-module ( m : action : targets * : sources * : properties * ) - { - module $(m) - { - $(2) $(3) : $(4) : $(5) ; - } - } - - rule set-target-variable ( targets + : variable : value * : append ? ) - { - if $(append) - { - $(variable) on $(targets) += $(value) ; - } - else - { - $(variable) on $(targets) = $(value) ; - } - } - - rule get-target-variable ( targets + : variable ) - { - return [ on $(targets) return $($(variable)) ] ; - } - - rule import-rules-from-parent ( parent-module : this-module : user-rules * ) - { - IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ; - EXPORT $(this-module) : $(user-rules) ; - } - - rule mark-included ( targets * : includes * ) { - NOCARE $(includes) ; - INCLUDES $(targets) : $(includes) ; - ISFILE $(includes) ; - } - } - - PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ; - modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ; - - module PyBB - { - local ok = [ bootstrap $(root) ] ; - if ! $(ok) - { - EXIT ; - } - } - - - #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ; - - #module PyBB - #{ - # main ; - #} - - } -} diff --git a/jam-files/boost-build/kernel/bootstrap.py b/jam-files/boost-build/kernel/bootstrap.py deleted file mode 100644 index 2e8dd37b..00000000 --- a/jam-files/boost-build/kernel/bootstrap.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2009 Vladimir Prus -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import imp -import sys - -def bootstrap(root_path): - """Performs python-side bootstrapping of Boost.Build/Python. - - This function arranges for 'b2.whatever' package names to work, while also - allowing to put python files alongside corresponding jam modules. - """ - - m = imp.new_module("b2") - # Note that: - # 1. If __path__ is not list of strings, nothing will work - # 2. root_path is already list of strings. - m.__path__ = root_path - sys.modules["b2"] = m - - import b2.build_system - return b2.build_system.main() - diff --git a/jam-files/boost-build/kernel/class.jam b/jam-files/boost-build/kernel/class.jam deleted file mode 100644 index b8e55af3..00000000 --- a/jam-files/boost-build/kernel/class.jam +++ /dev/null @@ -1,420 +0,0 @@ -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2002, 2005 Rene Rivera -# Copyright 2002, 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Polymorphic class system built on top of core Jam facilities. 
-# -# Classes are defined by 'class' keywords:: -# -# class myclass -# { -# rule __init__ ( arg1 ) # constructor -# { -# self.attribute = $(arg1) ; -# } -# -# rule method1 ( ) # method -# { -# return [ method2 ] ; -# } -# -# rule method2 ( ) # method -# { -# return $(self.attribute) ; -# } -# } -# -# The __init__ rule is the constructor, and sets member variables. -# -# New instances are created by invoking [ new <class> <args...> ]: -# -# local x = [ new myclass foo ] ; # x is a new myclass object -# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo" -# -# Derived class are created by mentioning base classes in the declaration:: -# -# class derived : myclass -# { -# rule __init__ ( arg ) -# { -# myclass.__init__ $(arg) ; # call base __init__ -# -# } -# -# rule method2 ( ) # method override -# { -# return $(self.attribute)XXX ; -# } -# } -# -# All methods operate virtually, replacing behavior in the base classes. For -# example:: -# -# local y = [ new derived foo ] ; # y is a new derived object -# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "foo" -# -# Each class instance is its own core Jam module. All instance attributes and -# methods are accessible without additional qualification from within the class -# instance. All rules imported in class declaration, or visible in base classses -# are also visible. Base methods are available in qualified form: -# base-name.method-name. By convention, attribute names are prefixed with -# "self.". - -import modules ; -import numbers ; - - -rule xinit ( instance : class ) -{ - module $(instance) - { - __class__ = $(2) ; - __name__ = $(1) ; - } -} - - -rule new ( class args * : * ) -{ - .next-instance ?= 1 ; - local id = object($(class))@$(.next-instance) ; - - xinit $(id) : $(class) ; - - INSTANCE $(id) : class@$(class) ; - IMPORT_MODULE $(id) ; - $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - - # Bump the next unique object name. - .next-instance = [ numbers.increment $(.next-instance) ] ; - - # Return the name of the new instance. - return $(id) ; -} - - -rule bases ( class ) -{ - module class@$(class) - { - return $(__bases__) ; - } -} - - -rule is-derived ( class : bases + ) -{ - local stack = $(class) ; - local visited found ; - while ! $(found) && $(stack) - { - local top = $(stack[1]) ; - stack = $(stack[2-]) ; - if ! ( $(top) in $(visited) ) - { - visited += $(top) ; - stack += [ bases $(top) ] ; - - if $(bases) in $(visited) - { - found = true ; - } - } - } - return $(found) ; -} - - -# Returns true if the 'value' is a class instance. -# -rule is-instance ( value ) -{ - return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ; -} - - -# Check if the given value is of the given type. -# -rule is-a ( - instance # The value to check. - : type # The type to test for. -) -{ - if [ is-instance $(instance) ] - { - return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ; - } -} - - -local rule typecheck ( x ) -{ - local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ; - if ! [ is-a $(x) : $(class-name) ] - { - return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ; - } -} - - -rule __test__ ( ) -{ - import assert ; - import "class" : new ; - - # This will be the construction function for a class called 'myclass'. - # - class myclass - { - import assert ; - - rule __init__ ( x_ * : y_ * ) - { - # Set some instance variables. 
- x = $(x_) ; - y = $(y_) ; - foo += 10 ; - } - - rule set-x ( newx * ) - { - x = $(newx) ; - } - - rule get-x ( ) - { - return $(x) ; - } - - rule set-y ( newy * ) - { - y = $(newy) ; - } - - rule get-y ( ) - { - return $(y) ; - } - - rule f ( ) - { - return [ g $(x) ] ; - } - - rule g ( args * ) - { - if $(x) in $(y) - { - return $(x) ; - } - else if $(y) in $(x) - { - return $(y) ; - } - else - { - return ; - } - } - - rule get-class ( ) - { - return $(__class__) ; - } - - rule get-instance ( ) - { - return $(__name__) ; - } - - rule invariant ( ) - { - assert.equal 1 : 1 ; - } - - rule get-foo ( ) - { - return $(foo) ; - } - } -# class myclass ; - - class derived1 : myclass - { - rule __init__ ( z_ ) - { - myclass.__init__ $(z_) : X ; - z = $(z_) ; - } - - # Override g. - # - rule g ( args * ) - { - return derived1.g ; - } - - rule h ( ) - { - return derived1.h ; - } - - rule get-z ( ) - { - return $(z) ; - } - - # Check that 'assert.equal' visible in base class is visible here. - # - rule invariant2 ( ) - { - assert.equal 2 : 2 ; - } - - # Check that 'assert.variable-not-empty' visible in base class is - # visible here. - # - rule invariant3 ( ) - { - local v = 10 ; - assert.variable-not-empty v ; - } - } -# class derived1 : myclass ; - - class derived2 : myclass - { - rule __init__ ( ) - { - myclass.__init__ 1 : 2 ; - } - - # Override g. - # - rule g ( args * ) - { - return derived2.g ; - } - - # Test the ability to call base class functions with qualification. - # - rule get-x ( ) - { - return [ myclass.get-x ] ; - } - } -# class derived2 : myclass ; - - class derived2a : derived2 - { - rule __init__ - { - derived2.__init__ ; - } - } -# class derived2a : derived2 ; - - local rule expect_derived2 ( [derived2] x ) { } - - local a = [ new myclass 3 4 5 : 4 5 ] ; - local b = [ new derived1 4 ] ; - local b2 = [ new derived1 4 ] ; - local c = [ new derived2 ] ; - local d = [ new derived2 ] ; - local e = [ new derived2a ] ; - - expect_derived2 $(d) ; - expect_derived2 $(e) ; - - # Argument checking is set up to call exit(1) directly on failure, and we - # can not hijack that with try, so we should better not do this test by - # default. We could fix this by having errors look up and invoke the EXIT - # rule instead; EXIT can be hijacked (;-) - if --fail-typecheck in [ modules.peek : ARGV ] - { - try ; - { - expect_derived2 $(a) ; - } - catch - "Expected an instance of derived2 but got" instead - ; - } - - #try ; - #{ - # new bad_subclass ; - #} - #catch - # bad_subclass.bad_subclass failed to call base class constructor myclass.__init__ - # ; - - #try ; - #{ - # class bad_subclass ; - #} - #catch bad_subclass has already been declared ; - - assert.result 3 4 5 : $(a).get-x ; - assert.result 4 5 : $(a).get-y ; - assert.result 4 : $(b).get-x ; - assert.result X : $(b).get-y ; - assert.result 4 : $(b).get-z ; - assert.result 1 : $(c).get-x ; - assert.result 2 : $(c).get-y ; - assert.result 4 5 : $(a).f ; - assert.result derived1.g : $(b).f ; - assert.result derived2.g : $(c).f ; - assert.result derived2.g : $(d).f ; - - assert.result 10 : $(b).get-foo ; - - $(a).invariant ; - $(b).invariant2 ; - $(b).invariant3 ; - - # Check that the __class__ attribute is getting properly set. 
- assert.result myclass : $(a).get-class ; - assert.result derived1 : $(b).get-class ; - assert.result $(a) : $(a).get-instance ; - - $(a).set-x a.x ; - $(b).set-x b.x ; - $(c).set-x c.x ; - $(d).set-x d.x ; - assert.result a.x : $(a).get-x ; - assert.result b.x : $(b).get-x ; - assert.result c.x : $(c).get-x ; - assert.result d.x : $(d).get-x ; - - class derived3 : derived1 derived2 - { - rule __init__ ( ) - { - } - } - - assert.result : bases myclass ; - assert.result myclass : bases derived1 ; - assert.result myclass : bases derived2 ; - assert.result derived1 derived2 : bases derived3 ; - - assert.true is-derived derived1 : myclass ; - assert.true is-derived derived2 : myclass ; - assert.true is-derived derived3 : derived1 ; - assert.true is-derived derived3 : derived2 ; - assert.true is-derived derived3 : derived1 derived2 myclass ; - assert.true is-derived derived3 : myclass ; - - assert.false is-derived myclass : derived1 ; - - assert.true is-instance $(a) ; - assert.false is-instance bar ; - - assert.true is-a $(a) : myclass ; - assert.true is-a $(c) : derived2 ; - assert.true is-a $(d) : myclass ; - assert.false is-a literal : myclass ; -} diff --git a/jam-files/boost-build/kernel/errors.jam b/jam-files/boost-build/kernel/errors.jam deleted file mode 100644 index 63b11e86..00000000 --- a/jam-files/boost-build/kernel/errors.jam +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2004 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Print a stack backtrace leading to this rule's caller. Each argument -# represents a line of output to be printed after the first line of the -# backtrace. -# -rule backtrace ( skip-frames prefix messages * : * ) -{ - local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ; - local drop-elements = $(frame-skips[$(skip-frames)]) ; - if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 ) - { - ECHO "warning: backtrace doesn't support skipping $(skip-frames) frames;" - "using 1 instead." ; - drop-elements = 5 ; - } - - local args = $(.args) ; - if $(.user-modules-only) - { - local bt = [ nearest-user-location ] ; - ECHO "$(prefix) at $(bt) " ; - for local n in $(args) - { - if $($(n))-is-not-empty - { - ECHO $(prefix) $($(n)) ; - } - } - } - else - { - # Get the whole backtrace, then drop the initial quadruples - # corresponding to the frames that must be skipped. - local bt = [ BACKTRACE ] ; - bt = $(bt[$(drop-elements)-]) ; - - while $(bt) - { - local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ; - ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ; - - # The first time through, print each argument on a separate line. - for local n in $(args) - { - if $($(n))-is-not-empty - { - ECHO $(prefix) $($(n)) ; - } - } - args = ; # Kill args so that this never happens again. - - # Move on to the next quadruple. - bt = $(bt[5-]) ; - } - } -} - -.args ?= messages 2 3 4 5 6 7 8 9 ; -.disabled ?= ; -.last-error-$(.args) ?= ; - - -# try-catch -- -# -# This is not really an exception-handling mechanism, but it does allow us to -# perform some error-checking on our error-checking. Errors are suppressed after -# a try, and the first one is recorded. Use catch to check that the error -# message matched expectations. - -# Begin looking for error messages. 
-# -rule try ( ) -{ - .disabled += true ; - .last-error-$(.args) = ; -} - - -# Stop looking for error messages; generate an error if an argument of messages -# is not found in the corresponding argument in the error call. -# -rule catch ( messages * : * ) -{ - .disabled = $(.disabled[2-]) ; # Pop the stack. - - import sequence ; - - if ! $(.last-error-$(.args))-is-not-empty - { - error-skip-frames 3 expected an error, but none occurred ; - } - else - { - for local n in $(.args) - { - if ! $($(n)) in $(.last-error-$(n)) - { - local v = [ sequence.join $($(n)) : " " ] ; - v ?= "" ; - local joined = [ sequence.join $(.last-error-$(n)) : " " ] ; - - .last-error-$(.args) = ; - error-skip-frames 3 expected \"$(v)\" in argument $(n) of error - : got \"$(joined)\" instead ; - } - } - } -} - - -rule error-skip-frames ( skip-frames messages * : * ) -{ - if ! $(.disabled) - { - backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - EXIT ; - } - else if ! $(.last-error-$(.args)) - { - for local n in $(.args) - { - # Add an extra empty string so that we always have - # something in the event of an error - .last-error-$(n) = $($(n)) "" ; - } - } -} - -if --no-error-backtrace in [ modules.peek : ARGV ] -{ - .no-error-backtrace = true ; -} - - -# Print an error message with a stack backtrace and exit. -# -rule error ( messages * : * ) -{ - if $(.no-error-backtrace) - { - # Print each argument on a separate line. - for local n in $(.args) - { - if $($(n))-is-not-empty - { - if ! $(first-printed) - { - ECHO error: $($(n)) ; - first-printed = true ; - } - else - { - ECHO $($(n)) ; - } - } - } - EXIT ; - } - else - { - error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } -} - - -# Same as 'error', but the generated backtrace will include only user files. -# -rule user-error ( messages * : * ) -{ - .user-modules-only = 1 ; - error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -} - - -# Print a warning message with a stack backtrace and exit. -# -rule warning -{ - backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -} - - -# Convert an arbitrary argument list into a list with ":" separators and quoted -# elements representing the same information. This is mostly useful for -# formatting descriptions of arguments with which a rule was called when -# reporting an error. -# -rule lol->list ( * ) -{ - local result ; - local remaining = 1 2 3 4 5 6 7 8 9 ; - while $($(remaining)) - { - local n = $(remaining[1]) ; - remaining = $(remaining[2-]) ; - - if $(n) != 1 - { - result += ":" ; - } - result += \"$($(n))\" ; - } - return $(result) ; -} - - -# Return the file:line for the nearest entry in backtrace which correspond to a -# user module. -# -rule nearest-user-location ( ) -{ - local bt = [ BACKTRACE ] ; - - local result ; - while $(bt) && ! $(result) - { - local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ; - local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam) ; - - if [ MATCH $(user-modules) : $(bt[1]:D=) ] - { - result = $(bt[1]):$(bt[2]) ; - } - bt = $(bt[5-]) ; - } - return $(result) ; -} - - -# If optimized rule is available in Jam, use it. -if NEAREST_USER_LOCATION in [ RULENAMES ] -{ - rule nearest-user-location ( ) - { - local r = [ NEAREST_USER_LOCATION ] ; - return $(r[1]):$(r[2]) ; - } -} - - -rule __test__ ( ) -{ - # Show that we can correctly catch an expected error. 
- try ; - { - error an error occurred : somewhere ; - } - catch an error occurred : somewhere ; - - # Show that unexpected errors generate real errors. - try ; - { - try ; - { - error an error occurred : somewhere ; - } - catch an error occurred : nowhere ; - } - catch expected \"nowhere\" in argument 2 ; - - # Show that not catching an error where one was expected is an error. - try ; - { - try ; - { - } - catch ; - } - catch expected an error, but none occurred ; -} diff --git a/jam-files/boost-build/kernel/modules.jam b/jam-files/boost-build/kernel/modules.jam deleted file mode 100644 index 1f75354f..00000000 --- a/jam-files/boost-build/kernel/modules.jam +++ /dev/null @@ -1,354 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Essentially an include guard; ensures that no module is loaded multiple times. -.loaded ?= ; - -# A list of modules currently being loaded for error reporting of circular -# dependencies. -.loading ?= ; - -# A list of modules needing to be tested using their __test__ rule. -.untested ?= ; - -# A list of modules which have been tested using their __test__ rule. -.tested ?= ; - - -# Runs internal Boost Build unit tests for the specified module. The module's -# __test__ rule is executed in its own module to eliminate any inadvertent -# effects of testing module dependencies (such as assert) on the module itself. -# -local rule run-module-test ( m ) -{ - local tested-modules = [ modules.peek modules : .tested ] ; - - if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations. - && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) ) - { - modules.poke modules : .tested : $(tested-modules) $(m) ; - - if ! ( __test__ in [ RULENAMES $(m) ] ) - { - local argv = [ peek : ARGV ] ; - if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) ) - { - ECHO warning: no __test__ rule defined in module $(m) ; - } - } - else - { - if ! ( --quiet in $(argv) ) - { - ECHO testing module $(m)... ; - } - - local test-module = __test-$(m)__ ; - IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) ] ; - IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ; - module $(test-module) - { - __test__ ; - } - } - } -} - - -# Return the binding of the given module. -# -rule binding ( module ) -{ - return $($(module).__binding__) ; -} - - -# Sets the module-local value of a variable. This is the most reliable way to -# set a module-local variable in a different module; it eliminates issues of -# name shadowing due to dynamic scoping. -# -rule poke ( module-name ? : variables + : value * ) -{ - module $(<) - { - $(>) = $(3) ; - } -} - - -# Returns the module-local value of a variable. This is the most reliable way to -# examine a module-local variable in a different module; it eliminates issues of -# name shadowing due to dynamic scoping. -# -rule peek ( module-name ? : variables + ) -{ - module $(<) - { - return $($(>)) ; - } -} - - -# Call the given rule locally in the given module. Use this for rules accepting -# rule names as arguments, so that the passed rule may be invoked in the context -# of the rule's caller (for example, if the rule accesses module globals or is a -# local rule). Note that rules called this way may accept at most 8 parameters. -# -rule call-in ( module-name ? 
: rule-name args * : * ) -{ - module $(module-name) - { - return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; - } -} - - -# Given a possibly qualified rule name and arguments, remove any initial module -# qualification from the rule and invoke it in that module. If there is no -# module qualification, the rule is invoked in the global module. Note that -# rules called this way may accept at most 8 parameters. -# -rule call-locally ( qualified-rule-name args * : * ) -{ - local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ; - local rule-name = $(module-rule[2]) ; - rule-name ?= $(qualified-rule-name) ; - # We pass only 8 parameters here since Boost Jam allows at most 9 rule - # parameter positions and the call-in rule already uses up the initial - # position for the module name. - return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) : - $(4) : $(5) : $(6) : $(7) : $(8) ] ; -} - - -# Load the indicated module if it is not already loaded. -# -rule load ( - module-name # Name of module to load. Rules will be defined in this - # module. - : filename ? # (partial) path to file; Defaults to $(module-name).jam. - : search * # Directories in which to search for filename. Defaults to - # $(BOOST_BUILD_PATH). -) -{ - # Avoid loading modules twice. - if ! ( $(module-name) in $(.loaded) ) - { - filename ?= $(module-name).jam ; - - # Mark the module loaded so we do not try to load it recursively. - .loaded += $(module-name) ; - - # Suppress tests if any module loads are already in progress. - local suppress-test = $(.loading[1]) ; - - # Push this module on the loading stack. - .loading += $(module-name) ; - - # Remember that it is untested. - .untested += $(module-name) ; - - # Insert the new module's __name__ and __file__ globals. - poke $(module-name) : __name__ : $(module-name) ; - poke $(module-name) : __file__ : $(filename) ; - - module $(module-name) - { - # Add some grist so that the module will have a unique target name. - local module-target = $(__file__:G=module@) ; - - local search = $(3) ; - search ?= [ modules.peek : BOOST_BUILD_PATH ] ; - SEARCH on $(module-target) = $(search) ; - BINDRULE on $(module-target) = modules.record-binding ; - - include $(module-target) ; - - # Allow the module to see its own names with full qualification. - local rules = [ RULENAMES $(__name__) ] ; - IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ; - } - - if $(module-name) != modules && ! [ binding $(module-name) ] - { - import errors ; - errors.error "Could not find module" $(module-name) in $(search) ; - } - - # Pop the loading stack. Must happen before testing or we will run into - # a circular loading dependency. - .loading = $(.loading[1--2]) ; - - # Run any pending tests if this is an outer load. - if ! $(suppress-test) - { - local argv = [ peek : ARGV ] ; - for local m in $(.untested) - { - run-module-test $(m) ; - } - .untested = ; - } - } - else if $(module-name) in $(.loading) - { - import errors ; - errors.error loading \"$(module-name)\" - : circular module loading dependency: - : $(.loading)" ->" $(module-name) ; - } -} - - -# This helper is used by load (above) to record the binding (path) of each -# loaded module. -# -rule record-binding ( module-target : binding ) -{ - $(.loading[-1]).__binding__ = $(binding) ; -} - - -# Transform each path in the list, with all backslashes converted to forward -# slashes and all detectable redundancy removed. 
Something like this is probably -# needed in path.jam, but I am not sure of that, I do not understand it, and I -# am not ready to move all of path.jam into the kernel. -# -local rule normalize-raw-paths ( paths * ) -{ - local result ; - for p in $(paths:T) - { - result += [ NORMALIZE_PATH $(p) ] ; - } - return $(result) ; -} - - -.cwd = [ PWD ] ; - - -# Load the indicated module and import rule names into the current module. Any -# members of rules-opt will be available without qualification in the caller's -# module. Any members of rename-opt will be taken as the names of the rules in -# the caller's module, in place of the names they have in the imported module. -# If rules-opt = '*', all rules from the indicated module are imported into the -# caller's module. If rename-opt is supplied, it must have the same number of -# elements as rules-opt. -# -rule import ( module-names + : rules-opt * : rename-opt * ) -{ - if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt) - { - import errors ; - errors.error "Rule aliasing is only available for explicit imports." ; - } - - if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) ) - { - import errors ; - errors.error "When loading multiple modules, no specific rules or" - "renaming is allowed" ; - } - - local caller = [ CALLER_MODULE ] ; - - # Import each specified module - for local m in $(module-names) - { - if ! $(m) in $(.loaded) - { - # If the importing module isn't already in the BOOST_BUILD_PATH, - # prepend it to the path. We don't want to invert the search order - # of modules that are already there. - - local caller-location ; - if $(caller) - { - caller-location = [ binding $(caller) ] ; - caller-location = $(caller-location:D) ; - caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ; - } - - local search = [ peek : BOOST_BUILD_PATH ] ; - search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ; - - if $(caller-location) && ! $(caller-location) in $(search) - { - search = $(caller-location) $(search) ; - } - - load $(m) : : $(search) ; - } - - IMPORT_MODULE $(m) : $(caller) ; - - if $(rules-opt) - { - local source-names ; - if $(rules-opt) = * - { - local all-rules = [ RULENAMES $(m) ] ; - source-names = $(all-rules) ; - } - else - { - source-names = $(rules-opt) ; - } - local target-names = $(rename-opt) ; - target-names ?= $(source-names) ; - IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ; - } - } -} - - -# Define exported copies in $(target-module) of all rules exported from -# $(source-module). Also make them available in the global module with -# qualification, so that it is just as though the rules were defined originally -# in $(target-module). -# -rule clone-rules ( source-module target-module ) -{ - local rules = [ RULENAMES $(source-module) ] ; - - IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ; - EXPORT $(target-module) : $(rules) ; - IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ; -} - - -# These rules need to be available in all modules to implement module loading -# itself and other fundamental operations. 
-local globalize = peek poke record-binding ; -IMPORT modules : $(globalize) : : modules.$(globalize) ; - - -rule __test__ ( ) -{ - import assert ; - import modules : normalize-raw-paths ; - - module modules.__test__ - { - foo = bar ; - } - - assert.result bar : peek modules.__test__ : foo ; - - poke modules.__test__ : foo : bar baz ; - assert.result bar baz : peek modules.__test__ : foo ; - - assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ; - assert.result . : normalize-raw-paths . ; - assert.result .. : normalize-raw-paths .. ; - assert.result ../.. : normalize-raw-paths ../.. ; - assert.result .. : normalize-raw-paths ./.. ; - assert.result / / : normalize-raw-paths / \\ ; - assert.result a : normalize-raw-paths a ; - assert.result a : normalize-raw-paths a/ ; - assert.result /a : normalize-raw-paths /a/ ; - assert.result / : normalize-raw-paths /a/.. ; -} diff --git a/jam-files/boost-build/options/help.jam b/jam-files/boost-build/options/help.jam deleted file mode 100644 index b507e1ed..00000000 --- a/jam-files/boost-build/options/help.jam +++ /dev/null @@ -1,212 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003, 2006 Rene Rivera -# Copyright 2003, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module is the plug-in handler for the --help and --help-.* -# command-line options -import modules ; -import assert ; -import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ; -import sequence ; -import set ; -import project ; -import print ; -import os ; -import version ; -import path ; - -# List of possible modules, but which really aren't. -# -.not-modules = - boost-build bootstrap site-config test user-config - -tools allyourbase boost-base features python stlport testing unit-tests ; - -# The help system options are parsed here and handed off to the doc -# module to translate into documentation requests and actions. The -# understood options are: -# -# --help-disable-<option> -# --help-doc-options -# --help-enable-<option> -# --help-internal -# --help-options -# --help-usage -# --help-output <type> -# --help-output-file <file> -# --help [<module-or-class>] -# -rule process ( - command # The option. - : values * # The values, starting after the "=". - ) -{ - assert.result --help : MATCH ^(--help).* : $(command) ; - local did-help = ; - switch $(command) - { - case --help-internal : - local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ; - path-to-modules ?= . 
; - local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ; - local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ; - local modules-to-list = - [ sequence.insertion-sort - [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ; - local modules-to-scan ; - for local m in $(modules-to-list) - { - local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ; - modules-to-scan += $(module-files[1]) ; - } - do-scan $(modules-to-scan) : print-help-all ; - did-help = true ; - - case --help-enable-* : - local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ; - set-option $(option) : enabled ; - did-help = true ; - - case --help-disable-* : - local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ; - set-option $(option) ; - did-help = true ; - - case --help-output : - set-output $(values[1]) ; - did-help = true ; - - case --help-output-file : - set-output-file $(values[1]) ; - did-help = true ; - - case --help-doc-options : - local doc-module-spec = [ split-symbol doc ] ; - do-scan $(doc-module-spec[1]) : print-help-options ; - did-help = true ; - - case --help-options : - print-help-usage ; - did-help = true ; - - case --help : - local spec = $(values[1]) ; - if $(spec) - { - local spec-parts = [ split-symbol $(spec) ] ; - if $(spec-parts) - { - if $(spec-parts[2]) - { - do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ; - do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ; - do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ; - } - else - { - do-scan $(spec-parts[1]) : print-help-module ; - } - } - else - { - EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ; - } - } - else - { - version.print ; - ECHO ; - # First print documentation from the current Jamfile, if any. - # FIXME: Generally, this duplication of project.jam logic is bad. - local names = [ modules.peek project : JAMROOT ] - [ modules.peek project : JAMFILE ] ; - local project-file = [ path.glob . : $(names) ] ; - if ! $(project-file) - { - project-file = [ path.glob-in-parents . : $(names) ] ; - } - - for local p in $(project-file) - { - do-scan $(p) : print-help-project $(p) ; - } - - # Next any user-config help. - local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ; - local user-config = [ GLOB $(user-path) : user-config.jam ] ; - if $(user-config) - { - do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ; - } - - # Next any site-config help. - local site-config = [ GLOB $(user-path) : site-config.jam ] ; - if $(site-config) - { - do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ; - } - - # Then the overall help. - print-help-top ; - } - did-help = true ; - } - if $(did-help) - { - UPDATE all ; - NOCARE all ; - } - return $(did-help) ; -} - -# Split a reference to a symbol into module and symbol parts. -# -local rule split-symbol ( - symbol # The symbol to split. - ) -{ - local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ; - path-to-modules ?= . ; - local module-name = $(symbol) ; - local symbol-name = ; - local result = ; - while ! $(result) - { - local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ; - if $(module-path) - { - # The 'module-name' in fact refers to module. Return the full - # module path and a symbol within it. If 'symbol' passed to this - # rule is already module, 'symbol-name' will be empty. Otherwise, - # it's initialized on the previous loop iteration. 
- # In case there are several modules by this name, - # use the first one. - result = $(module-path[1]) $(symbol-name) ; - } - else - { - if ! $(module-name:S) - { - result = - ; - } - else - { - local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ; - if $(symbol-name) - { - symbol-name = $(next-symbol-part).$(symbol-name) ; - } - else - { - symbol-name = $(next-symbol-part) ; - } - module-name = $(module-name:B) ; - } - } - } - if $(result) != - - { - return $(result) ; - } -} diff --git a/jam-files/boost-build/site-config.jam b/jam-files/boost-build/site-config.jam deleted file mode 100644 index ad22d674..00000000 --- a/jam-files/boost-build/site-config.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright 2002, 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - diff --git a/jam-files/boost-build/tools/__init__.py b/jam-files/boost-build/tools/__init__.py deleted file mode 100644 index e69de29b..00000000 --- a/jam-files/boost-build/tools/__init__.py +++ /dev/null diff --git a/jam-files/boost-build/tools/acc.jam b/jam-files/boost-build/tools/acc.jam deleted file mode 100644 index f04c9dc8..00000000 --- a/jam-files/boost-build/tools/acc.jam +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Toon Knapen 2004. -# Copyright Boris Gubenko 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# -# Boost.Build V2 toolset for the HP aC++ compiler. -# - -import toolset : flags ; -import feature ; -import generators ; -import common ; - -feature.extend toolset : acc ; -toolset.inherit acc : unix ; -generators.override builtin.lib-generator : acc.prebuilt ; -generators.override acc.searched-lib-generator : searched-lib-generator ; - -# Configures the acc toolset. -rule init ( version ? : user-provided-command * : options * ) -{ - local condition = [ common.check-init-parameters acc - : version $(version) ] ; - - local command = [ common.get-invocation-command acc : aCC - : $(user-provided-command) ] ; - - common.handle-options acc : $(condition) : $(command) : $(options) ; -} - - -# Declare generators -generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ; -generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ; - -# Declare flags. -flags acc CFLAGS <optimization>off : ; -flags acc CFLAGS <optimization>speed : -O3 ; -flags acc CFLAGS <optimization>space : -O2 ; - -flags acc CFLAGS <inlining>off : +d ; -flags acc CFLAGS <inlining>on : ; -flags acc CFLAGS <inlining>full : ; - -flags acc C++FLAGS <exception-handling>off : ; -flags acc C++FLAGS <exception-handling>on : ; - -flags acc C++FLAGS <rtti>off : ; -flags acc C++FLAGS <rtti>on : ; - -# We want the full path to the sources in the debug symbols because otherwise -# the debugger won't find the sources when we use boost.build. -flags acc CFLAGS <debug-symbols>on : -g ; -flags acc LINKFLAGS <debug-symbols>on : -g ; -flags acc LINKFLAGS <debug-symbols>off : -s ; - -# V2 does not have <shared-linkable>, not sure what this meant in V1. 
-# flags acc CFLAGS <shared-linkable>true : +Z ; - -flags acc CFLAGS <profiling>on : -pg ; -flags acc LINKFLAGS <profiling>on : -pg ; - -flags acc CFLAGS <address-model>64 : +DD64 ; -flags acc LINKFLAGS <address-model>64 : +DD64 ; - -# It is unknown if there's separate option for rpath used only -# at link time, similar to -rpath-link in GNU. We'll use -L. -flags acc RPATH_LINK : <xdll-path> ; - -flags acc CFLAGS <cflags> ; -flags acc C++FLAGS <cxxflags> ; -flags acc DEFINES <define> ; -flags acc UNDEFS <undef> ; -flags acc HDRS <include> ; -flags acc STDHDRS <sysinclude> ; -flags acc LINKFLAGS <linkflags> ; -flags acc ARFLAGS <arflags> ; - -flags acc LIBPATH <library-path> ; -flags acc NEEDLIBS <library-file> ; -flags acc FINDLIBS <find-shared-library> ; -flags acc FINDLIBS <find-static-library> ; - -# Select the compiler name according to the threading model. -flags acc CFLAGS <threading>multi : -mt ; -flags acc LINKFLAGS <threading>multi : -mt ; - -flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; - - -actions acc.link bind NEEDLIBS -{ - $(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) -} - -SPACE = " " ; -actions acc.link.dll bind NEEDLIBS -{ - $(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) -} - -actions acc.compile.c -{ - cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS) -} - -actions acc.compile.c++ -{ - $(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS) -} - -actions updated together piecemeal acc.archive -{ - ar ru$(ARFLAGS:E="") "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/auto-index.jam b/jam-files/boost-build/tools/auto-index.jam deleted file mode 100644 index ebbf344e..00000000 --- a/jam-files/boost-build/tools/auto-index.jam +++ /dev/null @@ -1,212 +0,0 @@ - -import feature ; -import generators ; -import "class" ; -import toolset ; -import targets ; -import "class" : new ; -import project ; - -feature.feature auto-index : off "on" ; -feature.feature auto-index-internal : off "on" ; -feature.feature auto-index-verbose : off "on" ; -feature.feature auto-index-no-duplicates : off "on" ; -feature.feature auto-index-script : : free ; -feature.feature auto-index-prefix : : free ; -feature.feature auto-index-type : : free ; -feature.feature auto-index-section-names : "on" off ; - -toolset.flags auto-index.auto-index FLAGS <auto-index-internal>on : --internal-index ; -toolset.flags auto-index.auto-index SCRIPT <auto-index-script> ; -toolset.flags auto-index.auto-index PREFIX <auto-index-prefix> ; -toolset.flags auto-index.auto-index INDEX_TYPE <auto-index-type> ; -toolset.flags auto-index.auto-index FLAGS <auto-index-verbose>on : --verbose ; -toolset.flags auto-index.auto-index FLAGS <auto-index-no-duplicates>on : --no-duplicates ; -toolset.flags auto-index.auto-index FLAGS <auto-index-section-names>off : --no-section-names ; - -# <auto-index-binary> shell command to run AutoIndex -# <auto-index-binary-dependencies> targets to build AutoIndex from sources. 
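# [Illustrative aside, not part of the original auto-index.jam] A minimal
# sketch of how the user-facing features declared above are typically put to
# work, assuming AutoIndex has been configured:
#
#   # user-config.jam
#   using auto-index ;
#
#   # documentation Jamfile
#   boostbook standalone : mydoc.xml
#     : <auto-index>on <auto-index-internal>on <auto-index-script>index.idx ;
#
# The names standalone, mydoc.xml and index.idx are placeholders only.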
-feature.feature <auto-index-binary> : : free ; -feature.feature <auto-index-binary-dependencies> : : free dependency ; - -class auto-index-generator : generator -{ - import common modules path targets build-system ; - rule run ( project name ? : property-set : sources * ) - { - # AutoIndex invocation command and dependencies. - local auto-index-binary = [ modules.peek auto-index : .command ] ; - local auto-index-binary-dependencies ; - - if $(auto-index-binary) - { - # Use user-supplied command. - auto-index-binary = [ common.get-invocation-command auto-index : auto-index : $(auto-index-binary) ] ; - } - else - { - # Search for AutoIndex sources in sensible places, like - # $(BOOST_ROOT)/tools/auto_index - # $(BOOST_BUILD_PATH)/../../auto_index - - # And build auto-index executable from sources. - - local boost-root = [ modules.peek : BOOST_ROOT ] ; - local boost-build-path = [ build-system.location ] ; - local boost-build-path2 = [ modules.peek : BOOST_BUILD_PATH ] ; - - local auto-index-dir ; - - if $(boost-root) - { - auto-index-dir += [ path.join $(boost-root) tools ] ; - } - - if $(boost-build-path) - { - auto-index-dir += $(boost-build-path)/../.. ; - } - if $(boost-build-path2) - { - auto-index-dir += $(boost-build-path2)/.. ; - } - - #ECHO $(auto-index-dir) ; - auto-index-dir = [ path.glob $(auto-index-dir) : auto_index ] ; - #ECHO $(auto-index-dir) ; - - # If the AutoIndex source directory was found, mark its main target - # as a dependency for the current project. Otherwise, try to find - # 'auto-index' in user's PATH - if $(auto-index-dir) - { - auto-index-dir = [ path.make $(auto-index-dir[1]) ] ; - auto-index-dir = $(auto-index-dir)/build ; - - #ECHO $(auto-index-dir) ; - - # Get the main-target in AutoIndex directory. - local auto-index-main-target = [ targets.resolve-reference $(auto-index-dir) : $(project) ] ; - - #ECHO $(auto-index-main-target) ; - - # The first element are actual targets, the second are - # properties found in target-id. We do not care about these - # since we have passed the id ourselves. - auto-index-main-target = - [ $(auto-index-main-target[1]).main-target auto_index ] ; - - #ECHO $(auto-index-main-target) ; - - auto-index-binary-dependencies = - [ $(auto-index-main-target).generate [ $(property-set).propagated ] ] ; - - # Ignore usage-requirements returned as first element. - auto-index-binary-dependencies = $(auto-index-binary-dependencies[2-]) ; - - # Some toolsets generate extra targets (e.g. RSP). We must mark - # all targets as dependencies for the project, but we will only - # use the EXE target for auto-index-to-boostbook translation. - for local target in $(auto-index-binary-dependencies) - { - if [ $(target).type ] = EXE - { - auto-index-binary = - [ path.native - [ path.join - [ $(target).path ] - [ $(target).name ] - ] - ] ; - } - } - } - else - { - ECHO "AutoIndex warning: The path to the auto-index executable was" ; - ECHO " not provided. Additionally, couldn't find AutoIndex" ; - ECHO " sources searching in" ; - ECHO " * BOOST_ROOT/tools/auto-index" ; - ECHO " * BOOST_BUILD_PATH/../../auto-index" ; - ECHO " Will now try to find a precompiled executable by searching" ; - ECHO " the PATH for 'auto-index'." 
; - ECHO " To disable this warning in the future, or to completely" ; - ECHO " avoid compilation of auto-index, you can explicitly set the" ; - ECHO " path to a auto-index executable command in user-config.jam" ; - ECHO " or site-config.jam with the call" ; - ECHO " using auto-index : /path/to/auto-index ;" ; - - # As a last resort, search for 'auto-index' command in path. Note - # that even if the 'auto-index' command is not found, - # get-invocation-command will still return 'auto-index' and might - # generate an error while generating the virtual-target. - - auto-index-binary = [ common.get-invocation-command auto-index : auto-index ] ; - } - } - - # Add $(auto-index-binary-dependencies) as a dependency of the current - # project and set it as the <auto-index-binary> feature for the - # auto-index-to-boostbook rule, below. - property-set = [ $(property-set).add-raw - <dependency>$(auto-index-binary-dependencies) - <auto-index-binary>$(auto-index-binary) - <auto-index-binary-dependencies>$(auto-index-binary-dependencies) - ] ; - - #ECHO "binary = " $(auto-index-binary) ; - #ECHO "dependencies = " $(auto-index-binary-dependencies) ; - - if [ $(property-set).get <auto-index> ] = "on" - { - return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ; - } - else - { - return [ generators.construct $(project) $(name) : DOCBOOK : $(property-set) - : $(sources) ] ; - } - } -} - -# Initialization of toolset. -# -# Parameters: -# command ? -> path to AutoIndex executable. -# -# When command is not supplied toolset will search for AutoIndex directory and -# compile the executable from source. If that fails we still search the path for -# 'auto_index'. -# -rule init ( - command ? # path to the AutoIndex executable. - ) -{ - if ! $(.initialized) - { - .initialized = true ; - .command = $(command) ; - } -} - -toolset.flags auto-index.auto-index AI-COMMAND <auto-index-binary> ; -toolset.flags auto-index.auto-index AI-DEPENDENCIES <auto-index-binary-dependencies> ; - -generators.register [ class.new auto-index-generator auto-index.auto-index : DOCBOOK : DOCBOOK(%.auto_index) ] ; -generators.override auto-index.auto-index : boostbook.boostbook-to-docbook ; - -rule auto-index ( target : source : properties * ) -{ - # Signal dependency of auto-index sources on <auto-index-binary-dependencies> - # upon invocation of auto-index-to-boostbook. - #ECHO "AI-COMMAND= " $(AI-COMMAND) ; - DEPENDS $(target) : [ on $(target) return $(AI-DEPENDENCIES) ] ; - #DEPENDS $(target) : [ on $(target) return $(SCRIPT) ] ; -} - -actions auto-index -{ - $(AI-COMMAND) $(FLAGS) "--prefix="$(PREFIX) "--script="$(SCRIPT) "--index-type="$(INDEX_TYPE) "--in="$(>) "--out="$(<) -} - - diff --git a/jam-files/boost-build/tools/bison.jam b/jam-files/boost-build/tools/bison.jam deleted file mode 100644 index 0689d4bd..00000000 --- a/jam-files/boost-build/tools/bison.jam +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. 
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import generators ; -import feature ; -import type ; -import property ; - -feature.feature bison.prefix : : free ; -type.register Y : y ; -type.register YY : yy ; -generators.register-standard bison.bison : Y : C H ; -generators.register-standard bison.bison : YY : CPP HPP ; - -rule init ( ) -{ -} - -rule bison ( dst dst_header : src : properties * ) -{ - local r = [ property.select bison.prefix : $(properties) ] ; - if $(r) - { - PREFIX_OPT on $(<) = -p $(r:G=) ; - } -} - -actions bison -{ - bison $(PREFIX_OPT) -d -o $(<[1]) $(>) -} diff --git a/jam-files/boost-build/tools/boostbook-config.jam b/jam-files/boost-build/tools/boostbook-config.jam deleted file mode 100644 index 6e3f3ddc..00000000 --- a/jam-files/boost-build/tools/boostbook-config.jam +++ /dev/null @@ -1,13 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for BoostBook tools. To use, just import this module. -# -# This module is deprecated. -# using boostbook ; -# with no arguments now suffices. - -import toolset : using ; - -using boostbook ; diff --git a/jam-files/boost-build/tools/boostbook.jam b/jam-files/boost-build/tools/boostbook.jam deleted file mode 100644 index 3a5964c6..00000000 --- a/jam-files/boost-build/tools/boostbook.jam +++ /dev/null @@ -1,727 +0,0 @@ -# Copyright 2003, 2004, 2005 Dave Abrahams -# Copyright 2003, 2004, 2005 Douglas Gregor -# Copyright 2005, 2006, 2007 Rene Rivera -# Copyright 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines rules to handle generation of documentation -# from BoostBook sources. -# -# The type of output is controlled by the <format> feature which can -# have the following values:: -# -# * html: Generates html documention. This is the default. -# * xhtml: Generates xhtml documentation -# * htmlhelp: Generates html help output. -# * onehtml: Generates a single html page. -# * man: Generates man pages. -# * pdf: Generates pdf documentation. -# * ps: Generates postscript output. -# * docbook: Generates docbook XML. -# * fo: Generates XSL formating objects. -# * tests: Extracts test cases from the boostbook XML. -# -# format is an implicit feature, so typing pdf on the command -# line (for example) is a short-cut for format=pdf. - -import "class" : new ; -import common ; -import errors ; -import targets ; -import feature ; -import generators ; -import print ; -import property ; -import project ; -import property-set ; -import regex ; -import scanner ; -import sequence ; -import make ; -import os ; -import type ; -import modules path project ; -import build-system ; - -import xsltproc : xslt xslt-dir ; - -# Make this module into a project. 
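# [Illustrative aside, not part of the original boostbook.jam] Given a target
# declared with the boostbook rule defined below, e.g.
#
#   boostbook mydoc : mydoc.xml ;
#
# the output format is picked per build; because <format> is implicit,
# "bjam pdf" is shorthand for "bjam format=pdf". The name mydoc is a
# placeholder.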
-project.initialize $(__name__) ; -project boostbook ; - - -feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests - : incidental implicit composite propagated ; - -type.register DTDXML : dtdxml ; -type.register XML : xml ; -type.register BOOSTBOOK : boostbook : XML ; -type.register DOCBOOK : docbook : XML ; -type.register FO : fo : XML ; -type.register PDF : pdf ; -type.register PS : ps ; -type.register XSLT : xsl : XML ; -type.register HTMLDIR ; -type.register XHTMLDIR ; -type.register HTMLHELP ; -type.register MANPAGES ; -type.register TESTS : tests ; -# Artificial target type, used to require invocation of top-level -# BoostBook generator. -type.register BOOSTBOOK_MAIN ; - - -# Initialize BoostBook support. -rule init ( - docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not - # provided, we use DOCBOOK_XSL_DIR from the environment - # (if available) or look in standard locations. - # Otherwise, we let the XML processor load the - # stylesheets remotely. - - : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use - # DOCBOOK_DTD_DIR From the environment (if available) or - # look in standard locations. Otherwise, we let the XML - # processor load the DTD remotely. - - : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs. -) -{ - - if ! $(.initialized) - { - .initialized = true ; - - check-boostbook-dir $(boostbook-dir) ; - find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ; - - # Register generators only if we've were called via "using boostbook ; " - generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ; - generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ; - generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ; - generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ; - generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ; - generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ; - generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ; - generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ; - generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ; - - # The same about Jamfile main target rules. - IMPORT $(__name__) : boostbook : : boostbook ; - } - else - { - if $(docbook-xsl-dir) - { - modify-config ; - .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ; - check-docbook-xsl-dir ; - } - if $(docbook-dtd-dir) - { - modify-config ; - .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ; - check-docbook-dtd-dir ; - } - if $(boostbook-dir) - { - modify-config ; - check-boostbook-dir $(boostbook-dir) ; - local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ; - local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ; - .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ; - .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ; - check-boostbook-xsl-dir ; - check-boostbook-dtd-dir ; - } - } -} - -rule lock-config ( ) -{ - if ! $(.initialized) - { - errors.user-error "BoostBook has not been configured." ; - } - if ! $(.config-locked) - { - .config-locked = true ; - } -} - -rule modify-config ( ) -{ - if $(.config-locked) - { - errors.user-error "BoostBook configuration cannot be changed after it has been used." 
; - } -} - -rule find-boost-in-registry ( keys * ) -{ - local boost-root = ; - for local R in $(keys) - { - local installed-boost = [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)" - : "InstallRoot" ] ; - if $(installed-boost) - { - boost-root += [ path.make $(installed-boost) ] ; - } - } - return $(boost-root) ; -} - -rule check-docbook-xsl-dir ( ) -{ - if $(.docbook-xsl-dir) - { - if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ] - { - errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ; - } - else - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ; - } - } - } -} - -rule check-docbook-dtd-dir ( ) -{ - if $(.docbook-dtd-dir) - { - if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ] - { - errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ; - } - else - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ; - } - } - } -} - -rule check-boostbook-xsl-dir ( ) -{ - if ! $(.boostbook-xsl-dir) - { - errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ; - } - else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ] - { - errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ; - } - else - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ; - } - } -} - -rule check-boostbook-dtd-dir ( ) -{ - if ! $(.boostbook-dtd-dir) - { - errors.user-error "error: BoostBook: could not find boostbook DTD." ; - } - else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ] - { - errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ; - } - else - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ; - } - } -} - -rule check-boostbook-dir ( boostbook-dir ? ) -{ - if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ] - { - errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ; - } -} - -rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? ) -{ - docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ; - docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ; - boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ; - - # Look for the boostbook stylesheets relative to BOOST_ROOT - # and Boost.Build. - local boost-build-root = [ path.make [ build-system.location ] ] ; - local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ; - - local boost-root = [ modules.peek : BOOST_ROOT ] ; - if $(boost-root) - { - boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ; - } - boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ; - - # Try to find the tools in platform specific locations - if [ os.name ] = NT - { - # If installed by the Boost installer. 
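# [Illustrative aside, not part of the original file] The searching in this
# rule is only a fallback; pointing the init rule above at the stylesheets
# directly avoids it, e.g. in user-config.jam (paths are examples matching
# the Ubuntu locations probed further down, assuming those packages exist):
#
#   using boostbook : /usr/share/xml/docbook/stylesheet/nwalsh
#                   : /usr/share/xml/docbook/schema/dtd/4.2 ;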
- local boost-root = ; - - local boost-installer-versions = snapshot cvs 1.33.0 ; - local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ; - local boostpro-installer-versions = - 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0 - 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ; - - local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ; - - # Make sure that the most recent version is searched for first - boost-root += [ sequence.reverse - [ find-boost-in-registry - Boost-Consulting.com\\$(boost-consulting-installer-versions) - boostpro.com\\$(boostpro-installer-versions) ] ] ; - - # Plausible locations. - local root = [ PWD ] ; - while $(root) != $(root:D) { root = $(root:D) ; } - root = [ path.make $(root) ] ; - local search-dirs = ; - local docbook-search-dirs = ; - for local p in $(boost-root) { - search-dirs += [ path.join $(p) tools ] ; - } - for local p in $(old-installer-root) - { - search-dirs += [ path.join $(p) share ] ; - docbook-search-dirs += [ path.join $(p) share ] ; - } - search-dirs += [ path.join $(root) Boost tools ] ; - search-dirs += [ path.join $(root) Boost share ] ; - docbook-search-dirs += [ path.join $(root) Boost share ] ; - - docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ; - docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ; - boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ; - } - else - { - # Plausible locations. - - local share = /usr/local/share /usr/share /opt/share /opt/local/share ; - local dtd-versions = 4.2 ; - - docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ; - docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ; - docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ; - - docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ; - docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ; - docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ; - - boostbook-dir ?= [ path.glob $(share) : boostbook* ] ; - - # Ubuntu Linux - docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ; - docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ; - } - - if $(docbook-xsl-dir) - { - .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ; - } - if $(docbook-dtd-dir) - { - .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ; - } - - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice: Boost.Book: searching XSL/DTD in" ; - ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ; - } - local boostbook-xsl-dir ; - for local dir in $(boostbook-dir) { - boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ; - } - local boostbook-dtd-dir ; - for local dir in $(boostbook-dir) { - boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ; - } - .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ; - .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ; - - check-docbook-xsl-dir ; - check-docbook-dtd-dir ; - check-boostbook-xsl-dir ; - check-boostbook-dtd-dir ; -} - -rule xsl-dir -{ - lock-config ; - return $(.boostbook-xsl-dir) ; -} - -rule dtd-dir -{ - lock-config ; - return $(.boostbook-dtd-dir) ; -} - -rule docbook-xsl-dir -{ - lock-config ; - return $(.docbook-xsl-dir) ; -} - -rule docbook-dtd-dir -{ - lock-config ; - return $(.docbook-dtd-dir) ; -} - -rule dtdxml-to-boostbook ( target : source : properties * ) -{ - lock-config ; - xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl" - 
: $(properties) ; -} - -rule boostbook-to-docbook ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ; - xslt $(target) : $(source) $(stylesheet) : $(properties) ; -} - -rule docbook-to-onehtml ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ; - xslt $(target) : $(source) $(stylesheet) : $(properties) ; -} - -rule docbook-to-htmldir ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ; - xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ; -} - -rule docbook-to-xhtmldir ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ; - xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ; -} - -rule docbook-to-htmlhelp ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ; - xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ; -} - -rule docbook-to-manpages ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ; - xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man ; -} - -rule docbook-to-fo ( target : source : properties * ) -{ - lock-config ; - local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ; - xslt $(target) : $(source) $(stylesheet) : $(properties) ; -} - -rule format-catalog-path ( path ) -{ - local result = $(path) ; - if [ xsltproc.is-cygwin ] - { - if [ os.name ] = NT - { - drive = [ MATCH ^/(.):(.*)$ : $(path) ] ; - result = /cygdrive/$(drive[1])$(drive[2]) ; - } - } - else - { - if [ os.name ] = CYGWIN - { - local native-path = [ path.native $(path) ] ; - result = [ path.make $(native-path:W) ] ; - } - } - return [ regex.replace $(result) " " "%20" ] ; -} - -rule generate-xml-catalog ( target : sources * : properties * ) -{ - print.output $(target) ; - - # BoostBook DTD catalog entry - local boostbook-dtd-dir = [ boostbook.dtd-dir ] ; - if $(boostbook-dtd-dir) - { - boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ; - } - - print.text - "<?xml version=\"1.0\"?>" - "<!DOCTYPE catalog " - " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\"" - " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">" - "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">" - " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>" - : true ; - - local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ; - if ! $(docbook-xsl-dir) - { - ECHO "BoostBook warning: no DocBook XSL directory specified." ; - ECHO " If you have the DocBook XSL stylesheets installed, please " ; - ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ; - ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ; - ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ; - ECHO " are available here: http://docbook.sourceforge.net/ " ; - ECHO " Stylesheets will be downloaded on-the-fly (very slow!) 
" ; - } - else - { - docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ; - print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ; - } - - local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ; - if ! $(docbook-dtd-dir) - { - ECHO "BoostBook warning: no DocBook DTD directory specified." ; - ECHO " If you have the DocBook DTD installed, please set " ; - ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ; - ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ; - ECHO " Boost.Jam configuration file. The DocBook DTD is available " ; - ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ; - ECHO " The DTD will be downloaded on-the-fly (very slow!) " ; - } - else - { - docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ; - print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ; - } - - print.text "</catalog>" ; -} - -rule xml-catalog ( ) -{ - if ! $(.xml-catalog) - { - # The target is created as part of the root project. But ideally - # it would be created as part of the boostbook project. This is not - # current possible as such global projects don't inherit things like - # the build directory. - - # Find the root project. - local root-project = [ project.current ] ; - root-project = [ $(root-project).project-module ] ; - while - [ project.attribute $(root-project) parent-module ] && - [ project.attribute $(root-project) parent-module ] != user-config && - [ project.attribute $(root-project) parent-module ] != project-config - { - root-project = [ project.attribute $(root-project) parent-module ] ; - } - .xml-catalog = [ new file-target boostbook_catalog - : XML - : [ project.target $(root-project) ] - : [ new action : boostbook.generate-xml-catalog ] - : - ] ; - .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ; - .xml-catalog-file = $(.xml-catalog-file:J=/) ; - } - return $(.xml-catalog) $(.xml-catalog-file) ; -} - -class boostbook-generator : generator -{ - import feature ; - import virtual-target ; - import generators ; - import boostbook ; - - - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - # Generate the catalog, but only once... - local global-catalog = [ boostbook.xml-catalog ] ; - local catalog = $(global-catalog[1]) ; - local catalog-file = $(global-catalog[2]) ; - local targets ; - - # Add the catalog to the property set - property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ; - - local type = none ; - local manifest ; - local format = [ $(property-set).get <format> ] ; - switch $(format) - { - case html : - { - type = HTMLDIR ; - manifest = HTML.manifest ; - } - case xhtml : - { - type = XHTMLDIR ; - manifest = HTML.manifest ; - } - case htmlhelp : - { - type = HTMLHELP ; - manifest = HTML.manifest ; - } - - case onehtml : type = HTML ; - - case man : - { - type = MANPAGES ; - manifest = man.manifest ; - } - - case docbook : type = DOCBOOK ; - case fo : type = FO ; - case pdf : type = PDF ; - case ps : type = PS ; - case tests : type = TESTS ; - } - - if $(manifest) - { - # Create DOCBOOK file from BOOSTBOOK sources. 
- local base-target = [ generators.construct $(project) - : DOCBOOK : $(property-set) : $(sources) ] ; - base-target = $(base-target[2]) ; - $(base-target).depends $(catalog) ; - - # Generate HTML/PDF/PS from DOCBOOK. - local target = [ generators.construct $(project) $(name)_$(manifest) - : $(type) - : [ $(property-set).add-raw - <xsl:param>manifest=$(name)_$(manifest) ] - : $(base-target) ] ; - local name = [ $(property-set).get <name> ] ; - name ?= $(format) ; - $(target[2]).set-path $(name) ; - $(target[2]).depends $(catalog) ; - - targets += $(target[2]) ; - } - else { - local target = [ generators.construct $(project) - : $(type) : $(property-set) : $(sources) ] ; - - if ! $(target) - { - errors.error "Cannot build documentation type '$(format)'" ; - } - else - { - $(target[2]).depends $(catalog) ; - targets += $(target[2]) ; - } - } - - return $(targets) ; - } -} - -generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ; - -# Creates a boostbook target. -rule boostbook ( target-name : sources * : requirements * : default-build * ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN - : [ targets.main-target-sources $(sources) : $(target-name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; -} - -############################################################################# -# Dependency scanners -############################################################################# -# XInclude scanner. Mostly stolen from c-scanner :) -# Note that this assumes an "xi" prefix for XIncludes. This isn't always the -# case for XML documents, but we'll assume it's true for anything we encounter. -class xinclude-scanner : scanner -{ - import virtual-target ; - import path ; - import scanner ; - - rule __init__ ( includes * ) - { - scanner.__init__ ; - self.includes = $(includes) ; - } - - rule pattern ( ) - { - return "xi:include[ ]*href=\"([^\"]*)\"" ; - } - - rule process ( target : matches * : binding ) - { - local target_path = [ NORMALIZE_PATH $(binding:D) ] ; - - NOCARE $(matches) ; - INCLUDES $(target) : $(matches) ; - SEARCH on $(matches) = $(target_path) $(self.includes:G=) ; - - scanner.propagate $(__name__) : $(matches) : $(target) ; - } -} - -scanner.register xinclude-scanner : xsl:path ; -type.set-scanner XML : xinclude-scanner ; - -rule boostbook-to-tests ( target : source : properties * ) -{ - lock-config ; - local boost_root = [ modules.peek : BOOST_ROOT ] ; - local native-path = - [ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ; - local stylesheet = $(native-path:S=.xsl) ; - xslt $(target) : $(source) $(stylesheet) - : $(properties) <xsl:param>boost.root=$(boost_root) - ; -} - - diff --git a/jam-files/boost-build/tools/borland.jam b/jam-files/boost-build/tools/borland.jam deleted file mode 100644 index 6e43ca93..00000000 --- a/jam-files/boost-build/tools/borland.jam +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright 2005 Dave Abrahams -# Copyright 2003 Rene Rivera -# Copyright 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. 
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Support for the Borland's command line compiler - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature : get-values ; -import type ; -import common ; - -feature.extend toolset : borland ; - -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters borland : - version $(version) ] ; - - local command = [ common.get-invocation-command borland : bcc32.exe - : $(command) ] ; - - common.handle-options borland : $(condition) : $(command) : $(options) ; - - if $(command) - { - command = [ common.get-absolute-tool-path $(command[-1]) ] ; - } - root = $(command:D) ; - - flags borland.compile STDHDRS $(condition) : $(root)/include/ ; - flags borland.link STDLIBPATH $(condition) : $(root)/lib ; - flags borland.link RUN_PATH $(condition) : $(root)/bin ; - flags borland .root $(condition) : $(root)/bin/ ; -} - - -# A borland-specific target type -type.register BORLAND.TDS : tds ; - -# Declare generators - -generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ; -generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ; - -generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ; -generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ; -generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ; -generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ; - -# Declare flags - -flags borland.compile OPTIONS <debug-symbols>on : -v ; -flags borland.link OPTIONS <debug-symbols>on : -v ; - -flags borland.compile OPTIONS <optimization>off : -Od ; -flags borland.compile OPTIONS <optimization>speed : -O2 ; -flags borland.compile OPTIONS <optimization>space : -O1 ; - -if $(.BORLAND_HAS_FIXED_INLINING_BUGS) -{ - flags borland CFLAGS <inlining>off : -vi- ; - flags borland CFLAGS <inlining>on : -vi -w-inl ; - flags borland CFLAGS <inlining>full : -vi -w-inl ; -} -else -{ - flags borland CFLAGS : -vi- ; -} - -flags borland.compile OPTIONS <warnings>off : -w- ; -flags borland.compile OPTIONS <warnings>all : -w ; -flags borland.compile OPTIONS <warnings-as-errors>on : -w! ; - - -# Deal with various runtime configs... - -# This should be not for DLL -flags borland OPTIONS <user-interface>console : -tWC ; - -# -tWR sets -tW as well, so we turn it off here and then turn it -# on again later if we need it: -flags borland OPTIONS <runtime-link>shared : -tWR -tWC ; -flags borland OPTIONS <user-interface>gui : -tW ; - -flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ; -# Hmm.. not sure what's going on here. -flags borland OPTIONS : -WM- ; -flags borland OPTIONS <threading>multi : -tWM ; - - - -flags borland.compile OPTIONS <cxxflags> ; -flags borland.compile DEFINES <define> ; -flags borland.compile INCLUDES <include> ; - -flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ; - -# -# for C++ compiles the following options are turned on by default: -# -# -j5 stops after 5 errors -# -g255 allow an unlimited number of warnings -# -q no banner -# -c compile to object -# -P C++ code regardless of file extention -# -a8 8 byte alignment, this option is on in the IDE by default -# and effects binary compatibility. 
-# - -# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)" - - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)" -} - -# For C, we don't pass -P flag -actions compile.c -{ - "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)" -} - - -# Declare flags and action for linking -toolset.flags borland.link OPTIONS <debug-symbols>on : -v ; -toolset.flags borland.link LIBRARY_PATH <library-path> ; -toolset.flags borland.link FINDLIBS_ST <find-static-library> ; -toolset.flags borland.link FINDLIBS_SA <find-shared-library> ; -toolset.flags borland.link LIBRARIES <library-file> ; - -flags borland.link OPTIONS <linkflags> ; -flags borland.link OPTIONS <link>shared : -tWD ; - -flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ; -flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ; - - - -# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly -# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add -# $(BCC_TOOL_PATH) to the path -# The NEED_IMPLIB variable controls whether we need to invoke implib. - -flags borland.archive AROPTIONS <archiveflags> ; - -# Declare action for archives. We don't use response file -# since it's hard to get "+-" there. -# The /P256 increases 'page' size -- with too low -# values tlib fails when building large applications. -# CONSIDER: don't know what 'together' is for... -actions updated together piecemeal archive -{ - $(.set-path)$(.root:W)$(.old-path) - tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)" -} - - -if [ os.name ] = CYGWIN -{ - .set-path = "cmd /S /C set \"PATH=" ; - .old-path = ";%PATH%\" \"&&\"" ; - - - # Couldn't get TLIB to stop being confused about pathnames - # containing dashes (it seemed to treat them as option separators - # when passed through from bash), so we explicitly write the - # command into a .bat file and execute that. TLIB is also finicky - # about pathname style! Forward slashes, too, are treated as - # options. - actions updated together piecemeal archive - { - chdir $(<:D) - echo +-$(>:BS) > $(<:BS).rsp - $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp - } -} -else if [ os.name ] = NT -{ - .set-path = "set \"PATH=" ; - .old-path = ";%PATH%\" - " ; -} -else -{ - .set-path = "PATH=\"" ; - .old-path = "\":$PATH - export PATH - " ; -} - -RM = [ common.rm-command ] ; - -nl = " -" ; - -actions link -{ - $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -} - - -actions link.dll bind LIBRARIES RSP -{ - $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)" -} - -# It seems impossible to specify output file with directory when compiling -# asm files using bcc32, so use tasm32 directly. 
-# /ml makes all symbol names case-sensitive -actions asm -{ - $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)" -} - diff --git a/jam-files/boost-build/tools/builtin.jam b/jam-files/boost-build/tools/builtin.jam deleted file mode 100644 index 148e7308..00000000 --- a/jam-files/boost-build/tools/builtin.jam +++ /dev/null @@ -1,960 +0,0 @@ -# Copyright 2002, 2003, 2004, 2005 Dave Abrahams -# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera -# Copyright 2006 Juergen Hunold -# Copyright 2005 Toon Knapen -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines standard features and rules. - -import alias ; -import "class" : new ; -import errors ; -import feature ; -import generators ; -import numbers ; -import os ; -import path ; -import print ; -import project ; -import property ; -import regex ; -import scanner ; -import sequence ; -import stage ; -import symlink ; -import toolset ; -import type ; -import targets ; -import types/register ; -import utility ; -import virtual-target ; -import message ; -import convert ; - -# FIXME: the following generate module import is not needed here but removing it -# too hastly will break using code (e.g. the main Boost library Jamroot file) -# that forgot to import the generate module before calling the generate rule. -import generate ; - - -.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd - openbsd osf qnx qnxnto sgi solaris unix unixware windows - elf # Not actually an OS -- used for targeting bare metal where - # object format is ELF. This catches both -elf and -eabi gcc - # targets and well as other compilers targeting ELF. It is not - # clear how often do we need to key of ELF specifically as opposed - # to other bare metal targets, but let's stick with gcc naming. - ; - -# Feature used to determine which OS we're on. New <target-os> and <host-os> -# features should be used instead. -local os = [ modules.peek : OS ] ; -feature.feature os : $(os) : propagated link-incompatible ; - - -# Translates from bjam current OS to the os tags used in host-os and target-os, -# i.e. returns the running host-os. -# -local rule default-host-os ( ) -{ - local host-os ; - if [ os.name ] in $(.os-names:U) - { - host-os = [ os.name ] ; - } - else - { - switch [ os.name ] - { - case NT : host-os = windows ; - case AS400 : host-os = unix ; - case MINGW : host-os = windows ; - case BSDI : host-os = bsd ; - case COHERENT : host-os = unix ; - case DRAGONFLYBSD : host-os = bsd ; - case IRIX : host-os = sgi ; - case MACOSX : host-os = darwin ; - case KFREEBSD : host-os = freebsd ; - case LINUX : host-os = linux ; - case SUNOS : - ECHO "SunOS is not a supported operating system." ; - ECHO "We believe last version of SunOS was released in 1992, " ; - ECHO "so if you get this message, something is very wrong with configuration logic. " ; - ECHO "Please report this as a bug. " ; - EXIT ; - case * : host-os = unix ; - } - } - return $(host-os:L) ; -} - - -# The two OS features define a known set of abstract OS names. The host-os is -# the OS under which bjam is running. Even though this should really be a fixed -# property we need to list all the values to prevent unknown value errors. Both -# set the default value to the current OS to account for the default use case of -# building on the target OS. 
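# [Illustrative aside, not part of the original builtin.jam] Because target-os
# is propagated (declared just below), a cross build simply overrides it,
# while host-os always reports the machine running bjam. For example:
#
#   bjam toolset=gcc target-os=windows
#   exe app : app.cpp : <target-os>linux ;
#
# The toolset and target names above are examples only.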
-feature.feature host-os : $(.os-names) ; -feature.set-default host-os : [ default-host-os ] ; - -feature.feature target-os : $(.os-names) : propagated link-incompatible ; -feature.set-default target-os : [ default-host-os ] ; - - -feature.feature toolset : : implicit propagated symmetric ; -feature.feature stdlib : native : propagated composite ; -feature.feature link : shared static : propagated ; -feature.feature runtime-link : shared static : propagated ; -feature.feature runtime-debugging : on off : propagated ; -feature.feature optimization : off speed space none : propagated ; -feature.feature profiling : off on : propagated ; -feature.feature inlining : off on full : propagated ; -feature.feature threading : single multi : propagated ; -feature.feature rtti : on off : propagated ; -feature.feature exception-handling : on off : propagated ; - -# Whether there is support for asynchronous EH (e.g. catching SEGVs). -feature.feature asynch-exceptions : off on : propagated ; - -# Whether all extern "C" functions are considered nothrow by default. -feature.feature extern-c-nothrow : off on : propagated ; - -feature.feature debug-symbols : on off none : propagated ; -# Controls whether the binary should be stripped -- that is have -# everything not necessary to running removed. This option should -# not be very often needed. Also, this feature will show up in -# target paths of everything, not just binaries. Should fix that -# when impelementing feature relevance. -feature.feature strip : off on : propagated ; -feature.feature define : : free ; -feature.feature undef : : free ; -feature.feature "include" : : free path ; #order-sensitive ; -feature.feature cflags : : free ; -feature.feature cxxflags : : free ; -feature.feature fflags : : free ; -feature.feature asmflags : : free ; -feature.feature linkflags : : free ; -feature.feature archiveflags : : free ; -feature.feature version : : free ; - -# Generic, i.e. non-language specific, flags for tools. -feature.feature flags : : free ; -feature.feature location-prefix : : free ; - - -# The following features are incidental since they have no effect on built -# products. Not making them incidental will result in problems in corner cases, -# e.g.: -# -# unit-test a : a.cpp : <use>b ; -# lib b : a.cpp b ; -# -# Here, if <use> is not incidental, we would decide we have two targets for -# a.obj with different properties and complain about it. -# -# Note that making a feature incidental does not mean it is ignored. It may be -# ignored when creating a virtual target, but the rest of build process will use -# them. -feature.feature use : : free dependency incidental ; -feature.feature dependency : : free dependency incidental ; -feature.feature implicit-dependency : : free dependency incidental ; - -feature.feature warnings : - on # Enable default/"reasonable" warning level for the tool. - all # Enable all possible warnings issued by the tool. - off # Disable all warnings issued by the tool. - : incidental propagated ; - -feature.feature warnings-as-errors : - off # Do not fail the compilation if there are warnings. - on # Fail the compilation if there are warnings. - : incidental propagated ; - -# Feature that allows us to configure the maximal template instantiation depth -# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers -# actually support this configuration setting. 
-# -# Note that Boost Build currently does not allow defining features that take any -# positive integral value as a parameter, which is what we need here, so we just -# define some of the values here and leave it up to the user to extend this set -# as he needs using the feature.extend rule. -# -# TODO: This should be upgraded as soon as Boost Build adds support for custom -# validated feature values or at least features allowing any positive integral -# value. See related Boost Build related trac ticket #194. -# -feature.feature c++-template-depth - : - [ numbers.range 64 1024 : 64 ] - [ numbers.range 20 1000 : 10 ] - # Maximum template instantiation depth guaranteed for ANSI/ISO C++ - # conforming programs. - 17 - : - incidental optional propagated ; - -feature.feature source : : free dependency incidental ; -feature.feature library : : free dependency incidental ; -feature.feature file : : free dependency incidental ; -feature.feature find-shared-library : : free ; #order-sensitive ; -feature.feature find-static-library : : free ; #order-sensitive ; -feature.feature library-path : : free path ; #order-sensitive ; - -# Internal feature. -feature.feature library-file : : free dependency ; - -feature.feature name : : free ; -feature.feature tag : : free ; -feature.feature search : : free path ; #order-sensitive ; -feature.feature location : : free path ; -feature.feature dll-path : : free path ; -feature.feature hardcode-dll-paths : true false : incidental ; - - -# An internal feature that holds the paths of all dependency shared libraries. -# On Windows, it is needed so that we can add all those paths to PATH when -# running applications. On Linux, it is needed to add proper -rpath-link command -# line options. -feature.feature xdll-path : : free path ; - -# Provides means to specify def-file for windows DLLs. -feature.feature def-file : : free dependency ; - -feature.feature suppress-import-lib : false true : incidental ; - -# Internal feature used to store the name of a bjam action to call when building -# a target. -feature.feature action : : free ; - -# This feature is used to allow specific generators to run. For example, QT -# tools can only be invoked when QT library is used. In that case, <allow>qt -# will be in usage requirement of the library. -feature.feature allow : : free ; - -# The addressing model to generate code for. Currently a limited set only -# specifying the bit size of pointers. -feature.feature address-model : 16 32 64 32_64 : propagated optional ; - -# Type of CPU architecture to compile for. -feature.feature architecture : - # x86 and x86-64 - x86 - - # ia64 - ia64 - - # Sparc - sparc - - # RS/6000 & PowerPC - power - - # MIPS/SGI - mips1 mips2 mips3 mips4 mips32 mips32r2 mips64 - - # HP/PA-RISC - parisc - - # Advanced RISC Machines - arm - - # Combined architectures for platforms/toolsets that support building for - # multiple architectures at once. "combined" would be the default multi-arch - # for the toolset. - combined - combined-x86-power - - : propagated optional ; - -# The specific instruction set in an architecture to compile. 
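# [Illustrative aside, not part of the original builtin.jam] architecture,
# address-model and instruction-set are optional, propagated features, so
# they are normally requested per target or per invocation, e.g.
#
#   exe app : app.cpp : <architecture>x86 <address-model>64 ;
#   bjam instruction-set=core2
#
# The target name app is a placeholder; valid feature values are exactly the
# ones listed in these declarations.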
-feature.feature instruction-set : - # x86 and x86-64 - native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3 - pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe - conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale - yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp - athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2 - - # ia64 - itanium itanium1 merced itanium2 mckinley - - # Sparc - v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934 - sparclet tsc701 v9 ultrasparc ultrasparc3 - - # RS/6000 & PowerPC - 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400 - 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2 - power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a - - # MIPS - 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650 - r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300 - vr5000 vr5400 vr5500 - - # HP/PA-RISC - 700 7100 7100lc 7200 7300 8000 - - # Advanced RISC Machines - armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312 - - : propagated optional ; - -# Used to select a specific variant of C++ ABI if the compiler supports several. -feature.feature c++abi : : propagated optional ; - -feature.feature conditional : : incidental free ; - -# The value of 'no' prevents building of a target. -feature.feature build : yes no : optional ; - -# Windows-specific features - -feature.feature user-interface : console gui wince native auto ; - -feature.feature variant : : implicit composite propagated symmetric ; - - -# Declares a new variant. -# -# First determines explicit properties for this variant, by refining parents' -# explicit properties with the passed explicit properties. The result is -# remembered and will be used if this variant is used as parent. -# -# Second, determines the full property set for this variant by adding to the -# explicit properties default values for all missing non-symmetric properties. -# -# Lastly, makes appropriate value of 'variant' property expand to the full -# property set. -# -rule variant ( name # Name of the variant - : parents-or-properties * # Specifies parent variants, if - # 'explicit-properties' are given, and - # explicit-properties or parents otherwise. - : explicit-properties * # Explicit properties. - ) -{ - local parents ; - if ! $(explicit-properties) - { - if $(parents-or-properties[1]:G) - { - explicit-properties = $(parents-or-properties) ; - } - else - { - parents = $(parents-or-properties) ; - } - } - else - { - parents = $(parents-or-properties) ; - } - - # The problem is that we have to check for conflicts between base variants. - if $(parents[2]) - { - errors.error "multiple base variants are not yet supported" ; - } - - local inherited ; - # Add explicitly specified properties for parents. - for local p in $(parents) - { - # TODO: This check may be made stricter. - if ! [ feature.is-implicit-value $(p) ] - { - errors.error "Invalid base variant" $(p) ; - } - - inherited += $(.explicit-properties.$(p)) ; - } - property.validate $(explicit-properties) ; - explicit-properties = [ property.refine $(inherited) - : $(explicit-properties) ] ; - - # Record explicitly specified properties for this variant. We do this after - # inheriting parents' properties so they affect other variants derived from - # this one. 
- .explicit-properties.$(name) = $(explicit-properties) ; - - feature.extend variant : $(name) ; - feature.compose <variant>$(name) : $(explicit-properties) ; -} -IMPORT $(__name__) : variant : : variant ; - - -variant debug : <optimization>off <debug-symbols>on <inlining>off - <runtime-debugging>on ; -variant release : <optimization>speed <debug-symbols>off <inlining>full - <runtime-debugging>off <define>NDEBUG ; -variant profile : release : <profiling>on <debug-symbols>on ; - - -class searched-lib-target : abstract-file-target -{ - rule __init__ ( name - : project - : shared ? - : search * - : action - ) - { - abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project) - : $(action) : ; - - self.shared = $(shared) ; - self.search = $(search) ; - } - - rule shared ( ) - { - return $(self.shared) ; - } - - rule search ( ) - { - return $(self.search) ; - } - - rule actualize-location ( target ) - { - NOTFILE $(target) ; - } - - rule path ( ) - { - } -} - - -# The generator class for libraries (target type LIB). Depending on properties -# it will request building of the appropriate specific library type -- -# -- SHARED_LIB, STATIC_LIB or SHARED_LIB. -# -class lib-generator : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - # The lib generator is composing, and can be only invoked with an - # explicit name. This check is present in generator.run (and so in - # builtin.linking-generator) but duplicated here to avoid doing extra - # work. - if $(name) - { - local properties = [ $(property-set).raw ] ; - # Determine the needed target type. - local actual-type ; - # <source>files can be generated by <conditional>@rule feature - # in which case we do not consider it a SEARCHED_LIB type. - if ! <source> in $(properties:G) && - ( <search> in $(properties:G) || <name> in $(properties:G) ) - { - actual-type = SEARCHED_LIB ; - } - else if <file> in $(properties:G) - { - actual-type = LIB ; - } - else if <link>shared in $(properties) - { - actual-type = SHARED_LIB ; - } - else - { - actual-type = STATIC_LIB ; - } - property-set = [ $(property-set).add-raw <main-target-type>LIB ] ; - # Construct the target. - return [ generators.construct $(project) $(name) : $(actual-type) - : $(property-set) : $(sources) ] ; - } - } - - rule viable-source-types ( ) - { - return * ; - } -} - - -generators.register [ new lib-generator builtin.lib-generator : : LIB ] ; - - -# The implementation of the 'lib' rule. Beyond standard syntax that rule allows -# simplified: "lib a b c ;". -# -rule lib ( names + : sources * : requirements * : default-build * : - usage-requirements * ) -{ - if $(names[2]) - { - if <name> in $(requirements:G) - { - errors.user-error "When several names are given to the 'lib' rule" : - "it is not allowed to specify the <name> feature." ; - } - if $(sources) - { - errors.user-error "When several names are given to the 'lib' rule" : - "it is not allowed to specify sources." ; - } - } - - # This is a circular module dependency so it must be imported here. - import targets ; - - local project = [ project.current ] ; - local result ; - - for local name in $(names) - { - local r = $(requirements) ; - # Support " lib a ; " and " lib a b c ; " syntax. - if ! $(sources) && ! <name> in $(requirements:G) - && ! 
<file> in $(requirements:G) - { - r += <name>$(name) ; - } - result += [ targets.main-target-alternative - [ new typed-target $(name) : $(project) : LIB - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(r) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] - ] ] ; - } - return $(result) ; -} -IMPORT $(__name__) : lib : : lib ; - - -class searched-lib-generator : generator -{ - import property-set ; - - rule __init__ ( ) - { - # The requirements cause the generators to be tried *only* when we're - # building a lib target with a 'search' feature. This seems ugly --- all - # we want is to make sure searched-lib-generator is not invoked deep - # inside transformation search to produce intermediate targets. - generator.__init__ searched-lib-generator : : SEARCHED_LIB ; - } - - rule run ( project name ? : property-set : sources * ) - { - if $(name) - { - # If 'name' is empty, it means we have not been called to build a - # top-level target. In this case, we just fail immediately, because - # searched-lib-generator cannot be used to produce intermediate - # targets. - - local properties = [ $(property-set).raw ] ; - local shared ; - if <link>shared in $(properties) - { - shared = true ; - } - - local search = [ feature.get-values <search> : $(properties) ] ; - - local a = [ new null-action $(property-set) ] ; - local lib-name = [ feature.get-values <name> : $(properties) ] ; - lib-name ?= $(name) ; - local t = [ new searched-lib-target $(lib-name) : $(project) - : $(shared) : $(search) : $(a) ] ; - # We return sources for a simple reason. If there is - # lib png : z : <name>png ; - # the 'z' target should be returned, so that apps linking to 'png' - # will link to 'z', too. - return [ property-set.create <xdll-path>$(search) ] - [ virtual-target.register $(t) ] $(sources) ; - } - } -} - -generators.register [ new searched-lib-generator ] ; - - -class prebuilt-lib-generator : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - local f = [ $(property-set).get <file> ] ; - return $(f) $(sources) ; - } -} - -generators.register - [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ; - -generators.override builtin.prebuilt : builtin.lib-generator ; - -class preprocessed-target-class : basic-target -{ - import generators ; - rule construct ( name : sources * : property-set ) - { - local result = [ generators.construct [ project ] - $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ; - if ! $(result) - { - result = [ generators.construct [ project ] - $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ; - } - if ! $(result) - { - local s ; - for x in $(sources) - { - s += [ $(x).name ] ; - } - local p = [ project ] ; - errors.user-error - "In project" [ $(p).name ] : - "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." 
; - } - return $(result) ; - } -} - -rule preprocessed ( name : sources * : requirements * : default-build * : - usage-requirements * ) -{ - local project = [ project.current ] ; - return [ targets.main-target-alternative - [ new preprocessed-target-class $(name) : $(project) - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(r) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] - ] ] ; -} - -IMPORT $(__name__) : preprocessed : : preprocessed ; - -class compile-action : action -{ - import sequence ; - - rule __init__ ( targets * : sources * : action-name : properties * ) - { - action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ; - } - - # For all virtual targets for the same dependency graph as self, i.e. which - # belong to the same main target, add their directories to the include path. - # - rule adjust-properties ( property-set ) - { - local s = [ $(self.targets[1]).creating-subvariant ] ; - return [ $(property-set).add-raw - [ $(s).implicit-includes "include" : H ] ] ; - } -} - - -# Declare a special compiler generator. The only thing it does is changing the -# type used to represent 'action' in the constructed dependency graph to -# 'compile-action'. That class in turn adds additional include paths to handle -# cases when a source file includes headers which are generated themselves. -# -class C-compiling-generator : generator -{ - rule __init__ ( id : source-types + : target-types + : requirements * - : optional-properties * ) - { - generator.__init__ $(id) : $(source-types) : $(target-types) : - $(requirements) : $(optional-properties) ; - } - - rule action-class ( ) - { - return compile-action ; - } -} - - -rule register-c-compiler ( id : source-types + : target-types + : requirements * - : optional-properties * ) -{ - generators.register [ new C-compiling-generator $(id) : $(source-types) : - $(target-types) : $(requirements) : $(optional-properties) ] ; -} - -# FIXME: this is ugly, should find a better way (we would like client code to -# register all generators as "generators.some-rule" instead of -# "some-module.some-rule".) -# -IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ; - - -# The generator class for handling EXE and SHARED_LIB creation. -# -class linking-generator : generator -{ - import path ; - import project ; - import property-set ; - import type ; - - rule __init__ ( id - composing ? : # The generator will be composing if a non-empty - # string is passed or the parameter is not given. To - # make the generator non-composing, pass an empty - # string (""). - source-types + : - target-types + : - requirements * ) - { - composing ?= true ; - generator.__init__ $(id) $(composing) : $(source-types) - : $(target-types) : $(requirements) ; - } - - rule run ( project name ? : property-set : sources + ) - { - sources += [ $(property-set).get <library> ] ; - - # Add <library-path> properties for all searched libraries. - local extra ; - for local s in $(sources) - { - if [ $(s).type ] = SEARCHED_LIB - { - local search = [ $(s).search ] ; - extra += <library-path>$(search) ; - } - } - - # It is possible that sources include shared libraries that did not came - # from 'lib' targets, e.g. .so files specified as sources. 
In this case - # we have to add extra dll-path properties and propagate extra xdll-path - # properties so that application linking to us will get xdll-path to - # those libraries. - local extra-xdll-paths ; - for local s in $(sources) - { - if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ] - { - # Unfortunately, we do not have a good way to find the path to a - # file, so use this nasty approach. - # - # TODO: This needs to be done better. One thing that is really - # broken with this is that it does not work correctly with - # projects having multiple source locations. - local p = [ $(s).project ] ; - local location = [ path.root [ $(s).name ] - [ $(p).get source-location ] ] ; - extra-xdll-paths += [ path.parent $(location) ] ; - } - } - - # Hardcode DLL paths only when linking executables. - # Pros: do not need to relink libraries when installing. - # Cons: "standalone" libraries (plugins, python extensions) can not - # hardcode paths to dependent libraries. - if [ $(property-set).get <hardcode-dll-paths> ] = true - && [ type.is-derived $(self.target-types[1]) EXE ] - { - local xdll-path = [ $(property-set).get <xdll-path> ] ; - extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ; - } - - if $(extra) - { - property-set = [ $(property-set).add-raw $(extra) ] ; - } - - local result = [ generator.run $(project) $(name) : $(property-set) - : $(sources) ] ; - - local ur ; - if $(result) - { - ur = [ extra-usage-requirements $(result) : $(property-set) ] ; - ur = [ $(ur).add - [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ; - } - return $(ur) $(result) ; - } - - rule extra-usage-requirements ( created-targets * : property-set ) - { - local result = [ property-set.empty ] ; - local extra ; - - # Add appropricate <xdll-path> usage requirements. - local raw = [ $(property-set).raw ] ; - if <link>shared in $(raw) - { - local paths ; - local pwd = [ path.pwd ] ; - for local t in $(created-targets) - { - if [ type.is-derived [ $(t).type ] SHARED_LIB ] - { - paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ; - } - } - extra += $(paths:G=<xdll-path>) ; - } - - # We need to pass <xdll-path> features that we've got from sources, - # because if a shared library is built, exe using it needs to know paths - # to other shared libraries this one depends on in order to be able to - # find them all at runtime. - - # Just pass all features in property-set, it is theorically possible - # that we will propagate <xdll-path> features explicitly specified by - # the user, but then the user is to blaim for using an internal feature. - local values = [ $(property-set).get <xdll-path> ] ; - extra += $(values:G=<xdll-path>) ; - - if $(extra) - { - result = [ property-set.create $(extra) ] ; - } - return $(result) ; - } - - rule generated-targets ( sources + : property-set : project name ? ) - { - local sources2 ; # Sources to pass to inherited rule. - local properties2 ; # Properties to pass to inherited rule. - local libraries ; # Library sources. - - # Searched libraries are not passed as arguments to the linker but via - # some option. So, we pass them to the action using a property. 
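To make that property encoding concrete: the searched libraries are split into a shared and a static group and handed to the link action as '&&'-joined values of <find-shared-library> and <find-static-library>. A standalone Python sketch of the same bookkeeping (hypothetical helper name, illustrative only):

    # Encode searched-library names as action properties instead of linker args.
    def encode_searched_libs(searched):
        """searched: iterable of (library_name, is_shared) pairs."""
        shared = [name for name, is_shared in searched if is_shared]
        static = [name for name, is_shared in searched if not is_shared]
        properties = []
        if shared:
            properties.append("<find-shared-library>" + "&&".join(shared))
        if static:
            properties.append("<find-static-library>" + "&&".join(static))
        return properties

    print(encode_searched_libs([("z", True), ("png", True), ("m", False)]))
    # ['<find-shared-library>z&&png', '<find-static-library>m']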
- properties2 = [ $(property-set).raw ] ; - local fsa ; - local fst ; - for local s in $(sources) - { - if [ type.is-derived [ $(s).type ] SEARCHED_LIB ] - { - local name = [ $(s).name ] ; - if [ $(s).shared ] - { - fsa += $(name) ; - } - else - { - fst += $(name) ; - } - } - else - { - sources2 += $(s) ; - } - } - properties2 += <find-shared-library>$(fsa:J=&&) - <find-static-library>$(fst:J=&&) ; - - return [ generator.generated-targets $(sources2) - : [ property-set.create $(properties2) ] : $(project) $(name) ] ; - } -} - - -rule register-linker ( id composing ? : source-types + : target-types + - : requirements * ) -{ - generators.register [ new linking-generator $(id) $(composing) - : $(source-types) : $(target-types) : $(requirements) ] ; -} - - -# The generator class for handling STATIC_LIB creation. -# -class archive-generator : generator -{ - import property-set ; - - rule __init__ ( id composing ? : source-types + : target-types + - : requirements * ) - { - composing ?= true ; - generator.__init__ $(id) $(composing) : $(source-types) - : $(target-types) : $(requirements) ; - } - - rule run ( project name ? : property-set : sources + ) - { - sources += [ $(property-set).get <library> ] ; - - local result = [ generator.run $(project) $(name) : $(property-set) - : $(sources) ] ; - - # For static linking, if we get a library in source, we can not directly - # link to it so we need to cause our dependencies to link to that - # library. There are two approaches: - # - adding the library to the list of returned targets. - # - using the <library> usage requirements. - # The problem with the first is: - # - # lib a1 : : <file>liba1.a ; - # lib a2 : a2.cpp a1 : <link>static ; - # install dist : a2 ; - # - # here we will try to install 'a1', even though it is not necessary in - # the general case. With the second approach, even indirect dependants - # will link to the library, but it should not cause any harm. So, return - # all LIB sources together with created targets, so that dependants link - # to them. - local usage-requirements ; - if [ $(property-set).get <link> ] = static - { - for local t in $(sources) - { - if [ type.is-derived [ $(t).type ] LIB ] - { - usage-requirements += <library>$(t) ; - } - } - } - - usage-requirements = [ property-set.create $(usage-requirements) ] ; - - return $(usage-requirements) $(result) ; - } -} - - -rule register-archiver ( id composing ? : source-types + : target-types + - : requirements * ) -{ - generators.register [ new archive-generator $(id) $(composing) - : $(source-types) : $(target-types) : $(requirements) ] ; -} - - -# Generator that accepts everything and produces nothing. Useful as a general -# fallback for toolset-specific actions like PCH generation. -# -class dummy-generator : generator -{ - import property-set ; - - rule run ( project name ? : property-set : sources + ) - { - return [ property-set.empty ] ; - } -} - -IMPORT $(__name__) : register-linker register-archiver - : : generators.register-linker generators.register-archiver ; diff --git a/jam-files/boost-build/tools/builtin.py b/jam-files/boost-build/tools/builtin.py deleted file mode 100644 index 31a7bffe..00000000 --- a/jam-files/boost-build/tools/builtin.py +++ /dev/null @@ -1,718 +0,0 @@ -# Status: minor updates by Steven Watanabe to make gcc work -# -# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. 
This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -""" Defines standard features and rules. -""" - -import b2.build.targets as targets - -import sys -from b2.build import feature, property, virtual_target, generators, type, property_set, scanner -from b2.util.utility import * -from b2.util import path, regex, bjam_signature -import b2.tools.types -from b2.manager import get_manager - - -# Records explicit properties for a variant. -# The key is the variant name. -__variant_explicit_properties = {} - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - """ - global __variant_explicit_properties - - __variant_explicit_properties = {} - -@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"])) -def variant (name, parents_or_properties, explicit_properties = []): - """ Declares a new variant. - First determines explicit properties for this variant, by - refining parents' explicit properties with the passed explicit - properties. The result is remembered and will be used if - this variant is used as parent. - - Second, determines the full property set for this variant by - adding to the explicit properties default values for all properties - which neither present nor are symmetric. - - Lastly, makes appropriate value of 'variant' property expand - to the full property set. - name: Name of the variant - parents_or_properties: Specifies parent variants, if - 'explicit_properties' are given, - and explicit_properties otherwise. - explicit_properties: Explicit properties. - """ - parents = [] - if not explicit_properties: - explicit_properties = parents_or_properties - else: - parents = parents_or_properties - - inherited = property_set.empty() - if parents: - - # If we allow multiple parents, we'd have to to check for conflicts - # between base variants, and there was no demand for so to bother. - if len (parents) > 1: - raise BaseException ("Multiple base variants are not yet supported") - - p = parents[0] - # TODO: the check may be stricter - if not feature.is_implicit_value (p): - raise BaseException ("Invalid base varaint '%s'" % p) - - inherited = __variant_explicit_properties[p] - - explicit_properties = property_set.create_with_validation(explicit_properties) - explicit_properties = inherited.refine(explicit_properties) - - # Record explicitly specified properties for this variant - # We do this after inheriting parents' properties, so that - # they affect other variants, derived from this one. - __variant_explicit_properties[name] = explicit_properties - - feature.extend('variant', [name]) - feature.compose ("<variant>" + name, explicit_properties.all()) - -__os_names = """ - amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd - openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware - vms windows -""".split() - -# Translates from bjam current OS to the os tags used in host-os and target-os, -# i.e. returns the running host-os. 
-# -def default_host_os(): - host_os = os_name() - if host_os not in (x.upper() for x in __os_names): - if host_os == 'NT': host_os = 'windows' - elif host_os == 'AS400': host_os = 'unix' - elif host_os == 'MINGW': host_os = 'windows' - elif host_os == 'BSDI': host_os = 'bsd' - elif host_os == 'COHERENT': host_os = 'unix' - elif host_os == 'DRAGONFLYBSD': host_os = 'bsd' - elif host_os == 'IRIX': host_os = 'sgi' - elif host_os == 'MACOSX': host_os = 'darwin' - elif host_os == 'KFREEBSD': host_os = 'freebsd' - elif host_os == 'LINUX': host_os = 'linux' - else: host_os = 'unix' - return host_os.lower() - -def register_globals (): - """ Registers all features and variants declared by this module. - """ - - # This feature is used to determine which OS we're on. - # In future, this may become <target-os> and <host-os> - # TODO: check this. Compatibility with bjam names? Subfeature for version? - os = sys.platform - feature.feature ('os', [os], ['propagated', 'link-incompatible']) - - - # The two OS features define a known set of abstract OS names. The host-os is - # the OS under which bjam is running. Even though this should really be a fixed - # property we need to list all the values to prevent unknown value errors. Both - # set the default value to the current OS to account for the default use case of - # building on the target OS. - feature.feature('host-os', __os_names) - feature.set_default('host-os', default_host_os()) - - feature.feature('target-os', __os_names, ['propagated', 'link-incompatible']) - feature.set_default('target-os', default_host_os()) - - feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric']) - - feature.feature ('stdlib', ['native'], ['propagated', 'composite']) - - feature.feature ('link', ['shared', 'static'], ['propagated']) - feature.feature ('runtime-link', ['shared', 'static'], ['propagated']) - feature.feature ('runtime-debugging', ['on', 'off'], ['propagated']) - - - feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated']) - feature.feature ('profiling', ['off', 'on'], ['propagated']) - feature.feature ('inlining', ['off', 'on', 'full'], ['propagated']) - - feature.feature ('threading', ['single', 'multi'], ['propagated']) - feature.feature ('rtti', ['on', 'off'], ['propagated']) - feature.feature ('exception-handling', ['on', 'off'], ['propagated']) - feature.feature ('debug-symbols', ['on', 'off'], ['propagated']) - feature.feature ('define', [], ['free']) - feature.feature ('include', [], ['free', 'path']) #order-sensitive - feature.feature ('cflags', [], ['free']) - feature.feature ('cxxflags', [], ['free']) - feature.feature ('linkflags', [], ['free']) - feature.feature ('archiveflags', [], ['free']) - feature.feature ('version', [], ['free']) - - feature.feature ('location-prefix', [], ['free']) - - feature.feature ('action', [], ['free']) - - - # The following features are incidental, since - # in themself they have no effect on build products. - # Not making them incidental will result in problems in corner - # cases, for example: - # - # unit-test a : a.cpp : <use>b ; - # lib b : a.cpp b ; - # - # Here, if <use> is not incidental, we'll decide we have two - # targets for a.obj with different properties, and will complain. - # - # Note that making feature incidental does not mean it's ignored. It may - # be ignored when creating the virtual target, but the rest of build process - # will use them. 
- feature.feature ('use', [], ['free', 'dependency', 'incidental']) - feature.feature ('dependency', [], ['free', 'dependency', 'incidental']) - feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental']) - - feature.feature('warnings', [ - 'on', # Enable default/"reasonable" warning level for the tool. - 'all', # Enable all possible warnings issued by the tool. - 'off'], # Disable all warnings issued by the tool. - ['incidental', 'propagated']) - - feature.feature('warnings-as-errors', [ - 'off', # Do not fail the compilation if there are warnings. - 'on'], # Fail the compilation if there are warnings. - ['incidental', 'propagated']) - - feature.feature ('source', [], ['free', 'dependency', 'incidental']) - feature.feature ('library', [], ['free', 'dependency', 'incidental']) - feature.feature ('file', [], ['free', 'dependency', 'incidental']) - feature.feature ('find-shared-library', [], ['free']) #order-sensitive ; - feature.feature ('find-static-library', [], ['free']) #order-sensitive ; - feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ; - # Internal feature. - feature.feature ('library-file', [], ['free', 'dependency']) - - feature.feature ('name', [], ['free']) - feature.feature ('tag', [], ['free']) - feature.feature ('search', [], ['free', 'path']) #order-sensitive ; - feature.feature ('location', [], ['free', 'path']) - - feature.feature ('dll-path', [], ['free', 'path']) - feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental']) - - - # This is internal feature which holds the paths of all dependency - # dynamic libraries. On Windows, it's needed so that we can all - # those paths to PATH, when running applications. - # On Linux, it's needed to add proper -rpath-link command line options. - feature.feature ('xdll-path', [], ['free', 'path']) - - #provides means to specify def-file for windows dlls. - feature.feature ('def-file', [], ['free', 'dependency']) - - # This feature is used to allow specific generators to run. - # For example, QT tools can only be invoked when QT library - # is used. In that case, <allow>qt will be in usage requirement - # of the library. - feature.feature ('allow', [], ['free']) - - # The addressing model to generate code for. Currently a limited set only - # specifying the bit size of pointers. - feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional']) - - # Type of CPU architecture to compile for. - feature.feature('architecture', [ - # x86 and x86-64 - 'x86', - - # ia64 - 'ia64', - - # Sparc - 'sparc', - - # RS/6000 & PowerPC - 'power', - - # MIPS/SGI - 'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64', - - # HP/PA-RISC - 'parisc', - - # Advanced RISC Machines - 'arm', - - # Combined architectures for platforms/toolsets that support building for - # multiple architectures at once. "combined" would be the default multi-arch - # for the toolset. - 'combined', - 'combined-x86-power'], - - ['propagated', 'optional']) - - # The specific instruction set in an architecture to compile. 
- feature.feature('instruction-set', [ - # x86 and x86-64 - 'i386', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3', - 'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'conroe', 'conroe-xe', - 'conroe-l', 'allendale', 'mermon', 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale', - 'yorksfield', 'nehalem', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp', - 'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'winchip-c6', 'winchip2', 'c3', 'c3-2', - - # ia64 - 'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley', - - # Sparc - 'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934', - 'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3', - - # RS/6000 & PowerPC - '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602', - '603', '603e', '604', '604e', '620', '630', '740', '7400', - '7450', '750', '801', '821', '823', '860', '970', '8540', - 'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2', - 'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios', - 'rios1', 'rsc', 'rios2', 'rs64a', - - # MIPS - '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000', - 'r4100', 'r4300', 'r4400', 'r4600', 'r4650', - 'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100', - 'vr4111', 'vr4120', 'vr4130', 'vr4300', - 'vr5000', 'vr5400', 'vr5500', - - # HP/PA-RISC - '700', '7100', '7100lc', '7200', '7300', '8000', - - # Advanced RISC Machines - 'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5', - 'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'], - - ['propagated', 'optional']) - - feature.feature('conditional', [], ['incidental', 'free']) - - # The value of 'no' prevents building of a target. 
- feature.feature('build', ['yes', 'no'], ['optional']) - - # Windows-specific features - feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], []) - feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric']) - - - variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on']) - variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full', - '<runtime-debugging>off', '<define>NDEBUG']) - variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on']) - - type.register ('H', ['h']) - type.register ('HPP', ['hpp'], 'H') - type.register ('C', ['c']) - - -reset () -register_globals () - -class SearchedLibTarget (virtual_target.AbstractFileTarget): - def __init__ (self, name, project, shared, real_name, search, action): - virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action) - - self.shared_ = shared - self.real_name_ = real_name - if not self.real_name_: - self.real_name_ = name - self.search_ = search - - def shared (self): - return self.shared_ - - def real_name (self): - return self.real_name_ - - def search (self): - return self.search_ - - def actualize_location (self, target): - bjam.call("NOTFILE", target) - - def path (self): - #FIXME: several functions rely on this not being None - return "" - - -class CScanner (scanner.Scanner): - def __init__ (self, includes): - scanner.Scanner.__init__ (self) - - self.includes_ = includes - - def pattern (self): - return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")' - - def process (self, target, matches, binding): - - angle = regex.transform (matches, "<(.*)>") - quoted = regex.transform (matches, '"(.*)"') - - g = str(id(self)) - b = os.path.normpath(os.path.dirname(binding[0])) - - # Attach binding of including file to included targets. - # When target is directly created from virtual target - # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. - # We don't need this extra information for angle includes, - # since they should not depend on including file (we can't - # get literal "." in include path). - g2 = g + "#" + b - - g = "<" + g + ">" - g2 = "<" + g2 + ">" - angle = [g + x for x in angle] - quoted = [g2 + x for x in quoted] - - all = angle + quoted - bjam.call("mark-included", target, all) - - engine = get_manager().engine() - engine.set_target_variable(angle, "SEARCH", get_value(self.includes_)) - engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_)) - - # Just propagate current scanner to includes, in a hope - # that includes do not change scanners. - get_manager().scanners().propagate(self, angle + quoted) - -scanner.register (CScanner, 'include') -type.set_scanner ('CPP', CScanner) -type.set_scanner ('C', CScanner) - -# Ported to trunk@47077 -class LibGenerator (generators.Generator): - """ The generator class for libraries (target type LIB). Depending on properties it will - request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or - SHARED_LIB. - """ - - def __init__(self, id = 'LibGenerator', composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []): - generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements) - - def run(self, project, name, prop_set, sources): - - # The lib generator is composing, and can be only invoked with - # explicit name. 
This check is present in generator.run (and so in - # builtin.LinkingGenerator), but duplicate it here to avoid doing - # extra work. - if name: - properties = prop_set.raw() - # Determine the needed target type - actual_type = None - properties_grist = get_grist(properties) - if '<source>' not in properties_grist and \ - ('<search>' in properties_grist or '<name>' in properties_grist): - actual_type = 'SEARCHED_LIB' - elif '<file>' in properties_grist: - # The generator for - actual_type = 'LIB' - elif '<link>shared' in properties: - actual_type = 'SHARED_LIB' - else: - actual_type = 'STATIC_LIB' - - prop_set = prop_set.add_raw(['<main-target-type>LIB']) - - # Construct the target. - return generators.construct(project, name, actual_type, prop_set, sources) - - def viable_source_types(self): - return ['*'] - -generators.register(LibGenerator()) - -def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]): - """The implementation of the 'lib' rule. Beyond standard syntax that rule allows - simplified: 'lib a b c ;'.""" - - if len(names) > 1: - if any(r.startswith('<name>') for r in requirements): - get_manager().errors()("When several names are given to the 'lib' rule\n" + - "it is not allowed to specify the <name> feature.") - - if sources: - get_manager().errors()("When several names are given to the 'lib' rule\n" + - "it is not allowed to specify sources.") - - project = get_manager().projects().current() - result = [] - - for name in names: - r = requirements[:] - - # Support " lib a ; " and " lib a b c ; " syntax. - if not sources and not any(r.startswith("<name>") for r in requirements) \ - and not any(r.startswith("<file") for r in requirements): - r.append("<name>" + name) - - result.append(targets.create_typed_metatarget(name, "LIB", sources, - r, - default_build, - usage_requirements)) - return result - -get_manager().projects().add_rule("lib", lib) - - -# Updated to trunk@47077 -class SearchedLibGenerator (generators.Generator): - def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []): - # TODO: the comment below looks strange. There are no requirements! - # The requirements cause the generators to be tried *only* when we're building - # lib target and there's 'search' feature. This seems ugly --- all we want - # is make sure SearchedLibGenerator is not invoked deep in transformation - # search. - generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - - def run(self, project, name, prop_set, sources): - - if not name: - return None - - # If name is empty, it means we're called not from top-level. - # In this case, we just fail immediately, because SearchedLibGenerator - # cannot be used to produce intermediate targets. - - properties = prop_set.raw () - shared = '<link>shared' in properties - - a = virtual_target.NullAction (project.manager(), prop_set) - - real_name = feature.get_values ('<name>', properties) - if real_name: - real_name = real_name[0] - else: - real_nake = name - search = feature.get_values('<search>', properties) - usage_requirements = property_set.create(['<xdll-path>' + p for p in search]) - t = SearchedLibTarget(name, project, shared, real_name, search, a) - - # We return sources for a simple reason. If there's - # lib png : z : <name>png ; - # the 'z' target should be returned, so that apps linking to - # 'png' will link to 'z', too. 
- return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources) - -generators.register (SearchedLibGenerator ()) - -### class prebuilt-lib-generator : generator -### { -### rule __init__ ( * : * ) -### { -### generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -### } -### -### rule run ( project name ? : prop_set : sources * : multiple ? ) -### { -### local f = [ $(prop_set).get <file> ] ; -### return $(f) $(sources) ; -### } -### } -### -### generators.register -### [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ; - - -class CompileAction (virtual_target.Action): - def __init__ (self, manager, sources, action_name, prop_set): - virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set) - - def adjust_properties (self, prop_set): - """ For all virtual targets for the same dependency graph as self, - i.e. which belong to the same main target, add their directories - to include path. - """ - s = self.targets () [0].creating_subvariant () - - return prop_set.add_raw (s.implicit_includes ('include', 'H')) - -class CCompilingGenerator (generators.Generator): - """ Declare a special compiler generator. - The only thing it does is changing the type used to represent - 'action' in the constructed dependency graph to 'CompileAction'. - That class in turn adds additional include paths to handle a case - when a source file includes headers which are generated themselfs. - """ - def __init__ (self, id, composing, source_types, target_types_and_names, requirements): - # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong. - generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - - def action_class (self): - return CompileAction - -def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []): - g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties) - return generators.register (g) - - -class LinkingGenerator (generators.Generator): - """ The generator class for handling EXE and SHARED_LIB creation. - """ - def __init__ (self, id, composing, source_types, target_types_and_names, requirements): - generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - - def run (self, project, name, prop_set, sources): - - lib_sources = prop_set.get('<library>') - sources.extend(lib_sources) - - # Add <library-path> properties for all searched libraries - extra = [] - for s in sources: - if s.type () == 'SEARCHED_LIB': - search = s.search() - extra.extend(property.Property('<library-path>', sp) for sp in search) - - orig_xdll_path = [] - - if prop_set.get('<hardcode-dll-paths>') == ['true'] \ - and type.is_derived(self.target_types_ [0], 'EXE'): - xdll_path = prop_set.get('<xdll-path>') - orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ] - # It's possible that we have libraries in sources which did not came - # from 'lib' target. For example, libraries which are specified - # just as filenames as sources. We don't have xdll-path properties - # for such target, but still need to add proper dll-path properties. - for s in sources: - if type.is_derived (s.type (), 'SHARED_LIB') and not s.action (): - # Unfortunately, we don't have a good way to find the path - # to a file, so use this nasty approach. 
- p = s.project() - location = path.root(s.name(), p.get('source-location')) - xdll_path.append(path.parent(location)) - - extra.extend(property.Property('<dll-path>', sp) for sp in xdll_path) - - if extra: - prop_set = prop_set.add_raw (extra) - - result = generators.Generator.run(self, project, name, prop_set, sources) - - if result: - ur = self.extra_usage_requirements(result, prop_set) - ur = ur.add(property_set.create(orig_xdll_path)) - else: - return None - - return(ur, result) - - def extra_usage_requirements (self, created_targets, prop_set): - - result = property_set.empty () - extra = [] - - # Add appropriate <xdll-path> usage requirements. - raw = prop_set.raw () - if '<link>shared' in raw: - paths = [] - - # TODO: is it safe to use the current directory? I think we should use - # another mechanism to allow this to be run from anywhere. - pwd = os.getcwd() - - for t in created_targets: - if type.is_derived(t.type(), 'SHARED_LIB'): - paths.append(path.root(path.make(t.path()), pwd)) - - extra += replace_grist(paths, '<xdll-path>') - - # We need to pass <xdll-path> features that we've got from sources, - # because if shared library is built, exe which uses it must know paths - # to other shared libraries this one depends on, to be able to find them - # all at runtime. - - # Just pass all features in property_set, it's theorically possible - # that we'll propagate <xdll-path> features explicitly specified by - # the user, but then the user's to blaim for using internal feature. - values = prop_set.get('<xdll-path>') - extra += replace_grist(values, '<xdll-path>') - - if extra: - result = property_set.create(extra) - - return result - - def generated_targets (self, sources, prop_set, project, name): - - # sources to pass to inherited rule - sources2 = [] - # sources which are libraries - libraries = [] - - # Searched libraries are not passed as argument to linker - # but via some option. So, we pass them to the action - # via property. - fsa = [] - fst = [] - for s in sources: - if type.is_derived(s.type(), 'SEARCHED_LIB'): - n = s.real_name() - if s.shared(): - fsa.append(n) - - else: - fst.append(n) - - else: - sources2.append(s) - - add = [] - if fsa: - add.append("<find-shared-library>" + '&&'.join(fsa)) - if fst: - add.append("<find-static-library>" + '&&'.join(fst)) - - spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name) - return spawn - - -def register_linker(id, source_types, target_types, requirements): - g = LinkingGenerator(id, True, source_types, target_types, requirements) - generators.register(g) - -class ArchiveGenerator (generators.Generator): - """ The generator class for handling STATIC_LIB creation. - """ - def __init__ (self, id, composing, source_types, target_types_and_names, requirements): - generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - - def run (self, project, name, prop_set, sources): - sources += prop_set.get ('<library>') - - result = generators.Generator.run (self, project, name, prop_set, sources) - - return result - -### rule register-archiver ( id composing ? 
: source_types + : target_types + : -### requirements * ) -### { -### local g = [ new ArchiveGenerator $(id) $(composing) : $(source_types) -### : $(target_types) : $(requirements) ] ; -### generators.register $(g) ; -### } -### -### -### IMPORT $(__name__) : register-linker register-archiver -### : : generators.register-linker generators.register-archiver ; -### -### -### - -get_manager().projects().add_rule("variant", variant) - -import stage -import symlink -import message diff --git a/jam-files/boost-build/tools/cast.jam b/jam-files/boost-build/tools/cast.jam deleted file mode 100644 index 6c84922f..00000000 --- a/jam-files/boost-build/tools/cast.jam +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2005 Vladimir Prus. -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target 'cast', used to change type for target. For example, in Qt -# library one wants two kinds of CPP files -- those that just compiled and those -# that are passed via the MOC tool. -# -# This is done with: -# -# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ; -# -# Boost.Build will assing target type CPP to both main.cpp and widget.cpp. Then, -# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt -# support will run the MOC tool as part of the build process. -# -# At the moment, the 'cast' rule only works for non-derived (source) targets. -# -# TODO: The following comment is unclear or incorrect. Clean it up. -# > Another solution would be to add a separate main target 'moc-them' that -# > would moc all the passed sources, no matter what their type is, but I prefer -# > cast, as defining a new target type + generator for that type is somewhat -# > simpler than defining a main target rule. - -import "class" : new ; -import errors ; -import project ; -import property-set ; -import targets ; -import type ; - - -class cast-target-class : typed-target -{ - import type ; - - rule __init__ ( name : project : type : sources * : requirements * : - default-build * : usage-requirements * ) - { - typed-target.__init__ $(name) : $(project) : $(type) : $(sources) : - $(requirements) : $(default-build) : $(usage-requirements) ; - } - - rule construct ( name : source-targets * : property-set ) - { - local result ; - for local s in $(source-targets) - { - if ! [ class.is-a $(s) : file-target ] - { - import errors ; - errors.user-error Source to the 'cast' rule is not a file! ; - } - if [ $(s).action ] - { - import errors ; - errors.user-error Only non-derived target are allowed for - 'cast'. : when building [ full-name ] ; - } - local r = [ $(s).clone-with-different-type $(self.type) ] ; - result += [ virtual-target.register $(r) ] ; - } - return [ property-set.empty ] $(result) ; - } -} - - -rule cast ( name type : sources * : requirements * : default-build * : - usage-requirements * ) -{ - local project = [ project.current ] ; - - local real-type = [ type.type-from-rule-name $(type) ] ; - if ! 
$(real-type) - { - errors.user-error No type corresponds to the main target rule name - '$(type)' : "Hint: try a lowercase name" ; - } - - targets.main-target-alternative [ new cast-target-class $(name) : $(project) - : $(real-type) - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : - $(project) ] ] ; -} - - -IMPORT $(__name__) : cast : : cast ; diff --git a/jam-files/boost-build/tools/cast.py b/jam-files/boost-build/tools/cast.py deleted file mode 100644 index 8f053f11..00000000 --- a/jam-files/boost-build/tools/cast.py +++ /dev/null @@ -1,69 +0,0 @@ -# Status: ported -# Base revision: 64432. -# Copyright 2005-2010 Vladimir Prus. -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target 'cast', used to change type for target. For example, in Qt -# library one wants two kinds of CPP files -- those that just compiled and those -# that are passed via the MOC tool. -# -# This is done with: -# -# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ; -# -# Boost.Build will assing target type CPP to both main.cpp and widget.cpp. Then, -# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt -# support will run the MOC tool as part of the build process. -# -# At the moment, the 'cast' rule only works for non-derived (source) targets. -# -# TODO: The following comment is unclear or incorrect. Clean it up. -# > Another solution would be to add a separate main target 'moc-them' that -# > would moc all the passed sources, no matter what their type is, but I prefer -# > cast, as defining a new target type + generator for that type is somewhat -# > simpler than defining a main target rule. 
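The "lowercase name" hint in the cast rule above reflects how a main-target rule name is looked up among registered types. A minimal Python sketch of that lookup, under the assumption that it is simply a case-sensitive match against lowercased type names (hypothetical helper, not the real type.type-from-rule-name):

    def type_from_rule_name(rule_name, registered_types):
        # Map each registered type to its lowercase rule-name form.
        by_rule_name = {t.lower(): t for t in registered_types}
        return by_rule_name.get(rule_name)

    print(type_from_rule_name("moccable-cpp", {"MOCCABLE-CPP", "CPP", "H"}))   # MOCCABLE-CPP
    print(type_from_rule_name("MOCCABLE-CPP", {"MOCCABLE-CPP"}))               # None -> triggers the hint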
- -import b2.build.targets as targets -import b2.build.virtual_target as virtual_target - -from b2.manager import get_manager -from b2.util import bjam_signature - -class CastTargetClass(targets.TypedTarget): - - def construct(name, source_targets, ps): - result = [] - for s in source_targets: - if not isinstance(s, virtual_targets.FileTarget): - get_manager().errors()("Source to the 'cast' metatager is not a file") - - if s.action(): - get_manager().errors()("Only non-derived targets allowed as sources for 'cast'.") - - - r = s.clone_with_different_type(self.type()) - result.append(get_manager().virtual_targets().register(r)) - - return result - - -@bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"], - ["default_build", "*"], ["usage_requirements", "*"])) -def cast(name, type, sources, requirements, default_build, usage_requirements): - - from b2.manager import get_manager - t = get_manager().targets() - - project = get_manager().projects().current() - - return t.main_target_alternative( - CastTargetClass(name, project, type, - t.main_target_sources(sources, name), - t.main_target_requirements(requirements, project), - t.main_target_default_build(default_build, project), - t.main_target_usage_requirements(usage_requirements, project))) - - -get_manager().projects().add_rule("cast", cast) diff --git a/jam-files/boost-build/tools/clang-darwin.jam b/jam-files/boost-build/tools/clang-darwin.jam deleted file mode 100644 index a8abc7d6..00000000 --- a/jam-files/boost-build/tools/clang-darwin.jam +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Noel Belcourt 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -import clang ; -import feature : feature ; -import os ; -import toolset ; -import toolset : flags ; -import gcc ; -import common ; -import errors ; -import generators ; - -feature.extend-subfeature toolset clang : platform : darwin ; - -toolset.inherit-generators clang-darwin - <toolset>clang <toolset-clang:platform>darwin - : gcc - # Don't inherit PCH generators. They were not tested, and probably - # don't work for this compiler. - : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch - ; - -generators.override clang-darwin.prebuilt : builtin.lib-generator ; -generators.override clang-darwin.prebuilt : builtin.prebuilt ; -generators.override clang-darwin.searched-lib-generator : searched-lib-generator ; - -toolset.inherit-rules clang-darwin : gcc ; -toolset.inherit-flags clang-darwin : gcc - : <inlining>off <inlining>on <inlining>full <optimization>space - <warnings>off <warnings>all <warnings>on - <architecture>x86/<address-model>32 - <architecture>x86/<address-model>64 - ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -# vectorization diagnostics -feature vectorize : off on full ; - -# Initializes the clang-darwin toolset -# version in optional -# name (default clang++) is used to invoke the specified clang complier -# compile and link options allow you to specify addition command line options for each version -rule init ( version ? 
: command * : options * ) -{ - command = [ common.get-invocation-command clang-darwin : clang++ - : $(command) ] ; - - # Determine the version - local command-string = $(command:J=" ") ; - if $(command) - { - version ?= [ MATCH "^([0-9.]+)" - : [ SHELL "$(command-string) -dumpversion" ] ] ; - } - - local condition = [ common.check-init-parameters clang-darwin - : version $(version) ] ; - - common.handle-options clang-darwin : $(condition) : $(command) : $(options) ; - - gcc.init-link-flags clang-darwin darwin $(condition) ; - -} - -SPACE = " " ; - -flags clang-darwin.compile OPTIONS <cflags> ; -flags clang-darwin.compile OPTIONS <cxxflags> ; -# flags clang-darwin.compile INCLUDES <include> ; - -# Declare flags and action for compilation. -toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ; -toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ; - -toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ; -toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ; -toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ; - -toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ; -toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ; -toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ; -toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ; - -toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ; -toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ; -toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ; - -actions compile.c -{ - "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -flags clang-darwin ARFLAGS <archiveflags> ; - -# Default value. Mostly for the sake of clang-linux -# that inherits from gcc, but does not has the same -# logic to set the .AR variable. We can put the same -# logic in clang-linux, but that's hardly worth the trouble -# as on Linux, 'ar' is always available. -.AR = ar ; - -rule archive ( targets * : sources * : properties * ) -{ - # Always remove archive and start again. Here's rationale from - # Andre Hentz: - # - # I had a file, say a1.c, that was included into liba.a. - # I moved a1.c to a2.c, updated my Jamfiles and rebuilt. - # My program was crashing with absurd errors. - # After some debugging I traced it back to the fact that a1.o was *still* - # in liba.a - # - # Rene Rivera: - # - # Originally removing the archive was done by splicing an RM - # onto the archive action. That makes archives fail to build on NT - # when they have many files because it will no longer execute the - # action directly and blow the line length limit. Instead we - # remove the file in a different action, just before the building - # of the archive. 
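A standalone Python sketch of the precaution explained above, mirroring the archive action that follows (assumes 'ar' and 'ranlib' on PATH; illustrative only):

    # Remove a possibly stale archive before re-creating it: 'ar rc' adds and
    # replaces members but never drops ones whose source file went away.
    import os
    import subprocess

    def build_archive(archive, objects, ar="ar"):
        if os.path.exists(archive):
            os.remove(archive)                      # e.g. drop the old a1.o member
        subprocess.run([ar, "rc", archive, *objects], check=True)
        subprocess.run(["ranlib", "-cs", archive], check=True)   # as in the action below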
- # - local clean.a = $(targets[1])(clean) ; - TEMPORARY $(clean.a) ; - NOCARE $(clean.a) ; - LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; - DEPENDS $(clean.a) : $(sources) ; - DEPENDS $(targets) : $(clean.a) ; - common.RmTemps $(clean.a) : $(targets) ; -} - -actions piecemeal archive -{ - "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" - "ranlib" -cs "$(<)" -} - -flags clang-darwin.link USER_OPTIONS <linkflags> ; - -# Declare actions for linking -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; - # Serialize execution of the 'link' action, since - # running N links in parallel is just slower. - JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} diff --git a/jam-files/boost-build/tools/clang-linux.jam b/jam-files/boost-build/tools/clang-linux.jam deleted file mode 100644 index 036d749e..00000000 --- a/jam-files/boost-build/tools/clang-linux.jam +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright (c) 2003 Michael Stevens -# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer) -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import toolset ; -import feature ; -import toolset : flags ; - -import clang ; -import gcc ; -import common ; -import errors ; -import generators ; -import type ; -import numbers ; - -feature.extend-subfeature toolset clang : platform : linux ; - -toolset.inherit-generators clang-linux - <toolset>clang <toolset-clang:platform>linux : gcc - : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ; -generators.override clang-linux.prebuilt : builtin.lib-generator ; -generators.override clang-linux.prebuilt : builtin.prebuilt ; -generators.override clang-linux.searched-lib-generator : searched-lib-generator ; - -# Override default do-nothing generators. -generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ; -generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ; - -type.set-generated-target-suffix PCH - : <toolset>clang <toolset-clang:platform>linux : pth ; - -toolset.inherit-rules clang-linux : gcc ; -toolset.inherit-flags clang-linux : gcc - : <inlining>off <inlining>on <inlining>full - <optimization>space <optimization>speed - <warnings>off <warnings>all <warnings>on ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] { - .debug-configuration = true ; -} - -rule init ( version ? 
: command * : options * ) { - command = [ common.get-invocation-command clang-linux : clang++ - : $(command) ] ; - - # Determine the version - local command-string = $(command:J=" ") ; - - if $(command) { - version ?= [ MATCH "version ([0-9.]+)" - : [ SHELL "$(command-string) --version" ] ] ; - } - - local condition = [ common.check-init-parameters clang-linux - : version $(version) ] ; - - common.handle-options clang-linux : $(condition) : $(command) : $(options) ; - - gcc.init-link-flags clang-linux gnu $(condition) ; -} - -############################################################################### -# Flags - -toolset.flags clang-linux.compile OPTIONS <cflags> ; -toolset.flags clang-linux.compile OPTIONS <cxxflags> ; - -toolset.flags clang-linux.compile OPTIONS <optimization>off : ; -toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ; - -# note: clang silently ignores some of these inlining options -toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ; -toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ; -toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ; - -toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ; -toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ; -toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ; -toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ; - -toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ; -toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ; -toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ; - -############################################################################### -# C and C++ compilation - -rule compile.c++ ( targets * : sources * : properties * ) { - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - - local pth-file = [ on $(<) return $(PCH_FILE) ] ; - - if $(pth-file) { - DEPENDS $(<) : $(pth-file) ; - compile.c++.with-pch $(targets) : $(sources) ; - } - else { - compile.c++.without-pth $(targets) : $(sources) ; - } -} - -actions compile.c++.without-pth { - "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" -} - -actions compile.c++.with-pch bind PCH_FILE -{ - "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)" -} - -rule compile.c ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - - local pth-file = [ on $(<) return $(PCH_FILE) ] ; - - if $(pth-file) { - DEPENDS $(<) : $(pth-file) ; - compile.c.with-pch $(targets) : $(sources) ; - } - else { - compile.c.without-pth $(targets) : $(sources) ; - } -} - -actions compile.c.without-pth -{ - "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c.with-pch bind PCH_FILE -{ - "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)" -} - 
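For reference, the version probe in the init rule above can be expressed in a few lines of Python; this sketch assumes a 'clang++' executable on PATH and is illustrative only:

    # Run "clang++ --version" and pull out the first "version X.Y.Z" match,
    # much like the MATCH/SHELL pair in the init rule above.
    import re
    import subprocess

    def detect_clang_version(command="clang++"):
        out = subprocess.run([command, "--version"],
                             capture_output=True, text=True).stdout
        match = re.search(r"version ([0-9.]+)", out)
        return match.group(1) if match else None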
-############################################################################### -# PCH emission - -rule compile.c++.pch ( targets * : sources * : properties * ) { - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c++.pch { - rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)" -} - -rule compile.c.pch ( targets * : sources * : properties * ) { - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c.pch -{ - rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)" -} - -############################################################################### -# Linking - -SPACE = " " ; - -rule link ( targets * : sources * : properties * ) { - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - SPACE on $(targets) = " " ; - JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ; -} - -actions link bind LIBRARIES { - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - -rule link.dll ( targets * : sources * : properties * ) { - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - SPACE on $(targets) = " " ; - JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ; -} - -# Differ from 'link' above only by -shared. -actions link.dll bind LIBRARIES { - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - diff --git a/jam-files/boost-build/tools/clang.jam b/jam-files/boost-build/tools/clang.jam deleted file mode 100644 index e0ac9a55..00000000 --- a/jam-files/boost-build/tools/clang.jam +++ /dev/null @@ -1,27 +0,0 @@ -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This is a generic 'clang' toolset. Depending on the current system, it -# forwards either to 'clang-unix' or 'clang-darwin' modules. 
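The dispatch described above comes down to a single host-OS check; a minimal Python sketch of the same decision (illustrative, not part of the toolset):

    # Forward to the Darwin flavour on Mac OS X, otherwise to the Linux flavour.
    import platform

    def clang_toolset_module():
        return "clang-darwin" if platform.system() == "Darwin" else "clang-linux"

    print(clang_toolset_module())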
- -import feature ; -import os ; -import toolset ; - -feature.extend toolset : clang ; -feature.subfeature toolset clang : platform : : propagated link-incompatible ; - -rule init ( * : * ) -{ - if [ os.name ] = MACOSX - { - toolset.using clang-darwin : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - else - { - toolset.using clang-linux : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } -} diff --git a/jam-files/boost-build/tools/common.jam b/jam-files/boost-build/tools/common.jam deleted file mode 100644 index ed835a36..00000000 --- a/jam-files/boost-build/tools/common.jam +++ /dev/null @@ -1,994 +0,0 @@ -# Copyright 2003, 2005 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2005 Toon Knapen -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Provides actions common to all toolsets, such as creating directories and -# removing files. - -import os ; -import modules ; -import utility ; -import print ; -import type ; -import feature ; -import errors ; -import path ; -import sequence ; -import toolset ; -import virtual-target ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} -if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ] -{ - .show-configuration = true ; -} - -# Configurations -# -# The following class helps to manage toolset configurations. Each configuration -# has a unique ID and one or more parameters. A typical example of a unique ID -# is a condition generated by 'common.check-init-parameters' rule. Other kinds -# of IDs can be used. Parameters may include any details about the configuration -# like 'command', 'path', etc. -# -# A toolset configuration may be in one of the following states: -# -# - registered -# Configuration has been registered (e.g. explicitly or by auto-detection -# code) but has not yet been marked as used, i.e. 'toolset.using' rule has -# not yet been called for it. -# - used -# Once called 'toolset.using' rule marks the configuration as 'used'. -# -# The main difference between the states above is that while a configuration is -# 'registered' its options can be freely changed. This is useful in particular -# for autodetection code - all detected configurations may be safely overwritten -# by user code. - -class configurations -{ - import errors ; - - rule __init__ ( ) - { - } - - # Registers a configuration. - # - # Returns 'true' if the configuration has been added and an empty value if - # it already exists. Reports an error if the configuration is 'used'. - # - rule register ( id ) - { - if $(id) in $(self.used) - { - errors.error "common: the configuration '$(id)' is in use" ; - } - - local retval ; - - if ! $(id) in $(self.all) - { - self.all += $(id) ; - - # Indicate that a new configuration has been added. - retval = true ; - } - - return $(retval) ; - } - - # Mark a configuration as 'used'. - # - # Returns 'true' if the state of the configuration has been changed to - # 'used' and an empty value if it the state has not been changed. Reports an - # error if the configuration is not known. - # - rule use ( id ) - { - if ! $(id) in $(self.all) - { - errors.error "common: the configuration '$(id)' is not known" ; - } - - local retval ; - - if ! $(id) in $(self.used) - { - self.used += $(id) ; - - # Indicate that the configuration has been marked as 'used'. 
- retval = true ; - } - - return $(retval) ; - } - - # Return all registered configurations. - # - rule all ( ) - { - return $(self.all) ; - } - - # Return all used configurations. - # - rule used ( ) - { - return $(self.used) ; - } - - # Returns the value of a configuration parameter. - # - rule get ( id : param ) - { - return $(self.$(param).$(id)) ; - } - - # Sets the value of a configuration parameter. - # - rule set ( id : param : value * ) - { - self.$(param).$(id) = $(value) ; - } -} - - -# The rule for checking toolset parameters. Trailing parameters should all be -# parameter name/value pairs. The rule will check that each parameter either has -# a value in each invocation or has no value in each invocation. Also, the rule -# will check that the combination of all parameter values is unique in all -# invocations. -# -# Each parameter name corresponds to a subfeature. This rule will declare a -# subfeature the first time a non-empty parameter value is passed and will -# extend it with all the values. -# -# The return value from this rule is a condition to be used for flags settings. -# -rule check-init-parameters ( toolset requirement * : * ) -{ - local sig = $(toolset) ; - local condition = <toolset>$(toolset) ; - local subcondition ; - for local index in 2 3 4 5 6 7 8 9 - { - local name = $($(index)[1]) ; - local value = $($(index)[2]) ; - - if $(value)-is-not-empty - { - condition = $(condition)-$(value) ; - if $(.had-unspecified-value.$(toolset).$(name)) - { - errors.user-error - "$(toolset) initialization: parameter '$(name)'" - "inconsistent" : "no value was specified in earlier" - "initialization" : "an explicit value is specified now" ; - } - # The below logic is for intel compiler. It calls this rule with - # 'intel-linux' and 'intel-win' as toolset, so we need to get the - # base part of toolset name. We can not pass 'intel' as toolset - # because in that case it will be impossible to register versionless - # intel-linux and intel-win toolsets of a specific version. - local t = $(toolset) ; - local m = [ MATCH ([^-]*)- : $(toolset) ] ; - if $(m) - { - t = $(m[1]) ; - } - if ! $(.had-value.$(toolset).$(name)) - { - if ! $(.declared-subfeature.$(t).$(name)) - { - feature.subfeature toolset $(t) : $(name) : : propagated ; - .declared-subfeature.$(t).$(name) = true ; - } - .had-value.$(toolset).$(name) = true ; - } - feature.extend-subfeature toolset $(t) : $(name) : $(value) ; - subcondition += <toolset-$(t):$(name)>$(value) ; - } - else - { - if $(.had-value.$(toolset).$(name)) - { - errors.user-error - "$(toolset) initialization: parameter '$(name)'" - "inconsistent" : "an explicit value was specified in an" - "earlier initialization" : "no value is specified now" ; - } - .had-unspecified-value.$(toolset).$(name) = true ; - } - sig = $(sig)$(value:E="")- ; - } - if $(sig) in $(.all-signatures) - { - local message = - "duplicate initialization of $(toolset) with the following parameters: " ; - for local index in 2 3 4 5 6 7 8 9 - { - local p = $($(index)) ; - if $(p) - { - message += "$(p[1]) = $(p[2]:E=<unspecified>)" ; - } - } - message += "previous initialization at $(.init-loc.$(sig))" ; - errors.user-error - $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) : - $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ; - } - .all-signatures += $(sig) ; - .init-loc.$(sig) = [ errors.nearest-user-location ] ; - - # If we have a requirement, this version should only be applied under that - # condition. 
To accomplish this we add a toolset requirement that imposes - # the toolset subcondition, which encodes the version. - if $(requirement) - { - local r = <toolset>$(toolset) $(requirement) ; - r = $(r:J=,) ; - toolset.add-requirements $(r):$(subcondition) ; - } - - # We add the requirements, if any, to the condition to scope the toolset - # variables and options to this specific version. - condition += $(requirement) ; - - if $(.show-configuration) - { - ECHO notice: $(condition) ; - } - return $(condition:J=/) ; -} - - -# A helper rule to get the command to invoke some tool. If -# 'user-provided-command' is not given, tries to find binary named 'tool' in -# PATH and in the passed 'additional-path'. Otherwise, verifies that the first -# element of 'user-provided-command' is an existing program. -# -# This rule returns the command to be used when invoking the tool. If we can not -# find the tool, a warning is issued. If 'path-last' is specified, PATH is -# checked after 'additional-paths' when searching for 'tool'. -# -rule get-invocation-command-nodefault ( toolset : tool : - user-provided-command * : additional-paths * : path-last ? ) -{ - local command ; - if ! $(user-provided-command) - { - command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ; - if ! $(command) && $(.debug-configuration) - { - ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ; - ECHO "warning: initialized from" [ errors.nearest-user-location ] ; - } - } - else - { - command = [ check-tool $(user-provided-command) ] ; - if ! $(command) && $(.debug-configuration) - { - ECHO "warning: toolset $(toolset) initialization: " ; - ECHO "warning: can not find user-provided command " '$(user-provided-command)' ; - ECHO "warning: initialized from" [ errors.nearest-user-location ] ; - } - } - - return $(command) ; -} - - -# Same as get-invocation-command-nodefault, except that if no tool is found, -# returns either the user-provided-command, if present, or the 'tool' parameter. -# -rule get-invocation-command ( toolset : tool : user-provided-command * : - additional-paths * : path-last ? ) -{ - local result = [ get-invocation-command-nodefault $(toolset) : $(tool) : - $(user-provided-command) : $(additional-paths) : $(path-last) ] ; - - if ! $(result) - { - if $(user-provided-command) - { - result = $(user-provided-command) ; - } - else - { - result = $(tool) ; - } - } - return $(result) ; -} - - -# Given an invocation command return the absolute path to the command. This -# works even if command has no path element and was found on the PATH. -# -rule get-absolute-tool-path ( command ) -{ - if $(command:D) - { - return $(command:D) ; - } - else - { - local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ; - return $(m[1]:D) ; - } -} - - -# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'. -# If found in PATH, returns 'name' and if found in additional paths, returns -# absolute name. If the tool is found in several directories, returns the -# first path found. Otherwise, returns an empty string. If 'path-last' is -# specified, PATH is searched after 'additional-paths'. -# -local rule find-tool ( name : additional-paths * : path-last ? ) -{ - local path = [ path.programs-path ] ; - local match = [ path.glob $(path) : $(name) $(name).exe ] ; - local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ; - - local result ; - if $(path-last) - { - result = $(additional-match) ; - if ! 
$(result) && $(match) - { - result = $(name) ; - } - } - else - { - if $(match) - { - result = $(name) ; - } - else - { - result = $(additional-match) ; - } - } - if $(result) - { - return [ path.native $(result[1]) ] ; - } -} - - -# Checks if 'command' can be found either in path or is a full name to an -# existing file. -# -local rule check-tool-aux ( command ) -{ - if $(command:D) - { - if [ path.exists $(command) ] - # Both NT and Cygwin will run .exe files by their unqualified names. - || ( [ os.on-windows ] && [ path.exists $(command).exe ] ) - # Only NT will run .bat & .cmd files by their unqualified names. - || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] || - [ path.exists $(command).cmd ] ) ) - { - return $(command) ; - } - } - else - { - if [ GLOB [ modules.peek : PATH Path path ] : $(command) ] - { - return $(command) ; - } - } -} - - -# Checks that a tool can be invoked by 'command'. If command is not an absolute -# path, checks if it can be found in 'path'. If comand is an absolute path, -# check that it exists. Returns 'command' if ok or empty string otherwise. -# -local rule check-tool ( xcommand + ) -{ - if [ check-tool-aux $(xcommand[1]) ] || - [ check-tool-aux $(xcommand[-1]) ] - { - return $(xcommand) ; - } -} - - -# Handle common options for toolset, specifically sets the following flag -# variables: -# - CONFIG_COMMAND to $(command) -# - OPTIONS for compile to the value of <compileflags> in $(options) -# - OPTIONS for compile.c to the value of <cflags> in $(options) -# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options) -# - OPTIONS for compile.fortran to the value of <fflags> in $(options) -# - OPTIONS for link to the value of <linkflags> in $(options) -# -rule handle-options ( toolset : condition * : command * : options * ) -{ - if $(.debug-configuration) - { - ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ; - } - - # The last parameter ('unchecked') says it is OK to set flags for another - # module. - toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command) - : unchecked ; - - toolset.flags $(toolset).compile OPTIONS $(condition) : - [ feature.get-values <compileflags> : $(options) ] : unchecked ; - - toolset.flags $(toolset).compile.c OPTIONS $(condition) : - [ feature.get-values <cflags> : $(options) ] : unchecked ; - - toolset.flags $(toolset).compile.c++ OPTIONS $(condition) : - [ feature.get-values <cxxflags> : $(options) ] : unchecked ; - - toolset.flags $(toolset).compile.fortran OPTIONS $(condition) : - [ feature.get-values <fflags> : $(options) ] : unchecked ; - - toolset.flags $(toolset).link OPTIONS $(condition) : - [ feature.get-values <linkflags> : $(options) ] : unchecked ; -} - - -# Returns the location of the "program files" directory on a Windows platform. -# -rule get-program-files-dir ( ) -{ - local ProgramFiles = [ modules.peek : ProgramFiles ] ; - if $(ProgramFiles) - { - ProgramFiles = "$(ProgramFiles:J= )" ; - } - else - { - ProgramFiles = "c:\\Program Files" ; - } - return $(ProgramFiles) ; -} - - -if [ os.name ] = NT -{ - RM = del /f /q ; - CP = copy /b ; - IGNORE = "2>nul >nul & setlocal" ; - LN ?= $(CP) ; - # Ugly hack to convince copy to set the timestamp of the - # destination to the current time by concatenating the - # source with a nonexistent file. Note that this requires - # /b (binary) as the default when concatenating files is /a (ascii). 
- WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ; -} -else -{ - RM = rm -f ; - CP = cp ; - LN = ln ; -} - - -rule rm-command ( ) -{ - return $(RM) ; -} - - -rule copy-command ( ) -{ - return $(CP) ; -} - - -if "\n" = "n" -{ - # Escape characters are not supported. Use ugly hacks that won't work, - # see below. - nl = " -" ; - q = "" ; -} -else -{ - nl = "\n" ; - q = "\"" ; -} - -# Returns the command needed to set an environment variable on the current -# platform. The variable setting persists through all following commands and is -# visible in the environment seen by subsequently executed commands. In other -# words, on Unix systems, the variable is exported, which is consistent with the -# only possible behavior on Windows systems. -# -rule variable-setting-command ( variable : value ) -{ - if [ os.name ] = NT - { - return "set $(variable)=$(value)$(nl)" ; - } - else - { - # If we don't have escape characters support in bjam, the below blows - # up on CYGWIN, since the $(nl) variable holds a Windows new-line \r\n - # sequence that messes up the executed export command which then reports - # that the passed variable name is incorrect. - # But we have a check for cygwin in kernel/bootstrap.jam already. - return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ; - } -} - - -# Returns a command to sets a named shell path variable to the given NATIVE -# paths on the current platform. -# -rule path-variable-setting-command ( variable : paths * ) -{ - local sep = [ os.path-separator ] ; - return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ; -} - - -# Returns a command that prepends the given paths to the named path variable on -# the current platform. -# -rule prepend-path-variable-command ( variable : paths * ) -{ - return [ path-variable-setting-command $(variable) - : $(paths) [ os.expand-variable $(variable) ] ] ; -} - - -# Return a command which can create a file. If 'r' is result of invocation, then -# 'r foobar' will create foobar with unspecified content. What happens if file -# already exists is unspecified. -# -rule file-creation-command ( ) -{ - if [ os.name ] = NT - { - # A few alternative implementations on Windows: - # - # 'type NUL >> ' - # That would construct an empty file instead of a file containing - # a space and an end-of-line marker but it would also not change - # the target's timestamp in case the file already exists. - # - # 'type NUL > ' - # That would construct an empty file instead of a file containing - # a space and an end-of-line marker but it would also destroy an - # already existing file by overwriting it with an empty one. - # - # I guess the best solution would be to allow Boost Jam to define - # built-in functions such as 'create a file', 'touch a file' or 'copy a - # file' which could be used from inside action code. That would allow - # completely portable operations without this kind of kludge. - # (22.02.2009.) (Jurko) - return "echo. > " ; - } - else - { - return "touch " ; - } -} - - -# Returns a command that may be used for 'touching' files. It is not a real -# 'touch' command on NT because it adds an empty line at the end of file but it -# works with source files. -# -rule file-touch-command ( ) -{ - if [ os.name ] = NT - { - return "echo. >> " ; - } - else - { - return "touch " ; - } -} - - -rule MkDir -{ - # If dir exists, do not update it. Do this even for $(DOT). - NOUPDATE $(<) ; - - if $(<) != $(DOT) && ! $($(<)-mkdir) - { - # Cheesy gate to prevent multiple invocations on same dir. 
- $(<)-mkdir = true ; - - # Schedule the mkdir build action. - common.mkdir $(<) ; - - # Prepare a Jam 'dirs' target that can be used to make the build only - # construct all the target directories. - DEPENDS dirs : $(<) ; - - # Recursively create parent directories. $(<:P) = $(<)'s parent & we - # recurse until root. - - local s = $(<:P) ; - if [ os.name ] = NT - { - switch $(s) - { - case *: : s = ; - case *:\\ : s = ; - } - } - - if $(s) - { - if $(s) != $(<) - { - DEPENDS $(<) : $(s) ; - MkDir $(s) ; - } - else - { - NOTFILE $(s) ; - } - } - } -} - - -#actions MkDir1 -#{ -# mkdir "$(<)" -#} - -# The following quick-fix actions should be replaced using the original MkDir1 -# action once Boost Jam gets updated to correctly detect different paths leading -# up to the same filesystem target and triggers their build action only once. -# (todo) (04.07.2008.) (Jurko) - -if [ os.name ] = NT -{ - actions mkdir - { - if not exist "$(<)\\" mkdir "$(<)" - } -} -else -{ - actions mkdir - { - mkdir -p "$(<)" - } -} - -actions piecemeal together existing Clean -{ - $(RM) "$(>)" -} - - -rule copy -{ -} - - -actions copy -{ - $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)" -} - - -rule RmTemps -{ -} - - -actions quietly updated piecemeal together RmTemps -{ - $(RM) "$(>)" $(IGNORE) -} - - -actions hard-link -{ - $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT) - $(LN) "$(>)" "$(<)" $(NULL_OUT) -} - - -# Given a target, as given to a custom tag rule, returns a string formatted -# according to the passed format. Format is a list of properties that is -# represented in the result. For each element of format the corresponding target -# information is obtained and added to the result string. For all, but the -# literal, the format value is taken as the as string to prepend to the output -# to join the item to the rest of the result. If not given "-" is used as a -# joiner. -# -# The format options can be: -# -# <base>[joiner] -# :: The basename of the target name. -# <toolset>[joiner] -# :: The abbreviated toolset tag being used to build the target. -# <threading>[joiner] -# :: Indication of a multi-threaded build. -# <runtime>[joiner] -# :: Collective tag of the build runtime. -# <version:/version-feature | X.Y[.Z]/>[joiner] -# :: Short version tag taken from the given "version-feature" in the -# build properties. Or if not present, the literal value as the -# version number. -# <property:/property-name/>[joiner] -# :: Direct lookup of the given property-name value in the build -# properties. /property-name/ is a regular expression. E.g. -# <property:toolset-.*:flavor> will match every toolset. -# /otherwise/ -# :: The literal value of the format argument. -# -# For example this format: -# -# boost_ <base> <toolset> <threading> <runtime> <version:boost-version> -# -# Might return: -# -# boost_thread-vc80-mt-gd-1_33.dll, or -# boost_regex-vc80-gd-1_33.dll -# -# The returned name also has the target type specific prefix and suffix which -# puts it in a ready form to use as the value from a custom tag rule. -# -rule format-name ( format * : name : type ? 
: property-set ) -{ - local result = "" ; - for local f in $(format) - { - switch $(f:G) - { - case <base> : - local matched = [ MATCH "^(boost.*python)-.*" : $(name) ] ; - if $(matched) = boost_python || $(matched) = boost_mpi_python - { - result += $(name) ; - } - else - { - result += $(name:B) ; - } - - case <toolset> : - result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) : - $(property-set) ] ] ; - - case <threading> : - result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type) - : $(property-set) ] ] ; - - case <runtime> : - result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) : - $(property-set) ] ] ; - - case <qt> : - result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) : - $(property-set) ] ] ; - - case <address-model> : - result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) : - $(property-set) ] ] ; - - case <version:*> : - local key = [ MATCH <version:(.*)> : $(f:G) ] ; - local version = [ $(property-set).get <$(key)> ] ; - version ?= $(key) ; - version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ; - result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ; - - case <property:*> : - local key = [ MATCH <property:(.*)> : $(f:G) ] ; - local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ; - if $(p0) - { - local p = [ $(property-set).get <$(p0)> ] ; - if $(p) - { - result += [ join-tag $(f:G=) : $(p) ] ; - } - } - - case * : - result += $(f:G=) ; - } - } - result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) : - $(property-set) ] ; - return $(result) ; -} - - -local rule join-tag ( joiner ? : tag ? ) -{ - if ! $(joiner) { joiner = - ; } - return $(joiner)$(tag) ; -} - - -local rule toolset-tag ( name : type ? : property-set ) -{ - local tag = ; - - local properties = [ $(property-set).raw ] ; - switch [ $(property-set).get <toolset> ] - { - case borland* : tag += bcb ; - case clang* : - { - switch [ $(property-set).get <toolset-clang:platform> ] - { - case darwin : tag += clang-darwin ; - case linux : tag += clang ; - } - } - case como* : tag += como ; - case cw : tag += cw ; - case darwin* : tag += xgcc ; - case edg* : tag += edg ; - case gcc* : - { - switch [ $(property-set).get <toolset-gcc:flavor> ] - { - case *mingw* : tag += mgw ; - case * : tag += gcc ; - } - } - case intel : - if [ $(property-set).get <toolset-intel:platform> ] = win - { - tag += iw ; - } - else - { - tag += il ; - } - case kcc* : tag += kcc ; - case kylix* : tag += bck ; - #case metrowerks* : tag += cw ; - #case mingw* : tag += mgw ; - case mipspro* : tag += mp ; - case msvc* : tag += vc ; - case qcc* : tag += qcc ; - case sun* : tag += sw ; - case tru64cxx* : tag += tru ; - case vacpp* : tag += xlc ; - } - local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)" - : $(properties) ] ; - # For historical reasons, vc6.0 and vc7.0 use different naming. - if $(tag) = vc - { - if $(version[1]) = 6 - { - # Cancel minor version. - version = 6 ; - } - else if $(version[1]) = 7 && $(version[2]) = 0 - { - version = 7 ; - } - } - # On intel, version is not added, because it does not matter and it is the - # version of vc used as backend that matters. Ideally, we should encode the - # backend version but that would break compatibility with V1. - if $(tag) = iw - { - version = ; - } - - # On borland, version is not added for compatibility with V1. - if $(tag) = bcb - { - version = ; - } - - tag += $(version) ; - - return $(tag:J=) ; -} - - -local rule threading-tag ( name : type ? 
: property-set ) -{ - local tag = ; - local properties = [ $(property-set).raw ] ; - if <threading>multi in $(properties) { tag = mt ; } - - return $(tag:J=) ; -} - - -local rule runtime-tag ( name : type ? : property-set ) -{ - local tag = ; - - local properties = [ $(property-set).raw ] ; - if <runtime-link>static in $(properties) { tag += s ; } - - # This is an ugly thing. In V1, there is code to automatically detect which - # properties affect a target. So, if <runtime-debugging> does not affect gcc - # toolset, the tag rules will not even see <runtime-debugging>. Similar - # functionality in V2 is not implemented yet, so we just check for toolsets - # known to care about runtime debugging. - if ( <toolset>msvc in $(properties) ) || - ( <stdlib>stlport in $(properties) ) || - ( <toolset-intel:platform>win in $(properties) ) - { - if <runtime-debugging>on in $(properties) { tag += g ; } - } - - if <python-debugging>on in $(properties) { tag += y ; } - if <variant>debug in $(properties) { tag += d ; } - if <stdlib>stlport in $(properties) { tag += p ; } - if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; } - - return $(tag:J=) ; -} - -# Create a tag for the Qt library version -# "<qt>4.6.0" will result in tag "qt460" -local rule qt-tag ( name : type ? : property-set ) -{ - local properties = [ $(property-set).get <qt> ] ; - local version = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)" - : $(properties) ] ; - local tag = "qt"$(version:J=) ; - return $(tag) ; -} - -# Create a tag for the address-model -# <address-model>64 will simply generate "64" -local rule address-model-tag ( name : type ? : property-set ) -{ - local tag = ; - local version = [ $(property-set).get <address-model> ] ; - return $(version) ; -} - -rule __test__ ( ) -{ - import assert ; - - local nl = " -" ; - - local save-os = [ modules.peek os : .name ] ; - - modules.poke os : .name : LINUX ; - - assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)" - : path-variable-setting-command PATH : foo bar baz ; - - assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)" - : prepend-path-variable-command PATH : foo bar ; - - modules.poke os : .name : NT ; - - assert.result "set PATH=foo;bar;baz$(nl)" - : path-variable-setting-command PATH : foo bar baz ; - - assert.result "set PATH=foo;bar;%PATH%$(nl)" - : prepend-path-variable-command PATH : foo bar ; - - modules.poke os : .name : $(save-os) ; -} diff --git a/jam-files/boost-build/tools/common.py b/jam-files/boost-build/tools/common.py deleted file mode 100644 index 612745b8..00000000 --- a/jam-files/boost-build/tools/common.py +++ /dev/null @@ -1,840 +0,0 @@ -# Status: being ported by Steven Watanabe -# Base revision: 47174 -# -# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -""" Provides actions common to all toolsets, such as creating directories and - removing files. -""" - -import re -import bjam -import os -import os.path -import sys - -from b2.build import feature -from b2.util.utility import * -from b2.util import path - -__re__before_first_dash = re.compile ('([^-]*)-') - -def reset (): - """ Clear the module state. This is mainly for testing purposes. - Note that this must be called _after_ resetting the module 'feature'. 
- """ - global __had_unspecified_value, __had_value, __declared_subfeature - global __init_loc - global __all_signatures, __debug_configuration, __show_configuration - - # Stores toolsets without specified initialization values. - __had_unspecified_value = {} - - # Stores toolsets with specified initialization values. - __had_value = {} - - # Stores toolsets with declared subfeatures. - __declared_subfeature = {} - - # Stores all signatures of the toolsets. - __all_signatures = {} - - # Stores the initialization locations of each toolset - __init_loc = {} - - __debug_configuration = '--debug-configuration' in bjam.variable('ARGV') - __show_configuration = '--show-configuration' in bjam.variable('ARGV') - - global __executable_path_variable - OS = bjam.call("peek", [], "OS")[0] - if OS == "NT": - # On Windows the case and capitalization of PATH is not always predictable, so - # let's find out what variable name was really set. - for n in sys.environ: - if n.lower() == "path": - __executable_path_variable = n - break - else: - __executable_path_variable = "PATH" - - m = {"NT": __executable_path_variable, - "CYGWIN": "PATH", - "MACOSX": "DYLD_LIBRARY_PATH", - "AIX": "LIBPATH"} - global __shared_library_path_variable - __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH") - -reset() - -def shared_library_path_variable(): - return __shared_library_path_variable - -# ported from trunk@47174 -class Configurations(object): - """ - This class helps to manage toolset configurations. Each configuration - has a unique ID and one or more parameters. A typical example of a unique ID - is a condition generated by 'common.check-init-parameters' rule. Other kinds - of IDs can be used. Parameters may include any details about the configuration - like 'command', 'path', etc. - - A toolset configuration may be in one of the following states: - - - registered - Configuration has been registered (e.g. by autodetection code) but has - not yet been marked as used, i.e. 'toolset.using' rule has not yet been - called for it. - - used - Once called 'toolset.using' rule marks the configuration as 'used'. - - The main difference between the states above is that while a configuration is - 'registered' its options can be freely changed. This is useful in particular - for autodetection code - all detected configurations may be safely overwritten - by user code. - """ - - def __init__(self): - self.used_ = set() - self.all_ = set() - self.params = {} - - def register(self, id): - """ - Registers a configuration. - - Returns True if the configuration has been added and False if - it already exists. Reports an error if the configuration is 'used'. - """ - if id in self.used_: - #FIXME - errors.error("common: the configuration '$(id)' is in use") - - if id not in self.all_: - self.all_ += [id] - - # Indicate that a new configuration has been added. - return True - else: - return False - - def use(self, id): - """ - Mark a configuration as 'used'. - - Returns True if the state of the configuration has been changed to - 'used' and False if it the state wasn't changed. Reports an error - if the configuration isn't known. - """ - if id not in self.all_: - #FIXME: - errors.error("common: the configuration '$(id)' is not known") - - if id not in self.used_: - self.used_ += [id] - - # indicate that the configuration has been marked as 'used' - return True - else: - return False - - def all(self): - """ Return all registered configurations. """ - return self.all_ - - def used(self): - """ Return all used configurations. 
""" - return self.used_ - - def get(self, id, param): - """ Returns the value of a configuration parameter. """ - self.params_.getdefault(param, {}).getdefault(id, None) - - def set (self, id, param, value): - """ Sets the value of a configuration parameter. """ - self.params_.setdefault(param, {})[id] = value - -# Ported from trunk@47174 -def check_init_parameters(toolset, requirement, *args): - """ The rule for checking toolset parameters. Trailing parameters should all be - parameter name/value pairs. The rule will check that each parameter either has - a value in each invocation or has no value in each invocation. Also, the rule - will check that the combination of all parameter values is unique in all - invocations. - - Each parameter name corresponds to a subfeature. This rule will declare a - subfeature the first time a non-empty parameter value is passed and will - extend it with all the values. - - The return value from this rule is a condition to be used for flags settings. - """ - # The type checking here is my best guess about - # what the types should be. - assert(isinstance(toolset, str)) - assert(isinstance(requirement, str) or requirement is None) - sig = toolset - condition = replace_grist(toolset, '<toolset>') - subcondition = [] - - for arg in args: - assert(isinstance(arg, tuple)) - assert(len(arg) == 2) - name = arg[0] - value = arg[1] - assert(isinstance(name, str)) - assert(isinstance(value, str) or value is None) - - str_toolset_name = str((toolset, name)) - - # FIXME: is this the correct translation? - ### if $(value)-is-not-empty - if value is not None: - condition = condition + '-' + value - if __had_unspecified_value.has_key(str_toolset_name): - raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \ - "no value was specified in earlier initialization\n" \ - "an explicit value is specified now" % (toolset, name)) - - # The logic below is for intel compiler. It calls this rule - # with 'intel-linux' and 'intel-win' as toolset, so we need to - # get the base part of toolset name. - # We can't pass 'intel' as toolset, because it that case it will - # be impossible to register versionles intel-linux and - # intel-win of specific version. 
- t = toolset - m = __re__before_first_dash.match(toolset) - if m: - t = m.group(1) - - if not __had_value.has_key(str_toolset_name): - if not __declared_subfeature.has_key(str((t, name))): - feature.subfeature('toolset', t, name, [], ['propagated']) - __declared_subfeature[str((t, name))] = True - - __had_value[str_toolset_name] = True - - feature.extend_subfeature('toolset', t, name, [value]) - subcondition += ['<toolset-' + t + ':' + name + '>' + value ] - - else: - if __had_value.has_key(str_toolset_name): - raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \ - "an explicit value was specified in an earlier initialization\n" \ - "no value is specified now" % (toolset, name)) - - __had_unspecified_value[str_toolset_name] = True - - if value == None: value = '' - - sig = sig + value + '-' - - if __all_signatures.has_key(sig): - message = "duplicate initialization of '%s' with the following parameters: " % toolset - - for arg in args: - name = arg[0] - value = arg[1] - if value == None: value = '<unspecified>' - - message += "'%s' = '%s'\n" % (name, value) - - raise BaseException(message) - - __all_signatures[sig] = True - # FIXME - __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ; - - # If we have a requirement, this version should only be applied under that - # condition. To accomplish this we add a toolset requirement that imposes - # the toolset subcondition, which encodes the version. - if requirement: - r = ['<toolset>' + toolset, requirement] - r = ','.join(r) - toolset.add_requirements([r + ':' + c for c in subcondition]) - - # We add the requirements, if any, to the condition to scope the toolset - # variables and options to this specific version. - condition = [condition] - if requirement: - condition += [requirement] - - if __show_configuration: - print "notice:", condition - return ['/'.join(condition)] - -# Ported from trunk@47077 -def get_invocation_command_nodefault( - toolset, tool, user_provided_command=[], additional_paths=[], path_last=False): - """ - A helper rule to get the command to invoke some tool. If - 'user-provided-command' is not given, tries to find binary named 'tool' in - PATH and in the passed 'additional-path'. Otherwise, verifies that the first - element of 'user-provided-command' is an existing program. - - This rule returns the command to be used when invoking the tool. If we can't - find the tool, a warning is issued. If 'path-last' is specified, PATH is - checked after 'additional-paths' when searching for 'tool'. 
- """ - assert(isinstance(toolset, str)) - assert(isinstance(tool, str)) - assert(isinstance(user_provided_command, list)) - if additional_paths is not None: - assert(isinstance(additional_paths, list)) - assert(all([isinstance(path, str) for path in additional_paths])) - assert(all(isinstance(path, str) for path in additional_paths)) - assert(isinstance(path_last, bool)) - - if not user_provided_command: - command = find_tool(tool, additional_paths, path_last) - if not command and __debug_configuration: - print "warning: toolset", toolset, "initialization: can't find tool, tool" - #FIXME - #print "warning: initialized from" [ errors.nearest-user-location ] ; - else: - command = check_tool(user_provided_command) - if not command and __debug_configuration: - print "warning: toolset", toolset, "initialization:" - print "warning: can't find user-provided command", user_provided_command - #FIXME - #ECHO "warning: initialized from" [ errors.nearest-user-location ] - - assert(isinstance(command, str)) - - return command - -# ported from trunk@47174 -def get_invocation_command(toolset, tool, user_provided_command = [], - additional_paths = [], path_last = False): - """ Same as get_invocation_command_nodefault, except that if no tool is found, - returns either the user-provided-command, if present, or the 'tool' parameter. - """ - - assert(isinstance(toolset, str)) - assert(isinstance(tool, str)) - assert(isinstance(user_provided_command, list)) - if additional_paths is not None: - assert(isinstance(additional_paths, list)) - assert(all([isinstance(path, str) for path in additional_paths])) - assert(isinstance(path_last, bool)) - - result = get_invocation_command_nodefault(toolset, tool, - user_provided_command, - additional_paths, - path_last) - - if not result: - if user_provided_command: - result = user_provided_command[0] - else: - result = tool - - assert(isinstance(result, str)) - - return result - -# ported from trunk@47281 -def get_absolute_tool_path(command): - """ - Given an invocation command, - return the absolute path to the command. This works even if commnad - has not path element and is present in PATH. - """ - if os.path.dirname(command): - return os.path.dirname(command) - else: - programs = path.programs_path() - m = path.glob(programs, [command, command + '.exe' ]) - if not len(m): - print "Could not find:", command, "in", programs - return os.path.dirname(m[0]) - -# ported from trunk@47174 -def find_tool(name, additional_paths = [], path_last = False): - """ Attempts to find tool (binary) named 'name' in PATH and in - 'additional-paths'. If found in path, returns 'name'. If - found in additional paths, returns full name. If the tool - is found in several directories, returns the first path found. - Otherwise, returns the empty string. If 'path_last' is specified, - path is checked after 'additional_paths'. 
- """ - assert(isinstance(name, str)) - assert(isinstance(additional_paths, list)) - assert(isinstance(path_last, bool)) - - programs = path.programs_path() - match = path.glob(programs, [name, name + '.exe']) - additional_match = path.glob(additional_paths, [name, name + '.exe']) - - result = [] - if path_last: - result = additional_match - if not result and match: - result = match - - else: - if match: - result = match - - elif additional_match: - result = additional_match - - if result: - return path.native(result[0]) - else: - return '' - -#ported from trunk@47281 -def check_tool_aux(command): - """ Checks if 'command' can be found either in path - or is a full name to an existing file. - """ - assert(isinstance(command, str)) - dirname = os.path.dirname(command) - if dirname: - if os.path.exists(command): - return command - # Both NT and Cygwin will run .exe files by their unqualified names. - elif on_windows() and os.path.exists(command + '.exe'): - return command - # Only NT will run .bat files by their unqualified names. - elif os_name() == 'NT' and os.path.exists(command + '.bat'): - return command - else: - paths = path.programs_path() - if path.glob(paths, [command]): - return command - -# ported from trunk@47281 -def check_tool(command): - """ Checks that a tool can be invoked by 'command'. - If command is not an absolute path, checks if it can be found in 'path'. - If comand is absolute path, check that it exists. Returns 'command' - if ok and empty string otherwise. - """ - assert(isinstance(command, list)) - assert(all(isinstance(c, str) for c in command)) - #FIXME: why do we check the first and last elements???? - if check_tool_aux(command[0]) or check_tool_aux(command[-1]): - return command - -# ported from trunk@47281 -def handle_options(tool, condition, command, options): - """ Handle common options for toolset, specifically sets the following - flag variables: - - CONFIG_COMMAND to 'command' - - OPTIOns for compile to the value of <compileflags> in options - - OPTIONS for compile.c to the value of <cflags> in options - - OPTIONS for compile.c++ to the value of <cxxflags> in options - - OPTIONS for compile.fortran to the value of <fflags> in options - - OPTIONs for link to the value of <linkflags> in options - """ - from b2.build import toolset - - assert(isinstance(tool, str)) - assert(isinstance(condition, list)) - assert(isinstance(command, str)) - assert(isinstance(options, list)) - assert(command) - toolset.flags(tool, 'CONFIG_COMMAND', condition, [command]) - toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options)) - toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options)) - toolset.flags(tool + '.compile.c++', 'OPTIONS', condition, feature.get_values('<cxxflags>', options)) - toolset.flags(tool + '.compile.fortran', 'OPTIONS', condition, feature.get_values('<fflags>', options)) - toolset.flags(tool + '.link', 'OPTIONS', condition, feature.get_values('<linkflags>', options)) - -# ported from trunk@47281 -def get_program_files_dir(): - """ returns the location of the "program files" directory on a windows - platform - """ - ProgramFiles = bjam.variable("ProgramFiles") - if ProgramFiles: - ProgramFiles = ' '.join(ProgramFiles) - else: - ProgramFiles = "c:\\Program Files" - return ProgramFiles - -# ported from trunk@47281 -def rm_command(): - return __RM - -# ported from trunk@47281 -def copy_command(): - return __CP - -# ported from trunk@47281 -def 
variable_setting_command(variable, value): - """ - Returns the command needed to set an environment variable on the current - platform. The variable setting persists through all following commands and is - visible in the environment seen by subsequently executed commands. In other - words, on Unix systems, the variable is exported, which is consistent with the - only possible behavior on Windows systems. - """ - assert(isinstance(variable, str)) - assert(isinstance(value, str)) - - if os_name() == 'NT': - return "set " + variable + "=" + value + os.linesep - else: - # (todo) - # The following does not work on CYGWIN and needs to be fixed. On - # CYGWIN the $(nl) variable holds a Windows new-line \r\n sequence that - # messes up the executed export command which then reports that the - # passed variable name is incorrect. This is most likely due to the - # extra \r character getting interpreted as a part of the variable name. - # - # Several ideas pop to mind on how to fix this: - # * One way would be to separate the commands using the ; shell - # command separator. This seems like the quickest possible - # solution but I do not know whether this would break code on any - # platforms I I have no access to. - # * Another would be to not use the terminating $(nl) but that would - # require updating all the using code so it does not simply - # prepend this variable to its own commands. - # * I guess the cleanest solution would be to update Boost Jam to - # allow explicitly specifying \n & \r characters in its scripts - # instead of always relying only on the 'current OS native newline - # sequence'. - # - # Some code found to depend on this behaviour: - # * This Boost Build module. - # * __test__ rule. - # * path-variable-setting-command rule. - # * python.jam toolset. - # * xsltproc.jam toolset. - # * fop.jam toolset. - # (todo) (07.07.2008.) (Jurko) - # - # I think that this works correctly in python -- Steven Watanabe - return variable + "=" + value + os.linesep + "export " + variable + os.linesep - -def path_variable_setting_command(variable, paths): - """ - Returns a command to sets a named shell path variable to the given NATIVE - paths on the current platform. - """ - assert(isinstance(variable, str)) - assert(isinstance(paths, list)) - sep = os.path.pathsep - return variable_setting_command(variable, sep.join(paths)) - -def prepend_path_variable_command(variable, paths): - """ - Returns a command that prepends the given paths to the named path variable on - the current platform. - """ - return path_variable_setting_command(variable, - paths + os.environ.get(variable, "").split(os.pathsep)) - -def file_creation_command(): - """ - Return a command which can create a file. If 'r' is result of invocation, then - 'r foobar' will create foobar with unspecified content. What happens if file - already exists is unspecified. - """ - if os_name() == 'NT': - return "echo. > " - else: - return "touch " - -#FIXME: global variable -__mkdir_set = set() -__re_windows_drive = re.compile(r'^.*:\$') - -def mkdir(engine, target): - # If dir exists, do not update it. Do this even for $(DOT). - bjam.call('NOUPDATE', target) - - global __mkdir_set - - # FIXME: Where is DOT defined? - #if $(<) != $(DOT) && ! $($(<)-mkdir): - if target != '.' and target not in __mkdir_set: - # Cheesy gate to prevent multiple invocations on same dir. - __mkdir_set.add(target) - - # Schedule the mkdir build action. 
- if os_name() == 'NT': - engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, []) - else: - engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, []) - - # Prepare a Jam 'dirs' target that can be used to make the build only - # construct all the target directories. - engine.add_dependency('dirs', target) - - # Recursively create parent directories. $(<:P) = $(<)'s parent & we - # recurse until root. - - s = os.path.dirname(target) - if os_name() == 'NT': - if(__re_windows_drive.match(s)): - s = '' - - if s: - if s != target: - engine.add_dependency(target, s) - mkdir(engine, s) - else: - bjam.call('NOTFILE', s) - -__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)') - -def format_name(format, name, target_type, prop_set): - """ Given a target, as given to a custom tag rule, returns a string formatted - according to the passed format. Format is a list of properties that is - represented in the result. For each element of format the corresponding target - information is obtained and added to the result string. For all, but the - literal, the format value is taken as the as string to prepend to the output - to join the item to the rest of the result. If not given "-" is used as a - joiner. - - The format options can be: - - <base>[joiner] - :: The basename of the target name. - <toolset>[joiner] - :: The abbreviated toolset tag being used to build the target. - <threading>[joiner] - :: Indication of a multi-threaded build. - <runtime>[joiner] - :: Collective tag of the build runtime. - <version:/version-feature | X.Y[.Z]/>[joiner] - :: Short version tag taken from the given "version-feature" - in the build properties. Or if not present, the literal - value as the version number. - <property:/property-name/>[joiner] - :: Direct lookup of the given property-name value in the - build properties. /property-name/ is a regular expression. - e.g. <property:toolset-.*:flavor> will match every toolset. - /otherwise/ - :: The literal value of the format argument. - - For example this format: - - boost_ <base> <toolset> <threading> <runtime> <version:boost-version> - - Might return: - - boost_thread-vc80-mt-gd-1_33.dll, or - boost_regex-vc80-gd-1_33.dll - - The returned name also has the target type specific prefix and suffix which - puts it in a ready form to use as the value from a custom tag rule. 
- """ - assert(isinstance(format, list)) - assert(isinstance(name, str)) - assert(isinstance(target_type, str) or not type) - # assert(isinstance(prop_set, property_set.PropertySet)) - if type.is_derived(target_type, 'LIB'): - result = "" ; - for f in format: - grist = get_grist(f) - if grist == '<base>': - result += os.path.basename(name) - elif grist == '<toolset>': - result += join_tag(ungrist(f), - toolset_tag(name, target_type, prop_set)) - elif grist == '<threading>': - result += join_tag(ungrist(f), - threading_tag(name, target_type, prop_set)) - elif grist == '<runtime>': - result += join_tag(ungrist(f), - runtime_tag(name, target_type, prop_set)) - elif grist.startswith('<version:'): - key = grist[len('<version:'):-1] - version = prop_set.get('<' + key + '>') - if not version: - version = key - version = __re_version.match(version) - result += join_tag(ungrist(f), version[1] + '_' + version[2]) - elif grist.startswith('<property:'): - key = grist[len('<property:'):-1] - property_re = re.compile('<(' + key + ')>') - p0 = None - for prop in prop_set.raw(): - match = property_re.match(prop) - if match: - p0 = match[1] - break - if p0: - p = prop_set.get('<' + p0 + '>') - if p: - assert(len(p) == 1) - result += join_tag(ungrist(f), p) - else: - result += ungrist(f) - - result = virtual_target.add_prefix_and_suffix( - ''.join(result), target_type, prop_set) - return result - -def join_tag(joiner, tag): - if not joiner: joiner = '-' - return joiner + tag - -__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)") - -def toolset_tag(name, target_type, prop_set): - tag = '' - - properties = prop_set.raw() - tools = prop_set.get('<toolset>') - assert(len(tools) == 0) - tools = tools[0] - if tools.startswith('borland'): tag += 'bcb' - elif tools.startswith('como'): tag += 'como' - elif tools.startswith('cw'): tag += 'cw' - elif tools.startswith('darwin'): tag += 'xgcc' - elif tools.startswith('edg'): tag += edg - elif tools.startswith('gcc'): - flavor = prop_set.get('<toolset-gcc:flavor>') - ''.find - if flavor.find('mingw') != -1: - tag += 'mgw' - else: - tag += 'gcc' - elif tools == 'intel': - if prop_set.get('<toolset-intel:platform>') == ['win']: - tag += 'iw' - else: - tag += 'il' - elif tools.startswith('kcc'): tag += 'kcc' - elif tools.startswith('kylix'): tag += 'bck' - #case metrowerks* : tag += cw ; - #case mingw* : tag += mgw ; - elif tools.startswith('mipspro'): tag += 'mp' - elif tools.startswith('msvc'): tag += 'vc' - elif tools.startswith('sun'): tag += 'sw' - elif tools.startswith('tru64cxx'): tag += 'tru' - elif tools.startswith('vacpp'): tag += 'xlc' - - for prop in properties: - match = __re_toolset_version.match(prop) - if(match): - version = match - break - version_string = None - # For historical reasons, vc6.0 and vc7.0 use different naming. - if tag == 'vc': - if version.group(1) == '6': - # Cancel minor version. - version_string = '6' - elif version.group(1) == '7' and version.group(2) == '0': - version_string = '7' - - # On intel, version is not added, because it does not matter and it's the - # version of vc used as backend that matters. Ideally, we'd encode the - # backend version but that would break compatibility with V1. - elif tag == 'iw': - version_string = '' - - # On borland, version is not added for compatibility with V1. 
- elif tag == 'bcb': - version_string = '' - - if version_string is None: - version = version.group(1) + version.group(2) - - tag += version - - return tag - - -def threading_tag(name, target_type, prop_set): - tag = '' - properties = prop_set.raw() - if '<threading>multi' in properties: tag = 'mt' - - return tag - - -def runtime_tag(name, target_type, prop_set ): - tag = '' - - properties = prop_set.raw() - if '<runtime-link>static' in properties: tag += 's' - - # This is an ugly thing. In V1, there's a code to automatically detect which - # properties affect a target. So, if <runtime-debugging> does not affect gcc - # toolset, the tag rules won't even see <runtime-debugging>. Similar - # functionality in V2 is not implemented yet, so we just check for toolsets - # which are known to care about runtime debug. - if '<toolset>msvc' in properties \ - or '<stdlib>stlport' in properties \ - or '<toolset-intel:platform>win' in properties: - if '<runtime-debugging>on' in properties: tag += 'g' - - if '<python-debugging>on' in properties: tag += 'y' - if '<variant>debug' in properties: tag += 'd' - if '<stdlib>stlport' in properties: tag += 'p' - if '<stdlib-stlport:iostream>hostios' in properties: tag += 'n' - - return tag - - -## TODO: -##rule __test__ ( ) -##{ -## import assert ; -## -## local nl = " -##" ; -## -## local save-os = [ modules.peek os : .name ] ; -## -## modules.poke os : .name : LINUX ; -## -## assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)" -## : path-variable-setting-command PATH : foo bar baz ; -## -## assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)" -## : prepend-path-variable-command PATH : foo bar ; -## -## modules.poke os : .name : NT ; -## -## assert.result "set PATH=foo;bar;baz$(nl)" -## : path-variable-setting-command PATH : foo bar baz ; -## -## assert.result "set PATH=foo;bar;%PATH%$(nl)" -## : prepend-path-variable-command PATH : foo bar ; -## -## modules.poke os : .name : $(save-os) ; -##} - -def init(manager): - engine = manager.engine() - - engine.register_action("common.MkDir1-quick-fix-for-unix", 'mkdir -p "$(<)"') - engine.register_action("common.MkDir1-quick-fix-for-windows", 'if not exist "$(<)\\" mkdir "$(<)"') - - import b2.tools.make - import b2.build.alias - - global __RM, __CP, __IGNORE, __LN - # ported from trunk@47281 - if os_name() == 'NT': - __RM = 'del /f /q' - __CP = 'copy' - __IGNORE = '2>nul >nul & setlocal' - __LN = __CP - #if not __LN: - # __LN = CP - else: - __RM = 'rm -f' - __CP = 'cp' - __IGNORE = '' - __LN = 'ln' - - engine.register_action("common.Clean", __RM + ' "$(>)"', - flags=['piecemeal', 'together', 'existing']) - engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"') - engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE, - flags=['quietly', 'updated', 'piecemeal', 'together']) - - engine.register_action("common.hard-link", - __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep + - __LN + ' "$(>)" "$(<)" $(NULL_OUT)') diff --git a/jam-files/boost-build/tools/como-linux.jam b/jam-files/boost-build/tools/como-linux.jam deleted file mode 100644 index 5c554c8f..00000000 --- a/jam-files/boost-build/tools/como-linux.jam +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# The following #// line will be used by the regression test table generation -# program as the column heading for HTML tables. 
Must not include a version -# number. -#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a> - -import toolset ; -import feature ; -import toolset : flags ; -import common ; -import generators ; - -import unix ; -import como ; - -feature.extend-subfeature toolset como : platform : linux ; - -toolset.inherit-generators como-linux - <toolset>como <toolset-como:platform>linux : unix ; -generators.override como-linux.prebuilt : builtin.lib-generator ; -generators.override como-linux.searched-lib-generator : searched-lib-generator ; -toolset.inherit-flags como-linux : unix ; -toolset.inherit-rules como-linux : gcc ; - -generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ - : <toolset>como <toolset-como:platform>linux ; -generators.register-c-compiler como-linux.compile.c : C : OBJ - : <toolset>como <toolset-como:platform>linux ; - - -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters como-linux - : version $(version) ] ; - - command = [ common.get-invocation-command como-linux : como - : $(command) ] ; - - common.handle-options como-linux : $(condition) : $(command) : $(options) ; -} - - -flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ; -flags como-linux C++FLAGS <exception-handling>on : --exceptions ; - -flags como-linux CFLAGS <inlining>off : --no_inlining ; -flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ; - -flags como-linux CFLAGS <optimization>off : -O0 ; -flags como-linux CFLAGS <optimization>speed : -O3 ; -flags como-linux CFLAGS <optimization>space : -Os ; - -flags como-linux CFLAGS <debug-symbols>on : -g ; -flags como-linux LINKFLAGS <debug-symbols>on : -g ; - -flags como-linux FINDLIBS : m ; -flags como-linux FINDLIBS : rt ; - -flags como-linux CFLAGS <cflags> ; -flags como-linux C++FLAGS <cxxflags> ; -flags como-linux DEFINES <define> ; -flags como-linux UNDEFS <undef> ; -flags como-linux HDRS <include> ; -flags como-linux STDHDRS <sysinclude> ; -flags como-linux LINKFLAGS <linkflags> ; -flags como-linux ARFLAGS <arflags> ; - -flags como-linux.link LIBRARIES <library-file> ; -flags como-linux.link LINKPATH <library-path> ; -flags como-linux.link FINDLIBS-ST <find-static-library> ; -flags como-linux.link FINDLIBS-SA <find-shared-library> ; - -flags como-linux.link RPATH <dll-path> ; -flags como-linux.link RPATH_LINK <xdll-path> ; - - -actions link bind LIBRARIES -{ - $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1 -} - -actions link.dll bind LIBRARIES -{ - $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1 -} - -actions compile.c -{ - $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1 -} - -actions compile.c++ -{ - $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1 -} - -actions archive -{ - ar rcu $(<) $(>) -} diff --git a/jam-files/boost-build/tools/como-win.jam b/jam-files/boost-build/tools/como-win.jam deleted file mode 100644 index d21a70d6..00000000 --- a/jam-files/boost-build/tools/como-win.jam +++ /dev/null @@ -1,117 +0,0 @@ -# (C) Copyright David Abrahams 2001. 
-# (C) Copyright MetaCommunications, Inc. 2004. - -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# The following #// line will be used by the regression test table generation -# program as the column heading for HTML tables. Must not include a version -# number. -#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a> - -import common ; -import como ; -import feature ; -import generators ; -import toolset : flags ; - -feature.extend-subfeature toolset como : platform : win ; - - -# Initializes the Comeau toolset for windows. The command is the command which -# invokes the compiler. You should either set environment variable -# COMO_XXX_INCLUDE where XXX is the used backend (as described in the -# documentation), or pass that as part of command, e.g: -# -# using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ; -# -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters como-win - : version $(version) ] ; - - command = [ common.get-invocation-command como-win : como.exe : - $(command) ] ; - - common.handle-options como-win : $(condition) : $(command) : $(options) ; -} - -generators.register-c-compiler como-win.compile.c++ : CPP : OBJ - : <toolset>como <toolset-como:platform>win ; -generators.register-c-compiler como-win.compile.c : C : OBJ - : <toolset>como <toolset-como:platform>win ; - - -generators.register-linker como-win.link - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : EXE - : <toolset>como <toolset-como:platform>win ; - -# Note that status of shared libraries support is not clear, so we do not define -# the link.dll generator. -generators.register-archiver como-win.archive - : OBJ : STATIC_LIB - : <toolset>como <toolset-como:platform>win ; - - -flags como-win C++FLAGS <exception-handling>off : --no_exceptions ; -flags como-win C++FLAGS <exception-handling>on : --exceptions ; - -flags como-win CFLAGS <inlining>off : --no_inlining ; -flags como-win CFLAGS <inlining>on <inlining>full : --inlining ; - - -# The following seems to be VC-specific options. At least, when I uncomment -# then, Comeau with bcc as backend reports that bcc32 invocation failed. -# -#flags como-win CFLAGS <debug-symbols>on : /Zi ; -#flags como-win CFLAGS <optimization>off : /Od ; - - -flags como-win CFLAGS <cflags> ; -flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header. -flags como-win CFLAGS <threading>multi : -D_MT ; # Make sure that our config knows that threading is on. -flags como-win C++FLAGS <cxxflags> ; -flags como-win DEFINES <define> ; -flags como-win UNDEFS <undef> ; -flags como-win HDRS <include> ; -flags como-win SYSHDRS <sysinclude> ; -flags como-win LINKFLAGS <linkflags> ; -flags como-win ARFLAGS <arflags> ; -flags como-win NO_WARN <no-warn> ; - -#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ; -#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ; - -flags como-win LIBPATH <library-path> ; -flags como-win LIBRARIES <library-file> ; -flags como-win FINDLIBS <find-shared-library> ; -flags como-win FINDLIBS <find-static-library> ; - -nl = " -" ; - - -# For como, we repeat all libraries so that dependencies are always resolved. 
-# -actions link bind LIBRARIES -{ - $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)" -} - -actions compile.c -{ - $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)" -} - -actions compile.c++ -{ - $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)" -} - -actions archive -{ - $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" - lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" -} diff --git a/jam-files/boost-build/tools/como.jam b/jam-files/boost-build/tools/como.jam deleted file mode 100644 index 04a05a94..00000000 --- a/jam-files/boost-build/tools/como.jam +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This is a generic 'como' toolset. Depending on the current system, it -# forwards either to 'como-linux' or 'como-win' modules. - -import feature ; -import os ; -import toolset ; - -feature.extend toolset : como ; -feature.subfeature toolset como : platform : : propagated link-incompatible ; - -rule init ( * : * ) -{ - if [ os.name ] = LINUX - { - toolset.using como-linux : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - else - { - toolset.using como-win : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - - } -} diff --git a/jam-files/boost-build/tools/convert.jam b/jam-files/boost-build/tools/convert.jam deleted file mode 100644 index ac1d7010..00000000 --- a/jam-files/boost-build/tools/convert.jam +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) 2009 Vladimir Prus -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Implements 'convert' target that takes a bunch of source and -# tries to convert each one to the specified type. -# -# For example: -# -# convert objects obj : a.cpp b.cpp ; -# - -import targets ; -import generators ; -import project ; -import type ; -import "class" : new ; - -class convert-target-class : typed-target -{ - rule __init__ ( name : project : type - : sources * : requirements * : default-build * : usage-requirements * ) - { - typed-target.__init__ $(name) : $(project) : $(type) - : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ; - } - - rule construct ( name : source-targets * : property-set ) - { - local r = [ generators.construct $(self.project) : $(self.type) - : [ property-set.create [ $(property-set).raw ] # [ feature.expand - <main-target-type>$(self.type) ] - # ] - : $(source-targets) ] ; - if ! 
$(r) - { - errors.error "unable to construct" [ full-name ] ; - } - - return $(r) ; - } - -} - -rule convert ( name type : sources * : requirements * : default-build * - : usage-requirements * ) -{ - local project = [ project.current ] ; - - # This is a circular module dependency, so it must be imported here - modules.import targets ; - targets.main-target-alternative - [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ] - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] - ] ; -} -IMPORT $(__name__) : convert : : convert ; diff --git a/jam-files/boost-build/tools/cw-config.jam b/jam-files/boost-build/tools/cw-config.jam deleted file mode 100644 index 1211b7c0..00000000 --- a/jam-files/boost-build/tools/cw-config.jam +++ /dev/null @@ -1,34 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for CodeWarrior toolset. To use, just import this module. - -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ - for local R in 9 8 7 - { - local cw-path = [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)" - : "PATH" ] ; - local cw-version = [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)" - : "VERSION" ] ; - cw-path ?= [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0" - : "PATH" ] ; - cw-version ?= $(R).0 ; - - if $(cw-path) - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ; - } - using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ; - } - } -} diff --git a/jam-files/boost-build/tools/cw.jam b/jam-files/boost-build/tools/cw.jam deleted file mode 100644 index ddcbfeb2..00000000 --- a/jam-files/boost-build/tools/cw.jam +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright (C) Reece H Dunn 2004 -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# based on the msvc.jam toolset - -import property ; -import generators ; -import os ; -import type ; -import toolset : flags ; -import errors : error ; -import feature : feature get-values ; -import path ; -import sequence : unique ; -import common ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -feature.extend toolset : cw ; - -toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ; - -nl = " -" ; - -rule init ( version ? 
: command * : options * ) -{ - # TODO: fix the $(command[1]) = $(compiler) issue - - setup = [ get-values <setup> : $(options) ] ; - setup ?= cwenv.bat ; - compiler = [ get-values <compiler> : $(options) ] ; - compiler ?= mwcc ; - linker = [ get-values <linker> : $(options) ] ; - linker ?= mwld ; - - local condition = [ common.check-init-parameters cw : - version $(version) ] ; - - command = [ common.get-invocation-command cw : mwcc.exe : $(command) : - [ default-paths $(version) ] ] ; - - common.handle-options cw : $(condition) : $(command) : $(options) ; - - local root = [ feature.get-values <root> : $(options) ] ; - if $(command) - { - command = [ common.get-absolute-tool-path $(command[-1]) ] ; - } - local tool-root = $(command) ; - - setup = $(tool-root)\\$(setup) ; - - # map the batch file in setup so it can be executed - - other-tools = $(tool-root:D) ; - root ?= $(other-tools:D) ; - - flags cw.link RUN_PATH $(condition) : - "$(root)\\Win32-x86 Support\\Libraries\\Runtime" - "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ; - - setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ; - - if [ os.name ] = NT - { - setup = $(setup)" -" ; - } - else - { - setup = "cmd /S /C "$(setup)" \"&&\" " ; - } - - # bind the setup command to the tool so it can be executed before the - # command - - local prefix = $(setup) ; - - flags cw.compile .CC $(condition) : $(prefix)$(compiler) ; - flags cw.link .LD $(condition) : $(prefix)$(linker) ; - flags cw.archive .LD $(condition) : $(prefix)$(linker) ; - - if [ MATCH ^([89]\\.) : $(version) ] - { - if [ os.name ] = NT - { - # The runtime libraries - flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ; - flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ; - - flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ; - flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ; - - flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ; - flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ; - } - } -} - - -local rule default-paths ( version ? 
) # FIXME -{ - local possible-paths ; - local ProgramFiles = [ common.get-program-files-dir ] ; - - # TODO: add support for cw8 and cw9 detection - - local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ; - possible-paths += $(version-6-path) ; - - # perform post-processing - - possible-paths - = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ; - - possible-paths += [ modules.peek : PATH Path path ] ; - - return $(possible-paths) ; -} - - - - -## declare generators - -generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ; -generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ; - -generators.register-linker cw.link - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : EXE - : <toolset>cw - ; -generators.register-linker cw.link.dll - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : SHARED_LIB IMPORT_LIB - : <toolset>cw - ; - -generators.register-archiver cw.archive - : OBJ - : STATIC_LIB - : <toolset>cw - ; - -## compilation phase - -flags cw WHATEVER <toolset-cw:version> ; - -flags cw.compile CFLAGS <debug-symbols>on : -g ; -flags cw.compile CFLAGS <optimization>off : -O0 ; -flags cw.compile CFLAGS <optimization>speed : -O4,p ; -flags cw.compile CFLAGS <optimization>space : -O4,s ; -flags cw.compile CFLAGS <inlining>off : -inline off ; -flags cw.compile CFLAGS <inlining>on : -inline on ; -flags cw.compile CFLAGS <inlining>full : -inline all ; -flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ; - - -flags cw.compile CFLAGS <rtti>on : -RTTI on ; -flags cw.compile CFLAGS <rtti>off : -RTTI off ; - -flags cw.compile CFLAGS <warnings>on : -w on ; -flags cw.compile CFLAGS <warnings>off : -w off ; -flags cw.compile CFLAGS <warnings>all : -w all ; -flags cw.compile CFLAGS <warnings-as-errors>on : -w error ; - -flags cw.compile USER_CFLAGS <cflags> : ; -flags cw.compile.c++ USER_CFLAGS <cxxflags> : ; - -flags cw.compile DEFINES <define> ; -flags cw.compile UNDEFS <undef> ; -flags cw.compile INCLUDES <include> ; - -actions compile.c -{ - $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")" -} -actions compile.c++ -{ - $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")" -} - -## linking phase - -flags cw.link DEF_FILE <def-file> ; - -flags cw LINKFLAGS : -search ; -flags cw LINKFLAGS <debug-symbols>on : -g ; -flags cw LINKFLAGS <user-interface>console : -subsystem console ; -flags cw LINKFLAGS <user-interface>gui : -subsystem windows ; -flags cw LINKFLAGS <user-interface>wince : -subsystem wince ; -flags cw LINKFLAGS <user-interface>native : -subsystem native ; -flags cw LINKFLAGS <user-interface>auto : -subsystem auto ; - -flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ; - -flags cw.link USER_LINKFLAGS <linkflags> ; -flags cw.link LINKPATH <library-path> ; - -flags cw.link FINDLIBS_ST <find-static-library> ; -flags cw.link FINDLIBS_SA <find-shared-library> ; -flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ; -flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ; - -rule link.dll ( targets + : sources * : properties * ) -{ - DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ; -} - -if [ os.name ] in NT -{ - actions archive - { - if exist "$(<[1])" DEL "$(<[1])" - $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) 
$(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" - } -} -else # cygwin -{ - actions archive - { - _bbv2_out_="$(<)" - if test -f "$_bbv2_out_" ; then - _bbv2_existing_="$(<:W)" - fi - $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" - } -} - -actions link bind DEF_FILE -{ - $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -} - -actions link.dll bind DEF_FILE -{ - $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -} - diff --git a/jam-files/boost-build/tools/darwin.jam b/jam-files/boost-build/tools/darwin.jam deleted file mode 100644 index bb6dd45e..00000000 --- a/jam-files/boost-build/tools/darwin.jam +++ /dev/null @@ -1,568 +0,0 @@ -# Copyright 2003 Christopher Currie -# Copyright 2006 Dave Abrahams -# Copyright 2003, 2004, 2005, 2006 Vladimir Prus -# Copyright 2005-2007 Mat Marcus -# Copyright 2005-2007 Adobe Systems Incorporated -# Copyright 2007-2010 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ -# for explanation why it's a separate toolset. - -import feature : feature ; -import toolset : flags ; -import type ; -import common ; -import generators ; -import path : basename ; -import version ; -import property-set ; -import regex ; -import errors ; - -## Use a framework. -feature framework : : free ; - -## The MacOSX version to compile for, which maps to the SDK to use (sysroot). -feature macosx-version : : propagated link-incompatible symmetric optional ; - -## The minimal MacOSX version to target. -feature macosx-version-min : : propagated optional ; - -## A dependency, that is forced to be included in the link. -feature force-load : : free dependency incidental ; - -############################################################################# - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -feature.extend toolset : darwin ; -import gcc ; -toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ; - -generators.override darwin.prebuilt : builtin.prebuilt ; -generators.override darwin.searched-lib-generator : searched-lib-generator ; - -# Override default do-nothing generators. 
-generators.override darwin.compile.c.pch : pch.default-c-pch-generator ; -generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ; - -type.set-generated-target-suffix PCH : <toolset>darwin : gch ; - -toolset.inherit-rules darwin : gcc : localize ; -toolset.inherit-flags darwin : gcc - : <runtime-link>static - <architecture>arm/<address-model>32 - <architecture>arm/<address-model>64 - <architecture>arm/<instruction-set> - <architecture>x86/<address-model>32 - <architecture>x86/<address-model>64 - <architecture>x86/<instruction-set> - <architecture>power/<address-model>32 - <architecture>power/<address-model>64 - <architecture>power/<instruction-set> ; - -# Options: -# -# <root>PATH -# Platform root path. The common autodetection will set this to -# "/Developer". And when a command is given it will be set to -# the corresponding "*.platform/Developer" directory. -# -rule init ( version ? : command * : options * : requirement * ) -{ - # First time around, figure what is host OSX version - if ! $(.host-osx-version) - { - .host-osx-version = [ MATCH "^([0-9.]+)" - : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ; - if $(.debug-configuration) - { - ECHO notice: OSX version on this machine is $(.host-osx-version) ; - } - } - - # - The root directory of the tool install. - local root = [ feature.get-values <root> : $(options) ] ; - - # - The bin directory where to find the commands to execute. - local bin ; - - # - The configured compile driver command. - local command = [ common.get-invocation-command darwin : g++ : $(command) ] ; - - # The version as reported by the compiler - local real-version ; - - # - Autodetect the root and bin dir if not given. - if $(command) - { - bin ?= [ common.get-absolute-tool-path $(command[1]) ] ; - if $(bin) = "/usr/bin" - { - root ?= /Developer ; - } - else - { - local r = $(bin:D) ; - r = $(r:D) ; - root ?= $(r) ; - } - } - - # - Autodetect the version if not given. - if $(command) - { - # - The 'command' variable can have multiple elements. When calling - # the SHELL builtin we need a single string. - local command-string = $(command:J=" ") ; - real-version = [ MATCH "^([0-9.]+)" - : [ SHELL "$(command-string) -dumpversion" ] ] ; - version ?= $(real-version) ; - } - - .real-version.$(version) = $(real-version) ; - - # - Define the condition for this toolset instance. - local condition = - [ common.check-init-parameters darwin $(requirement) : version $(version) ] ; - - # - Set the toolset generic common options. - common.handle-options darwin : $(condition) : $(command) : $(options) ; - - # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates. - if $(real-version) < "4.0.0" - { - flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ; - } - # - GCC 4.2 and higher in Darwin does not have -Wno-long-double. - if $(real-version) < "4.2.0" - { - flags darwin.compile OPTIONS $(condition) : -Wno-long-double ; - } - - # - Set the link flags common with the GCC toolset. - gcc.init-link-flags darwin darwin $(condition) ; - - # - The symbol strip program. - local strip ; - if <striper> in $(options) - { - # We can turn off strip by specifying it as empty. In which - # case we switch to using the linker to do the strip. 
- flags darwin.link.dll OPTIONS - $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ; - flags darwin.link.dll OPTIONS - $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ; - flags darwin.link OPTIONS - $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ; - flags darwin.link OPTIONS - $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ; - } - else - { - # Otherwise we need to find a strip program to use. And hence - # also tell the link action that we need to use a strip - # post-process. - flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ; - strip = - [ common.get-invocation-command darwin - : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ; - flags darwin.link .STRIP $(condition) : $(strip[1]) ; - if $(.debug-configuration) - { - ECHO notice: using strip for $(condition) at $(strip[1]) ; - } - } - - # - The archive builder (libtool is the default as creating - # archives in darwin is complicated. - local archiver = - [ common.get-invocation-command darwin - : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ; - flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ; - if $(.debug-configuration) - { - ECHO notice: using archiver for $(condition) at $(archiver[1]) ; - } - - # - Initialize the SDKs available in the root for this tool. - local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ; - - #~ ECHO --- ; - #~ ECHO --- bin :: $(bin) ; - #~ ECHO --- root :: $(root) ; - #~ ECHO --- version :: $(version) ; - #~ ECHO --- condition :: $(condition) ; - #~ ECHO --- strip :: $(strip) ; - #~ ECHO --- archiver :: $(archiver) ; - #~ ECHO --- sdks :: $(sdks) ; - #~ ECHO --- ; - #~ EXIT ; -} - -# Add and set options for a discovered SDK version. -local rule init-sdk ( condition * : root ? : version + : version-feature ? ) -{ - local rule version-to-feature ( version + ) - { - switch $(version[1]) - { - case iphone* : - { - return $(version[1])-$(version[2-]:J=.) ; - } - case mac* : - { - return $(version[2-]:J=.) ; - } - case * : - { - return $(version:J=.) ; - } - } - } - - if $(version-feature) - { - if $(.debug-configuration) - { - ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ; - } - - # Add the version to the features for specifying them. - if ! $(version-feature) in [ feature.values macosx-version ] - { - feature.extend macosx-version : $(version-feature) ; - } - if ! $(version-feature) in [ feature.values macosx-version-min ] - { - feature.extend macosx-version-min : $(version-feature) ; - } - - # Set the flags the version needs to compile with, first - # generic options. - flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature) - : -isysroot $(sdk) ; - flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature) - : -isysroot $(sdk) ; - - # Then device variation options. - switch $(version[1]) - { - case iphonesim* : - { - local N = $(version[2]) ; - if ! $(version[3]) { N += 00 ; } - else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; } - else { N += 0$(version[3]) ; } - if ! $(version[4]) { N += 00 ; } - else if [ regex.match (..) 
: $(version[4]) ] { N += $(version[4]) ; } - else { N += 0$(version[4]) ; } - N = $(N:J=) ; - flags darwin.compile OPTIONS <macosx-version-min>$(version-feature) - : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; - flags darwin.link OPTIONS <macosx-version-min>$(version-feature) - : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; - } - - case iphone* : - { - flags darwin.compile OPTIONS <macosx-version-min>$(version-feature) - : -miphoneos-version-min=$(version[2-]:J=.) ; - flags darwin.link OPTIONS <macosx-version-min>$(version-feature) - : -miphoneos-version-min=$(version[2-]:J=.) ; - } - - case mac* : - { - flags darwin.compile OPTIONS <macosx-version-min>$(version-feature) - : -mmacosx-version-min=$(version[2-]:J=.) ; - flags darwin.link OPTIONS <macosx-version-min>$(version-feature) - : -mmacosx-version-min=$(version[2-]:J=.) ; - } - } - - return $(version-feature) ; - } - else if $(version[4]) - { - # We have a patch version of an SDK. We want to set up - # both the specific patch version, and the minor version. - # So we recurse to set up the minor version. Plus the minor version. - return - [ init-sdk $(condition) : $(root) - : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ] - [ init-sdk $(condition) : $(root) - : $(version) : [ version-to-feature $(version) ] ] ; - } - else - { - # Yes, this is intentionally recursive. - return - [ init-sdk $(condition) : $(root) - : $(version) : [ version-to-feature $(version) ] ] ; - } -} - -# Determine the MacOSX SDK versions installed and their locations. -local rule init-available-sdk-versions ( condition * : root ? ) -{ - root ?= /Developer ; - local sdks-root = $(root)/SDKs ; - local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ; - local result ; - for local sdk in $(sdks) - { - local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ; - local sdk-platform = $(sdk-match[1]:L) ; - local sdk-version = $(sdk-match[2-]) ; - if $(sdk-version) - { - switch $(sdk-platform) - { - case macosx : - { - sdk-version = mac $(sdk-version) ; - } - case iphoneos : - { - sdk-version = iphone $(sdk-version) ; - } - case iphonesimulator : - { - sdk-version = iphonesim $(sdk-version) ; - } - case * : - { - sdk-version = $(sdk-version:J=-) ; - } - } - result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ; - } - } - return $(result) ; -} - -# Generic options. -flags darwin.compile OPTIONS <flags> ; - -# The following adds objective-c support to darwin. -# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759 - -generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ; -generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ; - -rule setup-address-model ( targets * : sources * : properties * ) -{ - local ps = [ property-set.create $(properties) ] ; - local arch = [ $(ps).get <architecture> ] ; - local address-model = [ $(ps).get <address-model> ] ; - local osx-version = [ $(ps).get <macosx-version> ] ; - local gcc-version = [ $(ps).get <toolset-darwin:version> ] ; - gcc-version = $(.real-version.$(gcc-version)) ; - local options ; - - local support-ppc64 = 1 ; - - osx-version ?= $(.host-osx-version) ; - - switch $(osx-version) - { - case iphone* : - { - support-ppc64 = ; - } - - case * : - if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. 
] : 10 6 ] - { - # When targeting 10.6: - # - gcc 4.2 will give a compiler errir if ppc64 compilation is requested - # - gcc 4.0 will compile fine, somehow, but then fail at link time - support-ppc64 = ; - } - } - switch $(arch) - { - case combined : - { - if $(address-model) = 32_64 { - if $(support-ppc64) { - options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ; - } else { - # Build 3-way binary - options = -arch i386 -arch ppc -arch x86_64 ; - } - } else if $(address-model) = 64 { - if $(support-ppc64) { - options = -arch x86_64 -arch ppc64 ; - } else { - errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ; - } - } else { - options = -arch i386 -arch ppc ; - } - } - - case x86 : - { - if $(address-model) = 32_64 { - options = -arch i386 -arch x86_64 ; - } else if $(address-model) = 64 { - options = -arch x86_64 ; - } else { - options = -arch i386 ; - } - } - - case power : - { - if ! $(support-ppc64) - && ( $(address-model) = 32_64 || $(address-model) = 64 ) - { - errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ; - } - - if $(address-model) = 32_64 { - options = -arch ppc -arch ppc64 ; - } else if $(address-model) = 64 { - options = -arch ppc64 ; - } else { - options = -arch ppc ; - } - } - - case arm : - { - options = -arch armv6 ; - } - } - - if $(options) - { - OPTIONS on $(targets) += $(options) ; - } -} - -rule setup-threading ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; -} - -rule setup-fpic ( targets * : sources * : properties * ) -{ - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -} - -rule compile.m ( targets * : sources * : properties * ) -{ - LANG on $(<) = "-x objective-c" ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.m -{ - "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.mm ( targets * : sources * : properties * ) -{ - LANG on $(<) = "-x objective-c++" ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.mm -{ - "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Set the max header padding to allow renaming of libs for installation. -flags darwin.link.dll OPTIONS : -headerpad_max_install_names ; - -# To link the static runtime we need to link to all the core runtime libraries. -flags darwin.link OPTIONS <runtime-link>static - : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ; - -# Strip as much as possible when optimizing. -flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ; -flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ; - -# Dynamic/shared linking. -flags darwin.compile OPTIONS <link>shared : -dynamic ; - -# Misc options. -flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ; -#~ flags darwin.link OPTIONS : -fexceptions ; - -# Add the framework names to use. 
-flags darwin.link FRAMEWORK <framework> ; - -# -flags darwin.link FORCE_LOAD <force-load> ; - -# This is flag is useful for debugging the link step -# uncomment to see what libtool is doing under the hood -#~ flags darwin.link.dll OPTIONS : -Wl,-v ; - -_ = " " ; - -# set up the -F option to include the paths to any frameworks used. -local rule prepare-framework-path ( target + ) -{ - # The -framework option only takes basename of the framework. - # The -F option specifies the directories where a framework - # is searched for. So, if we find <framework> feature - # with some path, we need to generate property -F option. - local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ; - - # Be sure to generate no -F if there's no path. - for local framework-path in $(framework-paths) - { - if $(framework-path) != "" - { - FRAMEWORK_PATH on $(target) += -F$(framework-path) ; - } - } -} - -rule link ( targets * : sources * : properties * ) -{ - DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ; - setup-address-model $(targets) : $(sources) : $(properties) ; - prepare-framework-path $(<) ; -} - -# Note that using strip without any options was reported to result in broken -# binaries, at least on OS X 10.5.5, see: -# http://svn.boost.org/trac/boost/ticket/2347 -# So we pass -S -x. -actions link bind LIBRARIES FORCE_LOAD -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS) - $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)" -} - -rule link.dll ( targets * : sources * : properties * ) -{ - setup-address-model $(targets) : $(sources) : $(properties) ; - prepare-framework-path $(<) ; -} - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS) -} - -# We use libtool instead of ar to support universal binary linking -# TODO: Find a way to use the underlying tools, i.e. lipo, to do this. -actions piecemeal archive -{ - "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)" -} diff --git a/jam-files/boost-build/tools/darwin.py b/jam-files/boost-build/tools/darwin.py deleted file mode 100644 index c2919606..00000000 --- a/jam-files/boost-build/tools/darwin.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (C) Christopher Currie 2003. Permission to copy, use, -# modify, sell and distribute this software is granted provided this -# copyright notice appears in all copies. This software is provided -# "as is" without express or implied warranty, and with no claim as to -# its suitability for any purpose. - -# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ -# for explanation why it's a separate toolset. 
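As a point of reference for the darwin toolset definitions above, the module is normally consumed from user-config.jam and a project Jamfile roughly as sketched here. The version, command, target name and feature values are illustrative assumptions only; in particular, a macosx-version-min value is only available when a matching SDK was found by init-available-sdk-versions.

    # user-config.jam -- enable the darwin toolset; version and command are optional.
    using darwin : 4.2 : g++-4.2 ;

    # Jamfile -- request the darwin-specific features declared in darwin.jam.
    exe viewer
        : viewer.cpp
        : <toolset>darwin
          <framework>Cocoa              # passed as "-framework Cocoa" by the link actions
          <macosx-version-min>10.5      # mapped to -mmacosx-version-min=10.5 by init-sdk
        ;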
- -import common, gcc, builtin -from b2.build import feature, toolset, type, action, generators -from b2.util.utility import * - -toolset.register ('darwin') - -toolset.inherit_generators ('darwin', [], 'gcc') -toolset.inherit_flags ('darwin', 'gcc') -toolset.inherit_rules ('darwin', 'gcc') - -def init (version = None, command = None, options = None): - options = to_seq (options) - - condition = common.check_init_parameters ('darwin', None, ('version', version)) - - command = common.get_invocation_command ('darwin', 'g++', command) - - common.handle_options ('darwin', condition, command, options) - - gcc.init_link_flags ('darwin', 'darwin', condition) - -# Darwin has a different shared library suffix -type.set_generated_target_suffix ('SHARED_LIB', ['<toolset>darwin'], 'dylib') - -# we need to be able to tell the type of .dylib files -type.register_suffixes ('dylib', 'SHARED_LIB') - -feature.feature ('framework', [], ['free']) - -toolset.flags ('darwin.compile', 'OPTIONS', '<link>shared', ['-dynamic']) -toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp']) -toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates']) - -toolset.flags ('darwin.link', 'FRAMEWORK', '<framework>') - -# This is flag is useful for debugging the link step -# uncomment to see what libtool is doing under the hood -# toolset.flags ('darwin.link.dll', 'OPTIONS', None, '[-Wl,-v']) - -action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) - -# TODO: how to set 'bind LIBRARIES'? -action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) - -def darwin_archive (manager, targets, sources, properties): - pass - -action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"']) diff --git a/jam-files/boost-build/tools/dmc.jam b/jam-files/boost-build/tools/dmc.jam deleted file mode 100644 index 8af8725a..00000000 --- a/jam-files/boost-build/tools/dmc.jam +++ /dev/null @@ -1,134 +0,0 @@ -# Digital Mars C++ - -# (C) Copyright Christof Meerwald 2003. -# (C) Copyright Aleksey Gurtovoy 2004. -# (C) Copyright Arjan Knepper 2006. -# -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# The following #// line will be used by the regression test table generation -# program as the column heading for HTML tables. Must not include version number. -#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a> - -import feature generators common ; -import toolset : flags ; -import sequence regex ; - -feature.extend toolset : dmc ; - -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters dmc : version $(version) ] ; - - local command = [ common.get-invocation-command dmc : dmc : $(command) ] ; - command ?= dmc ; - - common.handle-options dmc : $(condition) : $(command) : $(options) ; - - if $(command) - { - command = [ common.get-absolute-tool-path $(command[-1]) ] ; - } - root = $(command:D) ; - - if $(root) - { - # DMC linker is sensitive the the direction of slashes, and - # won't link if forward slashes are used in command. 
- root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ; - flags dmc .root $(condition) : $(root)\\bin\\ ; - } - else - { - flags dmc .root $(condition) : "" ; - } -} - - -# Declare generators -generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ; -generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ; - -generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ; -generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ; -generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ; - - -# Declare flags -# dmc optlink has some limitation on the amount of debug-info included. Therefore only linenumbers are enabled in debug builds. -# flags dmc.compile OPTIONS <debug-symbols>on : -g ; -flags dmc.compile OPTIONS <debug-symbols>on : -gl ; -flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ; -flags dmc.link OPTIONS <debug-symbols>off : /PACKF ; - -flags dmc.compile OPTIONS <optimization>off : -S -o+none ; -flags dmc.compile OPTIONS <optimization>speed : -o+time ; -flags dmc.compile OPTIONS <optimization>space : -o+space ; -flags dmc.compile OPTIONS <exception-handling>on : -Ae ; -flags dmc.compile OPTIONS <rtti>on : -Ar ; -# FIXME: -# Compiling sources to be linked into a shared lib (dll) the -WD cflag should be used -# Compiling sources to be linked into a static lib (lib) or executable the -WA cflag should be used -# But for some reason the -WD cflag is always in use. -# flags dmc.compile OPTIONS <link>shared : -WD ; -# flags dmc.compile OPTIONS <link>static : -WA ; - -# Note that these two options actually imply multithreading support on DMC -# because there is no single-threaded dynamic runtime library. Specifying -# <threading>multi would be a bad idea, though, because no option would be -# matched when the build uses the default settings of <runtime-link>dynamic -# and <threading>single. 
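To make the note above concrete, a minimal Jamfile sketch (target names hypothetical) shows which of the conditional flags declared next would be selected:

    # With dmc the shared runtime is always multithreaded, so -ND is picked
    # without any <threading> requirement:
    exe demo : demo.cpp : <toolset>dmc <runtime-link>shared ;

    # A static-runtime multithreaded build selects -D_MT instead:
    exe demo-mt : demo.cpp : <toolset>dmc <threading>multi <runtime-link>static ;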
-flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ; -flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ; - -flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ; -flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ; -flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ; -flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ; - -flags dmc.compile OPTIONS : <cflags> ; -flags dmc.compile.c++ OPTIONS : <cxxflags> ; - -flags dmc.compile DEFINES : <define> ; -flags dmc.compile INCLUDES : <include> ; - -flags dmc.link <linkflags> ; -flags dmc.archive OPTIONS <arflags> ; - -flags dmc LIBPATH <library-path> ; -flags dmc LIBRARIES <library-file> ; -flags dmc FINDLIBS <find-library-sa> ; -flags dmc FINDLIBS <find-library-st> ; - -actions together link bind LIBRARIES -{ - "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def" -} - -actions together link.dll bind LIBRARIES -{ - echo LIBRARY "$(<[1])" > $(<[2]:B).def - echo DESCRIPTION 'A Library' >> $(<[2]:B).def - echo EXETYPE NT >> $(<[2]:B).def - echo SUBSYSTEM WINDOWS >> $(<[2]:B).def - echo CODE EXECUTE READ >> $(<[2]:B).def - echo DATA READ WRITE >> $(<[2]:B).def - "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def" -} - -actions compile.c -{ - "$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)" -} - -actions together piecemeal archive -{ - "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/docutils.jam b/jam-files/boost-build/tools/docutils.jam deleted file mode 100644 index bf061617..00000000 --- a/jam-files/boost-build/tools/docutils.jam +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -# Support for docutils ReStructuredText processing. - -import type ; -import scanner ; -import generators ; -import os ; -import common ; -import toolset ; -import path ; -import feature : feature ; -import property ; - -.initialized = ; - -type.register ReST : rst ; - -class rst-scanner : common-scanner -{ - rule __init__ ( paths * ) - { - common-scanner.__init__ . $(paths) ; - } - - rule pattern ( ) - { - return "^[ ]*\\.\\.[ ]+include::[ ]+([^ -]+)" - "^[ ]*\\.\\.[ ]+image::[ ]+([^ -]+)" - "^[ ]*\\.\\.[ ]+figure::[ ]+([^ -]+)" - ; - } -} - -scanner.register rst-scanner : include ; -type.set-scanner ReST : rst-scanner ; - -generators.register-standard docutils.html : ReST : HTML ; - -rule init ( docutils-dir ? : tools-dir ? ) -{ - docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ; - tools-dir ?= $(docutils-dir)/tools ; - - if ! $(.initialized) - { - .initialized = true ; - .docutils-dir = $(docutils-dir) ; - .tools-dir = $(tools-dir:R="") ; - - .setup = [ - common.prepend-path-variable-command PYTHONPATH - : $(.docutils-dir) $(.docutils-dir)/extras ] ; - } -} - -rule html ( target : source : properties * ) -{ - if ! 
[ on $(target) return $(RST2XXX) ] - { - local python-cmd = [ property.select <python.interpreter> : $(properties) ] ; - RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ; - } -} - - -feature docutils : : free ; -feature docutils-html : : free ; -feature docutils-cmd : : free ; -toolset.flags docutils COMMON-FLAGS : <docutils> ; -toolset.flags docutils HTML-FLAGS : <docutils-html> ; -toolset.flags docutils RST2XXX : <docutils-cmd> ; - -actions html -{ - $(.setup) - "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<) -} - diff --git a/jam-files/boost-build/tools/doxproc.py b/jam-files/boost-build/tools/doxproc.py deleted file mode 100644 index 4cbd5edd..00000000 --- a/jam-files/boost-build/tools/doxproc.py +++ /dev/null @@ -1,859 +0,0 @@ -#!/usr/bin/python -# Copyright 2006 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -''' -Processing of Doxygen generated XML. -''' - -import os -import os.path -import sys -import time -import string -import getopt -import glob -import re -import xml.dom.minidom - - -def usage(): - print ''' -Usage: - %s options - -Options: - --xmldir Directory with the Doxygen xml result files. - --output Write the output BoostBook to the given location. - --id The ID of the top level BoostBook section. - --title The title of the top level BoostBook section. - --enable-index Generate additional index sections for classes and - types. -''' % ( sys.argv[0] ) - - -def get_args( argv = sys.argv[1:] ): - spec = [ - 'xmldir=', - 'output=', - 'id=', - 'title=', - 'enable-index', - 'help' ] - options = { - '--xmldir' : 'xml', - '--output' : None, - '--id' : 'dox', - '--title' : 'Doxygen' - } - ( option_pairs, other ) = getopt.getopt( argv, '', spec ) - map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs ) - - if options.has_key( '--help' ): - usage() - sys.exit(1) - - return { - 'xmldir' : options['--xmldir'], - 'output' : options['--output'], - 'id' : options['--id'], - 'title' : options['--title'], - 'index' : options.has_key('--enable-index') - } - -def if_attribute(node, attribute, true_value, false_value=None): - if node.getAttribute(attribute) == 'yes': - return true_value - else: - return false_value - -class Doxygen2BoostBook: - - def __init__( self, **kwargs ): - ## - self.args = kwargs - self.args.setdefault('id','') - self.args.setdefault('title','') - self.args.setdefault('last_revision', time.asctime()) - self.args.setdefault('index', False) - self.id = '%(id)s.reference' % self.args - self.args['id'] = self.id - #~ This is our template BoostBook document we insert the generated content into. - self.boostbook = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?> -<section id="%(id)s" name="%(title)s" last-revision="%(last_revision)s"> - <title>%(title)s</title> - <library-reference id="%(id)s.headers"> - <title>Headers</title> - </library-reference> - <index id="%(id)s.classes"> - <title>Classes</title> - </index> - <index id="%(id)s.index"> - <title>Index</title> - </index> -</section> -''' % self.args ) - self.section = { - 'headers' : self._getChild('library-reference',id='%(id)s.headers' % self.args), - 'classes' : self._getChild('index',id='%(id)s.classes' % self.args), - 'index' : self._getChild('index',id='%(id)s.index' % self.args) - } - #~ Remove the index sections if we aren't generating it. 
- if not self.args['index']: - self.section['classes'].parentNode.removeChild(self.section['classes']) - self.section['classes'].unlink() - del self.section['classes'] - self.section['index'].parentNode.removeChild(self.section['index']) - self.section['index'].unlink() - del self.section['index'] - #~ The symbols, per Doxygen notion, that we translated. - self.symbols = {} - #~ Map of Doxygen IDs and BoostBook IDs, so we can translate as needed. - self.idmap = {} - #~ Marks generation, to prevent redoing it. - self.generated = False - - #~ Add an Doxygen generated XML document to the content we are translating. - def addDox( self, document ): - self._translateNode(document.documentElement) - - #~ Turns the internal XML tree into an output UTF-8 string. - def tostring( self ): - self._generate() - #~ return self.boostbook.toprettyxml(' ') - return self.boostbook.toxml('utf-8') - - #~ Does post-processing on the partial generated content to generate additional info - #~ now that we have the complete source documents. - def _generate( self ): - if not self.generated: - self.generated = True - symbols = self.symbols.keys() - symbols.sort() - #~ Populate the header section. - for symbol in symbols: - if self.symbols[symbol]['kind'] in ('header'): - self.section['headers'].appendChild(self.symbols[symbol]['dom']) - for symbol in symbols: - if self.symbols[symbol]['kind'] not in ('namespace', 'header'): - container = self._resolveContainer(self.symbols[symbol], - self.symbols[self.symbols[symbol]['header']]['dom']) - if container.nodeName != 'namespace': - ## The current BoostBook to Docbook translation doesn't - ## respect, nor assign, IDs to inner types of any kind. - ## So nuke the ID entry so as not create bogus links. - del self.idmap[self.symbols[symbol]['id']] - container.appendChild(self.symbols[symbol]['dom']) - self._rewriteIDs(self.boostbook.documentElement) - - #~ Rewrite the various IDs from Doxygen references to the newly created - #~ BoostBook references. - def _rewriteIDs( self, node ): - if node.nodeName in ('link'): - if (self.idmap.has_key(node.getAttribute('linkend'))): - #~ A link, and we have someplace to repoint it at. - node.setAttribute('linkend',self.idmap[node.getAttribute('linkend')]) - else: - #~ A link, but we don't have a generated target for it. - node.removeAttribute('linkend') - elif hasattr(node,'hasAttribute') and node.hasAttribute('id') and self.idmap.has_key(node.getAttribute('id')): - #~ Simple ID, and we have a translation. - node.setAttribute('id',self.idmap[node.getAttribute('id')]) - #~ Recurse, and iterate, depth-first traversal which turns out to be - #~ left-to-right and top-to-bottom for the document. - if node.firstChild: - self._rewriteIDs(node.firstChild) - if node.nextSibling: - self._rewriteIDs(node.nextSibling) - - def _resolveContainer( self, cpp, root ): - container = root - for ns in cpp['namespace']: - node = self._getChild('namespace',name=ns,root=container) - if not node: - node = container.appendChild( - self._createNode('namespace',name=ns)) - container = node - for inner in cpp['name'].split('::'): - node = self._getChild(name=inner,root=container) - if not node: - break - container = node - return container - - def _setID( self, id, name ): - self.idmap[id] = name.replace('::','.').replace('/','.') - #~ print '--| setID:',id,'::',self.idmap[id] - - #~ Translate a given node within a given context. 
- #~ The translation dispatches to a local method of the form - #~ "_translate[_context0,...,_contextN]", and the keyword args are - #~ passed along. If there is no translation handling method we - #~ return None. - def _translateNode( self, *context, **kwargs ): - node = None - names = [ ] - for c in context: - if c: - if not isinstance(c,xml.dom.Node): - suffix = '_'+c.replace('-','_') - else: - suffix = '_'+c.nodeName.replace('-','_') - node = c - names.append('_translate') - names = map(lambda x: x+suffix,names) - if node: - for name in names: - if hasattr(self,name): - return getattr(self,name)(node,**kwargs) - return None - - #~ Translates the children of the given parent node, appending the results - #~ to the indicated target. For nodes not translated by the translation method - #~ it copies the child over and recurses on that child to translate any - #~ possible interior nodes. Hence this will translate the entire subtree. - def _translateChildren( self, parent, **kwargs ): - target = kwargs['target'] - for n in parent.childNodes: - child = self._translateNode(n,target=target) - if child: - target.appendChild(child) - else: - child = n.cloneNode(False) - if hasattr(child,'data'): - child.data = re.sub(r'\s+',' ',child.data) - target.appendChild(child) - self._translateChildren(n,target=child) - - #~ Translate the given node as a description, into the description subnode - #~ of the target. If no description subnode is present in the target it - #~ is created. - def _translateDescription( self, node, target=None, tag='description', **kwargs ): - description = self._getChild(tag,root=target) - if not description: - description = target.appendChild(self._createNode(tag)) - self._translateChildren(node,target=description) - return description - - #~ Top level translation of: <doxygen ...>...</doxygen>, - #~ translates the children. - def _translate_doxygen( self, node ): - #~ print '_translate_doxygen:', node.nodeName - result = [] - for n in node.childNodes: - newNode = self._translateNode(n) - if newNode: - result.append(newNode) - return result - - #~ Top level translation of: - #~ <doxygenindex ...> - #~ <compound ...> - #~ <member ...> - #~ <name>...</name> - #~ </member> - #~ ... - #~ </compound> - #~ ... - #~ </doxygenindex> - #~ builds the class and symbol sections, if requested. - def _translate_doxygenindex( self, node ): - #~ print '_translate_doxygenindex:', node.nodeName - if self.args['index']: - entries = [] - classes = [] - #~ Accumulate all the index entries we care about. - for n in node.childNodes: - if n.nodeName == 'compound': - if n.getAttribute('kind') not in ('file','dir','define'): - cpp = self._cppName(self._getChildData('name',root=n)) - entry = { - 'name' : cpp['name'], - 'compoundname' : cpp['compoundname'], - 'id' : n.getAttribute('refid') - } - if n.getAttribute('kind') in ('class','struct'): - classes.append(entry) - entries.append(entry) - for m in n.childNodes: - if m.nodeName == 'member': - cpp = self._cppName(self._getChildData('name',root=m)) - entry = { - 'name' : cpp['name'], - 'compoundname' : cpp['compoundname'], - 'id' : n.getAttribute('refid') - } - if hasattr(m,'getAttribute') and m.getAttribute('kind') in ('class','struct'): - classes.append(entry) - entries.append(entry) - #~ Put them in a sensible order. - entries.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower())) - classes.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower())) - #~ And generate the BoostBook for them. 
- self._translate_index_(entries,target=self.section['index']) - self._translate_index_(classes,target=self.section['classes']) - return None - - #~ Translate a set of index entries in the BoostBook output. The output - #~ is grouped into groups of the first letter of the entry names. - def _translate_index_(self, entries, target=None, **kwargs ): - i = 0 - targetID = target.getAttribute('id') - while i < len(entries): - dividerKey = entries[i]['name'][0].upper() - divider = target.appendChild(self._createNode('indexdiv',id=targetID+'.'+dividerKey)) - divider.appendChild(self._createText('title',dividerKey)) - while i < len(entries) and dividerKey == entries[i]['name'][0].upper(): - iename = entries[i]['name'] - ie = divider.appendChild(self._createNode('indexentry')) - ie = ie.appendChild(self._createText('primaryie',iename)) - while i < len(entries) and entries[i]['name'] == iename: - ie.appendChild(self.boostbook.createTextNode(' (')) - ie.appendChild(self._createText( - 'link',entries[i]['compoundname'],linkend=entries[i]['id'])) - ie.appendChild(self.boostbook.createTextNode(')')) - i += 1 - - #~ Translate a <compounddef ...>...</compounddef>, - #~ by retranslating with the "kind" of compounddef. - def _translate_compounddef( self, node, target=None, **kwargs ): - return self._translateNode(node,node.getAttribute('kind')) - - #~ Translate a <compounddef kind="namespace"...>...</compounddef>. For - #~ namespaces we just collect the information for later use as there is no - #~ currently namespaces are not included in the BoostBook format. In the future - #~ it might be good to generate a namespace index. - def _translate_compounddef_namespace( self, node, target=None, **kwargs ): - namespace = { - 'id' : node.getAttribute('id'), - 'kind' : 'namespace', - 'name' : self._getChildData('compoundname',root=node), - 'brief' : self._getChildData('briefdescription',root=node), - 'detailed' : self._getChildData('detaileddescription',root=node), - 'parsed' : False - } - if self.symbols.has_key(namespace['name']): - if not self.symbols[namespace['name']]['parsed']: - self.symbols[namespace['name']]['parsed'] = True - #~ for n in node.childNodes: - #~ if hasattr(n,'getAttribute'): - #~ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) - else: - self.symbols[namespace['name']] = namespace - #~ self._setID(namespace['id'],namespace['name']) - return None - - #~ Translate a <compounddef kind="class"...>...</compounddef>, which - #~ forwards to the kind=struct as they are the same. - def _translate_compounddef_class( self, node, target=None, **kwargs ): - return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs) - - #~ Translate a <compounddef kind="struct"...>...</compounddef> into: - #~ <header id="?" name="?"> - #~ <struct name="?"> - #~ ... - #~ </struct> - #~ </header> - def _translate_compounddef_struct( self, node, tag='struct', target=None, **kwargs ): - result = None - includes = self._getChild('includes',root=node) - if includes: - ## Add the header into the output table. - self._translate_compounddef_includes_(includes,includes,**kwargs) - ## Compounds are the declared symbols, classes, types, etc. - ## We add them to the symbol table, along with the partial DOM for them - ## so that they can be organized into the output later. 
- compoundname = self._getChildData('compoundname',root=node) - compoundname = self._cppName(compoundname) - self._setID(node.getAttribute('id'),compoundname['compoundname']) - struct = self._createNode(tag,name=compoundname['name'].split('::')[-1]) - self.symbols[compoundname['compoundname']] = { - 'header' : includes.firstChild.data, - 'namespace' : compoundname['namespace'], - 'id' : node.getAttribute('id'), - 'kind' : tag, - 'name' : compoundname['name'], - 'dom' : struct - } - ## Add the children which will be the members of the struct. - for n in node.childNodes: - self._translateNode(n,target=struct,scope=compoundname['compoundname']) - result = struct - return result - - #~ Translate a <compounddef ...><includes ...>...</includes></compounddef>, - def _translate_compounddef_includes_( self, node, target=None, **kwargs ): - name = node.firstChild.data - if not self.symbols.has_key(name): - self._setID(node.getAttribute('refid'),name) - self.symbols[name] = { - 'kind' : 'header', - 'id' : node.getAttribute('refid'), - 'dom' : self._createNode('header', - id=node.getAttribute('refid'), - name=name) - } - return None - - #~ Translate a <basecompoundref...>...</basecompoundref> into: - #~ <inherit access="?"> - #~ ... - #~ </inherit> - def _translate_basecompoundref( self, ref, target=None, **kwargs ): - inherit = target.appendChild(self._createNode('inherit', - access=ref.getAttribute('prot'))) - self._translateChildren(ref,target=inherit) - return - - #~ Translate: - #~ <templateparamlist> - #~ <param> - #~ <type>...</type> - #~ <declname>...</declname> - #~ <defname>...</defname> - #~ <defval>...</defval> - #~ </param> - #~ ... - #~ </templateparamlist> - #~ Into: - #~ <template> - #~ <template-type-parameter name="?" /> - #~ <template-nontype-parameter name="?"> - #~ <type>?</type> - #~ <default>?</default> - #~ </template-nontype-parameter> - #~ </template> - def _translate_templateparamlist( self, templateparamlist, target=None, **kwargs ): - template = target.appendChild(self._createNode('template')) - for param in templateparamlist.childNodes: - if param.nodeName == 'param': - type = self._getChildData('type',root=param) - defval = self._getChild('defval',root=param) - paramKind = None - if type in ('class','typename'): - paramKind = 'template-type-parameter' - else: - paramKind = 'template-nontype-parameter' - templateParam = template.appendChild( - self._createNode(paramKind, - name=self._getChildData('declname',root=param))) - if paramKind == 'template-nontype-parameter': - template_type = templateParam.appendChild(self._createNode('type')) - self._translate_type( - self._getChild('type',root=param),target=template_type) - if defval: - value = self._getChildData('ref',root=defval.firstChild) - if not value: - value = self._getData(defval) - templateParam.appendChild(self._createText('default',value)) - return template - - #~ Translate: - #~ <briefdescription>...</briefdescription> - #~ Into: - #~ <purpose>...</purpose> - def _translate_briefdescription( self, brief, target=None, **kwargs ): - self._translateDescription(brief,target=target,**kwargs) - return self._translateDescription(brief,target=target,tag='purpose',**kwargs) - - #~ Translate: - #~ <detaileddescription>...</detaileddescription> - #~ Into: - #~ <description>...</description> - def _translate_detaileddescription( self, detailed, target=None, **kwargs ): - return self._translateDescription(detailed,target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="?">...</sectiondef> - #~ With kind specific 
translation. - def _translate_sectiondef( self, sectiondef, target=None, **kwargs ): - self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs) - - #~ Translate non-function sections. - def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ): - for n in sectiondef.childNodes: - if hasattr(n,'getAttribute'): - self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) - return None - - #~ Translate: - #~ <sectiondef kind="public-type">...</sectiondef> - def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ): - return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="public-sttrib">...</sectiondef> - def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs): - return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="?-func">...</sectiondef> - #~ All the various function group translations end up here for which - #~ they are translated into: - #~ <method-group name="?"> - #~ ... - #~ </method-group> - def _translate_sectiondef_func_( self, sectiondef, name='functions', target=None, **kwargs ): - members = target.appendChild(self._createNode('method-group',name=name)) - for n in sectiondef.childNodes: - if hasattr(n,'getAttribute'): - self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs) - return members - - #~ Translate: - #~ <sectiondef kind="public-func">...</sectiondef> - def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ): - return self._translate_sectiondef_func_(sectiondef, - name='public member functions',target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="public-static-func">...</sectiondef> - def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs): - return self._translate_sectiondef_func_(sectiondef, - name='public static functions',target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="protected-func">...</sectiondef> - def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ): - return self._translate_sectiondef_func_(sectiondef, - name='protected member functions',target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="private-static-func">...</sectiondef> - def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs): - return self._translate_sectiondef_func_(sectiondef, - name='private static functions',target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="public-func">...</sectiondef> - def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ): - return self._translate_sectiondef_func_(sectiondef, - name='private member functions',target=target,**kwargs) - - #~ Translate: - #~ <sectiondef kind="user-defined"><header>...</header>...</sectiondef> - def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ): - return self._translate_sectiondef_func_(sectiondef, - name=self._getChildData('header', root=sectiondef),target=target,**kwargs) - - #~ Translate: - #~ <memberdef kind="typedef" id="?"> - #~ <name>...</name> - #~ </memberdef> - #~ To: - #~ <typedef id="?" 
name="?"> - #~ <type>...</type> - #~ </typedef> - def _translate_memberdef_typedef( self, memberdef, target=None, scope=None, **kwargs ): - self._setID(memberdef.getAttribute('id'), - scope+'::'+self._getChildData('name',root=memberdef)) - typedef = target.appendChild(self._createNode('typedef', - id=memberdef.getAttribute('id'), - name=self._getChildData('name',root=memberdef))) - typedef_type = typedef.appendChild(self._createNode('type')) - self._translate_type(self._getChild('type',root=memberdef),target=typedef_type) - return typedef - - #~ Translate: - #~ <memberdef kind="function" id="?" const="?" static="?" explicit="?" inline="?"> - #~ <name>...</name> - #~ </memberdef> - #~ To: - #~ <method name="?" cv="?" specifiers="?"> - #~ ... - #~ </method> - def _translate_memberdef_function( self, memberdef, target=None, scope=None, **kwargs ): - name = self._getChildData('name',root=memberdef) - self._setID(memberdef.getAttribute('id'),scope+'::'+name) - ## Check if we have some specific kind of method. - if name == scope.split('::')[-1]: - kind = 'constructor' - target = target.parentNode - elif name == '~'+scope.split('::')[-1]: - kind = 'destructor' - target = target.parentNode - elif name == 'operator=': - kind = 'copy-assignment' - target = target.parentNode - else: - kind = 'method' - method = target.appendChild(self._createNode(kind, - # id=memberdef.getAttribute('id'), - name=name, - cv=' '.join([ - if_attribute(memberdef,'const','const','').strip() - ]), - specifiers=' '.join([ - if_attribute(memberdef,'static','static',''), - if_attribute(memberdef,'explicit','explicit',''), - if_attribute(memberdef,'inline','inline','') - ]).strip() - )) - ## We iterate the children to translate each part of the function. - for n in memberdef.childNodes: - self._translateNode(memberdef,'function',n,target=method) - return method - - #~ Translate: - #~ <memberdef kind="function"...><templateparamlist>...</templateparamlist></memberdef> - def _translate_memberdef_function_templateparamlist( - self, templateparamlist, target=None, **kwargs ): - return self._translate_templateparamlist(templateparamlist,target=target,**kwargs) - - #~ Translate: - #~ <memberdef kind="function"...><type>...</type></memberdef> - #~ To: - #~ ...<type>?</type> - def _translate_memberdef_function_type( self, resultType, target=None, **kwargs ): - methodType = self._createNode('type') - self._translate_type(resultType,target=methodType) - if methodType.hasChildNodes(): - target.appendChild(methodType) - return methodType - - #~ Translate: - #~ <memberdef kind="function"...><briefdescription>...</briefdescription></memberdef> - def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ): - result = self._translateDescription(description,target=target,**kwargs) - ## For functions if we translate the brief docs to the purpose they end up - ## right above the regular description. And since we just added the brief to that - ## on the previous line, don't bother with the repetition. 
- # result = self._translateDescription(description,target=target,tag='purpose',**kwargs) - return result - - #~ Translate: - #~ <memberdef kind="function"...><detaileddescription>...</detaileddescription></memberdef> - def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ): - return self._translateDescription(description,target=target,**kwargs) - - #~ Translate: - #~ <memberdef kind="function"...><inbodydescription>...</inbodydescription></memberdef> - def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ): - return self._translateDescription(description,target=target,**kwargs) - - #~ Translate: - #~ <memberdef kind="function"...><param>...</param></memberdef> - def _translate_memberdef_function_param( self, param, target=None, **kwargs ): - return self._translate_param(param,target=target,**kwargs) - - #~ Translate: - #~ <memberdef kind="variable" id="?"> - #~ <name>...</name> - #~ <type>...</type> - #~ </memberdef> - #~ To: - #~ <data-member id="?" name="?"> - #~ <type>...</type> - #~ </data-member> - def _translate_memberdef_variable( self, memberdef, target=None, scope=None, **kwargs ): - self._setID(memberdef.getAttribute('id'), - scope+'::'+self._getChildData('name',root=memberdef)) - data_member = target.appendChild(self._createNode('data-member', - id=memberdef.getAttribute('id'), - name=self._getChildData('name',root=memberdef))) - data_member_type = data_member.appendChild(self._createNode('type')) - self._translate_type(self._getChild('type',root=memberdef),target=data_member_type) - - #~ Translate: - #~ <memberdef kind="enum" id="?"> - #~ <name>...</name> - #~ ... - #~ </memberdef> - #~ To: - #~ <enum id="?" name="?"> - #~ ... - #~ </enum> - def _translate_memberdef_enum( self, memberdef, target=None, scope=None, **kwargs ): - self._setID(memberdef.getAttribute('id'), - scope+'::'+self._getChildData('name',root=memberdef)) - enum = target.appendChild(self._createNode('enum', - id=memberdef.getAttribute('id'), - name=self._getChildData('name',root=memberdef))) - for n in memberdef.childNodes: - self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs) - return enum - - #~ Translate: - #~ <memberdef kind="enum"...> - #~ <enumvalue id="?"> - #~ <name>...</name> - #~ <initializer>...</initializer> - #~ </enumvalue> - #~ </memberdef> - #~ To: - #~ <enumvalue id="?" name="?"> - #~ <default>...</default> - #~ </enumvalue> - def _translate_memberdef_enum_enumvalue( self, enumvalue, target=None, scope=None, **kwargs ): - self._setID(enumvalue.getAttribute('id'), - scope+'::'+self._getChildData('name',root=enumvalue)) - value = target.appendChild(self._createNode('enumvalue', - id=enumvalue.getAttribute('id'), - name=self._getChildData('name',root=enumvalue))) - initializer = self._getChild('initializer',root=enumvalue) - if initializer: - self._translateChildren(initializer, - target=target.appendChild(self._createNode('default'))) - return value - - #~ Translate: - #~ <param> - #~ <type>...</type> - #~ <declname>...</declname> - #~ <defval>...</defval> - #~ </param> - #~ To: - #~ <parameter name="?"> - #~ <paramtype>...</paramtype> - #~ ... 
- #~ </parameter> - def _translate_param( self, param, target=None, **kwargs): - parameter = target.appendChild(self._createNode('parameter', - name=self._getChildData('declname',root=param))) - paramtype = parameter.appendChild(self._createNode('paramtype')) - self._translate_type(self._getChild('type',root=param),target=paramtype) - defval = self._getChild('defval',root=param) - if defval: - self._translateChildren(self._getChild('defval',root=param),target=parameter) - return parameter - - #~ Translate: - #~ <ref kindref="?" ...>...</ref> - def _translate_ref( self, ref, **kwargs ): - return self._translateNode(ref,ref.getAttribute('kindref')) - - #~ Translate: - #~ <ref refid="?" kindref="compound">...</ref> - #~ To: - #~ <link linkend="?"><classname>...</classname></link> - def _translate_ref_compound( self, ref, **kwargs ): - result = self._createNode('link',linkend=ref.getAttribute('refid')) - classname = result.appendChild(self._createNode('classname')) - self._translateChildren(ref,target=classname) - return result - - #~ Translate: - #~ <ref refid="?" kindref="member">...</ref> - #~ To: - #~ <link linkend="?">...</link> - def _translate_ref_member( self, ref, **kwargs ): - result = self._createNode('link',linkend=ref.getAttribute('refid')) - self._translateChildren(ref,target=result) - return result - - #~ Translate: - #~ <type>...</type> - def _translate_type( self, type, target=None, **kwargs ): - result = self._translateChildren(type,target=target,**kwargs) - #~ Filter types to clean up various readability problems, most notably - #~ with really long types. - xml = target.toxml('utf-8'); - if ( - xml.startswith('<type>boost::mpl::') or - xml.startswith('<type>BOOST_PP_') or - re.match('<type>boost::(lazy_)?(enable|disable)_if',xml) - ): - while target.firstChild: - target.removeChild(target.firstChild) - target.appendChild(self._createText('emphasis','unspecified')) - return result - - def _getChild( self, tag = None, id = None, name = None, root = None ): - if not root: - root = self.boostbook.documentElement - for n in root.childNodes: - found = True - if tag and found: - found = found and tag == n.nodeName - if id and found: - if n.hasAttribute('id'): - found = found and n.getAttribute('id') == id - else: - found = found and n.hasAttribute('id') and n.getAttribute('id') == id - if name and found: - found = found and n.hasAttribute('name') and n.getAttribute('name') == name - if found: - #~ print '--|', n - return n - return None - - def _getChildData( self, tag, **kwargs ): - return self._getData(self._getChild(tag,**kwargs),**kwargs) - - def _getData( self, node, **kwargs ): - if node: - text = self._getChild('#text',root=node) - if text: - return text.data.strip() - return '' - - def _cppName( self, type ): - parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':')) - result = { - 'compoundname' : parts.group(1), - 'namespace' : parts.group(1).split('::')[0:-1], - 'name' : parts.group(1).split('::')[-1], - 'specialization' : parts.group(2) - } - if result['namespace'] and len(result['namespace']) > 0: - namespace = '::'.join(result['namespace']) - while ( - len(result['namespace']) > 0 and ( - not self.symbols.has_key(namespace) or - self.symbols[namespace]['kind'] != 'namespace') - ): - result['name'] = result['namespace'].pop()+'::'+result['name'] - namespace = '::'.join(result['namespace']) - return result - - def _createNode( self, tag, **kwargs ): - result = self.boostbook.createElement(tag) - for k in kwargs.keys(): - if kwargs[k] != '': - if k == 'id': - 
result.setAttribute('id',kwargs[k]) - else: - result.setAttribute(k,kwargs[k]) - return result - - def _createText( self, tag, data, **kwargs ): - result = self._createNode(tag,**kwargs) - data = data.strip() - if len(data) > 0: - result.appendChild(self.boostbook.createTextNode(data)) - return result - - -def main( xmldir=None, output=None, id=None, title=None, index=False ): - #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output) - - input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) ) - input.sort - translator = Doxygen2BoostBook(id=id, title=title, index=index) - #~ Feed in the namespaces first to build up the set of namespaces - #~ and definitions so that lookup is unambiguous when reading in the definitions. - namespace_files = filter( - lambda x: - os.path.basename(x).startswith('namespace'), - input) - decl_files = filter( - lambda x: - not os.path.basename(x).startswith('namespace') and not os.path.basename(x).startswith('_'), - input) - for dox in namespace_files: - #~ print '--|',os.path.basename(dox) - translator.addDox(xml.dom.minidom.parse(dox)) - for dox in decl_files: - #~ print '--|',os.path.basename(dox) - translator.addDox(xml.dom.minidom.parse(dox)) - - if output: - output = open(output,'w') - else: - output = sys.stdout - if output: - output.write(translator.tostring()) - - -main( **get_args() ) diff --git a/jam-files/boost-build/tools/doxygen-config.jam b/jam-files/boost-build/tools/doxygen-config.jam deleted file mode 100644 index 2cd2ccae..00000000 --- a/jam-files/boost-build/tools/doxygen-config.jam +++ /dev/null @@ -1,11 +0,0 @@ -#~ Copyright 2005, 2006 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for Doxygen tools. To use, just import this module. - -import toolset : using ; - -ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ; - -using doxygen ; diff --git a/jam-files/boost-build/tools/doxygen.jam b/jam-files/boost-build/tools/doxygen.jam deleted file mode 100644 index 8394848d..00000000 --- a/jam-files/boost-build/tools/doxygen.jam +++ /dev/null @@ -1,776 +0,0 @@ -# Copyright 2003, 2004 Douglas Gregor -# Copyright 2003, 2004, 2005 Vladimir Prus -# Copyright 2006 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines rules to handle generation of various outputs from source -# files documented with doxygen comments. The supported transformations are: -# -# * Source -> Doxygen XML -> BoostBook XML -# * Source -> Doxygen HTML -# -# The type of transformation is selected based on the target requested. For -# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an -# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen -# HTML specifying a target with an ".html" suffix will produce a directory -# <target> with the Doxygen html files, and a <target>.html file redirecting to -# that directory. 
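For illustration, a minimal sketch of how these transformations are usually requested from a Jamfile (the target and header names below are placeholders, and the sketch assumes both 'using boostbook ;' and 'using doxygen ;' have already been configured, e.g. in user-config.jam):

    # The ".xml" (or empty) suffix selects Source -> Doxygen XML -> BoostBook XML.
    # EXTRACT_ALL is just one example of a Doxyfile option forwarded via <doxygen:param>.
    doxygen mylib.xml
        : mylib.hpp
        : <doxygen:param>EXTRACT_ALL=YES
        ;

    # The ".html" suffix selects Source -> Doxygen HTML: a "mylib" directory of
    # HTML files plus a mylib.html page redirecting into that directory.
    doxygen mylib.html
        : mylib.hpp
        ;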
- -import "class" : new ; -import targets ; -import feature ; -import property ; -import generators ; -import boostbook ; -import type ; -import path ; -import print ; -import regex ; -import stage ; -import project ; -import xsltproc ; -import make ; -import os ; -import toolset : flags ; -import alias ; -import common ; -import modules ; -import project ; -import utility ; -import errors ; - - -# Use to specify extra configuration paramters. These get translated -# into a doxyfile which configures the building of the docs. -feature.feature doxygen:param : : free ; - -# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option. -feature.feature prefix : : free ; - -# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option. -feature.feature reftitle : : free ; - -# Which processor to use for various translations from Doxygen. -feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ; - -# To generate, or not, index sections. -feature.feature doxygen.doxproc.index : no yes : propagated incidental ; - -# The ID for the resulting BoostBook reference section. -feature.feature doxygen.doxproc.id : : free ; - -# The title for the resulting BoostBook reference section. -feature.feature doxygen.doxproc.title : : free ; - -# Location for images when generating XML -feature.feature doxygen:xml-imagedir : : free ; - -# Indicates whether the entire directory should be deleted -feature.feature doxygen.rmdir : off on : optional incidental ; - -# Doxygen configuration input file. -type.register DOXYFILE : doxyfile ; - -# Doxygen XML multi-file output. -type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ; - -# Doxygen XML coallesed output. -type.register DOXYGEN_XML : doxygen : XML ; - -# Doxygen HTML multifile directory. -type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ; - -# Redirection HTML file to HTML multifile directory. -type.register DOXYGEN_HTML : : HTML ; - -type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ; - -# Initialize the Doxygen module. Parameters are: -# name: the name of the 'doxygen' executable. If not specified, the name -# 'doxygen' will be used -# -rule init ( name ? ) -{ - if ! $(.initialized) - { - .initialized = true ; - - .doxproc = [ modules.binding $(__name__) ] ; - .doxproc = $(.doxproc:D)/doxproc.py ; - - generators.register-composing doxygen.headers-to-doxyfile - : H HPP CPP : DOXYFILE ; - generators.register-standard doxygen.run - : DOXYFILE : DOXYGEN_XML_MULTIFILE ; - generators.register-standard doxygen.xml-dir-to-boostbook - : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ; - generators.register-standard doxygen.xml-to-boostbook - : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ; - generators.register-standard doxygen.collect - : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ; - generators.register-standard doxygen.run - : DOXYFILE : DOXYGEN_HTML_MULTIFILE ; - generators.register-standard doxygen.html-redirect - : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ; - generators.register-standard doxygen.copy-latex-pngs - : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ; - - IMPORT $(__name__) : doxygen : : doxygen ; - } - - if $(name) - { - modify-config ; - .doxygen = $(name) ; - check-doxygen ; - } - - if ! $(.doxygen) - { - check-doxygen ; - } -} - -rule freeze-config ( ) -{ - if ! $(.initialized) - { - errors.user-error "doxygen must be initialized before it can be used." ; - } - if ! 
$(.config-frozen) - { - .config-frozen = true ; - - if [ .is-cygwin ] - { - .is-cygwin = true ; - } - } -} - -rule modify-config ( ) -{ - if $(.config-frozen) - { - errors.user-error "Cannot change doxygen after it has been used." ; - } -} - -rule check-doxygen ( ) -{ - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using doxygen ":" $(.doxygen) ; - } - local extra-paths ; - if [ os.name ] = NT - { - local ProgramFiles = [ modules.peek : ProgramFiles ] ; - if $(ProgramFiles) - { - extra-paths = "$(ProgramFiles:J= )" ; - } - else - { - extra-paths = "C:\\Program Files" ; - } - } - .doxygen = [ common.get-invocation-command doxygen : - doxygen : $(.doxygen) : $(extra-paths) ] ; -} - -rule name ( ) -{ - freeze-config ; - return $(.doxygen) ; -} - -rule .is-cygwin ( ) -{ - if [ os.on-windows ] - { - local file = [ path.make [ modules.binding $(__name__) ] ] ; - local dir = [ path.native - [ path.join [ path.parent $(file) ] doxygen ] ] ; - local command = - "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ; - result = [ SHELL $(command) ] ; - if [ MATCH "(Parsing file /)" : $(result) ] - { - return true ; - } - } -} - -# Runs Doxygen on the given Doxygen configuration file (the source) to generate -# the Doxygen files. The output is dumped according to the settings in the -# Doxygen configuration file, not according to the target! Because of this, we -# essentially "touch" the target file, in effect making it look like we have -# really written something useful to it. Anyone that uses this action must deal -# with this behavior. -# -actions doxygen-action -{ - $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)" -} - - -# Runs the Python doxproc XML processor. -# -actions doxproc -{ - python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)" -} - - -rule translate-path ( path ) -{ - freeze-config ; - if [ os.on-windows ] - { - if [ os.name ] = CYGWIN - { - if $(.is-cygwin) - { - return $(path) ; - } - else - { - return $(path:W) ; - } - } - else - { - if $(.is-cygwin) - { - match = [ MATCH ^(.):(.*) : $(path) ] ; - if $(match) - { - return /cygdrive/$(match[1])$(match[2]:T) ; - } - else - { - return $(path:T) ; - } - } - else - { - return $(path) ; - } - } - } - else - { - return $(path) ; - } -} - - -# Generates a doxygen configuration file (doxyfile) given a set of C++ sources -# and a property list that may contain <doxygen:param> features. -# -rule headers-to-doxyfile ( target : sources * : properties * ) -{ - local text "# Generated by Boost.Build version 2" ; - - local output-dir ; - - # Translate <doxygen:param> into command line flags. - for local param in [ feature.get-values <doxygen:param> : $(properties) ] - { - local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ; - if $(namevalue[1]) = OUTPUT_DIRECTORY - { - output-dir = [ translate-path - [ utility.unquote $(namevalue[2]) ] ] ; - text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ; - } - else - { - text += "$(namevalue[1]) = $(namevalue[2])" ; - } - } - - if ! $(output-dir) - { - output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ; - text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ; - } - - local headers = ; - for local header in $(sources:G=) - { - header = [ translate-path $(header) ] ; - headers += \"$(header)\" ; - } - - # Doxygen generates LaTex by default. So disable it unconditionally, or at - # least until someone needs, and hence writes support for, LaTex output. 
- text += "GENERATE_LATEX = NO" ; - text += "INPUT = $(headers:J= )" ; - print.output $(target) plain ; - print.text $(text) : true ; -} - - -# Run Doxygen. See doxygen-action for a description of the strange properties of -# this rule. -# -rule run ( target : source : properties * ) -{ - freeze-config ; - if <doxygen.rmdir>on in $(properties) - { - local output-dir = - [ path.make - [ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) : - $(properties) ] ] ; - local html-dir = - [ path.make - [ MATCH <doxygen:param>HTML_OUTPUT=(.*) : - $(properties) ] ] ; - if $(output-dir) && $(html-dir) && - [ path.glob $(output-dir) : $(html-dir) ] - { - HTMLDIR on $(target) = - [ path.native [ path.join $(output-dir) $(html-dir) ] ] ; - rm-htmldir $(target) ; - } - } - doxygen-action $(target) : $(source) ; - NAME on $(target) = $(.doxygen) ; - RM on $(target) = [ modules.peek common : RM ] ; - *.XML on $(target) = - [ path.native - [ path.join - [ path.make [ on $(target) return $(LOCATE) ] ] - $(target:B:S=) - *.xml ] ] ; -} - -if [ os.name ] = NT -{ - RMDIR = rmdir /s /q ; -} -else -{ - RMDIR = rm -rf ; -} - -actions quietly rm-htmldir -{ - $(RMDIR) $(HTMLDIR) -} - -# The rules below require Boost.Book stylesheets, so we need some code to check -# that the boostbook module has actualy been initialized. -# -rule check-boostbook ( ) -{ - if ! [ modules.peek boostbook : .initialized ] - { - ECHO "error: the boostbook module is not initialized" ; - ECHO "error: you've attempted to use the 'doxygen' toolset, " ; - ECHO "error: which requires Boost.Book," ; - ECHO "error: but never initialized Boost.Book." ; - EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ; - } -} - - -# Collect the set of Doxygen XML files into a single XML source file that can be -# handled by an XSLT processor. The source is completely ignored (see -# doxygen-action), because this action picks up the Doxygen XML index file -# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL -# program and take a "-o output.xml" argument (grrrr). The target of the -# collection will be a single Doxygen XML file. -# -rule collect ( target : source : properties * ) -{ - check-boostbook ; - local collect-xsl-dir - = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ; - local source-path - = [ path.make [ on $(source) return $(LOCATE) ] ] ; - local collect-path - = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ; - local native-path - = [ path.native $(collect-path) ] ; - local real-source - = [ path.native [ path.join $(collect-path) index.xml ] ] ; - xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl) - : <xsl:param>doxygen.xml.path=$(native-path) ; -} - - -# Translate Doxygen XML into BoostBook. 
-# -rule xml-to-boostbook ( target : source : properties * ) -{ - check-boostbook ; - local xsl-dir = [ boostbook.xsl-dir ] ; - local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen - doxygen2boostbook.xsl ] ] ; - - local xslt-properties = $(properties) ; - for local prefix in [ feature.get-values <prefix> : $(properties) ] - { - xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ; - } - for local title in [ feature.get-values <reftitle> : $(properties) ] - { - xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ; - } - - xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ; -} - - -flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ; -flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ; -flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ; - - -rule xml-dir-to-boostbook ( target : source : properties * ) -{ - DOXPROC on $(target) = $(.doxproc) ; - - LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ; - - doxygen.doxproc $(target) : $(source:S=) ; -} - - -# Generate the HTML redirect to HTML dir index.html file. -# -rule html-redirect ( target : source : properties * ) -{ - local uri = "$(target:B)/index.html" ; - print.output $(target) plain ; - print.text -"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" - \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\"> -<html xmlns=\"http://www.w3.org/1999/xhtml\"> -<head> - <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" /> - - <title></title> -</head> - -<body> - Automatic redirection failed, please go to <a href= - \"$(uri)\">$(uri)</a>. -</body> -</html> -" - : true ; -} - -rule copy-latex-pngs ( target : source : requirements * ) -{ - local directory = [ path.native - [ feature.get-values <doxygen:xml-imagedir> : - $(requirements) ] ] ; - - local location = [ on $(target) return $(LOCATE) ] ; - - local pdf-location = - [ path.native - [ path.join - [ path.make $(location) ] - [ path.make $(directory) ] ] ] ; - local html-location = - [ path.native - [ path.join - . - html - [ path.make $(directory) ] ] ] ; - - common.MkDir $(pdf-location) ; - common.MkDir $(html-location) ; - - DEPENDS $(target) : $(pdf-location) $(html-location) ; - - if [ os.name ] = NT - { - CP on $(target) = copy /y ; - FROM on $(target) = \\*.png ; - TOHTML on $(target) = .\\html\\$(directory) ; - TOPDF on $(target) = \\$(directory) ; - } - else - { - CP on $(target) = cp ; - FROM on $(target) = /*.png ; - TOHTML on $(target) = ./html/$(directory) ; - TOPDF on $(target) = $(target:D)/$(directory) ; - } -} - -actions copy-latex-pngs -{ - $(CP) $(>:S=)$(FROM) $(TOHTML) - $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF) - echo "Stamped" > "$(<)" -} - -# building latex images for doxygen XML depends -# on latex, dvips, and ps being in your PATH. -# This is true for most Unix installs, but -# not on Win32, where you will need to install -# MkTex and Ghostscript and add these tools -# to your path. - -actions check-latex -{ - latex -version >$(<) -} - -actions check-dvips -{ - dvips -version >$(<) -} - -if [ os.name ] = "NT" -{ - actions check-gs - { - gswin32c -version >$(<) - } -} -else -{ - actions check-gs - { - gs -version >$(<) - } -} - -rule check-tools ( ) -{ - if ! $(.check-tools-targets) - { - # Find the root project. 
- local root-project = [ project.current ] ; - root-project = [ $(root-project).project-module ] ; - while - [ project.attribute $(root-project) parent-module ] && - [ project.attribute $(root-project) parent-module ] != user-config - { - root-project = - [ project.attribute $(root-project) parent-module ] ; - } - - .latex.check = [ new file-target latex.check - : - : [ project.target $(root-project) ] - : [ new action : doxygen.check-latex ] - : - ] ; - .dvips.check = [ new file-target dvips.check - : - : [ project.target $(root-project) ] - : [ new action : doxygen.check-dvips ] - : - ] ; - .gs.check = [ new file-target gs.check - : - : [ project.target $(root-project) ] - : [ new action : doxygen.check-gs ] - : - ] ; - .check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ; - } - return $(.check-tools-targets) ; -} - -project.initialize $(__name__) ; -project doxygen ; - -class doxygen-check-tools-target-class : basic-target -{ - import doxygen ; - rule construct ( name : sources * : property-set ) - { - return [ property-set.empty ] [ doxygen.check-tools ] ; - } -} - -local project = [ project.current ] ; - -targets.main-target-alternative - [ new doxygen-check-tools-target-class check-tools : $(project) - : [ targets.main-target-sources : check-tools : no-renaming ] - : [ targets.main-target-requirements : $(project) ] - : [ targets.main-target-default-build : $(project) ] - : [ targets.main-target-usage-requirements : $(project) ] - ] ; - -# User-level rule to generate BoostBook XML from a set of headers via Doxygen. -# -rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * ) -{ - freeze-config ; - local project = [ project.current ] ; - - if $(target:S) = .html - { - # Build an HTML directory from the sources. - local html-location = [ feature.get-values <location> : $(requirements) ] ; - local output-dir ; - if [ $(project).get build-dir ] - { - # Explicitly specified build dir. Add html at the end. - output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ; - } - else - { - # Trim 'bin' from implicit build dir, for no other reason that backward - # compatibility. - output-dir = [ path.join [ path.parent [ $(project).build-dir ] ] - $(html-location:E=html) ] ; - } - output-dir = [ path.root $(output-dir) [ path.pwd ] ] ; - local output-dir-native = [ path.native $(output-dir) ] ; - requirements = [ property.change $(requirements) : <location> ] ; - - ## The doxygen configuration file. - targets.main-target-alternative - [ new typed-target $(target:S=.tag) : $(project) : DOXYFILE - : [ targets.main-target-sources $(sources) : $(target:S=.tag) ] - : [ targets.main-target-requirements $(requirements) - <doxygen:param>GENERATE_HTML=YES - <doxygen:param>GENERATE_XML=NO - <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\"" - <doxygen:param>HTML_OUTPUT=$(target:B) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(target:S=.tag) ; - - ## The html directory to generate by running doxygen. 
- targets.main-target-alternative - [ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE - : $(target:S=.tag) - : [ targets.main-target-requirements $(requirements) - <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\"" - <doxygen:param>HTML_OUTPUT=$(target:B) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(target:S=.dir) ; - - ## The redirect html file into the generated html. - targets.main-target-alternative - [ new typed-target $(target) : $(project) : DOXYGEN_HTML - : $(target:S=.dir) - : [ targets.main-target-requirements $(requirements) - <location>$(output-dir) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - } - else - { - # Build a BoostBook XML file from the sources. - local location-xml = [ feature.get-values <location> : $(requirements) ] ; - requirements = [ property.change $(requirements) : <location> ] ; - local target-xml = $(target:B=$(target:B)-xml) ; - - # Check whether we need to build images - local images-location = - [ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ; - if $(images-location) - { - doxygen $(target).doxygen-xml-images.html : $(sources) - : $(requirements) - <doxygen.rmdir>on - <doxygen:param>QUIET=YES - <doxygen:param>WARNINGS=NO - <doxygen:param>WARN_IF_UNDOCUMENTED=NO - <dependency>/doxygen//check-tools ; - $(project).mark-target-as-explicit - $(target).doxygen-xml-images.html ; - - targets.main-target-alternative - [ new typed-target $(target).doxygen-xml-images - : $(project) : DOXYGEN_XML_IMAGES - : $(target).doxygen-xml-images.html - : [ targets.main-target-requirements $(requirements) - : $(project) ] - : [ targets.main-target-default-build $(default-build) - : $(project) ] - ] ; - - $(project).mark-target-as-explicit - $(target).doxygen-xml-images ; - - if ! [ regex.match "^(.*/)$" : $(images-location) ] - { - images-location = $(images-location)/ ; - } - - requirements += - <dependency>$(target).doxygen-xml-images - <xsl:param>boost.doxygen.formuladir=$(images-location) ; - } - - ## The doxygen configuration file. - targets.main-target-alternative - [ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE - : [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ] - : [ targets.main-target-requirements $(requirements) - <doxygen:param>GENERATE_HTML=NO - <doxygen:param>GENERATE_XML=YES - <doxygen:param>XML_OUTPUT=$(target-xml) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(target-xml:S=.tag) ; - - ## The Doxygen XML directory of the processed source files. - targets.main-target-alternative - [ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE - : $(target-xml:S=.tag) - : [ targets.main-target-requirements $(requirements) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(target-xml:S=.dir) ; - - ## The resulting BoostBook file is generated by the processor tool. The - ## tool can be either the xsltproc plus accompanying XSL scripts. Or it - ## can be the python doxproc.py script. 
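Which of those two processors actually runs is controlled by the <doxygen.processor> feature declared near the top of this module (xsltproc is the first, hence default, alternative; doxproc selects the Python script). A hedged sketch of requesting the doxproc path, with illustrative target and id values:

    doxygen mylib.xml
        : mylib.hpp
        : <doxygen.processor>doxproc
          <doxygen.doxproc.index>yes            # also generate the class/symbol index sections
          <doxygen.doxproc.id>mylib.reference   # id of the resulting BoostBook section
        ;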
- targets.main-target-alternative - [ new typed-target $(target-xml) : $(project) : BOOSTBOOK - : $(target-xml:S=.dir) - : [ targets.main-target-requirements $(requirements) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(target-xml) ; - - targets.main-target-alternative - [ new install-target-class $(target:S=.xml) : $(project) - : $(target-xml) - : [ targets.main-target-requirements $(requirements) - <location>$(location-xml:E=.) - <name>$(target:S=.xml) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(target:S=.xml) ; - - targets.main-target-alternative - [ new alias-target-class $(target) : $(project) - : - : [ targets.main-target-requirements $(requirements) - : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) - <dependency>$(target:S=.xml) - : $(project) ] - ] ; - } -} diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile deleted file mode 100644 index 9b969df9..00000000 --- a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile +++ /dev/null @@ -1,3 +0,0 @@ -INPUT = windows-paths-check.hpp -GENERATE_HTML = NO -GENERATE_LATEX = NO diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp deleted file mode 100644 index e69de29b..00000000 --- a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp +++ /dev/null diff --git a/jam-files/boost-build/tools/fop.jam b/jam-files/boost-build/tools/fop.jam deleted file mode 100644 index c24b8725..00000000 --- a/jam-files/boost-build/tools/fop.jam +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed -# under the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) -# -# This module defines rules to handle generation of PDF and -# PostScript files from XSL Formatting Objects via Apache FOP - -import generators ; -import common ; -import boostbook ; - -generators.register-standard fop.render.pdf : FO : PDF ; -generators.register-standard fop.render.ps : FO : PS ; - -# Initializes the fop toolset. -# -rule init ( fop-command ? : java-home ? : java ? ) -{ - local has-command = $(.has-command) ; - - if $(fop-command) - { - .has-command = true ; - } - - if $(fop-command) || ! $(has-command) - { - fop-command = [ common.get-invocation-command fop : fop : $(fop-command) - : [ modules.peek : FOP_DIR ] ] ; - } - - if $(fop-command) - { - .FOP_COMMAND = $(fop-command) ; - } - - if $(java-home) || $(java) - { - .FOP_SETUP = ; - - - # JAVA_HOME is the location that java was installed to. - - if $(java-home) - { - .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ; - } - - # JAVACMD is the location that of the java executable, useful for a - # non-standard java installation, where the executable isn't at - # $JAVA_HOME/bin/java. 
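Taken together, the three optional parameters of this init rule correspond to a user-config line along the following lines (all paths here are illustrative; the second argument sets JAVA_HOME and the third sets JAVACMD, as described above):

    # user-config.jam (illustrative paths)
    using fop : /usr/local/bin/fop : /usr/lib/jvm/java ;
    # or, for a java executable that is not at $JAVA_HOME/bin/java:
    # using fop : /usr/local/bin/fop : : /opt/java/bin/java ;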
- - if $(java) - { - .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ; - } - } -} - -actions render.pdf -{ - $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<) -} - -actions render.ps -{ - $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<) -} diff --git a/jam-files/boost-build/tools/fortran.jam b/jam-files/boost-build/tools/fortran.jam deleted file mode 100644 index 37665825..00000000 --- a/jam-files/boost-build/tools/fortran.jam +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# -# This file contains common settings for all fortran tools -# - -import "class" : new ; -import feature : feature ; - -import type ; -import generators ; -import common ; - -type.register FORTRAN : f F for f77 ; -type.register FORTRAN90 : f90 F90 ; - -feature fortran : : free ; -feature fortran90 : : free ; - -class fortran-compiling-generator : generator -{ - rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * ) - { - generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ; - } -} - -rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * ) -{ - local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ; - generators.register $(g) ; -} - -class fortran90-compiling-generator : generator -{ - rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * ) - { - generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ; - } -} - -rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * ) -{ - local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ; - generators.register $(g) ; -} - -# FIXME: this is ugly, should find a better way (we'd want client code to -# register all generators as "generator.some-rule", not with "some-module.some-rule".) -IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ; -IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ; diff --git a/jam-files/boost-build/tools/gcc.jam b/jam-files/boost-build/tools/gcc.jam deleted file mode 100644 index f7b0da54..00000000 --- a/jam-files/boost-build/tools/gcc.jam +++ /dev/null @@ -1,1185 +0,0 @@ -# Copyright 2001 David Abrahams. -# Copyright 2002-2006 Rene Rivera. -# Copyright 2002-2003 Vladimir Prus. -# Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov. -# Copyright 2007 Roland Schwarz -# Copyright 2007 Boris Gubenko. -# -# Distributed under the Boost Software License, Version 1.0. 
-# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import common ; -import errors ; -import feature ; -import generators ; -import os ; -import pch ; -import property ; -import property-set ; -import toolset ; -import type ; -import rc ; -import regex ; -import set ; -import unix ; -import fortran ; - - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - - -feature.extend toolset : gcc ; -# feature.subfeature toolset gcc : flavor : : optional ; - -toolset.inherit-generators gcc : unix : unix.link unix.link.dll ; -toolset.inherit-flags gcc : unix ; -toolset.inherit-rules gcc : unix ; - -generators.override gcc.prebuilt : builtin.prebuilt ; -generators.override gcc.searched-lib-generator : searched-lib-generator ; - -# Make gcc toolset object files use the "o" suffix on all platforms. -type.set-generated-target-suffix OBJ : <toolset>gcc : o ; -type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ; -type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ; - -# Initializes the gcc toolset for the given version. If necessary, command may -# be used to specify where the compiler is located. The parameter 'options' is a -# space-delimited list of options, each one specified as -# <option-name>option-value. Valid option names are: cxxflags, linkflags and -# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or -# sun and the default value will be selected based on the current OS. -# Example: -# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ; -# -# The compiler command to use is detected in a three step manner: -# 1) If an explicit command is specified by the user, it will be used and must available. -# 2) If only a certain version is specified, it is enforced: -# - either a command 'g++-VERSION' must be available -# - or the default command 'g++' must be available and match the exact version. -# 3) Without user-provided restrictions use default 'g++' -rule init ( version ? : command * : options * ) -{ - #1): use user-provided command - local tool-command = ; - if $(command) - { - tool-command = [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ; - if ! $(tool-command) - { - errors.error "toolset gcc initialization:" : - "provided command '$(command)' not found" : - "initialized from" [ errors.nearest-user-location ] ; - } - } - #2): enforce user-provided version - else if $(version) - { - tool-command = [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ; - - #2.1) fallback: check whether "g++" reports the requested version - if ! $(tool-command) - { - tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ; - if $(tool-command) - { - local tool-command-string = $(tool-command:J=" ") ; - local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ; - if $(tool-version) != $(version) - { - # Permit a match betwen two-digit version specified by the user - # (e.g. 4.4) and 3-digit version reported by gcc. - # Since only two digits are present in binary name anyway, - # insisting that user specify 3-digit version when - # configuring Boost.Build while it's not required on - # command like would be strange. 
- local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ; - if $(stripped) != $(version) - { - errors.error "toolset gcc initialization:" : - "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" : - "initialized from" [ errors.nearest-user-location ] ; - tool-command = ; - } - # Use full 3-digit version to be compatible with the 'using gcc ;' case - version = $(tool-version) ; - } - } - else - { - errors.error "toolset gcc initialization:" : - "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" : - "initialized from" [ errors.nearest-user-location ] ; - } - } - } - #3) default: no command and no version specified, try using default command "g++" - else - { - tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ; - if ! $(tool-command) - { - errors.error "toolset gcc initialization:" : - "no command provided, default command 'g++' not found" : - "initialized from" [ errors.nearest-user-location ] ; - } - } - - - # Information about the gcc command... - # The command. - local command = $(tool-command) ; - # The root directory of the tool install. - local root = [ feature.get-values <root> : $(options) ] ; - # The bin directory where to find the command to execute. - local bin ; - # The flavor of compiler. - local flavor = [ feature.get-values <flavor> : $(options) ] ; - # Autodetect the root and bin dir if not given. - if $(command) - { - bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; - root ?= $(bin:D) ; - } - # The 'command' variable can have multiple elements. When calling - # the SHELL builtin we need a single string. - local command-string = $(command:J=" ") ; - # Autodetect the version and flavor if not given. - if $(command) - { - local machine = [ MATCH "^([^ ]+)" - : [ SHELL "$(command-string) -dumpmachine" ] ] ; - version ?= [ MATCH "^([0-9.]+)" - : [ SHELL "$(command-string) -dumpversion" ] ] ; - switch $(machine:L) - { - case *mingw* : flavor ?= mingw ; - } - } - - local condition ; - if $(flavor) - { - condition = [ common.check-init-parameters gcc - : version $(version) - : flavor $(flavor) - ] ; - } - else - { - condition = [ common.check-init-parameters gcc - : version $(version) - ] ; - condition = $(condition) ; #/<toolset-gcc:flavor> ; - } - - common.handle-options gcc : $(condition) : $(command) : $(options) ; - - local linker = [ feature.get-values <linker-type> : $(options) ] ; - # The logic below should actually be keyed on <target-os> - if ! $(linker) - { - if [ os.name ] = OSF - { - linker = osf ; - } - else if [ os.name ] = HPUX - { - linker = hpux ; - } - else if [ os.name ] = AIX - { - linker = aix ; - } - else if [ os.name ] = SOLARIS - { - linker = sun ; - } - else - { - linker = gnu ; - } - } - init-link-flags gcc $(linker) $(condition) ; - - - # If gcc is installed in non-standard location, we'd need to add - # LD_LIBRARY_PATH when running programs created with it (for unit-test/run - # rules). - if $(command) - { - # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries - # and all must be added to LD_LIBRARY_PATH. The linker will pick the - # right onces. Note that we don't provide a clean way to build 32-bit - # binary with 64-bit compiler, but user can always pass -m32 manually. 
- local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ; - if $(.debug-configuration) - { - ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ; - } - toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ; - } - - # If it's not a system gcc install we should adjust the various programs as - # needed to prefer using the install specific versions. This is essential - # for correct use of MinGW and for cross-compiling. - - local nl = " -" ; - - # - The archive builder. - local archiver = [ common.get-invocation-command gcc - : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ar" ] ] ] - : [ feature.get-values <archiver> : $(options) ] - : $(bin) - : search-path ] ; - toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ; - if $(.debug-configuration) - { - ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ; - } - - # - Ranlib - local ranlib = [ common.get-invocation-command gcc - : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ] - : [ feature.get-values <ranlib> : $(options) ] - : $(bin) - : search-path ] ; - toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ; - if $(.debug-configuration) - { - ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ; - } - - - # - The resource compiler. - local rc = - [ common.get-invocation-command-nodefault gcc - : windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ; - local rc-type = - [ feature.get-values <rc-type> : $(options) ] ; - rc-type ?= windres ; - if ! $(rc) - { - # If we can't find an RC compiler we fallback to a null RC compiler that - # creates empty object files. This allows the same Jamfiles to work - # across the board. The null RC uses the assembler to create the empty - # objects, so configure that. - rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ; - rc-type = null ; - } - rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ; -} - -if [ os.name ] = NT -{ - # This causes single-line command invocation to not go through .bat files, - # thus avoiding command-line length limitations. - JAMSHELL = % ; -} - -generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ; -generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ; -generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ; -generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ; -generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ; -generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ; - -# pch support - -# The compiler looks for a precompiled header in each directory just before it -# looks for the include file in that directory. The name searched for is the -# name specified in the #include directive with ".gch" suffix appended. The -# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to -# full name of the header. - -type.set-generated-target-suffix PCH : <toolset>gcc : gch ; - -# GCC-specific pch generator. -class gcc-pch-generator : pch-generator -{ - import project ; - import property-set ; - import type ; - - rule run-pch ( project name ? : property-set : sources + ) - { - # Find the header in sources. Ignore any CPP sources. 
- local header ; - for local s in $(sources) - { - if [ type.is-derived [ $(s).type ] H ] - { - header = $(s) ; - } - } - - # Error handling: Base header file name should be the same as the base - # precompiled header name. - local header-name = [ $(header).name ] ; - local header-basename = $(header-name:B) ; - if $(header-basename) != $(name) - { - local location = [ $(project).project-module ] ; - errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ; - } - - local pch-file = [ generator.run $(project) $(name) : $(property-set) - : $(header) ] ; - - # return result of base class and pch-file property as usage-requirements - return - [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ] - $(pch-file) - ; - } - - # Calls the base version specifying source's name as the name of the created - # target. As result, the PCH will be named whatever.hpp.gch, and not - # whatever.gch. - rule generated-targets ( sources + : property-set : project name ? ) - { - name = [ $(sources[1]).name ] ; - return [ generator.generated-targets $(sources) - : $(property-set) : $(project) $(name) ] ; - } -} - -# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The -# latter have HPP type, but HPP type is derived from H. The type of compilation -# is determined entirely by the destination type. -generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ; -generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ; - -# Override default do-nothing generators. -generators.override gcc.compile.c.pch : pch.default-c-pch-generator ; -generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ; - -toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ; - -# Declare flags and action for compilation. -toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ; -toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags gcc.compile OPTIONS <optimization>space : -Os ; - -toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ; -toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ; -toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ; - -toolset.flags gcc.compile OPTIONS <warnings>off : -w ; -toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ; -toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ; -toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ; - -toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ; -toolset.flags gcc.compile OPTIONS <profiling>on : -pg ; -toolset.flags gcc.compile OPTIONS <rtti>off : -fno-rtti ; - -rule setup-fpic ( targets * : sources * : properties * ) -{ - local link = [ feature.get-values link : $(properties) ] ; - if $(link) = shared - { - local target = [ feature.get-values target-os : $(properties) ] ; - - # This logic will add -fPIC for all compilations: - # - # lib a : a.cpp b ; - # obj b : b.cpp ; - # exe c : c.cpp a d ; - # obj d : d.cpp ; - # - # This all is fine, except that 'd' will be compiled with -fPIC even though - # it is not needed, as 'd' is used only in exe. However, it is hard to - # detect where a target is going to be used. Alternatively, we can set -fPIC - # only when main target type is LIB but than 'b' would be compiled without - # -fPIC which would lead to link errors on x86-64. So, compile everything - # with -fPIC. 
- # - # Yet another alternative would be to create a propagated <sharedable> - # feature and set it when building shared libraries, but that would be hard - # to implement and would increase the target path length even more. - - # On Windows, fPIC is default, specifying -fPIC explicitly leads to - # a warning. - if $(target) != cygwin && $(target) != windows - { - OPTIONS on $(targets) += -fPIC ; - } - } -} - -rule setup-address-model ( targets * : sources * : properties * ) -{ - local model = [ feature.get-values address-model : $(properties) ] ; - if $(model) - { - local option ; - local os = [ feature.get-values target-os : $(properties) ] ; - if $(os) = aix - { - if $(model) = 32 - { - option = -maix32 ; - } - else - { - option = -maix64 ; - } - } - else if $(os) = hpux - { - if $(model) = 32 - { - option = -milp32 ; - } - else - { - option = -mlp64 ; - } - } - else - { - if $(model) = 32 - { - option = -m32 ; - } - else if $(model) = 64 - { - option = -m64 ; - } - # For darwin, the model can be 32_64. darwin.jam will handle that - # on its own. - } - OPTIONS on $(targets) += $(option) ; - } -} - - -# FIXME: this should not use os.name. -if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX && [ os.name ] != AIX -{ - # OSF does have an option called -soname but it does not seem to work as - # expected, therefore it has been disabled. - HAVE_SONAME = "" ; - SONAME_OPTION = -h ; -} - -# HPUX, for some reason, seem to use '+h', not '-h'. -if [ os.name ] = HPUX -{ - HAVE_SONAME = "" ; - SONAME_OPTION = +h ; -} - -toolset.flags gcc.compile USER_OPTIONS <cflags> ; -toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ; -toolset.flags gcc.compile DEFINES <define> ; -toolset.flags gcc.compile INCLUDES <include> ; -toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; -toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ; - -rule compile.c++.pch ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c++.pch -{ - "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.c.pch ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c.pch -{ - "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.c++.preprocess ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - - # Some extensions are compiled as C++ by default. For others, we need to - # pass -x c++. We could always pass -x c++ but distcc does not work with it. - if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C - { - LANG on $(<) = "-x c++" ; - } - DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -rule compile.c.preprocess ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - - # If we use the name g++ then default file suffix -> language mapping does - # not work. 
So have to pass -x option. Maybe, we can work around this by - # allowing the user to specify both C and C++ compiler names. - #if $(>:S) != .c - #{ - LANG on $(<) = "-x c" ; - #} - DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -rule compile.c++ ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - - # Some extensions are compiled as C++ by default. For others, we need to - # pass -x c++. We could always pass -x c++ but distcc does not work with it. - if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C - { - LANG on $(<) = "-x c++" ; - } - DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; - - # Here we want to raise the template-depth parameter value to something - # higher than the default value of 17. Note that we could do this using the - # feature.set-default rule but we do not want to set the default value for - # all toolsets as well. - # - # TODO: This 'modified default' has been inherited from some 'older Boost - # Build implementation' and has most likely been added to make some Boost - # library parts compile correctly. We should see what exactly prompted this - # and whether we can get around the problem more locally. - local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ; - if ! $(template-depth) - { - TEMPLATE_DEPTH on $(<) = 128 ; - } -} - -rule compile.c ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - - # If we use the name g++ then default file suffix -> language mapping does - # not work. So have to pass -x option. Maybe, we can work around this by - # allowing the user to specify both C and C++ compiler names. 
- #if $(>:S) != .c - #{ - LANG on $(<) = "-x c" ; - #} - DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -rule compile.fortran ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c++ bind PCH_FILE -{ - "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)" -} - -actions compile.c bind PCH_FILE -{ - "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++.preprocess bind PCH_FILE -{ - "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)" -} - -actions compile.c.preprocess bind PCH_FILE -{ - "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<) -} - -actions compile.fortran -{ - "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.asm ( targets * : sources * : properties * ) -{ - setup-fpic $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - LANG on $(<) = "-x assembler-with-cpp" ; -} - -actions compile.asm -{ - "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# The class which check that we don't try to use the <runtime-link>static -# property while creating or using shared library, since it's not supported by -# gcc/libc. -class gcc-linking-generator : unix-linking-generator -{ - rule run ( project name ? : property-set : sources + ) - { - # TODO: Replace this with the use of a target-os property. - local no-static-link = ; - if [ modules.peek : UNIX ] - { - switch [ modules.peek : JAMUNAME ] - { - case * : no-static-link = true ; - } - } - - local properties = [ $(property-set).raw ] ; - local reason ; - if $(no-static-link) && <runtime-link>static in $(properties) - { - if <link>shared in $(properties) - { - reason = - "On gcc, DLL can't be build with '<runtime-link>static'." ; - } - else if [ type.is-derived $(self.target-types[1]) EXE ] - { - for local s in $(sources) - { - local type = [ $(s).type ] ; - if $(type) && [ type.is-derived $(type) SHARED_LIB ] - { - reason = - "On gcc, using DLLS together with the" - "<runtime-link>static options is not possible " ; - } - } - } - } - if $(reason) - { - ECHO warning: - $(reason) ; - ECHO warning: - "It is suggested to use '<runtime-link>static' together" - "with '<link>static'." ; - return ; - } - else - { - local generated-targets = [ unix-linking-generator.run $(project) - $(name) : $(property-set) : $(sources) ] ; - return $(generated-targets) ; - } - } -} - -# The set of permissible input types is different on mingw. -# So, define two sets of generators, with mingw generators -# selected when target-os=windows. 
- -local g ; -g = [ new gcc-linking-generator gcc.mingw.link - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : EXE - : <toolset>gcc <target-os>windows ] ; -$(g).set-rule-name gcc.link ; -generators.register $(g) ; - -g = [ new gcc-linking-generator gcc.mingw.link.dll - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : IMPORT_LIB SHARED_LIB - : <toolset>gcc <target-os>windows ] ; -$(g).set-rule-name gcc.link.dll ; -generators.register $(g) ; - -generators.register - [ new gcc-linking-generator gcc.link - : LIB OBJ - : EXE - : <toolset>gcc ] ; -generators.register - [ new gcc-linking-generator gcc.link.dll - : LIB OBJ - : SHARED_LIB - : <toolset>gcc ] ; - -generators.override gcc.mingw.link : gcc.link ; -generators.override gcc.mingw.link.dll : gcc.link.dll ; - -# Cygwin is similar to msvc and mingw in that it uses import libraries. -# While in simple cases, it can directly link to a shared library, -# it is believed to be slower, and not always possible. Define cygwin-specific -# generators here. - -g = [ new gcc-linking-generator gcc.cygwin.link - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : EXE - : <toolset>gcc <target-os>cygwin ] ; -$(g).set-rule-name gcc.link ; -generators.register $(g) ; - -g = [ new gcc-linking-generator gcc.cygwin.link.dll - : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB - : IMPORT_LIB SHARED_LIB - : <toolset>gcc <target-os>cygwin ] ; -$(g).set-rule-name gcc.link.dll ; -generators.register $(g) ; - -generators.override gcc.cygwin.link : gcc.link ; -generators.override gcc.cygwin.link.dll : gcc.link.dll ; - -# Declare flags for linking. -# First, the common flags. -toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ; -toolset.flags gcc.link OPTIONS <profiling>on : -pg ; -toolset.flags gcc.link USER_OPTIONS <linkflags> ; -toolset.flags gcc.link LINKPATH <library-path> ; -toolset.flags gcc.link FINDLIBS-ST <find-static-library> ; -toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ; -toolset.flags gcc.link LIBRARIES <library-file> ; - -toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ; -toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ; - -# For <runtime-link>static we made sure there are no dynamic libraries in the -# link. On HP-UX not all system libraries exist as archived libraries (for -# example, there is no libunwind.a), so, on this platform, the -static option -# cannot be specified. -if [ os.name ] != HPUX -{ - toolset.flags gcc.link OPTIONS <runtime-link>static : -static ; -} - -# Now, the vendor specific flags. -# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun. -rule init-link-flags ( toolset linker condition ) -{ - switch $(linker) - { - case aix : - { - # - # On AIX we *have* to use the native linker. - # - # Using -brtl, the AIX linker will look for libraries with both the .a - # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the - # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived - # file that may contain shared objects and is different from static libs - # as on Linux. - # - # The -bnoipath strips the prepending (relative) path of libraries from - # the loader section in the target library or executable. Hence, during - # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded - # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without - # this option, the prepending (relative) path + library name is - # hard-coded in the loader section, causing *only* this path to be - # searched during load-time. 
Note that the AIX linker does not have an - # -soname equivalent, this is as close as it gets. - # - # The above options are definately for AIX 5.x, and most likely also for - # AIX 4.x and AIX 6.x. For details about the AIX linker see: - # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf - # - - toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath - : unchecked ; - } - - case darwin : - { - # On Darwin, the -s option to ld does not work unless we pass -static, - # and passing -static unconditionally is a bad idea. So, don't pass -s. - # at all, darwin.jam will use separate 'strip' invocation. - toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ; - toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ; - } - - case gnu : - { - # Strip the binary when no debugging is needed. We use --strip-all flag - # as opposed to -s since icc (intel's compiler) is generally - # option-compatible with and inherits from the gcc toolset, but does not - # support -s. - toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ; - toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ; - toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ; - toolset.flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ; - toolset.flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ; - - # gnu ld has the ability to change the search behaviour for libraries - # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic - # and change search for -l switches that follow them. The following list - # shows the tried variants. - # The search stops at the first variant that has a match. - # *nix: -Bstatic -lxxx - # libxxx.a - # - # *nix: -Bdynamic -lxxx - # libxxx.so - # libxxx.a - # - # windows (mingw,cygwin) -Bstatic -lxxx - # libxxx.a - # xxx.lib - # - # windows (mingw,cygwin) -Bdynamic -lxxx - # libxxx.dll.a - # xxx.dll.a - # libxxx.a - # xxx.lib - # cygxxx.dll (*) - # libxxx.dll - # xxx.dll - # libxxx.a - # - # (*) This is for cygwin - # Please note that -Bstatic and -Bdynamic are not a guarantee that a - # static or dynamic lib indeed gets linked in. The switches only change - # search patterns! - - # On *nix mixing shared libs with static runtime is not a good idea. - toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared - : -Wl,-Bstatic : unchecked ; - toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared - : -Wl,-Bdynamic : unchecked ; - - # On windows allow mixing of static and dynamic libs with static - # runtime. - toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows - : -Wl,-Bstatic : unchecked ; - toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows - : -Wl,-Bdynamic : unchecked ; - toolset.flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows - : -Wl,-Bstatic : unchecked ; - } - - case hpux : - { - toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on - : -Wl,-s : unchecked ; - toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared - : -fPIC : unchecked ; - } - - case osf : - { - # No --strip-all, just -s. - toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on - : -Wl,-s : unchecked ; - toolset.flags $(toolset).link RPATH $(condition) : <dll-path> - : unchecked ; - # This does not supports -R. 
- toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath - : unchecked ; - # -rpath-link is not supported at all. - } - - case sun : - { - toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on - : -Wl,-s : unchecked ; - toolset.flags $(toolset).link RPATH $(condition) : <dll-path> - : unchecked ; - # Solaris linker does not have a separate -rpath-link, but allows to use - # -L for the same purpose. - toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path> - : unchecked ; - - # This permits shared libraries with non-PIC code on Solaris. - # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the - # following is not needed. Whether -fPIC should be hardcoded, is a - # separate question. - # AH, 2004/10/16: it is still necessary because some tests link against - # static libraries that were compiled without PIC. - toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared - : -mimpure-text : unchecked ; - } - - case * : - { - errors.user-error - "$(toolset) initialization: invalid linker '$(linker)'" : - "The value '$(linker)' specified for <linker> is not recognized." : - "Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'" ; - } - } -} - -# Enclose the RPATH variable on 'targets' in (double) quotes, -# unless it's already enclosed in single quotes. -# This special casing is done because it's common to pass -# '$ORIGIN' to linker -- and it has to have single quotes -# to prevent expansion by shell -- and if we add double -# quotes then preventing properties of single quotes disappear. -rule quote-rpath ( targets * ) -{ - local r = [ on $(targets[1]) return $(RPATH) ] ; - if ! [ MATCH "('.*')" : $(r) ] - { - r = "\"$(r)\"" ; - } - RPATH on $(targets) = $(r) ; -} - -# Declare actions for linking. -rule link ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - SPACE on $(targets) = " " ; - # Serialize execution of the 'link' action, since running N links in - # parallel is just slower. For now, serialize only gcc links, it might be a - # good idea to serialize all links. - JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ; - quote-rpath $(targets) ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) - -} - -# Default value. Mostly for the sake of intel-linux that inherits from gcc, but -# does not have the same logic to set the .AR variable. We can put the same -# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is -# always available. -.AR = ar ; -.RANLIB = ranlib ; - -toolset.flags gcc.archive AROPTIONS <archiveflags> ; - -rule archive ( targets * : sources * : properties * ) -{ - # Always remove archive and start again. Here is the rationale from - # - # Andre Hentz: - # - # I had a file, say a1.c, that was included into liba.a. I moved a1.c to - # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd - # errors. After some debugging I traced it back to the fact that a1.o was - # *still* in liba.a - # - # Rene Rivera: - # - # Originally removing the archive was done by splicing an RM onto the - # archive action. 
That makes archives fail to build on NT when they have - # many files because it will no longer execute the action directly and blow - # the line length limit. Instead we remove the file in a different action, - # just before building the archive. - # - local clean.a = $(targets[1])(clean) ; - TEMPORARY $(clean.a) ; - NOCARE $(clean.a) ; - LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; - DEPENDS $(clean.a) : $(sources) ; - DEPENDS $(targets) : $(clean.a) ; - common.RmTemps $(clean.a) : $(targets) ; -} - -# Declare action for creating static libraries. -# The letter 'r' means to add files to the archive with replacement. Since we -# remove archive, we don't care about replacement, but there's no option "add -# without replacement". -# The letter 'c' suppresses the warning in case the archive does not exists yet. -# That warning is produced only on some platforms, for whatever reasons. -actions piecemeal archive -{ - "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" - "$(.RANLIB)" "$(<)" -} - -rule link.dll ( targets * : sources * : properties * ) -{ - setup-threading $(targets) : $(sources) : $(properties) ; - setup-address-model $(targets) : $(sources) : $(properties) ; - SPACE on $(targets) = " " ; - JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ; - quote-rpath $(targets) ; -} - -# Differs from 'link' above only by -shared. -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) -} - -rule setup-threading ( targets * : sources * : properties * ) -{ - local threading = [ feature.get-values threading : $(properties) ] ; - if $(threading) = multi - { - local target = [ feature.get-values target-os : $(properties) ] ; - local option ; - local libs ; - - switch $(target) - { - case windows : - { - option = -mthreads ; - } - case cygwin : - { - option = -mthreads ; - } - case solaris : - { - option = -pthreads ; - libs = rt ; - } - case beos : - { - # BeOS has no threading options, so do not set anything here. - } - case *bsd : - { - option = -pthread ; - # There is no -lrt on BSD. - } - case sgi : - { - # gcc on IRIX does not support multi-threading so do not set anything - # here. - } - case darwin : - { - # Darwin has no threading options so do not set anything here. - } - case * : - { - option = -pthread ; - libs = rt ; - } - } - - if $(option) - { - OPTIONS on $(targets) += $(option) ; - } - if $(libs) - { - FINDLIBS-SA on $(targets) += $(libs) ; - } - } -} - -local rule cpu-flags ( toolset variable : architecture : instruction-set + : values + : default ? ) -{ - if $(default) - { - toolset.flags $(toolset) $(variable) - <architecture>$(architecture)/<instruction-set> - : $(values) ; - } - toolset.flags $(toolset) $(variable) - <architecture>/<instruction-set>$(instruction-set) - <architecture>$(architecture)/<instruction-set>$(instruction-set) - : $(values) ; -} - -# Set architecture/instruction-set options. -# -# x86 and compatible -# The 'native' option appeared in gcc 4.2 so we cannot safely use it -# as default. Use conservative i386 instead. 
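# [Illustrative sketch, not part of the deleted gcc.jam or gcc.py.] The
# cpu-flags rule above registers each -march/-mcpu value under up to three
# property conditions. A minimal Python re-expression of that expansion,
# using the hypothetical name cpu_flag_conditions, shows what each entry in
# the list below turns into:

def cpu_flag_conditions(architecture, instruction_set, default=False):
    # Property conditions under which one -march/-mcpu value applies,
    # mirroring the jam 'cpu-flags' rule above.
    conditions = []
    if default:
        # Architecture requested without an instruction set: apply default.
        conditions.append('<architecture>%s/<instruction-set>' % architecture)
    conditions.append('<architecture>/<instruction-set>%s' % instruction_set)
    conditions.append('<architecture>%s/<instruction-set>%s'
                      % (architecture, instruction_set))
    return conditions

# Example: cpu_flag_conditions('x86', 'i686') yields
#   ['<architecture>/<instruction-set>i686',
#    '<architecture>x86/<instruction-set>i686'],
# while the i386 entry, marked 'default', also matches plain
# '<architecture>x86/<instruction-set>'.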
-cpu-flags gcc OPTIONS : x86 : native : -march=native ; -cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 : default ; -cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ; -cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ; -cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ; -cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ; -cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ; -cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ; -cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ; -cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ; -cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ; -cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ; -cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ; -cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ; -cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ; -cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ; -cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ; -cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ; -cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ; -cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ; -cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ; -cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ; -cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ; -cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ; -cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ; -## -cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ; -cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ; -cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ; -cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ; -cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ; -cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ; -cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ; -cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ; -# Sparc -cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ; -cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ; -cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ; -cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ; -cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ; -cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ; -cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ; -cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ; -cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ; -cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ; -cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ; -cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ; -cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ; -cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ; -cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ; -# RS/6000 & PowerPC -cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ; -cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ; -cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ; -cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ; -cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ; -cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ; -cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ; -cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ; -cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ; -cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ; -cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ; -cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ; -cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ; -cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ; 
-cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ; -cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ; -cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ; -cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ; -cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ; -cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ; -cpu-flags gcc OPTIONS : power : power : -mcpu=power ; -cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ; -cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ; -cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ; -cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ; -cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ; -cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ; -cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ; -cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ; -cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ; -cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ; -cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ; -# AIX variant of RS/6000 & PowerPC -toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X 64" ; diff --git a/jam-files/boost-build/tools/gcc.py b/jam-files/boost-build/tools/gcc.py deleted file mode 100644 index 2a3e675e..00000000 --- a/jam-files/boost-build/tools/gcc.py +++ /dev/null @@ -1,796 +0,0 @@ -# Status: being ported by Steven Watanabe -# Base revision: 47077 -# TODO: common.jam needs to be ported -# TODO: generators.jam needs to have register_c_compiler. -# -# Copyright 2001 David Abrahams. -# Copyright 2002-2006 Rene Rivera. -# Copyright 2002-2003 Vladimir Prus. -# Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov. -# Copyright 2007 Roland Schwarz -# Copyright 2007 Boris Gubenko. -# Copyright 2008 Steven Watanabe -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import os -import subprocess -import re - -import bjam - -from b2.tools import unix, common, rc, pch, builtin -from b2.build import feature, type, toolset, generators -from b2.util.utility import os_name, on_windows -from b2.manager import get_manager -from b2.build.generators import Generator -from b2.build.toolset import flags -from b2.util.utility import to_seq - -__debug = None - -def debug(): - global __debug - if __debug is None: - __debug = "--debug-configuration" in bjam.variable("ARGV") - return __debug - -feature.extend('toolset', ['gcc']) - - -toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll']) -toolset.inherit_flags('gcc', 'unix') -toolset.inherit_rules('gcc', 'unix') - -generators.override('gcc.prebuilt', 'builtin.prebuilt') -generators.override('gcc.searched-lib-generator', 'searched-lib-generator') - -# Target naming is determined by types/lib.jam and the settings below this -# comment. -# -# On *nix: -# libxxx.a static library -# libxxx.so shared library -# -# On windows (mingw): -# libxxx.lib static library -# xxx.dll DLL -# xxx.lib import library -# -# On windows (cygwin) i.e. <target-os>cygwin -# libxxx.a static library -# xxx.dll DLL -# libxxx.dll.a import library -# -# Note: user can always override by using the <tag>@rule -# This settings have been choosen, so that mingw -# is in line with msvc naming conventions. For -# cygwin the cygwin naming convention has been choosen. 
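# [Illustrative sketch, not part of the deleted gcc.py.] The naming table
# from the comment above, condensed into a hypothetical helper named
# expected_library_names; actual naming is driven by types/lib.jam plus the
# suffix/prefix settings that follow.

def expected_library_names(basename, target_os='linux'):
    # Returns the file names the comment above says gcc-built libraries get.
    if target_os == 'windows':            # mingw
        return {'static': 'lib%s.lib' % basename,
                'shared': '%s.dll' % basename,
                'import': '%s.lib' % basename}
    if target_os == 'cygwin':
        return {'static': 'lib%s.a' % basename,
                'shared': '%s.dll' % basename,
                'import': 'lib%s.dll.a' % basename}
    return {'static': 'lib%s.a' % basename,   # other *nix
            'shared': 'lib%s.so' % basename}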
- -# Make the "o" suffix used for gcc toolset on all -# platforms -type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o') -type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a') - -type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a') -type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib') - -__machine_match = re.compile('^([^ ]+)') -__version_match = re.compile('^([0-9.]+)') - -def init(version = None, command = None, options = None): - """ - Initializes the gcc toolset for the given version. If necessary, command may - be used to specify where the compiler is located. The parameter 'options' is a - space-delimited list of options, each one specified as - <option-name>option-value. Valid option names are: cxxflags, linkflags and - linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun - and the default value will be selected based on the current OS. - Example: - using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ; - """ - - options = to_seq(options) - command = to_seq(command) - - # Information about the gcc command... - # The command. - command = to_seq(common.get_invocation_command('gcc', 'g++', command)) - # The root directory of the tool install. - root = feature.get_values('<root>', options) ; - # The bin directory where to find the command to execute. - bin = None - # The flavor of compiler. - flavor = feature.get_values('<flavor>', options) - # Autodetect the root and bin dir if not given. - if command: - if not bin: - bin = common.get_absolute_tool_path(command[-1]) - if not root: - root = os.path.dirname(bin) - # Autodetect the version and flavor if not given. - if command: - machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0] - machine = __machine_match.search(machine_info).group(1) - - version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0] - version = __version_match.search(version_info).group(1) - if not flavor and machine.find('mingw') != -1: - flavor = 'mingw' - - condition = None - if flavor: - condition = common.check_init_parameters('gcc', None, - ('version', version), - ('flavor', flavor)) - else: - condition = common.check_init_parameters('gcc', None, - ('version', version)) - - if command: - command = command[0] - - common.handle_options('gcc', condition, command, options) - - linker = feature.get_values('<linker-type>', options) - if not linker: - if os_name() == 'OSF': - linker = 'osf' - elif os_name() == 'HPUX': - linker = 'hpux' ; - else: - linker = 'gnu' - - init_link_flags('gcc', linker, condition) - - # If gcc is installed in non-standard location, we'd need to add - # LD_LIBRARY_PATH when running programs created with it (for unit-test/run - # rules). - if command: - # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries - # and all must be added to LD_LIBRARY_PATH. The linker will pick the - # right onces. Note that we don't provide a clean way to build 32-bit - # binary with 64-bit compiler, but user can always pass -m32 manually. 
- lib_path = [os.path.join(root, 'bin'), - os.path.join(root, 'lib'), - os.path.join(root, 'lib32'), - os.path.join(root, 'lib64')] - if debug(): - print 'notice: using gcc libraries ::', condition, '::', lib_path - toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path) - - # If it's not a system gcc install we should adjust the various programs as - # needed to prefer using the install specific versions. This is essential - # for correct use of MinGW and for cross-compiling. - - # - The archive builder. - archiver = common.get_invocation_command('gcc', - 'ar', feature.get_values('<archiver>', options), [bin], path_last=True) - toolset.flags('gcc.archive', '.AR', condition, [archiver]) - if debug(): - print 'notice: using gcc archiver ::', condition, '::', archiver - - # - The resource compiler. - rc_command = common.get_invocation_command_nodefault('gcc', - 'windres', feature.get_values('<rc>', options), [bin], path_last=True) - rc_type = feature.get_values('<rc-type>', options) - - if not rc_type: - rc_type = 'windres' - - if not rc_command: - # If we can't find an RC compiler we fallback to a null RC compiler that - # creates empty object files. This allows the same Jamfiles to work - # across the board. The null RC uses the assembler to create the empty - # objects, so configure that. - rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True) - rc_type = 'null' - rc.configure(rc_command, condition, '<rc-type>' + rc_type) - -###if [ os.name ] = NT -###{ -### # This causes single-line command invocation to not go through .bat files, -### # thus avoiding command-line length limitations. -### JAMSHELL = % ; -###} - -#FIXME: when register_c_compiler is moved to -# generators, these should be updated -builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc']) -builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc']) -builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc']) - -# pch support - -# The compiler looks for a precompiled header in each directory just before it -# looks for the include file in that directory. The name searched for is the -# name specified in the #include directive with ".gch" suffix appended. The -# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to -# full name of the header. - -type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch') - -# GCC-specific pch generator. -class GccPchGenerator(pch.PchGenerator): - - # Inherit the __init__ method - - def run_pch(self, project, name, prop_set, sources): - # Find the header in sources. Ignore any CPP sources. - header = None - for s in sources: - if type.is_derived(s.type, 'H'): - header = s - - # Error handling: Base header file name should be the same as the base - # precompiled header name. - header_name = header.name - header_basename = os.path.basename(header_name).rsplit('.', 1)[0] - if header_basename != name: - location = project.project_module - ###FIXME: - raise Exception() - ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ; - - pch_file = Generator.run(self, project, name, prop_set, [header]) - - # return result of base class and pch-file property as usage-requirements - # FIXME: what about multiple results from generator.run? 
- return (property_set.create('<pch-file>' + pch_file[0], '<cflags>-Winvalid-pch'), - pch_file) - - # Calls the base version specifying source's name as the name of the created - # target. As result, the PCH will be named whatever.hpp.gch, and not - # whatever.gch. - def generated_targets(self, sources, prop_set, project, name = None): - name = sources[0].name - return Generator.generated_targets(self, sources, - prop_set, project, name) - -# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The -# latter have HPP type, but HPP type is derived from H. The type of compilation -# is determined entirely by the destination type. -generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ])) -generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ])) - -# Override default do-nothing generators. -generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator') -generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator') - -flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>']) - -# Declare flags and action for compilation -flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0']) -flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3']) -flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os']) - -flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline']) -flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline']) -flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline']) - -flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w']) -flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall']) -flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic']) -flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror']) - -flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g']) -flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg']) -flags('gcc.compile', 'OPTIONS', ['<rtti>off'], ['-fno-rtti']) - -# On cygwin and mingw, gcc generates position independent code by default, and -# warns if -fPIC is specified. This might not be the right way of checking if -# we're using cygwin. For example, it's possible to run cygwin gcc from NT -# shell, or using crosscompiling. But we'll solve that problem when it's time. -# In that case we'll just add another parameter to 'init' and move this login -# inside 'init'. -if not os_name () in ['CYGWIN', 'NT']: - # This logic will add -fPIC for all compilations: - # - # lib a : a.cpp b ; - # obj b : b.cpp ; - # exe c : c.cpp a d ; - # obj d : d.cpp ; - # - # This all is fine, except that 'd' will be compiled with -fPIC even though - # it's not needed, as 'd' is used only in exe. However, it's hard to detect - # where a target is going to be used. Alternative, we can set -fPIC only - # when main target type is LIB but than 'b' will be compiled without -fPIC. - # In x86-64 that will lead to link errors. So, compile everything with - # -fPIC. - # - # Yet another alternative would be to create propagated <sharedable> - # feature, and set it when building shared libraries, but that's hard to - # implement and will increase target path length even more. - flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC']) - -if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX': - # OSF does have an option called -soname but it doesn't seem to work as - # expected, therefore it has been disabled. 
- HAVE_SONAME = '' - SONAME_OPTION = '-h' - - -flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>']) -flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>']) -flags('gcc.compile', 'DEFINES', [], ['<define>']) -flags('gcc.compile', 'INCLUDES', [], ['<include>']) - -engine = get_manager().engine() - -engine.register_action('gcc.compile.c++.pch', - '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') - -engine.register_action('gcc.compile.c.pch', - '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') - - -def gcc_compile_cpp(targets, sources, properties): - # Some extensions are compiled as C++ by default. For others, we need to - # pass -x c++. We could always pass -x c++ but distcc does not work with it. - extension = os.path.splitext (sources [0]) [1] - lang = '' - if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']: - lang = '-x c++' - get_manager().engine().set_target_variable (targets, 'LANG', lang) - engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) - -def gcc_compile_c(targets, sources, properties): - engine = get_manager().engine() - # If we use the name g++ then default file suffix -> language mapping does - # not work. So have to pass -x option. Maybe, we can work around this by - # allowing the user to specify both C and C++ compiler names. - #if $(>:S) != .c - #{ - engine.set_target_variable (targets, 'LANG', '-x c') - #} - engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) - -engine.register_action( - 'gcc.compile.c++', - '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' + - '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' + - '-c -o "$(<:W)" "$(>:W)"', - function=gcc_compile_cpp, - bound_list=['PCH_FILE']) - -engine.register_action( - 'gcc.compile.c', - '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' + - '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"', - function=gcc_compile_c, - bound_list=['PCH_FILE']) - -def gcc_compile_asm(targets, sources, properties): - get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp') - -engine.register_action( - 'gcc.compile.asm', - '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"', - function=gcc_compile_asm) - - -class GccLinkingGenerator(unix.UnixLinkingGenerator): - """ - The class which check that we don't try to use the <runtime-link>static - property while creating or using shared library, since it's not supported by - gcc/libc. - """ - def run(self, project, name, ps, sources): - # TODO: Replace this with the use of a target-os property. - - no_static_link = False - if bjam.variable('UNIX'): - no_static_link = True; - ##FIXME: what does this mean? -## { -## switch [ modules.peek : JAMUNAME ] -## { -## case * : no-static-link = true ; -## } -## } - - reason = None - if no_static_link and ps.get('runtime-link') == 'static': - if ps.get('link') == 'shared': - reason = "On gcc, DLL can't be build with '<runtime-link>static'." - elif type.is_derived(self.target_types[0], 'EXE'): - for s in sources: - source_type = s.type() - if source_type and type.is_derived(source_type, 'SHARED_LIB'): - reason = "On gcc, using DLLS together with the " +\ - "<runtime-link>static options is not possible " - if reason: - print 'warning:', reason - print 'warning:',\ - "It is suggested to use '<runtime-link>static' together",\ - "with '<link>static'." 
; - return - else: - generated_targets = unix.UnixLinkingGenerator.run(self, project, - name, ps, sources) - return generated_targets - -if on_windows(): - flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,']) - generators.register( - GccLinkingGenerator('gcc.link', True, - ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], - [ 'EXE' ], - [ '<toolset>gcc' ])) - generators.register( - GccLinkingGenerator('gcc.link.dll', True, - ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], - ['IMPORT_LIB', 'SHARED_LIB'], - ['<toolset>gcc'])) -else: - generators.register( - GccLinkingGenerator('gcc.link', True, - ['LIB', 'OBJ'], - ['EXE'], - ['<toolset>gcc'])) - generators.register( - GccLinkingGenerator('gcc.link.dll', True, - ['LIB', 'OBJ'], - ['SHARED_LIB'], - ['<toolset>gcc'])) - -# Declare flags for linking. -# First, the common flags. -flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g']) -flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg']) -flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>']) -flags('gcc.link', 'LINKPATH', [], ['<library-path>']) -flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>']) -flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>']) -flags('gcc.link', 'LIBRARIES', [], ['<library-file>']) - -# For <runtime-link>static we made sure there are no dynamic libraries in the -# link. On HP-UX not all system libraries exist as archived libraries (for -# example, there is no libunwind.a), so, on this platform, the -static option -# cannot be specified. -if os_name() != 'HPUX': - flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static']) - -# Now, the vendor specific flags. -# The parameter linker can be either gnu, darwin, osf, hpux or sun. -def init_link_flags(toolset, linker, condition): - """ - Now, the vendor specific flags. - The parameter linker can be either gnu, darwin, osf, hpux or sun. - """ - toolset_link = toolset + '.link' - if linker == 'gnu': - # Strip the binary when no debugging is needed. We use --strip-all flag - # as opposed to -s since icc (intel's compiler) is generally - # option-compatible with and inherits from the gcc toolset, but does not - # support -s. - - # FIXME: what does unchecked translate to? - flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all']) # : unchecked ; - flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; - flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ; - flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ; - flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ; - - # gnu ld has the ability to change the search behaviour for libraries - # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic - # and change search for -l switches that follow them. The following list - # shows the tried variants. - # The search stops at the first variant that has a match. - # *nix: -Bstatic -lxxx - # libxxx.a - # - # *nix: -Bdynamic -lxxx - # libxxx.so - # libxxx.a - # - # windows (mingw,cygwin) -Bstatic -lxxx - # libxxx.a - # xxx.lib - # - # windows (mingw,cygwin) -Bdynamic -lxxx - # libxxx.dll.a - # xxx.dll.a - # libxxx.a - # xxx.lib - # cygxxx.dll (*) - # libxxx.dll - # xxx.dll - # libxxx.a - # - # (*) This is for cygwin - # Please note that -Bstatic and -Bdynamic are not a guarantee that a - # static or dynamic lib indeed gets linked in. The switches only change - # search patterns! 
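# [Illustrative sketch, not part of the deleted gcc.py.] The -Bstatic /
# -Bdynamic search orders listed in the comment above, transcribed into a
# hypothetical helper named candidate_library_names; as the comment stresses,
# these are only search patterns, not a guarantee of static or dynamic
# linking.

def candidate_library_names(base, mode, platform):
    # File names gnu ld tries, in order, for '-l<base>' after -Bstatic or
    # -Bdynamic ('cyg<base>.dll' is tried by the cygwin port only).
    if platform == 'unix':
        if mode == 'static':
            return ['lib%s.a' % base]
        return ['lib%s.so' % base, 'lib%s.a' % base]
    # windows (mingw, cygwin)
    if mode == 'static':
        return ['lib%s.a' % base, '%s.lib' % base]
    return ['lib%s.dll.a' % base, '%s.dll.a' % base, 'lib%s.a' % base,
            '%s.lib' % base, 'cyg%s.dll' % base, 'lib%s.dll' % base,
            '%s.dll' % base, 'lib%s.a' % base]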
- - # On *nix mixing shared libs with static runtime is not a good idea. - flags(toolset_link, 'FINDLIBS-ST-PFX', - map(lambda x: x + '/<runtime-link>shared', condition), - ['-Wl,-Bstatic']) # : unchecked ; - flags(toolset_link, 'FINDLIBS-SA-PFX', - map(lambda x: x + '/<runtime-link>shared', condition), - ['-Wl,-Bdynamic']) # : unchecked ; - - # On windows allow mixing of static and dynamic libs with static - # runtime. - flags(toolset_link, 'FINDLIBS-ST-PFX', - map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition), - ['-Wl,-Bstatic']) # : unchecked ; - flags(toolset_link, 'FINDLIBS-SA-PFX', - map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition), - ['-Wl,-Bdynamic']) # : unchecked ; - flags(toolset_link, 'OPTIONS', - map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition), - ['-Wl,-Bstatic']) # : unchecked ; - - elif linker == 'darwin': - # On Darwin, the -s option to ld does not work unless we pass -static, - # and passing -static unconditionally is a bad idea. So, don't pass -s. - # at all, darwin.jam will use separate 'strip' invocation. - flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; - flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ; - - elif linker == 'osf': - # No --strip-all, just -s. - flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s']) - # : unchecked ; - flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; - # This does not supports -R. - flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ; - # -rpath-link is not supported at all. - - elif linker == 'sun': - flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s']) - # : unchecked ; - flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; - # Solaris linker does not have a separate -rpath-link, but allows to use - # -L for the same purpose. - flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ; - - # This permits shared libraries with non-PIC code on Solaris. - # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the - # following is not needed. Whether -fPIC should be hardcoded, is a - # separate question. - # AH, 2004/10/16: it is still necessary because some tests link against - # static libraries that were compiled without PIC. - flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text']) - # : unchecked ; - - elif linker == 'hpux': - flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), - ['-Wl,-s']) # : unchecked ; - flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), - ['-fPIC']) # : unchecked ; - - else: - # FIXME: - errors.user_error( - "$(toolset) initialization: invalid linker '$(linker)' " + - "The value '$(linker)' specified for <linker> is not recognized. " + - "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'") - -# Declare actions for linking. -def gcc_link(targets, sources, properties): - engine = get_manager().engine() - engine.set_target_variable(targets, 'SPACE', ' ') - # Serialize execution of the 'link' action, since running N links in - # parallel is just slower. For now, serialize only gcc links, it might be a - # good idea to serialize all links. 
- engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore') - -engine.register_action( - 'gcc.link', - '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' + - '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' + - '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' + - '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' + - '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' + - '$(OPTIONS) $(USER_OPTIONS)', - function=gcc_link, - bound_list=['LIBRARIES']) - -# Default value. Mostly for the sake of intel-linux that inherits from gcc, but -# does not have the same logic to set the .AR variable. We can put the same -# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is -# always available. -__AR = 'ar' - -flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>']) - -def gcc_archive(targets, sources, properties): - # Always remove archive and start again. Here's rationale from - # - # Andre Hentz: - # - # I had a file, say a1.c, that was included into liba.a. I moved a1.c to - # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd - # errors. After some debugging I traced it back to the fact that a1.o was - # *still* in liba.a - # - # Rene Rivera: - # - # Originally removing the archive was done by splicing an RM onto the - # archive action. That makes archives fail to build on NT when they have - # many files because it will no longer execute the action directly and blow - # the line length limit. Instead we remove the file in a different action, - # just before building the archive. - clean = targets[0] + '(clean)' - bjam.call('TEMPORARY', clean) - bjam.call('NOCARE', clean) - engine = get_manager().engine() - engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE')) - engine.add_dependency(clean, sources) - engine.add_dependency(targets, clean) - engine.set_update_action('common.RmTemps', clean, targets) - -# Declare action for creating static libraries. -# The letter 'r' means to add files to the archive with replacement. Since we -# remove archive, we don't care about replacement, but there's no option "add -# without replacement". -# The letter 'c' suppresses the warning in case the archive does not exists yet. -# That warning is produced only on some platforms, for whatever reasons. -engine.register_action('gcc.archive', - '"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"', - function=gcc_archive, - flags=['piecemeal']) - -def gcc_link_dll(targets, sources, properties): - engine = get_manager().engine() - engine.set_target_variable(targets, 'SPACE', ' ') - engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore') - engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME) - engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION) - -engine.register_action( - 'gcc.link.dll', - # Differ from 'link' above only by -shared. - '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' + - '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' + - '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' + - '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' + - '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' + - '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' + - '$(OPTIONS) $(USER_OPTIONS)', - function = gcc_link_dll, - bound_list=['LIBRARIES']) - -# Set up threading support. It's somewhat contrived, so perform it at the end, -# to avoid cluttering other code. 
- -if on_windows(): - flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads']) -elif bjam.variable('UNIX'): - jamuname = bjam.variable('JAMUNAME') - host_os_name = jamuname[0] - if host_os_name.startswith('SunOS'): - flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads']) - flags('gcc', 'FINDLIBS-SA', [], ['rt']) - elif host_os_name == 'BeOS': - # BeOS has no threading options, don't set anything here. - pass - elif host_os_name.endswith('BSD'): - flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread']) - # there is no -lrt on BSD - elif host_os_name == 'DragonFly': - flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread']) - # there is no -lrt on BSD - DragonFly is a FreeBSD variant, - # which anoyingly doesn't say it's a *BSD. - elif host_os_name == 'IRIX': - # gcc on IRIX does not support multi-threading, don't set anything here. - pass - elif host_os_name == 'Darwin': - # Darwin has no threading options, don't set anything here. - pass - else: - flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread']) - flags('gcc', 'FINDLIBS-SA', [], ['rt']) - -def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None): - #FIXME: for some reason this fails. Probably out of date feature code -## if default: -## flags(toolset, variable, -## ['<architecture>' + architecture + '/<instruction-set>'], -## values) - flags(toolset, variable, - #FIXME: same as above - [##'<architecture>/<instruction-set>' + instruction_set, - '<architecture>' + architecture + '/<instruction-set>' + instruction_set], - values) - -# Set architecture/instruction-set options. -# -# x86 and compatible -flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32']) -flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i386', ['-march=i386'], default=True) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp']) -## -cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64']) 
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2']) -# Sparc -flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32']) -flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3']) -# RS/6000 & PowerPC -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32']) -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403']) -cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505']) -cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601']) -cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602']) -cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603']) -cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e']) -cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604']) -cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e']) -cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620']) -cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630']) -cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740']) -cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400']) -cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450']) -cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750']) -cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801']) -cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821']) -cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823']) -cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860']) -cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970']) -cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5']) -cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc']) -cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', 
['-mcpu=rios2']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64']) -# AIX variant of RS/6000 & PowerPC -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32']) -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64']) -flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X 64']) diff --git a/jam-files/boost-build/tools/generate.jam b/jam-files/boost-build/tools/generate.jam deleted file mode 100644 index 6732fa35..00000000 --- a/jam-files/boost-build/tools/generate.jam +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Declares main target 'generate' used to produce targets by calling a -# user-provided rule that takes and produces virtual targets. - -import "class" : new ; -import errors ; -import feature ; -import project ; -import property ; -import property-set ; -import targets ; -import regex ; - - -feature.feature generating-rule : : free ; - - -class generated-target-class : basic-target -{ - import errors ; - import indirect ; - import virtual-target ; - - rule __init__ ( name : project : sources * : requirements * - : default-build * : usage-requirements * ) - { - basic-target.__init__ $(name) : $(project) : $(sources) - : $(requirements) : $(default-build) : $(usage-requirements) ; - - if ! [ $(self.requirements).get <generating-rule> ] - { - errors.user-error "The generate rule requires the <generating-rule>" - "property to be set" ; - } - } - - rule construct ( name : sources * : property-set ) - { - local result ; - local gr = [ $(property-set).get <generating-rule> ] ; - - # FIXME: this is a copy-paste from virtual-target.jam. We should add a - # utility rule to call a rule like this. - local rule-name = [ MATCH ^@(.*) : $(gr) ] ; - if $(rule-name) - { - if $(gr[2]) - { - local target-name = [ full-name ] ; - errors.user-error "Multiple <generating-rule> properties" - "encountered for target $(target-name)." ; - } - - result = [ indirect.call $(rule-name) $(self.project) $(name) - : $(property-set) : $(sources) ] ; - - if ! $(result) - { - ECHO "warning: Unable to construct" [ full-name ] ; - } - } - - local ur ; - local targets ; - - if $(result) - { - if [ class.is-a $(result[1]) : property-set ] - { - ur = $(result[1]) ; - targets = $(result[2-]) ; - } - else - { - ur = [ property-set.empty ] ; - targets = $(result) ; - } - } - # FIXME: the following loop should be doable using sequence.transform or - # some similar utility rule. 
- local rt ; - for local t in $(targets) - { - rt += [ virtual-target.register $(t) ] ; - } - return $(ur) $(rt) ; - } -} - - -rule generate ( name : sources * : requirements * : default-build * - : usage-requirements * ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new generated-target-class $(name) : $(project) - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] - ] ; -} - -IMPORT $(__name__) : generate : : generate ; diff --git a/jam-files/boost-build/tools/gettext.jam b/jam-files/boost-build/tools/gettext.jam deleted file mode 100644 index 99a43ffe..00000000 --- a/jam-files/boost-build/tools/gettext.jam +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module support GNU gettext internationalization utilities. -# -# It provides two main target rules: 'gettext.catalog', used for -# creating machine-readable catalogs from translations files, and -# 'gettext.update', used for update translation files from modified -# sources. -# -# To add i18n support to your application you should follow these -# steps. -# -# - Decide on a file name which will contain translations and -# what main target name will be used to update it. For example:: -# -# gettext.update update-russian : russian.po a.cpp my_app ; -# -# - Create the initial translation file by running:: -# -# bjam update-russian -# -# - Edit russian.po. For example, you might change fields like LastTranslator. -# -# - Create a main target for final message catalog:: -# -# gettext.catalog russian : russian.po ; -# -# The machine-readable catalog will be updated whenever you update -# "russian.po". The "russian.po" file will be updated only on explicit -# request. When you're ready to update translations, you should -# -# - Run:: -# -# bjam update-russian -# -# - Edit "russian.po" in appropriate editor. -# -# The next bjam run will convert "russian.po" into machine-readable form. -# -# By default, translations are marked by 'i18n' call. The 'gettext.keyword' -# feature can be used to alter this. - - -import targets ; -import property-set ; -import virtual-target ; -import "class" : new ; -import project ; -import type ; -import generators ; -import errors ; -import feature : feature ; -import toolset : flags ; -import regex ; - -.path = "" ; - -# Initializes the gettext module. -rule init ( path ? # Path where all tools are located. If not specified, - # they should be in PATH. - ) -{ - if $(.initialized) && $(.path) != $(path) - { - errors.error "Attempt to reconfigure with different path" ; - } - .initialized = true ; - if $(path) - { - .path = $(path)/ ; - } -} - -# Creates a main target 'name', which, when updated, will cause -# file 'existing-translation' to be updated with translations -# extracted from 'sources'. It's possible to specify main target -# in sources --- it which case all target from dependency graph -# of those main targets will be scanned, provided they are of -# appropricate type. The 'gettext.types' feature can be used to -# control the types. -# -# The target will be updated only if explicitly requested on the -# command line. 
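The workflow documented above maps onto three GNU gettext command-line tools, which the actions further down in this module invoke: xgettext to extract messages, msgmerge to fold them into an existing .po file, and msgfmt to compile the machine-readable catalog. A rough standalone Python sketch of the same pipeline, assuming the tools are on PATH and using illustrative file names only:

import os
import subprocess

def update_translation(po_file, sources, keyword="i18n"):
    """Extract messages from sources and merge them into an existing .po file."""
    pot_file = os.path.splitext(po_file)[0] + ".pot"
    subprocess.check_call(["xgettext", "-k" + keyword, "-o", pot_file] + list(sources))
    if not os.path.exists(po_file):
        # First run: nothing to merge yet, start from the freshly extracted messages.
        os.rename(pot_file, po_file)
    else:
        subprocess.check_call(["msgmerge", "-U", po_file, pot_file])

def compile_catalog(po_file, mo_file):
    """Build the catalog the application loads at run time (.po -> .mo)."""
    subprocess.check_call(["msgfmt", "-o", mo_file, po_file])

# e.g. update_translation("russian.po", ["a.cpp"]); compile_catalog("russian.po", "russian.mo")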
-rule update ( name : existing-translation sources + : requirements * ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new typed-target $(name) : $(project) : gettext.UPDATE : - $(existing-translation) $(sources) - : [ targets.main-target-requirements $(requirements) : $(project) ] - ] ; - $(project).mark-target-as-explicit $(name) ; -} - - -# The human editable source, containing translation. -type.register gettext.PO : po ; -# The machine readable message catalog. -type.register gettext.catalog : mo ; -# Intermediate type produce by extracting translations from -# sources. -type.register gettext.POT : pot ; -# Pseudo type used to invoke update-translations generator -type.register gettext.UPDATE ; - -# Identifies the keyword that should be used when scanning sources. -# Default: i18n -feature gettext.keyword : : free ; -# Contains space-separated list of sources types which should be scanned. -# Default: "C CPP" -feature gettext.types : : free ; - -generators.register-standard gettext.compile : gettext.PO : gettext.catalog ; - -class update-translations-generator : generator -{ - import regex : split ; - import property-set ; - - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - # The rule should be called with at least two sources. The first source - # is the translation (.po) file to update. The remaining sources are targets - # which should be scanned for new messages. All sources files for those targets - # will be found and passed to the 'xgettext' utility, which extracts the - # messages for localization. Those messages will be merged to the .po file. - rule run ( project name ? : property-set : sources * : multiple ? ) - { - local types = [ $(property-set).get <gettext.types> ] ; - types ?= "C CPP" ; - types = [ regex.split $(types) " " ] ; - - local keywords = [ $(property-set).get <gettext.keyword> ] ; - property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ; - - # First deterime the list of sources that must be scanned for - # messages. - local all-sources ; - # CONSIDER: I'm not sure if the logic should be the same as for 'stage': - # i.e. following dependency properties as well. - for local s in $(sources[2-]) - { - all-sources += [ virtual-target.traverse $(s) : : include-sources ] ; - } - local right-sources ; - for local s in $(all-sources) - { - if [ $(s).type ] in $(types) - { - right-sources += $(s) ; - } - } - - local .constructed ; - if $(right-sources) - { - # Create the POT file, which will contain list of messages extracted - # from the sources. - local extract = - [ new action $(right-sources) : gettext.extract : $(property-set) ] ; - local new-messages = [ new file-target $(name) : gettext.POT - : $(project) : $(extract) ] ; - - # Create a notfile target which will update the existing translation file - # with new messages. - local a = [ new action $(sources[1]) $(new-messages) - : gettext.update-po-dispatch ] ; - local r = [ new notfile-target $(name) : $(project) : $(a) ] ; - .constructed = [ virtual-target.register $(r) ] ; - } - else - { - errors.error "No source could be scanned by gettext tools" ; - } - return $(.constructed) ; - } -} -generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ; - -flags gettext.extract KEYWORD <gettext.keyword> ; -actions extract -{ - $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>) -} - -# Does realy updating of po file. 
The tricky part is that -# we're actually updating one of the sources: -# $(<) is the NOTFILE target we're updating -# $(>[1]) is the PO file to be really updated. -# $(>[2]) is the PO file created from sources. -# -# When file to be updated does not exist (during the -# first run), we need to copy the file created from sources. -# In all other cases, we need to update the file. -rule update-po-dispatch -{ - NOCARE $(>[1]) ; - gettext.create-po $(<) : $(>) ; - gettext.update-po $(<) : $(>) ; - _ on $(<) = " " ; - ok on $(<) = "" ; - EXISTING_PO on $(<) = $(>[1]) ; -} - -# Due to fancy interaction of existing and updated, this rule can be called with -# one source, in which case we copy the lonely source into EXISTING_PO, or with -# two sources, in which case the action body expands to nothing. I'd really like -# to have "missing" action modifier. -actions quietly existing updated create-po bind EXISTING_PO -{ - cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok)) -} - -actions updated update-po bind EXISTING_PO -{ - $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])" -} - -actions gettext.compile -{ - $(.path)msgfmt -o $(<) $(>) -} - -IMPORT $(__name__) : update : : gettext.update ; diff --git a/jam-files/boost-build/tools/gfortran.jam b/jam-files/boost-build/tools/gfortran.jam deleted file mode 100644 index 0aa69b85..00000000 --- a/jam-files/boost-build/tools/gfortran.jam +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? : command * : options * ) -{ -} - -# Declare flags and action for compilation -flags gfortran OPTIONS <fflags> ; - -flags gfortran OPTIONS <optimization>off : -O0 ; -flags gfortran OPTIONS <optimization>speed : -O3 ; -flags gfortran OPTIONS <optimization>space : -Os ; - -flags gfortran OPTIONS <debug-symbols>on : -g ; -flags gfortran OPTIONS <profiling>on : -pg ; - -flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ; - -flags gfortran DEFINES <define> ; -flags gfortran INCLUDES <include> ; - -rule compile.fortran -{ -} - -actions compile.fortran -{ - gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)" -} - -generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ; diff --git a/jam-files/boost-build/tools/hp_cxx.jam b/jam-files/boost-build/tools/hp_cxx.jam deleted file mode 100644 index 86cd783e..00000000 --- a/jam-files/boost-build/tools/hp_cxx.jam +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright 2001 David Abrahams. -# Copyright 2004, 2005 Markus Schoepflin. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# -# HP CXX compiler -# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN -# -# -# Notes on this toolset: -# -# - Because of very subtle issues with the default ansi mode, strict_ansi mode -# is used for compilation. One example of things that don't work correctly in -# the default ansi mode is overload resolution of function templates when -# mixed with non-template functions. -# -# - For template instantiation "-timplicit_local" is used. Previously, -# "-tlocal" has been tried to avoid the need for a template repository -# but this doesn't work with manually instantiated templates. 
"-tweak" -# has not been used to avoid the stream of warning messages issued by -# ar or ld when creating a library or linking an application. -# -# - Debug symbols are generated with "-g3", as this works both in debug and -# release mode. When compiling C++ code without optimization, we additionally -# use "-gall", which generates full symbol table information for all classes, -# structs, and unions. As this turns off optimization, it can't be used when -# optimization is needed. -# - -import feature generators common ; -import toolset : flags ; - -feature.extend toolset : hp_cxx ; -feature.extend c++abi : cxxarm ; - -# Inherit from Unix toolset to get library ordering magic. -toolset.inherit hp_cxx : unix ; - -generators.override hp_cxx.prebuilt : builtin.lib-generator ; -generators.override hp_cxx.prebuilt : builtin.prebuilt ; -generators.override hp_cxx.searched-lib-generator : searched-lib-generator ; - - -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ; - - local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ; - - if $(command) - { - local root = [ common.get-absolute-tool-path $(command[-1]) ] ; - - if $(root) - { - flags hp_cxx .root $(condition) : "\"$(root)\"/" ; - } - } - # If we can't find 'cxx' anyway, at least show 'cxx' in the commands - command ?= cxx ; - - common.handle-options hp_cxx : $(condition) : $(command) : $(options) ; -} - -generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ; -generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ; - - - -# No static linking as far as I can tell. -# flags cxx LINKFLAGS <runtime-link>static : -bstatic ; -flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ; -flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ; -flags hp_cxx.link OPTIONS <debug-symbols>on : -g ; -flags hp_cxx.link OPTIONS <debug-symbols>off : -s ; - -flags hp_cxx.compile OPTIONS <optimization>off : -O0 ; -flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ; -flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ; - -# This (undocumented) macro needs to be defined to get all C function -# overloads required by the C++ standard. -flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ; - -# Added for threading support -flags hp_cxx.compile OPTIONS <threading>multi : -pthread ; -flags hp_cxx.link OPTIONS <threading>multi : -pthread ; - -flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ; -flags hp_cxx.compile OPTIONS <optimization>space : -O1 ; -flags hp_cxx.compile OPTIONS <inlining>off : -inline none ; - -# The compiler versions tried (up to V6.5-040) hang when compiling Boost code -# with full inlining enabled. So leave it at the default level for now. -# -# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ; - -flags hp_cxx.compile OPTIONS <profiling>on : -pg ; -flags hp_cxx.link OPTIONS <profiling>on : -pg ; - -# Selection of the object model. This flag is needed on both the C++ compiler -# and linker command line. - -# Unspecified ABI translates to '-model ansi' as most -# standard-conforming. -flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ; -flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ; -flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ; -flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ; - -# Display a descriptive tag together with each compiler message. 
This tag can -# be used by the user to explicitely suppress the compiler message. -flags hp_cxx.compile OPTIONS : -msg_display_tag ; - -flags hp_cxx.compile OPTIONS <cflags> ; -flags hp_cxx.compile.c++ OPTIONS <cxxflags> ; -flags hp_cxx.compile DEFINES <define> ; -flags hp_cxx.compile INCLUDES <include> ; -flags hp_cxx.link OPTIONS <linkflags> ; - -flags hp_cxx.link LIBPATH <library-path> ; -flags hp_cxx.link LIBRARIES <library-file> ; -flags hp_cxx.link FINDLIBS-ST <find-static-library> ; -flags hp_cxx.link FINDLIBS-SA <find-shared-library> ; - -flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; - -actions link bind LIBRARIES -{ - $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm -} - -# When creating dynamic libraries, we don't want to be warned about unresolved -# symbols, therefore all unresolved symbols are marked as expected by -# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool -# chain. - -actions link.dll bind LIBRARIES -{ - $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm -} - - -# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI -# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std -# is the default, no special flag is needed. -actions compile.c -{ - $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" -} - -# Note: The compiler is forced to compile the files as C++ (-x cxx) because -# otherwise it will silently ignore files with no file extension. -# -# Note: We deliberately don't suppress any warnings on the compiler command -# line, the user can always do this in a customized toolset later on. - -rule compile.c++ -{ - # We preprocess the TEMPLATE_DEPTH command line option here because we found - # no way to do it correctly in the actual action code. There we either get - # the -pending_instantiations parameter when no c++-template-depth property - # has been specified or we get additional quotes around - # "-pending_instantiations ". - local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ; - TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ; -} - -actions compile.c++ -{ - $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" -} - -# Always create archive from scratch. See the gcc toolet for rationale. -RM = [ common.rm-command ] ; -actions together piecemeal archive -{ - $(RM) "$(<)" - ar rc $(<) $(>) -} diff --git a/jam-files/boost-build/tools/hpfortran.jam b/jam-files/boost-build/tools/hpfortran.jam deleted file mode 100644 index 96e8d18b..00000000 --- a/jam-files/boost-build/tools/hpfortran.jam +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? 
: command * : options * ) -{ -} - -# Declare flags and action for compilation -flags hpfortran OPTIONS <optimization>off : -O0 ; -flags hpfortran OPTIONS <optimization>speed : -O3 ; -flags hpfortran OPTIONS <optimization>space : -O1 ; - -flags hpfortran OPTIONS <debug-symbols>on : -g ; -flags hpfortran OPTIONS <profiling>on : -pg ; - -flags hpfortran DEFINES <define> ; -flags hpfortran INCLUDES <include> ; - -rule compile.fortran -{ -} - -actions compile.fortran -{ - f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)" -} - -generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ; diff --git a/jam-files/boost-build/tools/ifort.jam b/jam-files/boost-build/tools/ifort.jam deleted file mode 100644 index eb7c1988..00000000 --- a/jam-files/boost-build/tools/ifort.jam +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? : command * : options * ) -{ -} - -# Declare flags and action for compilation -flags ifort OPTIONS <fflags> ; - -flags ifort OPTIONS <optimization>off : /Od ; -flags ifort OPTIONS <optimization>speed : /O3 ; -flags ifort OPTIONS <optimization>space : /O1 ; - -flags ifort OPTIONS <debug-symbols>on : /debug:full ; -flags ifort OPTIONS <profiling>on : /Qprof_gen ; - -flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ; -flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ; -flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ; -flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ; -flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ; -flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ; - -flags ifort DEFINES <define> ; -flags ifort INCLUDES <include> ; - -rule compile.fortran -{ -} - -actions compile.fortran -{ - ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)" -} - -generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ; diff --git a/jam-files/boost-build/tools/intel-darwin.jam b/jam-files/boost-build/tools/intel-darwin.jam deleted file mode 100644 index aa0fd8fb..00000000 --- a/jam-files/boost-build/tools/intel-darwin.jam +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Noel Belcourt 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -import intel ; -import feature : feature ; -import os ; -import toolset ; -import toolset : flags ; -import gcc ; -import common ; -import errors ; -import generators ; - -feature.extend-subfeature toolset intel : platform : darwin ; - -toolset.inherit-generators intel-darwin - <toolset>intel <toolset-intel:platform>darwin - : gcc - # Don't inherit PCH generators. They were not tested, and probably - # don't work for this compiler. 
- : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch - ; - -generators.override intel-darwin.prebuilt : builtin.lib-generator ; -generators.override intel-darwin.prebuilt : builtin.prebuilt ; -generators.override intel-darwin.searched-lib-generator : searched-lib-generator ; - -toolset.inherit-rules intel-darwin : gcc ; -toolset.inherit-flags intel-darwin : gcc - : <inlining>off <inlining>on <inlining>full <optimization>space - <warnings>off <warnings>all <warnings>on - <architecture>x86/<address-model>32 - <architecture>x86/<address-model>64 - ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -# vectorization diagnostics -feature vectorize : off on full ; - -# Initializes the intel-darwin toolset -# version in mandatory -# name (default icc) is used to invoke the specified intel complier -# compile and link options allow you to specify addition command line options for each version -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters intel-darwin - : version $(version) ] ; - - command = [ common.get-invocation-command intel-darwin : icc - : $(command) : /opt/intel_cc_80/bin ] ; - - common.handle-options intel-darwin : $(condition) : $(command) : $(options) ; - - gcc.init-link-flags intel-darwin darwin $(condition) ; - - # handle <library-path> - # local library-path = [ feature.get-values <library-path> : $(options) ] ; - # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ; - - local root = [ feature.get-values <root> : $(options) ] ; - local bin ; - if $(command) || $(root) - { - bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; - root ?= $(bin:D) ; - - if $(root) - { - # Libraries required to run the executable may be in either - # $(root)/lib (10.1 and earlier) - # or - # $(root)/lib/architecture-name (11.0 and later: - local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ; - if $(.debug-configuration) - { - ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ; - } - flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ; - } - } - - local m = [ MATCH (..).* : $(version) ] ; - local n = [ MATCH (.)\\. 
: $(m) ] ; - if $(n) { - m = $(n) ; - } - - local major = $(m) ; - - if $(major) = "9" { - flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ; - flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ; - flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ; - flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ; - flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ; - flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ; - flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ; - flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ; - } - else { - flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ; - flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ; - flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ; - flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ; - flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ; - flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ; - flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ; - flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ; - } - - local minor = [ MATCH ".*\\.(.).*" : $(version) ] ; - - # wchar_t char_traits workaround for compilers older than 10.2 - if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) { - flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ; - } -} - -SPACE = " " ; - -flags intel-darwin.compile OPTIONS <cflags> ; -flags intel-darwin.compile OPTIONS <cxxflags> ; -# flags intel-darwin.compile INCLUDES <include> ; - -flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc - -# -cpu-type-em64t = prescott nocona ; -flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ; -flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ; - -flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ; -flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ; -flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ; - -flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ; -flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ; -flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ; - -actions compile.c -{ - "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -flags intel-darwin ARFLAGS <archiveflags> ; - -# Default value. Mostly for the sake of intel-linux -# that inherits from gcc, but does not has the same -# logic to set the .AR variable. We can put the same -# logic in intel-linux, but that's hardly worth the trouble -# as on Linux, 'ar' is always available. -.AR = ar ; - -rule archive ( targets * : sources * : properties * ) -{ - # Always remove archive and start again. Here's rationale from - # Andre Hentz: - # - # I had a file, say a1.c, that was included into liba.a. - # I moved a1.c to a2.c, updated my Jamfiles and rebuilt. 
- # My program was crashing with absurd errors. - # After some debugging I traced it back to the fact that a1.o was *still* - # in liba.a - # - # Rene Rivera: - # - # Originally removing the archive was done by splicing an RM - # onto the archive action. That makes archives fail to build on NT - # when they have many files because it will no longer execute the - # action directly and blow the line length limit. Instead we - # remove the file in a different action, just before the building - # of the archive. - # - local clean.a = $(targets[1])(clean) ; - TEMPORARY $(clean.a) ; - NOCARE $(clean.a) ; - LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; - DEPENDS $(clean.a) : $(sources) ; - DEPENDS $(targets) : $(clean.a) ; - common.RmTemps $(clean.a) : $(targets) ; -} - -actions piecemeal archive -{ - "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" - "ranlib" -cs "$(<)" -} - -flags intel-darwin.link USER_OPTIONS <linkflags> ; - -# Declare actions for linking -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; - # Serialize execution of the 'link' action, since - # running N links in parallel is just slower. - JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} diff --git a/jam-files/boost-build/tools/intel-linux.jam b/jam-files/boost-build/tools/intel-linux.jam deleted file mode 100644 index d9164add..00000000 --- a/jam-files/boost-build/tools/intel-linux.jam +++ /dev/null @@ -1,250 +0,0 @@ -# Copyright (c) 2003 Michael Stevens -# Copyright (c) 2011 Bryce Lelbach -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import toolset ; -import feature ; -import toolset : flags ; - -import intel ; -import gcc ; -import common ; -import errors ; -import generators ; -import type ; -import numbers ; - -feature.extend-subfeature toolset intel : platform : linux ; - -toolset.inherit-generators intel-linux - <toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ; -generators.override intel-linux.prebuilt : builtin.lib-generator ; -generators.override intel-linux.prebuilt : builtin.prebuilt ; -generators.override intel-linux.searched-lib-generator : searched-lib-generator ; - -# Override default do-nothing generators. 
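A note on the JAM_SEMAPHORE assignments in the link rules above (and in the gcc toolset earlier): binding all link actions to one semaphore makes the build engine run them one at a time even under -jN, since several simultaneous links are just slower. The same idea in plain Python, purely as an illustration (the linker command and job list are hypothetical):

import subprocess
import threading

link_semaphore = threading.Semaphore(1)  # counterpart of <s>...-link-semaphore

def link(output, objects, linker="icpc"):
    # Compile jobs may run in parallel elsewhere; links funnel through the semaphore.
    with link_semaphore:
        subprocess.check_call([linker, "-o", output] + list(objects))

# from concurrent.futures import ThreadPoolExecutor
# with ThreadPoolExecutor(max_workers=4) as pool:
#     list(pool.map(lambda job: link(*job), jobs))  # 'jobs': hypothetical list of (output, objects)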
-generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ; -generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ; - -type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ; - -toolset.inherit-rules intel-linux : gcc ; -toolset.inherit-flags intel-linux : gcc - : <inlining>off <inlining>on <inlining>full - <optimization>space <optimization>speed - <warnings>off <warnings>all <warnings>on - ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -# Initializes the intel-linux toolset -# version in mandatory -# name (default icpc) is used to invoke the specified intel-linux complier -# compile and link options allow you to specify addition command line options for each version -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters intel-linux - : version $(version) ] ; - - if $(.debug-configuration) - { - ECHO "notice: intel-linux version is" $(version) ; - } - - local default_path ; - - # Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0, - # aka intel-linux-12.0. In this version, Intel thankfully decides to install - # to a sane 'intel' folder in /opt. - if [ MATCH "(12[.]0|12)" : $(version) ] - { default_path = /opt/intel/bin ; } - # Intel C++ Compiler 11.1. - else if [ MATCH "(11[.]1)" : $(version) ] - { default_path = /opt/intel_cce_11.1.064.x86_64/bin ; } - # Intel C++ Compiler 11.0. - else if [ MATCH "(11[.]0|11)" : $(version) ] - { default_path = /opt/intel_cce_11.0.074.x86_64/bin ; } - # Intel C++ Compiler 10.1. - else if [ MATCH "(10[.]1)" : $(version) ] - { default_path = /opt/intel_cce_10.1.013_x64/bin ; } - # Intel C++ Compiler 9.1. - else if [ MATCH "(9[.]1)" : $(version) ] - { default_path = /opt/intel_cc_91/bin ; } - # Intel C++ Compiler 9.0. - else if [ MATCH "(9[.]0|9)" : $(version) ] - { default_path = /opt/intel_cc_90/bin ; } - # Intel C++ Compiler 8.1. - else if [ MATCH "(8[.]1)" : $(version) ] - { default_path = /opt/intel_cc_81/bin ; } - # Intel C++ Compiler 8.0 - this used to be the default, so now it's the - # fallback. - else - { default_path = /opt/intel_cc_80/bin ; } - - if $(.debug-configuration) - { - ECHO "notice: default search path for intel-linux is" $(default_path) ; - } - - command = [ common.get-invocation-command intel-linux : icpc - : $(command) : $(default_path) ] ; - - common.handle-options intel-linux : $(condition) : $(command) : $(options) ; - - gcc.init-link-flags intel-linux gnu $(condition) ; - - local root = [ feature.get-values <root> : $(options) ] ; - local bin ; - if $(command) || $(root) - { - bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; - root ?= $(bin:D) ; - - local command-string = $(command:J=" ") ; - local version-output = [ SHELL "$(command-string) --version" ] ; - local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ; - local major = [ MATCH "([0-9]+).*" : $(real-version) ] ; - - # If we failed to determine major version, use the behaviour for - # the current compiler. 
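The branches below choose between the old -Ob0/-Ob1/-Ob2 inlining switches, the newer -inline-level=N spelling, and -O1 versus -Os for size optimization, keyed on the major version probed from "icpc --version". Roughly the same probe as standalone Python, assuming only that the configured icpc is runnable:

import re
import subprocess

def inlining_and_size_flags(command="icpc"):
    """Return flag spellings appropriate for the detected major compiler version."""
    out = subprocess.run([command, "--version"], capture_output=True, text=True).stdout
    m = re.search(r"([0-9.]+)", out)
    major = int(m.group(1).split(".")[0]) if m else None
    if major is not None and major < 10:
        return {"inlining-off": "-Ob0", "inlining-on": "-Ob1",
                "inlining-full": "-Ob2", "optimization-space": "-O1"}
    if major is not None and major < 11:
        return {"inlining-off": "-inline-level=0", "inlining-on": "-inline-level=1",
                "inlining-full": "-inline-level=2", "optimization-space": "-O1"}
    # 11 and newer (and the unknown-version fallback) also accept -Os.
    return {"inlining-off": "-inline-level=0", "inlining-on": "-inline-level=1",
            "inlining-full": "-inline-level=2", "optimization-space": "-Os"}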
- if $(major) && [ numbers.less $(major) 10 ] - { - flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ; - flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ; - flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ; - flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ; - flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ; - } - else if $(major) && [ numbers.less $(major) 11 ] - { - flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ; - flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ; - flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ; - flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ; - flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ; - } - else # newer version of intel do have -Os (at least 11+, don't know about 10) - { - flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ; - flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ; - flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ; - flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ; - flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ; - } - - if $(root) - { - # Libraries required to run the executable may be in either - # $(root)/lib (10.1 and earlier) - # or - # $(root)/lib/architecture-name (11.0 and later: - local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ; - if $(.debug-configuration) - { - ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ; - } - flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ; - } - } -} - -SPACE = " " ; - -flags intel-linux.compile OPTIONS <warnings>off : -w0 ; -flags intel-linux.compile OPTIONS <warnings>on : -w1 ; -flags intel-linux.compile OPTIONS <warnings>all : -w2 ; - -rule compile.c++ ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -actions compile.c++ bind PCH_FILE -{ - "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)" -} - -rule compile.c ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -actions compile.c bind PCH_FILE -{ - "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)" -} - -rule compile.c++.pch ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; -} -# -# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler -# won't over-write an existing pch: instead it creates filename$1.pchi, filename$2.pchi -# etc - which appear not to do anything except take up disk space :-( -# -actions compile.c++.pch -{ - rm -f "$(<)" && "$(CONFIG_COMMAND)" -x 
c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)" -} - -actions compile.fortran -{ - "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.c.pch ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-fpic $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c.pch -{ - rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)" -} - -rule link ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - SPACE on $(targets) = " " ; - JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - -rule link.dll ( targets * : sources * : properties * ) -{ - gcc.setup-threading $(targets) : $(sources) : $(properties) ; - gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - SPACE on $(targets) = " " ; - JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ; -} - -# Differ from 'link' above only by -shared. -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - - - diff --git a/jam-files/boost-build/tools/intel-win.jam b/jam-files/boost-build/tools/intel-win.jam deleted file mode 100644 index 691b5dce..00000000 --- a/jam-files/boost-build/tools/intel-win.jam +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# Importing common is needed because the rules we inherit here depend on it. -# That is nasty. -import common ; -import errors ; -import feature ; -import intel ; -import msvc ; -import os ; -import toolset ; -import generators ; -import type ; - -feature.extend-subfeature toolset intel : platform : win ; - -toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ; -toolset.inherit-flags intel-win : msvc : : YLOPTION ; -toolset.inherit-rules intel-win : msvc ; - -# Override default do-nothing generators. -generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ; -generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ; -generators.override intel-win.compile.rc : rc.compile.resource ; -generators.override intel-win.compile.mc : mc.compile ; - -toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ; - -toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ; - -# Initializes the intel toolset for windows -rule init ( version ? : # the compiler version - command * : # the command to invoke the compiler itself - options * # Additional option: <compatibility> - # either 'vc6', 'vc7', 'vc7.1' - # or 'native'(default). 
- ) -{ - local compatibility = - [ feature.get-values <compatibility> : $(options) ] ; - local condition = [ common.check-init-parameters intel-win - : version $(version) : compatibility $(compatibility) ] ; - - command = [ common.get-invocation-command intel-win : icl.exe : - $(command) ] ; - - common.handle-options intel-win : $(condition) : $(command) : $(options) ; - - local root ; - if $(command) - { - root = [ common.get-absolute-tool-path $(command[-1]) ] ; - root = $(root)/ ; - } - - local setup ; - setup = [ GLOB $(root) : iclvars_*.bat ] ; - if ! $(setup) - { - setup = $(root)/iclvars.bat ; - } - setup = "call \""$(setup)"\" > nul " ; - - if [ os.name ] = NT - { - setup = $(setup)" -" ; - } - else - { - setup = "cmd /S /C "$(setup)" \"&&\" " ; - } - - toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ; - toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ; - toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ; - toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ; - toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ; - toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ; - - local m = [ MATCH (.).* : $(version) ] ; - local major = $(m[1]) ; - - local C++FLAGS ; - - C++FLAGS += /nologo ; - - # Reduce the number of spurious error messages - C++FLAGS += /Qwn5 /Qwd985 ; - - # Enable ADL - C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too - - # Disable Microsoft "secure" overloads in Dinkumware libraries since they - # cause compile errors with Intel versions 9 and 10. - C++FLAGS += -D_SECURE_SCL=0 ; - - if $(major) > 5 - { - C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping. - } - - # Add options recognized only by intel7 and above. - if $(major) >= 7 - { - C++FLAGS += /Qansi_alias ; - } - - if $(compatibility) = vc6 - { - C++FLAGS += - # Emulate VC6 - /Qvc6 - - # No wchar_t support in vc6 dinkum library. Furthermore, in vc6 - # compatibility-mode, wchar_t is not a distinct type from unsigned - # short. - -DBOOST_NO_INTRINSIC_WCHAR_T - ; - } - else - { - if $(major) > 5 - { - # Add support for wchar_t - C++FLAGS += /Zc:wchar_t - # Tell the dinkumware library about it. - -D_NATIVE_WCHAR_T_DEFINED - ; - } - } - - if $(compatibility) && $(compatibility) != native - { - C++FLAGS += /Q$(base-vc) ; - } - else - { - C++FLAGS += - -Qoption,cpp,--arg_dep_lookup - # The following options were intended to disable the Intel compiler's - # 'bug-emulation' mode, but were later reported to be causing ICE with - # Intel-Win 9.0. It is not yet clear which options can be safely used. - # -Qoption,cpp,--const_string_literals - # -Qoption,cpp,--new_for_init - # -Qoption,cpp,--no_implicit_typename - # -Qoption,cpp,--no_friend_injection - # -Qoption,cpp,--no_microsoft_bugs - ; - } - - toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ; - # By default, when creating PCH, intel adds 'i' to the explicitly - # specified name of the PCH file. Of course, Boost.Build is not - # happy when compiler produces not the file it was asked for. - # The option below stops this behaviour. - toolset.flags intel-win CFLAGS : -Qpchi- ; - - if ! $(compatibility) - { - # If there's no backend version, assume 7.1. - compatibility = vc7.1 ; - } - - local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ; - if ! $(extract-version) - { - errors.user-error "Invalid value for compatibility option:" - $(compatibility) ; - } - - # Depending on the settings, running of tests require some runtime DLLs. 
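The init rule above does not invoke icl and friends directly; it prefixes every tool with a shell command that first runs the located iclvars batch file, so each invocation sees Intel's environment variables ("call iclvars.bat > nul && icl ..."). A simplified Python sketch of that wrapping, with a placeholder path (the real rule GLOBs for iclvars_*.bat next to icl.exe); the RUN_PATH flag that follows then covers the runtime DLLs mentioned just above:

import subprocess

def run_with_iclvars(tool_args, iclvars=r"C:\placeholder\iclvars.bat"):
    """Run an Intel tool inside the environment prepared by iclvars.bat."""
    # One cmd.exe instance: set up the environment, then run the tool in it.
    command = 'call "{}" > nul && {}'.format(iclvars, subprocess.list2cmdline(tool_args))
    subprocess.check_call(["cmd", "/S", "/C", command])

# Example with hypothetical arguments: run_with_iclvars(["icl", "/nologo", "/c", "a.cpp"])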
- toolset.flags intel-win RUN_PATH $(condition) : $(root) ; - - msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ; -} - -toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ; - -toolset.flags intel-win YLOPTION ; - diff --git a/jam-files/boost-build/tools/intel.jam b/jam-files/boost-build/tools/intel.jam deleted file mode 100644 index 67038aa2..00000000 --- a/jam-files/boost-build/tools/intel.jam +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This is a generic 'intel' toolset. Depending on the current -# system, it forwards either to 'intel-linux' or 'intel-win' -# modules. - -import feature ; -import os ; -import toolset ; - -feature.extend toolset : intel ; -feature.subfeature toolset intel : platform : : propagated link-incompatible ; - -rule init ( * : * ) -{ - if [ os.name ] = LINUX - { - toolset.using intel-linux : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - else if [ os.name ] = MACOSX - { - toolset.using intel-darwin : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - else - { - toolset.using intel-win : - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } -} diff --git a/jam-files/boost-build/tools/lex.jam b/jam-files/boost-build/tools/lex.jam deleted file mode 100644 index 75d64131..00000000 --- a/jam-files/boost-build/tools/lex.jam +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import type ; -import generators ; -import feature ; -import property ; - - -feature.feature flex.prefix : : free ; -type.register LEX : l ; -type.register LEX++ : ll ; -generators.register-standard lex.lex : LEX : C ; -generators.register-standard lex.lex : LEX++ : CPP ; - -rule init ( ) -{ -} - -rule lex ( target : source : properties * ) -{ - local r = [ property.select flex.prefix : $(properties) ] ; - if $(r) - { - PREFIX on $(<) = $(r:G=) ; - } -} - -actions lex -{ - flex -P$(PREFIX) -o$(<) $(>) -} diff --git a/jam-files/boost-build/tools/make.jam b/jam-files/boost-build/tools/make.jam deleted file mode 100644 index 08567285..00000000 --- a/jam-files/boost-build/tools/make.jam +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003 Douglas Gregor -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'make' main target rule. - -import "class" : new ; -import errors : error ; -import project ; -import property ; -import property-set ; -import regex ; -import targets ; - - -class make-target-class : basic-target -{ - import type regex virtual-target ; - import "class" : new ; - - rule __init__ ( name : project : sources * : requirements * - : default-build * : usage-requirements * ) - { - basic-target.__init__ $(name) : $(project) : $(sources) : - $(requirements) : $(default-build) : $(usage-requirements) ; - } - - rule construct ( name : source-targets * : property-set ) - { - local action-name = [ $(property-set).get <action> ] ; - # 'm' will always be set -- we add '@' ourselves in the 'make' rule - # below. 
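In other words, the generating rule is always stored in the <action> property as "@rule-name" and unwrapped again with MATCH when the target is constructed; the Python port in make.py below does the same thing. A small sketch of that bookkeeping outside Boost.Build:

import re

def qualify(generating_rule):
    """Store the rule as '@name', adding the '@' only when the caller left it off."""
    return generating_rule if generating_rule.startswith("@") else "@" + generating_rule

def rule_name(action_property):
    """Counterpart of [ MATCH ^@(.*) ]: strip the leading '@' again."""
    m = re.match(r"^@(.*)", action_property)
    return m.group(1) if m else None

# qualify("mygen") == "@mygen"; rule_name("@mygen") == "mygen"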
- local m = [ MATCH ^@(.*) : $(action-name) ] ; - - local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ; - local t = [ new file-target $(self.name) exact : [ type.type - $(self.name) ] : $(self.project) : $(a) ] ; - return [ property-set.empty ] [ virtual-target.register $(t) ] ; - } -} - - -# Declares the 'make' main target. -# -rule make ( target-name : sources * : generating-rule + : requirements * : - usage-requirements * ) -{ - local project = [ project.current ] ; - - # The '@' sign causes the feature.jam module to qualify rule name with the - # module name of current project, if needed. - local m = [ MATCH ^(@).* : $(generating-rule) ] ; - if ! $(m) - { - generating-rule = @$(generating-rule) ; - } - requirements += <action>$(generating-rule) ; - - targets.main-target-alternative - [ new make-target-class $(target-name) : $(project) - : [ targets.main-target-sources $(sources) : $(target-name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build : $(project) ] - : [ targets.main-target-usage-requirements $(usage-requirements) : - $(project) ] ] ; -} - - -IMPORT $(__name__) : make : : make ; diff --git a/jam-files/boost-build/tools/make.py b/jam-files/boost-build/tools/make.py deleted file mode 100644 index 10baa1cb..00000000 --- a/jam-files/boost-build/tools/make.py +++ /dev/null @@ -1,59 +0,0 @@ -# Status: ported. -# Base revision: 64068 - -# Copyright 2003 Dave Abrahams -# Copyright 2003 Douglas Gregor -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'make' main target rule. - -from b2.build.targets import BasicTarget -from b2.build.virtual_target import Action, FileTarget -from b2.build import type -from b2.manager import get_manager -import b2.build.property_set - - -class MakeTarget(BasicTarget): - - def construct(self, name, source_targets, property_set): - - action_name = property_set.get("<action>")[0] - action = Action(get_manager(), source_targets, action_name[1:], property_set) - target = FileTarget(self.name(), type.type(self.name()), - self.project(), action, exact=True) - return [ b2.build.property_set.empty(), - [self.project().manager().virtual_targets().register(target)]] - -def make (target_name, sources, generating_rule, - requirements=None, usage_requirements=None): - - target_name = target_name[0] - generating_rule = generating_rule[0] - if generating_rule[0] != '@': - generating_rule = '@' + generating_rule - - if not requirements: - requirements = [] - - - requirements.append("<action>%s" % generating_rule) - - m = get_manager() - targets = m.targets() - project = m.projects().current() - engine = m.engine() - engine.register_bjam_action(generating_rule) - - targets.main_target_alternative(MakeTarget( - target_name, project, - targets.main_target_sources(sources, target_name), - targets.main_target_requirements(requirements, project), - targets.main_target_default_build([], project), - targets.main_target_usage_requirements(usage_requirements or [], project))) - -get_manager().projects().add_rule("make", make) - diff --git a/jam-files/boost-build/tools/mc.jam b/jam-files/boost-build/tools/mc.jam deleted file mode 100644 index 57837773..00000000 --- a/jam-files/boost-build/tools/mc.jam +++ /dev/null @@ -1,44 +0,0 @@ -#~ Copyright 2005 Alexey Pakhunov. 
-#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Support for Microsoft message compiler tool. -# Notes: -# - there's just message compiler tool, there's no tool for -# extracting message strings from sources -# - This file allows to use Microsoft message compiler -# with any toolset. In msvc.jam, there's more specific -# message compiling action. - -import common ; -import generators ; -import feature : feature get-values ; -import toolset : flags ; -import type ; -import rc ; - -rule init ( ) -{ -} - -type.register MC : mc ; - - -# Command line options -feature mc-input-encoding : ansi unicode : free ; -feature mc-output-encoding : unicode ansi : free ; -feature mc-set-customer-bit : no yes : free ; - -flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ; -flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ; -flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ; -flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ; -flags mc.compile MCFLAGS <mc-set-customer-bit>no : ; -flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ; - -generators.register-standard mc.compile : MC : H RC ; - -actions compile -{ - mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)" -} diff --git a/jam-files/boost-build/tools/message.jam b/jam-files/boost-build/tools/message.jam deleted file mode 100644 index 212d8542..00000000 --- a/jam-files/boost-build/tools/message.jam +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2008 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target type 'message', that prints a message when built for the -# first time. - -import project ; -import "class" : new ; -import targets ; -import property-set ; - -class message-target-class : basic-target -{ - rule __init__ ( name-and-dir : project : * ) - { - basic-target.__init__ $(name-and-dir) : $(project) ; - self.3 = $(3) ; - self.4 = $(4) ; - self.5 = $(5) ; - self.6 = $(6) ; - self.7 = $(7) ; - self.8 = $(8) ; - self.9 = $(9) ; - self.built = ; - } - - rule construct ( name : source-targets * : property-set ) - { - if ! $(self.built) - { - for i in 3 4 5 6 7 8 9 - { - if $(self.$(i)) - { - ECHO $(self.$(i)) ; - } - } - self.built = 1 ; - } - - return [ property-set.empty ] ; - } -} - - -rule message ( name : * ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new message-target-class $(name) : $(project) - : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ; -} -IMPORT $(__name__) : message : : message ;
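The mc module above is essentially a table from feature values to switches of the Microsoft message compiler, plus a single action that asks mc for a header and a resource script. The same mapping written out as a small Python helper, purely illustrative (the generator/flags machinery does the real work):

import subprocess

# Feature value -> mc switch, as declared by the flags statements above.
MC_FLAGS = {
    ("mc-input-encoding", "ansi"): "-a",
    ("mc-input-encoding", "unicode"): "-u",
    ("mc-output-encoding", "ansi"): "-A",
    ("mc-output-encoding", "unicode"): "-U",
    ("mc-set-customer-bit", "no"): None,   # no switch for the default
    ("mc-set-customer-bit", "yes"): "-c",
}

def compile_mc(mc_source, header_dir, rc_dir, properties):
    """Invoke the message compiler the way the 'compile' action above does."""
    flags = [MC_FLAGS[p] for p in properties if MC_FLAGS.get(p)]
    subprocess.check_call(["mc"] + flags + ["-h", header_dir, "-r", rc_dir, mc_source])

# Example with hypothetical paths: compile_mc("events.mc", "out", "out", [("mc-input-encoding", "unicode")])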
\ No newline at end of file diff --git a/jam-files/boost-build/tools/message.py b/jam-files/boost-build/tools/message.py deleted file mode 100644 index cc0b946f..00000000 --- a/jam-files/boost-build/tools/message.py +++ /dev/null @@ -1,46 +0,0 @@ -# Status: ported. -# Base revision: 64488. -# -# Copyright 2008, 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target type 'message', that prints a message when built for the -# first time. - -import b2.build.targets as targets -import b2.build.property_set as property_set - -from b2.manager import get_manager - -class MessageTargetClass(targets.BasicTarget): - - def __init__(self, name, project, *args): - - targets.BasicTarget.__init__(self, name, project, []) - self.args = args - self.built = False - - def construct(self, name, sources, ps): - - if not self.built: - for arg in self.args: - if type(arg) == type([]): - arg = " ".join(arg) - print arg - self.built = True - - return (property_set.empty(), []) - -def message(name, *args): - - if type(name) == type([]): - name = name[0] - - t = get_manager().targets() - - project = get_manager().projects().current() - - return t.main_target_alternative(MessageTargetClass(*((name, project) + args))) - -get_manager().projects().add_rule("message", message) diff --git a/jam-files/boost-build/tools/midl.jam b/jam-files/boost-build/tools/midl.jam deleted file mode 100644 index 0aa5dda3..00000000 --- a/jam-files/boost-build/tools/midl.jam +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (c) 2005 Alexey Pakhunov. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Microsoft Interface Definition Language (MIDL) related routines - -import common ; -import generators ; -import feature : feature get-values ; -import os ; -import scanner ; -import toolset : flags ; -import type ; - -rule init ( ) -{ -} - -type.register IDL : idl ; - -# A type library (.tlb) is generated by MIDL compiler and can be included -# to resources of an application (.rc). In order to be found by a resource -# compiler its target type should be derived from 'H' - otherwise -# the property '<implicit-dependency>' will be ignored. 
-type.register MSTYPELIB : tlb : H ; - - -# Register scanner for MIDL files -class midl-scanner : scanner -{ - import path property-set regex scanner type virtual-target ; - - rule __init__ ( includes * ) - { - scanner.__init__ ; - - self.includes = $(includes) ; - - # List of quoted strings - self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ; - - # 'import' and 'importlib' directives - self.re-import = "import"$(self.re-strings)"[ \t]*;" ; - self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ; - - # C preprocessor 'include' directive - self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ; - self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ; - } - - rule pattern ( ) - { - # Match '#include', 'import' and 'importlib' directives - return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ; - } - - rule process ( target : matches * : binding ) - { - local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ; - local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ; - local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ; - local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ; - - # CONSIDER: the new scoping rule seem to defeat "on target" variables. - local g = [ on $(target) return $(HDRGRIST) ] ; - local b = [ NORMALIZE_PATH $(binding:D) ] ; - - # Attach binding of including file to included targets. - # When target is directly created from virtual target - # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. - local g2 = $(g)"#"$(b) ; - - included-angle = $(included-angle:G=$(g)) ; - included-quoted = $(included-quoted:G=$(g2)) ; - imported = $(imported:G=$(g2)) ; - imported_tlbs = $(imported_tlbs:G=$(g2)) ; - - local all = $(included-angle) $(included-quoted) $(imported) ; - - INCLUDES $(target) : $(all) ; - DEPENDS $(target) : $(imported_tlbs) ; - NOCARE $(all) $(imported_tlbs) ; - SEARCH on $(included-angle) = $(self.includes:G=) ; - SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ; - SEARCH on $(imported) = $(b) $(self.includes:G=) ; - SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ; - - scanner.propagate - [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] : - $(included-angle) $(included-quoted) : $(target) ; - - scanner.propagate $(__name__) : $(imported) : $(target) ; - } -} - -scanner.register midl-scanner : include ; -type.set-scanner IDL : midl-scanner ; - - -# Command line options -feature midl-stubless-proxy : yes no : propagated ; -feature midl-robust : yes no : propagated ; - -flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ; -flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ; -flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ; -flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ; - -# Architecture-specific options -architecture-x86 = <architecture> <architecture>x86 ; -address-model-32 = <address-model> <address-model>32 ; -address-model-64 = <address-model> <address-model>64 ; - -flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ; -flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ; -flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ; - - -flags midl.compile.idl DEFINES <define> ; -flags midl.compile.idl UNDEFS <undef> ; -flags 
midl.compile.idl INCLUDES <include> ; - - -generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ; - - -# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior -# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures -# that both files will be created so bjam will not try to recreate them -# constantly. -TOUCH_FILE = [ common.file-touch-command ] ; - -actions compile.idl -{ - midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")" - $(TOUCH_FILE) "$(<[4]:W)" - $(TOUCH_FILE) "$(<[5]:W)" -} diff --git a/jam-files/boost-build/tools/mipspro.jam b/jam-files/boost-build/tools/mipspro.jam deleted file mode 100644 index 417eaefc..00000000 --- a/jam-files/boost-build/tools/mipspro.jam +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright Noel Belcourt 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature ; -import fortran ; -import type ; -import common ; - -feature.extend toolset : mipspro ; -toolset.inherit mipspro : unix ; -generators.override mipspro.prebuilt : builtin.lib-generator ; -generators.override mipspro.searched-lib-generator : searched-lib-generator ; - -# Documentation and toolchain description located -# http://www.sgi.com/products/software/irix/tools/ - -rule init ( version ? : command * : options * ) -{ - local condition = [ - common.check-init-parameters mipspro : version $(version) ] ; - - command = [ common.get-invocation-command mipspro : CC : $(command) ] ; - - common.handle-options mipspro : $(condition) : $(command) : $(options) ; - - command_c = $(command_c[1--2]) $(command[-1]:B=cc) ; - - toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ; - - # fortran support - local command = [ - common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ; - - command_f = $(command_f[1--2]) $(command[-1]:B=f77) ; - toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ; - - # set link flags - flags mipspro.link FINDLIBS-ST : [ - feature.get-values <find-static-library> : $(options) ] : unchecked ; - - flags mipspro.link FINDLIBS-SA : [ - feature.get-values <find-shared-library> : $(options) ] : unchecked ; -} - -# Declare generators -generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ; -generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ; -generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ; - -cpu-arch-32 = - <architecture>/<address-model> - <architecture>/<address-model>32 ; - -cpu-arch-64 = - <architecture>/<address-model>64 ; - -flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ; -flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ; - -# Declare flags and actions for compilation -flags mipspro.compile OPTIONS <debug-symbols>on : -g ; -# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ; -flags mipspro.compile OPTIONS <warnings>off : -w ; -flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning -flags mipspro.compile OPTIONS <warnings>all : -fullwarn ; -flags mipspro.compile OPTIONS <optimization>speed 
: -Ofast ; -flags mipspro.compile OPTIONS <optimization>space : -O2 ; -flags mipspro.compile OPTIONS <cflags> : -LANG:std ; -flags mipspro.compile.c++ OPTIONS <inlining>off : -INLINE:none ; -flags mipspro.compile.c++ OPTIONS <cxxflags> ; -flags mipspro.compile DEFINES <define> ; -flags mipspro.compile INCLUDES <include> ; - - -flags mipspro.compile.fortran OPTIONS <fflags> ; - -actions compile.c -{ - "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.fortran -{ - "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags mipspro.link OPTIONS <debug-symbols>on : -g ; -# Strip the binary when no debugging is needed -# flags mipspro.link OPTIONS <debug-symbols>off : -s ; -# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ; -# flags mipspro.link OPTIONS <threading>multi : -mt ; - -flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ; -flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ; - -flags mipspro.link OPTIONS <optimization>speed : -Ofast ; -flags mipspro.link OPTIONS <optimization>space : -O2 ; -flags mipspro.link OPTIONS <linkflags> ; -flags mipspro.link LINKPATH <library-path> ; -flags mipspro.link FINDLIBS-ST <find-static-library> ; -flags mipspro.link FINDLIBS-SA <find-shared-library> ; -flags mipspro.link FINDLIBS-SA <threading>multi : pthread ; -flags mipspro.link LIBRARIES <library-file> ; -flags mipspro.link LINK-RUNTIME <runtime-link>static : static ; -flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags mipspro.link RPATH <dll-path> ; - -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -# Declare action for creating static libraries -actions piecemeal archive -{ - ar -cr "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/mpi.jam b/jam-files/boost-build/tools/mpi.jam deleted file mode 100644 index 0fe490be..00000000 --- a/jam-files/boost-build/tools/mpi.jam +++ /dev/null @@ -1,583 +0,0 @@ -# Support for the Message Passing Interface (MPI) -# -# (C) Copyright 2005, 2006 Trustees of Indiana University -# (C) Copyright 2005 Douglas Gregor -# -# Distributed under the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.) -# -# Authors: Douglas Gregor -# Andrew Lumsdaine -# -# ==== MPI Configuration ==== -# -# For many users, MPI support can be enabled simply by adding the following -# line to your user-config.jam file: -# -# using mpi ; -# -# This should auto-detect MPI settings based on the MPI wrapper compiler in -# your path, e.g., "mpic++". 
If the wrapper compiler is not in your path, or -# has a different name, you can pass the name of the wrapper compiler as the -# first argument to the mpi module: -# -# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ; -# -# If your MPI implementation does not have a wrapper compiler, or the MPI -# auto-detection code does not work with your MPI's wrapper compiler, -# you can pass MPI-related options explicitly via the second parameter to the -# mpi module: -# -# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++ -# <find-shared-library>mpi <find-shared-library>lam -# <find-shared-library>dl ; -# -# To see the results of MPI auto-detection, pass "--debug-configuration" on -# the bjam command line. -# -# The (optional) fourth argument configures Boost.MPI for running -# regression tests. These parameters specify the executable used to -# launch jobs (default: "mpirun") followed by any necessary arguments -# to this to run tests and tell the program to expect the number of -# processors to follow (default: "-np"). With the default parameters, -# for instance, the test harness will execute, e.g., -# -# mpirun -np 4 all_gather_test -# -# ==== Linking Against the MPI Libraries === -# -# To link against the MPI libraries, import the "mpi" module and add the -# following requirement to your target: -# -# <library>/mpi//mpi -# -# Since MPI support is not always available, you should check -# "mpi.configured" before trying to link against the MPI libraries. - -import "class" : new ; -import common ; -import feature : feature ; -import generators ; -import os ; -import project ; -import property ; -import testing ; -import toolset ; -import type ; -import path ; - -# Make this module a project -project.initialize $(__name__) ; -project mpi ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -# Assuming the first part of the command line is the given prefix -# followed by some non-empty value, remove the first argument. Returns -# either nothing (if there was no prefix or no value) or a pair -# -# <name>value rest-of-cmdline -# -# This is a subroutine of cmdline_to_features -rule add_feature ( prefix name cmdline ) -{ - local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; - - # If there was no value associated with the prefix, abort - if ! $(match) { - return ; - } - - local value = $(match[1]) ; - - if [ MATCH " +" : $(value) ] { - value = "\"$(value)\"" ; - } - - return "<$(name)>$(value)" $(match[2]) ; -} - -# Strip any end-of-line characters off the given string and return the -# result. -rule strip-eol ( string ) -{ - local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ; - - if $(match) - { - return $(match[1]) ; - } - else - { - return $(string) ; - } -} - -# Split a command-line into a set of features. Certain kinds of -# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced -# with their Boost.Build equivalents (e.g., <include>, <define>, -# <library-path>, <find-library>). All other arguments are introduced -# using the features in the unknown-features parameter, because we -# don't know how to deal with them. For instance, if your compile and -# correct. The incoming command line should be a string starting with -# an executable (e.g., g++ -I/include/path") and may contain any -# number of command-line arguments thereafter. The result is a list of -# features corresponding to the given command line, ignoring the -# executable. 
-rule cmdline_to_features ( cmdline : unknown-features ? ) -{ - local executable ; - local features ; - local otherflags ; - local result ; - - unknown-features ?= <cxxflags> <linkflags> ; - - # Pull the executable out of the command line. At this point, the - # executable is just thrown away. - local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; - executable = $(match[1]) ; - cmdline = $(match[2]) ; - - # List the prefix/feature pairs that we will be able to transform. - # Every kind of parameter not mentioned here will be placed in both - # cxxflags and linkflags, because we don't know where they should go. - local feature_kinds-D = "define" ; - local feature_kinds-I = "include" ; - local feature_kinds-L = "library-path" ; - local feature_kinds-l = "find-shared-library" ; - - while $(cmdline) { - - # Check for one of the feature prefixes we know about. If we - # find one (and the associated value is nonempty), convert it - # into a feature. - local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ; - local matched ; - if $(match) && $(match[2]) { - local prefix = $(match[1]) ; - if $(feature_kinds$(prefix)) { - local name = $(feature_kinds$(prefix)) ; - local add = [ add_feature $(prefix) $(name) $(cmdline) ] ; - - if $(add) { - - if $(add[1]) = <find-shared-library>pthread - { - # Uhm. It's not really nice that this MPI implementation - # uses -lpthread as opposed to -pthread. We do want to - # set <threading>multi, instead of -lpthread. - result += "<threading>multi" ; - MPI_EXTRA_REQUIREMENTS += "<threading>multi" ; - } - else - { - result += $(add[1]) ; - } - - cmdline = $(add[2]) ; - matched = yes ; - } - } - } - - # If we haven't matched a feature prefix, just grab the command-line - # argument itself. If we can map this argument to a feature - # (e.g., -pthread -> <threading>multi), then do so; otherwise, - # and add it to the list of "other" flags that we don't - # understand. - if ! $(matched) { - match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; - local value = $(match[1]) ; - cmdline = $(match[2]) ; - - # Check for multithreading support - if $(value) = "-pthread" || $(value) = "-pthreads" - { - result += "<threading>multi" ; - - # DPG: This is a hack intended to work around a BBv2 bug where - # requirements propagated from libraries are not checked for - # conflicts when BBv2 determines which "common" properties to - # apply to a target. In our case, the <threading>single property - # gets propagated from the common properties to Boost.MPI - # targets, even though <threading>multi is in the usage - # requirements of <library>/mpi//mpi. - MPI_EXTRA_REQUIREMENTS += "<threading>multi" ; - } - else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] { - otherflags += $(value) ; - } - } - } - - # If there are other flags that we don't understand, add them to the - # result as both <cxxflags> and <linkflags> - if $(otherflags) { - for unknown in $(unknown-features) - { - result += "$(unknown)$(otherflags:J= )" ; - } - } - - return $(result) ; -} - -# Determine if it is safe to execute the given shell command by trying -# to execute it and determining whether the exit code is zero or -# not. Returns true for an exit code of zero, false otherwise. -local rule safe-shell-command ( cmdline ) -{ - local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ; - return [ MATCH ".*(SSCOK).*" : $(result) ] ; -} - -# Initialize the MPI module. -rule init ( mpicxx ? : options * : mpirun-with-options * ) -{ - if ! 
$(options) && $(.debug-configuration) - { - ECHO "===============MPI Auto-configuration===============" ; - } - - if ! $(mpicxx) && [ os.on-windows ] - { - # Try to auto-configure to the Microsoft Compute Cluster Pack - local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ; - local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ; - if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ] - { - if $(.debug-configuration) - { - ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ; - } - - # Pick up either the 32-bit or 64-bit library, depending on which address - # model the user has selected. Default to 32-bit. - options = <include>$(cluster_pack_path)/Include - <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64 - <library-path>$(cluster_pack_path)/Lib/i386 - <find-static-library>msmpi - <toolset>msvc:<define>_SECURE_SCL=0 - ; - - # Setup the "mpirun" equivalent (mpiexec) - .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ; - .mpirun_flags = -n ; - } - else if $(.debug-configuration) - { - ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ; - } - } - - if ! $(options) - { - # Try to auto-detect options based on the wrapper compiler - local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ; - - if ! $(mpicxx) && ! $(command) - { - # Try "mpiCC", which is used by MPICH - command = [ common.get-invocation-command mpi : mpiCC ] ; - } - - if ! $(mpicxx) && ! $(command) - { - # Try "mpicxx", which is used by OpenMPI and MPICH2 - command = [ common.get-invocation-command mpi : mpicxx ] ; - } - - local result ; - local compile_flags ; - local link_flags ; - - if ! $(command) - { - # Do nothing: we'll complain later - } - # OpenMPI and newer versions of LAM-MPI have -showme:compile and - # -showme:link. - else if [ safe-shell-command "$(command) -showme:compile" ] && - [ safe-shell-command "$(command) -showme:link" ] - { - if $(.debug-configuration) - { - ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ; - } - - compile_flags = [ SHELL "$(command) -showme:compile" ] ; - link_flags = [ SHELL "$(command) -showme:link" ] ; - - # Prepend COMPILER as the executable name, to match the format of - # other compilation commands. 
- compile_flags = "COMPILER $(compile_flags)" ; - link_flags = "COMPILER $(link_flags)" ; - } - # Look for LAM-MPI's -showme - else if [ safe-shell-command "$(command) -showme" ] - { - if $(.debug-configuration) - { - ECHO "Found older LAM-MPI wrapper compiler: $(command)" ; - } - - result = [ SHELL "$(command) -showme" ] ; - } - # Look for MPICH - else if [ safe-shell-command "$(command) -show" ] - { - if $(.debug-configuration) - { - ECHO "Found MPICH wrapper compiler: $(command)" ; - } - compile_flags = [ SHELL "$(command) -compile_info" ] ; - link_flags = [ SHELL "$(command) -link_info" ] ; - } - # Sun HPC and Ibm POE - else if [ SHELL "$(command) -v 2>/dev/null" ] - { - compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ; - - local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ; - if $(back) - { - # Sun HPC - if $(.debug-configuration) - { - ECHO "Found Sun MPI wrapper compiler: $(command)" ; - } - - compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ; - compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ; - link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ; - link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ; - link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ; - - # strip out -v from compile options - local front = [ MATCH "(.*)-v" : $(link_flags) ] ; - local back = [ MATCH "-v(.*)" : $(link_flags) ] ; - link_flags = "$(front) $(back)" ; - front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ; - back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ; - link_flags = "$(front) $(back)" ; - } - else - { - # Ibm POE - if $(.debug-configuration) - { - ECHO "Found IBM MPI wrapper compiler: $(command)" ; - } - - # - compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ; - compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ; - local front = [ MATCH "(.*)-v" : $(compile_flags) ] ; - local back = [ MATCH "-v(.*)" : $(compile_flags) ] ; - compile_flags = "$(front) $(back)" ; - front = [ MATCH "(.*)-c" : $(compile_flags) ] ; - back = [ MATCH "-c(.*)" : $(compile_flags) ] ; - compile_flags = "$(front) $(back)" ; - link_flags = $(compile_flags) ; - - # get location of mpif.h from mpxlf - local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ; - f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ; - front = [ MATCH "(.*)-v" : $(f_flags) ] ; - back = [ MATCH "-v(.*)" : $(f_flags) ] ; - f_flags = "$(front) $(back)" ; - f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ; - f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ; - compile_flags = [ strip-eol $(compile_flags) ] ; - compile_flags = "$(compile_flags) $(f_flags)" ; - } - } - - if $(result) || $(compile_flags) && $(link_flags) - { - if $(result) - { - result = [ strip-eol $(result) ] ; - options = [ cmdline_to_features $(result) ] ; - } - else - { - compile_flags = [ strip-eol $(compile_flags) ] ; - link_flags = [ strip-eol $(link_flags) ] ; - - # Separately process compilation and link features, then combine - # them at the end. - local compile_features = [ cmdline_to_features $(compile_flags) - : "<cxxflags>" ] ; - local link_features = [ cmdline_to_features $(link_flags) - : "<linkflags>" ] ; - options = $(compile_features) $(link_features) ; - } - - # If requested, display MPI configuration information. 
-        if $(.debug-configuration)
-        {
-          if $(result)
-          {
-            ECHO " Wrapper compiler command line: $(result)" ;
-          }
-          else
-          {
-            local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
-              : $(compile_flags) ] ;
-            ECHO "MPI compilation flags: $(match[2])" ;
-            local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
-              : $(link_flags) ] ;
-            ECHO "MPI link flags: $(match[2])" ;
-          }
-        }
-      }
-      else
-      {
-        if $(command)
-        {
-          ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
-          ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
-        }
-        else if $(mpicxx)
-        {
-          ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
-        }
-        else
-        {
-          ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
-        }
-        ECHO "You will need to manually configure MPI support." ;
-      }
-
-  }
-
-  # Find mpirun (or its equivalent) and its flags
-  if ! $(.mpirun)
-  {
-    .mpirun =
-      [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
-    .mpirun_flags = $(mpirun-with-options[2-]) ;
-    .mpirun_flags ?= -np ;
-  }
-
-  if $(.debug-configuration)
-  {
-    if $(options)
-    {
-      echo "MPI build features: " ;
-      ECHO $(options) ;
-    }
-
-    if $(.mpirun)
-    {
-      echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
-    }
-
-    ECHO "====================================================" ;
-  }
-
-  if $(options)
-  {
-    .configured = true ;
-
-    # Set up the "mpi" alias
-    alias mpi : : : : $(options) ;
-  }
-}
-
-# States whether MPI has been configured
-rule configured ( )
-{
-  return $(.configured) ;
-}
-
-# Returns the "extra" requirements needed to build MPI. These requirements are
-# part of the /mpi//mpi library target, but they need to be added to anything
-# that uses MPI directly to work around bugs in BBv2's propagation of
-# requirements.
-rule extra-requirements ( )
-{
-  return $(MPI_EXTRA_REQUIREMENTS) ;
-}
-
-# Support for testing; borrowed from Python
-type.register RUN_MPI_OUTPUT ;
-type.register RUN_MPI : : TEST ;
-
-class mpi-test-generator : generator
-{
-    import property-set ;
-
-    rule __init__ ( * : * )
-    {
-        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-        self.composing = true ;
-    }
-
-    rule run ( project name ? : property-set : sources * : multiple ? )
-    {
-        # Generate an executable from the sources. This is the executable we will run.
-        local executable =
-            [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
-
-        result =
-            [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
-    }
-}
-
-# Use mpi-test-generator to generate MPI tests from sources
-generators.register
-  [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
-
-generators.register-standard testing.expect-success
-  : RUN_MPI_OUTPUT : RUN_MPI ;
-
-# The number of processes to spawn when executing an MPI test.
-feature mpi:processes : : free incidental ;
-
-# The flag settings on testing.capture-output do not
-# apply to mpi.capture-output at the moment.
-# Redo this explicitly.
-toolset.flags mpi.capture-output ARGS <testing.arg> ;
-rule capture-output ( target : sources * : properties * )
-{
-    # Use the standard capture-output rule to run the tests
-    testing.capture-output $(target) : $(sources[1]) : $(properties) ;
-
-    # Determine the number of processes we should run on.
-    local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
-    num_processes = $(num_processes:G=) ;
-
-    # serialize the MPI tests to avoid overloading systems
-    JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
-
-    # We launch MPI processes using the "mpirun" equivalent specified by the user.
-    LAUNCHER on $(target) =
-        [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
-}
-
-# Creates a set of test cases to be run through the MPI launcher. The name, sources,
-# and requirements are the same as for any other test generator. However, schedule is
-# a list of numbers, which indicates how many processes each test run will use. For
-# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
-# 7 processes. The name provided is just the base name: the actual tests will be
-# the name followed by a hyphen, then the number of processes.
-rule mpi-test ( name : sources * : requirements * : schedule * )
-{
-    sources ?= $(name).cpp ;
-    schedule ?= 1 2 3 4 7 8 13 17 ;
-
-    local result ;
-    for processes in $(schedule)
-    {
-        result += [ testing.make-test
-            run-mpi : $(sources) /boost/mpi//boost_mpi
-            : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
-    }
-    return $(result) ;
-}
diff --git a/jam-files/boost-build/tools/msvc-config.jam b/jam-files/boost-build/tools/msvc-config.jam deleted file mode 100644 index 6c71e3b0..00000000 --- a/jam-files/boost-build/tools/msvc-config.jam +++ /dev/null @@ -1,12 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for the Visual Studio toolset. To use, just import this module.
-
-import toolset : using ;
-
-ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ;
-
-using msvc : all ;
-
diff --git a/jam-files/boost-build/tools/msvc.jam b/jam-files/boost-build/tools/msvc.jam deleted file mode 100644 index e33a66d2..00000000 --- a/jam-files/boost-build/tools/msvc.jam +++ /dev/null @@ -1,1392 +0,0 @@
-# Copyright (c) 2003 David Abrahams.
-# Copyright (c) 2005 Vladimir Prus.
-# Copyright (c) 2005 Alexey Pakhunov.
-# Copyright (c) 2006 Bojan Resnik.
-# Copyright (c) 2006 Ilya Sokolov.
-# Copyright (c) 2007 Rene Rivera
-# Copyright (c) 2008 Jurko Gospodnetic
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-################################################################################
-#
-# MSVC Boost Build toolset module.
-# --------------------------------
-#
-# All toolset versions need to have their location either auto-detected or
-# explicitly specified except for the special 'default' version that expects the
-# environment to find the needed tools or report an error.
-#
-################################################################################
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import mc ;
-import midl ;
-import os ;
-import path ;
-import pch ;
-import property ;
-import rc ;
-import toolset ;
-import type ;
-
-
-type.register MANIFEST : manifest ;
-feature.feature embed-manifest : on off : incidental propagated ;
-
-type.register PDB : pdb ;
-
-################################################################################
-#
-# Public rules.
-# -################################################################################ - -# Initialize a specific toolset version configuration. As the result, path to -# compiler and, possible, program names are set up, and will be used when that -# version of compiler is requested. For example, you might have: -# -# using msvc : 6.5 : cl.exe ; -# using msvc : 7.0 : Y:/foo/bar/cl.exe ; -# -# The version parameter may be ommited: -# -# using msvc : : Z:/foo/bar/cl.exe ; -# -# The following keywords have special meanings when specified as versions: -# - all - all detected but not yet used versions will be marked as used -# with their default options. -# - default - this is an equivalent to an empty version. -# -# Depending on a supplied version, detected configurations and presence 'cl.exe' -# in the path different results may be achieved. The following table describes -# the possible scenarios: -# -# Nothing "x.y" -# Passed Nothing "x.y" detected, detected, -# version detected detected cl.exe in path cl.exe in path -# -# default Error Use "x.y" Create "default" Use "x.y" -# all None Use all None Use all -# x.y - Use "x.y" - Use "x.y" -# a.b Error Error Create "a.b" Create "a.b" -# -# "x.y" - refers to a detected version; -# "a.b" - refers to an undetected version. -# -# FIXME: Currently the command parameter and the <compiler> property parameter -# seem to overlap in duties. Remove this duplication. This seems to be related -# to why someone started preparing to replace init with configure rules. -# -rule init ( - # The msvc version being configured. When omitted the tools invoked when no - # explicit version is given will be configured. - version ? - - # The command used to invoke the compiler. If not specified: - # - if version is given, default location for that version will be - # searched - # - # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0 - # and 6.* will be searched - # - # - if compiler is not found in the default locations, PATH will be - # searched. - : command * - - # Options may include: - # - # All options shared by multiple toolset types as handled by the - # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>, - # <fflags> & <linkflags>. - # - # <assembler> - # <compiler> - # <idl-compiler> - # <linker> - # <mc-compiler> - # <resource-compiler> - # Exact tool names to be used by this msvc toolset configuration. - # - # <compiler-filter> - # Command through which to pipe the output of running the compiler. - # For example to pass the output to STLfilt. - # - # <setup> - # Global setup command to invoke before running any of the msvc tools. - # It will be passed additional option parameters depending on the actual - # target platform. - # - # <setup-amd64> - # <setup-i386> - # <setup-ia64> - # Platform specific setup command to invoke before running any of the - # msvc tools used when builing a target for a specific platform, e.g. - # when building a 32 or 64 bit executable. - : options * -) -{ - if $(command) - { - options += <command>$(command) ; - } - configure $(version) : $(options) ; -} - - -# 'configure' is a newer version of 'init'. The parameter 'command' is passed as -# a part of the 'options' list. See the 'init' rule comment for more detailed -# information. -# -rule configure ( version ? : options * ) -{ - switch $(version) - { - case "all" : - if $(options) - { - errors.error "MSVC toolset configuration: options should be" - "empty when '$(version)' is specified." ; - } - - # Configure (i.e. 
mark as used) all registered versions. - local all-versions = [ $(.versions).all ] ; - if ! $(all-versions) - { - if $(.debug-configuration) - { - ECHO "notice: [msvc-cfg] Asked to configure all registered" - "msvc toolset versions when there are none currently" - "registered." ; - } - } - else - { - for local v in $(all-versions) - { - # Note that there is no need to skip already configured - # versions here as this will request configure-really rule - # to configure the version using default options which will - # in turn cause it to simply do nothing in case the version - # has already been configured. - configure-really $(v) ; - } - } - - case "default" : - configure-really : $(options) ; - - case * : - configure-really $(version) : $(options) ; - } -} - - -# Sets up flag definitions dependent on the compiler version used. -# - 'version' is the version of compiler in N.M format. -# - 'conditions' is the property set to be used as flag conditions. -# - 'toolset' is the toolset for which flag settings are to be defined. -# This makes the rule reusable for other msvc-option-compatible compilers. -# -rule configure-version-specific ( toolset : version : conditions ) -{ - toolset.push-checking-for-flags-module unchecked ; - # Starting with versions 7.0, the msvc compiler have the /Zc:forScope and - # /Zc:wchar_t options that improve C++ standard conformance, but those - # options are off by default. If we are sure that the msvc version is at - # 7.*, add those options explicitly. We can be sure either if user specified - # version 7.* explicitly or if we auto-detected the version ourselves. - if ! [ MATCH ^(6\\.) : $(version) ] - { - toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ; - toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ; - - # Explicitly disable the 'function is deprecated' warning. Some msvc - # versions have a bug, causing them to emit the deprecation warning even - # with /W0. - toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ; - - if [ MATCH ^([78]\\.) : $(version) ] - { - # 64-bit compatibility warning deprecated since 9.0, see - # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx - toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ; - } - } - - # - # Processor-specific optimization. - # - - if [ MATCH ^([67]) : $(version) ] - { - # 8.0 deprecates some of the options. - toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ; - - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i386 : /G3 ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ; - - # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math" - # tests will fail. 
- toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ; - - # 7.1 and below have single-threaded static RTL. - toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ; - } - else - { - # 8.0 and above adds some more options. - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ; - - # 8.0 and above only has multi-threaded static RTL. - toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ; - toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ; - - # Specify target machine type so the linker will not need to guess. - toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ; - toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : /MACHINE:X86 ; - toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : /MACHINE:IA64 ; - - # Make sure that manifest will be generated even if there is no - # dependencies to put there. - toolset.flags $(toolset).link LINKFLAGS $(conditions)/<embed-manifest>off : /MANIFEST ; - } - toolset.pop-checking-for-flags-module ; -} - - -# Registers this toolset including all of its flags, features & generators. Does -# nothing on repeated calls. -# -rule register-toolset ( ) -{ - if ! msvc in [ feature.values toolset ] - { - register-toolset-really ; - } -} - - -# Declare action for creating static libraries. If library exists, remove it -# before adding files. See -# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale. -if [ os.name ] in NT -{ - # The 'DEL' command would issue a message to stdout if the file does not - # exist, so need a check. 
- actions archive - { - if exist "$(<[1])" DEL "$(<[1])" - $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" - } -} -else -{ - actions archive - { - $(.RM) "$(<[1])" - $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" - } -} - - -# For the assembler the following options are turned on by default: -# -# -Zp4 align structures to 4 bytes -# -Cp preserve case of user identifiers -# -Cx preserve case in publics, externs -# -actions compile.asm -{ - $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)" -} - - -rule compile.c ( targets + : sources * : properties * ) -{ - C++FLAGS on $(targets[1]) = ; - get-rspline $(targets) : -TC ; - compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - - -rule compile.c.preprocess ( targets + : sources * : properties * ) -{ - C++FLAGS on $(targets[1]) = ; - get-rspline $(targets) : -TC ; - preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - - -rule compile.c.pch ( targets + : sources * : properties * ) -{ - C++FLAGS on $(targets[1]) = ; - get-rspline $(targets[1]) : -TC ; - get-rspline $(targets[2]) : -TC ; - local pch-source = [ on $(<) return $(PCH_SOURCE) ] ; - if $(pch-source) - { - DEPENDS $(<) : $(pch-source) ; - compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ; - } - else - { - compile-c-c++-pch $(targets) : $(sources) ; - } -} - -toolset.flags msvc YLOPTION : "-Yl" ; - -# Action for running the C/C++ compiler without using precompiled headers. -# -# WARNING: Synchronize any changes this in action with intel-win -# -# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database -# -# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty -# -# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++. -# The linker will pull these into the executable's PDB -# -# 3. When compiling library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive, -# as in this case the compiler must be used to create a single PDB for our library. -# -actions compile-c-c++ bind PDB_NAME -{ - $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER) -} - -actions preprocess-c-c++ bind PDB_NAME -{ - $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)" -} - -rule compile-c-c++ ( targets + : sources * ) -{ - DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ; - DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ; - PDB_NAME on $(<) = $(<:S=.pdb) ; -} - -rule preprocess-c-c++ ( targets + : sources * ) -{ - DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ; - DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ; - PDB_NAME on $(<) = $(<:S=.pdb) ; -} - -# Action for running the C/C++ compiler using precompiled headers. In addition -# to whatever else it needs to compile, this action also adds a temporary source -# .cpp file used to compile the precompiled headers themselves. 
-# -# The global .escaped-double-quote variable is used to avoid messing up Emacs -# syntax highlighting in the messy N-quoted code below. -actions compile-c-c++-pch -{ - $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER) -} - - -# Action for running the C/C++ compiler using precompiled headers. An already -# built source file for compiling the precompiled headers is expected to be -# given as one of the source parameters. -actions compile-c-c++-pch-s -{ - $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER) -} - - -rule compile.c++ ( targets + : sources * : properties * ) -{ - get-rspline $(targets) : -TP ; - compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - -rule compile.c++.preprocess ( targets + : sources * : properties * ) -{ - get-rspline $(targets) : -TP ; - preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - - -rule compile.c++.pch ( targets + : sources * : properties * ) -{ - get-rspline $(targets[1]) : -TP ; - get-rspline $(targets[2]) : -TP ; - local pch-source = [ on $(<) return $(PCH_SOURCE) ] ; - if $(pch-source) - { - DEPENDS $(<) : $(pch-source) ; - compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ; - } - else - { - compile-c-c++-pch $(targets) : $(sources) ; - } -} - - -# See midl.jam for details. -# -actions compile.idl -{ - $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")" - $(.TOUCH_FILE) "$(<[4]:W)" - $(.TOUCH_FILE) "$(<[5]:W)" -} - - -actions compile.mc -{ - $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)" -} - - -actions compile.rc -{ - $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)" -} - - -rule link ( targets + : sources * : properties * ) -{ - if <embed-manifest>on in $(properties) - { - msvc.manifest $(targets) : $(sources) : $(properties) ; - } -} - -rule link.dll ( targets + : sources * : properties * ) -{ - DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ; - if <embed-manifest>on in $(properties) - { - msvc.manifest.dll $(targets) : $(sources) : $(properties) ; - } -} - -# Incremental linking a DLL causes no end of problems: if the actual exports do -# not change, the import .lib file is never updated. Therefore, the .lib is -# always out-of-date and gets rebuilt every time. I am not sure that incremental -# linking is such a great idea in general, but in this case I am sure we do not -# want it. - -# Windows manifest is a new way to specify dependencies on managed DotNet -# assemblies and Windows native DLLs. The manifests are embedded as resources -# and are useful in any PE target (both DLL and EXE). 
- -if [ os.name ] in NT -{ - actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE - { - $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" - if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% - } - - actions manifest - { - if exist "$(<[1]).manifest" ( - $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1" - ) - } - - actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE - { - $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" - if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% - } - - actions manifest.dll - { - if exist "$(<[1]).manifest" ( - $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2" - ) - } -} -else -{ - actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE - { - $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" - } - - actions manifest - { - if test -e "$(<[1]).manifest"; then - $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1" - fi - } - - actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE - { - $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" - } - - actions manifest.dll - { - if test -e "$(<[1]).manifest"; then - $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2" - fi - } -} - -# this rule sets up the pdb file that will be used when generating static -# libraries and the debug-store option is database, so that the compiler -# puts all debug info into a single .pdb file named after the library -# -# Poking at source targets this way is probably not clean, but it's the -# easiest approach. -rule archive ( targets + : sources * : properties * ) -{ - PDB_NAME on $(>) = $(<:S=.pdb) ; -} - -################################################################################ -# -# Classes. -# -################################################################################ - -class msvc-pch-generator : pch-generator -{ - import property-set ; - - rule run-pch ( project name ? : property-set : sources * ) - { - # Searching for the header and source file in the sources. - local pch-header ; - local pch-source ; - for local s in $(sources) - { - if [ type.is-derived [ $(s).type ] H ] - { - pch-header = $(s) ; - } - else if - [ type.is-derived [ $(s).type ] CPP ] || - [ type.is-derived [ $(s).type ] C ] - { - pch-source = $(s) ; - } - } - - if ! $(pch-header) - { - errors.user-error "can not build pch without pch-header" ; - } - - # If we do not have the PCH source - that is fine. We will just create a - # temporary .cpp file in the action. - - local generated = [ generator.run $(project) $(name) - : [ property-set.create - # Passing of <pch-source> is a dirty trick, needed because - # non-composing generators with multiple inputs are subtly - # broken. 
For more detailed information see: - # https://zigzag.cs.msu.su:7813/boost.build/ticket/111 - <pch-source>$(pch-source) - [ $(property-set).raw ] ] - : $(pch-header) ] ; - - local pch-file ; - for local g in $(generated) - { - if [ type.is-derived [ $(g).type ] PCH ] - { - pch-file = $(g) ; - } - } - - return [ property-set.create <pch-header>$(pch-header) - <pch-file>$(pch-file) ] $(generated) ; - } -} - - -################################################################################ -# -# Local rules. -# -################################################################################ - -# Detects versions listed as '.known-versions' by checking registry information, -# environment variables & default paths. Supports both native Windows and -# Cygwin. -# -local rule auto-detect-toolset-versions ( ) -{ - if [ os.name ] in NT CYGWIN - { - # Get installation paths from the registry. - for local i in $(.known-versions) - { - if $(.version-$(i)-reg) - { - local vc-path ; - for local x in "" "Wow6432Node\\" - { - vc-path += [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg) - : "ProductDir" ] ; - } - - if $(vc-path) - { - vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ; - register-configuration $(i) : [ path.native $(vc-path[1]) ] ; - } - } - } - } - - # Check environment and default installation paths. - for local i in $(.known-versions) - { - if ! $(i) in [ $(.versions).all ] - { - register-configuration $(i) : [ default-path $(i) ] ; - } - } -} - - -# Worker rule for toolset version configuration. Takes an explicit version id or -# nothing in case it should configure the default toolset version (the first -# registered one or a new 'default' one in case no toolset versions have been -# registered yet). -# -local rule configure-really ( version ? : options * ) -{ - local v = $(version) ; - - # Decide what the 'default' version is. - if ! $(v) - { - # Take the first registered (i.e. auto-detected) version. - version = [ $(.versions).all ] ; - version = $(version[1]) ; - v = $(version) ; - - # Note: 'version' can still be empty at this point if no versions have - # been auto-detected. - version ?= "default" ; - } - - # Version alias -> real version number. - if $(.version-alias-$(version)) - { - version = $(.version-alias-$(version)) ; - } - - # Check whether the selected configuration is already in use. - if $(version) in [ $(.versions).used ] - { - # Allow multiple 'toolset.using' calls for the same configuration if the - # identical sets of options are used. - if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] ) - { - errors.error "MSVC toolset configuration: Toolset version" - "'$(version)' already configured." ; - } - } - else - { - # Register a new configuration. - $(.versions).register $(version) ; - - # Add user-supplied to auto-detected options. - options = [ $(.versions).get $(version) : options ] $(options) ; - - # Mark the configuration as 'used'. - $(.versions).use $(version) ; - - # Generate conditions and save them. - local conditions = [ common.check-init-parameters msvc : version $(v) ] - ; - - $(.versions).set $(version) : conditions : $(conditions) ; - - local command = [ feature.get-values <command> : $(options) ] ; - - # If version is specified, we try to search first in default paths, and - # only then in PATH. 
- command = [ common.get-invocation-command msvc : cl.exe : $(command) : - [ default-paths $(version) ] : $(version) ] ; - - common.handle-options msvc : $(conditions) : $(command) : $(options) ; - - if ! $(version) - { - # Even if version is not explicitly specified, try to detect the - # version from the path. - # FIXME: We currently detect both Microsoft Visual Studio 9.0 and - # 9.0express as 9.0 here. - if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ] - { - version = 10.0 ; - } - else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ] - { - version = 9.0 ; - } - else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ] - { - version = 8.0 ; - } - else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ] - { - version = 7.1 ; - } - else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" : - $(command) ] - { - version = 7.1toolkit ; - } - else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ] - { - version = 7.0 ; - } - else - { - version = 6.0 ; - } - } - - # Generate and register setup command. - - local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ; - - local cpu = i386 amd64 ia64 ; - if $(below-8.0) - { - cpu = i386 ; - } - - local setup-amd64 ; - local setup-i386 ; - local setup-ia64 ; - - if $(command) - { - # TODO: Note that if we specify a non-existant toolset version then - # this rule may find and use a corresponding compiler executable - # belonging to an incorrect toolset version. For example, if you - # have only MSVC 7.1 installed, have its executable on the path and - # specify you want Boost Build to use MSVC 9.0, then you want Boost - # Build to report an error but this may cause it to silently use the - # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0 - # toolset version. - command = [ common.get-absolute-tool-path $(command[-1]) ] ; - } - - if $(command) - { - local parent = [ path.make $(command) ] ; - parent = [ path.parent $(parent) ] ; - parent = [ path.native $(parent) ] ; - - # Setup will be used if the command name has been specified. If - # setup is not specified explicitly then a default setup script will - # be used instead. Setup scripts may be global or arhitecture/ - # /platform/cpu specific. Setup options are used only in case of - # global setup scripts. - - # Default setup scripts provided with different VC distributions: - # - # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386 - # builds. It was located in the bin folder for the regular version - # and in the root folder for the free VC 7.1 tools. - # - # Later 8.0 & 9.0 versions introduce separate platform specific - # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium) - # located in or under the bin folder. Most also include a global - # vcvarsall.bat helper script located in the root folder which runs - # one of the aforementioned vcvars*.bat scripts based on the options - # passed to it. So far only the version coming with some PlatformSDK - # distributions does not include this top level script but to - # support those we need to fall back to using the worker scripts - # directly in case the top level script can not be found. - - local global-setup = [ feature.get-values <setup> : $(options) ] ; - global-setup = $(global-setup[1]) ; - if ! 
$(below-8.0) - { - global-setup ?= [ locate-default-setup $(command) : $(parent) : - vcvarsall.bat ] ; - } - - local default-setup-amd64 = vcvarsx86_amd64.bat ; - local default-setup-i386 = vcvars32.bat ; - local default-setup-ia64 = vcvarsx86_ia64.bat ; - - # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and - # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx - # mention an x86_IPF option, that seems to be a documentation bug - # and x86_ia64 is the correct option. - local default-global-setup-options-amd64 = x86_amd64 ; - local default-global-setup-options-i386 = x86 ; - local default-global-setup-options-ia64 = x86_ia64 ; - - # When using 64-bit Windows, and targeting 64-bit, it is possible to - # use a native 64-bit compiler, selected by the "amd64" & "ia64" - # parameters to vcvarsall.bat. There are two variables we can use -- - # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is - # 'x86' when running 32-bit Windows, no matter which processor is - # used, and 'AMD64' on 64-bit windows on x86 (either AMD64 or EM64T) - # Windows. - # - if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ] - { - default-global-setup-options-amd64 = amd64 ; - } - # TODO: The same 'native compiler usage' should be implemented for - # the Itanium platform by using the "ia64" parameter. For this - # though we need someone with access to this platform who can find - # out how to correctly detect this case. - else if $(somehow-detect-the-itanium-platform) - { - default-global-setup-options-ia64 = ia64 ; - } - - local setup-prefix = "call " ; - local setup-suffix = " >nul"$(.nl) ; - if ! [ os.name ] in NT - { - setup-prefix = "cmd.exe /S /C call " ; - setup-suffix = " \">nul\" \"&&\" " ; - } - - for local c in $(cpu) - { - local setup-options ; - - setup-$(c) = [ feature.get-values <setup-$(c)> : $(options) ] ; - - if ! $(setup-$(c))-is-not-empty - { - if $(global-setup)-is-not-empty - { - setup-$(c) = $(global-setup) ; - - # If needed we can easily add using configuration flags - # here for overriding which options get passed to the - # global setup command for which target platform: - # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ; - - setup-options ?= $(default-global-setup-options-$(c)) ; - } - else - { - setup-$(c) = [ locate-default-setup $(command) : $(parent) : $(default-setup-$(c)) ] ; - } - } - - # Cygwin to Windows path translation. - setup-$(c) = "\""$(setup-$(c):W)"\"" ; - - # Append setup options to the setup name and add the final setup - # prefix & suffix. - setup-options ?= "" ; - setup-$(c) = $(setup-prefix)$(setup-$(c):J=" ")" "$(setup-options:J=" ")$(setup-suffix) ; - } - } - - # Get tool names (if any) and finish setup. 
- - compiler = [ feature.get-values <compiler> : $(options) ] ; - compiler ?= cl ; - - linker = [ feature.get-values <linker> : $(options) ] ; - linker ?= link ; - - resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ; - resource-compiler ?= rc ; - - # Turn on some options for i386 assembler - # -coff generate COFF format object file (compatible with cl.exe output) - local default-assembler-amd64 = ml64 ; - local default-assembler-i386 = "ml -coff" ; - local default-assembler-ia64 = ias ; - - assembler = [ feature.get-values <assembler> : $(options) ] ; - - idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ; - idl-compiler ?= midl ; - - mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ; - mc-compiler ?= mc ; - - manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ; - manifest-tool ?= mt ; - - local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ; - - for local c in $(cpu) - { - # Setup script is not required in some configurations. - setup-$(c) ?= "" ; - - local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ; - - if $(.debug-configuration) - { - for local cpu-condition in $(cpu-conditions) - { - ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ; - } - } - - local cpu-assembler = $(assembler) ; - cpu-assembler ?= $(default-assembler-$(c)) ; - - toolset.flags msvc.compile .CC $(cpu-conditions) : $(setup-$(c))$(compiler) /Zm800 -nologo ; - toolset.flags msvc.compile .RC $(cpu-conditions) : $(setup-$(c))$(resource-compiler) ; - toolset.flags msvc.compile .ASM $(cpu-conditions) : $(setup-$(c))$(cpu-assembler) -nologo ; - toolset.flags msvc.link .LD $(cpu-conditions) : $(setup-$(c))$(linker) /NOLOGO /INCREMENTAL:NO ; - toolset.flags msvc.archive .LD $(cpu-conditions) : $(setup-$(c))$(linker) /lib /NOLOGO ; - toolset.flags msvc.compile .IDL $(cpu-conditions) : $(setup-$(c))$(idl-compiler) ; - toolset.flags msvc.compile .MC $(cpu-conditions) : $(setup-$(c))$(mc-compiler) ; - - toolset.flags msvc.link .MT $(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ; - - if $(cc-filter) - { - toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ; - } - } - - # Set version-specific flags. - configure-version-specific msvc : $(version) : $(conditions) ; - } -} - - -# Returns the default installation path for the given version. -# -local rule default-path ( version ) -{ - # Use auto-detected path if possible. - local path = [ feature.get-values <command> : [ $(.versions).get $(version) - : options ] ] ; - - if $(path) - { - path = $(path:D) ; - } - else - { - # Check environment. - if $(.version-$(version)-env) - { - local vc-path = [ os.environ $(.version-$(version)-env) ] ; - if $(vc-path) - { - vc-path = [ path.make $(vc-path) ] ; - vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ; - vc-path = [ path.native $(vc-path) ] ; - - path = $(vc-path) ; - } - } - - # Check default path. - if ! $(path) && $(.version-$(version)-path) - { - path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ; - } - } - - return $(path) ; -} - - -# Returns either the default installation path (if 'version' is not empty) or -# list of all known default paths (if no version is given) -# -local rule default-paths ( version ? 
) -{ - local possible-paths ; - - if $(version) - { - possible-paths += [ default-path $(version) ] ; - } - else - { - for local i in $(.known-versions) - { - possible-paths += [ default-path $(i) ] ; - } - } - - return $(possible-paths) ; -} - - -rule get-rspline ( target : lang-opt ) -{ - CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS) - $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES) - $(.nl)\"-I$(INCLUDES:W)\" ] ; -} - -class msvc-linking-generator : linking-generator -{ - # Calls the base version. If necessary, also create a target for the - # manifest file.specifying source's name as the name of the created - # target. As result, the PCH will be named whatever.hpp.gch, and not - # whatever.gch. - rule generated-targets ( sources + : property-set : project name ? ) - { - local result = [ linking-generator.generated-targets $(sources) - : $(property-set) : $(project) $(name) ] ; - - if $(result) - { - local name-main = [ $(result[0]).name ] ; - local action = [ $(result[0]).action ] ; - - if [ $(property-set).get <debug-symbols> ] = "on" - { - # We force exact name on PDB. The reason is tagging -- the tag rule may - # reasonably special case some target types, like SHARED_LIB. The tag rule - # will not catch PDB, and it cannot even easily figure if PDB is paired with - # SHARED_LIB or EXE or something else. Because PDB always get the - # same name as the main target, with .pdb as extension, just force it. - local target = [ class.new file-target $(name-main:S=.pdb) exact : PDB : $(project) : $(action) ] ; - local registered-target = [ virtual-target.register $(target) ] ; - if $(target) != $(registered-target) - { - $(action).replace-targets $(target) : $(registered-target) ; - } - result += $(registered-target) ; - } - - if [ $(property-set).get <embed-manifest> ] = "off" - { - # Manifest is evil target. It has .manifest appened to the name of - # main target, including extension. E.g. a.exe.manifest. We use 'exact' - # name because to achieve this effect. - local target = [ class.new file-target $(name-main).manifest exact : MANIFEST : $(project) : $(action) ] ; - local registered-target = [ virtual-target.register $(target) ] ; - if $(target) != $(registered-target) - { - $(action).replace-targets $(target) : $(registered-target) ; - } - result += $(registered-target) ; - } - } - return $(result) ; - } -} - - - -# Unsafe worker rule for the register-toolset() rule. Must not be called -# multiple times. -# -local rule register-toolset-really ( ) -{ - feature.extend toolset : msvc ; - - # Intel and msvc supposedly have link-compatible objects. - feature.subfeature toolset msvc : vendor : intel : propagated optional ; - - # Inherit MIDL flags. - toolset.inherit-flags msvc : midl ; - - # Inherit MC flags. - toolset.inherit-flags msvc : mc ; - - # Dynamic runtime comes only in MT flavour. - toolset.add-requirements - <toolset>msvc,<runtime-link>shared:<threading>multi ; - - # Declare msvc toolset specific features. - { - feature.feature debug-store : object database : propagated ; - feature.feature pch-source : : dependency free ; - } - - # Declare generators. - { - # TODO: Is it possible to combine these? Make the generators - # non-composing so that they do not convert each source into a separate - # .rsp file. 
- generators.register [ new msvc-linking-generator - msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ; - generators.register [ new msvc-linking-generator - msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ] ; - - generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ; - generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ; - generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ; - generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ; - generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ; - - # Using 'register-c-compiler' adds the build directory to INCLUDES. - generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ; - generators.override msvc.compile.rc : rc.compile.resource ; - generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ; - - generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ; - generators.override msvc.compile.idl : midl.compile.idl ; - - generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ; - generators.override msvc.compile.mc : mc.compile ; - - # Note: the 'H' source type will catch both '.h' and '.hpp' headers as - # the latter have their HPP type derived from H. The type of compilation - # is determined entirely by the destination type. - generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ; - generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ; - - generators.override msvc.compile.c.pch : pch.default-c-pch-generator ; - generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ; - } - - toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ; - toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ; - toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ; - - # - # Declare flags for compilation. 
- # - - toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ; - toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ; - - toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ; - toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ; - - toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ; - toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ; - toolset.flags msvc.compile CFLAGS <optimization>off : /Od ; - toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ; - toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ; - toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ; - - toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ; - toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ; - toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ; - toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ; - - toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ; - toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ; - toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ; - toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ; - - # By default 8.0 enables rtti support while prior versions disabled it. We - # simply enable or disable it explicitly so we do not have to depend on this - # default behaviour. - toolset.flags msvc.compile CFLAGS <rtti>on : /GR ; - toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ; - toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ; - toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ; - - toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ; - toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ; - - toolset.flags msvc.compile OPTIONS <cflags> : ; - toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ; - - toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ; - - toolset.flags msvc.compile DEFINES <define> ; - toolset.flags msvc.compile UNDEFS <undef> ; - toolset.flags msvc.compile INCLUDES <include> ; - - # Declare flags for the assembler. - toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ; - - toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ; - - toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ; - toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ; - toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ; - toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ; - - toolset.flags msvc.compile.asm DEFINES <define> ; - - # Declare flags for linking. - { - toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet - toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ; - toolset.flags msvc.link DEF_FILE <def-file> ; - - # The linker disables the default optimizations when using /DEBUG so we - # have to enable them manually for release builds with debug symbols. 
- toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ; - - toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ; - toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ; - toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ; - toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ; - toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ; - - toolset.flags msvc.link OPTIONS <linkflags> ; - toolset.flags msvc.link LINKPATH <library-path> ; - - toolset.flags msvc.link FINDLIBS_ST <find-static-library> ; - toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ; - toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ; - toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ; - } - - toolset.flags msvc.archive AROPTIONS <archiveflags> ; -} - - -# Locates the requested setup script under the given folder and returns its full -# path or nothing in case the script can not be found. In case multiple scripts -# are found only the first one is returned. -# -# TODO: There used to exist a code comment for the msvc.init rule stating that -# we do not correctly detect the location of the vcvars32.bat setup script for -# the free VC7.1 tools in case user explicitly provides a path. This should be -# tested or simply remove this whole comment in case this toolset version is no -# longer important. -# -local rule locate-default-setup ( command : parent : setup-name ) -{ - local result = [ GLOB $(command) $(parent) : $(setup-name) ] ; - if $(result[1]) - { - return $(result[1]) ; - } -} - - -# Validates given path, registers found configuration and prints debug -# information about it. -# -local rule register-configuration ( version : path ? ) -{ - if $(path) - { - local command = [ GLOB $(path) : cl.exe ] ; - - if $(command) - { - if $(.debug-configuration) - { - ECHO "notice: [msvc-cfg] msvc-$(version) detected, command: '$(command)'" ; - } - - $(.versions).register $(version) ; - $(.versions).set $(version) : options : <command>$(command) ; - } - } -} - - -################################################################################ -# -# Startup code executed when loading this module. -# -################################################################################ - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -# Miscellaneous constants. -.RM = [ common.rm-command ] ; -.nl = " -" ; -.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ; -.escaped-double-quote = "\"" ; -.TOUCH_FILE = [ common.file-touch-command ] ; - -# List of all registered configurations. -.versions = [ new configurations ] ; - -# Supported CPU architectures. -.cpu-arch-i386 = - <architecture>/<address-model> - <architecture>/<address-model>32 - <architecture>x86/<address-model> - <architecture>x86/<address-model>32 ; - -.cpu-arch-amd64 = - <architecture>/<address-model>64 - <architecture>x86/<address-model>64 ; - -.cpu-arch-ia64 = - <architecture>ia64/<address-model> - <architecture>ia64/<address-model>64 ; - - -# Supported CPU types (only Itanium optimization options are supported from -# VC++ 2005 on). See -# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more -# detailed information. 
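To make the flag declarations above concrete, a build request combines one of these architecture conditions with an entry from the CPU-type tables that follow. A small sketch (target and source names are hypothetical):

    exe itanium-probe : probe.cpp : <toolset>msvc <architecture>ia64 <instruction-set>itanium2 ;

This matches the .cpu-arch-ia64 condition and, since "itanium2" appears in .cpu-type-itanium2 below, the earlier toolset.flags declaration adds /G2 to the compile command line.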
-.cpu-type-g5 = i586 pentium pentium-mmx ; -.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6 - k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ; -.cpu-type-em64t = prescott nocona conroe conroe-xe conroe-l allendale mermon - mermon-xe kentsfield kentsfield-xe penryn wolfdale - yorksfield nehalem ; -.cpu-type-amd64 = k8 opteron athlon64 athlon-fx ; -.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp - athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ; -.cpu-type-itanium = itanium itanium1 merced ; -.cpu-type-itanium2 = itanium2 mckinley ; - - -# Known toolset versions, in order of preference. -.known-versions = 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ; - -# Version aliases. -.version-alias-6 = 6.0 ; -.version-alias-6.5 = 6.0 ; -.version-alias-7 = 7.0 ; -.version-alias-8 = 8.0 ; -.version-alias-9 = 9.0 ; -.version-alias-10 = 10.0 ; - -# Names of registry keys containing the Visual C++ installation path (relative -# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft"). -.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ; -.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ; -.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ; -.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ; -.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ; -.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ; -.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ; -.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ; -.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ; - -# Visual C++ Toolkit 2003 does not store its installation path in the registry. -# The environment variable 'VCToolkitInstallDir' and the default installation -# path will be checked instead. -.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ; -.version-7.1toolkit-env = VCToolkitInstallDir ; - -# Path to the folder containing "cl.exe" relative to the value of the -# corresponding environment variable. -.version-7.1toolkit-envpath = "bin" ; - - -# Auto-detect all the available msvc installations on the system. -auto-detect-toolset-versions ; - - -# And finally trigger the actual Boost Build toolset registration. -register-toolset ; diff --git a/jam-files/boost-build/tools/notfile.jam b/jam-files/boost-build/tools/notfile.jam deleted file mode 100644 index 97a5b0e8..00000000 --- a/jam-files/boost-build/tools/notfile.jam +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (c) 2005 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import generators ; -import project ; -import targets ; -import toolset ; -import type ; - - -type.register NOTFILE_MAIN ; - - -class notfile-generator : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * : multiple ? 
) - { - local action ; - local action-name = [ $(property-set).get <action> ] ; - - local m = [ MATCH ^@(.*) : $(action-name) ] ; - - if $(m) - { - action = [ new action $(sources) : $(m[1]) - : $(property-set) ] ; - } - else - { - action = [ new action $(sources) : notfile.run - : $(property-set) ] ; - } - return [ virtual-target.register - [ new notfile-target $(name) : $(project) : $(action) ] ] ; - } -} - - -generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ; - - -toolset.flags notfile.run ACTION : <action> ; - - -actions run -{ - $(ACTION) -} - - -rule notfile ( target-name : action + : sources * : requirements * : default-build * ) -{ - local project = [ project.current ] ; - - requirements += <action>$(action) ; - - targets.main-target-alternative - [ new typed-target $(target-name) : $(project) : NOTFILE_MAIN - : [ targets.main-target-sources $(sources) : $(target-name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; -} - -IMPORT $(__name__) : notfile : : notfile ; diff --git a/jam-files/boost-build/tools/notfile.py b/jam-files/boost-build/tools/notfile.py deleted file mode 100644 index afbf68fb..00000000 --- a/jam-files/boost-build/tools/notfile.py +++ /dev/null @@ -1,51 +0,0 @@ -# Status: ported. -# Base revision: 64429. -# -# Copyright (c) 2005-2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - - -import b2.build.type as type -import b2.build.generators as generators -import b2.build.virtual_target as virtual_target -import b2.build.toolset as toolset -import b2.build.targets as targets - -from b2.manager import get_manager -from b2.util import bjam_signature - -type.register("NOTFILE_MAIN") - -class NotfileGenerator(generators.Generator): - - def run(self, project, name, ps, sources): - pass - action_name = ps.get('action')[0] - if action_name[0] == '@': - action = virtual_target.Action(get_manager(), sources, action_name[1:], ps) - else: - action = virtual_target.Action(get_manager(), sources, "notfile.run", ps) - - return [get_manager().virtual_targets().register( - virtual_target.NotFileTarget(name, project, action))] - -generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"])) - -toolset.flags("notfile.run", "ACTION", [], ["<action>"]) - -get_manager().engine().register_action("notfile.run", "$(ACTION)") - -@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"], - ["default_build", "*"])) -def notfile(target_name, action, sources, requirements, default_build): - - requirements.append("<action>" + action) - - return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements, - default_build, []) - - -get_manager().projects().add_rule("notfile", notfile) diff --git a/jam-files/boost-build/tools/package.jam b/jam-files/boost-build/tools/package.jam deleted file mode 100644 index 198c2231..00000000 --- a/jam-files/boost-build/tools/package.jam +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (c) 2005 Vladimir Prus. -# Copyright 2006 Rene Rivera. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Provides mechanism for installing whole packages into a specific directory -# structure. 
This is opposed to the 'install' rule, that installs a number of -# targets to a single directory, and does not care about directory structure at -# all. - -# Example usage: -# -# package.install boost : <properties> -# : <binaries> -# : <libraries> -# : <headers> -# ; -# -# This will install binaries, libraries and headers to the 'proper' location, -# given by command line options --prefix, --exec-prefix, --bindir, --libdir and -# --includedir. -# -# The rule is just a convenient wrapper, avoiding the need to define several -# 'install' targets. -# -# The only install-related feature is <install-source-root>. It will apply to -# headers only and if present, paths of headers relatively to source root will -# be retained after installing. If it is not specified, then "." is assumed, so -# relative paths in headers are always preserved. - -import "class" : new ; -import option ; -import project ; -import feature ; -import property ; -import stage ; -import targets ; -import modules ; - -feature.feature install-default-prefix : : free incidental ; - -rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * ) -{ - package-name ?= $(name) ; - if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ] - { - # If --prefix is explicitly specified on the command line, - # then we need wipe away any settings of libdir/includir that - # is specified via options in config files. - option.set bindir : ; - option.set libdir : ; - option.set includedir : ; - } - - # If <install-source-root> is not specified, all headers are installed to - # prefix/include, no matter what their relative path is. Sometimes that is - # what is needed. - local install-source-root = [ property.select <install-source-root> : - $(requirements) ] ; - install-source-root = $(install-source-root:G=) ; - requirements = [ property.change $(requirements) : <install-source-root> ] ; - - local install-header-subdir = [ property.select <install-header-subdir> : - $(requirements) ] ; - install-header-subdir = /$(install-header-subdir:G=) ; - install-header-subdir ?= "" ; - requirements = [ property.change $(requirements) : <install-header-subdir> ] - ; - - # First, figure out all locations. Use the default if no prefix option - # given. - local prefix = [ get-prefix $(name) : $(requirements) ] ; - - # Architecture dependent files. - local exec-locate = [ option.get exec-prefix : $(prefix) ] ; - - # Binaries. - local bin-locate = [ option.get bindir : $(prefix)/bin ] ; - - # Object code libraries. - local lib-locate = [ option.get libdir : $(prefix)/lib ] ; - - # Source header files. - local include-locate = [ option.get includedir : $(prefix)/include ] ; - - stage.install $(name)-bin : $(binaries) : $(requirements) - <location>$(bin-locate) ; - alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ; - - # Since the install location of shared libraries differs on universe - # and cygwin, use target alternatives to make different targets. - # We should have used indirection conditioanl requirements, but it's - # awkward to pass bin-locate and lib-locate from there to another rule. - alias $(name)-lib-shared : $(name)-lib-shared-universe ; - alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ; - - # For shared libraries, we install both explicitly specified one and the - # shared libraries that the installed executables depend on. 
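A brief sketch of how these install locations are typically driven in practice, using the 'boost' package target from the example near the top of this file and a hypothetical prefix:

    bjam boost --prefix=/opt/mypkg

Through the option.get calls above, this stages binaries under /opt/mypkg/bin, libraries under /opt/mypkg/lib and headers under /opt/mypkg/include; --exec-prefix, --bindir, --libdir and --includedir allow adjusting each location individually.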
- stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements) - <location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ; - stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements) - <location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ; - - # For static libraries, we do not care about executable dependencies, since - # static libraries are already incorporated into them. - stage.install $(name)-lib-static : $(libraries) : $(requirements) - <location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ; - stage.install $(name)-headers : $(headers) : $(requirements) - <location>$(include-locate)$(install-header-subdir) - <install-source-root>$(install-source-root) ; - alias $(name) : $(name)-bin $(name)-lib $(name)-headers ; - - local c = [ project.current ] ; - local project-module = [ $(c).project-module ] ; - module $(project-module) - { - explicit $(1)-bin $(1)-lib $(1)-headers $(1) $(1)-lib-shared $(1)-lib-static - $(1)-lib-shared-universe $(1)-lib-shared-cygwin ; - } -} - -rule install-data ( target-name : package-name : data * : requirements * ) -{ - package-name ?= target-name ; - if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ] - { - # If --prefix is explicitly specified on the command line, - # then we need wipe away any settings of datarootdir - option.set datarootdir : ; - } - - local prefix = [ get-prefix $(package-name) : $(requirements) ] ; - local datadir = [ option.get datarootdir : $(prefix)/share ] ; - - stage.install $(target-name) - : $(data) - : $(requirements) <location>$(datadir)/$(package-name) - ; - - local c = [ project.current ] ; - local project-module = [ $(c).project-module ] ; - module $(project-module) - { - explicit $(1) ; - } -} - -local rule get-prefix ( package-name : requirements * ) -{ - local prefix = [ option.get prefix : [ property.select - <install-default-prefix> : $(requirements) ] ] ; - prefix = $(prefix:G=) ; - requirements = [ property.change $(requirements) : <install-default-prefix> - ] ; - # Or some likely defaults if neither is given. - if ! $(prefix) - { - if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; } - else if [ modules.peek : UNIX ] { prefix = /usr/local ; } - } - return $(prefix) ; -} - diff --git a/jam-files/boost-build/tools/package.py b/jam-files/boost-build/tools/package.py deleted file mode 100644 index aa081b4f..00000000 --- a/jam-files/boost-build/tools/package.py +++ /dev/null @@ -1,168 +0,0 @@ -# Status: ported -# Base revision: 64488 -# -# Copyright (c) 2005, 2010 Vladimir Prus. -# Copyright 2006 Rene Rivera. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Provides mechanism for installing whole packages into a specific directory -# structure. This is opposed to the 'install' rule, that installs a number of -# targets to a single directory, and does not care about directory structure at -# all. - -# Example usage: -# -# package.install boost : <properties> -# : <binaries> -# : <libraries> -# : <headers> -# ; -# -# This will install binaries, libraries and headers to the 'proper' location, -# given by command line options --prefix, --exec-prefix, --bindir, --libdir and -# --includedir. -# -# The rule is just a convenient wrapper, avoiding the need to define several -# 'install' targets. -# -# The only install-related feature is <install-source-root>. 
It will apply to -# headers only and if present, paths of headers relatively to source root will -# be retained after installing. If it is not specified, then "." is assumed, so -# relative paths in headers are always preserved. - -import b2.build.feature as feature -import b2.build.property as property -import b2.util.option as option -import b2.tools.stage as stage - -from b2.build.alias import alias - -from b2.manager import get_manager - -from b2.util import bjam_signature -from b2.util.utility import ungrist - - -import os - -feature.feature("install-default-prefix", [], ["free", "incidental"]) - -@bjam_signature((["name", "package_name", "?"], ["requirements", "*"], - ["binaries", "*"], ["libraries", "*"], ["headers", "*"])) -def install(name, package_name=None, requirements=[], binaries=[], libraries=[], headers=[]): - - requirements = requirements[:] - binaries = binaries[:] - libraries - - if not package_name: - package_name = name - - if option.get("prefix"): - # If --prefix is explicitly specified on the command line, - # then we need wipe away any settings of libdir/includir that - # is specified via options in config files. - option.set("bindir", None) - option.set("libdir", None) - option.set("includedir", None) - - # If <install-source-root> is not specified, all headers are installed to - # prefix/include, no matter what their relative path is. Sometimes that is - # what is needed. - install_source_root = property.select('install-source-root', requirements) - if install_source_root: - requirements = property.change(requirements, 'install-source-root', None) - - install_header_subdir = property.select('install-header-subdir', requirements) - if install_header_subdir: - install_header_subdir = ungrist(install_header_subdir[0]) - requirements = property.change(requirements, 'install-header-subdir', None) - - # First, figure out all locations. Use the default if no prefix option - # given. - prefix = get_prefix(name, requirements) - - # Architecture dependent files. - exec_locate = option.get("exec-prefix", prefix) - - # Binaries. - bin_locate = option.get("bindir", os.path.join(prefix, "bin")) - - # Object code libraries. - lib_locate = option.get("libdir", os.path.join(prefix, "lib")) - - # Source header files. - include_locate = option.get("includedir", os.path.join(prefix, "include")) - - stage.install(name + "-bin", binaries, requirements + ["<location>" + bin_locate]) - - alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"]) - - # Since the install location of shared libraries differs on universe - # and cygwin, use target alternatives to make different targets. - # We should have used indirection conditioanl requirements, but it's - # awkward to pass bin-locate and lib-locate from there to another rule. - alias(name + "-lib-shared", [name + "-lib-shared-universe"]) - alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["<target-os>cygwin"]) - - # For shared libraries, we install both explicitly specified one and the - # shared libraries that the installed executables depend on. - stage.install(name + "-lib-shared-universe", binaries + libraries, - requirements + ["<location>" + lib_locate, "<install-dependencies>on", - "<install-type>SHARED_LIB"]) - stage.install(name + "-lib-shared-cygwin", binaries + libraries, - requirements + ["<location>" + bin_locate, "<install-dependencies>on", - "<install-type>SHARED_LIB"]) - - # For static libraries, we do not care about executable dependencies, since - # static libraries are already incorporated into them. 
- stage.install(name + "-lib-static", libraries, requirements + - ["<location>" + lib_locate, "<install-dependencies>on", "<install-type>STATIC_LIB"]) - stage.install(name + "-headers", headers, requirements \ - + ["<location>" + os.path.join(include_locate, s) for s in install_header_subdir] - + install_source_root) - - alias(name, [name + "-bin", name + "-lib", name + "-headers"]) - - pt = get_manager().projects().current() - - for subname in ["bin", "lib", "headers", "lib-shared", "lib-static", "lib-shared-universe", "lib-shared-cygwin"]: - pt.mark_targets_as_explicit([name + "-" + subname]) - -@bjam_signature((["target_name"], ["package_name"], ["data", "*"], ["requirements", "*"])) -def install_data(target_name, package_name, data, requirements): - if not package_name: - package_name = target_name - - if option.get("prefix"): - # If --prefix is explicitly specified on the command line, - # then we need wipe away any settings of datarootdir - option.set("datarootdir", None) - - prefix = get_prefix(package_name, requirements) - datadir = option.get("datarootdir", os.path.join(prefix, "share")) - - stage.install(target_name, data, - requirements + ["<location>" + os.path.join(datadir, package_name)]) - - get_manager().projects().current().mark_targets_as_explicit([target_name]) - -def get_prefix(package_name, requirements): - - specified = property.select("install-default-prefix", requirements) - if specified: - specified = ungrist(specified[0]) - prefix = option.get("prefix", specified) - requirements = property.change(requirements, "install-default-prefix", None) - # Or some likely defaults if neither is given. - if not prefix: - if os.name == "nt": - prefix = "C:\\" + package_name - elif os.name == "posix": - prefix = "/usr/local" - - return prefix - diff --git a/jam-files/boost-build/tools/pathscale.jam b/jam-files/boost-build/tools/pathscale.jam deleted file mode 100644 index 454e3454..00000000 --- a/jam-files/boost-build/tools/pathscale.jam +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright 2006 Noel Belcourt -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import property ; -import generators ; -import toolset : flags ; -import feature ; -import type ; -import common ; -import fortran ; - -feature.extend toolset : pathscale ; -toolset.inherit pathscale : unix ; -generators.override pathscale.prebuilt : builtin.prebuilt ; -generators.override pathscale.searched-lib-generator : searched-lib-generator ; - -# Documentation and toolchain description located -# http://www.pathscale.com/docs.html - -rule init ( version ? 
: command * : options * ) -{ - command = [ common.get-invocation-command pathscale : pathCC : $(command) - : /opt/ekopath/bin ] ; - - # Determine the version - local command-string = $(command:J=" ") ; - if $(command) - { - version ?= [ MATCH "^([0-9.]+)" - : [ SHELL "$(command-string) -dumpversion" ] ] ; - } - - local condition = [ common.check-init-parameters pathscale - : version $(version) ] ; - - common.handle-options pathscale : $(condition) : $(command) : $(options) ; - - toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) : - [ feature.get-values <fflags> : $(options) ] : unchecked ; - - command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ; - - toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ; - - # fortran support - local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ; - local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ; - local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ; - - toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ; - toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ; - - # always link lib rt to resolve clock_gettime() - flags pathscale.link FINDLIBS-SA : rt : unchecked ; -} - -# Declare generators -generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ; -generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ; -generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ; -generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ; - -# Declare flags and actions for compilation -flags pathscale.compile OPTIONS <optimization>off : -O0 ; -flags pathscale.compile OPTIONS <optimization>speed : -O3 ; -flags pathscale.compile OPTIONS <optimization>space : -Os ; - -flags pathscale.compile OPTIONS <inlining>off : -noinline ; -flags pathscale.compile OPTIONS <inlining>on : -inline ; -flags pathscale.compile OPTIONS <inlining>full : -inline ; - -flags pathscale.compile OPTIONS <warnings>off : -woffall ; -flags pathscale.compile OPTIONS <warnings>on : -Wall ; -flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ; -flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ; - -flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ; -flags pathscale.compile OPTIONS <profiling>on : -pg ; -flags pathscale.compile OPTIONS <link>shared : -fPIC ; -flags pathscale.compile OPTIONS <address-model>32 : -m32 ; -flags pathscale.compile OPTIONS <address-model>64 : -m64 ; - -flags pathscale.compile USER_OPTIONS <cflags> ; -flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ; -flags pathscale.compile DEFINES <define> ; -flags pathscale.compile INCLUDES <include> ; - -flags pathscale.compile.fortran USER_OPTIONS <fflags> ; -flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ; - -actions compile.c -{ - "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.fortran -{ - "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.fortran90 ( targets * : sources * : properties * ) -{ - # the space rule inserts spaces between targets and it's necessary - SPACE on $(targets) = " " ; - # Serialize execution of the compile.fortran90 action - # 
F90 source must be compiled in a particular order so we - # serialize the build as a parallel F90 compile might fail - JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ; -} - -actions compile.fortran90 -{ - "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ; -# Strip the binary when no debugging is needed -flags pathscale.link OPTIONS <debug-symbols>off : -g0 ; -flags pathscale.link OPTIONS <profiling>on : -pg ; -flags pathscale.link USER_OPTIONS <linkflags> ; -flags pathscale.link LINKPATH <library-path> ; -flags pathscale.link FINDLIBS-ST <find-static-library> ; -flags pathscale.link FINDLIBS-SA <find-shared-library> ; -flags pathscale.link FINDLIBS-SA <threading>multi : pthread ; -flags pathscale.link LIBRARIES <library-file> ; -flags pathscale.link LINK-RUNTIME <runtime-link>static : static ; -flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags pathscale.link RPATH <dll-path> ; -# On gcc, there are separate options for dll path at runtime and -# link time. On Solaris, there's only one: -R, so we have to use -# it, even though it's bad idea. -flags pathscale.link RPATH <xdll-path> ; - -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -} - -# Declare action for creating static libraries -# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)" -actions piecemeal archive -{ - ar $(ARFLAGS) ru "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/pch.jam b/jam-files/boost-build/tools/pch.jam deleted file mode 100644 index 0c6e98fa..00000000 --- a/jam-files/boost-build/tools/pch.jam +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -##### Using Precompiled Headers (Quick Guide) ##### -# -# Make precompiled mypch.hpp: -# -# import pch ; -# -# cpp-pch mypch -# : # sources -# mypch.hpp -# : # requiremnts -# <toolset>msvc:<source>mypch.cpp -# ; -# -# Add cpp-pch to sources: -# -# exe hello -# : main.cpp hello.cpp mypch -# ; - -import "class" : new ; -import type ; -import feature ; -import generators ; - -type.register PCH : pch ; - -type.register C_PCH : : PCH ; -type.register CPP_PCH : : PCH ; - -# Control precompiled header (PCH) generation. -feature.feature pch : - on - off - : propagated ; - - -feature.feature pch-header : : free dependency ; -feature.feature pch-file : : free dependency ; - -# Base PCH generator. The 'run' method has the logic to prevent this generator -# from being run unless it's being used for a top-level PCH target. 
-class pch-generator : generator -{ - import property-set ; - - rule action-class ( ) - { - return compile-action ; - } - - rule run ( project name ? : property-set : sources + ) - { - if ! $(name) - { - # Unless this generator is invoked as the top-most generator for a - # main target, fail. This allows using 'H' type as input type for - # this generator, while preventing Boost.Build to try this generator - # when not explicitly asked for. - # - # One bad example is msvc, where pch generator produces both PCH - # target and OBJ target, so if there's any header generated (like by - # bison, or by msidl), we'd try to use pch generator to get OBJ from - # that H, which is completely wrong. By restricting this generator - # only to pch main target, such problem is solved. - } - else - { - local r = [ run-pch $(project) $(name) - : [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ] - : $(sources) ] ; - return [ generators.add-usage-requirements $(r) - : <define>BOOST_BUILD_PCH_ENABLED ] ; - } - } - - # This rule must be overridden by the derived classes. - rule run-pch ( project name ? : property-set : sources + ) - { - } -} - - -# NOTE: requirements are empty, default pch generator can be applied when -# pch=off. -generators.register - [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ; -generators.register - [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ; diff --git a/jam-files/boost-build/tools/pch.py b/jam-files/boost-build/tools/pch.py deleted file mode 100644 index 21d3db09..00000000 --- a/jam-files/boost-build/tools/pch.py +++ /dev/null @@ -1,83 +0,0 @@ -# Status: Being ported by Steven Watanabe -# Base revision: 47077 -# -# Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov -# Copyright (c) 2008 Steven Watanabe -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -##### Using Precompiled Headers (Quick Guide) ##### -# -# Make precompiled mypch.hpp: -# -# import pch ; -# -# cpp-pch mypch -# : # sources -# mypch.hpp -# : # requiremnts -# <toolset>msvc:<source>mypch.cpp -# ; -# -# Add cpp-pch to sources: -# -# exe hello -# : main.cpp hello.cpp mypch -# ; - -from b2.build import type, feature, generators - -type.register('PCH', ['pch']) -type.register('C_PCH', [], 'PCH') -type.register('CPP_PCH', [], 'PCH') - -# Control precompiled header (PCH) generation. -feature.feature('pch', - ['on', 'off'], - ['propagated']) - -feature.feature('pch-header', [], ['free', 'dependency']) -feature.feature('pch-file', [], ['free', 'dependency']) - -class PchGenerator(generators.Generator): - """ - Base PCH generator. The 'run' method has the logic to prevent this generator - from being run unless it's being used for a top-level PCH target. - """ - def action_class(self): - return 'compile-action' - - def run(self, project, name, prop_set, sources): - if not name: - # Unless this generator is invoked as the top-most generator for a - # main target, fail. This allows using 'H' type as input type for - # this generator, while preventing Boost.Build to try this generator - # when not explicitly asked for. - # - # One bad example is msvc, where pch generator produces both PCH - # target and OBJ target, so if there's any header generated (like by - # bison, or by msidl), we'd try to use pch generator to get OBJ from - # that H, which is completely wrong. 
By restricting this generator - # only to pch main target, such problem is solved. - pass - else: - r = self.run_pch(project, name, - prop_set.add_raw('<define>BOOST_BUILD_PCH_ENABLED'), - sources) - return generators.add_usage_requirements( - r, ['<define>BOOST_BUILD_PCH_ENABLED']) - - # This rule must be overridden by the derived classes. - def run_pch(self, project, name, prop_set, sources): - pass - -#FIXME: dummy-generator in builtins.jam needs to be ported. -# NOTE: requirements are empty, default pch generator can be applied when -# pch=off. -###generators.register( -### [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ; -###generators.register -### [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ; diff --git a/jam-files/boost-build/tools/pgi.jam b/jam-files/boost-build/tools/pgi.jam deleted file mode 100644 index 3a35c644..00000000 --- a/jam-files/boost-build/tools/pgi.jam +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright Noel Belcourt 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature ; -import fortran ; -import type ; -import common ; -import gcc ; - -feature.extend toolset : pgi ; -toolset.inherit pgi : unix ; -generators.override pgi.prebuilt : builtin.lib-generator ; -generators.override pgi.searched-lib-generator : searched-lib-generator ; - -# Documentation and toolchain description located -# http://www.pgroup.com/resources/docs.htm - -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters pgi : version $(version) ] ; - - local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ; - - common.handle-options pgi : $(condition) : $(l_command) : $(options) ; - - command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ; - - toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ; - - flags pgi.compile DEFINES $(condition) : - [ feature.get-values <define> : $(options) ] : unchecked ; - - # IOV_MAX support - flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ; - - # set link flags - flags pgi.link FINDLIBS-ST : [ - feature.get-values <find-static-library> : $(options) ] : unchecked ; - - # always link lib rt to resolve clock_gettime() - flags pgi.link FINDLIBS-SA : rt [ - feature.get-values <find-shared-library> : $(options) ] : unchecked ; - - gcc.init-link-flags pgi gnu $(condition) ; -} - -# Declare generators -generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ; -generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ; -generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ; - -# Declare flags and actions for compilation -flags pgi.compile OPTIONS : -Kieee ; -flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ; -flags pgi.compile OPTIONS <debug-symbols>on : -gopt ; -flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ; -flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ; -flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ; -# flags pgi.compile OPTIONS <threading>multi : -mt ; - -flags pgi.compile OPTIONS <warnings>off : -Minform=severe ; -flags pgi.compile OPTIONS <warnings>on : -Minform=warn ; - -flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ; - -flags pgi.compile OPTIONS <cflags> ; -flags pgi.compile.c++ OPTIONS <cxxflags> ; -flags 
pgi.compile DEFINES <define> ; -flags pgi.compile INCLUDES <include> ; - -flags pgi.compile.fortran OPTIONS <fflags> ; - -actions compile.c -{ - "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.fortran -{ - "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags pgi.link OPTIONS <debug-symbols>on : -gopt ; -# Strip the binary when no debugging is needed -flags pgi.link OPTIONS <debug-symbols>off : -s ; -flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ; -flags pgi.link OPTIONS <linkflags> ; -flags pgi.link OPTIONS <link>shared : -fpic -fPIC ; -flags pgi.link LINKPATH <library-path> ; -flags pgi.link FINDLIBS-ST <find-static-library> ; -flags pgi.link FINDLIBS-SA <find-shared-library> ; -flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ; -flags pgi.link LIBRARIES <library-file> ; -flags pgi.link LINK-RUNTIME <runtime-link>static : static ; -flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags pgi.link RPATH <dll-path> ; - -# On gcc, there are separate options for dll path at runtime and -# link time. On Solaris, there's only one: -R, so we have to use -# it, even though it's bad idea. -flags pgi.link RPATH <xdll-path> ; - -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -# reddish can only link statically and, somehow, the presence of -Bdynamic on the link line -# marks the executable as a dynamically linked exec even though no dynamic libraries are supplied. -# Yod on redstorm refuses to load an executable that is dynamically linked. -# removing the dynamic link options should get us where we need to be on redstorm. -# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME) -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -actions updated together piecemeal pgi.archive -{ - ar -rc$(ARFLAGS:E=) "$(<)" "$(>)" -} - diff --git a/jam-files/boost-build/tools/python-config.jam b/jam-files/boost-build/tools/python-config.jam deleted file mode 100644 index 40aa825b..00000000 --- a/jam-files/boost-build/tools/python-config.jam +++ /dev/null @@ -1,27 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for Python tools and librries. To use, just import this module. 
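The module is self-activating: as the comment above says, a project wanting this auto-detection would simply import it, e.g. from a Jamroot (sketch):

    import python-config ;

On Windows this runs the registry probe below and calls 'using python' for each interpreter version it finds.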
- -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ - for local R in 2.4 2.3 2.2 - { - local python-path = [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ; - local python-version = $(R) ; - - if $(python-path) - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ; - } - using python : $(python-version) : $(python-path) ; - } - } -} diff --git a/jam-files/boost-build/tools/python.jam b/jam-files/boost-build/tools/python.jam deleted file mode 100644 index 97a9f9a5..00000000 --- a/jam-files/boost-build/tools/python.jam +++ /dev/null @@ -1,1267 +0,0 @@ -# Copyright 2004 Vladimir Prus. -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Support for Python and the the Boost.Python library. -# -# This module defines -# -# - a project 'python' with a target 'python' in it, that corresponds to the -# python library -# -# - a main target rule 'python-extension' which can be used to build a python -# extension. -# -# Extensions that use Boost.Python must explicitly link to it. - -import type ; -import testing ; -import generators ; -import project ; -import errors ; -import targets ; -import "class" : new ; -import os ; -import common ; -import toolset ; -import regex ; -import numbers ; -import string ; -import property ; -import sequence ; -import path ; -import feature ; -import set ; -import builtin ; -import version ; - - -# Make this module a project. -project.initialize $(__name__) ; -project python ; - -# Save the project so that if 'init' is called several times we define new -# targets in the python project, not in whatever project we were called by. -.project = [ project.current ] ; - -# Dynamic linker lib. Necessary to specify it explicitly on some platforms. -lib dl ; -# This contains 'openpty' function need by python. Again, on some system need to -# pass this to linker explicitly. -lib util ; -# Python uses pthread symbols. -lib pthread ; -# Extra library needed by phtread on some platforms. -lib rt ; - -# The pythonpath feature specifies additional elements for the PYTHONPATH -# environment variable, set by run-pyd. For example, pythonpath can be used to -# access Python modules that are part of the product being built, but are not -# installed in the development system's default paths. -feature.feature pythonpath : : free optional path ; - -# Initializes the Python toolset. Note that all parameters are optional. -# -# - version -- the version of Python to use. Should be in Major.Minor format, -# for example 2.3. Do not include the subminor version. -# -# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter. -# Alternatively, the installation prefix for Python libraries and includes. If -# empty, will be guessed from the version, the platform's installation -# patterns, and the python executables that can be found in PATH. -# -# - includes: the include path to Python headers. If empty, will be guessed. -# -# - libraries: the path to Python library binaries. If empty, will be guessed. -# On MacOS/Darwin, you can also pass the path of the Python framework. -# -# - condition: if specified, should be a set of properties that are matched -# against the build configuration when Boost.Build selects a Python -# configuration to use. 
-# -# - extension-suffix: A string to append to the name of extension modules before -# the true filename extension. Ordinarily we would just compute this based on -# the value of the <python-debugging> feature. However ubuntu's python-dbg -# package uses the windows convention of appending _d to debug-build extension -# modules. We have no way of detecting ubuntu, or of probing python for the -# "_d" requirement, and if you configure and build python using -# --with-pydebug, you'll be using the standard *nix convention. Defaults to "" -# (or "_d" when targeting windows and <python-debugging> is set). -# -# Example usage: -# -# using python : 2.3 ; -# using python : 2.3 : /usr/local/bin/python ; -# -rule init ( version ? : cmd-or-prefix ? : includes * : libraries ? - : condition * : extension-suffix ? ) -{ - project.push-current $(.project) ; - - debug-message Configuring python... ; - for local v in version cmd-or-prefix includes libraries condition - { - if $($(v)) - { - debug-message " user-specified "$(v): \"$($(v))\" ; - } - } - - configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ; - - project.pop-current ; -} - -# A simpler version of SHELL that grabs stderr as well as stdout, but returns -# nothing if there was an error. -# -local rule shell-cmd ( cmd ) -{ - debug-message running command '$(cmd)" 2>&1"' ; - x = [ SHELL $(cmd)" 2>&1" : exit-status ] ; - if $(x[2]) = 0 - { - return $(x[1]) ; - } - else - { - return ; - } -} - - -# Try to identify Cygwin symlinks. Invoking such a file directly as an NT -# executable from a native Windows build of bjam would be fatal to the bjam -# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove -# that those are not also symlinks. ;-) -# -# If a symlink is found returns non-empty; we try to extract the target of the -# symlink from the file and return that. -# -# Note: 1. only works on NT 2. path is a native path. -local rule is-cygwin-symlink ( path ) -{ - local is-symlink = ; - - # Look for a file with the given path having the S attribute set, as cygwin - # symlinks do. /-C means "do not use thousands separators in file sizes." - local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ; - - if $(dir-listing) - { - # Escape any special regex characters in the base part of the path. - local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ; - - # Extract the file's size from the directory listing. - local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ; - - # If the file has a reasonably small size, look for the special symlink - # identification text. - if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ] - { - local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ; - if $(link[2]) != 0 - { - local nl = " - -" ; - is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ; - if $(is-symlink) - { - is-symlink = [ *nix-path-to-native $(is-symlink) ] ; - is-symlink = $(is-symlink:R=$(path:D)) ; - } - - } - } - } - return $(is-symlink) ; -} - - -# Append ext to each member of names that does not contain '.'. -# -local rule default-extension ( names * : ext * ) -{ - local result ; - for local n in $(names) - { - switch $(n) - { - case *.* : result += $(n) ; - case * : result += $(n)$(ext) ; - } - } - return $(result) ; -} - - -# Tries to determine whether invoking "cmd" would actually attempt to launch a -# cygwin symlink. -# -# Note: only works on NT. 
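# As a rough illustration of what the symlink checks here look for: a Cygwin
# symlink is a small file with the system attribute set whose contents are
# along the lines of
#
#   !<symlink>/usr/bin/python2.5
#
# (the target path above is made up); is-cygwin-symlink extracts that target
# and resolves it against the link's own directory.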
-# -local rule invokes-cygwin-symlink ( cmd ) -{ - local dirs = $(cmd:D) ; - if ! $(dirs) - { - dirs = . [ os.executable-path ] ; - } - local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ; - local paths = [ GLOB $(dirs) : $(base) ] ; - if $(paths) - { - # Make sure we have not run into a Cygwin symlink. Invoking such a file - # as an NT executable would be fatal for the bjam process. - return [ is-cygwin-symlink $(paths[1]) ] ; - } -} - - -local rule debug-message ( message * ) -{ - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO notice: [python-cfg] $(message) ; - } -} - - -# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and -# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result -# found. Also accounts for the fact that on 64-bit machines, 32-bit software has -# its own area, under SOFTWARE\Wow6432node. -# -local rule software-registry-value ( path : data ? ) -{ - local result ; - for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE - { - for local x64elt in "" Wow6432node\\ # Account for 64-bit windows - { - if ! $(result) - { - result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ; - } - } - - } - return $(result) ; -} - - -.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ; -.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ; - -.working-directory = [ PWD ] ; -.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ; -.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ; - - -local rule windows-to-cygwin-path ( path ) -{ - # If path is rooted with a drive letter, rewrite it using the /cygdrive - # mountpoint. - local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ; - - # Else if path is rooted without a drive letter, use the working directory. - p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ; - - # Else return the path unchanged. - return $(p:E=$(path:T)) ; -} - - -# :W only works in Cygwin builds of bjam. This one works on NT builds as well. -# -local rule cygwin-to-windows-path ( path ) -{ - path = $(path:R="") ; # strip any trailing slash - - local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ; - if $(drive-letter) - { - path = $(drive-letter) ; - } - else if $(path:R=/x) = $(path) # already rooted? - { - # Look for a cygwin mount that includes each head sequence in $(path). - local head = $(path) ; - local tail = "" ; - - while $(head) - { - local root = [ software-registry-value - "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ; - - if $(root) - { - path = $(tail:R=$(root)) ; - head = ; - } - tail = $(tail:R=$(head:D=)) ; - - if $(head) = / - { - head = ; - } - else - { - head = $(head:D) ; - } - } - } - return [ regex.replace $(path:R="") / \\ ] ; -} - - -# Convert a *nix path to native. -# -local rule *nix-path-to-native ( path ) -{ - if [ os.name ] = NT - { - path = [ cygwin-to-windows-path $(path) ] ; - } - return $(path) ; -} - - -# Convert an NT path to native. -# -local rule windows-path-to-native ( path ) -{ - if [ os.name ] = NT - { - return $(path) ; - } - else - { - return [ windows-to-cygwin-path $(path) ] ; - } -} - - -# Return nonempty if path looks like a windows path, i.e. it starts with a drive -# letter or contains backslashes. 
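# A couple of illustrative inputs: "c:\Python25\python" and "Tools\python"
# both count as Windows paths here (the first has a drive letter, the second
# a backslash), while "/usr/local/bin/python" does not and is handed to
# *nix-path-to-native by path-to-native below.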
-# -local rule guess-windows-path ( path ) -{ - return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ; -} - - -local rule path-to-native ( paths * ) -{ - local result ; - - for local p in $(paths) - { - if [ guess-windows-path $(p) ] - { - result += [ windows-path-to-native $(p) ] ; - } - else - { - result += [ *nix-path-to-native $(p:T) ] ; - } - } - return $(result) ; -} - - -# Validate the version string and extract the major/minor part we care about. -# -local rule split-version ( version ) -{ - local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ; - if ! $(major-minor[2]) || $(major-minor[3]) - { - ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ; - - # Add a zero to account for the missing digit if necessary. - major-minor += 0 ; - } - - return $(major-minor[1]) $(major-minor[2]) ; -} - - -# Build a list of versions from 3.0 down to 1.5. Because bjam can not enumerate -# registry sub-keys, we have no way of finding a version with a 2-digit minor -# version, e.g. 2.10 -- let us hope that never happens. -# -.version-countdown = ; -for local v in [ numbers.range 15 30 ] -{ - .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ; -} - - -local rule windows-installed-pythons ( version ? ) -{ - version ?= $(.version-countdown) ; - local interpreters ; - - for local v in $(version) - { - local install-path = [ - software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ; - - if $(install-path) - { - install-path = [ windows-path-to-native $(install-path) ] ; - debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ; - } - - interpreters += $(:E=python:R=$(install-path)) ; - } - return $(interpreters) ; -} - - -local rule darwin-installed-pythons ( version ? ) -{ - version ?= $(.version-countdown) ; - - local prefix - = [ GLOB /System/Library/Frameworks /Library/Frameworks - : Python.framework ] ; - - return $(prefix)/Versions/$(version)/bin/python ; -} - - -# Assume "python-cmd" invokes a python interpreter and invoke it to extract all -# the information we care about from its "sys" module. Returns void if -# unsuccessful. -# -local rule probe ( python-cmd ) -{ - # Avoid invoking a Cygwin symlink on NT. - local skip-symlink ; - if [ os.name ] = NT - { - skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ; - } - - if $(skip-symlink) - { - debug-message -------------------------------------------------------------------- ; - debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ; - debug-message causing a bjam built for Windows to hang. ; - debug-message ; - debug-message If you intend to target a Cygwin build of Python, please ; - debug-message replace the path to the link with the path to a real executable ; - debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ; - debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ; - debug-message backslashes ; - debug-message -------------------------------------------------------------------- ; - } - else - { - # Prepare a List of Python format strings and expressions that can be - # used to print the constants we want from the sys module. - - # We do not really want sys.version since that is a complicated string, - # so get the information from sys.version_info instead. 
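    # For reference, with the sys-elements used by the caller (version,
    # platform, prefix, exec_prefix, executable) the command assembled below
    # comes out roughly as
    #
    #   python -c "from sys import *; print('version=%d.%d\nplatform=%s\n...'
    #       % (version_info[0], version_info[1], platform, ...))"
    #
    # one name=value pair per line, which the SUBST further down parses back.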
- local format = "version=%d.%d" ; - local exprs = "version_info[0]" "version_info[1]" ; - - for local s in $(sys-elements[2-]) - { - format += $(s)=%s ; - exprs += $(s) ; - } - - # Invoke Python and ask it for all those values. - if [ version.check-jam-version 3 1 17 ] || ( [ os.name ] != NT ) - { - # Prior to version 3.1.17 Boost Jam's SHELL command did not support - # quoted commands correctly on Windows. This means that on that - # platform we do not support using a Python command interpreter - # executable whose path contains a space character. - python-cmd = \"$(python-cmd)\" ; - } - local full-cmd = - $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ; - - local output = [ shell-cmd $(full-cmd) ] ; - if $(output) - { - # Parse the output to get all the results. - local nl = " - -" ; - for s in $(sys-elements) - { - # These variables are expected to be declared local in the - # caller, so Jam's dynamic scoping will set their values there. - sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ; - } - } - return $(output) ; - } -} - - -# Make sure the "libraries" and "includes" variables (in an enclosing scope) -# have a value based on the information given. -# -local rule compute-default-paths ( target-os : version ? : prefix ? : - exec-prefix ? ) -{ - exec-prefix ?= $(prefix) ; - - if $(target-os) = windows - { - # The exec_prefix is where you're supposed to look for machine-specific - # libraries. - local default-library-path = $(exec-prefix)\\libs ; - local default-include-path = $(:E=Include:R=$(prefix)) ; - - # If the interpreter was found in a directory called "PCBuild" or - # "PCBuild8," assume we're looking at a Python built from the source - # distro, and go up one additional level to the default root. Otherwise, - # the default root is the directory where the interpreter was found. - - # We ask Python itself what the executable path is in case of - # intermediate symlinks or shell scripts. - local executable-dir = $(sys.executable:D) ; - - if [ MATCH ^(PCBuild) : $(executable-dir:D=) ] - { - debug-message "This Python appears to reside in a source distribution;" ; - debug-message "prepending \""$(executable-dir)"\" to default library search path" ; - - default-library-path = $(executable-dir) $(default-library-path) ; - - default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ; - - debug-message "and \""$(default-include-path[1])"\" to default #include path" ; - } - - libraries ?= $(default-library-path) ; - includes ?= $(default-include-path) ; - } - else - { - includes ?= $(prefix)/include/python$(version) ; - - local lib = $(exec-prefix)/lib ; - libraries ?= $(lib)/python$(version)/config $(lib) ; - } -} - -# The version of the python interpreter to use. -feature.feature python : : propagated ; -feature.feature python.interpreter : : free ; - -toolset.flags python.capture-output PYTHON : <python.interpreter> ; - -# -# Support for Python configured --with-pydebug -# -feature.feature python-debugging : off on : propagated ; -builtin.variant debug-python : debug : <python-debugging>on ; - - -# Return a list of candidate commands to try when looking for a Python -# interpreter. prefix is expected to be a native path. -# -local rule candidate-interpreters ( version ? : prefix ? : target-os ) -{ - local bin-path = bin ; - if $(target-os) = windows - { - # On Windows, look in the root directory itself and, to work with the - # result of a build-from-source, the PCBuild directory. 
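        # Sketch of the outcome with a made-up prefix of C:\Python25: "python"
        # is looked for under C:\Python25\PCBuild8, C:\Python25\PCBuild and
        # C:\Python25 itself, then taken plainly from PATH, and finally from
        # any registry-detected installations.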
- bin-path = PCBuild8 PCBuild "" ; - } - - bin-path = $(bin-path:R=$(prefix)) ; - - if $(target-os) in windows darwin - { - return # Search: - $(:E=python:R=$(bin-path)) # Relative to the prefix, if any - python # In the PATH - [ $(target-os)-installed-pythons $(version) ] # Standard install locations - ; - } - else - { - # Search relative to the prefix, or if none supplied, in PATH. - local unversioned = $(:E=python:R=$(bin-path:E=)) ; - - # If a version was specified, look for a python with that specific - # version appended before looking for one called, simply, "python" - return $(unversioned)$(version) $(unversioned) ; - } -} - - -# Compute system library dependencies for targets linking with static Python -# libraries. -# -# On many systems, Python uses libraries such as pthreads or libdl. Since static -# libraries carry no library dependency information of their own that the linker -# can extract, these extra dependencies have to be given explicitly on the link -# line of the client. The information about these dependencies is packaged into -# the "python" target below. -# -# Even where Python itself uses pthreads, it never allows extension modules to -# be entered concurrently (unless they explicitly give up the interpreter lock). -# Therefore, extension modules do not need the efficiency overhead of threadsafe -# code as produced by <threading>multi, and we handle libpthread along with -# other libraries here. Note: this optimization is based on an assumption that -# the compiler generates link-compatible code in both the single- and -# multi-threaded cases, and that system libraries do not change their ABIs -# either. -# -# Returns a list of usage-requirements that link to the necessary system -# libraries. -# -local rule system-library-dependencies ( target-os ) -{ - switch $(target-os) - { - case s[uo][nl]* : # solaris, sun, sunos - # Add a librt dependency for the gcc toolset on SunOS (the sun - # toolset adds -lrt unconditionally). While this appears to - # duplicate the logic already in gcc.jam, it does not as long as - # we are not forcing <threading>multi. - - # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields - # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem - # to be the right list for extension modules. For example, on my - # installation, adding -ldl causes at least one test to fail because - # the library can not be found and removing it causes no failures. - - # Apparently, though, we need to add -lrt for gcc. - return <toolset>gcc:<library>rt ; - - case osf : return <library>pthread <toolset>gcc:<library>rt ; - - case qnx* : return ; - case darwin : return ; - case windows : return ; - - case hpux : return <library>rt ; - case *bsd : return <library>pthread <toolset>gcc:<library>util ; - - case aix : return <library>pthread <library>dl ; - - case * : return <library>pthread <library>dl - <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ; - } -} - - -# Declare a target to represent Python's library. -# -local rule declare-libpython-target ( version ? : requirements * ) -{ - # Compute the representation of Python version in the name of Python's - # library file. - local lib-version = $(version) ; - if <target-os>windows in $(requirements) - { - local major-minor = [ split-version $(version) ] ; - lib-version = $(major-minor:J="") ; - if <python-debugging>on in $(requirements) - { - lib-version = $(lib-version)_d ; - } - } - - if ! 
$(lib-version) - { - ECHO *** warning: could not determine Python version, which will ; - ECHO *** warning: probably prevent us from linking with the python ; - ECHO *** warning: library. Consider explicitly passing the version ; - ECHO *** warning: to 'using python'. ; - } - - # Declare it. - lib python.lib : : <name>python$(lib-version) $(requirements) ; -} - - -# Implementation of init. -local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? : - condition * : extension-suffix ? ) -{ - local prefix ; - local exec-prefix ; - local cmds-to-try ; - local interpreter-cmd ; - - local target-os = [ feature.get-values target-os : $(condition) ] ; - target-os ?= [ feature.defaults target-os ] ; - target-os = $(target-os:G=) ; - - if $(target-os) = windows && <python-debugging>on in $(condition) - { - extension-suffix ?= _d ; - } - extension-suffix ?= "" ; - - # Normalize and dissect any version number. - local major-minor ; - if $(version) - { - major-minor = [ split-version $(version) ] ; - version = $(major-minor:J=.) ; - } - - local cmds-to-try ; - - if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ] - { - # If the user did not pass a command, whatever we got was a prefix. - prefix = $(cmd-or-prefix) ; - cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ; - } - else - { - # Work with the command the user gave us. - cmds-to-try = $(cmd-or-prefix) ; - - # On Windows, do not nail down the interpreter command just yet in case - # the user specified something that turns out to be a cygwin symlink, - # which could bring down bjam if we invoke it. - if $(target-os) != windows - { - interpreter-cmd = $(cmd-or-prefix) ; - } - } - - # Values to use in case we can not really find anything in the system. - local fallback-cmd = $(cmds-to-try[1]) ; - local fallback-version ; - - # Anything left to find or check? - if ! ( $(interpreter-cmd) && $(includes) && $(libraries) ) - { - # Values to be extracted from python's sys module. These will be set by - # the probe rule, above, using Jam's dynamic scoping. - local sys-elements = version platform prefix exec_prefix executable ; - local sys.$(sys-elements) ; - - # Compute the string Python's sys.platform needs to match. If not - # targeting Windows or cygwin we will assume only native builds can - # possibly run, so we will not require a match and we leave sys.platform - # blank. - local platform ; - switch $(target-os) - { - case windows : platform = win32 ; - case cygwin : platform = cygwin ; - } - - while $(cmds-to-try) - { - # Pop top command. - local cmd = $(cmds-to-try[1]) ; - cmds-to-try = $(cmds-to-try[2-]) ; - - debug-message Checking interpreter command \"$(cmd)\"... ; - if [ probe $(cmd) ] - { - fallback-version ?= $(sys.version) ; - - # Check for version/platform validity. - for local x in version platform - { - if $($(x)) && $($(x)) != $(sys.$(x)) - { - debug-message ...$(x) "mismatch (looking for" - $($(x)) but found $(sys.$(x))")" ; - cmd = ; - } - } - - if $(cmd) - { - debug-message ...requested configuration matched! ; - - exec-prefix = $(sys.exec_prefix) ; - - compute-default-paths $(target-os) : $(sys.version) : - $(sys.prefix) : $(sys.exec_prefix) ; - - version = $(sys.version) ; - interpreter-cmd ?= $(cmd) ; - cmds-to-try = ; # All done. - } - } - else - { - debug-message ...does not invoke a working interpreter ; - } - } - } - - # Anything left to compute? 
- if $(includes) && $(libraries) - { - .configured = true ; - } - else - { - version ?= $(fallback-version) ; - version ?= 2.5 ; - exec-prefix ?= $(prefix) ; - compute-default-paths $(target-os) : $(version) : $(prefix:E=) ; - } - - if ! $(interpreter-cmd) - { - fallback-cmd ?= python ; - debug-message No working Python interpreter found. ; - if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ] - { - interpreter-cmd = $(fallback-cmd) ; - debug-message falling back to \"$(interpreter-cmd)\" ; - } - } - - includes = [ path-to-native $(includes) ] ; - libraries = [ path-to-native $(libraries) ] ; - - debug-message "Details of this Python configuration:" ; - debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ; - debug-message " include path:" \"$(includes:E=<empty>)\" ; - debug-message " library path:" \"$(libraries:E=<empty>)\" ; - if $(target-os) = windows - { - debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ; - } - - # - # End autoconfiguration sequence. - # - local target-requirements = $(condition) ; - - # Add the version, if any, to the target requirements. - if $(version) - { - if ! $(version) in [ feature.values python ] - { - feature.extend python : $(version) ; - } - target-requirements += <python>$(version:E=default) ; - } - - target-requirements += <target-os>$(target-os) ; - - # See if we can find a framework directory on darwin. - local framework-directory ; - if $(target-os) = darwin - { - # Search upward for the framework directory. - local framework-directory = $(libraries[-1]) ; - while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework - { - framework-directory = $(framework-directory:D) ; - } - - if $(framework-directory:D=) = Python.framework - { - debug-message framework directory is \"$(framework-directory)\" ; - } - else - { - debug-message "no framework directory found; using library path" ; - framework-directory = ; - } - } - - local dll-path = $(libraries) ; - - # Make sure that we can find the Python DLL on Windows. - if ( $(target-os) = windows ) && $(exec-prefix) - { - dll-path += $(exec-prefix) ; - } - - # - # Prepare usage requirements. - # - local usage-requirements = [ system-library-dependencies $(target-os) ] ; - usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ; - if <python-debugging>on in $(condition) - { - if $(target-os) = windows - { - # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define - # Py_DEBUG we will get multiple definition warnings. - usage-requirements += <define>_DEBUG ; - } - else - { - usage-requirements += <define>Py_DEBUG ; - } - } - - # Global, but conditional, requirements to give access to the interpreter - # for general utilities, like other toolsets, that run Python scripts. - toolset.add-requirements - $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ; - - # Register the right suffix for extensions. - register-extension-suffix $(extension-suffix) : $(target-requirements) ; - - # - # Declare the "python" target. This should really be called - # python_for_embedding. - # - - if $(framework-directory) - { - alias python - : - : $(target-requirements) - : - : $(usage-requirements) <framework>$(framework-directory) - ; - } - else - { - declare-libpython-target $(version) : $(target-requirements) ; - - # This is an evil hack. On, Windows, when Python is embedded, nothing - # seems to set up sys.path to include Python's standard library - # (http://article.gmane.org/gmane.comp.python.general/544986). 
The evil - # here, aside from the workaround necessitated by Python's bug, is that: - # - # a. we're guessing the location of the python standard library from the - # location of pythonXX.lib - # - # b. we're hijacking the <testing.launcher> property to get the - # environment variable set up, and the user may want to use it for - # something else (e.g. launch the debugger). - local set-PYTHONPATH ; - if $(target-os) = windows - { - set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH : - $(libraries:D)/Lib ] ; - } - - alias python - : - : $(target-requirements) - : - # Why python.lib must be listed here instead of along with the - # system libs is a mystery, but if we do not do it, on cygwin, - # -lpythonX.Y never appears in the command line (although it does on - # linux). - : $(usage-requirements) - <testing.launcher>$(set-PYTHONPATH) - <library-path>$(libraries) <library>python.lib - ; - } - - # On *nix, we do not want to link either Boost.Python or Python extensions - # to libpython, because the Python interpreter itself provides all those - # symbols. If we linked to libpython, we would get duplicate symbols. So - # declare two targets -- one for building extensions and another for - # embedding. - # - # Unlike most *nix systems, Mac OS X's linker does not permit undefined - # symbols when linking a shared library. So, we still need to link against - # the Python framework, even when building extensions. Note that framework - # builds of Python always use shared libraries, so we do not need to worry - # about duplicate Python symbols. - if $(target-os) in windows cygwin darwin - { - alias python_for_extensions : python : $(target-requirements) ; - } - # On AIX we need Python extensions and Boost.Python to import symbols from - # the Python interpreter. Dynamic libraries opened with dlopen() do not - # inherit the symbols from the Python interpreter. 
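    # Background for the AIX branch below (an assumption about the usual
    # installation layout rather than something checked here): python.exp is
    # the export file shipped in Python's lib/pythonX.Y/config directory,
    # which is also the first default library path computed by
    # compute-default-paths above.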
- else if $(target-os) = aix - { - alias python_for_extensions - : - : $(target-requirements) - : - : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp - ; - } - else - { - alias python_for_extensions - : - : $(target-requirements) - : - : $(usage-requirements) - ; - } -} - - -rule configured ( ) -{ - return $(.configured) ; -} - - -type.register PYTHON_EXTENSION : : SHARED_LIB ; - - -local rule register-extension-suffix ( root : condition * ) -{ - local suffix ; - - switch [ feature.get-values target-os : $(condition) ] - { - case windows : suffix = pyd ; - case cygwin : suffix = dll ; - case hpux : - { - if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4 - { - suffix = sl ; - } - else - { - suffix = so ; - } - } - case * : suffix = so ; - } - - type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ; -} - - -# Unset 'lib' prefix for PYTHON_EXTENSION -type.set-generated-target-prefix PYTHON_EXTENSION : : "" ; - - -rule python-extension ( name : sources * : requirements * : default-build * : - usage-requirements * ) -{ - if [ configured ] - { - requirements += <use>/python//python_for_extensions ; - } - requirements += <suppress-import-lib>true ; - - local project = [ project.current ] ; - - targets.main-target-alternative - [ new typed-target $(name) : $(project) : PYTHON_EXTENSION - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; -} - -IMPORT python : python-extension : : python-extension ; - -rule py2to3 -{ - common.copy $(>) $(<) ; - 2to3 $(<) ; -} - -actions 2to3 -{ - 2to3 -wn "$(<)" - 2to3 -dwn "$(<)" -} - - -# Support for testing. -type.register PY : py ; -type.register RUN_PYD_OUTPUT ; -type.register RUN_PYD : : TEST ; - - -class python-test-generator : generator -{ - import set ; - - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - self.composing = true ; - } - - rule run ( project name ? : property-set : sources * : multiple ? ) - { - local pyversion = [ $(property-set).get <python> ] ; - local python ; - local other-pythons ; - - # Make new target that converting Python source by 2to3 when running with Python 3. - local rule make-2to3-source ( source ) - { - if $(pyversion) >= 3.0 - { - local a = [ new action $(source) : python.py2to3 : $(property-set) ] ; - local t = [ utility.basename [ $(s).name ] ] ; - local p = [ new file-target $(t) : PY : $(project) : $(a) ] ; - return $(p) ; - } - else - { - return $(source) ; - } - } - - for local s in $(sources) - { - if [ $(s).type ] = PY - { - if ! $(python) - { - # First Python source ends up on command line. - python = [ make-2to3-source $(s) ] ; - - } - else - { - # Other Python sources become dependencies. - other-pythons += [ make-2to3-source $(s) ] ; - } - } - } - - local extensions ; - for local s in $(sources) - { - if [ $(s).type ] = PYTHON_EXTENSION - { - extensions += $(s) ; - } - } - - local libs ; - for local s in $(sources) - { - if [ type.is-derived [ $(s).type ] LIB ] - && ! 
$(s) in $(extensions) - { - libs += $(s) ; - } - } - - local new-sources ; - for local s in $(sources) - { - if [ type.is-derived [ $(s).type ] CPP ] - { - local name = [ utility.basename [ $(s).name ] ] ; - if $(name) = [ utility.basename [ $(python).name ] ] - { - name = $(name)_ext ; - } - local extension = [ generators.construct $(project) $(name) : - PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ; - - # The important part of usage requirements returned from - # PYTHON_EXTENSION generator are xdll-path properties that will - # allow us to find the python extension at runtime. - property-set = [ $(property-set).add $(extension[1]) ] ; - - # Ignore usage requirements. We're a top-level generator and - # nobody is going to use what we generate. - new-sources += $(extension[2-]) ; - } - } - - property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ; - - result = [ construct-result $(python) $(extensions) $(new-sources) : - $(project) $(name) : $(property-set) ] ; - } -} - - -generators.register - [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ; - -generators.register-standard testing.expect-success - : RUN_PYD_OUTPUT : RUN_PYD ; - - -# There are two different ways of spelling OS names. One is used for [ os.name ] -# and the other is used for the <host-os> and <target-os> properties. Until that -# is remedied, this sets up a crude mapping from the latter to the former, that -# will work *for the purposes of cygwin/NT cross-builds only*. Could not think -# of a better name than "translate". -# -.translate-os-windows = NT ; -.translate-os-cygwin = CYGWIN ; -local rule translate-os ( src-os ) -{ - local x = $(.translate-os-$(src-os)) [ os.name ] ; - return $(x[1]) ; -} - - -# Extract the path to a single ".pyd" source. This is used to build the -# PYTHONPATH for running bpl tests. -# -local rule pyd-pythonpath ( source ) -{ - return [ on $(source) return $(LOCATE) $(SEARCH) ] ; -} - - -# The flag settings on testing.capture-output do not apply to python.capture -# output at the moment. Redo this explicitly. -toolset.flags python.capture-output ARGS <testing.arg> ; - - -rule capture-output ( target : sources * : properties * ) -{ - # Setup up a proper DLL search path. Here, $(sources[1]) is a python module - # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to - # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not - # consulted. Move it over explicitly. - RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ; - - PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ; - PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ; - - # After test is run, we remove the Python module, but not the Python script. - testing.capture-output $(target) : $(sources[1]) : $(properties) : - $(sources[2-]) ; - - # PYTHONPATH is different; it will be interpreted by whichever Python is - # invoked and so must follow path rules for the target os. The only OSes - # where we can run python for other OSes currently are NT and CYGWIN so we - # only need to handle those cases. - local target-os = [ feature.get-values target-os : $(properties) ] ; - # Oddly, host-os is not in properties, so grab the default value. 
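    # (For the transform a few lines below: with host-os=windows and
    # target-os=cygwin the rule selected is windows-to-cygwin-path, defined
    # earlier, which turns e.g. c:/some/pyd/dir into /cygdrive/c/some/pyd/dir;
    # the directory name is only an example.)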
- local host-os = [ feature.defaults host-os ] ; - host-os = $(host-os:G=) ; - if $(target-os) != $(host-os) - { - PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path : - $(PYTHONPATH) ] ; - } - local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ; - local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH : - $(PYTHONPATH:J=$(path-separator)) ] ; - LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ; -} - - -rule bpl-test ( name : sources * : requirements * ) -{ - local s ; - sources ?= $(name).py $(name).cpp ; - return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python - : $(requirements) : $(name) ] ; -} - - -IMPORT $(__name__) : bpl-test : : bpl-test ; diff --git a/jam-files/boost-build/tools/qcc.jam b/jam-files/boost-build/tools/qcc.jam deleted file mode 100644 index 4f2a4fc1..00000000 --- a/jam-files/boost-build/tools/qcc.jam +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright (c) 2001 David Abrahams. -# Copyright (c) 2002-2003 Rene Rivera. -# Copyright (c) 2002-2003 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import common ; -import errors ; -import feature ; -import generators ; -import os ; -import property ; -import set ; -import toolset ; -import type ; -import unix ; - -feature.extend toolset : qcc ; - -toolset.inherit-generators qcc : unix : unix.link unix.link.dll ; -generators.override builtin.lib-generator : qcc.prebuilt ; -toolset.inherit-flags qcc : unix ; -toolset.inherit-rules qcc : unix ; - -# Initializes the qcc toolset for the given version. If necessary, command may -# be used to specify where the compiler is located. The parameter 'options' is a -# space-delimited list of options, each one being specified as -# <option-name>option-value. Valid option names are: cxxflags, linkflags and -# linker-type. Accepted values for linker-type are gnu and sun, gnu being the -# default. -# -# Example: -# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ; -# -rule init ( version ? : command * : options * ) -{ - local condition = [ common.check-init-parameters qcc : version $(version) ] ; - local command = [ common.get-invocation-command qcc : QCC : $(command) ] ; - common.handle-options qcc : $(condition) : $(command) : $(options) ; -} - - -generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ; -generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ; -generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ; - - -# Declare flags for compilation. -toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ; - -# Declare flags and action for compilation. 
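# A note on the spelling used below: the qcc driver forwards options written
# as -Wc,-fno-inline, -Wc,-Wall and so on to the underlying compiler stage,
# much as -Wl, forwards options to the linker, which is why plain gcc-style
# flags appear here wrapped in -Wc,.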
-toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ; -toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags qcc.compile OPTIONS <optimization>space : -Os ; - -toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ; -toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ; -toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ; - -toolset.flags qcc.compile OPTIONS <warnings>off : -w ; -toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ; -toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ; - -toolset.flags qcc.compile OPTIONS <profiling>on : -p ; - -toolset.flags qcc.compile OPTIONS <cflags> ; -toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ; -toolset.flags qcc.compile DEFINES <define> ; -toolset.flags qcc.compile INCLUDES <include> ; - -toolset.flags qcc.compile OPTIONS <link>shared : -shared ; - -toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; - - -rule compile.c++ -{ - # Here we want to raise the template-depth parameter value to something - # higher than the default value of 17. Note that we could do this using the - # feature.set-default rule but we do not want to set the default value for - # all toolsets as well. - # - # TODO: This 'modified default' has been inherited from some 'older Boost - # Build implementation' and has most likely been added to make some Boost - # library parts compile correctly. We should see what exactly prompted this - # and whether we can get around the problem more locally. - local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ; - if ! $(template-depth) - { - TEMPLATE_DEPTH on $(1) = 128 ; - } -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.asm -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - - -# The class checking that we do not try to use the <runtime-link>static property -# while creating or using a shared library, since it is not supported by qcc/ -# /libc. -# -class qcc-linking-generator : unix-linking-generator -{ - rule generated-targets ( sources + : property-set : project name ? ) - { - if <runtime-link>static in [ $(property-set).raw ] - { - local m ; - if [ id ] = "qcc.link.dll" - { - m = "on qcc, DLL can't be build with <runtime-link>static" ; - } - if ! $(m) - { - for local s in $(sources) - { - local type = [ $(s).type ] ; - if $(type) && [ type.is-derived $(type) SHARED_LIB ] - { - m = "on qcc, using DLLS together with the <runtime-link>static options is not possible " ; - } - } - } - if $(m) - { - errors.user-error $(m) : "It is suggested to use" - "<runtime-link>static together with <link>static." ; - } - } - - return [ unix-linking-generator.generated-targets - $(sources) : $(property-set) : $(project) $(name) ] ; - } -} - -generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE - : <toolset>qcc ] ; - -generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ - : SHARED_LIB : <toolset>qcc ] ; - -generators.override qcc.prebuilt : builtin.prebuilt ; -generators.override qcc.searched-lib-generator : searched-lib-generator ; - - -# Declare flags for linking. -# First, the common flags. 
-toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ; -toolset.flags qcc.link OPTIONS <profiling>on : -p ; -toolset.flags qcc.link OPTIONS <linkflags> ; -toolset.flags qcc.link LINKPATH <library-path> ; -toolset.flags qcc.link FINDLIBS-ST <find-static-library> ; -toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ; -toolset.flags qcc.link LIBRARIES <library-file> ; - -toolset.flags qcc.link FINDLIBS-SA : m ; - -# For <runtime-link>static we made sure there are no dynamic libraries in the -# link. -toolset.flags qcc.link OPTIONS <runtime-link>static : -static ; - -# Assuming this is just like with gcc. -toolset.flags qcc.link RPATH : <dll-path> : unchecked ; -toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ; - - -# Declare actions for linking. -# -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; - # Serialize execution of the 'link' action, since running N links in - # parallel is just slower. For now, serialize only qcc links while it might - # be a good idea to serialize all links. - JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) -} - - -# Always remove archive and start again. Here is the rationale from Andre Hentz: -# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c, -# updated my Jamfiles and rebuilt. My program was crashing with absurd errors. -# After some debugging I traced it back to the fact that a1.o was *still* in -# liba.a -RM = [ common.rm-command ] ; -if [ os.name ] = NT -{ - RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ; -} - - -# Declare action for creating static libraries. The 'r' letter means to add -# files to the archive with replacement. Since we remove the archive, we do not -# care about replacement, but there is no option to "add without replacement". -# The 'c' letter suppresses warnings in case the archive does not exists yet. -# That warning is produced only on some platforms, for whatever reasons. -# -actions piecemeal archive -{ - $(RM) "$(<)" - ar rc "$(<)" "$(>)" -} - - -rule link.dll ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; - JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ; -} - - -# Differ from 'link' above only by -shared. -# -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) -} diff --git a/jam-files/boost-build/tools/qt.jam b/jam-files/boost-build/tools/qt.jam deleted file mode 100644 index 8aa7ca26..00000000 --- a/jam-files/boost-build/tools/qt.jam +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2006 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Forwarning toolset file to Qt GUI library. Forwards to the toolset file -# for the current version of Qt. - -import qt4 ; - -rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? 
: condition * ) -{ - qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ; -} - - diff --git a/jam-files/boost-build/tools/qt3.jam b/jam-files/boost-build/tools/qt3.jam deleted file mode 100644 index f82cf0ac..00000000 --- a/jam-files/boost-build/tools/qt3.jam +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Support for the Qt GUI library version 3 -# (http://www.trolltech.com/products/qt3/index.html). -# For new developments, it is recommended to use Qt4 via the qt4 Boost.Build -# module. - -import modules ; -import feature ; -import errors ; -import type ; -import "class" : new ; -import generators ; -import project ; -import toolset : flags ; - -# Convert this module into a project, so that we can declare targets here. -project.initialize $(__name__) ; -project qt3 ; - - -# Initialized the QT support module. The 'prefix' parameter tells where QT is -# installed. When not given, environmental variable QTDIR should be set. -# -rule init ( prefix ? ) -{ - if ! $(prefix) - { - prefix = [ modules.peek : QTDIR ] ; - if ! $(prefix) - { - errors.error - "QT installation prefix not given and QTDIR variable is empty" ; - } - } - - if $(.initialized) - { - if $(prefix) != $(.prefix) - { - errors.error - "Attempt the reinitialize QT with different installation prefix" ; - } - } - else - { - .initialized = true ; - .prefix = $(prefix) ; - - generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ; - # Note: the OBJ target type here is fake, take a look at - # qt4.jam/uic-h-generator for explanations that apply in this case as - # well. - generators.register [ new moc-h-generator-qt3 - qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ; - - # The UI type is defined in types/qt.jam, and UIC_H is only used in - # qt.jam, but not in qt4.jam, so define it here. - type.register UIC_H : : H ; - - generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ; - - # The following generator is used to convert UI files to CPP. It creates - # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also - # returns UIC_H target, so that it can be mocced. - class qt::uic-cpp-generator : generator - { - rule __init__ ( ) - { - generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ; - } - - rule run ( project name ? : properties * : sources + ) - { - # Consider this: - # obj test : test_a.cpp : <optimization>off ; - # - # This generator will somehow be called in this case, and, - # will fail -- which is okay. However, if there are <library> - # properties they will be converted to sources, so the size of - # 'sources' will be more than 1. In this case, the base generator - # will just crash -- and that's not good. Just use a quick test - # here. - - local result ; - if ! $(sources[2]) - { - # Construct CPP as usual - result = [ generator.run $(project) $(name) - : $(properties) : $(sources) ] ; - - # If OK, process UIC_H with moc. It's pretty clear that - # the object generated with UIC will have Q_OBJECT macro. 
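                # Concrete example (the file name is illustrative): for
                # dialog.ui the chain yields the uic-generated header and the
                # matching .cpp, and the step below then feeds that header
                # through moc as well.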
- if $(result) - { - local action = [ $(result[1]).action ] ; - local sources = [ $(action).sources ] ; - local mocced = [ generators.construct $(project) $(name) - : CPP : $(properties) : $(sources[2]) ] ; - result += $(mocced[2-]) ; - } - } - - return $(result) ; - } - } - - generators.register [ new qt::uic-cpp-generator ] ; - - # Finally, declare prebuilt target for QT library. - local usage-requirements = - <include>$(.prefix)/include - <dll-path>$(.prefix)/lib - <library-path>$(.prefix)/lib - <allow>qt3 - ; - lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ; - lib qt : : <name>qt <threading>single : : $(usage-requirements) ; - } -} - -class moc-h-generator-qt3 : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP - { - name = [ $(sources[1]).name ] ; - name = $(name:B) ; - - local a = [ new action $(sources[1]) : qt3.moc.cpp : - $(property-set) ] ; - - local target = [ - new file-target $(name) : MOC : $(project) : $(a) ] ; - - local r = [ virtual-target.register $(target) ] ; - - # Since this generator will return a H target, the linking generator - # won't use it at all, and won't set any dependency on it. However, - # we need the target to be seen by bjam, so that the dependency from - # sources to this generated header is detected -- if Jam does not - # know about this target, it won't do anything. - DEPENDS all : [ $(r).actualize ] ; - - return $(r) ; - } - } -} - - -# Query the installation directory. This is needed in at least two scenarios. -# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt -# plugins to the Qt-Tree. -# -rule directory -{ - return $(.prefix) ; -} - -# -f forces moc to include the processed source file. Without it, it would think -# that .qpp is not a header and would not include it from the generated file. -# -actions moc -{ - $(.prefix)/bin/moc -f $(>) -o $(<) -} - -# When moccing .cpp files, we don't need -f, otherwise generated code will -# include .cpp and we'll get duplicated symbols. -# -actions moc.cpp -{ - $(.prefix)/bin/moc $(>) -o $(<) -} - - -space = " " ; - -# Sometimes it's required to make 'plugins' available during uic invocation. To -# help with this we add paths to all dependency libraries to uic commane line. -# The intention is that it's possible to write -# -# exe a : ... a.ui ... : <uses>some_plugin ; -# -# and have everything work. We'd add quite a bunch of unrelated paths but it -# won't hurt. -# -flags qt3.uic-h LIBRARY_PATH <xdll-path> ; -actions uic-h -{ - $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH) -} - - -flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ; -# The second target is uic-generated header name. It's placed in build dir, but -# we want to include it using only basename. -actions uic-cpp -{ - $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH) -} diff --git a/jam-files/boost-build/tools/qt4.jam b/jam-files/boost-build/tools/qt4.jam deleted file mode 100644 index 71d1b762..00000000 --- a/jam-files/boost-build/tools/qt4.jam +++ /dev/null @@ -1,724 +0,0 @@ -# Copyright 2002-2006 Vladimir Prus -# Copyright 2005 Alo Sarv -# Copyright 2005-2009 Juergen Hunold -# -# Distributed under the Boost Software License, Version 1.0. 
(See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Qt4 library support module -# -# The module attempts to auto-detect QT installation location from QTDIR -# environment variable; failing that, installation location can be passed as -# argument: -# -# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ; -# -# The module supports code generation from .ui and .qrc files, as well as -# running the moc preprocessor on headers. Note that you must list all your -# moc-able headers in sources. -# -# Example: -# -# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc -# /qt4//QtGui /qt4//QtNetwork ; -# -# It's also possible to run moc on cpp sources: -# -# import cast ; -# -# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ; -# -# When moccing source file myapp.cpp you need to include "myapp.moc" from -# myapp.cpp. When moccing .h files, the output of moc will be automatically -# compiled and linked in, you don't need any includes. -# -# This is consistent with Qt guidelines: -# http://doc.trolltech.com/4.0/moc.html - -import modules ; -import feature ; -import errors ; -import type ; -import "class" : new ; -import generators ; -import project ; -import toolset : flags ; -import os ; -import virtual-target ; -import scanner ; - -# Qt3Support control feature -# -# Qt4 configure defaults to build Qt4 libraries with Qt3Support. -# The autodetection is missing, so we default to disable Qt3Support. -# This prevents the user from inadvertedly using a deprecated API. -# -# The Qt3Support library can be activated by adding -# "<qt3support>on" to requirements -# -# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS" -# to get warnings about deprecated Qt3 support funtions and classes. -# Files ported by the "qt3to4" conversion tool contain _tons_ of -# warnings, so this define is not set as default. -# -# Todo: Detect Qt3Support from Qt's configure data. -# Or add more auto-configuration (like python). -feature.feature qt3support : off on : propagated link-incompatible ; - -# The Qt version used for requirements -# Valid are <qt>4.4 or <qt>4.5.0 -# Auto-detection via qmake sets '<qt>major.minor.patch' -feature.feature qt : : propagated ; - -project.initialize $(__name__) ; -project qt ; - -# Save the project so that we tolerate 'import + using' combo. -.project = [ project.current ] ; - -# Helper utils for easy debug output -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = TRUE ; -} - -local rule debug-message ( message * ) -{ - if $(.debug-configuration) = TRUE - { - ECHO notice: [qt4-cfg] $(message) ; - } -} - -# Capture qmake output line by line -local rule read-output ( content ) -{ - local lines ; - local nl = " -" ; - local << = "([^$(nl)]*)[$(nl)](.*)" ; - local line+ = [ MATCH "$(<<)" : "$(content)" ] ; - while $(line+) - { - lines += $(line+[1]) ; - line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ; - } - return $(lines) ; -} - -# Capture Qt version from qmake -local rule check-version ( bin_prefix ) -{ - full-cmd = $(bin_prefix)"/qmake -v" ; - debug-message Running '$(full-cmd)' ; - local output = [ SHELL $(full-cmd) ] ; - for line in [ read-output $(output) ] - { - # Parse the output to get all the results. - if [ MATCH "QMake" : $(line) ] - { - # Skip first line of output - } - else - { - temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ; - } - } - return $(temp) ; -} - -# Validate the version string and extract the major/minor part we care about. 
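# For example, "4.6" yields 4 6, and "4.6.2" also yields 4 6 but additionally
# triggers the warning below about the extra version component.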
-# -local rule split-version ( version ) -{ - local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ; - if ! $(major-minor[2]) || $(major-minor[3]) - { - ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ; - - # Add a zero to account for the missing digit if necessary. - major-minor += 0 ; - } - - return $(major-minor[1]) $(major-minor[2]) ; -} - -# Initialize the QT support module. -# Parameters: -# - 'prefix' parameter tells where Qt is installed. -# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc) -# - 'full_inc' optional full path to Qt top-level include directory -# - 'full_lib' optional full path to Qt library directory -# - 'version' optional version of Qt, else autodetected via 'qmake -v' -# - 'condition' optional requirements -rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * ) -{ - project.push-current $(.project) ; - - debug-message "==== Configuring Qt ... ====" ; - for local v in version cmd-or-prefix includes libraries condition - { - if $($(v)) - { - debug-message " user-specified "$(v): '$($(v))' ; - } - } - - # Needed as default value - .prefix = $(prefix) ; - - # pre-build paths to detect reinitializations changes - local inc_prefix lib_prefix bin_prefix ; - if $(full_inc) - { - inc_prefix = $(full_inc) ; - } - else - { - inc_prefix = $(prefix)/include ; - } - if $(full_lib) - { - lib_prefix = $(full_lib) ; - } - else - { - lib_prefix = $(prefix)/lib ; - } - if $(full_bin) - { - bin_prefix = $(full_bin) ; - } - else - { - bin_prefix = $(prefix)/bin ; - } - - # Globally needed variables - .incprefix = $(inc_prefix) ; - .libprefix = $(lib_prefix) ; - .binprefix = $(bin_prefix) ; - - if ! $(.initialized) - { - # Make sure this is initialised only once - .initialized = true ; - - # Generates cpp files from header files using "moc" tool - generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ; - - # The OBJ result type is a fake, 'H' will be really produced. See - # comments on the generator class, defined below the 'init' function. - generators.register [ new uic-generator qt4.uic : UI : OBJ : - <allow>qt4 ] ; - - # The OBJ result type is a fake here too. - generators.register [ new moc-h-generator - qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ; - - generators.register [ new moc-inc-generator - qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ; - - # Generates .cpp files from .qrc files. - generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ; - - # dependency scanner for wrapped files. - type.set-scanner QRC : qrc-scanner ; - - # Save value of first occuring prefix - .PREFIX = $(prefix) ; - } - - if $(version) - { - major-minor = [ split-version $(version) ] ; - version = $(major-minor:J=.) ; - } - else - { - version = [ check-version $(bin_prefix) ] ; - if $(version) - { - version = $(version:J=.) ; - } - debug-message Detected version '$(version)' ; - } - - local target-requirements = $(condition) ; - - # Add the version, if any, to the target requirements. - if $(version) - { - if ! $(version) in [ feature.values qt ] - { - feature.extend qt : $(version) ; - } - target-requirements += <qt>$(version:E=default) ; - } - - local target-os = [ feature.get-values target-os : $(condition) ] ; - if ! 
$(target-os) - { - target-os ?= [ feature.defaults target-os ] ; - target-os = $(target-os:G=) ; - target-requirements += <target-os>$(target-os) ; - } - - # Build exact requirements for the tools - local tools-requirements = $(target-requirements:J=/) ; - - debug-message "Details of this Qt configuration:" ; - debug-message " prefix: " '$(prefix:E=<empty>)' ; - debug-message " binary path: " '$(bin_prefix:E=<empty>)' ; - debug-message " include path:" '$(inc_prefix:E=<empty>)' ; - debug-message " library path:" '$(lib_prefix:E=<empty>)' ; - debug-message " target requirements:" '$(target-requirements)' ; - debug-message " tool requirements: " '$(tools-requirements)' ; - - # setup the paths for the tools - toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; - toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; - toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ; - - # TODO: 2009-02-12: Better support for directories - # Most likely needed are separate getters for: include,libraries,binaries and sources. - toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ; - - # Test for a buildable Qt. - if [ glob $(.prefix)/Jamroot ] - { - .bjam-qt = true - - # this will declare QtCore (and qtmain on <target-os>windows) - add-shared-library QtCore ; - } - else - # Setup common pre-built Qt. - # Special setup for QtCore on which everything depends - { - local usage-requirements = - <include>$(.incprefix) - <library-path>$(.libprefix) - <dll-path>$(.libprefix) - <threading>multi - <allow>qt4 ; - - local suffix ; - - # Since Qt-4.2, debug versions on unix have to be built - # separately and therefore have no suffix. - .suffix_version = "" ; - .suffix_debug = "" ; - - # Control flag for auto-configuration of the debug libraries. - # This setup requires Qt 'configure -debug-and-release'. - # Only available on some platforms. - # ToDo: 2009-02-12: Maybe throw this away and - # require separate setup with <variant>debug as condition. - .have_separate_debug = FALSE ; - - # Setup other platforms - if $(target-os) in windows cygwin - { - .have_separate_debug = TRUE ; - - # On NT, the libs have "4" suffix, and "d" suffix in debug builds. - .suffix_version = "4" ; - .suffix_debug = "d" ; - - # On Windows we must link against the qtmain library - lib qtmain - : # sources - : # requirements - <name>qtmain$(.suffix_debug) - <variant>debug - $(target-requirements) - ; - - lib qtmain - : # sources - : # requirements - <name>qtmain - $(target-requirements) - ; - } - else if $(target-os) = darwin - { - # On MacOS X, both debug and release libraries are available. 
- .suffix_debug = "_debug" ; - - .have_separate_debug = TRUE ; - - alias qtmain ; - } - else - { - alias qtmain : : $(target-requirements) ; - } - - lib QtCore : qtmain - : # requirements - <name>QtCore$(.suffix_version) - $(target-requirements) - : # default-build - : # usage-requirements - <define>QT_CORE_LIB - <define>QT_NO_DEBUG - <include>$(.incprefix)/QtCore - $(usage-requirements) - ; - - if $(.have_separate_debug) = TRUE - { - debug-message Configure debug libraries with suffix '$(.suffix_debug)' ; - - lib QtCore : $(main) - : # requirements - <name>QtCore$(.suffix_debug)$(.suffix_version) - <variant>debug - $(target-requirements) - : # default-build - : # usage-requirements - <define>QT_CORE_LIB - <include>$(.incprefix)/QtCore - $(usage-requirements) - ; - } - } - - # Initialising the remaining libraries is canonical - # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include' - # 'include' only for non-canonical include paths. - add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ; - add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ; - add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ; - add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ; - - add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql - : QT_QT3SUPPORT_LIB QT3_SUPPORT - : <qt3support>on $(target-requirements) ; - - # Dummy target to enable "<qt3support>off" and - # "<library>/qt//Qt3Support" at the same time. This enables quick - # switching from one to the other for test/porting purposes. - alias Qt3Support : : <qt3support>off $(target-requirements) ; - - # OpenGl Support - add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ; - - # SVG-Support (Qt 4.1) - add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ; - - # Test-Support (Qt 4.1) - add-shared-library QtTest : QtCore : : $(target-requirements) ; - - # Qt designer library - add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ; - add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ; - - # Support for dynamic Widgets (Qt 4.1) - add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ; - - # DBus-Support (Qt 4.2) - add-shared-library QtDBus : QtXml : : $(target-requirements) ; - - # Script-Engine (Qt 4.3) - add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ; - - # Tools for the Script-Engine (Qt 4.5) - add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ; - - # WebKit (Qt 4.4) - add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ; - - # Phonon Multimedia (Qt 4.4) - add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ; - - # Multimedia engine (Qt 4.6) - add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ; - - # XmlPatterns-Engine (Qt 4.4) - add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ; - - # Help-Engine (Qt 4.4) - add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ; - add-shared-library QtCLucene : QCore QtSql QtXml : : $(target-requirements) ; - - # QML-Engine (Qt 4.7) - add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ; - - # AssistantClient Support - # Compat library removed in 4.7.0 - # Pre-4.4 help system, use QtHelp for new programs - if $(version) < "4.7" - { - 
add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ; - } - debug-message "==== Configured Qt-$(version) ====" ; - - project.pop-current ; -} - -rule initialized ( ) -{ - return $(.initialized) ; -} - - - -# This custom generator is needed because in QT4, UI files are translated only -# into H files, and no C++ files are created. Further, the H files need not be -# passed via MOC. The header is used only via inclusion. If we define a standard -# UI -> H generator, Boost.Build will run MOC on H, and then compile the -# resulting cpp. It will give a warning, since output from moc will be empty. -# -# This generator is declared with a UI -> OBJ signature, so it gets invoked when -# linking generator tries to convert sources to OBJ, but it produces target of -# type H. This is non-standard, but allowed. That header won't be mocced. -# -class uic-generator : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - if ! $(name) - { - name = [ $(sources[0]).name ] ; - name = $(name:B) ; - } - - local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ; - - # The 'ui_' prefix is to match qmake's default behavior. - local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ; - - local r = [ virtual-target.register $(target) ] ; - - # Since this generator will return a H target, the linking generator - # won't use it at all, and won't set any dependency on it. However, we - # need the target to be seen by bjam, so that dependency from sources to - # this generated header is detected -- if jam does not know about this - # target, it won't do anything. - DEPENDS all : [ $(r).actualize ] ; - - return $(r) ; - } -} - - -class moc-h-generator : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP - { - name = [ $(sources[0]).name ] ; - name = $(name:B) ; - - local a = [ new action $(sources[1]) : qt4.moc.inc : - $(property-set) ] ; - - local target = [ new file-target $(name) : MOC : $(project) : $(a) - ] ; - - local r = [ virtual-target.register $(target) ] ; - - # Since this generator will return a H target, the linking generator - # won't use it at all, and won't set any dependency on it. However, - # we need the target to be seen by bjam, so that dependency from - # sources to this generated header is detected -- if jam does not - # know about this target, it won't do anything. - DEPENDS all : [ $(r).actualize ] ; - - return $(r) ; - } - } -} - - -class moc-inc-generator : generator -{ - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H - { - name = [ $(sources[0]).name ] ; - name = $(name:B) ; - - local a = [ new action $(sources[1]) : qt4.moc.inc : - $(property-set) ] ; - - local target = [ new file-target moc_$(name) : CPP : $(project) : - $(a) ] ; - - # Since this generator will return a H target, the linking generator - # won't use it at all, and won't set any dependency on it. 
However, - # we need the target to be seen by bjam, so that dependency from - # sources to this generated header is detected -- if jam does not - # know about this target, it won't do anything. - DEPENDS all : [ $(target).actualize ] ; - - return [ virtual-target.register $(target) ] ; - } - } -} - - -# Query the installation directory. This is needed in at least two scenarios. -# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt -# plugins to the Qt-Tree. -# -rule directory -{ - return $(.PREFIX) ; -} - -# Add a shared Qt library. -rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? ) -{ - add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; -} - -# Add a static Qt library. -rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? ) -{ - add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; -} - -# Add a Qt library. -# Static libs are unversioned, whereas shared libs have the major number as suffix. -# Creates both release and debug versions on platforms where both are enabled by Qt configure. -# Flags: -# - lib-name Qt library Name -# - version Qt major number used as shared library suffix (QtCore4.so) -# - depends-on other Qt libraries -# - usage-defines those are set by qmake, so set them when using this library -# - requirements addional requirements -# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name). -rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? ) -{ - if $(.bjam-qt) - { - # Import Qt module - # Eveything will be setup there - alias $(lib-name) - : $(.prefix)//$(lib-name) - : - : - : <allow>qt4 ; - } - else - { - local real_include ; - real_include ?= $(include) ; - real_include ?= $(lib-name) ; - - lib $(lib-name) - : # sources - $(depends-on) - : # requirements - <name>$(lib-name)$(version) - $(requirements) - : # default-build - : # usage-requirements - <define>$(usage-defines) - <include>$(.incprefix)/$(real_include) - ; - - if $(.have_separate_debug) = TRUE - { - lib $(lib-name) - : # sources - $(depends-on) - : # requirements - <name>$(lib-name)$(.suffix_debug)$(version) - $(requirements) - <variant>debug - : # default-build - : # usage-requirements - <define>$(usage-defines) - <include>$(.incprefix)/$(real_include) - ; - } - } - - # Make library explicit so that a simple <use>qt4 will not bring in everything. - # And some components like QtDBus/Phonon may not be available on all platforms. - explicit $(lib-name) ; -} - -# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match. -# The exact match is the last one. - -# Get <include> and <defines> from current toolset. -flags qt4.moc INCLUDES <include> ; -flags qt4.moc DEFINES <define> ; - -# need a newline for expansion of DEFINES and INCLUDES in the response file. -.nl = " -" ; - -# Processes headers to create Qt MetaObject information. Qt4-moc has its -# c++-parser, so pass INCLUDES and DEFINES. -# We use response file with one INCLUDE/DEFINE per line -# -actions moc -{ - $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" -} - -# When moccing files for include only, we don't need -f, otherwise the generated -# code will include the .cpp and we'll get duplicated symbols. 
-# -actions moc.inc -{ - $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" -} - - -# Generates source files from resource files. -# -actions rcc -{ - $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<) -} - - -# Generates user-interface source from .ui files. -# -actions uic -{ - $(.BINPREFIX[-1])/uic $(>) -o $(<) -} - - -# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore -# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for -# detailed documentation of the Qt Resource System. -# -class qrc-scanner : common-scanner -{ - rule pattern ( ) - { - return "<file.*>(.*)</file>" ; - } -} - - -# Wrapped files are "included". -scanner.register qrc-scanner : include ; diff --git a/jam-files/boost-build/tools/quickbook-config.jam b/jam-files/boost-build/tools/quickbook-config.jam deleted file mode 100644 index e983a78a..00000000 --- a/jam-files/boost-build/tools/quickbook-config.jam +++ /dev/null @@ -1,44 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for BoostBook tools. To use, just import this module. - -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ - local boost-dir = ; - for local R in snapshot cvs 1.33.0 - { - boost-dir += [ W32_GETREG - "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)" - : "InstallRoot" ] ; - } - local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ; - quickbook-path = $(quickbook-path[1]) ; - - if $(quickbook-path) - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using quickbook ":" $(quickbook-path) ; - } - using quickbook : $(quickbook-path) ; - } -} -else -{ - local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ; - quickbook-path = $(quickbook-path[1]) ; - - if $(quickbook-path) - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using quickbook ":" $(quickbook-path) ; - } - using quickbook : $(quickbook-path) ; - } -} diff --git a/jam-files/boost-build/tools/quickbook.jam b/jam-files/boost-build/tools/quickbook.jam deleted file mode 100644 index 6de2d42f..00000000 --- a/jam-files/boost-build/tools/quickbook.jam +++ /dev/null @@ -1,361 +0,0 @@ -# -# Copyright (c) 2005 João Abecasis -# Copyright (c) 2005 Vladimir Prus -# Copyright (c) 2006 Rene Rivera -# -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) -# - -# This toolset defines a generator to translate QuickBook to BoostBook. It can -# be used to generate nice (!) user documentation in different formats -# (pdf/html/...), from a single text file with simple markup. -# -# The toolset defines the QUICKBOOK type (file extension 'qbk') and -# a QUICKBOOK to XML (BOOSTBOOK) generator. -# -# -# =========================================================================== -# Q & A -# =========================================================================== -# -# If you don't know what this is all about, some Q & A will hopefully get you -# up to speed with QuickBook and this toolset. -# -# -# What is QuickBook ? -# -# QuickBook is a WikiWiki style documentation tool geared towards C++ -# documentation using simple rules and markup for simple formatting tasks. -# QuickBook extends the WikiWiki concept. 
Like the WikiWiki, QuickBook -# documents are simple text files. A single QuickBook document can -# generate a fully linked set of nice HTML and PostScript/PDF documents -# complete with images and syntax-colorized source code. -# -# -# Where can I get QuickBook ? -# -# Quickbook can be found in Boost's repository, under the tools/quickbook -# directory it was added there on Jan 2005, some time after the release of -# Boost v1.32.0 and has been an integral part of the Boost distribution -# since v1.33. -# -# Here's a link to the SVN repository: -# https://svn.boost.org/svn/boost/trunk/tools/quickbook -# -# And to QuickBook's QuickBook-generated docs: -# http://www.boost.org/doc/libs/release/tools/quickbook/index.html -# -# -# How do I use QuickBook and this toolset in my projects ? -# -# The minimal example is: -# -# using boostbook ; -# import quickbook ; -# -# boostbook my_docs : my_docs_source.qbk ; -# -# where my_docs is a target name and my_docs_source.qbk is a QuickBook -# file. The documentation format to be generated is determined by the -# boostbook toolset. By default html documentation should be generated, -# but you should check BoostBook's docs to be sure. -# -# -# What do I need ? -# -# You should start by setting up the BoostBook toolset. Please refer to -# boostbook.jam and the BoostBook documentation for information on how to -# do this. -# -# A QuickBook executable is also needed. The toolset will generate this -# executable if it can find the QuickBook sources. The following -# directories will be searched: -# -# BOOST_ROOT/tools/quickbook/ -# BOOST_BUILD_PATH/../../quickbook/ -# -# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables) -# -# If QuickBook sources are not found the toolset will then try to use -# the shell command 'quickbook'. -# -# -# How do I provide a custom QuickBook executable ? -# -# You may put the following in your user-config.jam or site-config.jam: -# -# using quickbook : /path/to/quickbook ; -# -# or, if 'quickbook' can be found in your PATH, -# -# using quickbook : quickbook ; -# -# -# For convenience three alternatives are tried to get a QuickBook executable: -# -# 1. If the user points us to the a QuickBook executable, that is used. -# -# 2. Otherwise, we search for the QuickBook sources and compile QuickBook -# using the default toolset. -# -# 3. As a last resort, we rely on the shell for finding 'quickbook'. -# - -import boostbook ; -import "class" : new ; -import feature ; -import generators ; -import toolset ; -import type ; -import scanner ; -import project ; -import targets ; -import build-system ; -import path ; -import common ; -import errors ; - -# The one and only QUICKBOOK type! -type.register QUICKBOOK : qbk ; - -# <quickbook-binary> shell command to run QuickBook -# <quickbook-binary-dependencies> targets to build QuickBook from sources. -feature.feature <quickbook-binary> : : free ; -feature.feature <quickbook-binary-dependencies> : : free dependency ; -feature.feature <quickbook-define> : : free ; -feature.feature <quickbook-indent> : : free ; -feature.feature <quickbook-line-width> : : free ; - - -# quickbook-binary-generator handles generation of the QuickBook executable, by -# marking it as a dependency for QuickBook docs. -# -# If the user supplied the QuickBook command that will be used. -# -# Otherwise we search some sensible places for the QuickBook sources and compile -# from scratch using the default toolset. -# -# As a last resort we rely on the shell to find 'quickbook'. 
-# -class quickbook-binary-generator : generator -{ - import modules path targets quickbook ; - - rule run ( project name ? : property-set : sources * : multiple ? ) - { - quickbook.freeze-config ; - # QuickBook invocation command and dependencies. - local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ; - local quickbook-binary-dependencies ; - - if ! $(quickbook-binary) - { - # If the QuickBook source directory was found, mark its main target - # as a dependency for the current project. Otherwise, try to find - # 'quickbook' in user's PATH - local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ; - if $(quickbook-dir) - { - # Get the main-target in QuickBook directory. - local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ; - - # The first element are actual targets, the second are - # properties found in target-id. We do not care about these - # since we have passed the id ourselves. - quickbook-main-target = - [ $(quickbook-main-target[1]).main-target quickbook ] ; - - quickbook-binary-dependencies = - [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ; - - # Ignore usage-requirements returned as first element. - quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ; - - # Some toolsets generate extra targets (e.g. RSP). We must mark - # all targets as dependencies for the project, but we will only - # use the EXE target for quickbook-to-boostbook translation. - for local target in $(quickbook-binary-dependencies) - { - if [ $(target).type ] = EXE - { - quickbook-binary = - [ path.native - [ path.join - [ $(target).path ] - [ $(target).name ] - ] - ] ; - } - } - } - } - - # Add $(quickbook-binary-dependencies) as a dependency of the current - # project and set it as the <quickbook-binary> feature for the - # quickbook-to-boostbook rule, below. - property-set = [ $(property-set).add-raw - <dependency>$(quickbook-binary-dependencies) - <quickbook-binary>$(quickbook-binary) - <quickbook-binary-dependencies>$(quickbook-binary-dependencies) - ] ; - - return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ; - } -} - - -# Define a scanner for tracking QBK include dependencies. -# -class qbk-scanner : common-scanner -{ - rule pattern ( ) - { - return "\\[[ ]*include[ ]+([^]]+)\\]" - "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]" - "\\[[ ]*import[ ]+([^]]+)\\]" ; - } -} - - -scanner.register qbk-scanner : include ; - -type.set-scanner QUICKBOOK : qbk-scanner ; - - -# Initialization of toolset. -# -# Parameters: -# command ? -> path to QuickBook executable. -# -# When command is not supplied toolset will search for QuickBook directory and -# compile the executable from source. If that fails we still search the path for -# 'quickbook'. -# -rule init ( - command ? # path to the QuickBook executable. - ) -{ - if $(command) - { - if $(.config-frozen) - { - errors.user-error "quickbook: configuration cannot be changed after it has been used." ; - } - .command = $(command) ; - } -} - -rule freeze-config ( ) -{ - if ! $(.config-frozen) - { - .config-frozen = true ; - - # QuickBook invocation command and dependencies. - - .quickbook-binary = $(.command) ; - - if $(.quickbook-binary) - { - # Use user-supplied command. 
- .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ; - } - else - { - # Search for QuickBook sources in sensible places, like - # $(BOOST_ROOT)/tools/quickbook - # $(BOOST_BUILD_PATH)/../../quickbook - - # And build quickbook executable from sources. - - local boost-root = [ modules.peek : BOOST_ROOT ] ; - local boost-build-path = [ build-system.location ] ; - - if $(boost-root) - { - .quickbook-dir += [ path.join $(boost-root) tools ] ; - } - - if $(boost-build-path) - { - .quickbook-dir += $(boost-build-path)/../.. ; - } - - .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ; - - # If the QuickBook source directory was found, mark its main target - # as a dependency for the current project. Otherwise, try to find - # 'quickbook' in user's PATH - if $(.quickbook-dir) - { - .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ; - } - else - { - ECHO "QuickBook warning: The path to the quickbook executable was" ; - ECHO " not provided. Additionally, couldn't find QuickBook" ; - ECHO " sources searching in" ; - ECHO " * BOOST_ROOT/tools/quickbook" ; - ECHO " * BOOST_BUILD_PATH/../../quickbook" ; - ECHO " Will now try to find a precompiled executable by searching" ; - ECHO " the PATH for 'quickbook'." ; - ECHO " To disable this warning in the future, or to completely" ; - ECHO " avoid compilation of quickbook, you can explicitly set the" ; - ECHO " path to a quickbook executable command in user-config.jam" ; - ECHO " or site-config.jam with the call" ; - ECHO " using quickbook : /path/to/quickbook ;" ; - - # As a last resort, search for 'quickbook' command in path. Note - # that even if the 'quickbook' command is not found, - # get-invocation-command will still return 'quickbook' and might - # generate an error while generating the virtual-target. - - .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ; - } - } - } -} - - -generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ; - - -# <quickbook-binary> shell command to run QuickBook -# <quickbook-binary-dependencies> targets to build QuickBook from sources. -toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ; -toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ; -toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ; -toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ; -toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ; -toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ; - - -rule quickbook-to-boostbook ( target : source : properties * ) -{ - # Signal dependency of quickbook sources on <quickbook-binary-dependencies> - # upon invocation of quickbook-to-boostbook. - DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ; -} - - -actions quickbook-to-boostbook -{ - "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)" -} - - -# Declare a main target to convert a quickbook source into a boostbook XML file. 
-# -rule to-boostbook ( target-name : sources * : requirements * : default-build * ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new typed-target $(target-name) : $(project) : XML - : [ targets.main-target-sources $(sources) : $(target-name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; -} diff --git a/jam-files/boost-build/tools/rc.jam b/jam-files/boost-build/tools/rc.jam deleted file mode 100644 index 9964d339..00000000 --- a/jam-files/boost-build/tools/rc.jam +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. -# -# Copyright (c) 2006 Rene Rivera. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import type ; -import generators ; -import feature ; -import errors ; -import scanner ; -import toolset : flags ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ - .debug-configuration = true ; -} - -type.register RC : rc ; - -rule init ( ) -{ -} - -# Configures a new resource compilation command specific to a condition, -# usually a toolset selection condition. The possible options are: -# -# * <rc-type>(rc|windres) - Indicates the type of options the command -# accepts. -# -# Even though the arguments are all optional, only when a command, condition, -# and at minimum the rc-type option are given will the command be configured. -# This is so that callers don't have to check auto-configuration values -# before calling this. And still get the functionality of build failures when -# the resource compiler can't be found. -# -rule configure ( command ? : condition ? : options * ) -{ - local rc-type = [ feature.get-values <rc-type> : $(options) ] ; - - if $(command) && $(condition) && $(rc-type) - { - flags rc.compile.resource .RC $(condition) : $(command) ; - flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ; - flags rc.compile.resource DEFINES <define> ; - flags rc.compile.resource INCLUDES <include> ; - if $(.debug-configuration) - { - ECHO notice: using rc compiler :: $(condition) :: $(command) ; - } - } -} - -rule compile.resource ( target : sources * : properties * ) -{ - local rc-type = [ on $(target) return $(.RC_TYPE) ] ; - rc-type ?= null ; - compile.resource.$(rc-type) $(target) : $(sources[1]) ; -} - -actions compile.resource.rc -{ - "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)" -} - -actions compile.resource.windres -{ - "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)" -} - -actions quietly compile.resource.null -{ - as /dev/null -o "$(<)" -} - -# Since it's a common practice to write -# exe hello : hello.cpp hello.rc -# we change the name of object created from RC file, to -# avoid conflict with hello.cpp. -# The reason we generate OBJ and not RES, is that gcc does not -# seem to like RES files, but works OK with OBJ. 
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/ -# -# Using 'register-c-compiler' adds the build directory to INCLUDES -generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ; - -# Register scanner for resources -class res-scanner : scanner -{ - import regex virtual-target path scanner ; - - rule __init__ ( includes * ) - { - scanner.__init__ ; - - self.includes = $(includes) ; - } - - rule pattern ( ) - { - return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ; - } - - rule process ( target : matches * : binding ) - { - local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ; - local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ; - local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ; - - # Icons and other includes may referenced as - # - # IDR_MAINFRAME ICON "res\\icon.ico" - # - # so we have to replace double backslashes to single ones. - res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ; - - # CONSIDER: the new scoping rule seem to defeat "on target" variables. - local g = [ on $(target) return $(HDRGRIST) ] ; - local b = [ NORMALIZE_PATH $(binding:D) ] ; - - # Attach binding of including file to included targets. - # When target is directly created from virtual target - # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. - # We don't need this extra information for angle includes, - # since they should not depend on including file (we can't - # get literal "." in include path). - local g2 = $(g)"#"$(b) ; - - angle = $(angle:G=$(g)) ; - quoted = $(quoted:G=$(g2)) ; - res = $(res:G=$(g2)) ; - - local all = $(angle) $(quoted) ; - - INCLUDES $(target) : $(all) ; - DEPENDS $(target) : $(res) ; - NOCARE $(all) $(res) ; - SEARCH on $(angle) = $(self.includes:G=) ; - SEARCH on $(quoted) = $(b) $(self.includes:G=) ; - SEARCH on $(res) = $(b) $(self.includes:G=) ; - - # Just propagate current scanner to includes, in a hope - # that includes do not change scanners. - scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ; - } -} - -scanner.register res-scanner : include ; -type.set-scanner RC : res-scanner ; diff --git a/jam-files/boost-build/tools/rc.py b/jam-files/boost-build/tools/rc.py deleted file mode 100644 index 0b82d231..00000000 --- a/jam-files/boost-build/tools/rc.py +++ /dev/null @@ -1,189 +0,0 @@ -# Status: being ported by Steven Watanabe -# Base revision: 47077 -# -# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. -# -# Copyright (c) 2006 Rene Rivera. -# -# Copyright (c) 2008 Steven Watanabe -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. 
(See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -##import type ; -##import generators ; -##import feature ; -##import errors ; -##import scanner ; -##import toolset : flags ; - -from b2.build import type, toolset, generators, scanner, feature -from b2.tools import builtin -from b2.util import regex -from b2.build.toolset import flags -from b2.manager import get_manager - -__debug = None - -def debug(): - global __debug - if __debug is None: - __debug = "--debug-configuration" in bjam.variable("ARGV") - return __debug - -type.register('RC', ['rc']) - -def init(): - pass - -def configure (command = None, condition = None, options = None): - """ - Configures a new resource compilation command specific to a condition, - usually a toolset selection condition. The possible options are: - - * <rc-type>(rc|windres) - Indicates the type of options the command - accepts. - - Even though the arguments are all optional, only when a command, condition, - and at minimum the rc-type option are given will the command be configured. - This is so that callers don't have to check auto-configuration values - before calling this. And still get the functionality of build failures when - the resource compiler can't be found. - """ - rc_type = feature.get_values('<rc-type>', options) - if rc_type: - assert(len(rc_type) == 1) - rc_type = rc_type[0] - - if command and condition and rc_type: - flags('rc.compile.resource', '.RC', condition, command) - flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower()) - flags('rc.compile.resource', 'DEFINES', [], ['<define>']) - flags('rc.compile.resource', 'INCLUDES', [], ['<include>']) - if debug(): - print 'notice: using rc compiler ::', condition, '::', command - -engine = get_manager().engine() - -class RCAction: - """Class representing bjam action defined from Python. - The function must register the action to execute.""" - - def __init__(self, action_name, function): - self.action_name = action_name - self.function = function - - def __call__(self, targets, sources, property_set): - if self.function: - self.function(targets, sources, property_set) - -# FIXME: What is the proper way to dispatch actions? -def rc_register_action(action_name, function = None): - global engine - if engine.actions.has_key(action_name): - raise "Bjam action %s is already defined" % action_name - engine.actions[action_name] = RCAction(action_name, function) - -def rc_compile_resource(targets, sources, properties): - rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE') - global engine - engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties) - -rc_register_action('rc.compile.resource', rc_compile_resource) - - -engine.register_action( - 'rc.compile.resource.rc', - '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"') - -engine.register_action( - 'rc.compile.resource.windres', - '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"') - -# FIXME: this was originally declared quietly -engine.register_action( - 'compile.resource.null', - 'as /dev/null -o "$(<)"') - -# Since it's a common practice to write -# exe hello : hello.cpp hello.rc -# we change the name of object created from RC file, to -# avoid conflict with hello.cpp. -# The reason we generate OBJ and not RES, is that gcc does not -# seem to like RES files, but works OK with OBJ. 
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/ -# -# Using 'register-c-compiler' adds the build directory to INCLUDES -# FIXME: switch to generators -builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], []) - -__angle_include_re = "#include[ ]*<([^<]+)>" - -# Register scanner for resources -class ResScanner(scanner.Scanner): - - def __init__(self, includes): - scanner.__init__ ; - self.includes = includes - - def pattern(self): - return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ - "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ; - - def process(self, target, matches, binding): - - angle = regex.transform(matches, "#include[ ]*<([^<]+)>") - quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"") - res = regex.transform(matches, - "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ - "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4]) - - # Icons and other includes may referenced as - # - # IDR_MAINFRAME ICON "res\\icon.ico" - # - # so we have to replace double backslashes to single ones. - res = [ re.sub(r'\\\\', '/', match) for match in res ] - - # CONSIDER: the new scoping rule seem to defeat "on target" variables. - g = bjam.call('get-target-variable', target, 'HDRGRIST') - b = os.path.normalize_path(os.path.dirname(binding)) - - # Attach binding of including file to included targets. - # When target is directly created from virtual target - # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. - # We don't need this extra information for angle includes, - # since they should not depend on including file (we can't - # get literal "." in include path). - g2 = g + "#" + b - - g = "<" + g + ">" - g2 = "<" + g2 + ">" - angle = [g + x for x in angle] - quoted = [g2 + x for x in quoted] - res = [g2 + x for x in res] - - all = angle + quoted - - bjam.call('mark-included', target, all) - - engine = get_manager().engine() - - engine.add_dependency(target, res) - bjam.call('NOCARE', all + res) - engine.set_target_variable(angle, 'SEARCH', ungrist(self.includes)) - engine.set_target_variable(quoted, 'SEARCH', b + ungrist(self.includes)) - engine.set_target_variable(res, 'SEARCH', b + ungrist(self.includes)) ; - - # Just propagate current scanner to includes, in a hope - # that includes do not change scanners. - get_manager().scanners().propagate(self, angle + quoted) - -scanner.register(ResScanner, 'include') -type.set_scanner('RC', ResScanner) diff --git a/jam-files/boost-build/tools/stage.jam b/jam-files/boost-build/tools/stage.jam deleted file mode 100644 index 296e7558..00000000 --- a/jam-files/boost-build/tools/stage.jam +++ /dev/null @@ -1,524 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'install' rule, used to copy a set of targets to a -# single location. 
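A minimal usage sketch of that rule (the target, source and path names here are illustrative, not taken from this tree):

    install dist
        : hello                # targets to copy
        : <location>dist       # directory to copy them into
        ;

Adding <install-dependencies>on to the requirements also stages the targets' dependencies, and <install-type> restricts which target types get copied; both are handled by the 'targets-to-stage' logic further below.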
- -import targets ; -import "class" : new ; -import errors ; -import type ; -import generators ; -import feature ; -import project ; -import virtual-target ; -import path ; -import types/register ; - - -feature.feature <install-dependencies> : off on : incidental ; -feature.feature <install-type> : : free incidental ; -feature.feature <install-source-root> : : free path ; -feature.feature <so-version> : : free incidental ; - -# If 'on', version symlinks for shared libraries will not be created. Affects -# Unix builds only. -feature.feature <install-no-version-symlinks> : on : optional incidental ; - - -class install-target-class : basic-target -{ - import feature ; - import project ; - import type ; - import errors ; - import generators ; - import path ; - import stage ; - import "class" : new ; - import property ; - import property-set ; - - rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * ) - { - basic-target.__init__ $(name-and-dir) : $(project) : $(sources) : - $(requirements) : $(default-build) ; - } - - # If <location> is not set, sets it based on the project data. - # - rule update-location ( property-set ) - { - local loc = [ $(property-set).get <location> ] ; - if ! $(loc) - { - loc = [ path.root $(self.name) [ $(self.project).get location ] ] ; - property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ; - } - - return $(property-set) ; - } - - # Takes a target that is installed and a property set which is used when - # installing. - # - rule adjust-properties ( target : build-property-set ) - { - local ps-raw ; - local a = [ $(target).action ] ; - if $(a) - { - local ps = [ $(a).properties ] ; - ps-raw = [ $(ps).raw ] ; - - # Unless <hardcode-dll-paths>true is in properties, which can happen - # only if the user has explicitly requested it, nuke all <dll-path> - # properties. - if [ $(build-property-set).get <hardcode-dll-paths> ] != true - { - ps-raw = [ property.change $(ps-raw) : <dll-path> ] ; - } - - # If any <dll-path> properties were specified for installing, add - # them. - local l = [ $(build-property-set).get <dll-path> ] ; - ps-raw += $(l:G=<dll-path>) ; - - # Also copy <linkflags> feature from current build set, to be used - # for relinking. - local l = [ $(build-property-set).get <linkflags> ] ; - ps-raw += $(l:G=<linkflags>) ; - - # Remove the <tag> feature on original targets. - ps-raw = [ property.change $(ps-raw) : <tag> ] ; - - # And <location>. If stage target has another stage target in - # sources, then we shall get virtual targets with the <location> - # property set. - ps-raw = [ property.change $(ps-raw) : <location> ] ; - } - - local d = [ $(build-property-set).get <dependency> ] ; - ps-raw += $(d:G=<dependency>) ; - - local d = [ $(build-property-set).get <location> ] ; - ps-raw += $(d:G=<location>) ; - - local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ; - ps-raw += $(ns:G=<install-no-version-symlinks>) ; - - local d = [ $(build-property-set).get <install-source-root> ] ; - # Make the path absolute: we shall use it to compute relative paths and - # making the path absolute will help. 
- if $(d) - { - d = [ path.root $(d) [ path.pwd ] ] ; - ps-raw += $(d:G=<install-source-root>) ; - } - - if $(ps-raw) - { - return [ property-set.create $(ps-raw) ] ; - } - else - { - return [ property-set.empty ] ; - } - } - - rule construct ( name : source-targets * : property-set ) - { - source-targets = [ targets-to-stage $(source-targets) : - $(property-set) ] ; - - property-set = [ update-location $(property-set) ] ; - - local ename = [ $(property-set).get <name> ] ; - - if $(ename) && $(source-targets[2]) - { - errors.error "When <name> property is used in 'install', only one" - "source is allowed" ; - } - - local result ; - for local i in $(source-targets) - { - local staged-targets ; - - local new-properties = [ adjust-properties $(i) : - $(property-set) ] ; - - # See if something special should be done when staging this type. It - # is indicated by the presence of a special "INSTALLED_" type. - local t = [ $(i).type ] ; - if $(t) && [ type.registered INSTALLED_$(t) ] - { - if $(ename) - { - errors.error "In 'install': <name> property specified with target that requires relinking." ; - } - else - { - local targets = [ generators.construct $(self.project) - $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ; - staged-targets += $(targets[2-]) ; - } - } - else - { - staged-targets = [ stage.copy-file $(self.project) $(ename) : - $(i) : $(new-properties) ] ; - } - - if ! $(staged-targets) - { - errors.error "Unable to generate staged version of " [ $(source).str ] ; - } - - for t in $(staged-targets) - { - result += [ virtual-target.register $(t) ] ; - } - } - - return [ property-set.empty ] $(result) ; - } - - # Given the list of source targets explicitly passed to 'stage', returns the - # list of targets which must be staged. - # - rule targets-to-stage ( source-targets * : property-set ) - { - local result ; - - # Traverse the dependencies, if needed. - if [ $(property-set).get <install-dependencies> ] = "on" - { - source-targets = [ collect-targets $(source-targets) ] ; - } - - # Filter the target types, if needed. - local included-types = [ $(property-set).get <install-type> ] ; - for local r in $(source-targets) - { - local ty = [ $(r).type ] ; - if $(ty) - { - # Do not stage searched libs. - if $(ty) != SEARCHED_LIB - { - if $(included-types) - { - if [ include-type $(ty) : $(included-types) ] - { - result += $(r) ; - } - } - else - { - result += $(r) ; - } - } - } - else if ! $(included-types) - { - # Don't install typeless target if there is an explicit list of - # allowed types. - result += $(r) ; - } - } - - return $(result) ; - } - - # CONSIDER: figure out why we can not use virtual-target.traverse here. - # - rule collect-targets ( targets * ) - { - # Find subvariants - local s ; - for local t in $(targets) - { - s += [ $(t).creating-subvariant ] ; - } - s = [ sequence.unique $(s) ] ; - - local result = [ new set ] ; - $(result).add $(targets) ; - - for local i in $(s) - { - $(i).all-referenced-targets $(result) ; - } - local result2 ; - for local r in [ $(result).list ] - { - if $(r:G) != <use> - { - result2 += $(r:G=) ; - } - } - DELETE_MODULE $(result) ; - result = [ sequence.unique $(result2) ] ; - } - - # Returns true iff 'type' is subtype of some element of 'types-to-include'. - # - local rule include-type ( type : types-to-include * ) - { - local found ; - while $(types-to-include) && ! 
$(found) - { - if [ type.is-subtype $(type) $(types-to-include[1]) ] - { - found = true ; - } - types-to-include = $(types-to-include[2-]) ; - } - - return $(found) ; - } -} - - -# Creates a copy of target 'source'. The 'properties' object should have a -# <location> property which specifies where the target must be placed. -# -rule copy-file ( project name ? : source : properties ) -{ - name ?= [ $(source).name ] ; - local relative ; - - local new-a = [ new non-scanning-action $(source) : common.copy : - $(properties) ] ; - local source-root = [ $(properties).get <install-source-root> ] ; - if $(source-root) - { - # Get the real path of the target. We probably need to strip relative - # path from the target name at construction. - local path = [ $(source).path ] ; - path = [ path.root $(name:D) $(path) ] ; - # Make the path absolute. Otherwise, it would be hard to compute the - # relative path. The 'source-root' is already absolute, see the - # 'adjust-properties' method above. - path = [ path.root $(path) [ path.pwd ] ] ; - - relative = [ path.relative-to $(source-root) $(path) ] ; - } - - # Note: Using $(name:D=$(relative)) might be faster here, but then we would - # need to explicitly check that relative is not ".", otherwise we might get - # paths like '<prefix>/boost/.', try to create it and mkdir would obviously - # fail. - name = [ path.join $(relative) $(name:D=) ] ; - - return [ new file-target $(name) exact : [ $(source).type ] : $(project) : - $(new-a) ] ; -} - - -rule symlink ( name : project : source : properties ) -{ - local a = [ new action $(source) : symlink.ln : $(properties) ] ; - return [ new file-target $(name) exact : [ $(source).type ] : $(project) : - $(a) ] ; -} - - -rule relink-file ( project : source : property-set ) -{ - local action = [ $(source).action ] ; - local cloned-action = [ virtual-target.clone-action $(action) : $(project) : - "" : $(property-set) ] ; - return [ $(cloned-action).targets ] ; -} - - -# Declare installed version of the EXE type. Generator for this type will cause -# relinking to the new location. -type.register INSTALLED_EXE : : EXE ; - - -class installed-exe-generator : generator -{ - import type ; - import property-set ; - import modules ; - import stage ; - - rule __init__ ( ) - { - generator.__init__ install-exe : EXE : INSTALLED_EXE ; - } - - rule run ( project name ? : property-set : source : multiple ? ) - { - local need-relink ; - - if [ $(property-set).get <os> ] in NT CYGWIN || - [ $(property-set).get <target-os> ] in windows cygwin - { - } - else - { - # See if the dll-path properties are not changed during - # install. If so, copy, don't relink. - local a = [ $(source).action ] ; - local p = [ $(a).properties ] ; - local original = [ $(p).get <dll-path> ] ; - local current = [ $(property-set).get <dll-path> ] ; - - if $(current) != $(original) - { - need-relink = true ; - } - } - - - if $(need-relink) - { - return [ stage.relink-file $(project) - : $(source) : $(property-set) ] ; - } - else - { - return [ stage.copy-file $(project) - : $(source) : $(property-set) ] ; - } - } -} - - -generators.register [ new installed-exe-generator ] ; - - -# Installing a shared link on Unix might cause a creation of versioned symbolic -# links. 
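For example (with an illustrative library name): installing a shared library built as libfoo.so.1.2.3 also creates the libfoo.so, libfoo.so.1 and libfoo.so.1.2 symlinks next to the copied file, unless <install-no-version-symlinks>on is present in the properties. The generator below implements exactly this via the NNN.XXX.YYY.ZZZ name match.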
-type.register INSTALLED_SHARED_LIB : : SHARED_LIB ; - - -class installed-shared-lib-generator : generator -{ - import type ; - import property-set ; - import modules ; - import stage ; - - rule __init__ ( ) - { - generator.__init__ install-shared-lib : SHARED_LIB - : INSTALLED_SHARED_LIB ; - } - - rule run ( project name ? : property-set : source : multiple ? ) - { - if [ $(property-set).get <os> ] in NT CYGWIN || - [ $(property-set).get <target-os> ] in windows cygwin - { - local copied = [ stage.copy-file $(project) : $(source) : - $(property-set) ] ; - return [ virtual-target.register $(copied) ] ; - } - else - { - local a = [ $(source).action ] ; - local copied ; - if ! $(a) - { - # Non-derived file, just copy. - copied = [ stage.copy-file $(project) : $(source) : - $(property-set) ] ; - } - else - { - local cp = [ $(a).properties ] ; - local current-dll-path = [ $(cp).get <dll-path> ] ; - local new-dll-path = [ $(property-set).get <dll-path> ] ; - - if $(current-dll-path) != $(new-dll-path) - { - # Rpath changed, need to relink. - copied = [ stage.relink-file $(project) : $(source) : - $(property-set) ] ; - } - else - { - copied = [ stage.copy-file $(project) : $(source) : - $(property-set) ] ; - } - } - - copied = [ virtual-target.register $(copied) ] ; - - local result = $(copied) ; - # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and - # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY - # symbolic links. - local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$ - : [ $(copied).name ] ] ; - if $(m) - { - # Symlink without version at all is used to make - # -lsome_library work. - result += [ stage.symlink $(m[1]) : $(project) : $(copied) : - $(property-set) ] ; - - # Symlinks of some libfoo.N and libfoo.N.M are used so that - # library can found at runtime, if libfoo.N.M.X has soname of - # libfoo.N. That happens when the library makes some binary - # compatibility guarantees. If not, it is possible to skip those - # symlinks. - local suppress = - [ $(property-set).get <install-no-version-symlinks> ] ; - - if $(suppress) != "on" - { - result += [ stage.symlink $(m[1]).$(m[2]) : $(project) - : $(copied) : $(property-set) ] ; - result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project) - : $(copied) : $(property-set) ] ; - } - } - - return $(result) ; - } - } -} - -generators.register [ new installed-shared-lib-generator ] ; - - -# Main target rule for 'install'. -# -rule install ( name : sources * : requirements * : default-build * ) -{ - local project = [ project.current ] ; - - # Unless the user has explicitly asked us to hardcode dll paths, add - # <hardcode-dll-paths>false in requirements, to override default value. - if ! 
<hardcode-dll-paths>true in $(requirements) - { - requirements += <hardcode-dll-paths>false ; - } - - if <tag> in $(requirements:G) - { - errors.user-error - "The <tag> property is not allowed for the 'install' rule" ; - } - - targets.main-target-alternative - [ new install-target-class $(name) : $(project) - : [ targets.main-target-sources $(sources) : $(name) ] - : [ targets.main-target-requirements $(requirements) : $(project) ] - : [ targets.main-target-default-build $(default-build) : $(project) ] - ] ; -} - - -IMPORT $(__name__) : install : : install ; -IMPORT $(__name__) : install : : stage ; diff --git a/jam-files/boost-build/tools/stage.py b/jam-files/boost-build/tools/stage.py deleted file mode 100644 index 25eccbe5..00000000 --- a/jam-files/boost-build/tools/stage.py +++ /dev/null @@ -1,350 +0,0 @@ -# Status: ported. -# Base revision 64444. -# -# Copyright 2003 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'install' rule, used to copy a set of targets to a -# single location. - -import b2.build.feature as feature -import b2.build.targets as targets -import b2.build.property as property -import b2.build.property_set as property_set -import b2.build.generators as generators -import b2.build.virtual_target as virtual_target - -from b2.manager import get_manager -from b2.util.sequence import unique -from b2.util import bjam_signature - -import b2.build.type - -import os.path -import re -import types - -feature.feature('install-dependencies', ['off', 'on'], ['incidental']) -feature.feature('install-type', [], ['free', 'incidental']) -feature.feature('install-source-root', [], ['free', 'path']) -feature.feature('so-version', [], ['free', 'incidental']) - -# If 'on', version symlinks for shared libraries will not be created. Affects -# Unix builds only. -feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental']) - -class InstallTargetClass(targets.BasicTarget): - - def update_location(self, ps): - """If <location> is not set, sets it based on the project data.""" - - loc = ps.get('location') - if not loc: - loc = os.path.join(self.project().get('location'), self.name()) - ps = ps.add_raw(["<location>" + loc]) - - return ps - - def adjust_properties(self, target, build_ps): - a = target.action() - properties = [] - if a: - ps = a.properties() - properties = ps.all() - - # Unless <hardcode-dll-paths>true is in properties, which can happen - # only if the user has explicitly requested it, nuke all <dll-path> - # properties. - - if build_ps.get('hardcode-dll-paths') != ['true']: - properties = [p for p in properties if p.feature().name() != 'dll-path'] - - # If any <dll-path> properties were specified for installing, add - # them. - properties.extend(build_ps.get_properties('dll-path')) - - # Also copy <linkflags> feature from current build set, to be used - # for relinking. - properties.extend(build_ps.get_properties('linkflags')) - - # Remove the <tag> feature on original targets. - # And <location>. If stage target has another stage target in - # sources, then we shall get virtual targets with the <location> - # property set. 
- properties = [p for p in properties - if not p.feature().name() in ['tag', 'location']] - - properties.extend(build_ps.get_properties('dependency')) - - properties.extend(build_ps.get_properties('location')) - - - properties.extend(build_ps.get_properties('install-no-version-symlinks')) - - d = build_ps.get_properties('install-source-root') - - # Make the path absolute: we shall use it to compute relative paths and - # making the path absolute will help. - if d: - p = d[0] - properties.append(property.Property(p.feature(), os.path.abspath(p.value()))) - - return property_set.create(properties) - - - def construct(self, name, source_targets, ps): - - source_targets = self.targets_to_stage(source_targets, ps) - ps = self.update_location(ps) - - ename = ps.get('name') - if ename: - ename = ename[0] - if ename and len(source_targets) > 1: - get_manager().errors()("When <name> property is used in 'install', only one source is allowed") - - result = [] - - for i in source_targets: - - staged_targets = [] - new_ps = self.adjust_properties(i, ps) - - # See if something special should be done when staging this type. It - # is indicated by the presence of a special "INSTALLED_" type. - t = i.type() - if t and b2.build.type.registered("INSTALLED_" + t): - - if ename: - get_manager().errors()("In 'install': <name> property specified with target that requires relinking.") - else: - (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t, - new_ps, [i]) - assert isinstance(r, property_set.PropertySet) - staged_targets.extend(targets) - - else: - staged_targets.append(copy_file(self.project(), ename, i, new_ps)) - - if not staged_targets: - get_manager().errors()("Unable to generate staged version of " + i) - - result.extend(get_manager().virtual_targets().register(t) for t in staged_targets) - - return (property_set.empty(), result) - - def targets_to_stage(self, source_targets, ps): - """Given the list of source targets explicitly passed to 'stage', returns the - list of targets which must be staged.""" - - result = [] - - # Traverse the dependencies, if needed. - if ps.get('install-dependencies') == ['on']: - source_targets = self.collect_targets(source_targets) - - # Filter the target types, if needed. - included_types = ps.get('install-type') - for r in source_targets: - ty = r.type() - if ty: - # Do not stage searched libs. - if ty != "SEARCHED_LIB": - if included_types: - if self.include_type(ty, included_types): - result.append(r) - else: - result.append(r) - elif not included_types: - # Don't install typeless target if there is an explicit list of - # allowed types. - result.append(r) - - return result - - # CONSIDER: figure out why we can not use virtual-target.traverse here. - # - def collect_targets(self, targets): - - s = [t.creating_subvariant() for t in targets] - s = unique(s) - - result = set(targets) - for i in s: - i.all_referenced_targets(result) - - result2 = [] - for r in result: - if isinstance(r, property.Property): - - if r.feature().name() != 'use': - result2.append(r.value()) - else: - result2.append(r) - result2 = unique(result2) - return result2 - - # Returns true iff 'type' is subtype of some element of 'types-to-include'. - # - def include_type(self, type, types_to_include): - return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include) - -# Creates a copy of target 'source'. The 'properties' object should have a -# <location> property which specifies where the target must be placed. 
-# -def copy_file(project, name, source, ps): - - if not name: - name = source.name() - - relative = "" - - new_a = virtual_target.NonScanningAction([source], "common.copy", ps) - source_root = ps.get('install-source-root') - if source_root: - source_root = source_root[0] - # Get the real path of the target. We probably need to strip relative - # path from the target name at construction. - path = os.path.join(source.path(), os.path.dirname(name)) - # Make the path absolute. Otherwise, it would be hard to compute the - # relative path. The 'source-root' is already absolute, see the - # 'adjust-properties' method above. - path = os.path.abspath(path) - - relative = os.path.relpath(path, source_root) - - name = os.path.join(relative, os.path.basename(name)) - return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True) - -def symlink(name, project, source, ps): - a = virtual_target.Action([source], "symlink.ln", ps) - return virtual_target.FileTarget(name, source.type(), project, a, exact=True) - -def relink_file(project, source, ps): - action = source.action() - cloned_action = virtual_target.clone_action(action, project, "", ps) - targets = cloned_action.targets() - # We relink only on Unix, where exe or shared lib is always a single file. - assert len(targets) == 1 - return targets[0] - - -# Declare installed version of the EXE type. Generator for this type will cause -# relinking to the new location. -b2.build.type.register('INSTALLED_EXE', [], 'EXE') - -class InstalledExeGenerator(generators.Generator): - - def __init__(self): - generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE']) - - def run(self, project, name, ps, source): - - need_relink = False; - - if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']: - # Never relink - pass - else: - # See if the dll-path properties are not changed during - # install. If so, copy, don't relink. - need_relink = ps.get('dll-path') != source[0].action().properties().get('dll-path') - - if need_relink: - return [relink_file(project, source, ps)] - else: - return [copy_file(project, None, source[0], ps)] - -generators.register(InstalledExeGenerator()) - - -# Installing a shared link on Unix might cause a creation of versioned symbolic -# links. -b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB') - -class InstalledSharedLibGenerator(generators.Generator): - - def __init__(self): - generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB']) - - def run(self, project, name, ps, source): - - source = source[0] - if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']: - copied = copy_file(project, None, source, ps) - return [get_manager().virtual_targets().register(copied)] - else: - a = source.action() - if not a: - # Non-derived file, just copy. - copied = copy_file(project, source, ps) - else: - - need_relink = ps.get('dll-path') != source.action().properties().get('dll-path') - - if need_relink: - # Rpath changed, need to relink. - copied = relink_file(project, source, ps) - else: - copied = copy_file(project, None, source, ps) - - result = [get_manager().virtual_targets().register(copied)] - # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and - # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY - # symbolic links. 
- m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$", - copied.name()); - if m: - # Symlink without version at all is used to make - # -lsome_library work. - result.append(symlink(m.group(1), project, copied, ps)) - - # Symlinks of some libfoo.N and libfoo.N.M are used so that - # library can found at runtime, if libfoo.N.M.X has soname of - # libfoo.N. That happens when the library makes some binary - # compatibility guarantees. If not, it is possible to skip those - # symlinks. - if ps.get('install-no-version-symlinks') != ['on']: - - result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps)) - result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3), - project, copied, ps)) - - return result - -generators.register(InstalledSharedLibGenerator()) - - -# Main target rule for 'install'. -# -@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"], - ["default_build", "*"], ["usage_requirements", "*"])) -def install(name, sources, requirements=[], default_build=[], usage_requirements=[]): - - requirements = requirements[:] - # Unless the user has explicitly asked us to hardcode dll paths, add - # <hardcode-dll-paths>false in requirements, to override default value. - if not '<hardcode-dll-paths>true' in requirements: - requirements.append('<hardcode-dll-paths>false') - - if any(r.startswith('<tag>') for r in requirements): - get_manager().errors()("The <tag> property is not allowed for the 'install' rule") - - from b2.manager import get_manager - t = get_manager().targets() - - project = get_manager().projects().current() - - return t.main_target_alternative( - InstallTargetClass(name, project, - t.main_target_sources(sources, name), - t.main_target_requirements(requirements, project), - t.main_target_default_build(default_build, project), - t.main_target_usage_requirements(usage_requirements, project))) - -get_manager().projects().add_rule("install", install) -get_manager().projects().add_rule("stage", install) - diff --git a/jam-files/boost-build/tools/stlport.jam b/jam-files/boost-build/tools/stlport.jam deleted file mode 100644 index 62eebda5..00000000 --- a/jam-files/boost-build/tools/stlport.jam +++ /dev/null @@ -1,303 +0,0 @@ -# Copyright Gennadiy Rozental -# Copyright 2006 Rene Rivera -# Copyright 2003, 2004, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# The STLPort is usable by means of 'stdlib' feature. When -# stdlib=stlport is specified, default version of STLPort will be used, -# while stdlib=stlport-4.5 will use specific version. -# The subfeature value 'hostios' means to use host compiler's iostreams. -# -# The specific version of stlport is selected by features: -# The <runtime-link> feature selects between static and shared library -# The <runtime-debugging>on selects STLPort with debug symbols -# and stl debugging. -# There's no way to use STLPort with debug symbols but without -# stl debugging. - -# TODO: must implement selection of different STLPort installations based -# on used toolset. 
-# Also, finish various flags: -# -# This is copied from V1 toolset, "+" means "implemented" -#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ; -#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ; -# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ; -# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ; -# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ; -# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ; -#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ; -#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ; - - -import feature : feature subfeature ; -import project ; -import "class" : new ; -import targets ; -import property-set ; -import common ; -import type ; - -# Make this module into a project. -project.initialize $(__name__) ; -project stlport ; - -# The problem: how to request to use host compiler's iostreams? -# -# Solution 1: Global 'stlport-iostream' feature. -# That's ugly. Subfeature make more sense for stlport-specific thing. -# Solution 2: Use subfeature with two values, one of which ("use STLPort iostream") -# is default. -# The problem is that such subfeature will appear in target paths, and that's ugly -# Solution 3: Use optional subfeature with only one value. - -feature.extend stdlib : stlport ; -feature.compose <stdlib>stlport : <library>/stlport//stlport ; - -# STLport iostreams or native iostreams -subfeature stdlib stlport : iostream : hostios : optional propagated ; - -# STLport extensions -subfeature stdlib stlport : extensions : noext : optional propagated ; - -# STLport anachronisms -- NOT YET SUPPORTED -# subfeature stdlib stlport : anachronisms : on off ; - -# STLport debug allocation -- NOT YET SUPPORTED -#subfeature stdlib stlport : debug-alloc : off on ; - -# Declare a special target class to handle the creation of search-lib-target -# instances for STLport. We need a special class, because otherwise we'll have -# - declare prebuilt targets for all possible toolsets. And by the time 'init' -# is called we don't even know the list of toolsets that are registered -# - when host iostreams are used, we really should produce nothing. It would -# be hard/impossible to achieve this using prebuilt targets. - -class stlport-target-class : basic-target -{ - import feature project type errors generators ; - import set : difference ; - - rule __init__ ( project : headers ? : libraries * : version ? ) - { - basic-target.__init__ stlport : $(project) ; - self.headers = $(headers) ; - self.libraries = $(libraries) ; - self.version = $(version) ; - self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ; - - local requirements ; - requirements += <stdlib-stlport:version>$(self.version) ; - self.requirements = [ property-set.create $(requirements) ] ; - } - - rule generate ( property-set ) - { - # Since this target is built with <stdlib>stlport, it will also - # have <library>/stlport//stlport in requirements, which will - # cause a loop in main target references. Remove that property - # manually. 
- - property-set = [ property-set.create - [ difference - [ $(property-set).raw ] : - <library>/stlport//stlport - <stdlib>stlport - ] - ] ; - return [ basic-target.generate $(property-set) ] ; - } - - rule construct ( name : source-targets * : property-set ) - { - # Deduce the name of stlport library, based on toolset and - # debug setting. - local raw = [ $(property-set).raw ] ; - local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ; - local toolset = [ feature.get-values <toolset> : $(raw) ] ; - - if $(self.version.5) - { - # Version 5.x - - # STLport host IO streams no longer supported. So we always - # need libraries. - - # name: stlport(stl)?[dg]?(_static)?.M.R - local name = stlport ; - if [ feature.get-values <runtime-debugging> : $(raw) ] = "on" - { - name += stl ; - switch $(toolset) - { - case gcc* : name += g ; - case darwin* : name += g ; - case * : name += d ; - } - } - - if [ feature.get-values <runtime-link> : $(raw) ] = "static" - { - name += _static ; - } - - # Starting with version 5.2.0, the STLport static libraries no longer - # include a version number in their name - local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ; - if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static" - { - name += .$(self.version.5) ; - } - - name = $(name:J=) ; - - if [ feature.get-values <install-dependencies> : $(raw) ] = "on" - { - #~ Allow explicitly asking to install the STLport lib by - #~ refering to it directly: /stlport//stlport/<install-dependencies>on - #~ This allows for install packaging of all libs one might need for - #~ a standalone distribution. - import path : make : path-make ; - local runtime-link - = [ feature.get-values <runtime-link> : $(raw) ] ; - local lib-file.props - = [ property-set.create $(raw) <link>$(runtime-link) ] ; - local lib-file.prefix - = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ; - local lib-file.suffix - = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ; - lib-file.prefix - ?= "" "lib" ; - lib-file.suffix - ?= "" ; - local lib-file - = [ GLOB $(self.libraries) [ modules.peek : PATH ] : - $(lib-file.prefix)$(name).$(lib-file.suffix) ] ; - lib-file - = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ; - lib-file - = [ $(lib-file).generate "" ] ; - local lib-file.requirements - = [ targets.main-target-requirements - [ $(lib-file.props).raw ] <file>$(lib-file[-1]) - : $(self.project) ] ; - return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ; - } - else - { - #~ Otherwise, it's just a regular usage of the library. - return [ generators.construct - $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ; - } - } - else if ! $(hostios) && $(toolset) != msvc - { - # We don't need libraries if host istreams are used. For - # msvc, automatic library selection will be used. - - # name: stlport_<toolset>(_stldebug)? 
- local name = stlport ; - name = $(name)_$(toolset) ; - if [ feature.get-values <runtime-debugging> : $(raw) ] = "on" - { - name = $(name)_stldebug ; - } - - return [ generators.construct - $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ; - } - else - { - return [ property-set.empty ] ; - } - } - - rule compute-usage-requirements ( subvariant ) - { - local usage-requirements = - <include>$(self.headers) - <dll-path>$(self.libraries) - <library-path>$(self.libraries) - ; - - local rproperties = [ $(subvariant).build-properties ] ; - # CONSIDER: should this "if" sequence be replaced with - # some use of 'property-map' class? - if [ $(rproperties).get <runtime-debugging> ] = "on" - { - usage-requirements += - <define>_STLP_DEBUG=1 - <define>_STLP_DEBUG_UNINITIALIZED=1 ; - } - if [ $(rproperties).get <runtime-link> ] = "shared" - { - usage-requirements += - <define>_STLP_USE_DYNAMIC_LIB=1 ; - } - if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext - { - usage-requirements += - <define>_STLP_NO_EXTENSIONS=1 ; - } - if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios - { - usage-requirements += - <define>_STLP_NO_OWN_IOSTREAMS=1 - <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ; - } - if $(self.version.5) - { - # Version 5.x - if [ $(rproperties).get <threading> ] = "single" - { - # Since STLport5 doesn't normally support single-thread - # we force STLport5 into the multi-thread mode. Hence - # getting what other libs provide of single-thread code - # linking against a multi-thread lib. - usage-requirements += - <define>_STLP_THREADS=1 ; - } - } - - return [ property-set.create $(usage-requirements) ] ; - } -} - -rule stlport-target ( headers ? : libraries * : version ? ) -{ - local project = [ project.current ] ; - - targets.main-target-alternative - [ new stlport-target-class $(project) : $(headers) : $(libraries) - : $(version) - ] ; -} - -local .version-subfeature-defined ; - -# Initialize stlport support. -rule init ( - version ? : - headers : # Location of header files - libraries * # Location of libraries, lib and bin subdirs of STLport. - ) -{ - # FIXME: need to use common.check-init-parameters here. - # At the moment, that rule always tries to define subfeature - # of the 'toolset' feature, while we need to define subfeature - # of <stdlib>stlport, so tweaks to check-init-parameters are needed. - if $(version) - { - if ! $(.version-subfeature-defined) - { - feature.subfeature stdlib stlport : version : : propagated ; - .version-subfeature-defined = true ; - } - feature.extend-subfeature stdlib stlport : version : $(version) ; - } - - # Declare the main target for this STLPort version. - stlport-target $(headers) : $(libraries) : $(version) ; -} - diff --git a/jam-files/boost-build/tools/sun.jam b/jam-files/boost-build/tools/sun.jam deleted file mode 100644 index 0ca927d3..00000000 --- a/jam-files/boost-build/tools/sun.jam +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (C) Christopher Currie 2003. Permission to copy, use, -# modify, sell and distribute this software is granted provided this -# copyright notice appears in all copies. This software is provided -# "as is" without express or implied warranty, and with no claim as -# to its suitability for any purpose. 
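
As a side note on the stlport.jam construct rule shown above: for STLport 5.x its library-name computation amounts to the pattern stlport(stl)?(g|d)?(_static)?(.M.R)?. A hypothetical Python rendering of that decision tree follows; the function and parameter names are illustrative, not Boost.Build API, and only the naming logic (not the <install-dependencies> branch) is covered:

def stlport5_lib_name(version, toolset, runtime_debugging, runtime_link):
    # 'version' is the major.minor part (self.version.5 above), e.g. "5.2".
    name = "stlport"
    if runtime_debugging == "on":
        # gcc and darwin toolsets use the 'g' debug suffix, others use 'd'.
        name += "stlg" if toolset.startswith(("gcc", "darwin")) else "stld"
    if runtime_link == "static":
        name += "_static"
    # From 5.2.0 on, static STLport libraries carry no version suffix.
    pre_5_2 = version.startswith(("5.0", "5.1"))
    if pre_5_2 or runtime_link != "static":
        name += "." + version
    return name

# e.g. stlport5_lib_name("5.2", "gcc", "on", "shared") == "stlportstlg.5.2"
#      stlport5_lib_name("5.2", "msvc", "off", "static") == "stlport_static"
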
- -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature ; -import type ; -import common ; - -feature.extend toolset : sun ; -toolset.inherit sun : unix ; -generators.override sun.prebuilt : builtin.lib-generator ; -generators.override sun.prebuilt : builtin.prebuilt ; -generators.override sun.searched-lib-generator : searched-lib-generator ; - -feature.extend stdlib : sun-stlport ; -feature.compose <stdlib>sun-stlport - : <cxxflags>-library=stlport4 <linkflags>-library=stlport4 - ; - -rule init ( version ? : command * : options * ) -{ - local condition = [ - common.check-init-parameters sun : version $(version) ] ; - - command = [ common.get-invocation-command sun : CC - : $(command) : "/opt/SUNWspro/bin" ] ; - - # Even if the real compiler is not found, put CC to - # command line so that user see command line that would have being executed. - command ?= CC ; - - common.handle-options sun : $(condition) : $(command) : $(options) ; - - command_c = $(command[1--2]) $(command[-1]:B=cc) ; - - toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ; -} - -# Declare generators -generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ; -generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ; - -# Declare flags and actions for compilation -flags sun.compile OPTIONS <debug-symbols>on : -g ; -flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ; -flags sun.compile OPTIONS <optimization>speed : -xO4 ; -flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ; -flags sun.compile OPTIONS <threading>multi : -mt ; -flags sun.compile OPTIONS <warnings>off : -erroff ; -flags sun.compile OPTIONS <warnings>on : -erroff=%none ; -flags sun.compile OPTIONS <warnings>all : -erroff=%none ; -flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ; - -flags sun.compile.c++ OPTIONS <inlining>off : +d ; - -# The -m32 and -m64 options are supported starting -# with Sun Studio 12. On earlier compilers, the -# 'address-model' feature is not supported and should not -# be used. Instead, use -xarch=generic64 command line -# option. -# See http://svn.boost.org/trac/boost/ticket/1186 -# for details. -flags sun OPTIONS <address-model>32 : -m32 ; -flags sun OPTIONS <address-model>64 : -m64 ; -# On sparc, there's a difference between -Kpic -# and -KPIC. The first is slightly more efficient, -# but has the limits on the size of GOT table. -# For minimal fuss on user side, we use -KPIC here. -# See http://svn.boost.org/trac/boost/ticket/1186#comment:6 -# for detailed explanation. 
-flags sun OPTIONS <link>shared : -KPIC ; - -flags sun.compile OPTIONS <cflags> ; -flags sun.compile.c++ OPTIONS <cxxflags> ; -flags sun.compile DEFINES <define> ; -flags sun.compile INCLUDES <include> ; - -actions compile.c -{ - "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags sun.link OPTIONS <debug-symbols>on : -g ; -# Strip the binary when no debugging is needed -flags sun.link OPTIONS <debug-symbols>off : -s ; -flags sun.link OPTIONS <profiling>on : -xprofile=tcov ; -flags sun.link OPTIONS <threading>multi : -mt ; -flags sun.link OPTIONS <linkflags> ; -flags sun.link LINKPATH <library-path> ; -flags sun.link FINDLIBS-ST <find-static-library> ; -flags sun.link FINDLIBS-SA <find-shared-library> ; -flags sun.link LIBRARIES <library-file> ; -flags sun.link LINK-RUNTIME <runtime-link>static : static ; -flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags sun.link RPATH <dll-path> ; -# On gcc, there are separate options for dll path at runtime and -# link time. On Solaris, there's only one: -R, so we have to use -# it, even though it's bad idea. -flags sun.link RPATH <xdll-path> ; - -# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.) -flags sun.link FINDLIBS-SA : rt ; - -rule link ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -actions link bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ - SPACE on $(targets) = " " ; -} - -actions link.dll bind LIBRARIES -{ - "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -# Declare action for creating static libraries -actions piecemeal archive -{ - "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)" -} - diff --git a/jam-files/boost-build/tools/symlink.jam b/jam-files/boost-build/tools/symlink.jam deleted file mode 100644 index b33e8260..00000000 --- a/jam-files/boost-build/tools/symlink.jam +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003 Rene Rivera -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines the "symlink" special target. 'symlink' targets make symbolic links -# to the sources. - -import targets modules path class os feature project property-set ; - -.count = 0 ; - -feature.feature symlink-location : project-relative build-relative : incidental ; - -# The class representing "symlink" targets. -# -class symlink-targets : basic-target -{ - import numbers modules class property project path ; - - rule __init__ ( - project - : targets * - : sources * - ) - { - # Generate a fake name for now. Need unnamed targets eventually. - local c = [ modules.peek symlink : .count ] ; - modules.poke symlink : .count : [ numbers.increment $(c) ] ; - local fake-name = symlink#$(c) ; - - basic-target.__init__ $(fake-name) : $(project) : $(sources) ; - - # Remember the targets to map the sources onto. Pad or truncate - # to fit the sources given. 
- self.targets = ; - for local source in $(sources) - { - if $(targets) - { - self.targets += $(targets[1]) ; - targets = $(targets[2-]) ; - } - else - { - self.targets += $(source) ; - } - } - - # The virtual targets corresponding to the given targets. - self.virtual-targets = ; - } - - rule construct ( name : source-targets * : property-set ) - { - local i = 1 ; - for local t in $(source-targets) - { - local s = $(self.targets[$(i)]) ; - local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ; - local vt = [ class.new file-target $(s:D=) - : [ $(t).type ] : $(self.project) : $(a) ] ; - - # Place the symlink in the directory relative to the project - # location, instead of placing it in the build directory. - if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative - { - $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ; - } - - self.virtual-targets += $(vt) ; - i = [ numbers.increment $(i) ] ; - } - return [ property-set.empty ] $(self.virtual-targets) ; - } -} - -# Creates a symbolic link from a set of targets to a set of sources. -# The targets and sources map one to one. The symlinks generated are -# limited to be the ones given as the sources. That is, the targets -# are either padded or trimmed to equate to the sources. The padding -# is done with the name of the corresponding source. For example:: -# -# symlink : one two ; -# -# Is equal to:: -# -# symlink one two : one two ; -# -# Names for symlink are relative to the project location. They cannot -# include ".." path components. -rule symlink ( - targets * - : sources * - ) -{ - local project = [ project.current ] ; - - return [ targets.main-target-alternative - [ class.new symlink-targets $(project) : $(targets) : - # Note: inline targets are not supported for symlink, intentionally, - # since it's used to linking existing non-local targets. - $(sources) ] ] ; -} - -rule ln -{ - local os ; - if [ modules.peek : UNIX ] { os = UNIX ; } - else { os ?= [ os.name ] ; } - # Remember the path to make the link relative to where the symlink is located. - local path-to-source = [ path.relative-to - [ path.make [ on $(<) return $(LOCATE) ] ] - [ path.make [ on $(>) return $(LOCATE) ] ] ] ; - if $(path-to-source) = . - { - PATH_TO_SOURCE on $(<) = "" ; - } - else - { - PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ; - } - ln-$(os) $(<) : $(>) ; -} - -actions ln-UNIX -{ - ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)' -} - -# there is a way to do this; we fall back to a copy for now -actions ln-NT -{ - echo "NT symlinks not supported yet, making copy" - del /f /q "$(<)" 2>nul >nul - copy "$(>)" "$(<)" $(NULL_OUT) -} - -IMPORT $(__name__) : symlink : : symlink ; diff --git a/jam-files/boost-build/tools/symlink.py b/jam-files/boost-build/tools/symlink.py deleted file mode 100644 index 6345ded6..00000000 --- a/jam-files/boost-build/tools/symlink.py +++ /dev/null @@ -1,112 +0,0 @@ -# Status: ported. -# Base revision: 64488. - -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003 Rene Rivera -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines the "symlink" special target. 'symlink' targets make symbolic links -# to the sources. 
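
The ln rule in symlink.jam above stores the source path relative to the directory that will hold the link, so the link survives relocating the whole tree; the Python port below computes the same relative path in setup_ln. A standalone sketch of that idea, assuming a POSIX host (the helper name is illustrative):

import os

def make_relative_symlink(source, link_name):
    # Express 'source' relative to the directory holding the link, mirroring
    # ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'.
    link_dir = os.path.dirname(os.path.abspath(link_name))
    rel = os.path.relpath(os.path.abspath(source), link_dir)
    if os.path.lexists(link_name):
        os.remove(link_name)          # emulate the -f (force) flag
    os.symlink(rel, link_name)
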
- -import b2.build.feature as feature -import b2.build.targets as targets -import b2.build.property_set as property_set -import b2.build.virtual_target as virtual_target -import b2.build.targets - -from b2.manager import get_manager - -import bjam - -import os - - -feature.feature("symlink-location", ["project-relative", "build-relative"], ["incidental"]) - -class SymlinkTarget(targets.BasicTarget): - - _count = 0 - - def __init__(self, project, targets, sources): - - # Generate a fake name for now. Need unnamed targets eventually. - fake_name = "symlink#%s" % SymlinkTarget._count - SymlinkTarget._count = SymlinkTarget._count + 1 - - b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources) - - # Remember the targets to map the sources onto. Pad or truncate - # to fit the sources given. - assert len(targets) <= len(sources) - self.targets = targets[:] + sources[len(targets):] - - # The virtual targets corresponding to the given targets. - self.virtual_targets = [] - - def construct(self, name, source_targets, ps): - i = 0 - for t in source_targets: - s = self.targets[i] - a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps) - vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a) - - # Place the symlink in the directory relative to the project - # location, instead of placing it in the build directory. - if not ps.get('symlink-location') == "project-relative": - vt.set_path(os.path.join(self.project().get('location'), os.path.dirname(s))) - - vt = get_manager().virtual_targets().register(vt) - self.virtual_targets.append(vt) - i = i + 1 - - return (property_set.empty(), self.virtual_targets) - -# Creates a symbolic link from a set of targets to a set of sources. -# The targets and sources map one to one. The symlinks generated are -# limited to be the ones given as the sources. That is, the targets -# are either padded or trimmed to equate to the sources. The padding -# is done with the name of the corresponding source. For example:: -# -# symlink : one two ; -# -# Is equal to:: -# -# symlink one two : one two ; -# -# Names for symlink are relative to the project location. They cannot -# include ".." path components. -def symlink(targets, sources): - - from b2.manager import get_manager - t = get_manager().targets() - p = get_manager().projects().current() - - return t.main_target_alternative( - SymlinkTarget(p, targets, - # Note: inline targets are not supported for symlink, intentionally, - # since it's used to linking existing non-local targets. 
- sources)) - - -def setup_ln(targets, sources, ps): - - source_path = bjam.call("get-target-variable", sources[0], "LOCATE")[0] - target_path = bjam.call("get-target-variable", targets[0], "LOCATE")[0] - rel = os.path.relpath(source_path, target_path) - if rel == ".": - bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", "") - else: - bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", rel) - -if os.name == 'nt': - ln_action = """echo "NT symlinks not supported yet, making copy" -del /f /q "$(<)" 2>nul >nul -copy "$(>)" "$(<)" $(NULL_OUT)""" -else: - ln_action = "ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'" - -get_manager().engine().register_action("symlink.ln", ln_action, function=setup_ln) - -get_manager().projects().add_rule("symlink", symlink) diff --git a/jam-files/boost-build/tools/testing-aux.jam b/jam-files/boost-build/tools/testing-aux.jam deleted file mode 100644 index 525dafd0..00000000 --- a/jam-files/boost-build/tools/testing-aux.jam +++ /dev/null @@ -1,210 +0,0 @@ -# This module is imported by testing.py. The definitions here are -# too tricky to do in Python - -# Causes the 'target' to exist after bjam invocation if and only if all the -# dependencies were successfully built. -# -rule expect-success ( target : dependency + : requirements * ) -{ - **passed** $(target) : $(sources) ; -} -IMPORT testing : expect-success : : testing.expect-success ; - -# Causes the 'target' to exist after bjam invocation if and only if all some of -# the dependencies were not successfully built. -# -rule expect-failure ( target : dependency + : properties * ) -{ - local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ; - local marker = $(dependency:G=$(grist)*fail) ; - (failed-as-expected) $(marker) ; - FAIL_EXPECTED $(dependency) ; - LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ; - RMOLD $(marker) ; - DEPENDS $(marker) : $(dependency) ; - DEPENDS $(target) : $(marker) ; - **passed** $(target) : $(marker) ; -} -IMPORT testing : expect-failure : : testing.expect-failure ; - -# The rule/action combination used to report successful passing of a test. -# -rule **passed** -{ - # Force deletion of the target, in case any dependencies failed to build. - RMOLD $(<) ; -} - - -# Used to create test files signifying passed tests. -# -actions **passed** -{ - echo passed > "$(<)" -} - - -# Used to create replacement object files that do not get created during tests -# that are expected to fail. -# -actions (failed-as-expected) -{ - echo failed as expected > "$(<)" -} - -# Runs executable 'sources' and stores stdout in file 'target'. Unless -# --preserve-test-targets command line option has been specified, removes the -# executable. The 'target-to-remove' parameter controls what should be removed: -# - if 'none', does not remove anything, ever -# - if empty, removes 'source' -# - if non-empty and not 'none', contains a list of sources to remove. -# -rule capture-output ( target : source : properties * : targets-to-remove * ) -{ - output-file on $(target) = $(target:S=.output) ; - LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ; - - # The INCLUDES kill a warning about independent target... - INCLUDES $(target) : $(target:S=.output) ; - # but it also puts .output into dependency graph, so we must tell jam it is - # OK if it cannot find the target or updating rule. - NOCARE $(target:S=.output) ; - - # This has two-fold effect. First it adds input files to the dependendency - # graph, preventing a warning. 
Second, it causes input files to be bound - # before target is created. Therefore, they are bound using SEARCH setting - # on them and not LOCATE setting of $(target), as in other case (due to jam - # bug). - DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ; - - if $(targets-to-remove) = none - { - targets-to-remove = ; - } - else if ! $(targets-to-remove) - { - targets-to-remove = $(source) ; - } - - if [ on $(target) return $(REMOVE_TEST_TARGETS) ] - { - TEMPORARY $(targets-to-remove) ; - # Set a second action on target that will be executed after capture - # output action. The 'RmTemps' rule has the 'ignore' modifier so it is - # always considered succeeded. This is needed for 'run-fail' test. For - # that test the target will be marked with FAIL_EXPECTED, and without - # 'ignore' successful execution will be negated and be reported as - # failure. With 'ignore' we do not detect a case where removing files - # fails, but it is not likely to happen. - RmTemps $(target) : $(targets-to-remove) ; - } -} - - -if [ os.name ] = NT -{ - .STATUS = %status% ; - .SET_STATUS = "set status=%ERRORLEVEL%" ; - .RUN_OUTPUT_NL = "echo." ; - .STATUS_0 = "%status% EQU 0 (" ; - .STATUS_NOT_0 = "%status% NEQ 0 (" ; - .VERBOSE = "%verbose% EQU 1 (" ; - .ENDIF = ")" ; - .SHELL_SET = "set " ; - .CATENATE = type ; - .CP = copy ; -} -else -{ - .STATUS = "$status" ; - .SET_STATUS = "status=$?" ; - .RUN_OUTPUT_NL = "echo" ; - .STATUS_0 = "test $status -eq 0 ; then" ; - .STATUS_NOT_0 = "test $status -ne 0 ; then" ; - .VERBOSE = "test $verbose -eq 1 ; then" ; - .ENDIF = "fi" ; - .SHELL_SET = "" ; - .CATENATE = cat ; - .CP = cp ; -} - - -.VERBOSE_TEST = 0 ; -if --verbose-test in [ modules.peek : ARGV ] -{ - .VERBOSE_TEST = 1 ; -} - - -.RM = [ common.rm-command ] ; - - -actions capture-output bind INPUT_FILES output-file -{ - $(PATH_SETUP) - $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 - $(.SET_STATUS) - $(.RUN_OUTPUT_NL) >> "$(output-file)" - echo EXIT STATUS: $(.STATUS) >> "$(output-file)" - if $(.STATUS_0) - $(.CP) "$(output-file)" "$(<)" - $(.ENDIF) - $(.SHELL_SET)verbose=$(.VERBOSE_TEST) - if $(.STATUS_NOT_0) - $(.SHELL_SET)verbose=1 - $(.ENDIF) - if $(.VERBOSE) - echo ====== BEGIN OUTPUT ====== - $(.CATENATE) "$(output-file)" - echo ====== END OUTPUT ====== - $(.ENDIF) - exit $(.STATUS) -} - -IMPORT testing : capture-output : : testing.capture-output ; - - -actions quietly updated ignore piecemeal together RmTemps -{ - $(.RM) "$(>)" -} - - -.MAKE_FILE = [ common.file-creation-command ] ; - -actions unit-test -{ - $(PATH_SETUP) - $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<) -} - -rule record-time ( target : source : start end user system ) -{ - local src-string = [$(source:G=:J=",")"] " ; - USER_TIME on $(target) += $(src-string)$(user) ; - SYSTEM_TIME on $(target) += $(src-string)$(system) ; -} - -# Calling this rule requests that Boost Build time how long it taks to build the -# 'source' target and display the results both on the standard output and in the -# 'target' file. -# -rule time ( target : source : properties * ) -{ - # Set up rule for recording timing information. - __TIMING_RULE__ on $(source) = testing.record-time $(target) ; - - # Make sure that the source is rebuilt any time we need to retrieve that - # information. 
- REBUILDS $(target) : $(source) ; -} - - -actions time -{ - echo user: $(USER_TIME) - echo system: $(SYSTEM_TIME) - - echo user: $(USER_TIME)" seconds" > "$(<)" - echo system: $(SYSTEM_TIME)" seconds" >> "$(<)" -} diff --git a/jam-files/boost-build/tools/testing.jam b/jam-files/boost-build/tools/testing.jam deleted file mode 100644 index c42075b7..00000000 --- a/jam-files/boost-build/tools/testing.jam +++ /dev/null @@ -1,581 +0,0 @@ -# Copyright 2005 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module implements regression testing framework. It declares a number of -# main target rules which perform some action and, if the results are OK, -# creates an output file. -# -# The exact list of rules is: -# 'compile' -- creates .test file if compilation of sources was -# successful. -# 'compile-fail' -- creates .test file if compilation of sources failed. -# 'run' -- creates .test file is running of executable produced from -# sources was successful. Also leaves behind .output file -# with the output from program run. -# 'run-fail' -- same as above, but .test file is created if running fails. -# -# In all cases, presence of .test file is an indication that the test passed. -# For more convenient reporting, you might want to use C++ Boost regression -# testing utilities (see http://www.boost.org/more/regression.html). -# -# For historical reason, a 'unit-test' rule is available which has the same -# syntax as 'exe' and behaves just like 'run'. - -# Things to do: -# - Teach compiler_status handle Jamfile.v2. -# Notes: -# - <no-warn> is not implemented, since it is Como-specific, and it is not -# clear how to implement it -# - std::locale-support is not implemented (it is used in one test). - - -import alias ; -import "class" ; -import common ; -import errors ; -import feature ; -import generators ; -import os ; -import path ; -import project ; -import property ; -import property-set ; -import regex ; -import sequence ; -import targets ; -import toolset ; -import type ; -import virtual-target ; - - -rule init ( ) -{ -} - - -# Feature controling the command used to lanch test programs. -feature.feature testing.launcher : : free optional ; - -feature.feature test-info : : free incidental ; -feature.feature testing.arg : : free incidental ; -feature.feature testing.input-file : : free dependency ; - -feature.feature preserve-test-targets : on off : incidental propagated ; - -# Register target types. -type.register TEST : test ; -type.register COMPILE : : TEST ; -type.register COMPILE_FAIL : : TEST ; -type.register RUN_OUTPUT : run ; -type.register RUN : : TEST ; -type.register RUN_FAIL : : TEST ; -type.register LINK_FAIL : : TEST ; -type.register LINK : : TEST ; -type.register UNIT_TEST : passed : TEST ; - - -# Declare the rules which create main targets. While the 'type' module already -# creates rules with the same names for us, we need extra convenience: default -# name of main target, so write our own versions. - -# Helper rule. Create a test target, using basename of first source if no target -# name is explicitly passed. Remembers the created target in a global variable. -# -rule make-test ( target-type : sources + : requirements * : target-name ? 
) -{ - target-name ?= $(sources[1]:D=:S=) ; - - # Having periods (".") in the target name is problematic because the typed - # generator will strip the suffix and use the bare name for the file - # targets. Even though the location-prefix averts problems most times it - # does not prevent ambiguity issues when referring to the test targets. For - # example when using the XML log output. So we rename the target to remove - # the periods, and provide an alias for users. - local real-name = [ regex.replace $(target-name) "[.]" "~" ] ; - - local project = [ project.current ] ; - # The <location-prefix> forces the build system for generate paths in the - # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow - # post-processing tools to work. - local t = [ targets.create-typed-target [ type.type-from-rule-name - $(target-type) ] : $(project) : $(real-name) : $(sources) : - $(requirements) <location-prefix>$(real-name).test ] ; - - # The alias to the real target, per period replacement above. - if $(real-name) != $(target-name) - { - alias $(target-name) : $(t) ; - } - - # Remember the test (for --dump-tests). A good way would be to collect all - # given a project. This has some technical problems: e.g. we can not call - # this dump from a Jamfile since projects referred by 'build-project' are - # not available until the whole Jamfile has been loaded. - .all-tests += $(t) ; - return $(t) ; -} - - -# Note: passing more that one cpp file here is known to fail. Passing a cpp file -# and a library target works. -# -rule compile ( sources + : requirements * : target-name ? ) -{ - return [ make-test compile : $(sources) : $(requirements) : $(target-name) ] - ; -} - - -rule compile-fail ( sources + : requirements * : target-name ? ) -{ - return [ make-test compile-fail : $(sources) : $(requirements) : - $(target-name) ] ; -} - - -rule link ( sources + : requirements * : target-name ? ) -{ - return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ; -} - - -rule link-fail ( sources + : requirements * : target-name ? ) -{ - return [ make-test link-fail : $(sources) : $(requirements) : $(target-name) - ] ; -} - - -rule handle-input-files ( input-files * ) -{ - if $(input-files[2]) - { - # Check that sorting made when creating property-set instance will not - # change the ordering. - if [ sequence.insertion-sort $(input-files) ] != $(input-files) - { - errors.user-error "Names of input files must be sorted alphabetically" - : "due to internal limitations" ; - } - } - return <testing.input-file>$(input-files) ; -} - - -rule run ( sources + : args * : input-files * : requirements * : target-name ? : - default-build * ) -{ - requirements += <testing.arg>$(args:J=" ") ; - requirements += [ handle-input-files $(input-files) ] ; - return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ; -} - - -rule run-fail ( sources + : args * : input-files * : requirements * : - target-name ? : default-build * ) -{ - requirements += <testing.arg>$(args:J=" ") ; - requirements += [ handle-input-files $(input-files) ] ; - return [ make-test run-fail : $(sources) : $(requirements) : $(target-name) - ] ; -} - - -# Use 'test-suite' as a synonym for 'alias', for backward compatibility. -IMPORT : alias : : test-suite ; - - -# For all main targets in 'project-module', which are typed targets with type -# derived from 'TEST', produce some interesting information. 
-# -rule dump-tests -{ - for local t in $(.all-tests) - { - dump-test $(t) ; - } -} - - -# Given a project location in normalized form (slashes are forward), compute the -# name of the Boost library. -# -local rule get-library-name ( path ) -{ - # Path is in normalized form, so all slashes are forward. - local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ; - local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ; - local match3 = [ MATCH (/status$) : $(path) ] ; - - if $(match1) { return $(match1[2]) ; } - else if $(match2) { return $(match2[2]) ; } - else if $(match3) { return "" ; } - else if --dump-tests in [ modules.peek : ARGV ] - { - # The 'run' rule and others might be used outside boost. In that case, - # just return the path, since the 'library name' makes no sense. - return $(path) ; - } -} - - -# Was an XML dump requested? -.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ; - - -# Takes a target (instance of 'basic-target') and prints -# - its type -# - its name -# - comments specified via the <test-info> property -# - relative location of all source from the project root. -# -rule dump-test ( target ) -{ - local type = [ $(target).type ] ; - local name = [ $(target).name ] ; - local project = [ $(target).project ] ; - - local project-root = [ $(project).get project-root ] ; - local library = [ get-library-name [ path.root [ $(project).get location ] - [ path.pwd ] ] ] ; - if $(library) - { - name = $(library)/$(name) ; - } - - local sources = [ $(target).sources ] ; - local source-files ; - for local s in $(sources) - { - if [ class.is-a $(s) : file-reference ] - { - local location = [ path.root [ path.root [ $(s).name ] - [ $(s).location ] ] [ path.pwd ] ] ; - - source-files += [ path.relative-to [ path.root $(project-root) - [ path.pwd ] ] $(location) ] ; - } - } - - local target-name = [ $(project).get location ] // [ $(target).name ] .test - ; - target-name = $(target-name:J=) ; - - local r = [ $(target).requirements ] ; - # Extract values of the <test-info> feature. - local test-info = [ $(r).get <test-info> ] ; - - # If the user requested XML output on the command-line, add the test info to - # that XML file rather than dumping them to stdout. - if $(.out-xml) - { - local nl = " -" ; - .contents on $(.out-xml) += - "$(nl) <test type=\"$(type)\" name=\"$(name)\">" - "$(nl) <target><![CDATA[$(target-name)]]></target>" - "$(nl) <info><![CDATA[$(test-info)]]></info>" - "$(nl) <source><![CDATA[$(source-files)]]></source>" - "$(nl) </test>" - ; - } - else - { - # Format them into a single string of quoted strings. - test-info = \"$(test-info:J=\"\ \")\" ; - - ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":" - \"$(source-files)\" ; - } -} - - -# Register generators. Depending on target type, either 'expect-success' or -# 'expect-failure' rule will be used. -generators.register-standard testing.expect-success : OBJ : COMPILE ; -generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ; -generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ; -generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ; -generators.register-standard testing.expect-failure : EXE : LINK_FAIL ; -generators.register-standard testing.expect-success : EXE : LINK ; - -# Generator which runs an EXE and captures output. -generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ; - -# Generator which creates a target if sources run successfully. Differs from RUN -# in that run output is not captured. 
The reason why it exists is that the 'run' -# rule is much better for automated testing, but is not user-friendly (see -# http://article.gmane.org/gmane.comp.lib.boost.build/6353). -generators.register-standard testing.unit-test : EXE : UNIT_TEST ; - - -# The action rules called by generators. - -# Causes the 'target' to exist after bjam invocation if and only if all the -# dependencies were successfully built. -# -rule expect-success ( target : dependency + : requirements * ) -{ - **passed** $(target) : $(sources) ; -} - - -# Causes the 'target' to exist after bjam invocation if and only if all some of -# the dependencies were not successfully built. -# -rule expect-failure ( target : dependency + : properties * ) -{ - local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ; - local marker = $(dependency:G=$(grist)*fail) ; - (failed-as-expected) $(marker) ; - FAIL_EXPECTED $(dependency) ; - LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ; - RMOLD $(marker) ; - DEPENDS $(marker) : $(dependency) ; - DEPENDS $(target) : $(marker) ; - **passed** $(target) : $(marker) ; -} - - -# The rule/action combination used to report successful passing of a test. -# -rule **passed** -{ - # Dump all the tests, if needed. We do it here, since dump should happen - # only after all Jamfiles have been read, and there is no such place - # currently defined (but there should be). - if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] ) - { - .dumped-tests = true ; - dump-tests ; - } - - # Force deletion of the target, in case any dependencies failed to build. - RMOLD $(<) ; -} - - -# Used to create test files signifying passed tests. -# -actions **passed** -{ - echo passed > "$(<)" -} - - -# Used to create replacement object files that do not get created during tests -# that are expected to fail. -# -actions (failed-as-expected) -{ - echo failed as expected > "$(<)" -} - - -rule run-path-setup ( target : source : properties * ) -{ - # For testing, we need to make sure that all dynamic libraries needed by the - # test are found. So, we collect all paths from dependency libraries (via - # xdll-path property) and add whatever explicit dll-path user has specified. - # The resulting paths are added to the environment on each test invocation. - local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ; - dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ; - dll-paths += [ on $(source) return $(RUN_PATH) ] ; - dll-paths = [ sequence.unique $(dll-paths) ] ; - if $(dll-paths) - { - dll-paths = [ sequence.transform path.native : $(dll-paths) ] ; - PATH_SETUP on $(target) = [ common.prepend-path-variable-command - [ os.shared-library-path-variable ] : $(dll-paths) ] ; - } -} - - -local argv = [ modules.peek : ARGV ] ; - -toolset.flags testing.capture-output ARGS <testing.arg> ; -toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ; -toolset.flags testing.capture-output LAUNCHER <testing.launcher> ; - - -# Runs executable 'sources' and stores stdout in file 'target'. Unless -# --preserve-test-targets command line option has been specified, removes the -# executable. The 'target-to-remove' parameter controls what should be removed: -# - if 'none', does not remove anything, ever -# - if empty, removes 'source' -# - if non-empty and not 'none', contains a list of sources to remove. 
-# -rule capture-output ( target : source : properties * : targets-to-remove * ) -{ - output-file on $(target) = $(target:S=.output) ; - LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ; - - # The INCLUDES kill a warning about independent target... - INCLUDES $(target) : $(target:S=.output) ; - # but it also puts .output into dependency graph, so we must tell jam it is - # OK if it cannot find the target or updating rule. - NOCARE $(target:S=.output) ; - - # This has two-fold effect. First it adds input files to the dependendency - # graph, preventing a warning. Second, it causes input files to be bound - # before target is created. Therefore, they are bound using SEARCH setting - # on them and not LOCATE setting of $(target), as in other case (due to jam - # bug). - DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ; - - if $(targets-to-remove) = none - { - targets-to-remove = ; - } - else if ! $(targets-to-remove) - { - targets-to-remove = $(source) ; - } - - run-path-setup $(target) : $(source) : $(properties) ; - - if [ feature.get-values preserve-test-targets : $(properties) ] = off - { - TEMPORARY $(targets-to-remove) ; - # Set a second action on target that will be executed after capture - # output action. The 'RmTemps' rule has the 'ignore' modifier so it is - # always considered succeeded. This is needed for 'run-fail' test. For - # that test the target will be marked with FAIL_EXPECTED, and without - # 'ignore' successful execution will be negated and be reported as - # failure. With 'ignore' we do not detect a case where removing files - # fails, but it is not likely to happen. - RmTemps $(target) : $(targets-to-remove) ; - } -} - - -if [ os.name ] = NT -{ - .STATUS = %status% ; - .SET_STATUS = "set status=%ERRORLEVEL%" ; - .RUN_OUTPUT_NL = "echo." ; - .STATUS_0 = "%status% EQU 0 (" ; - .STATUS_NOT_0 = "%status% NEQ 0 (" ; - .VERBOSE = "%verbose% EQU 1 (" ; - .ENDIF = ")" ; - .SHELL_SET = "set " ; - .CATENATE = type ; - .CP = copy ; -} -else -{ - .STATUS = "$status" ; - .SET_STATUS = "status=$?" 
; - .RUN_OUTPUT_NL = "echo" ; - .STATUS_0 = "test $status -eq 0 ; then" ; - .STATUS_NOT_0 = "test $status -ne 0 ; then" ; - .VERBOSE = "test $verbose -eq 1 ; then" ; - .ENDIF = "fi" ; - .SHELL_SET = "" ; - .CATENATE = cat ; - .CP = cp ; -} - - -.VERBOSE_TEST = 0 ; -if --verbose-test in [ modules.peek : ARGV ] -{ - .VERBOSE_TEST = 1 ; -} - - -.RM = [ common.rm-command ] ; - - -actions capture-output bind INPUT_FILES output-file -{ - $(PATH_SETUP) - $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 - $(.SET_STATUS) - $(.RUN_OUTPUT_NL) >> "$(output-file)" - echo EXIT STATUS: $(.STATUS) >> "$(output-file)" - if $(.STATUS_0) - $(.CP) "$(output-file)" "$(<)" - $(.ENDIF) - $(.SHELL_SET)verbose=$(.VERBOSE_TEST) - if $(.STATUS_NOT_0) - $(.SHELL_SET)verbose=1 - $(.ENDIF) - if $(.VERBOSE) - echo ====== BEGIN OUTPUT ====== - $(.CATENATE) "$(output-file)" - echo ====== END OUTPUT ====== - $(.ENDIF) - exit $(.STATUS) -} - - -actions quietly updated ignore piecemeal together RmTemps -{ - $(.RM) "$(>)" -} - - -.MAKE_FILE = [ common.file-creation-command ] ; - -toolset.flags testing.unit-test LAUNCHER <testing.launcher> ; -toolset.flags testing.unit-test ARGS <testing.arg> ; - - -rule unit-test ( target : source : properties * ) -{ - run-path-setup $(target) : $(source) : $(properties) ; -} - - -actions unit-test -{ - $(PATH_SETUP) - $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<) -} - - -IMPORT $(__name__) : compile compile-fail run run-fail link link-fail - : : compile compile-fail run run-fail link link-fail ; - - -type.register TIME : time ; -generators.register-standard testing.time : : TIME ; - - -rule record-time ( target : source : start end user system ) -{ - local src-string = [$(source:G=:J=",")"] " ; - USER_TIME on $(target) += $(src-string)$(user) ; - SYSTEM_TIME on $(target) += $(src-string)$(system) ; -} - - -IMPORT testing : record-time : : testing.record-time ; - - -# Calling this rule requests that Boost Build time how long it taks to build the -# 'source' target and display the results both on the standard output and in the -# 'target' file. -# -rule time ( target : source : properties * ) -{ - # Set up rule for recording timing information. - __TIMING_RULE__ on $(source) = testing.record-time $(target) ; - - # Make sure that the source is rebuilt any time we need to retrieve that - # information. - REBUILDS $(target) : $(source) ; -} - - -actions time -{ - echo user: $(USER_TIME) - echo system: $(SYSTEM_TIME) - - echo user: $(USER_TIME)" seconds" > "$(<)" - echo system: $(SYSTEM_TIME)" seconds" >> "$(<)" -} diff --git a/jam-files/boost-build/tools/testing.py b/jam-files/boost-build/tools/testing.py deleted file mode 100644 index 3b53500c..00000000 --- a/jam-files/boost-build/tools/testing.py +++ /dev/null @@ -1,342 +0,0 @@ -# Status: ported, except for --out-xml -# Base revision: 64488 -# -# Copyright 2005 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module implements regression testing framework. It declares a number of -# main target rules which perform some action and, if the results are OK, -# creates an output file. -# -# The exact list of rules is: -# 'compile' -- creates .test file if compilation of sources was -# successful. -# 'compile-fail' -- creates .test file if compilation of sources failed. 
-# 'run' -- creates .test file is running of executable produced from -# sources was successful. Also leaves behind .output file -# with the output from program run. -# 'run-fail' -- same as above, but .test file is created if running fails. -# -# In all cases, presence of .test file is an indication that the test passed. -# For more convenient reporting, you might want to use C++ Boost regression -# testing utilities (see http://www.boost.org/more/regression.html). -# -# For historical reason, a 'unit-test' rule is available which has the same -# syntax as 'exe' and behaves just like 'run'. - -# Things to do: -# - Teach compiler_status handle Jamfile.v2. -# Notes: -# - <no-warn> is not implemented, since it is Como-specific, and it is not -# clear how to implement it -# - std::locale-support is not implemented (it is used in one test). - -import b2.build.feature as feature -import b2.build.type as type -import b2.build.targets as targets -import b2.build.generators as generators -import b2.build.toolset as toolset -import b2.tools.common as common -import b2.util.option as option -import b2.build_system as build_system - - - -from b2.manager import get_manager -from b2.util import stem, bjam_signature -from b2.util.sequence import unique - -import bjam - -import re -import os.path -import sys - -def init(): - pass - -# Feature controling the command used to lanch test programs. -feature.feature("testing.launcher", [], ["free", "optional"]) - -feature.feature("test-info", [], ["free", "incidental"]) -feature.feature("testing.arg", [], ["free", "incidental"]) -feature.feature("testing.input-file", [], ["free", "dependency"]) - -feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"]) - -# Register target types. -type.register("TEST", ["test"]) -type.register("COMPILE", [], "TEST") -type.register("COMPILE_FAIL", [], "TEST") - -type.register("RUN_OUTPUT", ["run"]) -type.register("RUN", [], "TEST") -type.register("RUN_FAIL", [], "TEST") - -type.register("LINK", [], "TEST") -type.register("LINK_FAIL", [], "TEST") -type.register("UNIT_TEST", ["passed"], "TEST") - -__all_tests = [] - -# Declare the rules which create main targets. While the 'type' module already -# creates rules with the same names for us, we need extra convenience: default -# name of main target, so write our own versions. - -# Helper rule. Create a test target, using basename of first source if no target -# name is explicitly passed. Remembers the created target in a global variable. -def make_test(target_type, sources, requirements, target_name=None): - - if not target_name: - target_name = stem(os.path.basename(sources[0])) - - # Having periods (".") in the target name is problematic because the typed - # generator will strip the suffix and use the bare name for the file - # targets. Even though the location-prefix averts problems most times it - # does not prevent ambiguity issues when referring to the test targets. For - # example when using the XML log output. So we rename the target to remove - # the periods, and provide an alias for users. - real_name = target_name.replace(".", "~") - - project = get_manager().projects().current() - # The <location-prefix> forces the build system for generate paths in the - # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow - # post-processing tools to work. 
- t = get_manager().targets().create_typed_target( - type.type_from_rule_name(target_type), project, real_name, sources, - requirements + ["<location-prefix>" + real_name + ".test"], [], []) - - # The alias to the real target, per period replacement above. - if real_name != target_name: - get_manager().projects().project_rules().all_names_["alias"]( - target_name, [t]) - - # Remember the test (for --dump-tests). A good way would be to collect all - # given a project. This has some technical problems: e.g. we can not call - # this dump from a Jamfile since projects referred by 'build-project' are - # not available until the whole Jamfile has been loaded. - __all_tests.append(t) - return t - - -# Note: passing more that one cpp file here is known to fail. Passing a cpp file -# and a library target works. -# -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def compile(sources, requirements, target_name=None): - return make_test("compile", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def compile_fail(sources, requirements, target_name=None): - return make_test("compile-fail", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def link(sources, requirements, target_name=None): - return make_test("link", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def link_fail(sources, requirements, target_name=None): - return make_test("link-fail", sources, requirements, target_name) - -def handle_input_files(input_files): - if len(input_files) > 1: - # Check that sorting made when creating property-set instance will not - # change the ordering. - if sorted(input_files) != input_files: - get_manager().errors()("Names of input files must be sorted alphabetically\n" + - "due to internal limitations") - return ["<testing.input-file>" + f for f in input_files] - -@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], - ["requirements", "*"], ["target_name", "?"], - ["default_build", "*"])) -def run(sources, args, input_files, requirements, target_name=None, default_build=[]): - if args: - requirements.append("<testing.arg>" + " ".join(args)) - requirements.extend(handle_input_files(input_files)) - return make_test("run", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], - ["requirements", "*"], ["target_name", "?"], - ["default_build", "*"])) -def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]): - if args: - requirements.append("<testing.arg>" + " ".join(args)) - requirements.extend(handle_input_files(input_files)) - return make_test("run-fail", sources, requirements, target_name) - -# Register all the rules -for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]: - get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_"))) - -# Use 'test-suite' as a synonym for 'alias', for backward compatibility. -from b2.build.alias import alias -get_manager().projects().add_rule("test-suite", alias) - -# For all main targets in 'project-module', which are typed targets with type -# derived from 'TEST', produce some interesting information. 
-# -def dump_tests(): - for t in __all_tests: - dump_test(t) - -# Given a project location in normalized form (slashes are forward), compute the -# name of the Boost library. -# -__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)") -__ln2 = re.compile("/(tools|libs)/(.*)$") -__ln3 = re.compile("(/status$)") -def get_library_name(path): - - path = path.replace("\\", "/") - match1 = __ln1.match(path) - match2 = __ln2.match(path) - match3 = __ln3.match(path) - - if match1: - return match1.group(2) - elif match2: - return match2.group(2) - elif match3: - return "" - elif option.get("dump-tests", False, True): - # The 'run' rule and others might be used outside boost. In that case, - # just return the path, since the 'library name' makes no sense. - return path - -# Was an XML dump requested? -__out_xml = option.get("out-xml", False, True) - -# Takes a target (instance of 'basic-target') and prints -# - its type -# - its name -# - comments specified via the <test-info> property -# - relative location of all source from the project root. -# -def dump_test(target): - type = target.type() - name = target.name() - project = target.project() - - project_root = project.get('project-root') - library = get_library_name(os.path.abspath(project.get('location'))) - if library: - name = library + "/" + name - - sources = target.sources() - source_files = [] - for s in sources: - if isinstance(s, targets.FileReference): - location = os.path.abspath(os.path.join(s.location(), s.name())) - source_files.append(os.path.relpath(location, os.path.abspath(project_root))) - - target_name = project.get('location') + "//" + target.name() + ".test" - - test_info = target.requirements().get('test-info') - test_info = " ".join('"' + ti + '"' for ti in test_info) - - # If the user requested XML output on the command-line, add the test info to - # that XML file rather than dumping them to stdout. - #if $(.out-xml) - #{ -# local nl = " -#" ; -# .contents on $(.out-xml) += -# "$(nl) <test type=\"$(type)\" name=\"$(name)\">" -# "$(nl) <target><![CDATA[$(target-name)]]></target>" -# "$(nl) <info><![CDATA[$(test-info)]]></info>" -# "$(nl) <source><![CDATA[$(source-files)]]></source>" -# "$(nl) </test>" -# ; -# } -# else - - source_files = " ".join('"' + s + '"' for s in source_files) - if test_info: - print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files) - else: - print 'boost-test(%s) "%s" : %s' % (type, name, source_files) - -# Register generators. Depending on target type, either 'expect-success' or -# 'expect-failure' rule will be used. -generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"]) -generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"]) -generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"]) -generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"]) -generators.register_standard("testing.expect-success", ["EXE"], ["LINK"]) -generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"]) - -# Generator which runs an EXE and captures output. -generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"]) - -# Generator which creates a target if sources run successfully. Differs from RUN -# in that run output is not captured. The reason why it exists is that the 'run' -# rule is much better for automated testing, but is not user-friendly (see -# http://article.gmane.org/gmane.comp.lib.boost.build/6353). 
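# As a usage sketch (the file name is a placeholder), the 'run' rule registered
# above is typically invoked from a Jamfile as
#
#   run my_test.cpp : --verbose : : <define>FOO ;
#
# which compiles, links and executes the program, leaving my_test.test (and a
# my_test.output file with the captured output) behind when the run succeeds.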
-generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"]) - -# FIXME: if those calls are after bjam.call, then bjam will crash -# when toolset.flags calls bjam.caller. -toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"]) -toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"]) -toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"]) - -toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"]) -toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"]) - -type.register("TIME", ["time"]) -generators.register_standard("testing.time", [], ["TIME"]) - - -# The following code sets up actions for this module. It's pretty convoluted, -# but the basic points is that we most of actions are defined by Jam code -# contained in testing-aux.jam, which we load into Jam module named 'testing' - -def run_path_setup(target, sources, ps): - - # For testing, we need to make sure that all dynamic libraries needed by the - # test are found. So, we collect all paths from dependency libraries (via - # xdll-path property) and add whatever explicit dll-path user has specified. - # The resulting paths are added to the environment on each test invocation. - dll_paths = ps.get('dll-path') - dll_paths.extend(ps.get('xdll-path')) - dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH")) - dll_paths = unique(dll_paths) - if dll_paths: - bjam.call("set-target-variable", target, "PATH_SETUP", - common.prepend_path_variable_command( - common.shared_library_path_variable(), dll_paths)) - -def capture_output_setup(target, sources, ps): - run_path_setup(target, sources, ps) - - if ps.get('preserve-test-targets') == ['off']: - bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1") - -get_manager().engine().register_bjam_action("testing.capture-output", - capture_output_setup) - - -path = os.path.dirname(get_manager().projects().loaded_tool_module_path_[__name__]) -import b2.util.os_j -get_manager().projects().project_rules()._import_rule("testing", "os.name", - b2.util.os_j.name) -import b2.tools.common -get_manager().projects().project_rules()._import_rule("testing", "common.rm-command", - b2.tools.common.rm_command) -get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command", - b2.tools.common.file_creation_command) - -bjam.call("load", "testing", os.path.join(path, "testing-aux.jam")) - - -for name in ["expect-success", "expect-failure", "time"]: - get_manager().engine().register_bjam_action("testing." + name) - -get_manager().engine().register_bjam_action("testing.unit-test", - run_path_setup) - -if option.get("dump-tests", False, True): - build_system.add_pre_build_hook(dump_tests) diff --git a/jam-files/boost-build/tools/types/__init__.py b/jam-files/boost-build/tools/types/__init__.py deleted file mode 100644 index f972b714..00000000 --- a/jam-files/boost-build/tools/types/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -__all__ = [ - 'asm', - 'cpp', - 'exe', - 'html', - 'lib', - 'obj', - 'rsp', -] - -def register_all (): - for i in __all__: - m = __import__ (__name__ + '.' + i) - reg = i + '.register ()' - #exec (reg) - -# TODO: (PF) I thought these would be imported automatically. Anyone knows why they aren't? 
-register_all () diff --git a/jam-files/boost-build/tools/types/asm.jam b/jam-files/boost-build/tools/types/asm.jam deleted file mode 100644 index a340db36..00000000 --- a/jam-files/boost-build/tools/types/asm.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright Craig Rodrigues 2005. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -type ASM : s S asm ; diff --git a/jam-files/boost-build/tools/types/asm.py b/jam-files/boost-build/tools/types/asm.py deleted file mode 100644 index b4e1c30e..00000000 --- a/jam-files/boost-build/tools/types/asm.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright Craig Rodrigues 2005. -# Copyright (c) 2008 Steven Watanabe -# -# Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register(): - type.register_type('ASM', ['s', 'S', 'asm']) - -register() diff --git a/jam-files/boost-build/tools/types/cpp.jam b/jam-files/boost-build/tools/types/cpp.jam deleted file mode 100644 index 3159cdd7..00000000 --- a/jam-files/boost-build/tools/types/cpp.jam +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright David Abrahams 2004. -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Copyright 2010 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -import type ; -import scanner ; - -class c-scanner : scanner -{ - import path ; - import regex ; - import scanner ; - import sequence ; - import virtual-target ; - - rule __init__ ( includes * ) - { - scanner.__init__ ; - - for local i in $(includes) - { - self.includes += [ sequence.transform path.native - : [ regex.split $(i:G=) "&&" ] ] ; - } - } - - rule pattern ( ) - { - return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ; - } - - rule process ( target : matches * : binding ) - { - local angle = [ regex.transform $(matches) : "<(.*)>" ] ; - angle = [ sequence.transform path.native : $(angle) ] ; - local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ; - quoted = [ sequence.transform path.native : $(quoted) ] ; - - # CONSIDER: the new scoping rule seem to defeat "on target" variables. - local g = [ on $(target) return $(HDRGRIST) ] ; - local b = [ NORMALIZE_PATH $(binding:D) ] ; - - # Attach binding of including file to included targets. When a target is - # directly created from virtual target this extra information is - # unnecessary. But in other cases, it allows us to distinguish between - # two headers of the same name included from different places. We do not - # need this extra information for angle includes, since they should not - # depend on including file (we can not get literal "." in include path). - local g2 = $(g)"#"$(b) ; - - angle = $(angle:G=$(g)) ; - quoted = $(quoted:G=$(g2)) ; - - local all = $(angle) $(quoted) ; - - INCLUDES $(target) : $(all) ; - NOCARE $(all) ; - SEARCH on $(angle) = $(self.includes:G=) ; - SEARCH on $(quoted) = $(b) $(self.includes:G=) ; - - # Just propagate the current scanner to includes in hope that includes - # do not change scanners. 
- scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ; - - ISFILE $(angle) $(quoted) ; - } -} - -scanner.register c-scanner : include ; - -type.register CPP : cpp cxx cc ; -type.register H : h ; -type.register HPP : hpp : H ; -type.register C : c ; - -# It most cases where a CPP file or a H file is a source of some action, we -# should rebuild the result if any of files included by CPP/H are changed. One -# case when this is not needed is installation, which is handled specifically. -type.set-scanner CPP : c-scanner ; -type.set-scanner C : c-scanner ; -# One case where scanning of H/HPP files is necessary is PCH generation -- if -# any header included by HPP being precompiled changes, we need to recompile the -# header. -type.set-scanner H : c-scanner ; -type.set-scanner HPP : c-scanner ; diff --git a/jam-files/boost-build/tools/types/cpp.py b/jam-files/boost-build/tools/types/cpp.py deleted file mode 100644 index 7b56111c..00000000 --- a/jam-files/boost-build/tools/types/cpp.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): - type.register_type ('CPP', ['cpp', 'cxx', 'cc']) - -register () diff --git a/jam-files/boost-build/tools/types/exe.jam b/jam-files/boost-build/tools/types/exe.jam deleted file mode 100644 index 47109513..00000000 --- a/jam-files/boost-build/tools/types/exe.jam +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import type ; - -type.register EXE ; -type.set-generated-target-suffix EXE : <target-os>windows : "exe" ; -type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ; diff --git a/jam-files/boost-build/tools/types/exe.py b/jam-files/boost-build/tools/types/exe.py deleted file mode 100644 index a4935e24..00000000 --- a/jam-files/boost-build/tools/types/exe.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): - type.register_type ('EXE', ['exe'], None, ['NT', 'CYGWIN']) - type.register_type ('EXE', [], None, []) - -register () diff --git a/jam-files/boost-build/tools/types/html.jam b/jam-files/boost-build/tools/types/html.jam deleted file mode 100644 index 5cd337d0..00000000 --- a/jam-files/boost-build/tools/types/html.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -type HTML : html ; diff --git a/jam-files/boost-build/tools/types/html.py b/jam-files/boost-build/tools/types/html.py deleted file mode 100644 index 63af4d90..00000000 --- a/jam-files/boost-build/tools/types/html.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. 
(See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): - type.register_type ('HTML', ['html']) - -register () diff --git a/jam-files/boost-build/tools/types/lib.jam b/jam-files/boost-build/tools/types/lib.jam deleted file mode 100644 index 854ab8fd..00000000 --- a/jam-files/boost-build/tools/types/lib.jam +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import type ; # for set-generated-target-suffix -import os ; - -# The following naming scheme is used for libraries. -# -# On *nix: -# libxxx.a static library -# libxxx.so shared library -# -# On windows (msvc) -# libxxx.lib static library -# xxx.dll DLL -# xxx.lib import library -# -# On windows (mingw): -# libxxx.a static library -# libxxx.dll DLL -# libxxx.dll.a import library -# -# On cygwin i.e. <target-os>cygwin -# libxxx.a static library -# cygxxx.dll DLL -# libxxx.dll.a import library -# - -type.register LIB ; - -# FIXME: should not register both extensions on both platforms. -type.register STATIC_LIB : a lib : LIB ; - -# The 'lib' prefix is used everywhere -type.set-generated-target-prefix STATIC_LIB : : lib ; - -# Use '.lib' suffix for windows -type.set-generated-target-suffix STATIC_LIB : <target-os>windows : lib ; - -# Except with gcc. -type.set-generated-target-suffix STATIC_LIB : <toolset>gcc <target-os>windows : a ; - -# Use xxx.lib for import libs -type IMPORT_LIB : : STATIC_LIB ; -type.set-generated-target-prefix IMPORT_LIB : : "" ; -type.set-generated-target-suffix IMPORT_LIB : : lib ; - -# Except with gcc (mingw or cygwin), where use libxxx.dll.a -type.set-generated-target-prefix IMPORT_LIB : <toolset>gcc : lib ; -type.set-generated-target-suffix IMPORT_LIB : <toolset>gcc : dll.a ; - -type.register SHARED_LIB : so dll dylib : LIB ; - -# Both mingw and cygwin use libxxx.dll naming scheme. -# On Linux, use "lib" prefix -type.set-generated-target-prefix SHARED_LIB : : lib ; -# But don't use it on windows -type.set-generated-target-prefix SHARED_LIB : <target-os>windows : "" ; -# But use it again on mingw -type.set-generated-target-prefix SHARED_LIB : <toolset>gcc <target-os>windows : lib ; -# And use 'cyg' on cygwin -type.set-generated-target-prefix SHARED_LIB : <target-os>cygwin : cyg ; - - -type.set-generated-target-suffix SHARED_LIB : <target-os>windows : dll ; -type.set-generated-target-suffix SHARED_LIB : <target-os>cygwin : dll ; -type.set-generated-target-suffix SHARED_LIB : <target-os>darwin : dylib ; - -type SEARCHED_LIB : : LIB ; -# This is needed so that when we create a target of SEARCHED_LIB -# type, there's no prefix or suffix automatically added. -type.set-generated-target-prefix SEARCHED_LIB : : "" ; -type.set-generated-target-suffix SEARCHED_LIB : : "" ; diff --git a/jam-files/boost-build/tools/types/lib.py b/jam-files/boost-build/tools/types/lib.py deleted file mode 100644 index d0ec1fb5..00000000 --- a/jam-files/boost-build/tools/types/lib.py +++ /dev/null @@ -1,77 +0,0 @@ -# Status: ported -# Base revision: 64456. -# Copyright David Abrahams 2004. -# Copyright Vladimir Prus 2010. -# Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import b2.build.type as type - -# The following naming scheme is used for libraries. 
-# -# On *nix: -# libxxx.a static library -# libxxx.so shared library -# -# On windows (msvc) -# libxxx.lib static library -# xxx.dll DLL -# xxx.lib import library -# -# On windows (mingw): -# libxxx.a static library -# libxxx.dll DLL -# libxxx.dll.a import library -# -# On cygwin i.e. <target-os>cygwin -# libxxx.a static library -# cygxxx.dll DLL -# libxxx.dll.a import library -# - -type.register('LIB') - -# FIXME: should not register both extensions on both platforms. -type.register('STATIC_LIB', ['a', 'lib'], 'LIB') - -# The 'lib' prefix is used everywhere -type.set_generated_target_prefix('STATIC_LIB', [], 'lib') - -# Use '.lib' suffix for windows -type.set_generated_target_suffix('STATIC_LIB', ['<target-os>windows'], 'lib') - -# Except with gcc. -type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>windows'], 'a') - -# Use xxx.lib for import libs -type.register('IMPORT_LIB', [], 'STATIC_LIB') -type.set_generated_target_prefix('IMPORT_LIB', [], '') -type.set_generated_target_suffix('IMPORT_LIB', [], 'lib') - -# Except with gcc (mingw or cygwin), where use libxxx.dll.a -type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc'], 'lib') -type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc'], 'dll.a') - -type.register('SHARED_LIB', ['so', 'dll', 'dylib'], 'LIB') - -# Both mingw and cygwin use libxxx.dll naming scheme. -# On Linux, use "lib" prefix -type.set_generated_target_prefix('SHARED_LIB', [], 'lib') -# But don't use it on windows -type.set_generated_target_prefix('SHARED_LIB', ['<target-os>windows'], '') -# But use it again on mingw -type.set_generated_target_prefix('SHARED_LIB', ['<toolset>gcc', '<target-os>windows'], 'lib') -# And use 'cyg' on cygwin -type.set_generated_target_prefix('SHARED_LIB', ['<target-os>cygwin'], 'cyg') - - -type.set_generated_target_suffix('SHARED_LIB', ['<target-os>windows'], 'dll') -type.set_generated_target_suffix('SHARED_LIB', ['<target-os>cygwin'], 'dll') -type.set_generated_target_suffix('SHARED_LIB', ['<target-os>darwin'], 'dylib') - -type.register('SEARCHED_LIB', [], 'LIB') -# This is needed so that when we create a target of SEARCHED_LIB -# type, there's no prefix or suffix automatically added. -type.set_generated_target_prefix('SEARCHED_LIB', [], '') -type.set_generated_target_suffix('SEARCHED_LIB', [], '') diff --git a/jam-files/boost-build/tools/types/obj.jam b/jam-files/boost-build/tools/types/obj.jam deleted file mode 100644 index 6afbcaa6..00000000 --- a/jam-files/boost-build/tools/types/obj.jam +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import type ; - -type.register OBJ : o obj ; -type.set-generated-target-suffix OBJ : <target-os>windows : obj ; -type.set-generated-target-suffix OBJ : <target-os>cygwin : obj ; diff --git a/jam-files/boost-build/tools/types/obj.py b/jam-files/boost-build/tools/types/obj.py deleted file mode 100644 index e61e99a8..00000000 --- a/jam-files/boost-build/tools/types/obj.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. 
(See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): - type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN']) - type.register_type ('OBJ', ['o']) - -register () diff --git a/jam-files/boost-build/tools/types/objc.jam b/jam-files/boost-build/tools/types/objc.jam deleted file mode 100644 index 709cbd0c..00000000 --- a/jam-files/boost-build/tools/types/objc.jam +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright Rene Rivera 2008, 2010. -# Distributed under the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -import type ; -import scanner ; -import types/cpp ; - -class objc-scanner : c-scanner -{ - rule __init__ ( includes * ) - { - c-scanner.__init__ $(includes) ; - } - - rule pattern ( ) - { - return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ; - } -} - -scanner.register objc-scanner : include ; - -type.register OBJECTIVE_C : m ; -type.register OBJECTIVE_CPP : mm ; -type.set-scanner OBJECTIVE_C : objc-scanner ; -type.set-scanner OBJECTIVE_CPP : objc-scanner ; diff --git a/jam-files/boost-build/tools/types/preprocessed.jam b/jam-files/boost-build/tools/types/preprocessed.jam deleted file mode 100644 index c9187ba6..00000000 --- a/jam-files/boost-build/tools/types/preprocessed.jam +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright Steven Watanabe 2011 -# Distributed under the Boost Software License Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import type ; - -type.register PREPROCESSED_C : i : C ; -type.register PREPROCESSED_CPP : ii : CPP ; diff --git a/jam-files/boost-build/tools/types/qt.jam b/jam-files/boost-build/tools/types/qt.jam deleted file mode 100644 index 6d1dfbd4..00000000 --- a/jam-files/boost-build/tools/types/qt.jam +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Vladimir Prus 2005. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -type UI : ui ; -type QRC : qrc ; -type MOCCABLE_CPP ; -type MOCCABLE_H ; -# Result of running moc. -type MOC : moc : H ; diff --git a/jam-files/boost-build/tools/types/register.jam b/jam-files/boost-build/tools/types/register.jam deleted file mode 100644 index 203992ca..00000000 --- a/jam-files/boost-build/tools/types/register.jam +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This module's job is to automatically import all the type -# registration modules in its directory. -import type os path modules ; - -# Register the given type on the specified OSes, or on remaining OSes -# if os is not specified. This rule is injected into each of the type -# modules for the sake of convenience. -local rule type ( type : suffixes * : base-type ? : os * ) -{ - if ! [ type.registered $(type) ] - { - if ( ! 
$(os) ) || [ os.name ] in $(os) - { - type.register $(type) : $(suffixes) : $(base-type) ; - } - } -} - -.this-module's-file = [ modules.binding $(__name__) ] ; -.this-module's-dir = [ path.parent $(.this-module's-file) ] ; -.sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ; -.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ; - -# A loop over all modules in this directory -for m in $(.sibling-modules) -{ - m = [ path.basename $(m) ] ; - m = types/$(m) ; - - # Inject the type rule into the new module - IMPORT $(__name__) : type : $(m) : type ; - import $(m) ; -} - - diff --git a/jam-files/boost-build/tools/types/rsp.jam b/jam-files/boost-build/tools/types/rsp.jam deleted file mode 100644 index bdf8a7c9..00000000 --- a/jam-files/boost-build/tools/types/rsp.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -type RSP : rsp ; diff --git a/jam-files/boost-build/tools/types/rsp.py b/jam-files/boost-build/tools/types/rsp.py deleted file mode 100644 index ccb379e9..00000000 --- a/jam-files/boost-build/tools/types/rsp.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): - type.register_type ('RSP', ['rsp']) - -register () diff --git a/jam-files/boost-build/tools/unix.jam b/jam-files/boost-build/tools/unix.jam deleted file mode 100644 index 75949851..00000000 --- a/jam-files/boost-build/tools/unix.jam +++ /dev/null @@ -1,224 +0,0 @@ -# Copyright (c) 2004 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# This file implements linking semantic common to all unixes. On unix, static -# libraries must be specified in a fixed order on the linker command line. Generators -# declared there store information about the order and use it property. - -import feature ; -import "class" : new ; -import generators ; -import type ; -import set ; -import order ; -import builtin ; - -class unix-linking-generator : linking-generator -{ - import property-set ; - import type ; - import unix ; - - rule __init__ ( id - composing ? : # Specify if generator is composing. The generator will be - # composing if non-empty string is passed, or parameter is - # not given. To make generator non-composing, pass empty - # string ("") - source-types + : target-types + : - requirements * ) - { - composing ?= true ; - generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) : - $(requirements) ; - } - - rule run ( project name ? : property-set : sources + ) - { - local result = [ linking-generator.run $(project) $(name) : $(property-set) - : $(sources) ] ; - - unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; - - return $(result) ; - } - - rule generated-targets ( sources + : property-set : project name ? 
) - { - local sources2 ; - local libraries ; - for local l in $(sources) - { - if [ type.is-derived [ $(l).type ] LIB ] - { - libraries += $(l) ; - } - else - { - sources2 += $(l) ; - } - } - - sources = $(sources2) [ unix.order-libraries $(libraries) ] ; - - return [ linking-generator.generated-targets $(sources) : $(property-set) - : $(project) $(name) ] ; - } - -} - -class unix-archive-generator : archive-generator -{ - import unix ; - - rule __init__ ( id composing ? : source-types + : target-types + : - requirements * ) - { - composing ?= true ; - archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) : - $(requirements) ; - } - - rule run ( project name ? : property-set : sources + ) - { - local result = [ archive-generator.run $(project) $(name) : $(property-set) - : $(sources) ] ; - - unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; - - return $(result) ; - - } -} - -class unix-searched-lib-generator : searched-lib-generator -{ - import unix ; - rule __init__ ( * : * ) - { - generator.__init__ - $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule optional-properties ( ) - { - return $(self.requirements) ; - } - - rule run ( project name ? : property-set : sources * ) - { - local result = [ searched-lib-generator.run $(project) $(name) - : $(property-set) : $(sources) ] ; - - unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; - - return $(result) ; - } -} - -class unix-prebuilt-lib-generator : generator -{ - import unix ; - rule __init__ ( * : * ) - { - generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - - rule run ( project name ? : property-set : sources * ) - { - local f = [ $(property-set).get <file> ] ; - unix.set-library-order-aux $(f) : $(sources) ; - return $(f) $(sources) ; - } -} - -generators.register - [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB - : <file> <toolset>unix ] ; - -generators.override unix.prebuilt : builtin.lib-generator ; - - -# Declare generators -generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE - : <toolset>unix ] ; - -generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB - : <toolset>unix ] ; - -generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB - : <toolset>unix ] ; - -generators.register [ new unix-searched-lib-generator - unix.searched-lib-generator : : SEARCHED_LIB : <toolset>unix ] ; - - -# The derived toolset must specify their own actions. 
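# For example, vacpp.jam (further down in this tree) does
#   toolset.inherit vacpp : unix ;
# and then provides its own vacpp.link, vacpp.link.dll and vacpp.archive
# actions in place of the empty placeholders below.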
-actions link { -} - -actions link.dll { -} - -actions archive { -} - -actions searched-lib-generator { -} - -actions prebuilt { -} - - - - - -.order = [ new order ] ; - -rule set-library-order-aux ( from * : to * ) -{ - for local f in $(from) - { - for local t in $(to) - { - if $(f) != $(t) - { - $(.order).add-pair $(f) $(t) ; - } - } - } -} - -rule set-library-order ( sources * : property-set : result * ) -{ - local used-libraries ; - local deps = [ $(property-set).dependency ] ; - for local l in $(sources) $(deps:G=) - { - if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ] - { - used-libraries += $(l) ; - } - } - - local created-libraries ; - for local l in $(result) - { - if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ] - { - created-libraries += $(l) ; - } - } - - created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ; - set-library-order-aux $(created-libraries) : $(used-libraries) ; -} - -rule order-libraries ( libraries * ) -{ - local r = [ $(.order).order $(libraries) ] ; - return $(r) ; -} -
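# Summary sketch of the ordering scheme (illustrative wording only):
# set-library-order records a "comes before" pair from every library created
# by a generator to every library it uses, so order-libraries can later emit,
# say, libfoo ahead of libbar on the linker command line when foo depends on
# bar, which is the fixed ordering static libraries require on unix linkers.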
\ No newline at end of file diff --git a/jam-files/boost-build/tools/unix.py b/jam-files/boost-build/tools/unix.py deleted file mode 100644 index d409c2e4..00000000 --- a/jam-files/boost-build/tools/unix.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) 2004 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -""" This file implements linking semantics common to all unixes. On unix, static - libraries must be specified in a fixed order on the linker command line. Generators - declared there store information about the order and use it properly. -""" - -import builtin -from b2.build import generators, type -from b2.util.utility import * -from b2.util import set, sequence - -class UnixLinkingGenerator (builtin.LinkingGenerator): - - def __init__ (self, id, composing, source_types, target_types, requirements): - builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements) - - def run (self, project, name, prop_set, sources): - result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources) - if result: - set_library_order (project.manager (), sources, prop_set, result [1]) - - return result - - def generated_targets (self, sources, prop_set, project, name): - sources2 = [] - libraries = [] - for l in sources: - if type.is_derived (l.type (), 'LIB'): - libraries.append (l) - - else: - sources2.append (l) - - sources = sources2 + order_libraries (libraries) - - return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name) - - -class UnixArchiveGenerator (builtin.ArchiveGenerator): - def __init__ (self, id, composing, source_types, target_types_and_names, requirements): - builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - - def run (self, project, name, prop_set, sources): - result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources) - set_library_order(project.manager(), sources, prop_set, result) - return result - -class UnixSearchedLibGenerator (builtin.SearchedLibGenerator): - - def __init__ (self): - builtin.SearchedLibGenerator.__init__ (self) - - def optional_properties (self): - return self.requirements () - - def run (self, project, name, prop_set, sources, multiple): - result = SearchedLibGenerator.run (project, name, prop_set, sources, multiple) - - set_library_order (sources, prop_set, result) - - return result - -class UnixPrebuiltLibGenerator (generators.Generator): - def __init__ (self, id, composing, source_types, target_types_and_names, requirements): - generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - - def run (self, project, name, prop_set, sources, multiple): - f = prop_set.get ('<file>') - set_library_order_aux (f, sources) - return (f, sources) - -### # The derived toolset must specify their own rules and actions. -# FIXME: restore? 
-# action.register ('unix.prebuilt', None, None) - - -generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['<file>', '<toolset>unix'])) - - - - - -### # Declare generators -### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE -### : <toolset>unix ] ; -generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['<toolset>unix'])) - -### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB -### : <toolset>unix ] ; -### -### generators.register [ new UnixSearchedLibGenerator -### unix.SearchedLibGenerator : : SEARCHED_LIB : <toolset>unix ] ; -### -### -### # The derived toolset must specify their own actions. -### actions link { -### } -### -### actions link.dll { -### } - -def unix_archive (manager, targets, sources, properties): - pass - -# FIXME: restore? -#action.register ('unix.archive', unix_archive, ['']) - -### actions searched-lib-generator { -### } -### -### actions prebuilt { -### } - - -from b2.util.order import Order -__order = Order () - -def set_library_order_aux (from_libs, to_libs): - for f in from_libs: - for t in to_libs: - if f != t: - __order.add_pair (f, t) - -def set_library_order (manager, sources, prop_set, result): - used_libraries = [] - deps = prop_set.dependency () - - sources.extend(d.value() for d in deps) - sources = sequence.unique(sources) - - for l in sources: - if l.type () and type.is_derived (l.type (), 'LIB'): - used_libraries.append (l) - - created_libraries = [] - for l in result: - if l.type () and type.is_derived (l.type (), 'LIB'): - created_libraries.append (l) - - created_libraries = set.difference (created_libraries, used_libraries) - set_library_order_aux (created_libraries, used_libraries) - -def order_libraries (libraries): - return __order.order (libraries) - diff --git a/jam-files/boost-build/tools/vacpp.jam b/jam-files/boost-build/tools/vacpp.jam deleted file mode 100644 index f4080fc0..00000000 --- a/jam-files/boost-build/tools/vacpp.jam +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Toon Knapen 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# -# Boost.Build V2 toolset for the IBM XL C++ compiler -# - -import toolset : flags ; -import feature ; -import common ; -import generators ; -import os ; - -feature.extend toolset : vacpp ; -toolset.inherit vacpp : unix ; -generators.override vacpp.prebuilt : builtin.prebuilt ; -generators.override vacpp.searched-lib-generator : searched-lib-generator ; - -# Configure the vacpp toolset -rule init ( version ? 
: command * : options * ) -{ - local condition = [ - common.check-init-parameters vacpp : version $(version) ] ; - - command = [ common.get-invocation-command vacpp : xlC - : $(command) : "/usr/vacpp/bin/xlC" ] ; - - common.handle-options vacpp : $(condition) : $(command) : $(options) ; -} - -# Declare generators -generators.register-c-compiler vacpp.compile.c : C : OBJ : <toolset>vacpp ; -generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : <toolset>vacpp ; - -# Allow C++ style comments in C files -flags vacpp CFLAGS : -qcpluscmt ; - -# Declare flags -flags vacpp CFLAGS <optimization>off : -qNOOPTimize ; -flags vacpp CFLAGS <optimization>speed : -O3 -qstrict ; -flags vacpp CFLAGS <optimization>space : -O2 -qcompact ; - -# Discretionary inlining (not recommended) -flags vacpp CFLAGS <inlining>off : -qnoinline ; -flags vacpp CFLAGS <inlining>on : -qinline ; -#flags vacpp CFLAGS <inlining>full : -qinline ; -flags vacpp CFLAGS <inlining>full : ; - -# Exception handling -flags vacpp C++FLAGS <exception-handling>off : -qnoeh ; -flags vacpp C++FLAGS <exception-handling>on : -qeh ; - -# Run-time Type Identification -flags vacpp C++FLAGS <rtti>off : -qnortti ; -flags vacpp C++FLAGS <rtti>on : -qrtti ; - -# Enable 64-bit memory addressing model -flags vacpp CFLAGS <address-model>64 : -q64 ; -flags vacpp LINKFLAGS <address-model>64 : -q64 ; -flags vacpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ; - -# Use absolute path when generating debug information -flags vacpp CFLAGS <debug-symbols>on : -g -qfullpath ; -flags vacpp LINKFLAGS <debug-symbols>on : -g -qfullpath ; -flags vacpp LINKFLAGS <debug-symbols>off : -s ; - -if [ os.name ] = AIX -{ - flags vacpp.compile C++FLAGS : -qfuncsect ; - - # The -bnoipath strips the prepending (relative) path of libraries from - # the loader section in the target library or executable. Hence, during - # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded - # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without - # this option, the prepending (relative) path + library name is - # hard-coded in the loader section, causing *only* this path to be - # searched during load-time. Note that the AIX linker does not have an - # -soname equivalent, this is as close as it gets. - # - # The above options are definately for AIX 5.x, and most likely also for - # AIX 4.x and AIX 6.x. For details about the AIX linker see: - # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf - # - flags vacpp.link LINKFLAGS <link>shared : -bnoipath ; - - # Run-time linking - flags vacpp.link EXE-LINKFLAGS <link>shared : -brtl ; -} -else -{ - # Linux PPC - flags vacpp.compile CFLAGS <link>shared : -qpic=large ; - flags vacpp FINDLIBS : rt ; -} - -# Profiling -flags vacpp CFLAGS <profiling>on : -pg ; -flags vacpp LINKFLAGS <profiling>on : -pg ; - -flags vacpp.compile OPTIONS <cflags> ; -flags vacpp.compile.c++ OPTIONS <cxxflags> ; -flags vacpp DEFINES <define> ; -flags vacpp UNDEFS <undef> ; -flags vacpp HDRS <include> ; -flags vacpp STDHDRS <sysinclude> ; -flags vacpp.link OPTIONS <linkflags> ; -flags vacpp ARFLAGS <arflags> ; - -flags vacpp LIBPATH <library-path> ; -flags vacpp NEEDLIBS <library-file> ; -flags vacpp FINDLIBS <find-shared-library> ; -flags vacpp FINDLIBS <find-static-library> ; - -# Select the compiler name according to the threading model. 
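# (The _r-suffixed drivers chosen for <threading>multi below are IBM's
# thread-safe compiler invocations; plain xlc/xlC serve single-threaded builds.)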
-flags vacpp VA_C_COMPILER <threading>single : xlc ; -flags vacpp VA_C_COMPILER <threading>multi : xlc_r ; -flags vacpp VA_CXX_COMPILER <threading>single : xlC ; -flags vacpp VA_CXX_COMPILER <threading>multi : xlC_r ; - -SPACE = " " ; - -flags vacpp.link.dll HAVE_SONAME <target-os>linux : "" ; - -actions vacpp.link bind NEEDLIBS -{ - $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) -} - -actions vacpp.link.dll bind NEEDLIBS -{ - xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) -} - -actions vacpp.compile.c -{ - $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" -} - -actions vacpp.compile.c++ -{ - $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" -} - -actions updated together piecemeal vacpp.archive -{ - ar $(ARFLAGS) ru "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/whale.jam b/jam-files/boost-build/tools/whale.jam deleted file mode 100644 index 9335ff0c..00000000 --- a/jam-files/boost-build/tools/whale.jam +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Vladimir Prus 2002-2005. - -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# This module implements support for Whale/Dolphin/WD parser/lexer tools. -# See http://www.cs.queensu.ca/home/okhotin/whale/ for details. -# -# There are three interesting target types: -# - WHL (the parser sources), that are converted to CPP and H -# - DLP (the lexer sources), that are converted to CPP and H -# - WD (combined parser/lexer sources), that are converted to WHL + DLP - -import type ; -import generators ; -import path ; -import "class" : new ; -import errors ; - -rule init ( path # path the Whale/Dolphin/WD binaries - ) -{ - if $(.configured) && $(.path) != $(path) - { - errors.user-error "Attempt to reconfigure Whale support" : - "Previously configured with path \"$(.path:E=<empty>)\"" : - "Now configuring with path \"$(path:E=<empty>)\"" ; - - } - .configured = true ; - .path = $(path) ; - - .whale = [ path.join $(path) whale ] ; - .dolphin = [ path.join $(path) dolphin ] ; - .wd = [ path.join $(path) wd ] ; -} - - -# Declare the types. -type.register WHL : whl ; -type.register DLP : dlp ; -type.register WHL_LR0 : lr0 ; -type.register WD : wd ; - -# Declare standard generators. -generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ; -generators.register-standard whale.dolphin : DLP : CPP H ; -generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ; - -# The conversions defines above a ambiguious when we generated CPP from WD. -# We can either go via WHL type, or via DLP type. -# The following custom generator handles this by running both conversions. - -class wd-to-cpp : generator -{ - rule __init__ ( * : * : * ) - { - generator.__init__ $(1) : $(2) : $(3) ; - } - - rule run ( project name ? : property-set : source * ) - { - if ! $(source[2]) - { - local new-sources ; - if ! 
[ $(source).type ] in WHL DLP - { - local r1 = [ generators.construct $(project) $(name) - : WHL : $(property-set) : $(source) ] ; - local r2 = [ generators.construct $(project) $(name) - : DLP : $(property-set) : $(source) ] ; - - new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ; - } - else - { - new-sources = $(source) ; - } - - local result ; - for local i in $(new-sources) - { - local t = [ generators.construct $(project) $(name) : CPP - : $(property-set) : $(i) ] ; - result += $(t[2-]) ; - } - return $(result) ; - } - } - -} - - -generators.override whale.wd-to-cpp : whale.whale ; -generators.override whale.wd-to-cpp : whale.dolphin ; - - -generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ; - - -actions whale -{ - $(.whale) -d $(<[1]:D) $(>) -} - -actions dolphin -{ - $(.dolphin) -d $(<[1]:D) $(>) -} - -actions wd -{ - $(.wd) -d $(<[1]:D) -g $(>) -} - diff --git a/jam-files/boost-build/tools/xlf.jam b/jam-files/boost-build/tools/xlf.jam deleted file mode 100644 index e7fcc608..00000000 --- a/jam-files/boost-build/tools/xlf.jam +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# -# toolset configuration for the IBM Fortran compiler (xlf) -# - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? : command * : options * ) -{ -} - -# Declare flags and action for compilation -flags xlf OPTIONS <optimization>off : -O0 ; -flags xlf OPTIONS <optimization>speed : -O3 ; -flags xlf OPTIONS <optimization>space : -Os ; - -flags xlf OPTIONS <debug-symbols>on : -g ; -flags xlf OPTIONS <profiling>on : -pg ; - -flags xlf DEFINES <define> ; -flags xlf INCLUDES <include> ; - -rule compile-fortran -{ -} - -actions compile-fortran -{ - xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)" -} - -generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ; diff --git a/jam-files/boost-build/tools/xsltproc-config.jam b/jam-files/boost-build/tools/xsltproc-config.jam deleted file mode 100644 index de54a2eb..00000000 --- a/jam-files/boost-build/tools/xsltproc-config.jam +++ /dev/null @@ -1,37 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for Python tools and librries. To use, just import this module. - -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ - local xsltproc-path = [ GLOB [ modules.peek : PATH ] "C:\\Boost\\bin" : xsltproc\.exe ] ; - xsltproc-path = $(xsltproc-path[1]) ; - - if $(xsltproc-path) - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using xsltproc ":" $(xsltproc-path) ; - } - using xsltproc : $(xsltproc-path) ; - } -} -else -{ - local xsltproc-path = [ GLOB [ modules.peek : PATH ] : xsltproc ] ; - xsltproc-path = $(xsltproc-path[1]) ; - - if $(xsltproc-path) - { - if --debug-configuration in [ modules.peek : ARGV ] - { - ECHO "notice:" using xsltproc ":" $(xsltproc-path) ; - } - using xsltproc : $(xsltproc-path) ; - } -} diff --git a/jam-files/boost-build/tools/xsltproc.jam b/jam-files/boost-build/tools/xsltproc.jam deleted file mode 100644 index 96f5170b..00000000 --- a/jam-files/boost-build/tools/xsltproc.jam +++ /dev/null @@ -1,194 +0,0 @@ -# Copyright (C) 2003 Doug Gregor. 
Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -# This module defines rules to apply an XSLT stylesheet to an XML file using the -# xsltproc driver, part of libxslt. -# -# Note: except for 'init', this modules does not provide any rules for end -# users. - -import feature ; -import regex ; -import sequence ; -import common ; -import os ; -import modules ; -import path ; -import errors ; - -feature.feature xsl:param : : free ; -feature.feature xsl:path : : free ; -feature.feature catalog : : free ; - - -# Initialize xsltproc support. The parameters are: -# xsltproc: The xsltproc executable -# -rule init ( xsltproc ? ) -{ - if $(xsltproc) - { - modify-config ; - .xsltproc = $(xsltproc) ; - check-xsltproc ; - } -} - -rule freeze-config ( ) -{ - if ! $(.config-frozen) - { - .config-frozen = true ; - .xsltproc ?= [ modules.peek : XSLTPROC ] ; - .xsltproc ?= xsltproc ; - check-xsltproc ; - .is-cygwin = [ .is-cygwin $(.xsltproc) ] ; - } -} - -rule modify-config -{ - if $(.config-frozen) - { - errors.user-error "xsltproc: Cannot change xsltproc command after it has been used." ; - } -} - -rule check-xsltproc ( ) -{ - if $(.xsltproc) - { - local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] ; - if $(status[2]) != "0" - { - errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ; - } - } -} - -# Returns a non-empty string if a cygwin xsltproc binary was specified. -rule is-cygwin ( ) -{ - freeze-config ; - return $(.is-cygwin) ; -} - -rule .is-cygwin ( xsltproc ) -{ - if [ os.on-windows ] - { - local file = [ path.make [ modules.binding $(__name__) ] ] ; - local dir = [ path.native - [ path.join [ path.parent $(file) ] xsltproc ] ] ; - if [ os.name ] = CYGWIN - { - dir = $(dir:W) ; - } - local command = - "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ; - local status = [ SHELL $(command) : no-output : exit-status ] ; - if $(status[2]) != "0" - { - return true ; - } - } -} - -rule compute-xslt-flags ( target : properties * ) -{ - local flags ; - - # Raw flags. - flags += [ feature.get-values <flags> : $(properties) ] ; - - # Translate <xsl:param> into command line flags. - for local param in [ feature.get-values <xsl:param> : $(properties) ] - { - local namevalue = [ regex.split $(param) "=" ] ; - flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ; - } - - # Translate <xsl:path>. - for local path in [ feature.get-values <xsl:path> : $(properties) ] - { - flags += --path \"$(path:G=)\" ; - } - - # Take care of implicit dependencies. - local other-deps ; - for local dep in [ feature.get-values <implicit-dependency> : $(properties) ] - { - other-deps += [ $(dep:G=).creating-subvariant ] ; - } - - local implicit-target-directories ; - for local dep in [ sequence.unique $(other-deps) ] - { - implicit-target-directories += [ $(dep).all-target-directories ] ; - } - - for local dir in $(implicit-target-directories) - { - flags += --path \"$(dir:T)\" ; - } - - return $(flags) ; -} - - -local rule .xsltproc ( target : source stylesheet : properties * : dirname ? 
: action ) -{ - freeze-config ; - STYLESHEET on $(target) = $(stylesheet) ; - FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ; - NAME on $(target) = $(.xsltproc) ; - - for local catalog in [ feature.get-values <catalog> : $(properties) ] - { - CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : $(catalog:T) ] ; - } - - if [ os.on-windows ] && ! [ is-cygwin ] - { - action = $(action).windows ; - } - - $(action) $(target) : $(source) ; -} - - -rule xslt ( target : source stylesheet : properties * ) -{ - return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : xslt-xsltproc ] ; -} - - -rule xslt-dir ( target : source stylesheet : properties * : dirname ) -{ - return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : $(dirname) : xslt-xsltproc-dir ] ; -} - -actions xslt-xsltproc.windows -{ - $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)" -} - - -actions xslt-xsltproc bind STYLESHEET -{ - $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)" -} - - -actions xslt-xsltproc-dir.windows bind STYLESHEET -{ - $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)" -} - - -actions xslt-xsltproc-dir bind STYLESHEET -{ - $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)" -} diff --git a/jam-files/boost-build/tools/xsltproc/included.xsl b/jam-files/boost-build/tools/xsltproc/included.xsl deleted file mode 100644 index ef86394a..00000000 --- a/jam-files/boost-build/tools/xsltproc/included.xsl +++ /dev/null @@ -1,11 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- - Copyright (c) 2010 Steven Watanabe - - Distributed under the Boost Software License, Version 1.0. - (See accompanying file LICENSE_1_0.txt or copy at - http://www.boost.org/LICENSE_1_0.txt) - --> -<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" - version="1.0"> -</xsl:stylesheet> diff --git a/jam-files/boost-build/tools/xsltproc/test.xml b/jam-files/boost-build/tools/xsltproc/test.xml deleted file mode 100644 index 57c8ba18..00000000 --- a/jam-files/boost-build/tools/xsltproc/test.xml +++ /dev/null @@ -1,2 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<root/> diff --git a/jam-files/boost-build/tools/xsltproc/test.xsl b/jam-files/boost-build/tools/xsltproc/test.xsl deleted file mode 100644 index a142c91d..00000000 --- a/jam-files/boost-build/tools/xsltproc/test.xsl +++ /dev/null @@ -1,12 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- - Copyright (c) 2010 Steven Watanabe - - Distributed under the Boost Software License, Version 1.0. - (See accompanying file LICENSE_1_0.txt or copy at - http://www.boost.org/LICENSE_1_0.txt) - --> -<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" - version="1.0"> - <xsl:include href="included.xsl"/> -</xsl:stylesheet> diff --git a/jam-files/boost-build/tools/zlib.jam b/jam-files/boost-build/tools/zlib.jam deleted file mode 100644 index f9138fd5..00000000 --- a/jam-files/boost-build/tools/zlib.jam +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. 
(See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Supports the zlib library -# -# After 'using zlib', the following targets are available: -# -# /zlib//zlib -- The zlib library - - -# In addition to direct purpose of supporting zlib, this module also -# serves as canonical example of how third-party condiguration works -# in Boost.Build. The operation is as follows -# -# - For each 'using zlib : condition ... : ...' we create a target alternative -# for zlib, with the specified condition. -# - There's one target alternative for 'zlib' with no specific condition -# properties. -# -# Two invocations of 'using zlib' with the same condition but different -# properties are not permitted, e.g.: -# -# using zlib : condition <target-os>windows : include foo ; -# using zlib : condition <target-os>windows : include bar ; -# -# is in error. One exception is for empty condition, 'using' without any -# parameters is overridable. That is: -# -# using zlib ; -# using zlib : include foo ; -# -# Is OK then the first 'using' is ignored. Likewise if the order of the statements -# is reversed. -# -# When 'zlib' target is built, a target alternative is selected as usual for -# Boost.Build. The selected alternative is a custom target class, which: -# -# - calls ac.find-include-path to find header path. If explicit path is provided -# in 'using', only that path is checked, and if no header is found there, error -# is emitted. Otherwise, we check a directory specified using ZLIB_INCLUDE -# environment variable, and failing that, in standard directories. -# [TODO: document sysroot handling] -# - calls ac.find-library to find the library, in an identical fashion. -# - -import project ; -import ac ; -import errors ; -import "class" : new ; -import targets ; - -project.initialize $(__name__) ; -project = [ project.current ] ; -project zlib ; - -header = zlib.h ; -names = z zlib zll zdll ; - -.default-alternative = [ new ac-library zlib : $(project) ] ; -$(.default-alternative).set-header $(header) ; -$(.default-alternative).set-default-names $(names) ; -targets.main-target-alternative $(.default-alternative) ; - -rule init ( * : * ) -{ - if ! $(condition) - { - # Special case the no-condition case so that 'using' without parameters - # can mix with more specific 'using'. - $(.default-alternative).reconfigure $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - } - else - { - # FIXME: consider if we should allow overriding definitions for a given - # condition -- e.g. project-config.jam might want to override whatever is - # in user-config.jam. - local mt = [ new ac-library zlib : $(project) - : $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; - $(mt).set-header $(header) ; - $(mt).set-default-names $(names) ; - targets.main-target-alternative $(mt) ; - } -} - - - - - - diff --git a/jam-files/boost-build/user-config.jam b/jam-files/boost-build/user-config.jam deleted file mode 100644 index fbbf13fd..00000000 --- a/jam-files/boost-build/user-config.jam +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2003, 2005 Douglas Gregor -# Copyright 2004 John Maddock -# Copyright 2002, 2003, 2004, 2007 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This file is used to configure your Boost.Build installation. 
You can modify -# this file in place, or you can place it in a permanent location so that it -# does not get overwritten should you get a new version of Boost.Build. See: -# -# http://www.boost.org/boost-build2/doc/html/bbv2/overview/configuration.html -# -# for documentation about possible permanent locations. - -# This file specifies which toolsets (C++ compilers), libraries, and other -# tools are available. Often, you should be able to just uncomment existing -# example lines and adjust them to taste. The complete list of supported tools, -# and configuration instructions can be found at: -# -# http://boost.org/boost-build2/doc/html/bbv2/reference/tools.html -# - -# This file uses Jam language syntax to describe available tools. Mostly, -# there are 'using' lines, that contain the name of the used tools, and -# parameters to pass to those tools -- where paremeters are separated by -# semicolons. Important syntax notes: -# -# - Both ':' and ';' must be separated from other tokens by whitespace -# - The '\' symbol is a quote character, so when specifying Windows paths you -# should use '/' or '\\' instead. -# -# More details about the syntax can be found at: -# -# http://boost.org/boost-build2/doc/html/bbv2/advanced.html#bbv2.advanced.jam_language -# - -# ------------------ -# GCC configuration. -# ------------------ - -# Configure gcc (default version). -# using gcc ; - -# Configure specific gcc version, giving alternative name to use. -# using gcc : 3.2 : g++-3.2 ; - - -# ------------------- -# MSVC configuration. -# ------------------- - -# Configure msvc (default version, searched for in standard locations and PATH). -# using msvc ; - -# Configure specific msvc version (searched for in standard locations and PATH). -# using msvc : 8.0 ; - - -# ---------------------- -# Borland configuration. -# ---------------------- -# using borland ; - - -# ---------------------- -# STLPort configuration. -# ---------------------- - -# Configure specifying location of STLPort headers. Libraries must be either -# not needed or available to the compiler by default. -# using stlport : : /usr/include/stlport ; - -# Configure specifying location of both headers and libraries explicitly. -# using stlport : : /usr/include/stlport /usr/lib ; - - -# ----------------- -# QT configuration. -# ----------------- - -# Configure assuming QTDIR gives the installation prefix. -# using qt ; - -# Configure with an explicit installation prefix. -# using qt : /usr/opt/qt ; - -# --------------------- -# Python configuration. -# --------------------- - -# Configure specific Python version. 
-# using python : 3.1 : /usr/bin/python3 : /usr/include/python3.1 : /usr/lib ; diff --git a/jam-files/boost-build/util/__init__.py b/jam-files/boost-build/util/__init__.py deleted file mode 100644 index f80fe70e..00000000 --- a/jam-files/boost-build/util/__init__.py +++ /dev/null @@ -1,136 +0,0 @@ - -import bjam -import re -import types - -# Decorator the specifies bjam-side prototype for a Python function -def bjam_signature(s): - - def wrap(f): - f.bjam_signature = s - return f - - return wrap - -def metatarget(f): - - f.bjam_signature = (["name"], ["sources", "*"], ["requirements", "*"], - ["default_build", "*"], ["usage_requirements", "*"]) - return f - -class cached(object): - - def __init__(self, function): - self.function = function - self.cache = {} - - def __call__(self, *args): - try: - return self.cache[args] - except KeyError: - v = self.function(*args) - self.cache[args] = v - return v - - def __get__(self, instance, type): - return types.MethodType(self, instance, type) - -def unquote(s): - if s and s[0] == '"' and s[-1] == '"': - return s[1:-1] - else: - return s - -_extract_jamfile_and_rule = re.compile("(Jamfile<.*>)%(.*)") - -def qualify_jam_action(action_name, context_module): - - if action_name.startswith("###"): - # Callable exported from Python. Don't touch - return action_name - elif _extract_jamfile_and_rule.match(action_name): - # Rule is already in indirect format - return action_name - else: - ix = action_name.find('.') - if ix != -1 and action_name[:ix] == context_module: - return context_module + '%' + action_name[ix+1:] - - return context_module + '%' + action_name - - -def set_jam_action(name, *args): - - m = _extract_jamfile_and_rule.match(name) - if m: - args = ("set-update-action-in-module", m.group(1), m.group(2)) + args - else: - args = ("set-update-action", name) + args - - return bjam.call(*args) - - -def call_jam_function(name, *args): - - m = _extract_jamfile_and_rule.match(name) - if m: - args = ("call-in-module", m.group(1), m.group(2)) + args - return bjam.call(*args) - else: - return bjam.call(*((name,) + args)) - -__value_id = 0 -__python_to_jam = {} -__jam_to_python = {} - -def value_to_jam(value, methods=False): - """Makes a token to refer to a Python value inside Jam language code. - - The token is merely a string that can be passed around in Jam code and - eventually passed back. For example, we might want to pass PropertySet - instance to a tag function and it might eventually call back - to virtual_target.add_suffix_and_prefix, passing the same instance. - - For values that are classes, we'll also make class methods callable - from Jam. - - Note that this is necessary to make a bit more of existing Jamfiles work. - This trick should not be used to much, or else the performance benefits of - Python port will be eaten. - """ - - global __value_id - - r = __python_to_jam.get(value, None) - if r: - return r - - exported_name = '###_' + str(__value_id) - __value_id = __value_id + 1 - __python_to_jam[value] = exported_name - __jam_to_python[exported_name] = value - - if methods and type(value) == types.InstanceType: - for field_name in dir(value): - field = getattr(value, field_name) - if callable(field) and not field_name.startswith("__"): - bjam.import_rule("", exported_name + "." 
+ field_name, field) - - return exported_name - -def record_jam_to_value_mapping(jam_value, python_value): - __jam_to_python[jam_value] = python_value - -def jam_to_value_maybe(jam_value): - - if type(jam_value) == type(""): - return __jam_to_python.get(jam_value, jam_value) - else: - return jam_value - -def stem(filename): - i = filename.find('.') - if i != -1: - return filename[0:i] - else: - return filename diff --git a/jam-files/boost-build/util/assert.jam b/jam-files/boost-build/util/assert.jam deleted file mode 100644 index abedad52..00000000 --- a/jam-files/boost-build/util/assert.jam +++ /dev/null @@ -1,336 +0,0 @@ -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import errors ; -import modules ; - - -################################################################################ -# -# Private implementation details. -# -################################################################################ - -# Rule added as a replacement for the regular Jam = operator but which does not -# ignore trailing empty string elements. -# -local rule exact-equal-test ( lhs * : rhs * ) -{ - local lhs_extended = $(lhs) xxx ; - local rhs_extended = $(rhs) xxx ; - if $(lhs_extended) = $(rhs_extended) - { - return true ; - } -} - - -# Two lists are considered set-equal if they contain the same elements, ignoring -# duplicates and ordering. -# -local rule set-equal-test ( set1 * : set2 * ) -{ - if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) ) - { - return true ; - } -} - - -################################################################################ -# -# Public interface. -# -################################################################################ - -# Assert the equality of A and B, ignoring trailing empty string elements. -# -rule equal ( a * : b * ) -{ - if $(a) != $(b) - { - errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" - (ignoring trailing empty strings) ; - } -} - - -# Assert that the result of calling RULE-NAME on the given arguments has a false -# logical value (is either an empty list or all empty strings). -# -rule false ( rule-name args * : * ) -{ - local result ; - module [ CALLER_MODULE ] - { - modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) - : $(7) : $(8) : $(9) ] ; - } - - if $(result) - { - errors.error-skip-frames 3 assertion failure: Expected false result from - "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) : - $(5) : $(6) : $(7) : $(8) : $(9) ] "]" : Got: "[" \"$(result)\" "]" ; - } -} - - -# Assert that ELEMENT is present in LIST. -# -rule "in" ( element : list * ) -{ - if ! $(element) in $(list) - { - errors.error-skip-frames 3 assertion failure: Expected \"$(element)\" in - "[" \"$(list)\" "]" ; - } -} - - -# Assert the inequality of A and B, ignoring trailing empty string elements. -# -rule not-equal ( a * : b * ) -{ - if $(a) = $(b) - { - errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" - (ignoring trailing empty strings) ; - } -} - - -# Assert that ELEMENT is not present in LIST. -# -rule not-in ( element : list * ) -{ - if $(element) in $(list) - { - errors.error-skip-frames 3 assertion failure: Did not expect - \"$(element)\" in "[" \"$(list)\" "]" ; - } -} - - -# Assert the inequality of A and B as sets. 
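A point worth spelling out about exact-equal-test above: Jam's built-in '=' on lists treats trailing empty-string elements as insignificant, which is exactly what the appended 'xxx' sentinel defeats. A rough Python model of the idea (Jam list semantics approximated by a helper; this is not bjam code):

    def _jam_equal(lhs, rhs):
        # Approximation of Jam's '=' on lists: trailing "" elements do not count.
        def strip(xs):
            xs = list(xs)
            while xs and xs[-1] == "":
                xs.pop()
            return xs
        return strip(lhs) == strip(rhs)

    def exact_equal_test(lhs, rhs):
        # Same sentinel trick as the Jam rule: a non-empty marker appended to both
        # sides keeps trailing empty strings from being silently dropped.
        return _jam_equal(list(lhs) + ["xxx"], list(rhs) + ["xxx"])

    assert _jam_equal(["a", ""], ["a"])            # trailing "" ignored by plain '='
    assert not exact_equal_test(["a", ""], ["a"])  # but visible to exact-equal-test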
-# -rule not-set-equal ( a * : b * ) -{ - if [ set-equal-test $(a) : $(b) ] - { - errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]" - and "[" \"$(b)\" "]" to not be equal as sets ; - } -} - - -# Assert that A and B are not exactly equal, not ignoring trailing empty string -# elements. -# -rule not-exact-equal ( a * : b * ) -{ - if [ exact-equal-test $(a) : $(b) ] - { - errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" ; - } -} - - -# Assert that EXPECTED is the result of calling RULE-NAME with the given -# arguments. -# -rule result ( expected * : rule-name args * : * ) -{ - local result ; - module [ CALLER_MODULE ] - { - modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) - : $(8) : $(9) ] ; - } - - if ! [ exact-equal-test $(result) : $(expected) ] - { - errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [ - errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : - $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "[" - \"$(result)\" "]" ; - } -} - - -# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored) -# to the result of calling RULE-NAME with the given arguments. Note that rules -# called this way may accept at most 8 parameters. -# -rule result-set-equal ( expected * : rule-name args * : * ) -{ - local result ; - module [ CALLER_MODULE ] - { - modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) - : $(8) : $(9) ] ; - } - - if ! [ set-equal-test $(result) : $(expected) ] - { - errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [ - errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : - $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "[" - \"$(result)\" "]" ; - } -} - - -# Assert the equality of A and B as sets. -# -rule set-equal ( a * : b * ) -{ - if ! [ set-equal-test $(a) : $(b) ] - { - errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]" - and "[" \"$(b)\" "]" to be equal as sets ; - } -} - - -# Assert that the result of calling RULE-NAME on the given arguments has a true -# logical value (is neither an empty list nor all empty strings). -# -rule true ( rule-name args * : * ) -{ - local result ; - module [ CALLER_MODULE ] - { - modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) - : $(7) : $(8) : $(9) ] ; - } - - if ! $(result) - { - errors.error-skip-frames 3 assertion failure: Expected true result from - "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) : - $(5) : $(6) : $(7) : $(8) : $(9) ] "]" ; - } -} - - -# Assert the exact equality of A and B, not ignoring trailing empty string -# elements. -# -rule exact-equal ( a * : b * ) -{ - if ! [ exact-equal-test $(a) : $(b) ] - { - errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" ; - } -} - - -# Assert that the given variable is not an empty list. -# -rule variable-not-empty ( name ) -{ - local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ; - if ! $(value)-is-not-empty - { - errors.error-skip-frames 3 assertion failure: Expected variable - \"$(name)\" not to be an empty list ; - } -} - - -rule __test__ ( ) -{ - # Helper rule used to avoid test duplication related to different list - # equality test rules. - # - local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? ) - { - local not-equality-assert = not-$(equality-assert) ; - - # When the given equality test is expected to ignore trailing empty - # strings some of the test results should be inverted. 
- local not-equality-assert-i = not-$(equality-assert) ; - if $(ignore-trailing-empty-strings) - { - not-equality-assert-i = $(equality-assert) ; - } - - $(equality-assert) : ; - $(equality-assert) "" "" : "" "" ; - $(not-equality-assert-i) : "" "" ; - $(equality-assert) x : x ; - $(not-equality-assert) : x ; - $(not-equality-assert) "" : x ; - $(not-equality-assert) "" "" : x ; - $(not-equality-assert-i) x : x "" ; - $(equality-assert) x "" : x "" ; - $(not-equality-assert) x : "" x ; - $(equality-assert) "" x : "" x ; - - $(equality-assert) 1 2 3 : 1 2 3 ; - $(not-equality-assert) 1 2 3 : 3 2 1 ; - $(not-equality-assert) 1 2 3 : 1 5 3 ; - $(not-equality-assert) 1 2 3 : 1 "" 3 ; - $(not-equality-assert) 1 2 3 : 1 1 2 3 ; - $(not-equality-assert) 1 2 3 : 1 2 2 3 ; - $(not-equality-assert) 1 2 3 : 5 6 7 ; - - # Extra variables used here just to make sure Boost Jam or Boost Build - # do not handle lists with empty strings differently depending on - # whether they are literals or stored in variables. - - local empty = ; - local empty-strings = "" "" ; - local x-empty-strings = x "" "" ; - local empty-strings-x = "" "" x ; - - $(equality-assert) : $(empty) ; - $(not-equality-assert-i) "" : $(empty) ; - $(not-equality-assert-i) "" "" : $(empty) ; - $(not-equality-assert-i) : $(empty-strings) ; - $(not-equality-assert-i) "" : $(empty-strings) ; - $(equality-assert) "" "" : $(empty-strings) ; - $(equality-assert) $(empty) : $(empty) ; - $(equality-assert) $(empty-strings) : $(empty-strings) ; - $(not-equality-assert-i) $(empty) : $(empty-strings) ; - $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ; - $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ; - $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ; - $(not-equality-assert-i) x : $(x-empty-strings) ; - $(not-equality-assert) x : $(empty-strings-x) ; - $(not-equality-assert-i) x : $(x-empty-strings) ; - $(not-equality-assert-i) x "" : $(x-empty-strings) ; - $(equality-assert) x "" "" : $(x-empty-strings) ; - $(not-equality-assert) x : $(empty-strings-x) ; - $(not-equality-assert) "" x : $(empty-strings-x) ; - $(equality-assert) "" "" x : $(empty-strings-x) ; - } - - - # --------------- - # Equality tests. - # --------------- - - run-equality-test equal : ignore-trailing-empty-strings ; - run-equality-test exact-equal ; - - - # ------------------------- - # assert.set-equal() tests. - # ------------------------- - - set-equal : ; - not-set-equal "" "" : ; - set-equal "" "" : "" ; - set-equal "" "" : "" "" ; - set-equal a b c : a b c ; - set-equal a b c : b c a ; - set-equal a b c a : a b c ; - set-equal a b c : a b c a ; - not-set-equal a b c : a b c d ; - not-set-equal a b c d : a b c ; -} diff --git a/jam-files/boost-build/util/container.jam b/jam-files/boost-build/util/container.jam deleted file mode 100644 index dd496393..00000000 --- a/jam-files/boost-build/util/container.jam +++ /dev/null @@ -1,339 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003 Rene Rivera -# Copyright 2002, 2003, 2004 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Various container classes. - -# Base for container objects. This lets us construct recursive structures. That -# is containers with containers in them, specifically so we can tell literal -# values from node values. -# -class node -{ - rule __init__ ( - value ? # Optional value to set node to initially. 
- ) - { - self.value = $(value) ; - } - - # Set the value of this node, passing nothing will clear it. - # - rule set ( value * ) - { - self.value = $(value) ; - } - - # Get the value of this node. - # - rule get ( ) - { - return $(self.value) ; - } -} - - -# A simple vector. Interface mimics the C++ std::vector and std::list, with the -# exception that indices are one (1) based to follow Jam standard. -# -# TODO: Possibly add assertion checks. -# -class vector : node -{ - import numbers ; - import utility ; - import sequence ; - - rule __init__ ( - values * # Initial contents of vector. - ) - { - node.__init__ ; - self.value = $(values) ; - } - - # Get the value of the first element. - # - rule front ( ) - { - return $(self.value[1]) ; - } - - # Get the value of the last element. - # - rule back ( ) - { - return $(self.value[-1]) ; - } - - # Get the value of the element at the given index, one based. Access to - # elements of recursive structures is supported directly. Specifying - # additional index values recursively accesses the elements as containers. - # For example: [ $(v).at 1 : 2 ] would retrieve the second element of our - # first element, assuming the first element is a container. - # - rule at ( - index # The element index, one based. - : * # Additional indices to access recursively. - ) - { - local r = $(self.value[$(index)]) ; - if $(2) - { - r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; - } - return $(r) ; - } - - # Get the value contained in the given element. This has the same - # functionality and interface as "at" but in addition gets the value of the - # referenced element, assuming it is a "node". - # - rule get-at ( - index # The element index, one based. - : * # Additional indices to access recursively. - ) - { - local r = $(self.value[$(index)]) ; - if $(2) - { - r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; - } - return [ $(r).get ] ; - } - - # Insert the given value into the front of the vector pushing the rest of - # the elements back. - # - rule push-front ( - value # Value to become first element. - ) - { - self.value = $(value) $(self.value) ; - } - - # Remove the front element from the vector. Does not return the value. No - # effect if vector is empty. - # - rule pop-front ( ) - { - self.value = $(self.value[2-]) ; - } - - # Add the given value at the end of the vector. - # - rule push-back ( - value # Value to become back element. - ) - { - self.value += $(value) ; - } - - # Remove the back element from the vector. Does not return the value. No - # effect if vector is empty. - # - rule pop-back ( ) - { - self.value = $(self.value[1--2]) ; - } - - # Insert the given value at the given index, one based. The values at and to - # the right of the index are pushed back to make room for the new value. - # If the index is passed the end of the vector the element is added to the - # end. - # - rule insert ( - index # The index to insert at, one based. - : value # The value to insert. - ) - { - local left = $(self.value[1-$(index)]) ; - local right = $(self.value[$(index)-]) ; - if $(right)-is-not-empty - { - left = $(left[1--2]) ; - } - self.value = $(left) $(value) $(right) ; - } - - # Remove one or more elements from the vector. The range is inclusive, and - # not specifying an end is equivalent to the [start, start] range. - # - rule erase ( - start # Index of first element to remove. - end ? # Optional, index of last element to remove. 
- ) - { - end ?= $(start) ; - local left = $(self.value[1-$(start)]) ; - left = $(left[1--2]) ; - local right = $(self.value[$(end)-]) ; - right = $(right[2-]) ; - self.value = $(left) $(right) ; - } - - # Remove all elements from the vector. - # - rule clear ( ) - { - self.value = ; - } - - # The number of elements in the vector. - # - rule size ( ) - { - return [ sequence.length $(self.value) ] ; - } - - # Returns "true" if there are NO elements in the vector, empty otherwise. - # - rule empty ( ) - { - if ! $(self.value)-is-not-empty - { - return true ; - } - } - - # Returns the textual representation of content. - # - rule str ( ) - { - return "[" [ sequence.transform utility.str : $(self.value) ] "]" ; - } - - # Sorts the vector inplace, calling 'utility.less' for comparisons. - # - rule sort ( ) - { - self.value = [ sequence.insertion-sort $(self.value) : utility.less ] ; - } - - # Returns true if content is equal to the content of other vector. Uses - # 'utility.equal' for comparison. - # - rule equal ( another ) - { - local mismatch ; - local size = [ size ] ; - if $(size) = [ $(another).size ] - { - for local i in [ numbers.range 1 $(size) ] - { - if ! [ utility.equal [ at $(i) ] [ $(another).at $(i) ] ] - { - mismatch = true ; - } - } - } - else - { - mismatch = true ; - } - - if ! $(mismatch) - { - return true ; - } - } -} - - -rule __test__ ( ) -{ - import assert ; - import "class" : new ; - - local v1 = [ new vector ] ; - assert.true $(v1).equal $(v1) ; - assert.true $(v1).empty ; - assert.result 0 : $(v1).size ; - assert.result "[" "]" : $(v1).str ; - $(v1).push-back b ; - $(v1).push-front a ; - assert.result "[" a b "]" : $(v1).str ; - assert.result a : $(v1).front ; - assert.result b : $(v1).back ; - $(v1).insert 2 : d ; - $(v1).insert 2 : c ; - $(v1).insert 4 : f ; - $(v1).insert 4 : e ; - $(v1).pop-back ; - assert.result 5 : $(v1).size ; - assert.result d : $(v1).at 3 ; - $(v1).pop-front ; - assert.result c : $(v1).front ; - assert.false $(v1).empty ; - $(v1).erase 3 4 ; - assert.result 2 : $(v1).size ; - - local v2 = [ new vector q w e r t y ] ; - assert.result 6 : $(v2).size ; - $(v1).push-back $(v2) ; - assert.result 3 : $(v1).size ; - local v2-alias = [ $(v1).back ] ; - assert.result e : $(v2-alias).at 3 ; - $(v1).clear ; - assert.true $(v1).empty ; - assert.false $(v2-alias).empty ; - $(v2).pop-back ; - assert.result t : $(v2-alias).back ; - - local v3 = [ new vector ] ; - $(v3).push-back [ new vector 1 2 3 4 5 ] ; - $(v3).push-back [ new vector a b c ] ; - assert.result "[" "[" 1 2 3 4 5 "]" "[" a b c "]" "]" : $(v3).str ; - $(v3).push-back [ new vector [ new vector x y z ] [ new vector 7 8 9 ] ] ; - assert.result 1 : $(v3).at 1 : 1 ; - assert.result b : $(v3).at 2 : 2 ; - assert.result a b c : $(v3).get-at 2 ; - assert.result 7 8 9 : $(v3).get-at 3 : 2 ; - - local v4 = [ new vector 4 3 6 ] ; - $(v4).sort ; - assert.result 3 4 6 : $(v4).get ; - assert.false $(v4).equal $(v3) ; - - local v5 = [ new vector 3 4 6 ] ; - assert.true $(v4).equal $(v5) ; - # Check that vectors of different sizes are considered non-equal. 
- $(v5).pop-back ; - assert.false $(v4).equal $(v5) ; - - local v6 = [ new vector [ new vector 1 2 3 ] ] ; - assert.true $(v6).equal [ new vector [ new vector 1 2 3 ] ] ; - - local v7 = [ new vector 111 222 333 ] ; - assert.true $(v7).equal $(v7) ; - $(v7).insert 4 : 444 ; - assert.result 111 222 333 444 : $(v7).get ; - $(v7).insert 999 : xxx ; - assert.result 111 222 333 444 xxx : $(v7).get ; - - local v8 = [ new vector "" "" "" ] ; - assert.true $(v8).equal $(v8) ; - assert.false $(v8).empty ; - assert.result 3 : $(v8).size ; - assert.result "" : $(v8).at 1 ; - assert.result "" : $(v8).at 2 ; - assert.result "" : $(v8).at 3 ; - assert.result : $(v8).at 4 ; - $(v8).insert 2 : 222 ; - assert.result 4 : $(v8).size ; - assert.result "" 222 "" "" : $(v8).get ; - $(v8).insert 999 : "" ; - assert.result 5 : $(v8).size ; - assert.result "" 222 "" "" "" : $(v8).get ; - $(v8).insert 999 : xxx ; - assert.result 6 : $(v8).size ; - assert.result "" 222 "" "" "" xxx : $(v8).get ; - - # Regression test for a bug causing vector.equal to compare only the first - # and the last element in the given vectors. - local v9 = [ new vector 111 xxx 222 ] ; - local v10 = [ new vector 111 yyy 222 ] ; - assert.false $(v9).equal $(v10) ; -} diff --git a/jam-files/boost-build/util/doc.jam b/jam-files/boost-build/util/doc.jam deleted file mode 100644 index a7515588..00000000 --- a/jam-files/boost-build/util/doc.jam +++ /dev/null @@ -1,997 +0,0 @@ -# Copyright 2002, 2005 Dave Abrahams -# Copyright 2002, 2003, 2006 Rene Rivera -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Documentation system, handles --help requests. -# It defines rules that attach documentation to modules, rules, and variables. -# Collects and generates documentation for the various parts of the build -# system. The documentation is collected from comments integrated into the code. - -import modules ; -import print ; -import set ; -import container ; -import "class" ; -import sequence ; -import path ; - - -# The type of output to generate. -# "console" is formated text echoed to the console (the default); -# "text" is formated text appended to the output file; -# "html" is HTML output to the file. -# -help-output = console ; - - -# The file to output documentation to when generating "text" or "html" help. -# This is without extension as the extension is determined by the type of -# output. -# -help-output-file = help ; - -# Whether to include local rules in help output. -# -.option.show-locals ?= ; - -# When showing documentation for a module, whether to also generate -# automatically the detailed docs for each item in the module. -# -.option.detailed ?= ; - -# Generate debug output as the help is generated and modules are parsed. -# -.option.debug ?= ; - -# Enable or disable a documentation option. -# -local rule set-option ( - option # The option name. - : value ? # Enabled (non-empty), or disabled (empty) -) -{ - .option.$(option) = $(value) ; -} - - -# Set the type of output. -# -local rule set-output ( type ) -{ - help-output = $(type) ; -} - - -# Set the output to a file. -# -local rule set-output-file ( file ) -{ - help-output-file = $(file) ; -} - - -# Extracts the brief comment from a complete comment. The brief comment is the -# first sentence. -# -local rule brief-comment ( - docs * # The comment documentation. -) -{ - local d = $(docs:J=" ") ; - local p = [ MATCH ".*([.])$" : $(d) ] ; - if ! 
$(p) { d = $(d)"." ; } - d = $(d)" " ; - local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ; - local brief = $(m[1]) ; - while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ] - { - m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ; - brief += $(m[1]) ; - } - return $(brief:J="") ; -} - - -# Specifies the documentation for the current module. -# -local rule set-module-doc ( - module-name ? # The name of the module to document. - : docs * # The documentation for the module. -) -{ - module-name ?= * ; - - $(module-name).brief = [ brief-comment $(docs) ] ; - $(module-name).docs = $(docs) ; - - if ! $(module-name) in $(documented-modules) - { - documented-modules += $(module-name) ; - } -} - - -# Specifies the documentation for the current module. -# -local rule set-module-copyright ( - module-name ? # The name of the module to document. - : copyright * # The copyright for the module. -) -{ - module-name ?= * ; - - $(module-name).copy-brief = [ brief-comment $(copyright) ] ; - $(module-name).copy-docs = $(docs) ; - - if ! $(module-name) in $(documented-modules) - { - documented-modules += $(module-name) ; - } -} - - -# Specifies the documentation for a rule in the current module. If called in the -# global module, this documents a global rule. -# -local rule set-rule-doc ( - name # The name of the rule. - module-name ? # The name of the module to document. - is-local ? # Whether the rule is local to the module. - : docs * # The documentation for the rule. -) -{ - module-name ?= * ; - - $(module-name).$(name).brief = [ brief-comment $(docs) ] ; - $(module-name).$(name).docs = $(docs) ; - $(module-name).$(name).is-local = $(is-local) ; - - if ! $(name) in $($(module-name).rules) - { - $(module-name).rules += $(name) ; - } -} - - -# Specify a class, will turn a rule into a class. -# -local rule set-class-doc ( - name # The name of the class. - module-name ? # The name of the module to document. - : super-name ? # The super class name. -) -{ - module-name ?= * ; - - $(module-name).$(name).is-class = true ; - $(module-name).$(name).super-name = $(super-name) ; - $(module-name).$(name).class-rules = - [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ; - $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ; - - $(module-name).classes += $(name) ; - $(module-name).class-rules += $($(module-name).$(name).class-rules) ; - $(module-name).rules = - [ set.difference $($(module-name).rules) : - $(name) $($(module-name).$(name).class-rules) ] ; -} - - -# Set the argument call signature of a rule. -# -local rule set-rule-arguments-signature ( - name # The name of the rule. - module-name ? # The name of the module to document. - : signature * # The arguments signature. -) -{ - module-name ?= * ; - - $(module-name).$(name).signature = $(signature) ; -} - - -# Specifies the documentation for an argument of a rule. -# -local rule set-argument-doc ( - name # The name of the argument. - qualifier # Argument syntax qualifier, "*", "+", etc. - rule-name # The name of the rule. - module-name ? # THe optional name of the module. - : docs * # The documentation. -) -{ - module-name ?= * ; - - $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ; - $(module-name).$(rule-name).args.$(name).docs = $(docs) ; - - if ! $(name) in $($(module-name).$(rule-name).args) - { - $(module-name).$(rule-name).args += $(name) ; - } -} - - -# Specifies the documentation for a variable in the current module. If called in -# the global module, the global variable is documented. 
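The brief-comment rule above reduces a documentation block to its first sentence, continuing past any period that is not followed by a space so that strings like "Version 1.0" do not get cut in half. A simplified Python rendering of that behaviour (illustrative only, not part of the doc module):

    import re

    def brief_comment(doc_lines):
        text = " ".join(doc_lines)
        if not text.endswith("."):
            text += "."
        text += " "
        m = re.match(r"([^.]+\.)(.*)", text)
        brief, rest = m.group(1), m.group(2)
        # Keep consuming while the period was not followed by a space, e.g. the
        # dot inside "Version 1.0" should not end the brief description.
        while rest and rest[0] != " ":
            m = re.match(r"([^.]+\.)(.*)", rest)
            if not m:
                break
            brief += m.group(1)
            rest = m.group(2)
        return brief

    print(brief_comment(["Formats help output.", "Details follow later."]))
    # -> "Formats help output."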
-# -local rule set-variable-doc ( - name # The name of the variable. - default # The default value. - initial # The initial value. - module-name ? # The name of the module to document. - : docs * # The documentation for the variable. -) -{ - module-name ?= * ; - - $(module-name).$(name).brief = [ brief-comment $(docs) ] ; - $(module-name).$(name).default = $(default) ; - $(module-name).$(name).initial = $(initial) ; - $(module-name).$(name).docs = $(docs) ; - - if ! $(name) in $($(module-name).variables) - { - $(module-name).variables += $(name) ; - } -} - - -# Generates a general description of the documentation and help system. -# -local rule print-help-top ( ) -{ - print.section "General command line usage" ; - - print.text " bjam [options] [properties] [targets] - - Options, properties and targets can be specified in any order. - " ; - - print.section "Important Options" ; - - print.list-start ; - print.list-item "--clean Remove targets instead of building" ; - print.list-item "-a Rebuild everything" ; - print.list-item "-n Don't execute the commands, only print them" ; - print.list-item "-d+2 Show commands as they are executed" ; - print.list-item "-d0 Supress all informational messages" ; - print.list-item "-q Stop at first error" ; - print.list-item "--debug-configuration Diagnose configuration" ; - print.list-item "--debug-building Report which targets are built with what properties" ; - print.list-item "--debug-generator Diagnose generator search/execution" ; - print.list-end ; - - print.section "Further Help" - The following options can be used to obtain additional documentation. - ; - - print.list-start ; - print.list-item "--help-options Print more obscure command line options." ; - print.list-item "--help-internal Boost.Build implementation details." ; - print.list-item "--help-doc-options Implementation details doc formatting." ; - print.list-end ; -} - - -# Generate Jam/Boost.Jam command usage information. -# -local rule print-help-usage ( ) -{ - print.section "Boost.Jam Usage" - "bjam [ options... ] targets..." - ; - print.list-start ; - print.list-item -a; - Build all targets, even if they are current. ; - print.list-item -fx; - Read '"x"' as the Jamfile for building instead of searching for the - Boost.Build system. ; - print.list-item -jx; - Run up to '"x"' commands concurrently. ; - print.list-item -n; - Do not execute build commands. Instead print out the commands as they - would be executed if building. ; - print.list-item -ox; - Output the used build commands to file '"x"'. ; - print.list-item -q; - Quit as soon as a build failure is encountered. Without this option - Boost.Jam will continue building as many targets as it can. - print.list-item -sx=y; - Sets a Jam variable '"x"' to the value '"y"', overriding any value that - variable would have from the environment. ; - print.list-item -tx; - Rebuild the target '"x"', even if it is up-to-date. ; - print.list-item -v; - Display the version of bjam. ; - print.list-item --x; - Any option not explicitly handled by Boost.Jam remains available to - build scripts using the '"ARGV"' variable. ; - print.list-item -dn; - Enables output of diagnostic messages. The debug level '"n"' and all - below it are enabled by this option. ; - print.list-item -d+n; - Enables output of diagnostic messages. Only the output for debug level - '"n"' is enabled. ; - print.list-end ; - print.section "Debug Levels" - Each debug level shows a different set of information. Usually with - higher levels producing more verbose information. 
The following levels - are supported: ; - print.list-start ; - print.list-item 0; - Turn off all diagnostic output. Only errors are reported. ; - print.list-item 1; - Show the actions taken for building targets, as they are executed. ; - print.list-item 2; - Show "quiet" actions and display all action text, as they are executed. ; - print.list-item 3; - Show dependency analysis, and target/source timestamps/paths. ; - print.list-item 4; - Show arguments of shell invocations. ; - print.list-item 5; - Show rule invocations and variable expansions. ; - print.list-item 6; - Show directory/header file/archive scans, and attempts at binding to targets. ; - print.list-item 7; - Show variable settings. ; - print.list-item 8; - Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ; - print.list-item 9; - Show variable manipulation, scanner tokens, and memory usage. ; - print.list-item 10; - Show execution times for rules. ; - print.list-item 11; - Show parsing progress of Jamfiles. ; - print.list-item 12; - Show graph for target dependencies. ; - print.list-item 13; - Show changes in target status (fate). ; - print.list-end ; -} - - -# Generates description of options controlling the help system. This -# automatically reads the options as all variables in the doc module of the form -# ".option.*". -# -local rule print-help-options ( - module-name # The doc module. -) -{ - print.section "Help Options" - These are all the options available for enabling or disabling to control - the help system in various ways. Options can be enabled or disabled with - '"--help-enable-<option>"', and "'--help-disable-<option>'" - respectively. - ; - local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ; - if $(options-to-list) - { - print.list-start ; - for local option in [ sequence.insertion-sort $(options-to-list) ] - { - local def = disabled ; - if $($(module-name)..option.$(option).default) != "(empty)" - { - def = enabled ; - } - print.list-item $(option): $($(module-name)..option.$(option).docs) - Default is $(def). ; - } - print.list-end ; - } -} - - -# Generate brief documentation for all the known items in the section for a -# module. Possible sections are: "rules", and "variables". -# -local rule print-help-module-section ( - module # The module name. - section # rules or variables. - : section-head # The title of the section. - section-description * # The detailed description of the section. -) -{ - if $($(module).$(section)) - { - print.section $(section-head) $(section-description) ; - print.list-start ; - for local item in [ sequence.insertion-sort $($(module).$(section)) ] - { - local show = ; - if ! $($(module).$(item).is-local) - { - show = yes ; - } - if $(.option.show-locals) - { - show = yes ; - } - if $(show) - { - print.list-item $(item): $($(module).$(item).brief) ; - } - } - print.list-end ; - } -} - - -# Generate documentation for all possible modules. We attempt to list all known -# modules together with a brief description of each. -# -local rule print-help-all ( - ignored # Usually the module name, but is ignored here. -) -{ - print.section "Modules" - "These are all the known modules. Use --help <module> to get more" - "detailed information." - ; - if $(documented-modules) - { - print.list-start ; - for local module-name in [ sequence.insertion-sort $(documented-modules) ] - { - # The brief docs for each module. 
- print.list-item $(module-name): $($(module-name).brief) ; - } - print.list-end ; - } - # The documentation for each module when details are requested. - if $(documented-modules) && $(.option.detailed) - { - for local module-name in [ sequence.insertion-sort $(documented-modules) ] - { - # The brief docs for each module. - print-help-module $(module-name) ; - } - } -} - - -# Generate documentation for a module. Basic information about the module is -# generated. -# -local rule print-help-module ( - module-name # The module to generate docs for. -) -{ - # Print the docs. - print.section "Module '$(module-name)'" $($(module-name).docs) ; - - # Print out the documented classes. - print-help-module-section $(module-name) classes : "Module '$(module-name)' classes" - Use --help $(module-name).<class-name> to get more information. ; - - # Print out the documented rules. - print-help-module-section $(module-name) rules : "Module '$(module-name)' rules" - Use --help $(module-name).<rule-name> to get more information. ; - - # Print out the documented variables. - print-help-module-section $(module-name) variables : "Module '$(module-name)' variables" - Use --help $(module-name).<variable-name> to get more information. ; - - # Print out all the same information but indetailed form. - if $(.option.detailed) - { - print-help-classes $(module-name) ; - print-help-rules $(module-name) ; - print-help-variables $(module-name) ; - } -} - - -# Generate documentation for a set of rules in a module. -# -local rule print-help-rules ( - module-name # Module of the rules. - : name * # Optional list of rules to describe. -) -{ - name ?= $($(module-name).rules) ; - if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ] - { - # Print out the given rules. - for local rule-name in [ sequence.insertion-sort $(name) ] - { - if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local) - { - local signature = $($(module-name).$(rule-name).signature:J=" ") ; - signature ?= "" ; - print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'" - $($(module-name).$(rule-name).docs) ; - if $($(module-name).$(rule-name).args) - { - print.list-start ; - for local arg-name in $($(module-name).$(rule-name).args) - { - print.list-item $(arg-name): $($(module-name).$(rule-name).args.$(arg-name).docs) ; - } - print.list-end ; - } - } - } - } -} - - -# Generate documentation for a set of classes in a module. -# -local rule print-help-classes ( - module-name # Module of the classes. - : name * # Optional list of classes to describe. -) -{ - name ?= $($(module-name).classes) ; - if [ set.intersection $(name) : $($(module-name).classes) ] - { - # Print out the given classes. - for local class-name in [ sequence.insertion-sort $(name) ] - { - if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local) - { - local signature = $($(module-name).$(class-name).signature:J=" ") ; - signature ?= "" ; - print.section "Class '$(module-name).$(class-name) ( $(signature) )'" - $($(module-name).$(class-name).docs) - "Inherits from '"$($(module-name).$(class-name).super-name)"'." ; - if $($(module-name).$(class-name).args) - { - print.list-start ; - for local arg-name in $($(module-name).$(class-name).args) - { - print.list-item $(arg-name): $($(module-name).$(class-name).args.$(arg-name).docs) ; - } - print.list-end ; - } - } - - # Print out the documented rules of the class. 
- print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules" - Use --help $(module-name).<rule-name> to get more information. ; - - # Print out all the rules if details are requested. - if $(.option.detailed) - { - print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ; - } - } - } -} - - -# Generate documentation for a set of variables in a module. -# -local rule print-help-variables ( - module-name ? # Module of the variables. - : name * # Optional list of variables to describe. -) -{ - name ?= $($(module-name).variables) ; - if [ set.intersection $(name) : $($(module-name).variables) ] - { - # Print out the given variables. - for local variable-name in [ sequence.insertion-sort $(name) ] - { - print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ; - if $($(module-name).$(variable-name).default) || - $($(module-name).$(variable-name).initial) - { - print.list-start ; - if $($(module-name).$(variable-name).default) - { - print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ; - } - if $($(module-name).$(variable-name).initial) - { - print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ; - } - print.list-end ; - } - } - } -} - - -# Generate documentation for a project. -# -local rule print-help-project ( - unused ? - : jamfile * # The project Jamfile. -) -{ - if $(jamfile<$(jamfile)>.docs) - { - # Print the docs. - print.section "Project-specific help" - Project has jamfile at $(jamfile) ; - - print.lines $(jamfile<$(jamfile)>.docs) "" ; - } -} - - -# Generate documentation for a config file. -# -local rule print-help-config ( - unused ? - : type # The type of configuration file user or site. - config-file # The configuration Jamfile. -) -{ - if $(jamfile<$(config-file)>.docs) - { - # Print the docs. - print.section "Configuration help" - Configuration file at $(config-file) ; - - print.lines $(jamfile<$(config-file)>.docs) "" ; - } -} - - -ws = " " ; - -# Extract the text from a block of comments. -# -local rule extract-comment ( - var # The name of the variable to extract from. -) -{ - local comment = ; - local line = $($(var)[1]) ; - local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ; - while $(l[1]) && $($(var)) - { - if $(l[2]) { comment += [ MATCH "^[$(ws)]?(.*)$" : $(l[2]) ] ; } - else { comment += "" ; } - $(var) = $($(var)[2-]) ; - line = $($(var)[1]) ; - l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ; - } - return $(comment) ; -} - - -# Extract s single line of Jam syntax, ignoring any comments. -# -local rule extract-syntax ( - var # The name of the variable to extract from. -) -{ - local syntax = ; - local line = $($(var)[1]) ; - while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var)) - { - local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ; - if $(m) && ! $(m) = "" - { - syntax = $(m) ; - } - $(var) = $($(var)[2-]) ; - line = $($(var)[1]) ; - } - return $(syntax) ; -} - - -# Extract the next token, this is either a single Jam construct or a comment as -# a single token. -# -local rule extract-token ( - var # The name of the variable to extract from. -) -{ - local parts = ; - while ! $(parts) - { - parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ; - if ! 
$(parts) - { - $(var) = $($(var)[2-]) ; - } - } - local token = ; - if [ MATCH "^(#)" : $(parts[1]) ] - { - token = $(parts:J=" ") ; - $(var) = $($(var)[2-]) ; - } - else - { - token = $(parts[1]) ; - $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ; - } - return $(token) ; -} - - -# Scan for a rule declaration as the next item in the variable. -# -local rule scan-rule ( - syntax ? # The first part of the text which contains the rule declaration. - : var # The name of the variable to extract from. -) -{ - local rule-parts = - [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ; - if $(rule-parts[1]) - { - # Mark as doc for rule. - local rule-name = $(rule-parts[2]) ; - if $(scope-name) - { - rule-name = $(scope-name).$(rule-name) ; - } - local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ; - if $(comment-block) - { - set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ; - } - # Parse args of rule. - $(var) = $(rule-parts[3-]) $($(var)) ; - set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ; - # Scan within this rules scope. - local scope-level = [ extract-token $(var) ] ; - local scope-name = $(rule-name) ; - while $(scope-level) - { - local comment-block = [ extract-comment $(var) ] ; - local syntax-block = [ extract-syntax $(var) ] ; - if [ scan-rule $(syntax-block) : $(var) ] - { - } - else if [ MATCH "^(\\{)" : $(syntax-block) ] - { - scope-level += "{" ; - } - else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ] - { - scope-level = $(scope-level[2-]) ; - } - } - - return true ; - } -} - - -# Scan the arguments of a rule. -# -local rule scan-rule-arguments ( - var # The name of the variable to extract from. -) -{ - local arg-syntax = ; - local token = [ extract-token $(var) ] ; - while $(token) != "(" && $(token) != "{" - { - token = [ extract-token $(var) ] ; - } - if $(token) != "{" - { - token = [ extract-token $(var) ] ; - } - local arg-signature = ; - while $(token) != ")" && $(token) != "{" - { - local arg-name = ; - local arg-qualifier = " " ; - local arg-doc = ; - if $(token) = ":" - { - arg-signature += $(token) ; - token = [ extract-token $(var) ] ; - } - arg-name = $(token) ; - arg-signature += $(token) ; - token = [ extract-token $(var) ] ; - if [ MATCH "^([\\*\\+\\?])" : $(token) ] - { - arg-qualifier = $(token) ; - arg-signature += $(token) ; - token = [ extract-token $(var) ] ; - } - if $(token) = ":" - { - arg-signature += $(token) ; - token = [ extract-token $(var) ] ; - } - if [ MATCH "^(#)" : $(token) ] - { - $(var) = $(token) $($(var)) ; - arg-doc = [ extract-comment $(var) ] ; - token = [ extract-token $(var) ] ; - } - set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ; - } - while $(token) != "{" - { - token = [ extract-token $(var) ] ; - } - $(var) = "{" $($(var)) ; - arg-signature ?= "" ; - return $(arg-signature) ; -} - - -# Scan for a variable declaration. -# -local rule scan-variable ( - syntax ? # The first part of the text which contains the variable declaration. - : var # The name of the variable to extract from. 
-) -{ - # [1] = name, [2] = value(s) - local var-parts = - [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ; - if $(var-parts) - { - local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ; - local default-value = "" ; - local initial-valie = "" ; - if $(var-parts[2]) = "?=" - { - default-value = $(value) ; - default-value ?= "(empty)" ; - } - else - { - initial-value = $(value) ; - initial-value ?= "(empty)" ; - } - if $(comment-block) - { - set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ; - } - return true ; - } -} - - -# Scan a class declaration. -# -local rule scan-class ( - syntax ? # The syntax text for the class declaration. -) -{ - # [1] = class?, [2] = name, [3] = superclass - local class-parts = - [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]+:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ; - if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class" - { - set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ; - } -} - - -# Scan a module file for documentation comments. This also invokes any actions -# assigned to the module. The actions are the rules that do the actual output of -# the documentation. This rule is invoked as the header scan rule for the module -# file. -# -rule scan-module ( - target # The module file. - : text * # The text in the file, one item per line. - : action * # Rule to call to output docs for the module. -) -{ - if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; } - local module-name = $(target:B) ; - local module-documented = ; - local comment-block = ; - local syntax-block = ; - # This is a hack because we can not get the line of a file if it happens to - # not have a new-line termination. - text += "}" ; - while $(text) - { - comment-block = [ extract-comment text ] ; - syntax-block = [ extract-syntax text ] ; - if $(.option.debug) - { - ECHO "HELP:" comment block; '$(comment-block)' ; - ECHO "HELP:" syntax block; '$(syntax-block)' ; - } - if [ scan-rule $(syntax-block) : text ] { } - else if [ scan-variable $(syntax-block) : text ] { } - else if [ scan-class $(syntax-block) ] { } - else if [ MATCH .*([cC]opyright).* : $(comment-block:J=" ") ] - { - # mark as the copy for the module. - set-module-copyright $(module-name) : $(comment-block) ; - } - else if $(action[1]) in "print-help-project" "print-help-config" - && ! $(jamfile<$(target)>.docs) - { - # special module docs for the project jamfile. - jamfile<$(target)>.docs = $(comment-block) ; - } - else if ! $(module-documented) - { - # document the module. - set-module-doc $(module-name) : $(comment-block) ; - module-documented = true ; - } - } - if $(action) - { - $(action[1]) $(module-name) : $(action[2-]) ; - } -} - - -# Import scan-module to global scope, so that it is available during header -# scanning phase. -# -IMPORT $(__name__) : scan-module : : doc.scan-module ; - - -# Read in a file using the SHELL builtin and return the individual lines as -# would be done for header scanning. -# -local rule read-file ( - file # The file to read in. -) -{ - file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ; - if ! 
$(.file<$(file)>.lines) - { - local content ; - switch [ modules.peek : OS ] - { - case NT : - content = [ SHELL "TYPE \"$(file)\"" ] ; - - case * : - content = [ SHELL "cat \"$(file)\"" ] ; - } - local lines ; - local nl = " -" ; - local << = "([^$(nl)]*)[$(nl)](.*)" ; - local line+ = [ MATCH "$(<<)" : "$(content)" ] ; - while $(line+) - { - lines += $(line+[1]) ; - line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ; - } - .file<$(file)>.lines = $(lines) ; - } - return $(.file<$(file)>.lines) ; -} - - -# Add a scan action to perform to generate the help documentation. The action -# rule is passed the name of the module as the first argument. The second -# argument(s) are optional and passed directly as specified here. -# -local rule do-scan ( - modules + # The modules to scan and perform the action on. - : action * # The action rule, plus the secondary arguments to pass to the action rule. -) -{ - if $(help-output) = text - { - print.output $(help-output-file).txt plain ; - ALWAYS $(help-output-file).txt ; - DEPENDS all : $(help-output-file).txt ; - } - if $(help-output) = html - { - print.output $(help-output-file).html html ; - ALWAYS $(help-output-file).html ; - DEPENDS all : $(help-output-file).html ; - } - for local module-file in $(modules[1--2]) - { - scan-module $(module-file) : [ read-file $(module-file) ] ; - } - scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ; -} diff --git a/jam-files/boost-build/util/indirect.jam b/jam-files/boost-build/util/indirect.jam deleted file mode 100644 index ec63f192..00000000 --- a/jam-files/boost-build/util/indirect.jam +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import modules ; -import numbers ; - - -# The pattern that indirect rules must match: module%rule -.pattern = ^([^%]*)%([^%]+)$ ; - - -# -# Type checking rules. -# -local rule indirect-rule ( x ) -{ - if ! [ MATCH $(.pattern) : $(x) ] - { - return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ; - } -} - - -# Make an indirect rule which calls the given rule. If context is supplied it is -# expected to be the module in which to invoke the rule by the 'call' rule -# below. Otherwise, the rule will be invoked in the module of this rule's -# caller. -# -rule make ( rulename bound-args * : context ? ) -{ - context ?= [ CALLER_MODULE ] ; - context ?= "" ; - return $(context)%$(rulename) $(bound-args) ; -} - - -# Make an indirect rule which calls the given rule. 'rulename' may be a -# qualified rule; if so it is returned unchanged. Otherwise, if frames is not -# supplied, the result will be invoked (by 'call', below) in the module of the -# caller. Otherwise, frames > 1 specifies additional call frames to back up in -# order to find the module context. -# -rule make-qualified ( rulename bound-args * : frames ? ) -{ - if [ MATCH $(.pattern) : $(rulename) ] - { - return $(rulename) $(bound-args) ; - } - else - { - frames ?= 1 ; - # If the rule name includes a Jamfile module, grab it. - local module-context = [ MATCH ^(Jamfile<[^>]*>)\\..* : $(rulename) ] ; - - if ! $(module-context) - { - # Take the first dot-separated element as module name. This disallows - # module names with dots, but allows rule names with dots. 
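The indirect.jam module being listed here represents a rule reference as a single 'module%rule' string plus any bound arguments, which is later split apart and invoked via modules.call-in. A compact Python model of that convention (a plain dict stands in for bjam's module registry; all names here are illustrative):

    import re

    _PATTERN = re.compile(r"^([^%]*)%([^%]+)$")

    def make(rulename, bound_args=(), context=""):
        # Build an indirect reference: "<module>%<rule>" plus bound arguments.
        return ("%s%%%s" % (context, rulename),) + tuple(bound_args)

    def get_module(ref):
        return _PATTERN.match(ref[0]).group(1)

    def get_rule(ref):
        return _PATTERN.match(ref[0]).group(2)

    # Stand-in for the module system: module name -> {rule name -> callable}.
    MODULES = {"demo": {"greet": lambda *args: "hello " + " ".join(args)}}

    def call(ref, *args):
        return MODULES[get_module(ref)][get_rule(ref)](*(ref[1:] + args))

    ref = make("greet", ("world",), context="demo")
    print(call(ref))   # -> "hello world"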
- module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ; - } - module-context ?= [ CALLER_MODULE $(frames) ] ; - return [ make $(rulename) $(bound-args) : $(module-context) ] ; - } -} - - -# Returns the module name in which the given indirect rule will be invoked. -# -rule get-module ( [indirect-rule] x ) -{ - local m = [ MATCH $(.pattern) : $(x) ] ; - if ! $(m[1]) - { - m = ; - } - return $(m[1]) ; -} - - -# Returns the rulename that will be called when x is invoked. -# -rule get-rule ( [indirect-rule] x ) -{ - local m = [ MATCH $(.pattern) : $(x) ] ; - return $(m[2]) ; -} - - -# Invoke the given indirect-rule. -# -rule call ( [indirect-rule] r args * : * ) -{ - return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args) - : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; -} - - -rule __test__ -{ - import assert ; - - rule foo-barr! ( x ) - { - assert.equal $(x) : x ; - } - - assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ; - assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ; - - call [ make foo-barr! ] x ; - call [ make foo-barr! x ] ; - call [ make foo-barr! : [ CALLER_MODULE ] ] x ; -} diff --git a/jam-files/boost-build/util/indirect.py b/jam-files/boost-build/util/indirect.py deleted file mode 100644 index 78fa8994..00000000 --- a/jam-files/boost-build/util/indirect.py +++ /dev/null @@ -1,15 +0,0 @@ -# Status: minimally ported. This module is not supposed to be used much -# with Boost.Build/Python. -# -# Copyright 2003 Dave Abrahams -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -from b2.util import call_jam_function, bjam_signature - -def call(*args): - a1 = args[0] - name = a1[0] - a1tail = a1[1:] - call_jam_function(name, *((a1tail,) + args[1:])) diff --git a/jam-files/boost-build/util/logger.py b/jam-files/boost-build/util/logger.py deleted file mode 100644 index de652129..00000000 --- a/jam-files/boost-build/util/logger.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright Pedro Ferreira 2005. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import sys - -class NullLogger: - def __init__ (self): - self.indent_ = '' - - def log (self, source_name, *args): - if self.on () and self.interesting (source_name): - self.do_log (self.indent_) - for i in args: - self.do_log (i) - self.do_log ('\n') - - def increase_indent (self): - if self.on (): - self.indent_ += ' ' - - def decrease_indent (self): - if self.on () and len (self.indent_) > 4: - self.indent_ = self.indent_ [-4:] - - def do_log (self, *args): - pass - - def interesting (self, source_name): - return False - - def on (self): - return True - -class TextLogger (NullLogger): - def __init__ (self): - NullLogger.__init__ (self) - - def do_log (self, arg): - sys.stdout.write (str (arg)) - - def interesting (self, source_name): - return True - - def on (self): - return True diff --git a/jam-files/boost-build/util/numbers.jam b/jam-files/boost-build/util/numbers.jam deleted file mode 100644 index 665347d3..00000000 --- a/jam-files/boost-build/util/numbers.jam +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright 2001, 2002 Dave Abrahams -# Copyright 2002, 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. 
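For the logger classes defined in util/logger.py above, call sites only ever talk to the log()/increase_indent()/decrease_indent() interface; whether anything is printed is decided by the concrete class. A short usage sketch — the import path is an assumption and depends on how this tree is placed on PYTHONPATH:

    # Assumed import path; adjust to wherever util/logger.py is importable from.
    from logger import TextLogger, NullLogger

    verbose = TextLogger()
    verbose.log("generators", "considering target ", "hello.o")  # printed
    verbose.increase_indent()
    verbose.log("generators", "matched gcc.compile")             # printed, indented
    verbose.decrease_indent()

    quiet = NullLogger()
    quiet.log("generators", "dropped")  # NullLogger.interesting() is False, so no output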
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import errors ; - - -rule trim-leading-zeroes ( value ) -{ - return [ CALC $(value) + 0 ] ; -} - - -rule check ( numbers * ) -{ - for local n in $(numbers) - { - switch $(n) - { - case *[^0-9]* : - errors.error $(n) "in" $(numbers) : is not a number ; - } - } -} - - -rule increment ( number ) -{ - return [ CALC $(number) + 1 ] ; -} - - -rule decrement ( number ) -{ - return [ CALC $(number) - 1 ] ; -} - - -rule range ( start finish ? : step ? ) -{ - if ! $(finish) - { - finish = $(start) ; - start = 1 ; - } - step ?= 1 ; - - check $(start) $(finish) $(step) ; - - if $(finish) != 0 - { - local result ; - while [ less $(start) $(finish) ] || $(start) = $(finish) - { - result += $(start) ; - start = [ CALC $(start) + $(step) ] ; - } - return $(result) ; - } -} - - -rule less ( n1 n2 ) -{ - switch [ CALC $(n2) - $(n1) ] - { - case [1-9]* : return true ; - } -} - - -rule log10 ( number ) -{ - switch $(number) - { - case *[^0-9]* : errors.error $(number) is not a number ; - case 0 : errors.error can't take log of zero ; - case [1-9] : return 0 ; - case [1-9]? : return 1 ; - case [1-9]?? : return 2 ; - case [1-9]??? : return 3 ; - case [1-9]???? : return 4 ; - case [1-9]????? : return 5 ; - case [1-9]?????? : return 6 ; - case [1-9]??????? : return 7 ; - case [1-9]???????? : return 8 ; - case [1-9]????????? : return 9 ; - case * : - { - import sequence ; - import string ; - local chars = [ string.chars $(number) ] ; - while $(chars[1]) = 0 - { - chars = $(chars[2-]) ; - } - if ! $(chars) - { - errors.error can't take log of zero ; - } - else - { - return [ decrement [ sequence.length $(chars) ] ] ; - } - } - } -} - - -rule __test__ ( ) -{ - import assert ; - - assert.result 1 : increment 0 ; - assert.result 2 : increment 1 ; - assert.result 1 : decrement 2 ; - assert.result 0 : decrement 1 ; - assert.result 50 : increment 49 ; - assert.result 49 : decrement 50 ; - assert.result 99 : increment 98 ; - assert.result 99 : decrement 100 ; - assert.result 100 : increment 99 ; - assert.result 999 : decrement 1000 ; - assert.result 1000 : increment 999 ; - - assert.result 1 2 3 : range 3 ; - assert.result 1 2 3 4 5 6 7 8 9 10 11 12 : range 12 ; - assert.result 3 4 5 6 7 8 9 10 11 : range 3 11 ; - assert.result : range 0 ; - assert.result 1 4 7 10 : range 10 : 3 ; - assert.result 2 4 6 8 10 : range 2 10 : 2 ; - assert.result 25 50 75 100 : range 25 100 : 25 ; - - assert.result 0 : trim-leading-zeroes 0 ; - assert.result 1234 : trim-leading-zeroes 1234 ; - assert.result 123456 : trim-leading-zeroes 0000123456 ; - assert.result 1000123456 : trim-leading-zeroes 1000123456 ; - assert.result 10000 : trim-leading-zeroes 10000 ; - assert.result 10000 : trim-leading-zeroes 00010000 ; - - assert.true less 1 2 ; - assert.true less 1 12 ; - assert.true less 1 21 ; - assert.true less 005 217 ; - assert.false less 0 0 ; - assert.false less 03 3 ; - assert.false less 3 03 ; - assert.true less 005 217 ; - assert.true less 0005 217 ; - assert.true less 5 00217 ; - - # TEMPORARY disabled, because nested "try"/"catch" do not work and I do no - # have the time to fix that right now. - if $(0) - { - try ; - { - decrement 0 ; - } - catch can't decrement zero! 
; - - try ; - { - check foo ; - } - catch : not a number ; - - try ; - { - increment foo ; - } - catch : not a number ; - - try ; - { - log10 0 ; - } - catch can't take log of zero ; - - try ; - { - log10 000 ; - } - catch can't take log of zero ; - - } - - assert.result 0 : log10 1 ; - assert.result 0 : log10 9 ; - assert.result 1 : log10 10 ; - assert.result 1 : log10 99 ; - assert.result 2 : log10 100 ; - assert.result 2 : log10 101 ; - assert.result 2 : log10 125 ; - assert.result 2 : log10 999 ; - assert.result 3 : log10 1000 ; - assert.result 10 : log10 12345678901 ; - - for local x in [ range 75 110 : 5 ] - { - for local y in [ range $(x) 111 : 3 ] - { - if $(x) != $(y) - { - assert.true less $(x) $(y) ; - } - } - } - - for local x in [ range 90 110 : 2 ] - { - for local y in [ range 80 $(x) : 4 ] - { - assert.false less $(x) $(y) ; - } - } -} diff --git a/jam-files/boost-build/util/option.jam b/jam-files/boost-build/util/option.jam deleted file mode 100644 index f6dc3752..00000000 --- a/jam-files/boost-build/util/option.jam +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (c) 2005 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import modules ; - -# Set a value for a named option, to be used when not overridden on the command -# line. -rule set ( name : value ? ) -{ - .option.$(name) = $(value) ; -} - -rule get ( name : default-value ? : implied-value ? ) -{ - local m = [ MATCH --$(name)=(.*) : [ modules.peek : ARGV ] ] ; - if $(m) - { - return $(m[1]) ; - } - else - { - m = [ MATCH (--$(name)) : [ modules.peek : ARGV ] ] ; - if $(m) && $(implied-value) - { - return $(implied-value) ; - } - else if $(.option.$(name)) - { - return $(.option.$(name)) ; - } - else - { - return $(default-value) ; - } - } -} - - -# Check command-line args as soon as possible. For each option try to load -# module named after option. Is that succeeds, invoke 'process' rule in the -# module. The rule may return "true" to indicate that the regular build process -# should not be attempted. -# -# Options take the general form of: --<name>[=<value>] [<value>] -# -rule process ( ) -{ - local ARGV = [ modules.peek : ARGV ] ; - local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ; - - local dont-build ; - local args = $(ARGV) ; - while $(args) - { - local arg = [ MATCH ^--(.*) : $(args[1]) ] ; - while $(args[2-]) && ! $(arg) - { - args = $(args[2-]) ; - arg = [ MATCH ^--(.*) : $(args[1]) ] ; - } - args = $(args[2-]) ; - - if $(arg) - { - local split = [ MATCH ^(([^-=]+)[^=]*)(=?)(.*)$ : $(arg) ] ; - local full-name = $(split[1]) ; - local prefix = $(split[2]) ; - local values ; - - if $(split[3]) - { - values = $(split[4]) ; - } - if $(args) && ! [ MATCH ^(--).* : $(args[1]) ] - { - values += $(args[1]) ; - args = $(args[2-]) ; - } - - # Jook in options subdirectories of BOOST_BUILD_PATH for modules - # matching the full option name and then its prefix. - local plugin-dir = options ; - local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) : - $(full-name).jam $(prefix).jam ] ; - - if $(option-files) - { - # Load the file into a module named for the option. - local f = $(option-files[1]) ; - local module-name = --$(f:D=:S=) ; - modules.load $(module-name) : $(f:D=) : $(f:D) ; - - # If there is a process rule, call it with the full option name - # and its value (if any). If there was no "=" in the option, the - # value will be empty. 
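An aside on the splitting this process rule performs: arguments of the general form --<name>[=<value>] [<value>] are broken into a name and an optional value list before an option module is looked up. A minimal Python sketch of the same splitting (the function name and the sample arguments are illustrative, not part of Boost.Build):

    def parse_options(args):
        # Split arguments of the general form --<name>[=<value>] [<value>]
        # into (name, values) pairs, mirroring the splitting done by the
        # process rule here.
        result = []
        i = 0
        while i < len(args):
            arg = args[i]
            i += 1
            if not arg.startswith("--"):
                continue                      # plain tokens are skipped at this level
            name, eq, value = arg[2:].partition("=")
            values = [value] if eq else []
            # A following token that is not itself an option is taken as a value.
            if i < len(args) and not args[i].startswith("--"):
                values.append(args[i])
                i += 1
            result.append((name, values))
        return result

    # parse_options(["--toolset=gcc", "--layout", "system"])
    #   -> [("toolset", ["gcc"]), ("layout", ["system"])]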
- if process in [ RULENAMES $(module-name) ] - { - dont-build += [ modules.call-in $(module-name) : process - --$(full-name) : $(values) ] ; - } - } - } - } - - return $(dont-build) ; -} diff --git a/jam-files/boost-build/util/option.py b/jam-files/boost-build/util/option.py deleted file mode 100644 index 47d6abdf..00000000 --- a/jam-files/boost-build/util/option.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2005-2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -import sys -import re -import b2.util.regex - -options = {} - -# Set a value for a named option, to be used when not overridden on the command -# line. -def set(name, value=None): - - global options - - options[name] = value - -def get(name, default_value=None, implied_value=None): - - global options - - matches = b2.util.regex.transform(sys.argv, "--" + re.escape(name) + "=(.*)") - if matches: - return matches[-1] - else: - m = b2.util.regex.transform(sys.argv, "--(" + re.escape(name) + ")") - if m and implied_value: - return implied_value - elif options.has_key(name) and options[name] != None: - return options[name] - else: - return default_value diff --git a/jam-files/boost-build/util/order.jam b/jam-files/boost-build/util/order.jam deleted file mode 100644 index a74fc8c8..00000000 --- a/jam-files/boost-build/util/order.jam +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright (C) 2003 Vladimir Prus -# Use, modification, and distribution is subject to the Boost Software -# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy -# at http://www.boost.org/LICENSE_1_0.txt) - -# This module defines a class which allows to order arbitrary object with -# regard to arbitrary binary relation. -# -# The primary use case is the gcc toolset, which is sensitive to library order: -# if library 'a' uses symbols from library 'b', then 'a' must be present before -# 'b' on the linker's command line. -# -# This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris -# LD (and for Solaris toolset as well), the order always matters. -# -# So, we need to store order requirements and then order libraries according to -# them. It is not possible to use the dependency graph as order requirements. -# What we need is a "use symbols" relationship while dependency graph provides -# the "needs to be updated" relationship. -# -# For example:: -# lib a : a.cpp b; -# lib b ; -# -# For static linking, library 'a' need not depend on 'b'. However, it should -# still come before 'b' on the command line. - -class order -{ - rule __init__ ( ) - { - } - - # Adds the constraint that 'first' should preceede 'second'. - rule add-pair ( first second ) - { - .constraits += $(first)--$(second) ; - } - NATIVE_RULE class@order : add-pair ; - - # Given a list of objects, reorder them so that the constraints specified by - # 'add-pair' are satisfied. - # - # The algorithm was adopted from an awk script by Nikita Youshchenko - # (yoush at cs dot msu dot su) - rule order ( objects * ) - { - # The algorithm used is the same is standard transitive closure, except - # that we're not keeping in-degree for all vertices, but rather removing - # edges. - local result ; - if $(objects) - { - local constraints = [ eliminate-unused-constraits $(objects) ] ; - - # Find some library that nobody depends upon and add it to the - # 'result' array. 
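The loop below is easier to follow as a few lines of Python; the following is only an illustrative sketch of the same edge-removal idea (util/order.py, further down, is the project's actual Python port):

    def order(objects, constraints):
        # Reorder 'objects' so that for every (first, second) constraint,
        # 'first' ends up before 'second'; constraints mentioning objects
        # outside the list are ignored, as in the rule above.
        constraints = [c for c in constraints
                       if c[0] in objects and c[1] in objects]
        result = []
        remaining = list(objects)
        while remaining:
            # Pick an object that no remaining constraint forces to come later.
            for obj in remaining:
                if not any(second == obj for _, second in constraints):
                    break
            else:
                raise RuntimeError("Circular order dependencies")
            result.append(obj)
            remaining.remove(obj)
            # Constraints where 'obj' comes first are now satisfied.
            constraints = [c for c in constraints if c[0] != obj]
        return result

    # order(["l2", "l3", "l1"], [("l1", "l2"), ("l2", "l3")]) -> ["l1", "l2", "l3"]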
- local obj ; - while $(objects) - { - local new_objects ; - while $(objects) - { - obj = $(objects[1]) ; - if [ has-no-dependents $(obj) : $(constraints) ] - { - # Emulate break ; - new_objects += $(objects[2-]) ; - objects = ; - } - else - { - new_objects += $(obj) ; - obj = ; - objects = $(objects[2-]) ; - } - } - - if ! $(obj) - { - errors.error "Circular order dependencies" ; - } - # No problem with placing first. - result += $(obj) ; - # Remove all contraints where 'obj' comes first, since they are - # already satisfied. - constraints = [ remove-satisfied $(constraints) : $(obj) ] ; - - # Add the remaining objects for further processing on the next - # iteration - objects = $(new_objects) ; - } - - } - return $(result) ; - } - NATIVE_RULE class@order : order ; - - # Eliminate constraints which mention objects not in 'objects'. In - # graph-theory terms, this is finding a subgraph induced by ordered - # vertices. - rule eliminate-unused-constraits ( objects * ) - { - local result ; - for local c in $(.constraints) - { - local m = [ MATCH (.*)--(.*) : $(c) ] ; - if $(m[1]) in $(objects) && $(m[2]) in $(objects) - { - result += $(c) ; - } - } - return $(result) ; - } - - # Returns true if there's no constraint in 'constaraints' where 'obj' comes - # second. - rule has-no-dependents ( obj : constraints * ) - { - local failed ; - while $(constraints) && ! $(failed) - { - local c = $(constraints[1]) ; - local m = [ MATCH (.*)--(.*) : $(c) ] ; - if $(m[2]) = $(obj) - { - failed = true ; - } - constraints = $(constraints[2-]) ; - } - if ! $(failed) - { - return true ; - } - } - - rule remove-satisfied ( constraints * : obj ) - { - local result ; - for local c in $(constraints) - { - local m = [ MATCH (.*)--(.*) : $(c) ] ; - if $(m[1]) != $(obj) - { - result += $(c) ; - } - } - return $(result) ; - } -} - - -rule __test__ ( ) -{ - import "class" : new ; - import assert ; - - c1 = [ new order ] ; - $(c1).add-pair l1 l2 ; - - assert.result l1 l2 : $(c1).order l1 l2 ; - assert.result l1 l2 : $(c1).order l2 l1 ; - - $(c1).add-pair l2 l3 ; - assert.result l1 l2 : $(c1).order l2 l1 ; - $(c1).add-pair x l2 ; - assert.result l1 l2 : $(c1).order l2 l1 ; - assert.result l1 l2 l3 : $(c1).order l2 l3 l1 ; -} diff --git a/jam-files/boost-build/util/order.py b/jam-files/boost-build/util/order.py deleted file mode 100644 index 4e67b3f1..00000000 --- a/jam-files/boost-build/util/order.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (C) 2003 Vladimir Prus -# Use, modification, and distribution is subject to the Boost Software -# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy -# at http://www.boost.org/LICENSE_1_0.txt) - -class Order: - """Allows ordering arbitrary objects with regard to arbitrary binary relation. - - The primary use case is the gcc toolset, which is sensitive to - library order: if library 'a' uses symbols from library 'b', - then 'a' must be present before 'b' on the linker's command line. - - This requirement can be lifted for gcc with GNU ld, but for gcc with - Solaris LD (and for Solaris toolset as well), the order always matters. - - So, we need to store order requirements and then order libraries - according to them. It it not possible to use dependency graph as - order requirements. What we need is "use symbols" relationship - while dependency graph provides "needs to be updated" relationship. - - For example:: - lib a : a.cpp b; - lib b ; - - For static linking, the 'a' library need not depend on 'b'. 
However, it - still should come before 'b' on the command line. - """ - - def __init__ (self): - self.constraints_ = [] - - def add_pair (self, first, second): - """ Adds the constraint that 'first' should precede 'second'. - """ - self.constraints_.append ((first, second)) - - def order (self, objects): - """ Given a list of objects, reorder them so that the constains specified - by 'add_pair' are satisfied. - - The algorithm was adopted from an awk script by Nikita Youshchenko - (yoush at cs dot msu dot su) - """ - # The algorithm used is the same is standard transitive closure, - # except that we're not keeping in-degree for all vertices, but - # rather removing edges. - result = [] - - if not objects: - return result - - constraints = self.__eliminate_unused_constraits (objects) - - # Find some library that nobody depends upon and add it to - # the 'result' array. - obj = None - while objects: - new_objects = [] - while objects: - obj = objects [0] - - if self.__has_no_dependents (obj, constraints): - # Emulate break ; - new_objects.extend (objects [1:]) - objects = [] - - else: - new_objects.append (obj) - obj = None - objects = objects [1:] - - if not obj: - raise BaseException ("Circular order dependencies") - - # No problem with placing first. - result.append (obj) - - # Remove all containts where 'obj' comes first, - # since they are already satisfied. - constraints = self.__remove_satisfied (constraints, obj) - - # Add the remaining objects for further processing - # on the next iteration - objects = new_objects - - return result - - def __eliminate_unused_constraits (self, objects): - """ Eliminate constraints which mention objects not in 'objects'. - In graph-theory terms, this is finding subgraph induced by - ordered vertices. - """ - result = [] - for c in self.constraints_: - if c [0] in objects and c [1] in objects: - result.append (c) - - return result - - def __has_no_dependents (self, obj, constraints): - """ Returns true if there's no constraint in 'constraints' where - 'obj' comes second. - """ - failed = False - while constraints and not failed: - c = constraints [0] - - if c [1] == obj: - failed = True - - constraints = constraints [1:] - - return not failed - - def __remove_satisfied (self, constraints, obj): - result = [] - for c in constraints: - if c [0] != obj: - result.append (c) - - return result diff --git a/jam-files/boost-build/util/os.jam b/jam-files/boost-build/util/os.jam deleted file mode 100644 index daef27f7..00000000 --- a/jam-files/boost-build/util/os.jam +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2001, 2002, 2003, 2005 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2003, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import modules ; -import string ; - - -# Return the value(s) of the given environment variable(s) at the time bjam was -# invoked. -rule environ ( variable-names + ) -{ - return [ modules.peek .ENVIRON : $(variable-names) ] ; -} - -.name = [ modules.peek : OS ] ; -.platform = [ modules.peek : OSPLAT ] ; -.version = [ modules.peek : OSVER ] ; - - -local rule constant ( c : os ? ) -{ - os ?= $(.name) ; - # First look for a platform-specific name, then the general value. - local variables = .$(c)-$(os) .$(c) ; - local result = $($(variables)) ; - return $(result[1]) ; -} - -rule get-constant ( os ? ) -{ - # Find the name of the constant being accessed, which is equal to the name - # used to invoke us. 
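For completeness, a hypothetical usage snippet for the Order class defined in util/order.py above; the import path is an assumption (it mirrors the b2.util package used by the other Python files here) and the library names are invented:

    from b2.util.order import Order   # assumed import path for util/order.py

    o = Order()
    o.add_pair("liba", "libb")         # 'liba' must precede 'libb'
    o.add_pair("libb", "libc")
    print(o.order(["libc", "libb", "liba"]))   # -> ['liba', 'libb', 'libc']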
- local bt = [ BACKTRACE 1 ] ; - local rulename = [ MATCH ([^.]*)$ : $(bt[4]) ] ; - return [ constant $(rulename) : $(os) ] ; -} - - -# export all the common constants -.constants = name platform version shared-library-path-variable path-separator executable-path-variable executable-suffix ; -for local constant in $(.constants) -{ - IMPORT $(__name__) : get-constant : $(__name__) : $(constant) ; -} -EXPORT $(__name__) : $(.constants) ; - -.executable-path-variable-NT = PATH ; -# On Windows the case and capitalization of PATH is not always predictable, so -# let's find out what variable name was really set. -if $(.name) = NT -{ - for local n in [ VARNAMES .ENVIRON ] - { - if $(n:L) = path - { - .executable-path-variable-NT = $(n) ; - } - } -} - -# Specific constants for various platforms. There's no need to define any -# constant whose value would be the same as the default, below. -.shared-library-path-variable-NT = $(.executable-path-variable-NT) ; -.path-separator-NT = ";" ; -.expand-variable-prefix-NT = % ; -.expand-variable-suffix-NT = % ; -.executable-suffix-NT = .exe ; - -.shared-library-path-variable-CYGWIN = PATH ; - -.shared-library-path-variable-MACOSX = DYLD_LIBRARY_PATH ; - -.shared-library-path-variable-AIX = LIBPATH ; - -# Default constants -.shared-library-path-variable = LD_LIBRARY_PATH ; -.path-separator = ":" ; -.expand-variable-prefix = $ ; -.expand-variable-suffix = "" ; -.executable-path-variable = PATH ; -.executable-suffix = "" ; - - -# Return a list of the directories in the PATH. Yes, that information is (sort -# of) available in the global module, but jam code can change those values, and -# it isn't always clear what case/capitalization to use when looking. This rule -# is a more reliable way to get there. -rule executable-path ( ) -{ - return [ string.words [ environ [ constant executable-path-variable ] ] - : [ constant path-separator ] ] ; -} - - -# Initialize the list of home directories for the current user depending on the -# OS. -if $(.name) = NT -{ - local home = [ environ HOMEDRIVE HOMEPATH ] ; - .home-directories = $(home[1])$(home[2]) [ environ HOME ] [ environ USERPROFILE ] ; -} -else -{ - .home-directories = [ environ HOME ] ; -} - - -# Can't use 'constant' mechanism because it only returns 1-element values. -rule home-directories ( ) -{ - return $(.home-directories) ; -} - - -# Return the string needed to represent the expansion of the named shell -# variable. -rule expand-variable ( variable ) -{ - local prefix = [ constant expand-variable-prefix ] ; - local suffix = [ constant expand-variable-suffix ] ; - return $(prefix)$(variable)$(suffix) ; -} - - -# Returns true if running on windows, whether in cygwin or not. -rule on-windows ( ) -{ - local result ; - if [ modules.peek : NT ] - { - result = true ; - } - else if [ modules.peek : UNIX ] - { - switch [ modules.peek : JAMUNAME ] - { - case CYGWIN* : - { - result = true ; - } - } - } - return $(result) ; -} - - -if ! [ on-windows ] -{ - .on-unix = 1 ; -} - - -rule on-unix -{ - return $(.on-unix) ; -} - - -rule __test__ -{ - import assert ; - if ! ( --quiet in [ modules.peek : ARGV ] ) - { - ECHO os: name= [ name ] ; - ECHO os: version= [ version ] ; - } - assert.true name ; -} diff --git a/jam-files/boost-build/util/os_j.py b/jam-files/boost-build/util/os_j.py deleted file mode 100644 index f44cca62..00000000 --- a/jam-files/boost-build/util/os_j.py +++ /dev/null @@ -1,19 +0,0 @@ -# Status: stub, just enough to make tests work. -# -# Named os_j to avoid conflicts with standard 'os'. 
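The per-platform tables in util/os.jam above amount to a two-step lookup: an OS-specific value first, then a generic default. An illustrative Python sketch, with values copied from the tables above:

    # Generic defaults, overridden per OS where needed (values from os.jam above).
    _defaults = {
        "path-separator": ":",
        "shared-library-path-variable": "LD_LIBRARY_PATH",
        "executable-suffix": "",
    }
    _per_os = {
        "NT":     {"path-separator": ";",
                   "shared-library-path-variable": "PATH",
                   "executable-suffix": ".exe"},
        "CYGWIN": {"shared-library-path-variable": "PATH"},
        "MACOSX": {"shared-library-path-variable": "DYLD_LIBRARY_PATH"},
        "AIX":    {"shared-library-path-variable": "LIBPATH"},
    }

    def constant(name, os_name):
        # OS-specific value first, then the general default.
        return _per_os.get(os_name, {}).get(name, _defaults[name])

    # constant("path-separator", "NT")                   -> ";"
    # constant("shared-library-path-variable", "MACOSX") -> "DYLD_LIBRARY_PATH"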
See -# project.py:import for special-casing. -# -# Copyright 2001, 2002, 2003, 2005 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2003, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import bjam - -__OS = bjam.call("peek", [], "OS")[0] - -# Return Jam's name of OS to prevent existing code from burning -# when faced with Python naming -def name(): - return __OS diff --git a/jam-files/boost-build/util/path.jam b/jam-files/boost-build/util/path.jam deleted file mode 100644 index ea26b816..00000000 --- a/jam-files/boost-build/util/path.jam +++ /dev/null @@ -1,934 +0,0 @@ -# Copyright Vladimir Prus 2002-2006. -# Copyright Dave Abrahams 2003-2004. -# Copyright Rene Rivera 2003-2006. -# -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Performs various path manipulations. Paths are always in a 'normalized' -# representation. In it, a path may be either: -# -# - '.', or -# -# - ['/'] [ ( '..' '/' )* (token '/')* token ] -# -# In plain english, path can be rooted, '..' elements are allowed only at the -# beginning, and it never ends in slash, except for path consisting of slash -# only. - -import errors ; -import modules ; -import regex ; -import sequence ; -import set ; -import version ; - - -os = [ modules.peek : OS ] ; -if [ modules.peek : UNIX ] -{ - local uname = [ modules.peek : JAMUNAME ] ; - switch $(uname) - { - case CYGWIN* : os = CYGWIN ; - case * : os = UNIX ; - } -} - - -# Converts the native path into normalized form. -# -rule make ( native ) -{ - return [ make-$(os) $(native) ] ; -} - - -# Builds native representation of the path. -# -rule native ( path ) -{ - return [ native-$(os) $(path) ] ; -} - - -# Tests if a path is rooted. -# -rule is-rooted ( path ) -{ - return [ MATCH "^(/)" : $(path) ] ; -} - - -# Tests if a path has a parent. -# -rule has-parent ( path ) -{ - if $(path) != / - { - return 1 ; - } - else - { - return ; - } -} - - -# Returns the path without any directory components. -# -rule basename ( path ) -{ - return [ MATCH "([^/]+)$" : $(path) ] ; -} - - -# Returns parent directory of the path. If no parent exists, error is issued. -# -rule parent ( path ) -{ - if [ has-parent $(path) ] - { - if $(path) = . - { - return .. ; - } - else - { - # Strip everything at the end of path up to and including the last - # slash. - local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ; - - # Did we strip what we shouldn't? - if $(result[2]) = ".." - { - return $(path)/.. ; - } - else - { - if ! $(result[1]) - { - if [ is-rooted $(path) ] - { - result = / ; - } - else - { - result = . ; - } - } - return $(result[1]) ; - } - } - } - else - { - errors.error "Path '$(path)' has no parent" ; - } -} - - -# Returns path2 such that "[ join path path2 ] = .". The path may not contain -# ".." element or be rooted. -# -rule reverse ( path ) -{ - if $(path) = . - { - return $(path) ; - } - else - { - local tokens = [ regex.split $(path) "/" ] ; - local tokens2 ; - for local i in $(tokens) - { - tokens2 += .. ; - } - return [ sequence.join $(tokens2) : "/" ] ; - } -} - - -# Concatenates the passed path elements. Generates an error if any element other -# than the first one is rooted. Skips any empty or undefined path elements. -# -rule join ( elements + ) -{ - if ! 
$(elements[2-]) - { - return $(elements[1]) ; - } - else - { - for local e in $(elements[2-]) - { - if [ is-rooted $(e) ] - { - errors.error only the first element may be rooted ; - } - } - if [ version.check-jam-version 3 1 17 ] - { - return [ NORMALIZE_PATH "$(elements)" ] ; - } - else - { - # Boost Jam prior to version 3.1.17 had problems with its - # NORMALIZE_PATH rule in case you passed it a leading backslash - # instead of a slash, in some cases when you sent it an empty - # initial path element and possibly some others. At least some of - # those cases were being hit and relied upon when calling this rule - # from the path.make-NT rule. - if ! $(elements[1]) && $(elements[2]) - { - return [ NORMALIZE_PATH "/" "$(elements[2-])" ] ; - } - else - { - return [ NORMALIZE_PATH "$(elements)" ] ; - } - } - } -} - - -# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged. -# -rule root ( path root ) -{ - if [ is-rooted $(path) ] - { - return $(path) ; - } - else - { - return [ join $(root) $(path) ] ; - } -} - - -# Returns the current working directory. -# -rule pwd ( ) -{ - if ! $(.pwd) - { - .pwd = [ make [ PWD ] ] ; - } - return $(.pwd) ; -} - - -# Returns the list of files matching the given pattern in the specified -# directory. Both directories and patterns are supplied as portable paths. Each -# pattern should be non-absolute path, and can't contain "." or ".." elements. -# Each slash separated element of pattern can contain the following special -# characters: -# - '?', which match any character -# - '*', which matches arbitrary number of characters. -# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and -# only if e1 matches p1, e2 matches p2 and so on. -# -# For example: -# [ glob . : *.cpp ] -# [ glob . : */build/Jamfile ] -# -rule glob ( dirs * : patterns + : exclude-patterns * ) -{ - local result ; - local real-patterns ; - local real-exclude-patterns ; - for local d in $(dirs) - { - for local p in $(patterns) - { - local pattern = [ path.root $(p) $(d) ] ; - real-patterns += [ path.native $(pattern) ] ; - } - - for local p in $(exclude-patterns) - { - local pattern = [ path.root $(p) $(d) ] ; - real-exclude-patterns += [ path.native $(pattern) ] ; - } - } - - local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ; - inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ; - local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ; - exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ; - - return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ] - ; -} - - -# Recursive version of GLOB. Builds the glob of files while also searching in -# the subdirectories of the given roots. An optional set of exclusion patterns -# will filter out the matching entries from the result. The exclusions also -# apply to the subdirectory scanning, such that directories that match the -# exclusion patterns will not be searched. -# -rule glob-tree ( roots * : patterns + : exclude-patterns * ) -{ - return [ sequence.transform path.make : [ .glob-tree [ sequence.transform - path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ; -} - - -local rule .glob-tree ( roots * : patterns * : exclude-patterns * ) -{ - local excluded ; - if $(exclude-patterns) - { - excluded = [ GLOB $(roots) : $(exclude-patterns) ] ; - } - local result = [ set.difference [ GLOB $(roots) : $(patterns) ] : - $(excluded) ] ; - local subdirs ; - for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ] - { - if ! ( $(d:D=) in . .. ) && ! 
[ CHECK_IF_FILE $(d) ] - { - subdirs += $(d) ; - } - } - if $(subdirs) - { - result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ] - ; - } - return $(result) ; -} - - -# Returns true is the specified file exists. -# -rule exists ( file ) -{ - return [ path.glob $(file:D) : $(file:D=) ] ; -} -NATIVE_RULE path : exists ; - - -# Find out the absolute name of path and returns the list of all the parents, -# starting with the immediate one. Parents are returned as relative names. If -# 'upper_limit' is specified, directories above it will be pruned. -# -rule all-parents ( path : upper_limit ? : cwd ? ) -{ - cwd ?= [ pwd ] ; - local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ; - - if ! $(upper_limit) - { - upper_limit = / ; - } - local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ; - - # Leave only elements in 'path_ele' below 'upper_ele'. - while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) ) - { - upper_ele = $(upper_ele[2-]) ; - path_ele = $(path_ele[2-]) ; - } - - # Have all upper elements been removed ? - if $(upper_ele) - { - errors.error "$(upper_limit) is not prefix of $(path)" ; - } - - # Create the relative paths to parents, number of elements in 'path_ele'. - local result ; - for local i in $(path_ele) - { - path = [ parent $(path) ] ; - result += $(path) ; - } - return $(result) ; -} - - -# Search for 'pattern' in parent directories of 'dir', up till and including -# 'upper_limit', if it is specified, or till the filesystem root otherwise. -# -rule glob-in-parents ( dir : patterns + : upper-limit ? ) -{ - local result ; - local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ; - - while $(parent-dirs) && ! $(result) - { - result = [ glob $(parent-dirs[1]) : $(patterns) ] ; - parent-dirs = $(parent-dirs[2-]) ; - } - return $(result) ; -} - - -# Assuming 'child' is a subdirectory of 'parent', return the relative path from -# 'parent' to 'child'. -# -rule relative ( child parent : no-error ? ) -{ - local not-a-child ; - if $(parent) = "." - { - return $(child) ; - } - else - { - local split1 = [ regex.split $(parent) / ] ; - local split2 = [ regex.split $(child) / ] ; - - while $(split1) - { - if $(split1[1]) = $(split2[1]) - { - split1 = $(split1[2-]) ; - split2 = $(split2[2-]) ; - } - else - { - not-a-child = true ; - split1 = ; - } - } - if $(split2) - { - if $(not-a-child) - { - if $(no-error) - { - return not-a-child ; - } - else - { - errors.error $(child) is not a subdir of $(parent) ; - } - } - else - { - return [ join $(split2) ] ; - } - } - else - { - return "." ; - } - } -} - - -# Returns the minimal path to path2 that is relative path1. -# -rule relative-to ( path1 path2 ) -{ - local root_1 = [ regex.split [ reverse $(path1) ] / ] ; - local split1 = [ regex.split $(path1) / ] ; - local split2 = [ regex.split $(path2) / ] ; - - while $(split1) && $(root_1) - { - if $(split1[1]) = $(split2[1]) - { - root_1 = $(root_1[2-]) ; - split1 = $(split1[2-]) ; - split2 = $(split2[2-]) ; - } - else - { - split1 = ; - } - } - return [ join . $(root_1) $(split2) ] ; -} - - -# Returns the list of paths which are used by the operating system for looking -# up programs. -# -rule programs-path ( ) -{ - local result ; - local raw = [ modules.peek : PATH Path path ] ; - for local p in $(raw) - { - if $(p) - { - result += [ path.make $(p) ] ; - } - } - return $(result) ; -} - -rule makedirs ( path ) -{ - local result = true ; - local native = [ native $(path) ] ; - if ! 
[ exists $(native) ] - { - if [ makedirs [ parent $(path) ] ] - { - if ! [ MAKEDIR $(native) ] - { - errors.error "Could not create directory '$(path)'" ; - result = ; - } - } - } - return $(result) ; -} - -# Converts native Windows paths into our internal canonic path representation. -# Supports 'invalid' paths containing multiple successive path separator -# characters. -# -# TODO: Check and if needed add support for Windows 'X:file' path format where -# the file is located in the current folder on drive X. -# -rule make-NT ( native ) -{ - local result ; - - if [ version.check-jam-version 3 1 17 ] - { - result = [ NORMALIZE_PATH $(native) ] ; - } - else - { - # This old implementation is really fragile due to a not so clear way - # NORMALIZE_PATH rule worked in Boost.Jam versions prior to 3.1.17. E.g. - # path.join would mostly ignore empty path elements but would root the - # joined path in case the initial two path elements were empty or some - # similar accidental wierdness. - result = [ path.join [ regex.split $(native) "[/\\]" ] ] ; - } - - # We need to add an extra '/' in front in case this is a rooted Windows path - # starting with a drive letter and not a path separator character since the - # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter - # and treats it as a regular folder name. - if [ regex.match "(^.:)" : $(native) ] - { - result = /$(result) ; - } - - return $(result) ; -} - - -rule native-NT ( path ) -{ - local result ; - if [ is-rooted $(path) ] && ! [ regex.match "^/(.:)" : $(path) ] - { - result = $(path) ; - } - else - { - result = [ MATCH "^/?(.*)" : $(path) ] ; - } - result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ; - return $(result) ; -} - - -rule make-UNIX ( native ) -{ - # VP: I have no idea now 'native' can be empty here! But it can! - if ! $(native) - { - errors.error "Empty path passed to 'make-UNIX'" ; - } - else - { - return [ NORMALIZE_PATH $(native:T) ] ; - } -} - - -rule native-UNIX ( path ) -{ - return $(path) ; -} - - -rule make-CYGWIN ( path ) -{ - return [ make-NT $(path) ] ; -} - - -rule native-CYGWIN ( path ) -{ - local result = $(path) ; - if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path. - { - result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'. - } - return [ native-UNIX $(result) ] ; -} - - -# split-path-VMS: splits input native path into device dir file (each part is -# optional). -# -# example: -# -# dev:[dir]file.c => dev: [dir] file.c -# -rule split-path-VMS ( native ) -{ - local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ; - local device = $(matches[1]) ; - local dir = $(matches[2]) ; - local file = $(matches[3]) ; - - return $(device) $(dir) $(file) ; -} - - -# Converts a native VMS path into a portable path spec. -# -# Does not handle current-device absolute paths such as "[dir]File.c" as it is -# not clear how to represent them in the portable path notation. -# -# Adds a trailing dot (".") to the file part if no extension is present (helps -# when converting it back into native path). 
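The VMS handling below hinges on a single regular expression that splits a native path into device, directory and file parts. An equivalent Python sketch (the function name is illustrative; the sample results match split-path-VMS's own tests further down):

    import re

    def split_path_vms(native):
        # 'dev:[dir]file.c' -> ('dev:', '[dir]', 'file.c'); every part is optional.
        m = re.match(r"([a-zA-Z0-9_-]+:)?(\[[^\]]*\])?(.*)?$", native)
        return tuple(part or "" for part in m.groups())

    # split_path_vms("disk:[dir]file") -> ("disk:", "[dir]", "file")
    # split_path_vms("[dir]file")      -> ("", "[dir]", "file")
    # split_path_vms("disk:file")      -> ("disk:", "", "file")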
-# -rule make-VMS ( native ) -{ - if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ] - { - errors.error "Can't handle default-device absolute paths: " $(native) ; - } - - local parts = [ split-path-VMS $(native) ] ; - local device = $(parts[1]) ; - local dir = $(parts[2]) ; - local file = $(parts[3]) ; - local elems ; - - if $(device) - { - # - # rooted - # - elems = /$(device) ; - } - - if $(dir) = "[]" - { - # - # Special case: current directory - # - elems = $(elems) "." ; - } - else if $(dir) - { - dir = [ regex.replace $(dir) "\\[|\\]" "" ] ; - local dir_parts = [ regex.split $(dir) \\. ] ; - - if $(dir_parts[1]) = "" - { - # - # Relative path - # - dir_parts = $(dir_parts[2--1]) ; - } - - # - # replace "parent-directory" parts (- => ..) - # - dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ; - - elems = $(elems) $(dir_parts) ; - } - - if $(file) - { - if ! [ MATCH (\\.) : $(file) ] - { - # - # Always add "." to end of non-extension file. - # - file = $(file). ; - } - elems = $(elems) $(file) ; - } - - local portable = [ path.join $(elems) ] ; - - return $(portable) ; -} - - -# Converts a portable path spec into a native VMS path. -# -# Relies on having at least one dot (".") included in the file name to be able -# to differentiate it from the directory part. -# -rule native-VMS ( path ) -{ - local device = "" ; - local dir = $(path) ; - local file = "" ; - local native ; - local split ; - - # - # Has device ? - # - if [ is-rooted $(dir) ] - { - split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ; - device = $(split[1]) ; - dir = $(split[2]) ; - } - - # - # Has file ? - # - # This is no exact science, just guess work: - # - # If the last part of the current path spec - # includes some chars, followed by a dot, - # optionally followed by more chars - - # then it is a file (keep your fingers crossed). - # - split = [ regex.split $(dir) / ] ; - local maybe_file = $(split[-1]) ; - - if [ MATCH ^([^.]+\\..*) : $(maybe_file) ] - { - file = $(maybe_file) ; - dir = [ sequence.join $(split[1--2]) : / ] ; - } - - # - # Has dir spec ? - # - if $(dir) = "." - { - dir = "[]" ; - } - else if $(dir) - { - dir = [ regex.replace $(dir) \\.\\. - ] ; - dir = [ regex.replace $(dir) / . ] ; - - if $(device) = "" - { - # - # Relative directory - # - dir = "."$(dir) ; - } - dir = "["$(dir)"]" ; - } - - native = [ sequence.join $(device) $(dir) $(file) ] ; - - return $(native) ; -} - - -rule __test__ ( ) -{ - import assert ; - import errors : try catch ; - - assert.true is-rooted "/" ; - assert.true is-rooted "/foo" ; - assert.true is-rooted "/foo/bar" ; - assert.result : is-rooted "." ; - assert.result : is-rooted "foo" ; - assert.result : is-rooted "foo/bar" ; - - assert.true has-parent "foo" ; - assert.true has-parent "foo/bar" ; - assert.true has-parent "." ; - assert.result : has-parent "/" ; - - assert.result "." : basename "." ; - assert.result ".." : basename ".." ; - assert.result "foo" : basename "foo" ; - assert.result "foo" : basename "bar/foo" ; - assert.result "foo" : basename "gaz/bar/foo" ; - assert.result "foo" : basename "/gaz/bar/foo" ; - - assert.result "." : parent "foo" ; - assert.result "/" : parent "/foo" ; - assert.result "foo/bar" : parent "foo/bar/giz" ; - assert.result ".." : parent "." ; - assert.result ".." : parent "../foo" ; - assert.result "../../foo" : parent "../../foo/bar" ; - - assert.result "." : reverse "." ; - assert.result ".." : reverse "foo" ; - assert.result "../../.." 
: reverse "foo/bar/giz" ; - - assert.result "foo" : join "foo" ; - assert.result "/foo" : join "/" "foo" ; - assert.result "foo/bar" : join "foo" "bar" ; - assert.result "foo/bar" : join "foo/giz" "../bar" ; - assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ; - assert.result ".." : join "." ".." ; - assert.result ".." : join "foo" "../.." ; - assert.result "../.." : join "../foo" "../.." ; - assert.result "/foo" : join "/bar" "../foo" ; - assert.result "foo/giz" : join "foo/giz" "." ; - assert.result "." : join lib2 ".." ; - assert.result "/" : join "/a" ".." ; - - assert.result /a/b : join /a/b/c .. ; - - assert.result "foo/bar/giz" : join "foo" "bar" "giz" ; - assert.result "giz" : join "foo" ".." "giz" ; - assert.result "foo/giz" : join "foo" "." "giz" ; - - try ; - { - join "a" "/b" ; - } - catch only first element may be rooted ; - - local CWD = "/home/ghost/build" ; - assert.result : all-parents . : . : $(CWD) ; - assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ; - assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ; - assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ; - - local CWD = "/home/ghost" ; - assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ; - assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ; - - assert.result "c/d" : relative "a/b/c/d" "a/b" ; - assert.result "foo" : relative "foo" "." ; - - local save-os = [ modules.peek path : os ] ; - modules.poke path : os : NT ; - - assert.result "foo/bar/giz" : make "foo/bar/giz" ; - assert.result "foo/bar/giz" : make "foo\\bar\\giz" ; - assert.result "foo" : make "foo/" ; - assert.result "foo" : make "foo\\" ; - assert.result "foo" : make "foo/." ; - assert.result "foo" : make "foo/bar/.." ; - assert.result "foo" : make "foo/bar/../" ; - assert.result "foo" : make "foo/bar/..\\" ; - assert.result "foo/bar" : make "foo/././././bar" ; - assert.result "/foo" : make "\\foo" ; - assert.result "/D:/My Documents" : make "D:\\My Documents" ; - assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ; - - # Test processing 'invalid' paths containing multiple successive path - # separators. - assert.result "foo" : make "foo//" ; - assert.result "foo" : make "foo///" ; - assert.result "foo" : make "foo\\\\" ; - assert.result "foo" : make "foo\\\\\\" ; - assert.result "/foo" : make "//foo" ; - assert.result "/foo" : make "///foo" ; - assert.result "/foo" : make "\\\\foo" ; - assert.result "/foo" : make "\\\\\\foo" ; - assert.result "/foo" : make "\\/\\/foo" ; - assert.result "foo/bar" : make "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ; - assert.result "foo" : make "foo/bar//.." ; - assert.result "foo/bar" : make "foo/bar/giz//.." ; - assert.result "foo/giz" : make "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ; - assert.result "../../../foo" : make "..///.//..///.//..////foo///" ; - - # Test processing 'invalid' rooted paths with too many '..' path elements - # that would place them before the root. - assert.result : make "/.." ; - assert.result : make "/../" ; - assert.result : make "/../." ; - assert.result : make "/.././" ; - assert.result : make "/foo/../bar/giz/.././././../../." ; - assert.result : make "/foo/../bar/giz/.././././../.././" ; - assert.result : make "//foo/../bar/giz/.././././../../." ; - assert.result : make "//foo/../bar/giz/.././././../.././" ; - assert.result : make "\\\\foo/../bar/giz/.././././../../." 
; - assert.result : make "\\\\foo/../bar/giz/.././././../.././" ; - assert.result : make "/..///.//..///.//..////foo///" ; - - assert.result "foo\\bar\\giz" : native "foo/bar/giz" ; - assert.result "foo" : native "foo" ; - assert.result "\\foo" : native "/foo" ; - assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ; - - modules.poke path : os : UNIX ; - - assert.result "foo/bar/giz" : make "foo/bar/giz" ; - assert.result "/sub1" : make "/sub1/." ; - assert.result "/sub1" : make "/sub1/sub2/.." ; - assert.result "sub1" : make "sub1/." ; - assert.result "sub1" : make "sub1/sub2/.." ; - assert.result "/foo/bar" : native "/foo/bar" ; - - modules.poke path : os : VMS ; - - # - # Don't really need to poke os before these - # - assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ; - assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ; - assert.result "disk:" "" "" : split-path-VMS "disk:" ; - assert.result "disk:" "" "file" : split-path-VMS "disk:file" ; - assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ; - assert.result "" "[dir]" "" : split-path-VMS "[dir]" ; - assert.result "" "" "file" : split-path-VMS "file" ; - assert.result "" "" "" : split-path-VMS "" ; - - # - # Special case: current directory - # - assert.result "" "[]" "" : split-path-VMS "[]" ; - assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ; - assert.result "" "[]" "file" : split-path-VMS "[]file" ; - assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ; - - # - # Make portable paths - # - assert.result "/disk:" : make "disk:" ; - assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ; - assert.result "foo" : make "[.foo]" ; - assert.result "foo" : make "[.foo.bar.-]" ; - assert.result ".." : make "[.-]" ; - assert.result ".." : make "[-]" ; - assert.result "." : make "[]" ; - assert.result "giz.h" : make "giz.h" ; - assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ; - assert.result "/disk:/my_docs" : make "disk:[my_docs]" ; - assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ; - - # - # Special case (adds '.' to end of file w/o extension to - # disambiguate from directory in portable path spec). - # - assert.result "Jamfile." : make "Jamfile" ; - assert.result "dir/Jamfile." : make "[.dir]Jamfile" ; - assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ; - - # - # Make native paths - # - assert.result "disk:" : native "/disk:" ; - assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ; - assert.result "[.foo]" : native "foo" ; - assert.result "[.-]" : native ".." ; - assert.result "[.foo.-]" : native "foo/.." ; - assert.result "[]" : native "." ; - assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ; - assert.result "giz.h" : native "giz.h" ; - assert.result "disk:Jamfile." : native "/disk:Jamfile." ; - assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ; - - modules.poke path : os : $(save-os) ; -} diff --git a/jam-files/boost-build/util/path.py b/jam-files/boost-build/util/path.py deleted file mode 100644 index 222b96bf..00000000 --- a/jam-files/boost-build/util/path.py +++ /dev/null @@ -1,904 +0,0 @@ -# Status: this module is ported on demand by however needs something -# from it. Functionality that is not needed by Python port will -# be dropped. - -# Copyright (C) Vladimir Prus 2002. 
Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -# Performs various path manipulations. Path are always in a 'normilized' -# representation. In it, a path may be either: -# -# - '.', or -# -# - ['/'] [ ( '..' '/' )* (token '/')* token ] -# -# In plain english, path can be rooted, '..' elements are allowed only -# at the beginning, and it never ends in slash, except for path consisting -# of slash only. - -import os.path -from utility import to_seq -from glob import glob as builtin_glob - -from b2.util import bjam_signature - -@bjam_signature((["path", "root"],)) -def root (path, root): - """ If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged. - """ - if os.path.isabs (path): - return path - else: - return os.path.join (root, path) - -@bjam_signature((["native"],)) -def make (native): - """ Converts the native path into normalized form. - """ - # TODO: make os selection here. - return make_UNIX (native) - -def make_UNIX (native): - - # VP: I have no idea now 'native' can be empty here! But it can! - assert (native) - - return os.path.normpath (native) - -@bjam_signature((["path"],)) -def native (path): - """ Builds a native representation of the path. - """ - # TODO: make os selection here. - return native_UNIX (path) - -def native_UNIX (path): - return path - - -def pwd (): - """ Returns the current working directory. - # TODO: is it a good idea to use the current dir? Some use-cases - may not allow us to depend on the current dir. - """ - return make (os.getcwd ()) - -def is_rooted (path): - """ Tests if a path is rooted. - """ - return path and path [0] == '/' - - -################################################################### -# Still to port. -# Original lines are prefixed with "# " -# -# # Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -# # distribute this software is granted provided this copyright notice appears in -# # all copies. This software is provided "as is" without express or implied -# # warranty, and with no claim as to its suitability for any purpose. -# -# # Performs various path manipulations. Path are always in a 'normilized' -# # representation. In it, a path may be either: -# # -# # - '.', or -# # -# # - ['/'] [ ( '..' '/' )* (token '/')* token ] -# # -# # In plain english, path can be rooted, '..' elements are allowed only -# # at the beginning, and it never ends in slash, except for path consisting -# # of slash only. -# -# import modules ; -# import sequence ; -# import regex ; -# import errors : error ; -# -# -# os = [ modules.peek : OS ] ; -# if [ modules.peek : UNIX ] -# { -# local uname = [ modules.peek : JAMUNAME ] ; -# switch $(uname) -# { -# case CYGWIN* : -# os = CYGWIN ; -# -# case * : -# os = UNIX ; -# } -# } -# -# # -# # Tests if a path is rooted. -# # -# rule is-rooted ( path ) -# { -# return [ MATCH "^(/)" : $(path) ] ; -# } -# -# # -# # Tests if a path has a parent. -# # -# rule has-parent ( path ) -# { -# if $(path) != / { -# return 1 ; -# } else { -# return ; -# } -# } -# -# # -# # Returns the path without any directory components. -# # -# rule basename ( path ) -# { -# return [ MATCH "([^/]+)$" : $(path) ] ; -# } -# -# # -# # Returns parent directory of the path. If no parent exists, error is issued. 
-# # -# rule parent ( path ) -# { -# if [ has-parent $(path) ] { -# -# if $(path) = . { -# return .. ; -# } else { -# -# # Strip everything at the end of path up to and including -# # the last slash -# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ; -# -# # Did we strip what we shouldn't? -# if $(result[2]) = ".." { -# return $(path)/.. ; -# } else { -# if ! $(result[1]) { -# if [ is-rooted $(path) ] { -# result = / ; -# } else { -# result = . ; -# } -# } -# return $(result[1]) ; -# } -# } -# } else { -# error "Path '$(path)' has no parent" ; -# } -# } -# -# # -# # Returns path2 such that "[ join path path2 ] = .". -# # The path may not contain ".." element or be rooted. -# # -# rule reverse ( path ) -# { -# if $(path) = . -# { -# return $(path) ; -# } -# else -# { -# local tokens = [ regex.split $(path) "/" ] ; -# local tokens2 ; -# for local i in $(tokens) { -# tokens2 += .. ; -# } -# return [ sequence.join $(tokens2) : "/" ] ; -# } -# } -# -# # -# # Auxillary rule: does all the semantic of 'join', except for error cheching. -# # The error checking is separated because this rule is recursive, and I don't -# # like the idea of checking the same input over and over. -# # -# local rule join-imp ( elements + ) -# { -# return [ NORMALIZE_PATH $(elements:J="/") ] ; -# } -# -# # -# # Contanenates the passed path elements. Generates an error if -# # any element other than the first one is rooted. -# # -# rule join ( elements + ) -# { -# if ! $(elements[2]) -# { -# return $(elements[1]) ; -# } -# else -# { -# for local e in $(elements[2-]) -# { -# if [ is-rooted $(e) ] -# { -# error only first element may be rooted ; -# } -# } -# return [ join-imp $(elements) ] ; -# } -# } - - -def glob (dirs, patterns): - """ Returns the list of files matching the given pattern in the - specified directory. Both directories and patterns are - supplied as portable paths. Each pattern should be non-absolute - path, and can't contain "." or ".." elements. Each slash separated - element of pattern can contain the following special characters: - - '?', which match any character - - '*', which matches arbitrary number of characters. - A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 - if and only if e1 matches p1, e2 matches p2 and so on. - - For example: - [ glob . : *.cpp ] - [ glob . : */build/Jamfile ] - """ -# { -# local result ; -# if $(patterns:D) -# { -# # When a pattern has a directory element, we first glob for -# # directory, and then glob for file name is the found directories. -# for local p in $(patterns) -# { -# # First glob for directory part. -# local globbed-dirs = [ glob $(dirs) : $(p:D) ] ; -# result += [ glob $(globbed-dirs) : $(p:D="") ] ; -# } -# } -# else -# { -# # When a pattern has not directory, we glob directly. -# # Take care of special ".." value. The "GLOB" rule simply ignores -# # the ".." element (and ".") element in directory listings. This is -# # needed so that -# # -# # [ glob libs/*/Jamfile ] -# # -# # don't return -# # -# # libs/../Jamfile (which is the same as ./Jamfile) -# # -# # On the other hand, when ".." is explicitly present in the pattern -# # we need to return it. -# # -# for local dir in $(dirs) -# { -# for local p in $(patterns) -# { -# if $(p) != ".." -# { -# result += [ sequence.transform make -# : [ GLOB [ native $(dir) ] : $(p) ] ] ; -# } -# else -# { -# result += [ path.join $(dir) .. ] ; -# } -# } -# } -# } -# return $(result) ; -# } -# - -# TODO: (PF) I replaced the code above by this. 
I think it should work but needs to be tested. - result = [] - dirs = to_seq (dirs) - patterns = to_seq (patterns) - - splitdirs = [] - for dir in dirs: - splitdirs += dir.split (os.pathsep) - - for dir in splitdirs: - for pattern in patterns: - p = os.path.join (dir, pattern) - import glob - result.extend (glob.glob (p)) - return result - -# -# Find out the absolute name of path and returns the list of all the parents, -# starting with the immediate one. Parents are returned as relative names. -# If 'upper_limit' is specified, directories above it will be pruned. -# -def all_parents(path, upper_limit=None, cwd=None): - - if not cwd: - cwd = os.getcwd() - - path_abs = os.path.join(cwd, path) - - if upper_limit: - upper_limit = os.path.join(cwd, upper_limit) - - result = [] - while path_abs and path_abs != upper_limit: - (head, tail) = os.path.split(path) - path = os.path.join(path, "..") - result.append(path) - path_abs = head - - if upper_limit and path_abs != upper_limit: - raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path)) - - return result - -# Search for 'pattern' in parent directories of 'dir', up till and including -# 'upper_limit', if it is specified, or till the filesystem root otherwise. -# -def glob_in_parents(dir, patterns, upper_limit=None): - - result = [] - parent_dirs = all_parents(dir, upper_limit) - - for p in parent_dirs: - result = glob(p, patterns) - if result: break - - return result - -# -# # -# # Assuming 'child' is a subdirectory of 'parent', return the relative -# # path from 'parent' to 'child' -# # -# rule relative ( child parent ) -# { -# if $(parent) = "." -# { -# return $(child) ; -# } -# else -# { -# local split1 = [ regex.split $(parent) / ] ; -# local split2 = [ regex.split $(child) / ] ; -# -# while $(split1) -# { -# if $(split1[1]) = $(split2[1]) -# { -# split1 = $(split1[2-]) ; -# split2 = $(split2[2-]) ; -# } -# else -# { -# errors.error $(child) is not a subdir of $(parent) ; -# } -# } -# return [ join $(split2) ] ; -# } -# } -# -# # Returns the minimal path to path2 that is relative path1. -# # -# rule relative-to ( path1 path2 ) -# { -# local root_1 = [ regex.split [ reverse $(path1) ] / ] ; -# local split1 = [ regex.split $(path1) / ] ; -# local split2 = [ regex.split $(path2) / ] ; -# -# while $(split1) && $(root_1) -# { -# if $(split1[1]) = $(split2[1]) -# { -# root_1 = $(root_1[2-]) ; -# split1 = $(split1[2-]) ; -# split2 = $(split2[2-]) ; -# } -# else -# { -# split1 = ; -# } -# } -# return [ join . $(root_1) $(split2) ] ; -# } - -# Returns the list of paths which are used by the operating system -# for looking up programs -def programs_path (): - raw = [] - names = ['PATH', 'Path', 'path'] - - for name in names: - raw.append(os.environ.get (name, '')) - - result = [] - for elem in raw: - if elem: - for p in elem.split(os.path.pathsep): - result.append(make(p)) - - return result - -# rule make-NT ( native ) -# { -# local tokens = [ regex.split $(native) "[/\\]" ] ; -# local result ; -# -# # Handle paths ending with slashes -# if $(tokens[-1]) = "" -# { -# tokens = $(tokens[1--2]) ; # discard the empty element -# } -# -# result = [ path.join $(tokens) ] ; -# -# if [ regex.match "(^.:)" : $(native) ] -# { -# result = /$(result) ; -# } -# -# if $(native) = "" -# { -# result = "." 
; -# } -# -# return $(result) ; -# } -# -# rule native-NT ( path ) -# { -# local result = [ MATCH "^/?(.*)" : $(path) ] ; -# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ; -# return $(result) ; -# } -# -# rule make-CYGWIN ( path ) -# { -# return [ make-NT $(path) ] ; -# } -# -# rule native-CYGWIN ( path ) -# { -# local result = $(path) ; -# if [ regex.match "(^/.:)" : $(path) ] # win absolute -# { -# result = [ MATCH "^/?(.*)" : $(path) ] ; # remove leading '/' -# } -# return [ native-UNIX $(result) ] ; -# } -# -# # -# # split-VMS: splits input native path into -# # device dir file (each part is optional), -# # example: -# # -# # dev:[dir]file.c => dev: [dir] file.c -# # -# rule split-path-VMS ( native ) -# { -# local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ; -# local device = $(matches[1]) ; -# local dir = $(matches[2]) ; -# local file = $(matches[3]) ; -# -# return $(device) $(dir) $(file) ; -# } -# -# # -# # Converts a native VMS path into a portable path spec. -# # -# # Does not handle current-device absolute paths such -# # as "[dir]File.c" as it is not clear how to represent -# # them in the portable path notation. -# # -# # Adds a trailing dot (".") to the file part if no extension -# # is present (helps when converting it back into native path). -# # -# rule make-VMS ( native ) -# { -# if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ] -# { -# errors.error "Can't handle default-device absolute paths: " $(native) ; -# } -# -# local parts = [ split-path-VMS $(native) ] ; -# local device = $(parts[1]) ; -# local dir = $(parts[2]) ; -# local file = $(parts[3]) ; -# local elems ; -# -# if $(device) -# { -# # -# # rooted -# # -# elems = /$(device) ; -# } -# -# if $(dir) = "[]" -# { -# # -# # Special case: current directory -# # -# elems = $(elems) "." ; -# } -# else if $(dir) -# { -# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ; -# local dir_parts = [ regex.split $(dir) \\. ] ; -# -# if $(dir_parts[1]) = "" -# { -# # -# # Relative path -# # -# dir_parts = $(dir_parts[2--1]) ; -# } -# -# # -# # replace "parent-directory" parts (- => ..) -# # -# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ; -# -# elems = $(elems) $(dir_parts) ; -# } -# -# if $(file) -# { -# if ! [ MATCH (\\.) : $(file) ] -# { -# # -# # Always add "." to end of non-extension file -# # -# file = $(file). ; -# } -# elems = $(elems) $(file) ; -# } -# -# local portable = [ path.join $(elems) ] ; -# -# return $(portable) ; -# } -# -# # -# # Converts a portable path spec into a native VMS path. -# # -# # Relies on having at least one dot (".") included in the file -# # name to be able to differentiate it ftom the directory part. -# # -# rule native-VMS ( path ) -# { -# local device = "" ; -# local dir = $(path) ; -# local file = "" ; -# local native ; -# local split ; -# -# # -# # Has device ? -# # -# if [ is-rooted $(dir) ] -# { -# split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ; -# device = $(split[1]) ; -# dir = $(split[2]) ; -# } -# -# # -# # Has file ? -# # -# # This is no exact science, just guess work: -# # -# # If the last part of the current path spec -# # includes some chars, followed by a dot, -# # optionally followed by more chars - -# # then it is a file (keep your fingers crossed). -# # -# split = [ regex.split $(dir) / ] ; -# local maybe_file = $(split[-1]) ; -# -# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ] -# { -# file = $(maybe_file) ; -# dir = [ sequence.join $(split[1--2]) : / ] ; -# } -# -# # -# # Has dir spec ? -# # -# if $(dir) = "." 
-# { -# dir = "[]" ; -# } -# else if $(dir) -# { -# dir = [ regex.replace $(dir) \\.\\. - ] ; -# dir = [ regex.replace $(dir) / . ] ; -# -# if $(device) = "" -# { -# # -# # Relative directory -# # -# dir = "."$(dir) ; -# } -# dir = "["$(dir)"]" ; -# } -# -# native = [ sequence.join $(device) $(dir) $(file) ] ; -# -# return $(native) ; -# } -# -# -# rule __test__ ( ) { -# -# import assert ; -# import errors : try catch ; -# -# assert.true is-rooted "/" ; -# assert.true is-rooted "/foo" ; -# assert.true is-rooted "/foo/bar" ; -# assert.result : is-rooted "." ; -# assert.result : is-rooted "foo" ; -# assert.result : is-rooted "foo/bar" ; -# -# assert.true has-parent "foo" ; -# assert.true has-parent "foo/bar" ; -# assert.true has-parent "." ; -# assert.result : has-parent "/" ; -# -# assert.result "." : basename "." ; -# assert.result ".." : basename ".." ; -# assert.result "foo" : basename "foo" ; -# assert.result "foo" : basename "bar/foo" ; -# assert.result "foo" : basename "gaz/bar/foo" ; -# assert.result "foo" : basename "/gaz/bar/foo" ; -# -# assert.result "." : parent "foo" ; -# assert.result "/" : parent "/foo" ; -# assert.result "foo/bar" : parent "foo/bar/giz" ; -# assert.result ".." : parent "." ; -# assert.result ".." : parent "../foo" ; -# assert.result "../../foo" : parent "../../foo/bar" ; -# -# -# assert.result "." : reverse "." ; -# assert.result ".." : reverse "foo" ; -# assert.result "../../.." : reverse "foo/bar/giz" ; -# -# assert.result "foo" : join "foo" ; -# assert.result "/foo" : join "/" "foo" ; -# assert.result "foo/bar" : join "foo" "bar" ; -# assert.result "foo/bar" : join "foo/giz" "../bar" ; -# assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ; -# assert.result ".." : join "." ".." ; -# assert.result ".." : join "foo" "../.." ; -# assert.result "../.." : join "../foo" "../.." ; -# assert.result "/foo" : join "/bar" "../foo" ; -# assert.result "foo/giz" : join "foo/giz" "." ; -# assert.result "." : join lib2 ".." ; -# assert.result "/" : join "/a" ".." ; -# -# assert.result /a/b : join /a/b/c .. ; -# -# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ; -# assert.result "giz" : join "foo" ".." "giz" ; -# assert.result "foo/giz" : join "foo" "." "giz" ; -# -# try ; -# { -# join "a" "/b" ; -# } -# catch only first element may be rooted ; -# -# local CWD = "/home/ghost/build" ; -# assert.result : all-parents . : . : $(CWD) ; -# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ; -# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ; -# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ; -# -# local CWD = "/home/ghost" ; -# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ; -# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ; -# -# assert.result "c/d" : relative "a/b/c/d" "a/b" ; -# assert.result "foo" : relative "foo" "." ; -# -# local save-os = [ modules.peek path : os ] ; -# modules.poke path : os : NT ; -# -# assert.result "foo/bar/giz" : make "foo/bar/giz" ; -# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ; -# assert.result "foo" : make "foo/." ; -# assert.result "foo" : make "foo/bar/.." 
; -# assert.result "/D:/My Documents" : make "D:\\My Documents" ; -# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ; -# -# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ; -# assert.result "foo" : native "foo" ; -# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ; -# -# modules.poke path : os : UNIX ; -# -# assert.result "foo/bar/giz" : make "foo/bar/giz" ; -# assert.result "/sub1" : make "/sub1/." ; -# assert.result "/sub1" : make "/sub1/sub2/.." ; -# assert.result "sub1" : make "sub1/." ; -# assert.result "sub1" : make "sub1/sub2/.." ; -# assert.result "/foo/bar" : native "/foo/bar" ; -# -# modules.poke path : os : VMS ; -# -# # -# # Don't really need to poke os before these -# # -# assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ; -# assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ; -# assert.result "disk:" "" "" : split-path-VMS "disk:" ; -# assert.result "disk:" "" "file" : split-path-VMS "disk:file" ; -# assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ; -# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ; -# assert.result "" "" "file" : split-path-VMS "file" ; -# assert.result "" "" "" : split-path-VMS "" ; -# -# # -# # Special case: current directory -# # -# assert.result "" "[]" "" : split-path-VMS "[]" ; -# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ; -# assert.result "" "[]" "file" : split-path-VMS "[]file" ; -# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ; -# -# # -# # Make portable paths -# # -# assert.result "/disk:" : make "disk:" ; -# assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ; -# assert.result "foo" : make "[.foo]" ; -# assert.result "foo" : make "[.foo.bar.-]" ; -# assert.result ".." : make "[.-]" ; -# assert.result ".." : make "[-]" ; -# assert.result "." : make "[]" ; -# assert.result "giz.h" : make "giz.h" ; -# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ; -# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ; -# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ; -# -# # -# # Special case (adds '.' to end of file w/o extension to -# # disambiguate from directory in portable path spec). -# # -# assert.result "Jamfile." : make "Jamfile" ; -# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ; -# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ; -# -# # -# # Make native paths -# # -# assert.result "disk:" : native "/disk:" ; -# assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ; -# assert.result "[.foo]" : native "foo" ; -# assert.result "[.-]" : native ".." ; -# assert.result "[.foo.-]" : native "foo/.." ; -# assert.result "[]" : native "." ; -# assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ; -# assert.result "giz.h" : native "giz.h" ; -# assert.result "disk:Jamfile." : native "/disk:Jamfile." ; -# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ; -# -# modules.poke path : os : $(save-os) ; -# -# } - -# - - -#def glob(dir, patterns): -# result = [] -# for pattern in patterns: -# result.extend(builtin_glob(os.path.join(dir, pattern))) -# return result - -def glob(dirs, patterns, exclude_patterns=None): - """Returns the list of files matching the given pattern in the - specified directory. Both directories and patterns are - supplied as portable paths. Each pattern should be non-absolute - path, and can't contain '.' 
or '..' elements. Each slash separated - element of pattern can contain the following special characters: - - '?', which match any character - - '*', which matches arbitrary number of characters. - A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 - if and only if e1 matches p1, e2 matches p2 and so on. - For example: - [ glob . : *.cpp ] - [ glob . : */build/Jamfile ] - """ - - assert(isinstance(patterns, list)) - assert(isinstance(dirs, list)) - - if not exclude_patterns: - exclude_patterns = [] - else: - assert(isinstance(exclude_patterns, list)) - - real_patterns = [os.path.join(d, p) for p in patterns for d in dirs] - real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns - for d in dirs] - - inc = [os.path.normpath(name) for p in real_patterns - for name in builtin_glob(p)] - exc = [os.path.normpath(name) for p in real_exclude_patterns - for name in builtin_glob(p)] - return [x for x in inc if x not in exc] - -def glob_tree(roots, patterns, exclude_patterns=None): - """Recursive version of GLOB. Builds the glob of files while - also searching in the subdirectories of the given roots. An - optional set of exclusion patterns will filter out the - matching entries from the result. The exclusions also apply - to the subdirectory scanning, such that directories that - match the exclusion patterns will not be searched.""" - - if not exclude_patterns: - exclude_patterns = [] - - result = glob(roots, patterns, exclude_patterns) - subdirs = [s for s in glob(roots, ["*"]) if s != "." and s != ".." and os.path.isdir(s)] - if subdirs: - result.extend(glob_tree(subdirs, patterns, exclude_patterns)) - - return result - -def glob_in_parents(dir, patterns, upper_limit=None): - """Recursive version of GLOB which glob sall parent directories - of dir until the first match is found. Returns an empty result if no match - is found""" - - assert(isinstance(dir, str)) - assert(isinstance(patterns, list)) - - result = [] - - absolute_dir = os.path.join(os.getcwd(), dir) - absolute_dir = os.path.normpath(absolute_dir) - while absolute_dir: - new_dir = os.path.split(absolute_dir)[0] - if new_dir == absolute_dir: - break - result = glob([new_dir], patterns) - if result: - break - absolute_dir = new_dir - - return result - - -# The relpath functionality is written by -# Cimarron Taylor -def split(p, rest=[]): - (h,t) = os.path.split(p) - if len(h) < 1: return [t]+rest - if len(t) < 1: return [h]+rest - return split(h,[t]+rest) - -def commonpath(l1, l2, common=[]): - if len(l1) < 1: return (common, l1, l2) - if len(l2) < 1: return (common, l1, l2) - if l1[0] != l2[0]: return (common, l1, l2) - return commonpath(l1[1:], l2[1:], common+[l1[0]]) - -def relpath(p1, p2): - (common,l1,l2) = commonpath(split(p1), split(p2)) - p = [] - if len(l1) > 0: - p = [ '../' * len(l1) ] - p = p + l2 - if p: - return os.path.join( *p ) - else: - return "." diff --git a/jam-files/boost-build/util/print.jam b/jam-files/boost-build/util/print.jam deleted file mode 100644 index 708d21ab..00000000 --- a/jam-files/boost-build/util/print.jam +++ /dev/null @@ -1,488 +0,0 @@ -# Copyright 2003 Douglas Gregor -# Copyright 2002, 2003, 2005 Rene Rivera -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Utilities for generating format independent output. Using these -# will help in generation of documentation in at minimum plain/console -# and html. 
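The pure-Python split, commonpath and relpath helpers deleted just above (credited to Cimarron Taylor) compute a relative path without touching the filesystem: both inputs are split into component lists, the shared prefix is dropped, and one '..' is emitted for every leftover component of the first path. A minimal usage sketch, assuming the module is importable as b2.util.path (that package path is an assumption, not something this diff states):

    # Hypothetical import location for the helpers shown above.
    from b2.util.path import split, commonpath, relpath

    print(split("/home/ghost/build"))
    # ['/', 'home', 'ghost', 'build']

    print(commonpath(split("/a/b/c/d"), split("/a/b/x")))
    # (['/', 'a', 'b'], ['c', 'd'], ['x'])  shared prefix, then each side's leftovers

    print(relpath("/a/b/c", "/a/b/x/y"))
    # '../x/y'  go up once from /a/b/c, then down into x/y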
- -import modules ; -import numbers ; -import string ; -import regex ; -import "class" ; -import scanner ; -import path ; - -# The current output target. Defaults to console. -output-target = console ; - -# The current output type. Defaults to plain. Other possible values are "html". -output-type = plain ; - -# Whitespace. -.whitespace = [ string.whitespace ] ; - - -# Set the target and type of output to generate. This sets both the destination -# output and the type of docs to generate to that output. The target can be -# either a file or "console" for echoing to the console. If the type of output -# is not specified it defaults to plain text. -# -rule output ( - target # The target file or device; file or "console". - type ? # The type of output; "plain" or "html". -) -{ - type ?= plain ; - if $(output-target) != $(target) - { - output-target = $(target) ; - output-type = $(type) ; - if $(output-type) = html - { - text - "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">" - "<html>" - "<head>" - "</head>" - "<body link=\"#0000ff\" vlink=\"#800080\">" - : true - : prefix ; - text - "</body>" - "</html>" - : - : suffix ; - } - } -} - - -# Generate a section with a description. The type of output can be controlled by -# the value of the 'output-type' variable. -# -rule section ( - name # The name of the section. - description * # A number of description lines. -) -{ - if $(output-type) = plain - { - lines [ split-at-words $(name): ] ; - lines ; - } - else if $(output-type) = html - { - name = [ escape-html $(name) ] ; - text <h3>$(name)</h3> <p> ; - } - local pre = ; - while $(description) - { - local paragraph = ; - while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; } - if $(pre) - { - while $(description) && ( - $(pre) = " $(description[1])" || - ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] ) - ) - { paragraph += $(description[1]) ; description = $(description[2-]) ; } - while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; } - pre = ; - if $(output-type) = plain - { - lines $(paragraph) "" : " " " " ; - } - else if $(output-type) = html - { - text <blockquote> ; - lines $(paragraph) ; - text </blockquote> ; - } - } - else - { - while $(description) && ! [ string.is-whitespace $(description[1]) ] - { paragraph += $(description[1]) ; description = $(description[2-]) ; } - if $(paragraph[1]) = :: && ! $(paragraph[2]) - { - pre = " " ; - } - if $(paragraph[1]) = :: - { - if $(output-type) = plain - { - lines $(paragraph[2-]) "" : " " " " ; - lines ; - } - else if $(output-type) = html - { - text <blockquote> ; - lines $(paragraph[2-]) ; - text </blockquote> ; - } - } - else - { - local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ; - local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ; - p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ; - if $(p[3]) = :: - { - pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ; - if ! 
$(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) $(p[1]): ; } - else { paragraph = $(paragraph[1--2]) $(p[1]) ; } - if $(output-type) = plain - { - lines [ split-at-words " " $(paragraph) ] : " " " " ; - lines ; - } - else if $(output-type) = html - { - text </p> <p> [ escape-html $(paragraph) ] ; - } - } - else - { - if $(output-type) = plain - { - lines [ split-at-words " " $(paragraph) ] : " " " " ; - lines ; - } - else if $(output-type) = html - { - text </p> <p> [ escape-html $(paragraph) ] ; - } - } - } - } - } - if $(output-type) = html - { - text </p> ; - } -} - - -# Generate the start of a list of items. The type of output can be controlled by -# the value of the 'output-type' variable. -# -rule list-start ( ) -{ - if $(output-type) = plain - { - } - else if $(output-type) = html - { - text <ul> ; - } -} - - -# Generate an item in a list. The type of output can be controlled by the value -# of the 'output-type' variable. -# -rule list-item ( - item + # The item to list. -) -{ - if $(output-type) = plain - { - lines [ split-at-words "*" $(item) ] : " " " " ; - } - else if $(output-type) = html - { - text <li> [ escape-html $(item) ] </li> ; - } -} - - -# Generate the end of a list of items. The type of output can be controlled by -# the value of the 'output-type' variable. -# -rule list-end ( ) -{ - if $(output-type) = plain - { - lines ; - } - else if $(output-type) = html - { - text </ul> ; - } -} - - -# Split the given text into separate lines, word-wrapping to a margin. The -# default margin is 78 characters. -# -rule split-at-words ( - text + # The text to split. - : margin ? # An optional margin, default is 78. -) -{ - local lines = ; - text = [ string.words $(text:J=" ") ] ; - text = $(text:J=" ") ; - margin ?= 78 ; - local char-match-1 = ".?" ; - local char-match = "" ; - while $(margin) != 0 - { - char-match = $(char-match)$(char-match-1) ; - margin = [ numbers.decrement $(margin) ] ; - } - while $(text) - { - local s = "" ; - local t = "" ; - # divide s into the first X characters and the rest - s = [ MATCH "^($(char-match))(.*)" : $(text) ] ; - - if $(s[2]) - { - # split the first half at a space - t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ; - } - else - { - t = $(s) ; - } - - if ! $(t[2]) - { - t += "" ; - } - - text = $(t[2])$(s[2]) ; - lines += $(t[1]) ; - } - return $(lines) ; -} - - -# Generate a set of fixed lines. Each single item passed in is output on a -# separate line. For console this just echos each line, but for html this will -# split them with <br>. -# -rule lines ( - text * # The lines of text. - : indent ? # Optional indentation prepended to each line after the first one. - outdent ? # Optional indentation to prepend to the first line. -) -{ - text ?= "" ; - indent ?= "" ; - outdent ?= "" ; - if $(output-type) = plain - { - text $(outdent)$(text[1]) $(indent)$(text[2-]) ; - } - else if $(output-type) = html - { - local indent-chars = [ string.chars $(indent) ] ; - indent = "" ; - for local c in $(indent-chars) - { - if $(c) = " " { c = " " ; } - else if $(c) = " " { c = " " ; } - indent = $(indent)$(c) ; - } - local html-text = [ escape-html $(text) : " " ] ; - text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ; - } -} - - -# Output text directly to the current target. When doing output to a file, one -# can indicate if the text should be output to "prefix" it, as the "body" -# (default), or "suffix" of the file. This is independant of the actual -# execution order of the text rule. 
This rule invokes a singular action, one -# action only once, which does the build of the file. Therefore actions on the -# target outside of this rule will happen entirely before and/or after all -# output using this rule. -# -rule text ( - strings * # The strings of text to output. - : overwrite ? # true to overwrite the output (if it is a file) - : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for a file). -) -{ - prefix-body-suffix ?= body ; - if $(output-target) = console - { - if ! $(strings) - { - ECHO ; - } - else - { - for local s in $(strings) - { - ECHO $(s) ; - } - } - } - if ! $($(output-target).did-action) - { - $(output-target).did-action = yes ; - $(output-target).text-prefix = ; - $(output-target).text-body = ; - $(output-target).text-suffix = ; - - nl on $(output-target) = " -" ; - text-redirect on $(output-target) = ">>" ; - if $(overwrite) - { - text-redirect on $(output-target) = ">" ; - } - text-content on $(output-target) = ; - - text-action $(output-target) ; - - if $(overwrite) && $(output-target) != console - { - check-for-update $(output-target) ; - } - } - $(output-target).text-$(prefix-body-suffix) += $(strings) ; - text-content on $(output-target) = - $($(output-target).text-prefix) - $($(output-target).text-body) - $($(output-target).text-suffix) ; -} - - -# Outputs the text to the current targets, after word-wrapping it. -# -rule wrapped-text ( text + ) -{ - local lines = [ split-at-words $(text) ] ; - text $(lines) ; -} - - -# Escapes text into html/xml printable equivalents. Does not know about tags and -# therefore tags fed into this will also be escaped. Currently escapes space, -# "<", ">", and "&". -# -rule escape-html ( - text + # The text to escape. - : space ? # What to replace spaces with, defaults to " ". -) -{ - local html-text = ; - while $(text) - { - local html = $(text[1]) ; - text = $(text[2-]) ; - html = [ regex.replace $(html) "&" "&" ] ; - html = [ regex.replace $(html) "<" "<" ] ; - html = [ regex.replace $(html) ">" ">" ] ; - if $(space) - { - html = [ regex.replace $(html) " " "$(space)" ] ; - } - html-text += $(html) ; - } - return $(html-text) ; -} - - -# Outputs the text strings collected by the text rule to the output file. -# -actions quietly text-action -{ - @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)" -} - - -rule get-scanner ( ) -{ - if ! $(.scanner) - { - .scanner = [ class.new print-scanner ] ; - } - return $(.scanner) ; -} - - -# The following code to update print targets when their contents -# change is a horrible hack. It basically creates a target which -# binds to this file (print.jam) and installs a scanner on it -# which reads the target and compares its contents to the new -# contents that we're writing. 
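The comment above describes a content-comparison hack: the generated file is rebuilt only when the text about to be written differs from what is already on disk, so its timestamp does not churn on every run. The Jam implementation follows; as a plain illustration of the idea (not a translation of the scanner mechanics), a compare-before-write helper in Python looks like this:

    import os

    def write_if_changed(path, new_content):
        # Rewrite the file only when its contents would actually change,
        # so dependents are not rebuilt for identical output.
        old_content = None
        if os.path.exists(path):
            with open(path) as f:
                old_content = f.read()
        if old_content == new_content:
            return False     # already up to date, keep the old timestamp
        with open(path, "w") as f:
            f.write(new_content)
        return True          # file was (re)written

    # write_if_changed("out/report.html", rendered_text)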
-# -rule check-for-update ( target ) -{ - local scanner = [ get-scanner ] ; - local file = [ path.native [ modules.binding $(__name__) ] ] ; - local g = [ MATCH <(.*)> : $(target:G) ] ; - local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ; - DEPENDS $(target) : $(dependency-target) ; - SEARCH on $(dependency-target) = $(file:D) ; - ISFILE $(dependency-target) ; - NOUPDATE $(dependency-target) ; - base on $(dependency-target) = $(target) ; - scanner.install $(scanner) : $(dependency-target) none ; - return $(dependency-target) ; -} - - -class print-scanner : scanner -{ - import path ; - import os ; - - rule pattern ( ) - { - return "(One match...)" ; - } - - rule process ( target : matches * : binding ) - { - local base = [ on $(target) return $(base) ] ; - local nl = [ on $(base) return $(nl) ] ; - local text-content = [ on $(base) return $(text-content) ] ; - local dir = [ on $(base) return $(LOCATE) ] ; - if $(dir) - { - dir = [ path.make $(dir) ] ; - } - local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ; - local actual-content ; - if [ os.name ] = NT - { - actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ; - } - else - { - actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ; - } - if $(text-content:J=$(nl)) != $(actual-content) - { - ALWAYS $(base) ; - } - } -} - - -rule __test__ ( ) -{ - import assert ; - - assert.result one two three : split-at-words one two three : 5 ; - assert.result "one two" three : split-at-words one two three : 8 ; - assert.result "one two" three : split-at-words one two three : 9 ; - assert.result "one two three" : split-at-words one two three ; - - # VP, 2004-12-03 The following test fails for some reason, so commenting it - # out. - #assert.result "one two three" "&<>" : - # escape-html "one two three" "&<>" ; -} diff --git a/jam-files/boost-build/util/regex.jam b/jam-files/boost-build/util/regex.jam deleted file mode 100644 index 234c36f6..00000000 --- a/jam-files/boost-build/util/regex.jam +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright 2001, 2002 Dave Abrahams -# Copyright 2003 Douglas Gregor -# Copyright 2003 Rene Rivera -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# -# Returns a list of the following substrings: -# 1) from beginning till the first occurrence of 'separator' or till the end, -# 2) between each occurrence of 'separator' and the next occurrence, -# 3) from the last occurrence of 'separator' till the end. -# If no separator is present, the result will contain only one element. -# - -rule split ( string separator ) -{ - local result ; - local s = $(string) ; - - # Break pieaces off 's' until it has no separators left. - local match = 1 ; - while $(match) - { - match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ; - if $(match) - { - match += "" ; # in case 3rd item was empty - works around MATCH bug - result = $(match[3]) $(result) ; - s = $(match[1]) ; - } - } - # Combine the remaining part at the beginning, which does not have - # separators, with the pieces broken off. Note that the rule's signature - # does not allow the initial s to be empty. - return $(s) $(result) ; -} - - -# Returns the concatenated results of Applying regex.split to every element of -# the list using the separator pattern. 
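regex.split above peels trailing pieces off the string with MATCH until no separator is left, so leading or doubled separators are preserved as empty elements rather than collapsed, and split-list (below) simply concatenates the per-element results. A small Python sketch of the same contract (re.split treats the separator as a regex and keeps empty pieces in the same way for these inputs):

    import re

    def jam_style_split(string, separator):
        # Mirror regex.split: the separator is a regex, empty pieces are kept.
        return re.split(separator, string)

    def jam_style_split_list(strings, separator):
        # Mirror regex.split-list: concatenate the per-element results.
        result = []
        for s in strings:
            result.extend(jam_style_split(s, separator))
        return result

    assert jam_style_split("a/b/c", "/") == ["a", "b", "c"]
    assert jam_style_split("/a//b/c/", "/") == ["", "a", "", "b", "c", ""]
    assert jam_style_split_list(["a/b", "c/d"], "/") == ["a", "b", "c", "d"]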
-# -rule split-list ( list * : separator ) -{ - local result ; - for s in $(list) - { - result += [ split $(s) $(separator) ] ; - } - return $(result) ; -} - - -# Match string against pattern, and return the elements indicated by indices. -# -rule match ( pattern : string : indices * ) -{ - indices ?= 1 2 3 4 5 6 7 8 9 ; - local x = [ MATCH $(pattern) : $(string) ] ; - return $(x[$(indices)]) ; -} - - -# Matches all elements of 'list' agains the 'pattern' and returns a list of -# elements indicated by indices of all successful matches. If 'indices' is -# omitted returns a list of first paranthethised groups of all successful -# matches. -# -rule transform ( list * : pattern : indices * ) -{ - indices ?= 1 ; - local result ; - for local e in $(list) - { - local m = [ MATCH $(pattern) : $(e) ] ; - if $(m) - { - result += $(m[$(indices)]) ; - } - } - return $(result) ; -} - -NATIVE_RULE regex : transform ; - - -# Escapes all of the characters in symbols using the escape symbol escape-symbol -# for the given string, and returns the escaped string. -# -rule escape ( string : symbols : escape-symbol ) -{ - local result = "" ; - local m = 1 ; - while $(m) - { - m = [ MATCH ^([^$(symbols)]*)([$(symbols)])(.*) : $(string) ] ; - if $(m) - { - m += "" ; # Supposedly a bug fix; borrowed from regex.split - result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ; - string = $(m[3]) ; - } - } - string ?= "" ; - result = "$(result)$(string)" ; - return $(result) ; -} - - -# Replaces occurrences of a match string in a given string and returns the new -# string. The match string can be a regex expression. -# -rule replace ( - string # The string to modify. - match # The characters to replace. - replacement # The string to replace with. - ) -{ - local result = "" ; - local parts = 1 ; - while $(parts) - { - parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ; - if $(parts) - { - parts += "" ; - result = "$(replacement)$(parts[3])$(result)" ; - string = $(parts[1]) ; - } - } - string ?= "" ; - result = "$(string)$(result)" ; - return $(result) ; -} - - -# Replaces occurrences of a match string in a given list of strings and returns -# a list of new strings. The match string can be a regex expression. -# -# list - the list of strings to modify. -# match - the search expression. -# replacement - the string to replace with. -# -rule replace-list ( list * : match : replacement ) -{ - local result ; - for local e in $(list) - { - result += [ replace $(e) $(match) $(replacement) ] ; - } - return $(result) ; -} - - -rule __test__ ( ) -{ - import assert ; - - assert.result a b c : split "a/b/c" / ; - assert.result "" a b c : split "/a/b/c" / ; - assert.result "" "" a b c : split "//a/b/c" / ; - assert.result "" a "" b c : split "/a//b/c" / ; - assert.result "" a "" b c "" : split "/a//b/c/" / ; - assert.result "" a "" b c "" "" : split "/a//b/c//" / ; - - assert.result a c b d - : match (.)(.)(.)(.) : abcd : 1 3 2 4 ; - - assert.result a b c d - : match (.)(.)(.)(.) 
: abcd ; - - assert.result ababab cddc - : match ((ab)*)([cd]+) : abababcddc : 1 3 ; - - assert.result a.h c.h - : transform <a.h> \"b.h\" <c.h> : <(.*)> ; - - assert.result a.h b.h c.h - : transform <a.h> \"b.h\" <c.h> : <([^>]*)>|\"([^\"]*)\" : 1 2 ; - - assert.result "^<?xml version=\"1.0\"^>" - : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ; - - assert.result "<?xml version=\\\"1.0\\\">" - : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ; - - assert.result "string string " : replace "string string " " " " " ; - assert.result " string string" : replace " string string" " " " " ; - assert.result "string string" : replace "string string" " " " " ; - assert.result "-" : replace "&" "&" "-" ; - - assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ; -} diff --git a/jam-files/boost-build/util/regex.py b/jam-files/boost-build/util/regex.py deleted file mode 100644 index 29e26ecf..00000000 --- a/jam-files/boost-build/util/regex.py +++ /dev/null @@ -1,25 +0,0 @@ -# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -import re - -def transform (list, pattern, indices = [1]): - """ Matches all elements of 'list' agains the 'pattern' - and returns a list of the elements indicated by indices of - all successfull matches. If 'indices' is omitted returns - a list of first paranthethised groups of all successfull - matches. - """ - result = [] - - for e in list: - m = re.match (pattern, e) - - if m: - for i in indices: - result.append (m.group (i)) - - return result - diff --git a/jam-files/boost-build/util/sequence.jam b/jam-files/boost-build/util/sequence.jam deleted file mode 100644 index 73919a65..00000000 --- a/jam-files/boost-build/util/sequence.jam +++ /dev/null @@ -1,335 +0,0 @@ -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import assert ; -import numbers ; -import modules ; - - -# Note that algorithms in this module execute largely in the caller's module -# namespace, so that local rules can be used as function objects. Also note that -# most predicates can be multi-element lists. In that case, all but the first -# element are prepended to the first argument which is passed to the rule named -# by the first element. - - -# Return the elements e of $(sequence) for which [ $(predicate) e ] has a -# non-null value. -# -rule filter ( predicate + : sequence * ) -{ - local caller = [ CALLER_MODULE ] ; - local result ; - - for local e in $(sequence) - { - if [ modules.call-in $(caller) : $(predicate) $(e) ] - { - result += $(e) ; - } - } - return $(result) ; -} - - -# Return a new sequence consisting of [ $(function) $(e) ] for each element e of -# $(sequence). -# -rule transform ( function + : sequence * ) -{ - local caller = [ CALLER_MODULE ] ; - local result ; - - for local e in $(sequence) - { - result += [ modules.call-in $(caller) : $(function) $(e) ] ; - } - return $(result) ; -} - - -rule reverse ( s * ) -{ - local r ; - for local x in $(s) - { - r = $(x) $(r) ; - } - return $(r) ; -} - - -rule less ( a b ) -{ - if $(a) < $(b) - { - return true ; - } -} - - -# Insertion-sort s using the BinaryPredicate ordered. 
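The "BinaryPredicate ordered" is simply a rule answering whether its first argument sorts before its second; the insertion sort walks the already-sorted prefix and places each new element in front of the first item that the predicate does not order before it. A compact Python sketch of that contract (the Jam rule below additionally fast-paths the default sequence.less ordering):

    def insertion_sort(seq, ordered=lambda a, b: a < b):
        # ordered(a, b) is truthy when a should come before b.
        result = []
        for x in seq:
            i = 0
            while i < len(result) and ordered(result[i], x):
                i += 1                  # skip items the predicate keeps before x
            result.insert(i, x)
        return result

    assert insertion_sort([9, 6, 5, 3, 8, 7, 1, 2, 4]) == [1, 2, 3, 4, 5, 6, 7, 8, 9]
    assert insertion_sort([9, 6, 5, 3, 8, 7, 1, 2, 4],
                          ordered=lambda a, b: a > b) == [9, 8, 7, 6, 5, 4, 3, 2, 1]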
-# -rule insertion-sort ( s * : ordered * ) -{ - if ! $(ordered) - { - return [ SORT $(s) ] ; - } - else - { - local caller = [ CALLER_MODULE ] ; - ordered ?= sequence.less ; - local result = $(s[1]) ; - if $(ordered) = sequence.less - { - local head tail ; - for local x in $(s[2-]) - { - head = ; - tail = $(result) ; - while $(tail) && ( $(tail[1]) < $(x) ) - { - head += $(tail[1]) ; - tail = $(tail[2-]) ; - } - result = $(head) $(x) $(tail) ; - } - } - else - { - for local x in $(s[2-]) - { - local head tail ; - tail = $(result) ; - while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ] - { - head += $(tail[1]) ; - tail = $(tail[2-]) ; - } - result = $(head) $(x) $(tail) ; - } - } - - return $(result) ; - } -} - - -# Merge two ordered sequences using the BinaryPredicate ordered. -# -rule merge ( s1 * : s2 * : ordered * ) -{ - ordered ?= sequence.less ; - local result__ ; - local caller = [ CALLER_MODULE ] ; - - while $(s1) && $(s2) - { - if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ] - { - result__ += $(s1[1]) ; - s1 = $(s1[2-]) ; - } - else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ] - { - result__ += $(s2[1]) ; - s2 = $(s2[2-]) ; - } - else - { - s2 = $(s2[2-]) ; - } - - } - result__ += $(s1) ; - result__ += $(s2) ; - - return $(result__) ; -} - - -# Join the elements of s into one long string. If joint is supplied, it is used -# as a separator. -# -rule join ( s * : joint ? ) -{ - joint ?= "" ; - return $(s:J=$(joint)) ; -} - - -# Find the length of any sequence. -# -rule length ( s * ) -{ - local result = 0 ; - for local i in $(s) - { - result = [ CALC $(result) + 1 ] ; - } - return $(result) ; -} - - -rule unique ( list * : stable ? ) -{ - local result ; - local prev ; - if $(stable) - { - for local f in $(list) - { - if ! $(f) in $(result) - { - result += $(f) ; - } - } - } - else - { - for local i in [ SORT $(list) ] - { - if $(i) != $(prev) - { - result += $(i) ; - } - prev = $(i) ; - } - } - return $(result) ; -} - - -# Returns the maximum number in 'elements'. Uses 'ordered' for comparisons or -# 'numbers.less' if none is provided. -# -rule max-element ( elements + : ordered ? ) -{ - ordered ?= numbers.less ; - - local max = $(elements[1]) ; - for local e in $(elements[2-]) - { - if [ $(ordered) $(max) $(e) ] - { - max = $(e) ; - } - } - return $(max) ; -} - - -# Returns all of 'elements' for which corresponding element in parallel list -# 'rank' is equal to the maximum value in 'rank'. -# -rule select-highest-ranked ( elements * : ranks * ) -{ - if $(elements) - { - local max-rank = [ max-element $(ranks) ] ; - local result ; - while $(elements) - { - if $(ranks[1]) = $(max-rank) - { - result += $(elements[1]) ; - } - elements = $(elements[2-]) ; - ranks = $(ranks[2-]) ; - } - return $(result) ; - } -} -NATIVE_RULE sequence : select-highest-ranked ; - - -rule __test__ ( ) -{ - # Use a unique module so we can test the use of local rules. - module sequence.__test__ - { - import assert ; - import sequence ; - - local rule is-even ( n ) - { - if $(n) in 0 2 4 6 8 - { - return true ; - } - } - - assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ; - - # Test that argument binding works. 
- local rule is-equal-test ( x y ) - { - if $(x) = $(y) - { - return true ; - } - } - - assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ; - - local rule append-x ( n ) - { - return $(n)x ; - } - - assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ; - - local rule repeat2 ( x ) - { - return $(x) $(x) ; - } - - assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ; - - local rule test-greater ( a b ) - { - if $(a) > $(b) - { - return true ; - } - } - assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ; - assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ; - assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ; - assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ; - assert.result 1 2 3 : sequence.merge 1 2 3 : ; - assert.result 1 : sequence.merge 1 : 1 ; - - assert.result foo-bar-baz : sequence.join foo bar baz : - ; - assert.result substandard : sequence.join sub stan dard ; - assert.result 3.0.1 : sequence.join 3.0.1 : - ; - - assert.result 0 : sequence.length ; - assert.result 3 : sequence.length a b c ; - assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ; - - assert.result 1 : sequence.length a ; - assert.result 10 : sequence.length a b c d e f g h i j ; - assert.result 11 : sequence.length a b c d e f g h i j k ; - assert.result 12 : sequence.length a b c d e f g h i j k l ; - - local p2 = x ; - for local i in 1 2 3 4 5 6 7 8 - { - p2 = $(p2) $(p2) ; - } - assert.result 256 : sequence.length $(p2) ; - - assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ; - - assert.result 5 : sequence.max-element 1 3 5 0 4 ; - - assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ; - - assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ; - } -} diff --git a/jam-files/boost-build/util/sequence.py b/jam-files/boost-build/util/sequence.py deleted file mode 100644 index 1d32efd2..00000000 --- a/jam-files/boost-build/util/sequence.py +++ /dev/null @@ -1,50 +0,0 @@ -# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -import operator - -def unique (values, stable=False): - if stable: - s = set() - r = [] - for v in values: - if not v in s: - r.append(v) - s.add(v) - return r - else: - return list(set(values)) - -def max_element (elements, ordered = None): - """ Returns the maximum number in 'elements'. Uses 'ordered' for comparisons, - or '<' is none is provided. - """ - if not ordered: ordered = operator.lt - - max = elements [0] - for e in elements [1:]: - if ordered (max, e): - max = e - - return max - -def select_highest_ranked (elements, ranks): - """ Returns all of 'elements' for which corresponding element in parallel - list 'rank' is equal to the maximum value in 'rank'. 
- """ - if not elements: - return [] - - max_rank = max_element (ranks) - - result = [] - while elements: - if ranks [0] == max_rank: - result.append (elements [0]) - - elements = elements [1:] - ranks = ranks [1:] - - return result diff --git a/jam-files/boost-build/util/set.jam b/jam-files/boost-build/util/set.jam deleted file mode 100644 index fc179134..00000000 --- a/jam-files/boost-build/util/set.jam +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright 2001, 2002 Dave Abrahams -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -class set -{ - rule __init__ ( ) - { - } - - rule add ( elements * ) - { - for local e in $(elements) - { - if ! $($(e)) - { - $(e) = 1 ; - self.result += $(e) ; - } - } - } - - rule contains ( element ) - { - return $($(element)) ; - } - - rule list ( ) - { - return $(self.result) ; - } -} - - - -# Returns the elements of set1 that are not in set2. -# -rule difference ( set1 * : set2 * ) -{ - local result = ; - for local element in $(set1) - { - if ! ( $(element) in $(set2) ) - { - result += $(element) ; - } - } - return $(result) ; -} - -NATIVE_RULE set : difference ; - - -# Removes all the items appearing in both set1 & set2. -# -rule intersection ( set1 * : set2 * ) -{ - local result ; - for local v in $(set1) - { - if $(v) in $(set2) - { - result += $(v) ; - } - } - return $(result) ; -} - - -# Returns whether set1 & set2 contain the same elements. Note that this ignores -# any element ordering differences as well as any element duplication. -# -rule equal ( set1 * : set2 * ) -{ - if $(set1) in $(set2) && ( $(set2) in $(set1) ) - { - return true ; - } -} - - -rule __test__ ( ) -{ - import assert ; - - assert.result 0 1 4 6 8 9 : difference 0 1 2 3 4 5 6 7 8 9 : 2 3 5 7 ; - assert.result 2 5 7 : intersection 0 1 2 4 5 6 7 8 9 : 2 3 5 7 ; - - assert.true equal : ; - assert.true equal 1 1 2 3 : 3 2 2 1 ; - assert.false equal 2 3 : 3 2 2 1 ; -} diff --git a/jam-files/boost-build/util/set.py b/jam-files/boost-build/util/set.py deleted file mode 100644 index dc7cf328..00000000 --- a/jam-files/boost-build/util/set.py +++ /dev/null @@ -1,42 +0,0 @@ -# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -from utility import to_seq - -def difference (b, a): - """ Returns the elements of B that are not in A. - """ - result = [] - for element in b: - if not element in a: - result.append (element) - - return result - -def intersection (set1, set2): - """ Removes from set1 any items which don't appear in set2 and returns the result. - """ - result = [] - for v in set1: - if v in set2: - result.append (v) - return result - -def contains (small, large): - """ Returns true iff all elements of 'small' exist in 'large'. - """ - small = to_seq (small) - large = to_seq (large) - - for s in small: - if not s in large: - return False - return True - -def equal (a, b): - """ Returns True iff 'a' contains the same elements as 'b', irrespective of their order. - # TODO: Python 2.4 has a proper set class. 
- """ - return contains (a, b) and contains (b, a) diff --git a/jam-files/boost-build/util/string.jam b/jam-files/boost-build/util/string.jam deleted file mode 100644 index a39ed119..00000000 --- a/jam-files/boost-build/util/string.jam +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2002 Dave Abrahams -# Copyright 2002, 2003 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import regex ; - - -# Characters considered whitespace, as a list. -.whitespace-chars = " " " " " -" ; - -# Characters considered whitespace, as a single string. -.whitespace = $(.whitespace-chars:J="") ; - - -# Returns the canonical set of whitespace characters, as a list. -# -rule whitespace-chars ( ) -{ - return $(.whitespace-chars) ; -} - - -# Returns the canonical set of whitespace characters, as a single string. -# -rule whitespace ( ) -{ - return $(.whitespace) ; -} - - -# Splits the given string into a list of strings composed of each character of -# the string in sequence. -# -rule chars ( - string # The string to split. - ) -{ - local result ; - while $(string) - { - local s = [ MATCH (.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.*) : $(string) ] ; - string = $(s[9]) ; - result += $(s[1-8]) ; - } - - # Trim off empty strings. - while $(result[1]) && ! $(result[-1]) - { - result = $(result[1--2]) ; - } - - return $(result) ; -} - - -# Apply a set of standard transformations to string to produce an abbreviation -# no more than 5 characters long. -# -rule abbreviate ( string ) -{ - local r = $(.abbreviated-$(string)) ; - if $(r) - { - return $(r) ; - } - # Anything less than 4 characters gets no abbreviation. - else if ! [ MATCH (....) : $(string) ] - { - .abbreviated-$(string) = $(string) ; - return $(string) ; - } - else - { - # Separate the initial letter in case it's a vowel. - local s1 = [ MATCH ^(.)(.*) : $(string) ] ; - - # Drop trailing "ing". - local s2 = [ MATCH ^(.*)ing$ : $(s1[2]) ] ; - s2 ?= $(s1[2]) ; - - # Reduce all doubled characters to one. - local last = "" ; - for local c in [ chars $(s2) ] - { - if $(c) != $(last) - { - r += $(c) ; - last = $(c) ; - } - } - s2 = $(r:J="") ; - - # Chop all vowels out of the remainder. - s2 = [ regex.replace $(s2) [AEIOUaeiou] "" ] ; - - # Shorten remaining consonants to 4 characters. - s2 = [ MATCH ^(.?.?.?.?) : $(s2) ] ; - - # Glue the initial character back on to the front. - s2 = $(s1[1])$(s2) ; - - .abbreviated-$(string) = $(s2) ; - return $(s2) ; - } -} - - -# Concatenates the given strings, inserting the given separator between each -# string. -# -rule join ( - strings * # The strings to join. - : separator ? # The optional separator. - ) -{ - separator ?= "" ; - return $(strings:J=$(separator)) ; -} - - -# Split a string into whitespace separated words. -# -rule words ( - string # The string to split. - : whitespace * # Optional, characters to consider as whitespace. - ) -{ - whitespace = $(whitespace:J="") ; - whitespace ?= $(.whitespace) ; - local w = ; - while $(string) - { - string = [ MATCH "^[$(whitespace)]*([^$(whitespace)]*)(.*)" : $(string) ] ; - if $(string[1]) && $(string[1]) != "" - { - w += $(string[1]) ; - } - string = $(string[2]) ; - } - return $(w) ; -} - - -# Check that the given string is composed entirely of whitespace. -# -rule is-whitespace ( - string ? # The string to test. - ) -{ - if ! 
$(string) { return true ; } - else if $(string) = "" { return true ; } - else if [ MATCH "^([$(.whitespace)]+)$" : $(string) ] { return true ; } - else { return ; } -} - -rule __test__ ( ) -{ - import assert ; - assert.result a b c : chars abc ; - - assert.result rntm : abbreviate runtime ; - assert.result ovrld : abbreviate overload ; - assert.result dbg : abbreviate debugging ; - assert.result async : abbreviate asynchronous ; - assert.result pop : abbreviate pop ; - assert.result aaa : abbreviate aaa ; - assert.result qck : abbreviate quack ; - assert.result sttc : abbreviate static ; - - # Check boundary cases. - assert.result a : chars a ; - assert.result : chars "" ; - assert.result a b c d e f g h : chars abcdefgh ; - assert.result a b c d e f g h i : chars abcdefghi ; - assert.result a b c d e f g h i j : chars abcdefghij ; - assert.result a b c d e f g h i j k : chars abcdefghijk ; - - assert.result a//b/c/d : join a "" b c d : / ; - assert.result abcd : join a "" b c d ; - - assert.result a b c : words "a b c" ; - - assert.true is-whitespace " " ; - assert.false is-whitespace " a b c " ; - assert.true is-whitespace "" ; - assert.true is-whitespace ; -} diff --git a/jam-files/boost-build/util/utility.jam b/jam-files/boost-build/util/utility.jam deleted file mode 100644 index c46747f5..00000000 --- a/jam-files/boost-build/util/utility.jam +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright 2001, 2002 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Copyright 2008 Jurko Gospodnetic -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -import "class" : is-instance ; -import errors ; - - -# For all elements of 'list' which do not already have 'suffix', add 'suffix'. -# -rule apply-default-suffix ( suffix : list * ) -{ - local result ; - for local i in $(list) - { - if $(i:S) = $(suffix) - { - result += $(i) ; - } - else - { - result += $(i)$(suffix) ; - } - } - return $(result) ; -} - - -# If 'name' contains a dot, returns the part before the last dot. If 'name' -# contains no dot, returns it unmodified. -# -rule basename ( name ) -{ - if $(name:S) - { - name = $(name:B) ; - } - return $(name) ; -} - - -# Return the file of the caller of the rule that called caller-file. -# -rule caller-file ( ) -{ - local bt = [ BACKTRACE ] ; - return $(bt[9]) ; -} - - -# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal' -# method. Uses ordinary jam's comparison otherwise. -# -rule equal ( a b ) -{ - if [ is-instance $(a) ] - { - return [ $(a).equal $(b) ] ; - } - else - { - if $(a) = $(b) - { - return true ; - } - } -} - - -# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less' -# method. Uses ordinary jam's comparison otherwise. -# -rule less ( a b ) -{ - if [ is-instance $(a) ] - { - return [ $(a).less $(b) ] ; - } - else - { - if $(a) < $(b) - { - return true ; - } - } -} - - -# Returns the textual representation of argument. If it is a class instance, -# class its 'str' method. Otherwise, returns the argument. -# -rule str ( value ) -{ - if [ is-instance $(value) ] - { - return [ $(value).str ] ; - } - else - { - return $(value) ; - } -} - - -# Accepts a list of gristed values and returns them ungristed. Reports an error -# in case any of the passed parameters is not gristed, i.e. surrounded in angle -# brackets < and >. 
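Grist is the Boost.Build convention of tagging a value with a bracketed prefix such as <toolset>gcc; ungristing strips exactly one pair of surrounding angle brackets and treats anything not of that form as an error. A minimal Python sketch of the same contract (the Python port's own version appears in util/utility.py later in this diff):

    import re

    _GRISTED = re.compile(r"^<(.*)>$")

    def ungrist(names):
        # Strip the surrounding <...> from each name; reject ungristed input,
        # mirroring the errors.error call in the Jam rule below.
        result = []
        for name in names:
            m = _GRISTED.match(name)
            if not m:
                raise ValueError("in ungrist %s : %s is not of the form <.*>"
                                 % (names, name))
            result.append(m.group(1))
        return result

    assert ungrist(["<foo>", "<bar>"]) == ["foo", "bar"]
    assert ungrist(["<<foo>>"]) == ["<foo>"]   # only the outer brackets are removed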
-# -rule ungrist ( names * ) -{ - local result ; - for local name in $(names) - { - local stripped = [ MATCH ^<(.*)>$ : $(name) ] ; - if ! $(stripped) - { - errors.error "in ungrist $(names) : $(name) is not of the form <.*>" ; - } - result += $(stripped) ; - } - return $(result) ; -} - - -# If the passed value is quoted, unquotes it. Otherwise returns the value -# unchanged. -# -rule unquote ( value ? ) -{ - local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ; - if $(match-result) - { - return $(match-result[2]) ; - } - else - { - return $(value) ; - } -} - - -rule __test__ ( ) -{ - import assert ; - import "class" : new ; - import errors : try catch ; - - assert.result 123 : str 123 ; - - class test-class__ - { - rule __init__ ( ) { } - rule str ( ) { return "str-test-class" ; } - rule less ( a ) { return "yes, of course!" ; } - rule equal ( a ) { return "not sure" ; } - } - - assert.result "str-test-class" : str [ new test-class__ ] ; - assert.true less 1 2 ; - assert.false less 2 1 ; - assert.result "yes, of course!" : less [ new test-class__ ] 1 ; - assert.true equal 1 1 ; - assert.false equal 1 2 ; - assert.result "not sure" : equal [ new test-class__ ] 1 ; - - assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib - ; - - assert.result foo : basename foo ; - assert.result foo : basename foo.so ; - assert.result foo.so : basename foo.so.1 ; - - assert.result : unquote ; - assert.result "" : unquote "" ; - assert.result foo : unquote foo ; - assert.result \"foo : unquote \"foo ; - assert.result foo\" : unquote foo\" ; - assert.result foo : unquote \"foo\" ; - assert.result \"foo\" : unquote \"\"foo\"\" ; - - assert.result : ungrist ; - assert.result foo : ungrist <foo> ; - assert.result <foo> : ungrist <<foo>> ; - assert.result foo bar : ungrist <foo> <bar> ; - - try ; - { - ungrist "" ; - } - catch "in ungrist : is not of the form <.*>" ; - - try ; - { - ungrist <> ; - } - catch "in ungrist <> : <> is not of the form <.*>" ; - - try ; - { - ungrist foo ; - } - catch "in ungrist foo : foo is not of the form <.*>" ; - - try ; - { - ungrist <foo ; - } - catch "in ungrist <foo : <foo is not of the form <.*>" ; - - try ; - { - ungrist foo> ; - } - catch "in ungrist foo> : foo> is not of the form <.*>" ; - - try ; - { - ungrist foo bar ; - } - catch "in ungrist foo : foo is not of the form <.*>" ; - - try ; - { - ungrist foo <bar> ; - } - catch "in ungrist foo : foo is not of the form <.*>" ; - - try ; - { - ungrist <foo> bar ; - } - catch "in ungrist bar : bar is not of the form <.*>" ; -} diff --git a/jam-files/boost-build/util/utility.py b/jam-files/boost-build/util/utility.py deleted file mode 100644 index afea765b..00000000 --- a/jam-files/boost-build/util/utility.py +++ /dev/null @@ -1,155 +0,0 @@ -# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -""" Utility functions to add/remove/get grists. - Grists are string enclosed in angle brackets (<>) that are used as prefixes. See Jam for more information. -""" - -import re -import os -import bjam -from b2.exceptions import * - -__re_grist_and_value = re.compile (r'(<[^>]*>)(.*)') -__re_grist_content = re.compile ('^<(.*)>$') -__re_backslash = re.compile (r'\\') - -def to_seq (value): - """ If value is a sequence, returns it. 
- If it is a string, returns a sequence with value as its sole element. - """ - if not value: - return [] - - if isinstance (value, str): - return [value] - - else: - return value - -def replace_references_by_objects (manager, refs): - objs = [] - for r in refs: - objs.append (manager.get_object (r)) - return objs - -def add_grist (features): - """ Transform a string by bracketing it with "<>". If already bracketed, does nothing. - features: one string or a sequence of strings - return: the gristed string, if features is a string, or a sequence of gristed strings, if features is a sequence - """ - - def grist_one (feature): - if feature [0] != '<' and feature [len (feature) - 1] != '>': - return '<' + feature + '>' - else: - return feature - - if isinstance (features, str): - return grist_one (features) - else: - return [ grist_one (feature) for feature in features ] - -def replace_grist (features, new_grist): - """ Replaces the grist of a string by a new one. - Returns the string with the new grist. - """ - def replace_grist_one (name, new_grist): - split = __re_grist_and_value.match (name) - if not split: - return new_grist + name - else: - return new_grist + split.group (2) - - if isinstance (features, str): - return replace_grist_one (features, new_grist) - else: - return [ replace_grist_one (feature, new_grist) for feature in features ] - -def get_value (property): - """ Gets the value of a property, that is, the part following the grist, if any. - """ - return replace_grist (property, '') - -def get_grist (value): - """ Returns the grist of a string. - If value is a sequence, does it for every value and returns the result as a sequence. - """ - def get_grist_one (name): - split = __re_grist_and_value.match (name) - if not split: - return '' - else: - return split.group (1) - - if isinstance (value, str): - return get_grist_one (value) - else: - return [ get_grist_one (v) for v in value ] - -def ungrist (value): - """ Returns the value without grist. - If value is a sequence, does it for every value and returns the result as a sequence. - """ - def ungrist_one (value): - stripped = __re_grist_content.match (value) - if not stripped: - raise BaseException ("in ungrist: '%s' is not of the form <.*>" % value) - - return stripped.group (1) - - if isinstance (value, str): - return ungrist_one (value) - else: - return [ ungrist_one (v) for v in value ] - -def replace_suffix (name, new_suffix): - """ Replaces the suffix of name by new_suffix. - If no suffix exists, the new one is added. - """ - split = os.path.splitext (name) - return split [0] + new_suffix - -def forward_slashes (s): - """ Converts all backslashes to forward slashes. - """ - return __re_backslash.sub ('/', s) - - -def split_action_id (id): - """ Splits an id in the toolset and specific rule parts. E.g. - 'gcc.compile.c++' returns ('gcc', 'compile.c++') - """ - split = id.split ('.', 1) - toolset = split [0] - name = '' - if len (split) > 1: - name = split [1] - return (toolset, name) - -def os_name (): - result = bjam.variable("OS") - assert(len(result) == 1) - return result[0] - -def platform (): - return bjam.variable("OSPLAT") - -def os_version (): - return bjam.variable("OSVER") - -def on_windows (): - """ Returns true if running on windows, whether in cygwin or not. - """ - if bjam.variable("NT"): - return True - - elif bjam.variable("UNIX"): - - uname = bjam.variable("JAMUNAME") - if uname and uname[0].startswith("CYGWIN"): - return True - - return False |
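Taken together, the grist helpers in util/utility.py wrap bare feature names in angle brackets, swap or strip the bracketed prefix, and split qualified action ids into toolset and rule parts. A short usage sketch, assuming the module is importable as b2.util.utility (the package path is an assumption; the file itself only shows "from b2.exceptions import *"):

    # Hypothetical import location for the helpers defined above.
    from b2.util.utility import (add_grist, replace_grist, get_grist,
                                 get_value, split_action_id)

    print(add_grist("toolset"))                  # '<toolset>'
    print(add_grist(["toolset", "variant"]))     # ['<toolset>', '<variant>']
    print(replace_grist("<toolset>gcc", "<t>"))  # '<t>gcc'
    print(get_grist("<toolset>gcc"))             # '<toolset>'
    print(get_value("<toolset>gcc"))             # 'gcc'
    print(split_action_id("gcc.compile.c++"))    # ('gcc', 'compile.c++')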