Diffstat (limited to 'jam-files/boost-build/tools')
113 files changed, 0 insertions, 22829 deletions
diff --git a/jam-files/boost-build/tools/__init__.py b/jam-files/boost-build/tools/__init__.py deleted file mode 100644 index e69de29b..00000000 --- a/jam-files/boost-build/tools/__init__.py +++ /dev/null diff --git a/jam-files/boost-build/tools/acc.jam b/jam-files/boost-build/tools/acc.jam deleted file mode 100644 index f04c9dc8..00000000 --- a/jam-files/boost-build/tools/acc.jam +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Toon Knapen 2004. -# Copyright Boris Gubenko 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# -# Boost.Build V2 toolset for the HP aC++ compiler. -# - -import toolset : flags ; -import feature ; -import generators ; -import common ; - -feature.extend toolset : acc ; -toolset.inherit acc : unix ; -generators.override builtin.lib-generator : acc.prebuilt ; -generators.override acc.searched-lib-generator : searched-lib-generator ; - -# Configures the acc toolset. -rule init ( version ? : user-provided-command * : options * ) -{ -    local condition = [ common.check-init-parameters acc -        : version $(version) ] ; - -    local command = [ common.get-invocation-command acc : aCC -        : $(user-provided-command) ] ; - -    common.handle-options acc : $(condition) : $(command) : $(options) ; -} - - -# Declare generators -generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ; -generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ; - -# Declare flags. -flags acc CFLAGS <optimization>off : ; -flags acc CFLAGS <optimization>speed : -O3 ; -flags acc CFLAGS <optimization>space : -O2 ; - -flags acc CFLAGS <inlining>off : +d ; -flags acc CFLAGS <inlining>on : ; -flags acc CFLAGS <inlining>full : ; - -flags acc C++FLAGS <exception-handling>off : ; -flags acc C++FLAGS <exception-handling>on : ; - -flags acc C++FLAGS <rtti>off : ; -flags acc C++FLAGS <rtti>on : ; - -# We want the full path to the sources in the debug symbols because otherwise -# the debugger won't find the sources when we use boost.build. -flags acc CFLAGS <debug-symbols>on : -g ; -flags acc LINKFLAGS <debug-symbols>on : -g ; -flags acc LINKFLAGS <debug-symbols>off : -s ; - -# V2 does not have <shared-linkable>, not sure what this meant in V1. -# flags acc CFLAGS <shared-linkable>true : +Z ; - -flags acc CFLAGS <profiling>on : -pg ; -flags acc LINKFLAGS <profiling>on : -pg ; - -flags acc CFLAGS <address-model>64 : +DD64 ; -flags acc LINKFLAGS <address-model>64 : +DD64 ; - -# It is unknown if there's separate option for rpath used only -# at link time, similar to -rpath-link in GNU. We'll use -L. -flags acc RPATH_LINK : <xdll-path> ; - -flags acc CFLAGS <cflags> ; -flags acc C++FLAGS <cxxflags> ; -flags acc DEFINES <define> ; -flags acc UNDEFS <undef> ; -flags acc HDRS <include> ; -flags acc STDHDRS <sysinclude> ; -flags acc LINKFLAGS <linkflags> ; -flags acc ARFLAGS <arflags> ; - -flags acc LIBPATH <library-path> ; -flags acc NEEDLIBS <library-file> ; -flags acc FINDLIBS <find-shared-library> ; -flags acc FINDLIBS <find-static-library> ; - -# Select the compiler name according to the threading model. 
-flags acc CFLAGS <threading>multi : -mt   ; -flags acc LINKFLAGS <threading>multi : -mt ; - -flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; - - -actions acc.link bind NEEDLIBS -{ -    $(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) -} - -SPACE = " " ; -actions acc.link.dll bind NEEDLIBS -{ -    $(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) -} - -actions acc.compile.c -{ -    cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS) -} - -actions acc.compile.c++ -{ -    $(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS) -} - -actions updated together piecemeal acc.archive -{ -    ar ru$(ARFLAGS:E="") "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/auto-index.jam b/jam-files/boost-build/tools/auto-index.jam deleted file mode 100644 index ebbf344e..00000000 --- a/jam-files/boost-build/tools/auto-index.jam +++ /dev/null @@ -1,212 +0,0 @@ - -import feature ; -import generators ; -import "class" ; -import toolset ; -import targets ; -import "class" : new ; -import project ; - -feature.feature auto-index : off "on" ; -feature.feature auto-index-internal : off "on" ; -feature.feature auto-index-verbose : off "on" ; -feature.feature auto-index-no-duplicates : off "on" ; -feature.feature auto-index-script : : free ; -feature.feature auto-index-prefix : : free ; -feature.feature auto-index-type : : free ; -feature.feature auto-index-section-names : "on" off ; - -toolset.flags auto-index.auto-index FLAGS <auto-index-internal>on : --internal-index ; -toolset.flags auto-index.auto-index SCRIPT <auto-index-script> ; -toolset.flags auto-index.auto-index PREFIX <auto-index-prefix> ; -toolset.flags auto-index.auto-index INDEX_TYPE <auto-index-type> ; -toolset.flags auto-index.auto-index FLAGS <auto-index-verbose>on : --verbose ; -toolset.flags auto-index.auto-index FLAGS <auto-index-no-duplicates>on : --no-duplicates ; -toolset.flags auto-index.auto-index FLAGS <auto-index-section-names>off : --no-section-names ; - -# <auto-index-binary> shell command to run AutoIndex -# <auto-index-binary-dependencies> targets to build AutoIndex from sources. -feature.feature <auto-index-binary> : : free ; -feature.feature <auto-index-binary-dependencies> : : free dependency ; - -class auto-index-generator : generator -{ -    import common modules path targets build-system ; -    rule run ( project name ? : property-set : sources * ) -    { -        # AutoIndex invocation command and dependencies. -        local auto-index-binary = [ modules.peek auto-index : .command ] ; -        local auto-index-binary-dependencies ; - -        if $(auto-index-binary) -        { -            # Use user-supplied command. -            auto-index-binary = [ common.get-invocation-command auto-index : auto-index : $(auto-index-binary) ] ; -        } -        else -        { -            # Search for AutoIndex sources in sensible places, like -            #   $(BOOST_ROOT)/tools/auto_index -            #   $(BOOST_BUILD_PATH)/../../auto_index - -            # And build auto-index executable from sources. 
- -            local boost-root = [ modules.peek : BOOST_ROOT ] ; -            local boost-build-path = [ build-system.location ] ; -            local boost-build-path2 = [ modules.peek : BOOST_BUILD_PATH ] ; - -            local auto-index-dir ; - -            if $(boost-root) -            { -                auto-index-dir += [ path.join $(boost-root) tools ] ; -            } - -            if $(boost-build-path) -            { -                auto-index-dir += $(boost-build-path)/../.. ; -            } -            if $(boost-build-path2) -            { -                auto-index-dir += $(boost-build-path2)/.. ; -            } - -            #ECHO $(auto-index-dir) ; -            auto-index-dir = [ path.glob $(auto-index-dir) : auto_index ] ; -            #ECHO $(auto-index-dir) ; - -            # If the AutoIndex source directory was found, mark its main target -            # as a dependency for the current project. Otherwise, try to find -            # 'auto-index' in user's PATH -            if $(auto-index-dir) -            { -                auto-index-dir = [ path.make $(auto-index-dir[1]) ] ; -                auto-index-dir = $(auto-index-dir)/build ; -                 -                #ECHO $(auto-index-dir) ; - -                # Get the main-target in AutoIndex directory. -                local auto-index-main-target = [ targets.resolve-reference $(auto-index-dir) : $(project) ] ; -                 -                #ECHO $(auto-index-main-target) ; - -                # The first element are actual targets, the second are -                # properties found in target-id. We do not care about these -                # since we have passed the id ourselves. -                auto-index-main-target = -                    [ $(auto-index-main-target[1]).main-target auto_index ] ; - -                #ECHO $(auto-index-main-target) ; - -                auto-index-binary-dependencies = -                    [ $(auto-index-main-target).generate [ $(property-set).propagated ] ] ; - -                # Ignore usage-requirements returned as first element. -                auto-index-binary-dependencies = $(auto-index-binary-dependencies[2-]) ; - -                # Some toolsets generate extra targets (e.g. RSP). We must mark -                # all targets as dependencies for the project, but we will only -                # use the EXE target for auto-index-to-boostbook translation. -                for local target in $(auto-index-binary-dependencies) -                { -                    if [ $(target).type ] = EXE -                    { -                        auto-index-binary =  -                            [ path.native  -                                [ path.join -                                    [ $(target).path ] -                                    [ $(target).name ] -                                ] -                            ] ; -                    } -                } -            } -            else -            { -                ECHO "AutoIndex warning: The path to the auto-index executable was" ; -                ECHO "  not provided. Additionally, couldn't find AutoIndex" ; -                ECHO "  sources searching in" ; -                ECHO "    * BOOST_ROOT/tools/auto-index" ; -                ECHO "    * BOOST_BUILD_PATH/../../auto-index" ; -                ECHO "  Will now try to find a precompiled executable by searching" ; -                ECHO "  the PATH for 'auto-index'." 
; -                ECHO "  To disable this warning in the future, or to completely" ; -                ECHO "  avoid compilation of auto-index, you can explicitly set the" ; -                ECHO "  path to a auto-index executable command in user-config.jam" ; -                ECHO "  or site-config.jam with the call" ; -                ECHO "    using auto-index : /path/to/auto-index ;" ; - -                # As a last resort, search for 'auto-index' command in path. Note -                # that even if the 'auto-index' command is not found, -                # get-invocation-command will still return 'auto-index' and might -                # generate an error while generating the virtual-target. - -                auto-index-binary = [ common.get-invocation-command auto-index : auto-index ] ; -            } -        } - -        # Add $(auto-index-binary-dependencies) as a dependency of the current -        # project and set it as the <auto-index-binary> feature for the -        # auto-index-to-boostbook rule, below. -        property-set = [ $(property-set).add-raw -            <dependency>$(auto-index-binary-dependencies) -            <auto-index-binary>$(auto-index-binary) -            <auto-index-binary-dependencies>$(auto-index-binary-dependencies) -        ] ; -         -        #ECHO "binary = " $(auto-index-binary) ; -        #ECHO "dependencies = " $(auto-index-binary-dependencies) ; - -        if [ $(property-set).get <auto-index> ] = "on" -        { -            return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ; -        } -        else -        { -            return [ generators.construct $(project) $(name) : DOCBOOK : $(property-set) -              : $(sources) ] ; -        }         -    } -} - -# Initialization of toolset. -# -# Parameters: -#   command ?    -> path to AutoIndex executable. -# -# When command is not supplied toolset will search for AutoIndex directory and -# compile the executable from source. If that fails we still search the path for -# 'auto_index'. -# -rule init ( -        command ?   # path to the AutoIndex executable. -    ) -{ -    if ! $(.initialized) -    { -        .initialized = true ; -        .command = $(command) ; -    } -} - -toolset.flags auto-index.auto-index AI-COMMAND      <auto-index-binary> ; -toolset.flags auto-index.auto-index AI-DEPENDENCIES <auto-index-binary-dependencies> ; - -generators.register [ class.new auto-index-generator auto-index.auto-index : DOCBOOK : DOCBOOK(%.auto_index) ] ; -generators.override auto-index.auto-index : boostbook.boostbook-to-docbook ; - -rule auto-index ( target : source : properties * ) -{ -    # Signal dependency of auto-index sources on <auto-index-binary-dependencies> -    # upon invocation of auto-index-to-boostbook. -    #ECHO "AI-COMMAND= " $(AI-COMMAND) ; -    DEPENDS $(target) : [ on $(target) return $(AI-DEPENDENCIES) ] ; -    #DEPENDS $(target) : [ on $(target) return $(SCRIPT) ] ; -} - -actions auto-index -{ -    $(AI-COMMAND) $(FLAGS) "--prefix="$(PREFIX) "--script="$(SCRIPT) "--index-type="$(INDEX_TYPE) "--in="$(>) "--out="$(<) -} - - diff --git a/jam-files/boost-build/tools/bison.jam b/jam-files/boost-build/tools/bison.jam deleted file mode 100644 index 0689d4bd..00000000 --- a/jam-files/boost-build/tools/bison.jam +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2003 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -import generators ; -import feature ; -import type ; -import property ; - -feature.feature bison.prefix : : free ; -type.register Y : y ; -type.register YY : yy ; -generators.register-standard bison.bison : Y : C H ; -generators.register-standard bison.bison : YY : CPP HPP ; - -rule init ( ) -{ -} - -rule bison ( dst dst_header : src : properties * ) -{ -    local r = [ property.select bison.prefix : $(properties) ] ; -    if $(r) -    { -        PREFIX_OPT on $(<) = -p $(r:G=) ; -    } -} - -actions bison  -{ -    bison $(PREFIX_OPT) -d -o $(<[1]) $(>) -} diff --git a/jam-files/boost-build/tools/boostbook-config.jam b/jam-files/boost-build/tools/boostbook-config.jam deleted file mode 100644 index 6e3f3ddc..00000000 --- a/jam-files/boost-build/tools/boostbook-config.jam +++ /dev/null @@ -1,13 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for BoostBook tools. To use, just import this module. -# -# This module is deprecated. -#   using boostbook ; -# with no arguments now suffices. - -import toolset : using ; - -using boostbook ; diff --git a/jam-files/boost-build/tools/boostbook.jam b/jam-files/boost-build/tools/boostbook.jam deleted file mode 100644 index 3a5964c6..00000000 --- a/jam-files/boost-build/tools/boostbook.jam +++ /dev/null @@ -1,727 +0,0 @@ -# Copyright 2003, 2004, 2005 Dave Abrahams  -# Copyright 2003, 2004, 2005 Douglas Gregor  -# Copyright 2005, 2006, 2007 Rene Rivera  -# Copyright 2003, 2004, 2005 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -#  This module defines rules to handle generation of documentation -#  from BoostBook sources. -# -#  The type of output is controlled by the <format> feature which can -#  have the following values:: -# -#   * html: Generates html documention.  This is the default. -#   * xhtml: Generates xhtml documentation -#   * htmlhelp: Generates html help output. -#   * onehtml: Generates a single html page. -#   * man: Generates man pages. -#   * pdf: Generates pdf documentation. -#   * ps: Generates postscript output. -#   * docbook: Generates docbook XML. -#   * fo: Generates XSL formating objects. -#   * tests: Extracts test cases from the boostbook XML. -# -#  format is an implicit feature, so typing pdf on the command -#  line (for example) is a short-cut for format=pdf. - -import "class" : new ; -import common ; -import errors ; -import targets ; -import feature ; -import generators ; -import print ; -import property ; -import project ; -import property-set ; -import regex ; -import scanner ; -import sequence ; -import make ; -import os ; -import type ; -import modules path project ; -import build-system ; - -import xsltproc : xslt xslt-dir ; - -# Make this module into a project. 
-project.initialize $(__name__) ; -project boostbook ; - - -feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests -  : incidental implicit composite propagated ; - -type.register DTDXML : dtdxml ; -type.register XML : xml ; -type.register BOOSTBOOK : boostbook : XML ; -type.register DOCBOOK : docbook : XML ;  -type.register FO : fo : XML ; -type.register PDF : pdf ; -type.register PS : ps ; -type.register XSLT : xsl : XML ; -type.register HTMLDIR ; -type.register XHTMLDIR ; -type.register HTMLHELP ; -type.register MANPAGES ; -type.register TESTS : tests ; -# Artificial target type, used to require invocation of top-level -# BoostBook generator. -type.register BOOSTBOOK_MAIN ; - - -# Initialize BoostBook support. -rule init ( -      docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not -                        # provided, we use DOCBOOK_XSL_DIR from the environment -                        # (if available) or look in standard locations. -                        # Otherwise, we let the XML processor load the -                        # stylesheets remotely. -             -    : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use -                        # DOCBOOK_DTD_DIR From the environment (if available) or -                        # look in standard locations.  Otherwise, we let the XML -                        # processor load the DTD remotely. - -    : boostbook-dir ?   # The BoostBook directory with the DTD and XSL subdirs. -) -{ - -  if ! $(.initialized)  -  { -    .initialized = true ; -     -    check-boostbook-dir $(boostbook-dir) ; -    find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ; - -    # Register generators only if we've were called via "using boostbook ; " -    generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ; -    generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ; -    generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ; -    generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ; -    generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ; -    generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ; -    generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ; -    generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ; -    generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ; -     -    # The same about Jamfile main target rules. -    IMPORT $(__name__) : boostbook : : boostbook ; -  } -  else -  { -    if $(docbook-xsl-dir)  -    { -      modify-config ; -      .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ; -      check-docbook-xsl-dir ; -    } -    if $(docbook-dtd-dir)  -    { -      modify-config ; -      .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ; -      check-docbook-dtd-dir ; -    } -    if $(boostbook-dir) -    { -      modify-config ; -      check-boostbook-dir $(boostbook-dir) ; -      local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ; -      local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ; -      .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ; -      .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ; -      check-boostbook-xsl-dir ; -      check-boostbook-dtd-dir ; -    } -  } -} - -rule lock-config ( ) -{ -  if ! $(.initialized) -  { -    errors.user-error "BoostBook has not been configured." ; -  } -  if ! 
$(.config-locked) -  { -    .config-locked = true ; -  } -} - -rule modify-config ( ) -{ -  if $(.config-locked) -  { -    errors.user-error "BoostBook configuration cannot be changed after it has been used." ; -  } -} - -rule find-boost-in-registry ( keys * ) -{ -  local boost-root = ; -  for local R in $(keys)  -  { -    local installed-boost = [ W32_GETREG -      "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)" -      : "InstallRoot" ] ; -    if $(installed-boost) -    { -      boost-root += [ path.make $(installed-boost) ] ; -    } -  } -  return $(boost-root) ; -} - -rule check-docbook-xsl-dir ( ) -{ -  if $(.docbook-xsl-dir) -  { -    if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ] -    { -      errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ; -    } -    else -    { -      if --debug-configuration in [ modules.peek : ARGV ] -      { -        ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ; -      } -    } -  } -} - -rule check-docbook-dtd-dir ( ) -{ -  if $(.docbook-dtd-dir) -  { -    if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ] -    { -      errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ; -    } -    else -    { -      if --debug-configuration in [ modules.peek : ARGV ] -      { -        ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ; -      } -    } -  } -} - -rule check-boostbook-xsl-dir ( ) -{ -  if ! $(.boostbook-xsl-dir) -  { -    errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ; -  } -  else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ] -  { -    errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ; -  } -  else -  { -    if --debug-configuration in [ modules.peek : ARGV ] -    { -      ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ; -    } -  } -} - -rule check-boostbook-dtd-dir ( ) -{ -  if ! $(.boostbook-dtd-dir) -  { -    errors.user-error "error: BoostBook: could not find boostbook DTD." ; -  } -  else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ] -  { -    errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ; -  } -  else -  { -    if --debug-configuration in [ modules.peek : ARGV ] -    { -      ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ; -    } -  } -} - -rule check-boostbook-dir ( boostbook-dir ? ) -{ -  if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ] -  { -    errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ; -  } -} - -rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? ) -{ -  docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ; -  docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ; -  boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ; - -  # Look for the boostbook stylesheets relative to BOOST_ROOT -  # and Boost.Build. -  local boost-build-root = [ path.make [ build-system.location ] ] ; -  local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. 
] ; - -  local boost-root =  [ modules.peek : BOOST_ROOT ] ; -  if $(boost-root) -  { -    boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ; -  } -  boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;  - -  # Try to find the tools in platform specific locations -  if [ os.name ] = NT -  { -    # If installed by the Boost installer. -    local boost-root = ; - -    local boost-installer-versions = snapshot cvs 1.33.0 ; -    local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ; -    local boostpro-installer-versions = -        1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0 -        1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ; - -    local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ; - -    # Make sure that the most recent version is searched for first -    boost-root += [ sequence.reverse -      [ find-boost-in-registry -        Boost-Consulting.com\\$(boost-consulting-installer-versions) -        boostpro.com\\$(boostpro-installer-versions) ] ] ; - -    # Plausible locations. -    local root = [ PWD ] ; -    while $(root) != $(root:D) { root = $(root:D) ; } -    root = [ path.make $(root) ] ; -    local search-dirs = ; -    local docbook-search-dirs = ; -    for local p in $(boost-root) { -      search-dirs += [ path.join $(p) tools ] ; -    } -    for local p in $(old-installer-root) -    { -      search-dirs += [ path.join $(p) share ] ; -      docbook-search-dirs += [ path.join $(p) share ] ; -    } -    search-dirs += [ path.join $(root) Boost tools ] ; -    search-dirs += [ path.join $(root) Boost share ] ; -    docbook-search-dirs += [ path.join $(root) Boost share ] ; - -    docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ; -    docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ; -    boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ; -  } -  else -  { -    # Plausible locations. 
- -    local share = /usr/local/share /usr/share /opt/share /opt/local/share ; -    local dtd-versions = 4.2 ; - -    docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ; -    docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ; -    docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ; - -    docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ; -    docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ; -    docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ; - -    boostbook-dir ?= [ path.glob $(share) : boostbook* ] ; - -    # Ubuntu Linux -    docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ; -    docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ; -  } - -  if $(docbook-xsl-dir)  -  { -    .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ; -  } -  if $(docbook-dtd-dir)  -  { -    .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ; -  } - -  if --debug-configuration in [ modules.peek : ARGV ]  -  { -    ECHO "notice: Boost.Book: searching XSL/DTD in" ; -    ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ; -  }     -  local boostbook-xsl-dir ; -  for local dir in $(boostbook-dir) { -    boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ; -  } -  local boostbook-dtd-dir ;  -  for local dir in $(boostbook-dir) { -    boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ; -  } -  .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ; -  .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ; - -  check-docbook-xsl-dir ; -  check-docbook-dtd-dir ; -  check-boostbook-xsl-dir ; -  check-boostbook-dtd-dir ;  -} - -rule xsl-dir -{ -  lock-config ; -  return $(.boostbook-xsl-dir) ; -} - -rule dtd-dir -{ -  lock-config ; -  return $(.boostbook-dtd-dir) ; -} - -rule docbook-xsl-dir -{ -  lock-config ; -  return $(.docbook-xsl-dir) ; -} - -rule docbook-dtd-dir -{ -  lock-config ; -  return $(.docbook-dtd-dir) ; -} - -rule dtdxml-to-boostbook ( target : source : properties * ) -{ -  lock-config ; -  xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl"  -                 : $(properties) ; -} - -rule boostbook-to-docbook ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ; -  xslt $(target) : $(source) $(stylesheet) : $(properties) ; -} - -rule docbook-to-onehtml ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ; -  xslt $(target) : $(source) $(stylesheet) : $(properties) ; -} - -rule docbook-to-htmldir ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ; -  xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ; -} - -rule docbook-to-xhtmldir ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ; -  xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ; -} - -rule docbook-to-htmlhelp ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ; -  xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ; -} - -rule docbook-to-manpages ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ; -  xslt-dir $(target) : $(source) $(stylesheet) : 
$(properties) : man ; -} - -rule docbook-to-fo ( target : source : properties * ) -{ -  lock-config ; -  local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ; -  xslt $(target) : $(source) $(stylesheet) : $(properties) ; -} - -rule format-catalog-path ( path ) -{ -    local result = $(path) ; -    if [ xsltproc.is-cygwin ] -    { -        if [ os.name ] = NT -        { -            drive = [ MATCH ^/(.):(.*)$ : $(path) ] ; -            result = /cygdrive/$(drive[1])$(drive[2]) ; -        } -    } -    else -    { -        if [ os.name ] = CYGWIN -        { -            local native-path = [ path.native $(path) ] ; -            result = [ path.make $(native-path:W) ] ; -        } -    } -    return [ regex.replace $(result) " " "%20" ] ; -} - -rule generate-xml-catalog ( target : sources * : properties * ) -{ -  print.output $(target) ; - -  # BoostBook DTD catalog entry -  local boostbook-dtd-dir = [ boostbook.dtd-dir ] ; -  if $(boostbook-dtd-dir) -  {       -    boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ; -  } -     -  print.text -    "<?xml version=\"1.0\"?>" -    "<!DOCTYPE catalog " -    "  PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\"" -    "  \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">" -    "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">" -    "  <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>" -    : true ; - -  local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;     -  if ! $(docbook-xsl-dir)  -  { -    ECHO "BoostBook warning: no DocBook XSL directory specified." ; -    ECHO "  If you have the DocBook XSL stylesheets installed, please " ; -    ECHO "  set DOCBOOK_XSL_DIR to the stylesheet directory on either " ; -    ECHO "  the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ; -    ECHO "  Boost.Jam configuration file. The DocBook XSL stylesheets " ; -    ECHO "  are available here: http://docbook.sourceforge.net/ " ; -    ECHO "  Stylesheets will be downloaded on-the-fly (very slow!) " ; -  } -  else  -  { -    docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;       -    print.text "  <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ; -  } - -  local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;   -  if ! $(docbook-dtd-dir) -  { -    ECHO "BoostBook warning: no DocBook DTD directory specified." ; -    ECHO "  If you have the DocBook DTD installed, please set " ; -    ECHO "  DOCBOOK_DTD_DIR to the DTD directory on either " ; -    ECHO "  the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ; -    ECHO "  Boost.Jam configuration file. The DocBook DTD is available " ; -    ECHO "  here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ; -    ECHO "  The DTD will be downloaded on-the-fly (very slow!) " ; -  } -  else  -  { -    docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;       -    print.text "  <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ; -  } - -  print.text "</catalog>" ; -} - -rule xml-catalog ( ) -{ -    if ! $(.xml-catalog) -    { -        # The target is created as part of the root project. But ideally -        # it would be created as part of the boostbook project. This is not -        # current possible as such global projects don't inherit things like -        # the build directory. 
-         -        # Find the root project. -        local root-project = [ project.current ] ; -        root-project = [ $(root-project).project-module ] ; -        while -            [ project.attribute $(root-project) parent-module ] && -            [ project.attribute $(root-project) parent-module ] != user-config && -            [ project.attribute $(root-project) parent-module ] != project-config -        { -            root-project = [ project.attribute $(root-project) parent-module ] ; -        } -        .xml-catalog = [ new file-target boostbook_catalog -            : XML -            : [ project.target $(root-project) ] -            : [ new action : boostbook.generate-xml-catalog ] -            : -            ] ; -        .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ; -        .xml-catalog-file = $(.xml-catalog-file:J=/) ; -    } -    return $(.xml-catalog) $(.xml-catalog-file) ; -} - -class boostbook-generator : generator -{ -    import feature ; -    import virtual-target ; -    import generators ; -    import boostbook ; -       -     -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -     -    rule run ( project name ? : property-set : sources * ) -    { -        # Generate the catalog, but only once... -        local global-catalog = [ boostbook.xml-catalog ] ; -        local catalog = $(global-catalog[1]) ; -        local catalog-file = $(global-catalog[2]) ; -        local targets ; -        -        # Add the catalog to the property set -        property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ; - -        local type = none ; -        local manifest ;  -        local format = [ $(property-set).get <format> ] ; -        switch $(format)  -        { -            case html    :  -            { -                type = HTMLDIR ; -                manifest = HTML.manifest ; -            } -            case xhtml    :  -            { -                type = XHTMLDIR ; -                manifest = HTML.manifest ; -            } -            case htmlhelp    :  -            { -                type = HTMLHELP ; -                manifest = HTML.manifest ; -            } -             -            case onehtml : type = HTML ; -             -            case man :  -            { -                type = MANPAGES ; -                manifest = man.manifest ; -            } -             -            case docbook : type = DOCBOOK ; -            case fo      : type = FO ; -            case pdf     : type = PDF ; -            case ps      : type = PS ; -            case tests   : type = TESTS ; -        } -         -        if $(manifest) -        { -            # Create DOCBOOK file from BOOSTBOOK sources. -            local base-target = [ generators.construct $(project)  -              : DOCBOOK : $(property-set) : $(sources) ] ; -            base-target = $(base-target[2]) ; -            $(base-target).depends $(catalog) ; -             -            # Generate HTML/PDF/PS from DOCBOOK. 
-            local target = [ generators.construct $(project) $(name)_$(manifest) -                : $(type) -                : [ $(property-set).add-raw -                    <xsl:param>manifest=$(name)_$(manifest) ] -                : $(base-target) ] ; -            local name = [ $(property-set).get <name> ] ; -            name ?= $(format) ; -            $(target[2]).set-path $(name) ; -            $(target[2]).depends $(catalog) ;             - -            targets += $(target[2]) ; -        } -        else { -            local target = [ generators.construct $(project) -              : $(type) : $(property-set) : $(sources) ] ; -             -            if ! $(target) -            { -                errors.error "Cannot build documentation type '$(format)'" ; -            } -            else  -            { -                $(target[2]).depends $(catalog) ; -                targets += $(target[2]) ; -            } -        } -         -        return $(targets) ; -    } -} - -generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ; - -# Creates a boostbook target. -rule boostbook ( target-name : sources * : requirements * : default-build * ) -{  -  local project = [ project.current ] ; -     -  targets.main-target-alternative  -    [ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN -        : [ targets.main-target-sources $(sources) : $(target-name) ]  -        : [ targets.main-target-requirements $(requirements) : $(project) ] -        : [ targets.main-target-default-build $(default-build) : $(project) ]  -    ] ; -} - -############################################################################# -# Dependency scanners -############################################################################# -# XInclude scanner. Mostly stolen from c-scanner :) -# Note that this assumes an "xi" prefix for XIncludes. This isn't always the -# case for XML documents, but we'll assume it's true for anything we encounter. -class xinclude-scanner : scanner  -{ -    import virtual-target ; -    import path ; -    import scanner ; -         -    rule __init__ ( includes * ) -    { -        scanner.__init__ ; -        self.includes = $(includes) ; -    } -     -  rule pattern ( ) -  { -    return "xi:include[ ]*href=\"([^\"]*)\"" ; -  } - -  rule process ( target : matches * : binding ) -  { -    local target_path = [ NORMALIZE_PATH $(binding:D) ] ; - -    NOCARE $(matches) ; -    INCLUDES $(target) : $(matches) ; -    SEARCH on $(matches) = $(target_path) $(self.includes:G=) ; -     -    scanner.propagate $(__name__) : $(matches) : $(target) ;      -  } -} - -scanner.register xinclude-scanner : xsl:path ; -type.set-scanner XML : xinclude-scanner ; - -rule boostbook-to-tests ( target : source : properties * ) -{ -  lock-config ; -  local boost_root = [ modules.peek : BOOST_ROOT ] ; -  local native-path = -    [ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ; -  local stylesheet = $(native-path:S=.xsl) ; -  xslt $(target) : $(source) $(stylesheet)  -                 : $(properties) <xsl:param>boost.root=$(boost_root)  -                 ; -} - - diff --git a/jam-files/boost-build/tools/borland.jam b/jam-files/boost-build/tools/borland.jam deleted file mode 100644 index 6e43ca93..00000000 --- a/jam-files/boost-build/tools/borland.jam +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright 2005 Dave Abrahams  -# Copyright 2003 Rene Rivera  -# Copyright 2003, 2004, 2005 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -#  Support for the Borland's command line compiler - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature : get-values ; -import type ; -import common ; - -feature.extend toolset : borland ; - -rule init ( version ? : command * : options * ) -{ -    local condition = [ common.check-init-parameters borland : -        version $(version) ] ; -     -    local command = [ common.get-invocation-command borland : bcc32.exe  -        : $(command) ] ; -      -    common.handle-options borland : $(condition) : $(command) : $(options) ;     -     -    if $(command) -    { -        command = [ common.get-absolute-tool-path $(command[-1]) ] ; -    }    -    root = $(command:D) ;     -     -    flags borland.compile STDHDRS $(condition) : $(root)/include/ ; -    flags borland.link STDLIBPATH $(condition) : $(root)/lib ; -    flags borland.link RUN_PATH $(condition) : $(root)/bin ; -    flags borland .root $(condition) : $(root)/bin/ ;     -} - - -# A borland-specific target type -type.register BORLAND.TDS : tds ; - -# Declare generators - -generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ; -generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ; - -generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ; -generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ; -generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ; -generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ; - -# Declare flags  - -flags borland.compile OPTIONS <debug-symbols>on : -v ; -flags borland.link OPTIONS <debug-symbols>on : -v ; - -flags borland.compile OPTIONS <optimization>off : -Od ; -flags borland.compile OPTIONS <optimization>speed : -O2 ; -flags borland.compile OPTIONS <optimization>space : -O1 ; - -if $(.BORLAND_HAS_FIXED_INLINING_BUGS) -{ -    flags borland CFLAGS <inlining>off : -vi- ; -    flags borland CFLAGS <inlining>on : -vi -w-inl ; -    flags borland CFLAGS <inlining>full : -vi -w-inl ; -} -else -{ -    flags borland CFLAGS : -vi- ; -} - -flags borland.compile OPTIONS <warnings>off : -w- ; -flags borland.compile OPTIONS <warnings>all : -w ; -flags borland.compile OPTIONS <warnings-as-errors>on : -w! ; - - -# Deal with various runtime configs... - -# This should be not for DLL -flags borland OPTIONS <user-interface>console : -tWC ; - -# -tWR sets -tW as well, so we turn it off here and then turn it  -# on again later if we need it: -flags borland OPTIONS <runtime-link>shared : -tWR -tWC ; -flags borland OPTIONS <user-interface>gui : -tW ; - -flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ; -# Hmm.. not sure what's going on here. 
-flags borland OPTIONS : -WM- ; -flags borland OPTIONS <threading>multi : -tWM ; - - - -flags borland.compile OPTIONS <cxxflags> ; -flags borland.compile DEFINES <define> ; -flags borland.compile INCLUDES <include> ; - -flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ; - -# -# for C++ compiles the following options are turned on by default: -# -# -j5    stops after 5 errors -# -g255  allow an unlimited number of warnings -# -q     no banner -# -c     compile to object -# -P     C++ code regardless of file extention -# -a8    8 byte alignment, this option is on in the IDE by default  -#        and effects binary compatibility. -# - -# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)"  -I"$(STDHDRS)" -o"$(<)" "$(>)" - - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)" -} - -# For C, we don't pass -P flag -actions compile.c -{ -    "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)" -} - - -# Declare flags and action for linking -toolset.flags borland.link OPTIONS <debug-symbols>on : -v ; -toolset.flags borland.link LIBRARY_PATH <library-path> ; -toolset.flags borland.link FINDLIBS_ST <find-static-library> ; -toolset.flags borland.link FINDLIBS_SA <find-shared-library> ; -toolset.flags borland.link LIBRARIES <library-file> ; - -flags borland.link OPTIONS <linkflags> ; -flags borland.link OPTIONS <link>shared : -tWD ; - -flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ; -flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ; - - - -# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly -# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add -# $(BCC_TOOL_PATH) to the path -# The NEED_IMPLIB variable controls whether we need to invoke implib. - -flags borland.archive AROPTIONS <archiveflags> ; - -# Declare action for archives. We don't use response file -# since it's hard to get "+-" there. -# The /P256 increases 'page' size -- with too low -# values tlib fails when building large applications. -# CONSIDER: don't know what 'together' is for... -actions updated together piecemeal archive -{  -    $(.set-path)$(.root:W)$(.old-path) -    tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)" -} - - -if [ os.name ] = CYGWIN -{ -    .set-path = "cmd /S /C set \"PATH=" ; -    .old-path = ";%PATH%\" \"&&\"" ; -     - -    # Couldn't get TLIB to stop being confused about pathnames -    # containing dashes (it seemed to treat them as option separators -    # when passed through from bash), so we explicitly write the -    # command into a .bat file and execute that.  TLIB is also finicky -    # about pathname style! Forward slashes, too, are treated as -    # options. 
-    actions updated together piecemeal archive -    {  -       chdir $(<:D) -       echo +-$(>:BS) > $(<:BS).rsp -       $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp -    }     -} -else if [ os.name ] = NT -{ -    .set-path = "set \"PATH=" ; -    .old-path = ";%PATH%\" -      " ; -} -else -{ -    .set-path = "PATH=\"" ; -    .old-path = "\":$PATH -      export PATH -      " ; -} - -RM = [ common.rm-command ] ; - -nl = " -" ; - -actions link -{ -    $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -} - - -actions link.dll bind LIBRARIES RSP -{ -    $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"  && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)" -} - -# It seems impossible to specify output file with directory when compiling -# asm files using bcc32, so use tasm32 directly. -# /ml makes all symbol names case-sensitive -actions asm -{ -    $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)" -} - diff --git a/jam-files/boost-build/tools/builtin.jam b/jam-files/boost-build/tools/builtin.jam deleted file mode 100644 index 148e7308..00000000 --- a/jam-files/boost-build/tools/builtin.jam +++ /dev/null @@ -1,960 +0,0 @@ -# Copyright 2002, 2003, 2004, 2005 Dave Abrahams -# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera -# Copyright 2006 Juergen Hunold -# Copyright 2005 Toon Knapen -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines standard features and rules. - -import alias ; -import "class" : new ; -import errors ; -import feature ; -import generators ; -import numbers ; -import os ; -import path ; -import print ; -import project ; -import property ; -import regex ; -import scanner ; -import sequence ; -import stage ; -import symlink ; -import toolset ; -import type ; -import targets ; -import types/register ; -import utility ; -import virtual-target ; -import message ; -import convert ; - -# FIXME: the following generate module import is not needed here but removing it -# too hastly will break using code (e.g. the main Boost library Jamroot file) -# that forgot to import the generate module before calling the generate rule. -import generate ; - - -.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd -    openbsd osf qnx qnxnto sgi solaris unix unixware windows  -    elf # Not actually an OS -- used for targeting bare metal where -        # object format is ELF.  This catches both -elf and -eabi gcc -        # targets and well as other compilers targeting ELF. It is not -        # clear how often do we need to key of ELF specifically as opposed -        # to other bare metal targets, but let's stick with gcc naming. -    ; - -# Feature used to determine which OS we're on. New <target-os> and <host-os> -# features should be used instead. 
-local os = [ modules.peek : OS ] ; -feature.feature os : $(os) : propagated link-incompatible ; - - -# Translates from bjam current OS to the os tags used in host-os and target-os, -# i.e. returns the running host-os. -# -local rule default-host-os ( ) -{ -    local host-os ; -    if [ os.name ] in $(.os-names:U) -    { -        host-os = [ os.name ] ; -    } -    else -    { -        switch [ os.name ] -        { -            case NT           : host-os = windows ; -            case AS400        : host-os = unix    ; -            case MINGW        : host-os = windows ; -            case BSDI         : host-os = bsd     ; -            case COHERENT     : host-os = unix    ; -            case DRAGONFLYBSD : host-os = bsd     ; -            case IRIX         : host-os = sgi     ; -            case MACOSX       : host-os = darwin  ; -            case KFREEBSD     : host-os = freebsd ; -            case LINUX        : host-os = linux   ; -            case SUNOS        :  -              ECHO "SunOS is not a supported operating system." ; -              ECHO "We believe last version of SunOS was released in 1992, " ; -              ECHO "so if you get this message, something is very wrong with configuration logic. " ; -              ECHO "Please report this as a bug. " ; -              EXIT ; -            case *            : host-os = unix    ; -        } -    } -    return $(host-os:L) ; -} - - -# The two OS features define a known set of abstract OS names. The host-os is -# the OS under which bjam is running. Even though this should really be a fixed -# property we need to list all the values to prevent unknown value errors. Both -# set the default value to the current OS to account for the default use case of -# building on the target OS. -feature.feature host-os : $(.os-names) ; -feature.set-default host-os : [ default-host-os ] ; - -feature.feature target-os : $(.os-names) : propagated link-incompatible ; -feature.set-default target-os : [ default-host-os ] ; - - -feature.feature toolset            :                 : implicit propagated symmetric ; -feature.feature stdlib             : native          : propagated composite ; -feature.feature link               : shared static   : propagated ; -feature.feature runtime-link       : shared static   : propagated ; -feature.feature runtime-debugging  : on off          : propagated ; -feature.feature optimization       : off speed space none : propagated ; -feature.feature profiling          : off on          : propagated ; -feature.feature inlining           : off on full     : propagated ; -feature.feature threading          : single multi    : propagated ; -feature.feature rtti               : on off          : propagated ; -feature.feature exception-handling : on off          : propagated ; - -# Whether there is support for asynchronous EH (e.g. catching SEGVs). -feature.feature asynch-exceptions  : off on          : propagated ; - -# Whether all extern "C" functions are considered nothrow by default. -feature.feature extern-c-nothrow   : off on          : propagated ; - -feature.feature debug-symbols      : on off none          : propagated ; -# Controls whether the binary should be stripped -- that is have -# everything not necessary to running removed. This option should -# not be very often needed. Also, this feature will show up in -# target paths of everything, not just binaries. Should fix that -# when impelementing feature relevance. 
-feature.feature strip              : off on          : propagated ; -feature.feature define             :                 : free ; -feature.feature undef              :                 : free ; -feature.feature "include"          :                 : free path ; #order-sensitive ; -feature.feature cflags             :                 : free ; -feature.feature cxxflags           :                 : free ; -feature.feature fflags             :                 : free ; -feature.feature asmflags           :                 : free ; -feature.feature linkflags          :                 : free ; -feature.feature archiveflags       :                 : free ; -feature.feature version            :                 : free ; - -# Generic, i.e. non-language specific, flags for tools. -feature.feature flags           : : free ; -feature.feature location-prefix : : free ; - - -# The following features are incidental since they have no effect on built -# products. Not making them incidental will result in problems in corner cases, -# e.g.: -# -#    unit-test a : a.cpp : <use>b ; -#    lib b : a.cpp b ; -# -# Here, if <use> is not incidental, we would decide we have two targets for -# a.obj with different properties and complain about it. -# -# Note that making a feature incidental does not mean it is ignored. It may be -# ignored when creating a virtual target, but the rest of build process will use -# them. -feature.feature use                 : : free dependency incidental ; -feature.feature dependency          : : free dependency incidental ; -feature.feature implicit-dependency : : free dependency incidental ; - -feature.feature warnings : -    on         # Enable default/"reasonable" warning level for the tool. -    all        # Enable all possible warnings issued by the tool. -    off        # Disable all warnings issued by the tool. -  : incidental propagated ; - -feature.feature warnings-as-errors : -    off        # Do not fail the compilation if there are warnings. -    on         # Fail the compilation if there are warnings. -  : incidental propagated ; - -# Feature that allows us to configure the maximal template instantiation depth -# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers -# actually support this configuration setting. -# -# Note that Boost Build currently does not allow defining features that take any -# positive integral value as a parameter, which is what we need here, so we just -# define some of the values here and leave it up to the user to extend this set -# as he needs using the feature.extend rule. -# -# TODO: This should be upgraded as soon as Boost Build adds support for custom -# validated feature values or at least features allowing any positive integral -# value. See related Boost Build related trac ticket #194. -# -feature.feature c++-template-depth -    : -        [ numbers.range 64 1024 : 64 ] -        [ numbers.range 20 1000 : 10 ] -        #   Maximum template instantiation depth guaranteed for ANSI/ISO C++ -        # conforming programs. 
-        17 -    : -        incidental optional propagated ; - -feature.feature source              :            : free dependency incidental ; -feature.feature library             :            : free dependency incidental ; -feature.feature file                :            : free dependency incidental ; -feature.feature find-shared-library :            : free ; #order-sensitive ; -feature.feature find-static-library :            : free ; #order-sensitive ; -feature.feature library-path        :            : free path ; #order-sensitive ; - -# Internal feature. -feature.feature library-file        :            : free dependency ; - -feature.feature name                :            : free ; -feature.feature tag                 :            : free ; -feature.feature search              :            : free path ; #order-sensitive ; -feature.feature location            :            : free path ; -feature.feature dll-path            :            : free path ; -feature.feature hardcode-dll-paths  : true false : incidental ; - - -# An internal feature that holds the paths of all dependency shared libraries. -# On Windows, it is needed so that we can add all those paths to PATH when -# running applications. On Linux, it is needed to add proper -rpath-link command -# line options. -feature.feature xdll-path : : free path ; - -# Provides means to specify def-file for windows DLLs. -feature.feature def-file : : free dependency ; - -feature.feature suppress-import-lib : false true : incidental ; - -# Internal feature used to store the name of a bjam action to call when building -# a target. -feature.feature action : : free ; - -# This feature is used to allow specific generators to run. For example, QT -# tools can only be invoked when QT library is used. In that case, <allow>qt -# will be in usage requirement of the library. -feature.feature allow : : free ; - -# The addressing model to generate code for. Currently a limited set only -# specifying the bit size of pointers. -feature.feature address-model : 16 32 64 32_64 : propagated optional ; - -# Type of CPU architecture to compile for. -feature.feature architecture : -    # x86 and x86-64 -    x86 - -    # ia64 -    ia64 - -    # Sparc -    sparc - -    # RS/6000 & PowerPC -    power - -    # MIPS/SGI -    mips1 mips2 mips3 mips4 mips32 mips32r2 mips64 - -    # HP/PA-RISC -    parisc - -    # Advanced RISC Machines -    arm - -    # Combined architectures for platforms/toolsets that support building for -    # multiple architectures at once. "combined" would be the default multi-arch -    # for the toolset. -    combined -    combined-x86-power - -    : propagated optional ; - -# The specific instruction set in an architecture to compile. 
-feature.feature instruction-set : -    # x86 and x86-64 -    native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3 -    pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe -    conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale -    yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp -    athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2 - -    # ia64 -    itanium itanium1 merced itanium2 mckinley - -    # Sparc -    v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934 -    sparclet tsc701 v9 ultrasparc ultrasparc3 - -    # RS/6000 & PowerPC -    401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400 -    7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2 -    power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a - -    # MIPS -    4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650 -    r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300 -    vr5000 vr5400 vr5500 - -    # HP/PA-RISC -    700 7100 7100lc 7200 7300 8000 - -    # Advanced RISC Machines -    armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312 - -    : propagated optional ; - -# Used to select a specific variant of C++ ABI if the compiler supports several. -feature.feature c++abi : : propagated optional ; - -feature.feature conditional : : incidental free ; - -# The value of 'no' prevents building of a target. -feature.feature build : yes no : optional ; - -# Windows-specific features - -feature.feature user-interface : console gui wince native auto ; - -feature.feature variant : : implicit composite propagated symmetric ; - - -# Declares a new variant. -# -# First determines explicit properties for this variant, by refining parents' -# explicit properties with the passed explicit properties. The result is -# remembered and will be used if this variant is used as parent. -# -# Second, determines the full property set for this variant by adding to the -# explicit properties default values for all missing non-symmetric properties. -# -# Lastly, makes appropriate value of 'variant' property expand to the full -# property set. -# -rule variant ( name            # Name of the variant -    : parents-or-properties *  # Specifies parent variants, if -                               # 'explicit-properties' are given, and -                               # explicit-properties or parents otherwise. -    : explicit-properties *    # Explicit properties. -    ) -{ -    local parents ; -    if ! $(explicit-properties) -    { -        if $(parents-or-properties[1]:G) -        { -            explicit-properties = $(parents-or-properties) ; -        } -        else -        { -            parents = $(parents-or-properties) ; -        } -    } -    else -    { -        parents = $(parents-or-properties) ; -    } - -    # The problem is that we have to check for conflicts between base variants. -    if $(parents[2]) -    { -        errors.error "multiple base variants are not yet supported" ; -    } - -    local inherited ; -    # Add explicitly specified properties for parents. -    for local p in $(parents) -    { -        # TODO: This check may be made stricter. -        if ! 
[ feature.is-implicit-value $(p) ] -        { -            errors.error "Invalid base variant" $(p)  ; -        } - -        inherited += $(.explicit-properties.$(p)) ; -    } -    property.validate $(explicit-properties) ; -    explicit-properties = [ property.refine $(inherited) -        : $(explicit-properties) ] ; - -    # Record explicitly specified properties for this variant. We do this after -    # inheriting parents' properties so they affect other variants derived from -    # this one. -    .explicit-properties.$(name) = $(explicit-properties) ; - -    feature.extend variant : $(name) ; -    feature.compose <variant>$(name) : $(explicit-properties) ; -} -IMPORT $(__name__) : variant : : variant ; - - -variant debug   : <optimization>off <debug-symbols>on <inlining>off -                  <runtime-debugging>on ; -variant release : <optimization>speed <debug-symbols>off <inlining>full -                  <runtime-debugging>off <define>NDEBUG ; -variant profile : release : <profiling>on <debug-symbols>on ; - - -class searched-lib-target : abstract-file-target -{ -    rule __init__ ( name -        : project -        : shared ? -        : search * -        : action -    ) -    { -        abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project) -          : $(action) : ; - -        self.shared = $(shared) ; -        self.search = $(search) ; -    } - -    rule shared ( ) -    { -        return $(self.shared) ; -    } - -    rule search ( ) -    { -        return $(self.search) ; -    } - -    rule actualize-location ( target ) -    { -        NOTFILE $(target) ; -    } - -    rule path ( ) -    { -    } -} - - -# The generator class for libraries (target type LIB). Depending on properties -# it will request building of the appropriate specific library type -- -# -- SHARED_LIB, STATIC_LIB or SHARED_LIB. -# -class lib-generator : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        # The lib generator is composing, and can be only invoked with an -        # explicit name. This check is present in generator.run (and so in -        # builtin.linking-generator) but duplicated here to avoid doing extra -        # work. -        if $(name) -        { -            local properties = [ $(property-set).raw ] ; -            # Determine the needed target type. -            local actual-type ; -                # <source>files can be generated by <conditional>@rule feature -                # in which case we do not consider it a SEARCHED_LIB type. -            if ! <source> in $(properties:G) && -               ( <search> in $(properties:G) || <name> in $(properties:G) ) -            { -                actual-type = SEARCHED_LIB ; -            } -            else if <file> in $(properties:G) -            { -                actual-type = LIB ; -            } -            else if <link>shared in $(properties) -            { -                actual-type = SHARED_LIB ; -            } -            else -            { -                actual-type = STATIC_LIB ; -            } -            property-set = [ $(property-set).add-raw <main-target-type>LIB ] ; -            # Construct the target. 
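# For reference, the type selection above corresponds to 'lib' declarations
# such as the following (hypothetical Jamfile lines; names and paths are
# made up):
#
#     lib z     : : <name>z <search>/usr/lib ;   # SEARCHED_LIB
#     lib png   : : <file>libpng.a ;             # LIB (prebuilt)
#     lib util  : util.cpp : <link>shared ;      # SHARED_LIB
#     lib util2 : util.cpp : <link>static ;      # STATIC_LIB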
-            return [ generators.construct $(project) $(name) : $(actual-type) -                : $(property-set) : $(sources) ] ; -        } -    } - -    rule viable-source-types ( ) -    { -        return * ; -    } -} - - -generators.register [ new lib-generator builtin.lib-generator :  : LIB ] ; - - -# The implementation of the 'lib' rule. Beyond standard syntax that rule allows -# simplified: "lib a b c ;". -# -rule lib ( names + : sources * : requirements * : default-build * : -    usage-requirements * ) -{ -    if $(names[2]) -    { -        if <name> in $(requirements:G) -        { -            errors.user-error "When several names are given to the 'lib' rule" : -                "it is not allowed to specify the <name> feature." ; -        } -        if $(sources) -        { -            errors.user-error "When several names are given to the 'lib' rule" : -                "it is not allowed to specify sources." ; -        } -    } - -    # This is a circular module dependency so it must be imported here. -    import targets ; - -    local project = [ project.current ] ; -    local result ; - -    for local name in $(names) -    { -        local r = $(requirements) ; -        # Support " lib a ; " and " lib a b c ; " syntax. -        if ! $(sources) && ! <name> in $(requirements:G) -                        && ! <file> in $(requirements:G) -        { -            r += <name>$(name) ; -        } -        result += [ targets.main-target-alternative -            [ new typed-target $(name) : $(project) : LIB -                : [ targets.main-target-sources $(sources) : $(name) ] -                : [ targets.main-target-requirements $(r) : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -                : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] -            ] ] ; -    } -    return $(result) ; -} -IMPORT $(__name__) : lib : : lib ; - - -class searched-lib-generator : generator -{ -    import property-set ; - -    rule __init__ ( ) -    { -        # The requirements cause the generators to be tried *only* when we're -        # building a lib target with a 'search' feature. This seems ugly --- all -        # we want is to make sure searched-lib-generator is not invoked deep -        # inside transformation search to produce intermediate targets. -        generator.__init__ searched-lib-generator : : SEARCHED_LIB ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        if $(name) -        { -            # If 'name' is empty, it means we have not been called to build a -            # top-level target. In this case, we just fail immediately, because -            # searched-lib-generator cannot be used to produce intermediate -            # targets. - -            local properties = [ $(property-set).raw ] ; -            local shared ; -            if <link>shared in $(properties) -            { -                shared = true ; -            } - -            local search = [ feature.get-values <search> : $(properties) ] ; - -            local a = [ new null-action $(property-set) ] ; -            local lib-name = [ feature.get-values <name> : $(properties) ] ; -            lib-name ?= $(name) ; -            local t = [ new searched-lib-target $(lib-name) : $(project) -                : $(shared) : $(search) : $(a) ] ; -            # We return sources for a simple reason. 
If there is -            #    lib png : z : <name>png ; -            # the 'z' target should be returned, so that apps linking to 'png' -            # will link to 'z', too. -            return [ property-set.create <xdll-path>$(search) ] -                   [ virtual-target.register $(t) ] $(sources) ; -        } -    } -} - -generators.register [ new searched-lib-generator ] ; - - -class prebuilt-lib-generator : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        local f = [ $(property-set).get <file> ] ; -        return $(f) $(sources) ; -    } -} - -generators.register -  [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ; - -generators.override builtin.prebuilt : builtin.lib-generator ; - -class preprocessed-target-class : basic-target -{ -    import generators ; -    rule construct ( name : sources * : property-set ) -    { -        local result = [ generators.construct [ project ] -            $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ; -        if ! $(result) -        { -            result = [ generators.construct [ project ] -                $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ; -        } -        if ! $(result) -        { -            local s ; -            for x in $(sources) -            { -                s += [ $(x).name ] ; -            } -            local p = [ project ] ; -            errors.user-error -                "In project" [ $(p).name ] : -                "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ; -        } -        return $(result) ; -    } -} - -rule preprocessed ( name : sources * : requirements * : default-build * : -    usage-requirements * ) -{ -    local project = [ project.current ] ; -    return [ targets.main-target-alternative -        [ new preprocessed-target-class $(name) : $(project) -            : [ targets.main-target-sources $(sources) : $(name) ] -            : [ targets.main-target-requirements $(r) : $(project) ] -            : [ targets.main-target-default-build $(default-build) : $(project) ] -            : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] -        ] ] ; -} - -IMPORT $(__name__) : preprocessed : : preprocessed ; - -class compile-action : action -{ -    import sequence ; - -    rule __init__ ( targets * : sources * : action-name : properties * ) -    { -        action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ; -    } - -    # For all virtual targets for the same dependency graph as self, i.e. which -    # belong to the same main target, add their directories to the include path. -    # -    rule adjust-properties ( property-set ) -    { -        local s = [ $(self.targets[1]).creating-subvariant ] ; -        return [ $(property-set).add-raw -          [ $(s).implicit-includes "include" : H ] ] ; -    } -} - - -# Declare a special compiler generator. The only thing it does is changing the -# type used to represent 'action' in the constructed dependency graph to -# 'compile-action'. That class in turn adds additional include paths to handle -# cases when a source file includes headers which are generated themselves. 
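# A typical case this covers (sketch; the target names and the @generate-config
# rule are made up): a header produced by another main target is referenced via
# <implicit-dependency>, and the compile action adds its directory to the
# include path:
#
#     make config.h : config.h.in : @generate-config ;
#     exe app : app.cpp : <implicit-dependency>config.h ;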
-# -class C-compiling-generator : generator -{ -    rule __init__ ( id : source-types + : target-types + : requirements * -        : optional-properties * ) -    { -        generator.__init__ $(id) : $(source-types) : $(target-types) : -            $(requirements) : $(optional-properties) ; -    } - -    rule action-class ( ) -    { -        return compile-action ; -    } -} - - -rule register-c-compiler ( id : source-types + : target-types + : requirements * -    : optional-properties * ) -{ -    generators.register [ new C-compiling-generator $(id) : $(source-types) : -        $(target-types) : $(requirements) : $(optional-properties) ] ; -} - -# FIXME: this is ugly, should find a better way (we would like client code to -# register all generators as "generators.some-rule" instead of -# "some-module.some-rule".) -# -IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ; - - -# The generator class for handling EXE and SHARED_LIB creation. -# -class linking-generator : generator -{ -    import path ; -    import project ; -    import property-set ; -    import type ; - -    rule __init__ ( id -        composing ?    :  # The generator will be composing if a non-empty -                          # string is passed or the parameter is not given. To -                          # make the generator non-composing, pass an empty -                          # string (""). -        source-types + : -        target-types + : -        requirements * ) -    { -        composing ?= true ; -        generator.__init__ $(id) $(composing) : $(source-types) -            : $(target-types) : $(requirements) ; -    } - -    rule run ( project name ? : property-set : sources + ) -    { -        sources += [ $(property-set).get <library>  ] ; - -        # Add <library-path> properties for all searched libraries. -        local extra ; -        for local s in $(sources) -        { -            if [ $(s).type ] = SEARCHED_LIB -            { -                local search = [ $(s).search ] ; -                extra += <library-path>$(search) ; -            } -        } - -        # It is possible that sources include shared libraries that did not came -        # from 'lib' targets, e.g. .so files specified as sources. In this case -        # we have to add extra dll-path properties and propagate extra xdll-path -        # properties so that application linking to us will get xdll-path to -        # those libraries. -        local extra-xdll-paths ; -        for local s in $(sources) -        { -            if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ] -            { -                # Unfortunately, we do not have a good way to find the path to a -                # file, so use this nasty approach. -                # -                # TODO: This needs to be done better. One thing that is really -                # broken with this is that it does not work correctly with -                # projects having multiple source locations. -                local p = [ $(s).project ] ; -                local location = [ path.root [ $(s).name ] -                    [ $(p).get source-location ] ] ; -                extra-xdll-paths += [ path.parent $(location) ] ; -            } -        } - -        # Hardcode DLL paths only when linking executables. -        # Pros: do not need to relink libraries when installing. -        # Cons: "standalone" libraries (plugins, python extensions) can not -        # hardcode paths to dependent libraries. 
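# For example (hypothetical targets), an executable can opt out of hardcoded
# run-time library paths through its requirements; the default for
# <hardcode-dll-paths> is 'true':
#
#     exe app : app.cpp mylib : <hardcode-dll-paths>false ;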
-        if [ $(property-set).get <hardcode-dll-paths> ] = true
-            && [ type.is-derived $(self.target-types[1]) EXE ]
-        {
-            local xdll-path = [ $(property-set).get <xdll-path> ] ;
-            extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
-        }
-
-        if $(extra)
-        {
-            property-set = [ $(property-set).add-raw $(extra) ] ;
-        }
-
-        local result = [ generator.run $(project) $(name) : $(property-set)
-            : $(sources) ] ;
-
-        local ur ;
-        if $(result)
-        {
-            ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
-            ur = [ $(ur).add
-              [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
-        }
-        return $(ur) $(result) ;
-    }
-
-    rule extra-usage-requirements ( created-targets * : property-set )
-    {
-        local result = [ property-set.empty ] ;
-        local extra ;
-
-        # Add appropriate <xdll-path> usage requirements.
-        local raw = [ $(property-set).raw ] ;
-        if <link>shared in $(raw)
-        {
-            local paths ;
-            local pwd = [ path.pwd ] ;
-            for local t in $(created-targets)
-            {
-                if [ type.is-derived [ $(t).type ] SHARED_LIB ]
-                {
-                    paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
-                }
-            }
-            extra += $(paths:G=<xdll-path>) ;
-        }
-
-        # We need to pass <xdll-path> features that we've got from sources,
-        # because if a shared library is built, an exe using it needs to know
-        # paths to other shared libraries this one depends on in order to be
-        # able to find them all at runtime.
-
-        # Just pass all features in property-set, it is theoretically possible
-        # that we will propagate <xdll-path> features explicitly specified by
-        # the user, but then the user is to blame for using an internal feature.
-        local values = [ $(property-set).get <xdll-path> ] ;
-        extra += $(values:G=<xdll-path>) ;
-
-        if $(extra)
-        {
-            result = [ property-set.create $(extra) ] ;
-        }
-        return $(result) ;
-    }
-
-    rule generated-targets ( sources + : property-set : project name ? )
-    {
-        local sources2 ;     # Sources to pass to inherited rule.
-        local properties2 ;  # Properties to pass to inherited rule.
-        local libraries ;    # Library sources.
-
-        # Searched libraries are not passed as arguments to the linker but via
-        # some option. So, we pass them to the action using a property.
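# Sketch of the result: if the sources contained searched libraries "z" and
# "m" (shared) and "png" (static) -- names chosen only for illustration --
# the action's property set gains
#
#     <find-shared-library>z&&m <find-static-library>png
#
# which toolset link actions then turn into the corresponding linker options.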
-        properties2 = [ $(property-set).raw ] ; -        local fsa ; -        local fst ; -        for local s in $(sources) -        { -            if [ type.is-derived [ $(s).type ] SEARCHED_LIB ] -            { -                local name = [ $(s).name ] ; -                if [ $(s).shared ] -                { -                    fsa += $(name) ; -                } -                else -                { -                    fst += $(name) ; -                } -            } -            else -            { -                sources2 += $(s) ; -            } -        } -        properties2 += <find-shared-library>$(fsa:J=&&) -                       <find-static-library>$(fst:J=&&) ; - -        return [ generator.generated-targets $(sources2) -            : [ property-set.create $(properties2) ] : $(project) $(name) ] ; -    } -} - - -rule register-linker ( id composing ? : source-types + : target-types + -    : requirements * ) -{ -    generators.register [ new linking-generator $(id) $(composing) -        : $(source-types) : $(target-types) : $(requirements) ] ; -} - - -# The generator class for handling STATIC_LIB creation. -# -class archive-generator : generator -{ -    import property-set ; - -    rule __init__ ( id composing ? : source-types + : target-types + -        : requirements * ) -    { -        composing ?= true ; -        generator.__init__ $(id) $(composing) : $(source-types) -            : $(target-types) : $(requirements) ; -    } - -    rule run ( project name ? : property-set : sources + ) -    { -        sources += [ $(property-set).get <library>  ] ; - -        local result = [ generator.run $(project) $(name) : $(property-set) -            : $(sources) ] ; - -        # For static linking, if we get a library in source, we can not directly -        # link to it so we need to cause our dependencies to link to that -        # library. There are two approaches: -        # - adding the library to the list of returned targets. -        # - using the <library> usage requirements. -        # The problem with the first is: -        # -        #     lib a1 : : <file>liba1.a ; -        #     lib a2 : a2.cpp a1 : <link>static ; -        #     install dist : a2 ; -        # -        # here we will try to install 'a1', even though it is not necessary in -        # the general case. With the second approach, even indirect dependants -        # will link to the library, but it should not cause any harm. So, return -        # all LIB sources together with created targets, so that dependants link -        # to them. -        local usage-requirements ; -        if [ $(property-set).get <link> ] = static -        { -            for local t in $(sources) -            { -                if [ type.is-derived [ $(t).type ] LIB ] -                { -                    usage-requirements += <library>$(t) ; -                } -            } -        } - -        usage-requirements = [ property-set.create $(usage-requirements) ] ; - -        return $(usage-requirements) $(result) ; -    } -} - - -rule register-archiver ( id composing ? : source-types + : target-types + -    : requirements * ) -{ -    generators.register [ new archive-generator $(id) $(composing) -        : $(source-types) : $(target-types) : $(requirements) ] ; -} - - -# Generator that accepts everything and produces nothing. Useful as a general -# fallback for toolset-specific actions like PCH generation. -# -class dummy-generator : generator -{ -    import property-set ; - -    rule run ( project name ? 
: property-set : sources + ) -    { -        return [ property-set.empty ] ; -    } -} - -IMPORT $(__name__) : register-linker register-archiver -    : : generators.register-linker generators.register-archiver ; diff --git a/jam-files/boost-build/tools/builtin.py b/jam-files/boost-build/tools/builtin.py deleted file mode 100644 index 31a7bffe..00000000 --- a/jam-files/boost-build/tools/builtin.py +++ /dev/null @@ -1,718 +0,0 @@ -# Status: minor updates by Steven Watanabe to make gcc work -# -#  Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -#  distribute this software is granted provided this copyright notice appears in -#  all copies. This software is provided "as is" without express or implied -#  warranty, and with no claim as to its suitability for any purpose. - -""" Defines standard features and rules. -""" - -import b2.build.targets as targets - -import sys -from b2.build import feature, property, virtual_target, generators, type, property_set, scanner -from b2.util.utility import * -from b2.util import path, regex, bjam_signature -import b2.tools.types -from b2.manager import get_manager - - -# Records explicit properties for a variant. -# The key is the variant name. -__variant_explicit_properties = {} - -def reset (): -    """ Clear the module state. This is mainly for testing purposes. -    """ -    global __variant_explicit_properties - -    __variant_explicit_properties = {} - -@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"])) -def variant (name, parents_or_properties, explicit_properties = []): -    """ Declares a new variant. -        First determines explicit properties for this variant, by -        refining parents' explicit properties with the passed explicit -        properties. The result is remembered and will be used if -        this variant is used as parent. -         -        Second, determines the full property set for this variant by -        adding to the explicit properties default values for all properties  -        which neither present nor are symmetric. -         -        Lastly, makes appropriate value of 'variant' property expand -        to the full property set. -        name:                   Name of the variant -        parents_or_properties:  Specifies parent variants, if  -                                'explicit_properties' are given, -                                and explicit_properties otherwise. -        explicit_properties:    Explicit properties. -    """ -    parents = [] -    if not explicit_properties: -        explicit_properties = parents_or_properties -    else: -        parents = parents_or_properties -     -    inherited = property_set.empty() -    if parents: - -        # If we allow multiple parents, we'd have to to check for conflicts -        # between base variants, and there was no demand for so to bother. 
-        if len (parents) > 1: -            raise BaseException ("Multiple base variants are not yet supported") -         -        p = parents[0] -        # TODO: the check may be stricter -        if not feature.is_implicit_value (p): -            raise BaseException ("Invalid base varaint '%s'" % p) -         -        inherited = __variant_explicit_properties[p] - -    explicit_properties = property_set.create_with_validation(explicit_properties) -    explicit_properties = inherited.refine(explicit_properties) -     -    # Record explicitly specified properties for this variant -    # We do this after inheriting parents' properties, so that -    # they affect other variants, derived from this one. -    __variant_explicit_properties[name] = explicit_properties -            -    feature.extend('variant', [name]) -    feature.compose ("<variant>" + name, explicit_properties.all()) - -__os_names = """ -    amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd -    openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware -    vms windows -""".split() - -# Translates from bjam current OS to the os tags used in host-os and target-os, -# i.e. returns the running host-os. -# -def default_host_os(): -    host_os = os_name() -    if host_os not in (x.upper() for x in __os_names): -        if host_os == 'NT': host_os = 'windows' -        elif host_os == 'AS400': host_os = 'unix' -        elif host_os == 'MINGW': host_os = 'windows' -        elif host_os == 'BSDI': host_os = 'bsd' -        elif host_os == 'COHERENT': host_os = 'unix' -        elif host_os == 'DRAGONFLYBSD': host_os = 'bsd' -        elif host_os == 'IRIX': host_os = 'sgi' -        elif host_os == 'MACOSX': host_os = 'darwin' -        elif host_os == 'KFREEBSD': host_os = 'freebsd' -        elif host_os == 'LINUX': host_os = 'linux' -        else: host_os = 'unix' -    return host_os.lower() - -def register_globals (): -    """ Registers all features and variants declared by this module. -    """ - -    # This feature is used to determine which OS we're on. -    # In future, this may become <target-os> and <host-os> -    # TODO: check this. Compatibility with bjam names? Subfeature for version? -    os = sys.platform -    feature.feature ('os', [os], ['propagated', 'link-incompatible']) - - -    # The two OS features define a known set of abstract OS names. The host-os is -    # the OS under which bjam is running. Even though this should really be a fixed -    # property we need to list all the values to prevent unknown value errors. Both -    # set the default value to the current OS to account for the default use case of -    # building on the target OS. 
-    feature.feature('host-os', __os_names) -    feature.set_default('host-os', default_host_os()) - -    feature.feature('target-os', __os_names, ['propagated', 'link-incompatible']) -    feature.set_default('target-os', default_host_os()) -     -    feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric']) -     -    feature.feature ('stdlib', ['native'], ['propagated', 'composite']) -     -    feature.feature ('link', ['shared', 'static'], ['propagated']) -    feature.feature ('runtime-link', ['shared', 'static'], ['propagated']) -    feature.feature ('runtime-debugging', ['on', 'off'], ['propagated']) -     -     -    feature.feature ('optimization',  ['off', 'speed', 'space'], ['propagated']) -    feature.feature ('profiling', ['off', 'on'], ['propagated']) -    feature.feature ('inlining', ['off', 'on', 'full'], ['propagated']) -     -    feature.feature ('threading', ['single', 'multi'], ['propagated']) -    feature.feature ('rtti', ['on', 'off'], ['propagated']) -    feature.feature ('exception-handling', ['on', 'off'], ['propagated']) -    feature.feature ('debug-symbols', ['on', 'off'], ['propagated']) -    feature.feature ('define', [], ['free']) -    feature.feature ('include', [], ['free', 'path']) #order-sensitive -    feature.feature ('cflags', [], ['free']) -    feature.feature ('cxxflags', [], ['free']) -    feature.feature ('linkflags', [], ['free']) -    feature.feature ('archiveflags', [], ['free']) -    feature.feature ('version', [], ['free']) -     -    feature.feature ('location-prefix', [], ['free']) - -    feature.feature ('action', [], ['free']) - -     -    # The following features are incidental, since -    # in themself they have no effect on build products. -    # Not making them incidental will result in problems in corner -    # cases, for example: -    #  -    #    unit-test a : a.cpp : <use>b ; -    #    lib b : a.cpp b ; -    #  -    # Here, if <use> is not incidental, we'll decide we have two  -    # targets for a.obj with different properties, and will complain. -    # -    # Note that making feature incidental does not mean it's ignored. It may -    # be ignored when creating the virtual target, but the rest of build process -    # will use them. -    feature.feature ('use', [], ['free', 'dependency', 'incidental']) -    feature.feature ('dependency', [], ['free', 'dependency', 'incidental']) -    feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental']) - -    feature.feature('warnings', [ -        'on',         # Enable default/"reasonable" warning level for the tool. -        'all',        # Enable all possible warnings issued by the tool. -        'off'],       # Disable all warnings issued by the tool. -        ['incidental', 'propagated']) - -    feature.feature('warnings-as-errors', [ -        'off',        # Do not fail the compilation if there are warnings. -        'on'],        # Fail the compilation if there are warnings. -        ['incidental', 'propagated']) -     -    feature.feature ('source', [], ['free', 'dependency', 'incidental']) -    feature.feature ('library', [], ['free', 'dependency', 'incidental']) -    feature.feature ('file', [], ['free', 'dependency', 'incidental']) -    feature.feature ('find-shared-library', [], ['free']) #order-sensitive ; -    feature.feature ('find-static-library', [], ['free']) #order-sensitive ; -    feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ; -    # Internal feature. 
-    feature.feature ('library-file', [], ['free', 'dependency']) -     -    feature.feature ('name', [], ['free']) -    feature.feature ('tag', [], ['free']) -    feature.feature ('search', [], ['free', 'path']) #order-sensitive ; -    feature.feature ('location', [], ['free', 'path']) -     -    feature.feature ('dll-path', [], ['free', 'path']) -    feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental']) -     -     -    # This is internal feature which holds the paths of all dependency -    # dynamic libraries. On Windows, it's needed so that we can all -    # those paths to PATH, when running applications. -    # On Linux, it's needed to add proper -rpath-link command line options. -    feature.feature ('xdll-path', [], ['free', 'path']) -     -    #provides means to specify def-file for windows dlls. -    feature.feature ('def-file', [], ['free', 'dependency']) -     -    # This feature is used to allow specific generators to run. -    # For example, QT tools can only be invoked when QT library -    # is used. In that case, <allow>qt will be in usage requirement -    # of the library. -    feature.feature ('allow', [], ['free']) -     -    # The addressing model to generate code for. Currently a limited set only -    # specifying the bit size of pointers. -    feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional']) - -    # Type of CPU architecture to compile for. -    feature.feature('architecture', [ -        # x86 and x86-64 -        'x86', - -        # ia64 -        'ia64', - -        # Sparc -        'sparc', - -        # RS/6000 & PowerPC -        'power', - -        # MIPS/SGI -        'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64', - -        # HP/PA-RISC -        'parisc', -         -        # Advanced RISC Machines -        'arm', - -        # Combined architectures for platforms/toolsets that support building for -        # multiple architectures at once. "combined" would be the default multi-arch -        # for the toolset. -        'combined', -        'combined-x86-power'], - -        ['propagated', 'optional']) - -    # The specific instruction set in an architecture to compile. 
-    feature.feature('instruction-set', [ -        # x86 and x86-64 -        'i386', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3', -        'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'conroe', 'conroe-xe', -        'conroe-l', 'allendale', 'mermon', 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale', -        'yorksfield', 'nehalem', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp', -        'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'winchip-c6', 'winchip2', 'c3', 'c3-2', - -        # ia64 -        'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley', - -        # Sparc -        'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934', -        'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3', - -        # RS/6000 & PowerPC -        '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602', -        '603', '603e', '604', '604e', '620', '630', '740', '7400', -        '7450', '750', '801', '821', '823', '860', '970', '8540', -        'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2', -        'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios', -        'rios1', 'rsc', 'rios2', 'rs64a', - -        # MIPS -        '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000', -        'r4100', 'r4300', 'r4400', 'r4600', 'r4650', -        'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100', -        'vr4111', 'vr4120', 'vr4130', 'vr4300', -        'vr5000', 'vr5400', 'vr5500', - -        # HP/PA-RISC -        '700', '7100', '7100lc', '7200', '7300', '8000', -         -        # Advanced RISC Machines -        'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5', -        'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'], - -        ['propagated', 'optional']) - -    feature.feature('conditional', [], ['incidental', 'free']) - -    # The value of 'no' prevents building of a target. 
-    feature.feature('build', ['yes', 'no'], ['optional']) -     -    # Windows-specific features -    feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], []) -    feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric']) - - -    variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on']) -    variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',  -                         '<runtime-debugging>off', '<define>NDEBUG']) -    variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on']) - -    type.register ('H', ['h']) -    type.register ('HPP', ['hpp'], 'H') -    type.register ('C', ['c']) -     - -reset () -register_globals () - -class SearchedLibTarget (virtual_target.AbstractFileTarget): -    def __init__ (self, name, project, shared, real_name, search, action): -        virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action) -         -        self.shared_ = shared -        self.real_name_ = real_name -        if not self.real_name_: -            self.real_name_ = name -        self.search_ = search - -    def shared (self): -        return self.shared_ -     -    def real_name (self): -        return self.real_name_ -     -    def search (self): -        return self.search_ -         -    def actualize_location (self, target): -        bjam.call("NOTFILE", target) -     -    def path (self): -        #FIXME: several functions rely on this not being None -        return "" - - -class CScanner (scanner.Scanner): -    def __init__ (self, includes): -        scanner.Scanner.__init__ (self) -     -        self.includes_ = includes - -    def pattern (self): -        return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")' - -    def process (self, target, matches, binding): -        -        angle = regex.transform (matches, "<(.*)>") -        quoted = regex.transform (matches, '"(.*)"') - -        g = str(id(self)) -        b = os.path.normpath(os.path.dirname(binding[0])) -         -        # Attach binding of including file to included targets. -        # When target is directly created from virtual target -        # this extra information is unnecessary. But in other -        # cases, it allows to distinguish between two headers of the  -        # same name included from different places.       -        # We don't need this extra information for angle includes, -        # since they should not depend on including file (we can't -        # get literal "." in include path). -        g2 = g + "#" + b - -        g = "<" + g + ">" -        g2 = "<" + g2 + ">" -        angle = [g + x for x in angle] -        quoted = [g2 + x for x in quoted] - -        all = angle + quoted -        bjam.call("mark-included", target, all) - -        engine = get_manager().engine() -        engine.set_target_variable(angle, "SEARCH", get_value(self.includes_)) -        engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_)) -         -        # Just propagate current scanner to includes, in a hope -        # that includes do not change scanners.  -        get_manager().scanners().propagate(self, angle + quoted) -         -scanner.register (CScanner, 'include') -type.set_scanner ('CPP', CScanner) -type.set_scanner ('C', CScanner) - -# Ported to trunk@47077 -class LibGenerator (generators.Generator): -    """ The generator class for libraries (target type LIB). 
-        Depending on properties it will request building of the appropriate
-        specific type -- SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
-    """
-
-    def __init__(self, id = 'LibGenerator', composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
-        generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
-
-    def run(self, project, name, prop_set, sources):
-
-        # The lib generator is composing, and can only be invoked with an
-        # explicit name. This check is present in generator.run (and so in
-        # builtin.LinkingGenerator), but duplicate it here to avoid doing
-        # extra work.
-        if name:
-            properties = prop_set.raw()
-            # Determine the needed target type.
-            actual_type = None
-            properties_grist = get_grist(properties)
-            if '<source>' not in properties_grist and \
-               ('<search>' in properties_grist or '<name>' in properties_grist):
-                actual_type = 'SEARCHED_LIB'
-            elif '<file>' in properties_grist:
-                # A prebuilt library specified via <file>.
-                actual_type = 'LIB'
-            elif '<link>shared' in properties:
-                actual_type = 'SHARED_LIB'
-            else:
-                actual_type = 'STATIC_LIB'
-
-            prop_set = prop_set.add_raw(['<main-target-type>LIB'])
-
-            # Construct the target.
-            return generators.construct(project, name, actual_type, prop_set, sources)
-
-    def viable_source_types(self):
-        return ['*']
-
-generators.register(LibGenerator())
-
-def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
-    """The implementation of the 'lib' rule. Beyond standard syntax that rule allows
-    simplified: 'lib a b c ;'."""
-
-    if len(names) > 1:
-        if any(r.startswith('<name>') for r in requirements):
-            get_manager().errors()("When several names are given to the 'lib' rule\n" +
-                                   "it is not allowed to specify the <name> feature.")
-
-        if sources:
-            get_manager().errors()("When several names are given to the 'lib' rule\n" +
-                                   "it is not allowed to specify sources.")
-
-    project = get_manager().projects().current()
-    result = []
-
-    for name in names:
-        r = requirements[:]
-
-        # Support " lib a ; " and " lib a b c ; " syntax.
-        if not sources and not any(r.startswith("<name>") for r in requirements) \
-           and not any(r.startswith("<file") for r in requirements):
-            r.append("<name>" + name)
-
-        result.append(targets.create_typed_metatarget(name, "LIB", sources,
-                                                      r,
-                                                      default_build,
-                                                      usage_requirements))
-    return result
-
-get_manager().projects().add_rule("lib", lib)
-
-
-# Updated to trunk@47077
-class SearchedLibGenerator (generators.Generator):
-    def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
-        # TODO: the comment below looks strange. There are no requirements!
-        # The requirements cause the generators to be tried *only* when we're
-        # building a lib target with a 'search' feature. This seems ugly --- all
-        # we want is to make sure SearchedLibGenerator is not invoked deep
-        # inside a transformation search.
-        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
-    def run(self, project, name, prop_set, sources):
-
-        if not name:
-            # If 'name' is empty, we were not called to build a top-level
-            # target. In this case we fail immediately, because
-            # SearchedLibGenerator cannot be used to produce intermediate
-            # targets.
-            return None
-
-        properties = prop_set.raw ()
-        shared = '<link>shared' in properties
-
-        a = virtual_target.NullAction (project.manager(), prop_set)
-
-        real_name = feature.get_values ('<name>', properties)
-        if real_name:
-            real_name = real_name[0]
-        else:
-            real_name = name
-        search = feature.get_values('<search>', properties)
-        usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
-        t = SearchedLibTarget(name, project, shared, real_name, search, a)
-
-        # We return sources for a simple reason. If there is
-        #    lib png : z : <name>png ;
-        # the 'z' target should be returned, so that apps linking to
-        # 'png' will link to 'z', too.
-        return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
-
-generators.register (SearchedLibGenerator ())
-
-### class prebuilt-lib-generator : generator
-### {
-###     rule __init__ ( * : * )
-###     {
-###         generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-###     }
-###
-###     rule run ( project name ? : prop_set : sources * : multiple ? )
-###     {
-###         local f = [ $(prop_set).get <file> ] ;
-###         return $(f) $(sources) ;
-###     }
-### }
-###
-### generators.register
-###   [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
-
-
-class CompileAction (virtual_target.Action):
-    def __init__ (self, manager, sources, action_name, prop_set):
-        virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
-
-    def adjust_properties (self, prop_set):
-        """ For all virtual targets for the same dependency graph as self,
-            i.e. which belong to the same main target, add their directories
-            to the include path.
-        """
-        s = self.targets () [0].creating_subvariant ()
-
-        return prop_set.add_raw (s.implicit_includes ('include', 'H'))
-
-class CCompilingGenerator (generators.Generator):
-    """ Declare a special compiler generator.
-        The only thing it does is changing the type used to represent
-        'action' in the constructed dependency graph to 'CompileAction'.
-        That class in turn adds additional include paths to handle the case
-        when a source file includes headers which are themselves generated.
-    """
-    def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
-        # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
-        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) -             -    def action_class (self): -        return CompileAction - -def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []): -    g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties) -    return generators.register (g) - - -class LinkingGenerator (generators.Generator): -    """ The generator class for handling EXE and SHARED_LIB creation. -    """ -    def __init__ (self, id, composing, source_types, target_types_and_names, requirements): -        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) -         -    def run (self, project, name, prop_set, sources): -        -        lib_sources = prop_set.get('<library>') -        sources.extend(lib_sources) -         -        # Add <library-path> properties for all searched libraries -        extra = [] -        for s in sources: -            if s.type () == 'SEARCHED_LIB': -                search = s.search() -                extra.extend(property.Property('<library-path>', sp) for sp in search) - -        orig_xdll_path = [] -                    -        if prop_set.get('<hardcode-dll-paths>') == ['true'] \ -               and type.is_derived(self.target_types_ [0], 'EXE'): -            xdll_path = prop_set.get('<xdll-path>') -            orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ] -            # It's possible that we have libraries in sources which did not came -            # from 'lib' target. For example, libraries which are specified -            # just as filenames as sources. We don't have xdll-path properties -            # for such target, but still need to add proper dll-path properties. -            for s in sources: -                if type.is_derived (s.type (), 'SHARED_LIB') and not s.action (): -                    # Unfortunately, we don't have a good way to find the path -                    # to a file, so use this nasty approach. -                    p = s.project() -                    location = path.root(s.name(), p.get('source-location')) -                    xdll_path.append(path.parent(location)) -                           -            extra.extend(property.Property('<dll-path>', sp) for sp in xdll_path) -         -        if extra: -            prop_set = prop_set.add_raw (extra) -                         -        result = generators.Generator.run(self, project, name, prop_set, sources) - -        if result: -            ur = self.extra_usage_requirements(result, prop_set) -            ur = ur.add(property_set.create(orig_xdll_path)) -        else: -            return None -         -        return(ur, result) -     -    def extra_usage_requirements (self, created_targets, prop_set): -         -        result = property_set.empty () -        extra = [] -                         -        # Add appropriate <xdll-path> usage requirements. -        raw = prop_set.raw () -        if '<link>shared' in raw: -            paths = [] -             -            # TODO: is it safe to use the current directory? I think we should use  -            # another mechanism to allow this to be run from anywhere. 
-            pwd = os.getcwd() -             -            for t in created_targets: -                if type.is_derived(t.type(), 'SHARED_LIB'): -                    paths.append(path.root(path.make(t.path()), pwd)) - -            extra += replace_grist(paths, '<xdll-path>') -         -        # We need to pass <xdll-path> features that we've got from sources, -        # because if shared library is built, exe which uses it must know paths -        # to other shared libraries this one depends on, to be able to find them -        # all at runtime. -                         -        # Just pass all features in property_set, it's theorically possible -        # that we'll propagate <xdll-path> features explicitly specified by -        # the user, but then the user's to blaim for using internal feature.                 -        values = prop_set.get('<xdll-path>') -        extra += replace_grist(values, '<xdll-path>') -         -        if extra: -            result = property_set.create(extra) - -        return result - -    def generated_targets (self, sources, prop_set, project, name): - -        # sources to pass to inherited rule -        sources2 = [] -        # sources which are libraries -        libraries  = [] -         -        # Searched libraries are not passed as argument to linker -        # but via some option. So, we pass them to the action -        # via property.  -        fsa = [] -        fst = [] -        for s in sources: -            if type.is_derived(s.type(), 'SEARCHED_LIB'): -                n = s.real_name() -                if s.shared(): -                    fsa.append(n) - -                else: -                    fst.append(n) - -            else: -                sources2.append(s) - -        add = [] -        if fsa: -            add.append("<find-shared-library>" + '&&'.join(fsa)) -        if fst: -            add.append("<find-static-library>" + '&&'.join(fst)) - -        spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)        -        return spawn - - -def register_linker(id, source_types, target_types, requirements): -    g = LinkingGenerator(id, True, source_types, target_types, requirements) -    generators.register(g) - -class ArchiveGenerator (generators.Generator): -    """ The generator class for handling STATIC_LIB creation. -    """ -    def __init__ (self, id, composing, source_types, target_types_and_names, requirements): -        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) -         -    def run (self, project, name, prop_set, sources): -        sources += prop_set.get ('<library>') -         -        result = generators.Generator.run (self, project, name, prop_set, sources) -              -        return result - -### rule register-archiver ( id composing ? 
: source_types + : target_types + : -###                             requirements * ) -### { -###     local g = [ new ArchiveGenerator $(id) $(composing) : $(source_types)  -###                 : $(target_types) : $(requirements) ] ; -###     generators.register $(g) ; -### } -###  -###  -### IMPORT $(__name__) : register-linker register-archiver  -###   : : generators.register-linker generators.register-archiver ; -###  -###  -###  - -get_manager().projects().add_rule("variant", variant) - -import stage -import symlink -import message diff --git a/jam-files/boost-build/tools/cast.jam b/jam-files/boost-build/tools/cast.jam deleted file mode 100644 index 6c84922f..00000000 --- a/jam-files/boost-build/tools/cast.jam +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2005 Vladimir Prus. -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target 'cast', used to change type for target. For example, in Qt -# library one wants two kinds of CPP files -- those that just compiled and those -# that are passed via the MOC tool. -# -# This is done with: -# -#    exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ; -# -# Boost.Build will assing target type CPP to both main.cpp and widget.cpp. Then, -# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt -# support will run the MOC tool as part of the build process. -# -# At the moment, the 'cast' rule only works for non-derived (source) targets. -# -# TODO: The following comment is unclear or incorrect. Clean it up. -# > Another solution would be to add a separate main target 'moc-them' that -# > would moc all the passed sources, no matter what their type is, but I prefer -# > cast, as defining a new target type + generator for that type is somewhat -# > simpler than defining a main target rule. - -import "class" : new ; -import errors ; -import project ; -import property-set ; -import targets ; -import type ; - - -class cast-target-class : typed-target -{ -    import type ; - -    rule __init__ ( name : project : type : sources * : requirements * : -        default-build * : usage-requirements * ) -    { -        typed-target.__init__ $(name) : $(project) : $(type) : $(sources) : -            $(requirements) : $(default-build) : $(usage-requirements) ; -    } - -    rule construct ( name : source-targets * : property-set ) -    { -        local result ; -        for local s in $(source-targets) -        { -            if ! [ class.is-a $(s) : file-target ] -            { -                import errors ; -                errors.user-error Source to the 'cast' rule is not a file! ; -            } -            if [ $(s).action ] -            { -                import errors ; -                errors.user-error Only non-derived target are allowed for -                    'cast'. : when building [ full-name ] ; -            } -            local r = [ $(s).clone-with-different-type $(self.type) ] ; -            result += [ virtual-target.register $(r) ] ; -        } -        return [ property-set.empty ] $(result) ; -    } -} - - -rule cast ( name type : sources * : requirements * : default-build * : -    usage-requirements * ) -{ -    local project = [ project.current ] ; - -    local real-type = [ type.type-from-rule-name $(type) ] ; -    if ! 
$(real-type) -    { -        errors.user-error No type corresponds to the main target rule name -            '$(type)' : "Hint: try a lowercase name" ; -    } - -    targets.main-target-alternative [ new cast-target-class $(name) : $(project) -        : $(real-type) -        : [ targets.main-target-sources $(sources) : $(name) ] -        : [ targets.main-target-requirements $(requirements) : $(project) ] -        : [ targets.main-target-default-build $(default-build) : $(project) ] -        : [ targets.main-target-usage-requirements $(usage-requirements) : -            $(project) ] ] ; -} - - -IMPORT $(__name__) : cast : : cast ; diff --git a/jam-files/boost-build/tools/cast.py b/jam-files/boost-build/tools/cast.py deleted file mode 100644 index 8f053f11..00000000 --- a/jam-files/boost-build/tools/cast.py +++ /dev/null @@ -1,69 +0,0 @@ -# Status: ported -# Base revision: 64432. -# Copyright 2005-2010 Vladimir Prus. -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target 'cast', used to change type for target. For example, in Qt -# library one wants two kinds of CPP files -- those that just compiled and those -# that are passed via the MOC tool. -# -# This is done with: -# -#    exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ; -# -# Boost.Build will assing target type CPP to both main.cpp and widget.cpp. Then, -# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt -# support will run the MOC tool as part of the build process. -# -# At the moment, the 'cast' rule only works for non-derived (source) targets. -# -# TODO: The following comment is unclear or incorrect. Clean it up. -# > Another solution would be to add a separate main target 'moc-them' that -# > would moc all the passed sources, no matter what their type is, but I prefer -# > cast, as defining a new target type + generator for that type is somewhat -# > simpler than defining a main target rule. 
- -import b2.build.targets as targets -import b2.build.virtual_target as virtual_target - -from b2.manager import get_manager -from b2.util import bjam_signature - -class CastTargetClass(targets.TypedTarget): - -    def construct(name, source_targets, ps): -        result = [] -        for s in source_targets: -            if not isinstance(s, virtual_targets.FileTarget): -                get_manager().errors()("Source to the 'cast' metatager is not a file") - -            if s.action(): -                get_manager().errors()("Only non-derived targets allowed as sources for 'cast'.") - - -            r = s.clone_with_different_type(self.type()) -            result.append(get_manager().virtual_targets().register(r)) - -        return result -     - -@bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"], -                 ["default_build", "*"], ["usage_requirements", "*"])) -def cast(name, type, sources, requirements, default_build, usage_requirements): -    -    from b2.manager import get_manager -    t = get_manager().targets() -     -    project = get_manager().projects().current() -         -    return t.main_target_alternative( -        CastTargetClass(name, project, type, -                        t.main_target_sources(sources, name), -                        t.main_target_requirements(requirements, project), -                        t.main_target_default_build(default_build, project), -                        t.main_target_usage_requirements(usage_requirements, project))) - - -get_manager().projects().add_rule("cast", cast) diff --git a/jam-files/boost-build/tools/clang-darwin.jam b/jam-files/boost-build/tools/clang-darwin.jam deleted file mode 100644 index a8abc7d6..00000000 --- a/jam-files/boost-build/tools/clang-darwin.jam +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Noel Belcourt 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -import clang ; -import feature : feature ; -import os ; -import toolset ; -import toolset : flags ; -import gcc ; -import common ; -import errors ; -import generators ; - -feature.extend-subfeature toolset clang : platform : darwin ; - -toolset.inherit-generators clang-darwin  -  <toolset>clang <toolset-clang:platform>darwin  -  : gcc  -  # Don't inherit PCH generators. They were not tested, and probably -  # don't work for this compiler. 
-  : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch -  ; - -generators.override clang-darwin.prebuilt : builtin.lib-generator ; -generators.override clang-darwin.prebuilt : builtin.prebuilt ; -generators.override clang-darwin.searched-lib-generator : searched-lib-generator ; - -toolset.inherit-rules clang-darwin : gcc ; -toolset.inherit-flags clang-darwin : gcc  -        : <inlining>off <inlining>on <inlining>full <optimization>space  -          <warnings>off <warnings>all <warnings>on -          <architecture>x86/<address-model>32 -          <architecture>x86/<address-model>64 -        ; -         -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} -                        -# vectorization diagnostics -feature vectorize : off on full ; - -# Initializes the clang-darwin toolset -#   version in optional -#   name (default clang++) is used to invoke the specified clang complier -#   compile and link options allow you to specify addition command line options for each version -rule init ( version ? :  command * : options * ) -{ -    command = [ common.get-invocation-command clang-darwin : clang++  -        : $(command) ] ; - -    # Determine the version -    local command-string = $(command:J=" ") ; -    if $(command) -    {     -        version ?= [ MATCH "^([0-9.]+)" -            : [ SHELL "$(command-string) -dumpversion" ] ] ; -    } - -    local condition = [ common.check-init-parameters clang-darwin -        : version $(version) ] ;     - -    common.handle-options clang-darwin : $(condition) : $(command) : $(options) ; - -    gcc.init-link-flags clang-darwin darwin $(condition) ; - -} - -SPACE = " " ; - -flags clang-darwin.compile OPTIONS <cflags> ; -flags clang-darwin.compile OPTIONS <cxxflags> ; -# flags clang-darwin.compile INCLUDES <include> ; - -# Declare flags and action for compilation. -toolset.flags clang-darwin.compile OPTIONS <optimization>off   : -O0 ; -toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ; - -toolset.flags clang-darwin.compile OPTIONS <inlining>off  : -fno-inline ; -toolset.flags clang-darwin.compile OPTIONS <inlining>on   : -Wno-inline ; -toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ; - -toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ; -toolset.flags clang-darwin.compile OPTIONS <warnings>on  : -Wall ; -toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ; -toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ; - -toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ; -toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ; -toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ; - -actions compile.c -{ -    "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -flags clang-darwin ARFLAGS <archiveflags> ; - -# Default value. Mostly for the sake of clang-linux -# that inherits from gcc, but does not has the same -# logic to set the .AR variable. We can put the same -# logic in clang-linux, but that's hardly worth the trouble -# as on Linux, 'ar' is always available. -.AR = ar ; - -rule archive ( targets * : sources * : properties * ) -{ -  # Always remove archive and start again. 
Here's rationale from -  # Andre Hentz: -  # -  # I had a file, say a1.c, that was included into liba.a.  -  # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.  -  # My program was crashing with absurd errors.  -  # After some debugging I traced it back to the fact that a1.o was *still*  -  # in liba.a  -  # -  # Rene Rivera: -  # -  # Originally removing the archive was done by splicing an RM -  # onto the archive action. That makes archives fail to build on NT -  # when they have many files because it will no longer execute the -  # action directly and blow the line length limit. Instead we -  # remove the file in a different action, just before the building -  # of the archive. -  # -  local clean.a = $(targets[1])(clean) ; -  TEMPORARY $(clean.a) ; -  NOCARE $(clean.a) ; -  LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; -  DEPENDS $(clean.a) : $(sources) ; -  DEPENDS $(targets) : $(clean.a) ; -  common.RmTemps $(clean.a) : $(targets) ; -} - -actions piecemeal archive -{ -  "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" -  "ranlib" -cs "$(<)" -} - -flags clang-darwin.link USER_OPTIONS <linkflags> ; - -# Declare actions for linking -rule link ( targets * : sources * : properties * ) -{ -  SPACE on $(targets) = " " ; -  # Serialize execution of the 'link' action, since -  # running N links in parallel is just slower. -  JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)"  "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} diff --git a/jam-files/boost-build/tools/clang-linux.jam b/jam-files/boost-build/tools/clang-linux.jam deleted file mode 100644 index 036d749e..00000000 --- a/jam-files/boost-build/tools/clang-linux.jam +++ /dev/null @@ -1,196 +0,0 @@ -#  Copyright (c) 2003      Michael Stevens -#  Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer) -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import toolset ; -import feature ; -import toolset : flags ; - -import clang ; -import gcc ; -import common ; -import errors ; -import generators ; -import type ; -import numbers ; - -feature.extend-subfeature toolset clang : platform : linux ; - -toolset.inherit-generators clang-linux  -    <toolset>clang <toolset-clang:platform>linux : gcc -  : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ; -generators.override clang-linux.prebuilt : builtin.lib-generator ; -generators.override clang-linux.prebuilt : builtin.prebuilt ; -generators.override clang-linux.searched-lib-generator : searched-lib-generator ; - -# Override default do-nothing generators. 
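
Both clang toolsets in this excerpt detect the compiler version by running the configured command and matching a version pattern against its output (clang-darwin uses -dumpversion above, clang-linux uses --version below). A minimal Python sketch of that detection step, assuming a clang++ binary is reachable on PATH; the helper name is made up for illustration.

    # Sketch of the version-detection step; detect_clang_version is not a b2 function.
    import re
    import subprocess

    def detect_clang_version(command="clang++"):
        """Run '<command> --version' and pull out a dotted version number, or None."""
        try:
            out = subprocess.run([command, "--version"], capture_output=True,
                                 text=True, check=True).stdout
        except (OSError, subprocess.CalledProcessError):
            return None
        m = re.search(r"version ([0-9.]+)", out)
        return m.group(1) if m else None

    print(detect_clang_version())
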
-generators.override clang-linux.compile.c.pch   : pch.default-c-pch-generator   ; -generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ; -  -type.set-generated-target-suffix PCH -  : <toolset>clang <toolset-clang:platform>linux : pth ; - -toolset.inherit-rules clang-linux : gcc ; -toolset.inherit-flags clang-linux : gcc  -  : <inlining>off <inlining>on <inlining>full -    <optimization>space <optimization>speed -    <warnings>off <warnings>all <warnings>on ; -         -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] { -  .debug-configuration = true ; -} -                        -rule init ( version ? :  command * : options * ) { -  command = [ common.get-invocation-command clang-linux : clang++  -    : $(command) ] ; -                 -  # Determine the version -  local command-string = $(command:J=" ") ; - -  if $(command) {     -    version ?= [ MATCH "version ([0-9.]+)" -      : [ SHELL "$(command-string) --version" ] ] ; -  } - -  local condition = [ common.check-init-parameters clang-linux -    : version $(version) ] ; -     -  common.handle-options clang-linux : $(condition) : $(command) : $(options) ; - -  gcc.init-link-flags clang-linux gnu $(condition) ; -} - -############################################################################### -# Flags - -toolset.flags clang-linux.compile OPTIONS <cflags> ; -toolset.flags clang-linux.compile OPTIONS <cxxflags> ; - -toolset.flags clang-linux.compile OPTIONS <optimization>off   : ; -toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ; - -# note: clang silently ignores some of these inlining options -toolset.flags clang-linux.compile OPTIONS <inlining>off  : -fno-inline ; -toolset.flags clang-linux.compile OPTIONS <inlining>on   : -Wno-inline ; -toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ; - -toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ; -toolset.flags clang-linux.compile OPTIONS <warnings>on  : -Wall ; -toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ; -toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ; - -toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ; -toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ; -toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ; - -############################################################################### -# C and C++ compilation - -rule compile.c++ ( targets * : sources * : properties * ) { -  gcc.setup-threading $(targets) : $(sources) : $(properties) ; -  gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -  gcc.setup-address-model $(targets) : $(sources) : $(properties) ; - -  local pth-file = [ on $(<) return $(PCH_FILE) ] ; - -  if $(pth-file) { -    DEPENDS $(<) : $(pth-file) ; -    compile.c++.with-pch $(targets) : $(sources) ; -  } -  else { -    compile.c++.without-pth $(targets) : $(sources) ; -  } -} - -actions compile.c++.without-pth { -  "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" -} - -actions compile.c++.with-pch bind PCH_FILE -{ -  "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)" -} - -rule compile.c ( targets * : sources * : properties * ) -{ -  gcc.setup-threading $(targets) : $(sources) : $(properties) ; -  gcc.setup-fpic $(targets) : 
$(sources) : $(properties) ; -  gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     - -  local pth-file = [ on $(<) return $(PCH_FILE) ] ; - -  if $(pth-file) { -    DEPENDS $(<) : $(pth-file) ; -    compile.c.with-pch $(targets) : $(sources) ; -  } -  else { -    compile.c.without-pth $(targets) : $(sources) ; -  } -} - -actions compile.c.without-pth -{ -  "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c.with-pch bind PCH_FILE -{ -  "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)" -} - -############################################################################### -# PCH emission - -rule compile.c++.pch ( targets * : sources * : properties * ) { -  gcc.setup-threading $(targets) : $(sources) : $(properties) ; -  gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -  gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -} - -actions compile.c++.pch { -  rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)" -} - -rule compile.c.pch ( targets * : sources * : properties * ) { -  gcc.setup-threading $(targets) : $(sources) : $(properties) ; -  gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -  gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -} - -actions compile.c.pch -{ -  rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)" -} - -############################################################################### -# Linking - -SPACE = " " ; - -rule link ( targets * : sources * : properties * ) { -  gcc.setup-threading $(targets) : $(sources) : $(properties) ; -  gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -  SPACE on $(targets) = " " ; -  JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ; -} - -actions link bind LIBRARIES { -  "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - -rule link.dll ( targets * : sources * : properties * ) { -  gcc.setup-threading $(targets) : $(sources) : $(properties) ; -  gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -  SPACE on $(targets) = " " ; -  JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ; -} - -# Differ from 'link' above only by -shared. -actions link.dll bind LIBRARIES { -  "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)"  "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - diff --git a/jam-files/boost-build/tools/clang.jam b/jam-files/boost-build/tools/clang.jam deleted file mode 100644 index e0ac9a55..00000000 --- a/jam-files/boost-build/tools/clang.jam +++ /dev/null @@ -1,27 +0,0 @@ -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This is a generic 'clang' toolset. Depending on the current system, it -# forwards either to 'clang-unix' or 'clang-darwin' modules. 
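
The init rule below simply forwards to a platform-specific module: clang-darwin on Mac OS X and clang-linux everywhere else (the comment above mentions 'clang-unix', but clang-linux is what is actually used). A hedged Python sketch of the same dispatch pattern; the function is illustrative only and returns the module name as a string rather than invoking any b2 machinery.

    # Dispatch sketch mirroring the rule below; not the actual toolset.using call.
    import platform

    def clang_submodule():
        """Pick the platform-specific clang module, checking for Darwin first."""
        return "clang-darwin" if platform.system() == "Darwin" else "clang-linux"

    print(clang_submodule())
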
- -import feature ; -import os ; -import toolset ; - -feature.extend toolset : clang ; -feature.subfeature toolset clang : platform : : propagated link-incompatible ; - -rule init ( * : * ) -{ -    if [ os.name ] = MACOSX -    { -        toolset.using clang-darwin :  -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -    else -    { -        toolset.using clang-linux :  -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -} diff --git a/jam-files/boost-build/tools/common.jam b/jam-files/boost-build/tools/common.jam deleted file mode 100644 index ed835a36..00000000 --- a/jam-files/boost-build/tools/common.jam +++ /dev/null @@ -1,994 +0,0 @@ -# Copyright 2003, 2005 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2005 Toon Knapen -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -#   Provides actions common to all toolsets, such as creating directories and -# removing files. - -import os ; -import modules ; -import utility ; -import print ; -import type ; -import feature ; -import errors ; -import path ; -import sequence ; -import toolset ; -import virtual-target ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} -if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ] -{ -    .show-configuration = true ; -} - -# Configurations -# -# The following class helps to manage toolset configurations. Each configuration -# has a unique ID and one or more parameters. A typical example of a unique ID -# is a condition generated by 'common.check-init-parameters' rule. Other kinds -# of IDs can be used. Parameters may include any details about the configuration -# like 'command', 'path', etc. -# -# A toolset configuration may be in one of the following states: -# -#   - registered -#       Configuration has been registered (e.g. explicitly or by auto-detection -#       code) but has not yet been marked as used, i.e. 'toolset.using' rule has -#       not yet been called for it. -#   - used -#       Once called 'toolset.using' rule marks the configuration as 'used'. -# -# The main difference between the states above is that while a configuration is -# 'registered' its options can be freely changed. This is useful in particular -# for autodetection code - all detected configurations may be safely overwritten -# by user code. - -class configurations -{ -    import errors ; - -    rule __init__ ( ) -    { -    } - -    # Registers a configuration. -    # -    # Returns 'true' if the configuration has been added and an empty value if -    # it already exists. Reports an error if the configuration is 'used'. -    # -    rule register ( id ) -    { -        if $(id) in $(self.used) -        { -            errors.error "common: the configuration '$(id)' is in use" ; -        } - -        local retval ; - -        if ! $(id) in $(self.all) -        { -            self.all += $(id) ; - -            # Indicate that a new configuration has been added. -            retval = true ; -        } - -        return $(retval) ; -    } - -    # Mark a configuration as 'used'. -    # -    # Returns 'true' if the state of the configuration has been changed to -    # 'used' and an empty value if it the state has not been changed. Reports an -    # error if the configuration is not known. -    # -    rule use ( id ) -    { -        if ! 
$(id) in $(self.all) -        { -            errors.error "common: the configuration '$(id)' is not known" ; -        } - -        local retval ; - -        if ! $(id) in $(self.used) -        { -            self.used += $(id) ; - -            # Indicate that the configuration has been marked as 'used'. -            retval = true ; -        } - -        return $(retval) ; -    } - -    # Return all registered configurations. -    # -    rule all ( ) -    { -        return $(self.all) ; -    } - -    # Return all used configurations. -    # -    rule used ( ) -    { -        return $(self.used) ; -    } - -    # Returns the value of a configuration parameter. -    # -    rule get ( id : param ) -    { -        return $(self.$(param).$(id)) ; -    } - -    # Sets the value of a configuration parameter. -    # -    rule set ( id : param : value * ) -    { -        self.$(param).$(id) = $(value) ; -    } -} - - -# The rule for checking toolset parameters. Trailing parameters should all be -# parameter name/value pairs. The rule will check that each parameter either has -# a value in each invocation or has no value in each invocation. Also, the rule -# will check that the combination of all parameter values is unique in all -# invocations. -# -# Each parameter name corresponds to a subfeature. This rule will declare a -# subfeature the first time a non-empty parameter value is passed and will -# extend it with all the values. -# -# The return value from this rule is a condition to be used for flags settings. -# -rule check-init-parameters ( toolset requirement * : * ) -{ -    local sig = $(toolset) ; -    local condition = <toolset>$(toolset) ; -    local subcondition ; -    for local index in 2 3 4 5 6 7 8 9 -    { -        local name = $($(index)[1]) ; -        local value = $($(index)[2]) ; - -        if $(value)-is-not-empty -        { -            condition = $(condition)-$(value) ; -            if $(.had-unspecified-value.$(toolset).$(name)) -            { -                errors.user-error -                    "$(toolset) initialization: parameter '$(name)'" -                    "inconsistent" : "no value was specified in earlier" -                    "initialization" : "an explicit value is specified now" ; -            } -            # The below logic is for intel compiler. It calls this rule with -            # 'intel-linux' and 'intel-win' as toolset, so we need to get the -            # base part of toolset name. We can not pass 'intel' as toolset -            # because in that case it will be impossible to register versionless -            # intel-linux and intel-win toolsets of a specific version. -            local t = $(toolset) ; -            local m = [ MATCH ([^-]*)- : $(toolset) ] ; -            if $(m) -            { -                t = $(m[1]) ; -            } -            if ! $(.had-value.$(toolset).$(name)) -            { -                if ! 
$(.declared-subfeature.$(t).$(name)) -                { -                    feature.subfeature toolset $(t) : $(name) : : propagated ; -                    .declared-subfeature.$(t).$(name) = true ; -                } -                .had-value.$(toolset).$(name) = true ; -            } -            feature.extend-subfeature toolset $(t) : $(name) : $(value) ; -            subcondition += <toolset-$(t):$(name)>$(value) ; -        } -        else -        { -            if $(.had-value.$(toolset).$(name)) -            { -                errors.user-error -                    "$(toolset) initialization: parameter '$(name)'" -                    "inconsistent" : "an explicit value was specified in an" -                    "earlier initialization" : "no value is specified now" ; -            } -            .had-unspecified-value.$(toolset).$(name) = true ; -        } -        sig = $(sig)$(value:E="")- ; -    } -    if $(sig) in $(.all-signatures) -    { -        local message = -            "duplicate initialization of $(toolset) with the following parameters: " ; -        for local index in 2 3 4 5 6 7 8 9 -        { -            local p = $($(index)) ; -            if $(p) -            { -                message += "$(p[1]) = $(p[2]:E=<unspecified>)" ; -            } -        } -        message += "previous initialization at $(.init-loc.$(sig))" ; -        errors.user-error -            $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) : -            $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ; -    } -    .all-signatures += $(sig) ; -    .init-loc.$(sig) = [ errors.nearest-user-location ] ; - -    # If we have a requirement, this version should only be applied under that -    # condition. To accomplish this we add a toolset requirement that imposes -    # the toolset subcondition, which encodes the version. -    if $(requirement) -    { -        local r = <toolset>$(toolset) $(requirement) ; -        r = $(r:J=,) ; -        toolset.add-requirements $(r):$(subcondition) ; -    } - -    # We add the requirements, if any, to the condition to scope the toolset -    # variables and options to this specific version. -    condition += $(requirement) ; - -    if $(.show-configuration) -    { -        ECHO notice: $(condition) ; -    } -    return $(condition:J=/) ; -} - - -# A helper rule to get the command to invoke some tool. If -# 'user-provided-command' is not given, tries to find binary named 'tool' in -# PATH and in the passed 'additional-path'. Otherwise, verifies that the first -# element of 'user-provided-command' is an existing program. -# -# This rule returns the command to be used when invoking the tool. If we can not -# find the tool, a warning is issued. If 'path-last' is specified, PATH is -# checked after 'additional-paths' when searching for 'tool'. -# -rule get-invocation-command-nodefault ( toolset : tool : -    user-provided-command * : additional-paths * : path-last ? ) -{ -    local command ; -    if ! $(user-provided-command) -    { -        command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ; -        if ! $(command) && $(.debug-configuration) -        { -            ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ; -            ECHO "warning: initialized from" [ errors.nearest-user-location ] ; -        } -    } -    else -    { -        command = [ check-tool $(user-provided-command) ] ; -        if ! 
$(command) && $(.debug-configuration) -        { -            ECHO "warning: toolset $(toolset) initialization: " ; -            ECHO "warning: can not find user-provided command " '$(user-provided-command)' ; -            ECHO "warning: initialized from" [ errors.nearest-user-location ] ; -        } -    } - -    return $(command) ; -} - - -# Same as get-invocation-command-nodefault, except that if no tool is found, -# returns either the user-provided-command, if present, or the 'tool' parameter. -# -rule get-invocation-command ( toolset : tool : user-provided-command * : -    additional-paths * : path-last ? ) -{ -    local result = [ get-invocation-command-nodefault $(toolset) : $(tool) : -        $(user-provided-command) : $(additional-paths) : $(path-last) ] ; - -    if ! $(result) -    { -        if $(user-provided-command) -        { -            result = $(user-provided-command) ; -        } -        else -        { -            result = $(tool) ; -        } -    } -    return $(result) ; -} - - -# Given an invocation command return the absolute path to the command. This -# works even if command has no path element and was found on the PATH. -# -rule get-absolute-tool-path ( command ) -{ -    if $(command:D) -    { -        return $(command:D) ; -    } -    else -    { -        local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ; -        return $(m[1]:D) ; -    } -} - - -# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'. -# If found in PATH, returns 'name' and if found in additional paths, returns -# absolute name. If the tool is found in several directories, returns the -# first path found. Otherwise, returns an empty string. If 'path-last' is -# specified, PATH is searched after 'additional-paths'. -# -local rule find-tool ( name : additional-paths * : path-last ? ) -{ -    local path = [ path.programs-path ] ; -    local match = [ path.glob $(path) : $(name) $(name).exe ] ; -    local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ; - -    local result ; -    if $(path-last) -    { -        result = $(additional-match) ; -        if ! $(result) && $(match) -        { -            result = $(name) ; -        } -    } -    else -    { -        if $(match) -        { -            result = $(name) ; -        } -        else -        { -            result = $(additional-match) ; -        } -    } -    if $(result) -    { -        return [ path.native $(result[1]) ] ; -    } -} - - -# Checks if 'command' can be found either in path or is a full name to an -# existing file. -# -local rule check-tool-aux ( command ) -{ -    if $(command:D) -    { -        if [ path.exists $(command) ] -            # Both NT and Cygwin will run .exe files by their unqualified names. -            || ( [ os.on-windows ] && [ path.exists $(command).exe ] ) -            # Only NT will run .bat & .cmd files by their unqualified names. -            || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] || -                [ path.exists $(command).cmd ] ) ) -        { -            return $(command) ; -        } -    } -    else -    { -        if [ GLOB [ modules.peek : PATH Path path ] : $(command) ] -        { -            return $(command) ; -        } -    } -} - - -# Checks that a tool can be invoked by 'command'. If command is not an absolute -# path, checks if it can be found in 'path'. If comand is an absolute path, -# check that it exists. Returns 'command' if ok or empty string otherwise. 
-# -local rule check-tool ( xcommand + ) -{ -    if [ check-tool-aux $(xcommand[1]) ] || -       [ check-tool-aux $(xcommand[-1]) ] -    { -        return $(xcommand) ; -    } -} - - -# Handle common options for toolset, specifically sets the following flag -# variables: -# - CONFIG_COMMAND to $(command) -# - OPTIONS for compile         to the value of <compileflags> in $(options) -# - OPTIONS for compile.c       to the value of <cflags>       in $(options) -# - OPTIONS for compile.c++     to the value of <cxxflags>     in $(options) -# - OPTIONS for compile.fortran to the value of <fflags>       in $(options) -# - OPTIONS for link            to the value of <linkflags>    in $(options) -# -rule handle-options ( toolset : condition * : command * : options * ) -{ -    if $(.debug-configuration) -    { -        ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ; -    } - -    #   The last parameter ('unchecked') says it is OK to set flags for another -    # module. -    toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command) -        : unchecked ; - -    toolset.flags $(toolset).compile         OPTIONS $(condition) : -        [ feature.get-values <compileflags> : $(options) ] : unchecked ; - -    toolset.flags $(toolset).compile.c       OPTIONS $(condition) : -        [ feature.get-values <cflags>       : $(options) ] : unchecked ; - -    toolset.flags $(toolset).compile.c++     OPTIONS $(condition) : -        [ feature.get-values <cxxflags>     : $(options) ] : unchecked ; - -    toolset.flags $(toolset).compile.fortran OPTIONS $(condition) : -        [ feature.get-values <fflags>       : $(options) ] : unchecked ; - -    toolset.flags $(toolset).link            OPTIONS $(condition) : -        [ feature.get-values <linkflags>    : $(options) ] : unchecked ; -} - - -# Returns the location of the "program files" directory on a Windows platform. -# -rule get-program-files-dir ( ) -{ -    local ProgramFiles = [ modules.peek : ProgramFiles ] ; -    if $(ProgramFiles) -    { -        ProgramFiles = "$(ProgramFiles:J= )" ; -    } -    else -    { -        ProgramFiles = "c:\\Program Files" ; -    } -    return $(ProgramFiles) ; -} - - -if [ os.name ] = NT -{ -    RM = del /f /q ; -    CP = copy /b ; -    IGNORE = "2>nul >nul & setlocal" ; -    LN ?= $(CP) ; -    # Ugly hack to convince copy to set the timestamp of the -    # destination to the current time by concatenating the -    # source with a nonexistent file.  Note that this requires -    # /b (binary) as the default when concatenating files is /a (ascii). -    WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ; -} -else -{ -    RM = rm -f ; -    CP = cp ; -    LN = ln ; -} - - -rule rm-command ( ) -{ -    return $(RM) ; -} - - -rule copy-command ( ) -{ -    return $(CP) ; -} - - -if "\n" = "n"  -{     -    # Escape characters are not supported. Use ugly hacks that won't work, -    # see below. -    nl = " -" ; -    q = "" ; -} -else -{ -    nl = "\n" ;  -    q = "\"" ; -} - -# Returns the command needed to set an environment variable on the current -# platform. The variable setting persists through all following commands and is -# visible in the environment seen by subsequently executed commands. In other -# words, on Unix systems, the variable is exported, which is consistent with the -# only possible behavior on Windows systems. 
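
For reference, here is a small Python sketch of the behaviour this comment describes and the rule below implements: emit a 'set VAR=value' line on NT and an assignment followed by an export elsewhere, matching the strings asserted in the __test__ rule later in this file. The function name and os_name parameter are illustrative, not part of b2.

    # Sketch of the platform-specific variable-setting snippet; not part of b2.
    def variable_setting_command(variable, value, os_name="LINUX"):
        """Return shell text that sets 'variable' for all subsequently run commands."""
        if os_name == "NT":
            return "set {0}={1}\n".format(variable, value)
        # On Unix the variable must also be exported to reach child processes.
        return '{0}="{1}"\nexport {0}\n'.format(variable, value)

    print(variable_setting_command("PATH", "/opt/tool/bin", os_name="NT"))
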
-# -rule variable-setting-command ( variable : value ) -{ -    if [ os.name ] = NT -    { -        return "set $(variable)=$(value)$(nl)" ; -    } -    else -    { -        # If we don't have escape characters support in bjam, the below blows -        # up on CYGWIN, since the $(nl) variable holds a Windows new-line \r\n  -        # sequence that messes up the executed export command which then reports -        # that the passed variable name is incorrect. -        # But we have a check for cygwin in kernel/bootstrap.jam already. -        return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ; -    } -} - - -# Returns a command to sets a named shell path variable to the given NATIVE -# paths on the current platform. -# -rule path-variable-setting-command ( variable : paths * ) -{ -    local sep = [ os.path-separator ] ; -    return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ; -} - - -# Returns a command that prepends the given paths to the named path variable on -# the current platform. -# -rule prepend-path-variable-command ( variable : paths * ) -{ -    return [ path-variable-setting-command $(variable) -        : $(paths) [ os.expand-variable $(variable) ] ] ; -} - - -# Return a command which can create a file. If 'r' is result of invocation, then -# 'r foobar' will create foobar with unspecified content. What happens if file -# already exists is unspecified. -# -rule file-creation-command ( ) -{ -    if [ os.name ] = NT -    { -        # A few alternative implementations on Windows: -        # -        #   'type NUL >> ' -        #        That would construct an empty file instead of a file containing -        #      a space and an end-of-line marker but it would also not change -        #      the target's timestamp in case the file already exists. -        # -        #   'type NUL > ' -        #        That would construct an empty file instead of a file containing -        #      a space and an end-of-line marker but it would also destroy an -        #      already existing file by overwriting it with an empty one. -        # -        #   I guess the best solution would be to allow Boost Jam to define -        # built-in functions such as 'create a file', 'touch a file' or 'copy a -        # file' which could be used from inside action code. That would allow -        # completely portable operations without this kind of kludge. -        #                                            (22.02.2009.) (Jurko) -        return "echo. > " ; -    } -    else -    { -        return "touch " ; -    } -} - - -# Returns a command that may be used for 'touching' files. It is not a real -# 'touch' command on NT because it adds an empty line at the end of file but it -# works with source files. -# -rule file-touch-command ( ) -{ -    if [ os.name ] = NT -    { -        return "echo. >> " ; -    } -    else -    { -        return "touch " ; -    } -} - - -rule MkDir -{ -    # If dir exists, do not update it. Do this even for $(DOT). -    NOUPDATE $(<) ; - -    if $(<) != $(DOT) && ! $($(<)-mkdir) -    { -        # Cheesy gate to prevent multiple invocations on same dir. -        $(<)-mkdir = true ; - -        # Schedule the mkdir build action. -        common.mkdir $(<) ; - -        # Prepare a Jam 'dirs' target that can be used to make the build only -        # construct all the target directories. -        DEPENDS dirs : $(<) ; - -        # Recursively create parent directories. $(<:P) = $(<)'s parent & we -        # recurse until root. 
- -        local s = $(<:P) ; -        if [ os.name ] = NT -        { -            switch $(s) -            { -                case *:   : s = ; -                case *:\\ : s = ; -            } -        } - -        if $(s) -        { -            if $(s) != $(<) -            { -                DEPENDS $(<) : $(s) ; -                MkDir $(s) ; -            } -            else -            { -                NOTFILE $(s) ; -            } -        } -    } -} - - -#actions MkDir1 -#{ -#    mkdir "$(<)" -#} - -#   The following quick-fix actions should be replaced using the original MkDir1 -# action once Boost Jam gets updated to correctly detect different paths leading -# up to the same filesystem target and triggers their build action only once. -#                                             (todo) (04.07.2008.) (Jurko) - -if [ os.name ] = NT -{ -    actions mkdir -    { -        if not exist "$(<)\\" mkdir "$(<)" -    } -} -else -{ -    actions mkdir -    { -        mkdir -p "$(<)" -    } -} - -actions piecemeal together existing Clean -{ -    $(RM) "$(>)" -} - - -rule copy -{ -} - - -actions copy -{ -    $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)" -} - - -rule RmTemps -{ -} - - -actions quietly updated piecemeal together RmTemps -{ -    $(RM) "$(>)" $(IGNORE) -} - - -actions hard-link -{ -    $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT) -    $(LN) "$(>)" "$(<)" $(NULL_OUT) -} - - -# Given a target, as given to a custom tag rule, returns a string formatted -# according to the passed format. Format is a list of properties that is -# represented in the result. For each element of format the corresponding target -# information is obtained and added to the result string. For all, but the -# literal, the format value is taken as the as string to prepend to the output -# to join the item to the rest of the result. If not given "-" is used as a -# joiner. -# -# The format options can be: -# -#   <base>[joiner] -#       ::  The basename of the target name. -#   <toolset>[joiner] -#       ::  The abbreviated toolset tag being used to build the target. -#   <threading>[joiner] -#       ::  Indication of a multi-threaded build. -#   <runtime>[joiner] -#       ::  Collective tag of the build runtime. -#   <version:/version-feature | X.Y[.Z]/>[joiner] -#       ::  Short version tag taken from the given "version-feature" in the -#           build properties. Or if not present, the literal value as the -#           version number. -#   <property:/property-name/>[joiner] -#       ::  Direct lookup of the given property-name value in the build -#           properties. /property-name/ is a regular expression. E.g. -#           <property:toolset-.*:flavor> will match every toolset. -#   /otherwise/ -#       ::  The literal value of the format argument. -# -# For example this format: -# -#   boost_ <base> <toolset> <threading> <runtime> <version:boost-version> -# -# Might return: -# -#   boost_thread-vc80-mt-gd-1_33.dll, or -#   boost_regex-vc80-gd-1_33.dll -# -# The returned name also has the target type specific prefix and suffix which -# puts it in a ready form to use as the value from a custom tag rule. -# -rule format-name ( format * : name : type ? 
: property-set ) -{ -    local result = "" ; -    for local f in $(format) -    { -        switch $(f:G) -        { -            case <base> : -                local matched = [ MATCH "^(boost.*python)-.*" : $(name) ] ; -                if $(matched) = boost_python || $(matched) = boost_mpi_python -                { -                    result += $(name) ; -                } -                else -                { -                    result += $(name:B) ; -                } -             -            case <toolset> : -                result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) : -                $(property-set) ] ] ; -             -            case <threading> : -                result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type) -                : $(property-set) ] ] ; -             -            case <runtime> : -                result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) : -                $(property-set) ] ] ; -             -            case <qt> : -            result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) : -                $(property-set) ] ] ; - -            case <address-model> : -            result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) : -                $(property-set) ] ] ; - -            case <version:*> : -                local key = [ MATCH <version:(.*)> : $(f:G) ] ; -                local version = [ $(property-set).get <$(key)> ] ; -                version ?= $(key) ; -                version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ; -                result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ; - -            case <property:*> : -                local key = [ MATCH <property:(.*)> : $(f:G) ] ; -                local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ; -                if $(p0) -                { -                    local p = [ $(property-set).get <$(p0)> ] ; -                    if $(p) -                    { -                        result += [ join-tag $(f:G=) : $(p) ] ; -                    } -                } - -            case * : -                result += $(f:G=) ; -        } -    } -    result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) : -      $(property-set) ] ; -    return $(result) ; -} - - -local rule join-tag ( joiner ? : tag ? ) -{ -    if ! $(joiner) { joiner = - ; } -    return $(joiner)$(tag) ; -} - - -local rule toolset-tag ( name : type ? 
: property-set ) -{ -    local tag = ; - -    local properties = [ $(property-set).raw ] ; -    switch [ $(property-set).get <toolset> ] -    { -        case borland* : tag += bcb ; -        case clang* : -        { -            switch [ $(property-set).get <toolset-clang:platform> ] -            { -               case darwin : tag += clang-darwin ; -               case linux  : tag += clang ; -            } -        } -        case como* : tag += como ; -        case cw : tag += cw ; -        case darwin* : tag += xgcc ; -        case edg* : tag += edg ; -        case gcc* : -        { -            switch [ $(property-set).get <toolset-gcc:flavor> ] -            { -                case *mingw* : tag += mgw ; -                case * : tag += gcc ; -            } -        } -        case intel : -        if [ $(property-set).get <toolset-intel:platform> ] = win -        { -            tag += iw ; -        } -        else -        { -            tag += il ; -        } -        case kcc* : tag += kcc ; -        case kylix* : tag += bck ; -        #case metrowerks* : tag += cw ; -        #case mingw* : tag += mgw ; -        case mipspro* : tag += mp ; -        case msvc* : tag += vc ; -        case qcc* : tag += qcc ; -        case sun* : tag += sw ; -        case tru64cxx* : tag += tru ; -        case vacpp* : tag += xlc ; -    } -    local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)" -        : $(properties) ] ; -    # For historical reasons, vc6.0 and vc7.0 use different naming. -    if $(tag) = vc -    { -        if $(version[1]) = 6 -        { -            # Cancel minor version. -            version = 6 ; -        } -        else if $(version[1]) = 7 && $(version[2]) = 0 -        { -            version = 7 ; -        } -    } -    # On intel, version is not added, because it does not matter and it is the -    # version of vc used as backend that matters. Ideally, we should encode the -    # backend version but that would break compatibility with V1. -    if $(tag) = iw -    { -        version = ; -    } - -    # On borland, version is not added for compatibility with V1. -    if $(tag) = bcb -    { -        version = ; -    } - -    tag += $(version) ; - -    return $(tag:J=) ; -} - - -local rule threading-tag ( name : type ? : property-set ) -{ -    local tag = ; -    local properties = [ $(property-set).raw ] ; -    if <threading>multi in $(properties) { tag = mt ; } - -    return $(tag:J=) ; -} - - -local rule runtime-tag ( name : type ? : property-set ) -{ -    local tag = ; - -    local properties = [ $(property-set).raw ] ; -    if <runtime-link>static in $(properties) { tag += s ; } - -    # This is an ugly thing. In V1, there is code to automatically detect which -    # properties affect a target. So, if <runtime-debugging> does not affect gcc -    # toolset, the tag rules will not even see <runtime-debugging>. Similar -    # functionality in V2 is not implemented yet, so we just check for toolsets -    # known to care about runtime debugging. 
-    if ( <toolset>msvc in $(properties) ) || -        ( <stdlib>stlport in $(properties) ) || -        ( <toolset-intel:platform>win in $(properties) ) -    { -        if <runtime-debugging>on in $(properties) { tag += g ; } -    } - -    if <python-debugging>on in $(properties) { tag += y ; } -    if <variant>debug in $(properties) { tag += d ; } -    if <stdlib>stlport in $(properties) { tag += p ; } -    if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; } - -    return $(tag:J=) ; -} - -# Create a tag for the Qt library version -# "<qt>4.6.0" will result in tag "qt460" -local rule qt-tag ( name : type ? : property-set ) -{ -    local properties = [ $(property-set).get <qt> ] ; -    local version = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)" -        : $(properties) ] ; -    local tag = "qt"$(version:J=) ; -    return $(tag) ; -} - -# Create a tag for the address-model -# <address-model>64 will simply generate "64" -local rule address-model-tag ( name : type ? : property-set ) -{ -    local tag = ; -    local version = [ $(property-set).get <address-model> ] ; -    return $(version) ; -} - -rule __test__ ( ) -{ -    import assert ; - -    local nl = " -" ; - -    local save-os = [ modules.peek os : .name ] ; - -    modules.poke os : .name : LINUX ; - -    assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)" -        : path-variable-setting-command PATH : foo bar baz ; - -    assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)" -        : prepend-path-variable-command PATH : foo bar ; - -    modules.poke os : .name : NT ; - -    assert.result "set PATH=foo;bar;baz$(nl)" -        : path-variable-setting-command PATH : foo bar baz ; - -    assert.result "set PATH=foo;bar;%PATH%$(nl)" -        : prepend-path-variable-command PATH : foo bar ; - -    modules.poke os : .name : $(save-os) ; -} diff --git a/jam-files/boost-build/tools/common.py b/jam-files/boost-build/tools/common.py deleted file mode 100644 index 612745b8..00000000 --- a/jam-files/boost-build/tools/common.py +++ /dev/null @@ -1,840 +0,0 @@ -#  Status: being ported by Steven Watanabe -#  Base revision: 47174 -# -#  Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and -#  distribute this software is granted provided this copyright notice appears in -#  all copies. This software is provided "as is" without express or implied -#  warranty, and with no claim as to its suitability for any purpose. - -""" Provides actions common to all toolsets, such as creating directories and -    removing files. -""" - -import re -import bjam -import os -import os.path -import sys - -from b2.build import feature -from b2.util.utility import * -from b2.util import path - -__re__before_first_dash = re.compile ('([^-]*)-') - -def reset (): -    """ Clear the module state. This is mainly for testing purposes. -        Note that this must be called _after_ resetting the module 'feature'. -    """     -    global __had_unspecified_value, __had_value, __declared_subfeature -    global __init_loc -    global __all_signatures, __debug_configuration, __show_configuration -     -    # Stores toolsets without specified initialization values. -    __had_unspecified_value = {} - -    # Stores toolsets with specified initialization values. -    __had_value = {} -     -    # Stores toolsets with declared subfeatures. -    __declared_subfeature = {} -     -    # Stores all signatures of the toolsets. 
-    __all_signatures = {} - -    # Stores the initialization locations of each toolset -    __init_loc = {} - -    __debug_configuration = '--debug-configuration' in bjam.variable('ARGV') -    __show_configuration = '--show-configuration' in bjam.variable('ARGV') - -    global __executable_path_variable -    OS = bjam.call("peek", [], "OS")[0] -    if OS == "NT": -        # On Windows the case and capitalization of PATH is not always predictable, so -        # let's find out what variable name was really set. -        for n in sys.environ: -            if n.lower() == "path": -                __executable_path_variable = n -                break -    else: -        __executable_path_variable = "PATH" - -    m = {"NT": __executable_path_variable, -         "CYGWIN": "PATH", -         "MACOSX": "DYLD_LIBRARY_PATH", -         "AIX": "LIBPATH"} -    global __shared_library_path_variable -    __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH") -                             -reset() - -def shared_library_path_variable(): -    return __shared_library_path_variable - -# ported from trunk@47174 -class Configurations(object): -    """ -        This class helps to manage toolset configurations. Each configuration -        has a unique ID and one or more parameters. A typical example of a unique ID -        is a condition generated by 'common.check-init-parameters' rule. Other kinds -        of IDs can be used. Parameters may include any details about the configuration -        like 'command', 'path', etc. - -        A toolset configuration may be in one of the following states: - -        - registered -              Configuration has been registered (e.g. by autodetection code) but has -              not yet been marked as used, i.e. 'toolset.using' rule has not yet been -              called for it. -          - used -              Once called 'toolset.using' rule marks the configuration as 'used'. - -        The main difference between the states above is that while a configuration is -        'registered' its options can be freely changed. This is useful in particular -        for autodetection code - all detected configurations may be safely overwritten -        by user code. -    """ - -    def __init__(self): -        self.used_ = set() -        self.all_ = set() -        self.params = {} - -    def register(self, id): -        """ -            Registers a configuration. - -            Returns True if the configuration has been added and False if -            it already exists. Reports an error if the configuration is 'used'. -        """ -        if id in self.used_: -            #FIXME -            errors.error("common: the configuration '$(id)' is in use") - -        if id not in self.all_: -            self.all_ += [id] - -            # Indicate that a new configuration has been added. -            return True -        else: -            return False - -    def use(self, id): -        """ -            Mark a configuration as 'used'. - -            Returns True if the state of the configuration has been changed to -            'used' and False if it the state wasn't changed. Reports an error -            if the configuration isn't known. 
-        """ -        if id not in self.all_: -            #FIXME: -            errors.error("common: the configuration '$(id)' is not known") - -        if id not in self.used_: -            self.used_ += [id] - -            # indicate that the configuration has been marked as 'used' -            return True -        else: -            return False - -    def all(self): -        """ Return all registered configurations. """ -        return self.all_ - -    def used(self): -        """ Return all used configurations. """ -        return self.used_ - -    def get(self, id, param): -        """ Returns the value of a configuration parameter. """ -        self.params_.getdefault(param, {}).getdefault(id, None) - -    def set (self, id, param, value): -        """ Sets the value of a configuration parameter. """ -        self.params_.setdefault(param, {})[id] = value - -# Ported from trunk@47174 -def check_init_parameters(toolset, requirement, *args): -    """ The rule for checking toolset parameters. Trailing parameters should all be -        parameter name/value pairs. The rule will check that each parameter either has -        a value in each invocation or has no value in each invocation. Also, the rule -        will check that the combination of all parameter values is unique in all -        invocations. - -        Each parameter name corresponds to a subfeature. This rule will declare a -        subfeature the first time a non-empty parameter value is passed and will -        extend it with all the values. - -        The return value from this rule is a condition to be used for flags settings. -    """ -    # The type checking here is my best guess about -    # what the types should be. -    assert(isinstance(toolset, str)) -    assert(isinstance(requirement, str) or requirement is None) -    sig = toolset -    condition = replace_grist(toolset, '<toolset>') -    subcondition = [] -     -    for arg in args: -        assert(isinstance(arg, tuple)) -        assert(len(arg) == 2) -        name = arg[0] -        value = arg[1] -        assert(isinstance(name, str)) -        assert(isinstance(value, str) or value is None) -         -        str_toolset_name = str((toolset, name)) - -        # FIXME: is this the correct translation? -        ### if $(value)-is-not-empty -        if value is not None: -            condition = condition + '-' + value -            if __had_unspecified_value.has_key(str_toolset_name): -                raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \ -                "no value was specified in earlier initialization\n" \ -                "an explicit value is specified now" % (toolset, name)) - -            # The logic below is for intel compiler. It calls this rule -            # with 'intel-linux' and 'intel-win' as toolset, so we need to -            # get the base part of toolset name. -            # We can't pass 'intel' as toolset, because it that case it will -            # be impossible to register versionles intel-linux and -            # intel-win of specific version. 
-            t = toolset -            m = __re__before_first_dash.match(toolset) -            if m: -                t = m.group(1) - -            if not __had_value.has_key(str_toolset_name): -                if not __declared_subfeature.has_key(str((t, name))): -                    feature.subfeature('toolset', t, name, [], ['propagated']) -                    __declared_subfeature[str((t, name))] = True - -                __had_value[str_toolset_name] = True - -            feature.extend_subfeature('toolset', t, name, [value]) -            subcondition += ['<toolset-' + t + ':' + name + '>' + value ] - -        else: -            if __had_value.has_key(str_toolset_name): -                raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \ -                "an explicit value was specified in an earlier initialization\n" \ -                "no value is specified now" % (toolset, name)) - -            __had_unspecified_value[str_toolset_name] = True - -        if value == None: value = '' -         -        sig = sig + value + '-' - -    if __all_signatures.has_key(sig): -        message = "duplicate initialization of '%s' with the following parameters: " % toolset -         -        for arg in args: -            name = arg[0] -            value = arg[1] -            if value == None: value = '<unspecified>' -             -            message += "'%s' = '%s'\n" % (name, value) - -        raise BaseException(message) - -    __all_signatures[sig] = True -    # FIXME -    __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ; - -    # If we have a requirement, this version should only be applied under that -    # condition. To accomplish this we add a toolset requirement that imposes -    # the toolset subcondition, which encodes the version. -    if requirement: -        r = ['<toolset>' + toolset, requirement] -        r = ','.join(r) -        toolset.add_requirements([r + ':' + c for c in subcondition]) - -    # We add the requirements, if any, to the condition to scope the toolset -    # variables and options to this specific version. -    condition = [condition] -    if requirement: -        condition += [requirement] - -    if __show_configuration: -        print "notice:", condition -    return ['/'.join(condition)] - -# Ported from trunk@47077 -def get_invocation_command_nodefault( -    toolset, tool, user_provided_command=[], additional_paths=[], path_last=False): -    """ -        A helper rule to get the command to invoke some tool. If -        'user-provided-command' is not given, tries to find binary named 'tool' in -        PATH and in the passed 'additional-path'. Otherwise, verifies that the first -        element of 'user-provided-command' is an existing program. -         -        This rule returns the command to be used when invoking the tool. If we can't -        find the tool, a warning is issued. If 'path-last' is specified, PATH is -        checked after 'additional-paths' when searching for 'tool'. 
-    """ -    assert(isinstance(toolset, str)) -    assert(isinstance(tool, str)) -    assert(isinstance(user_provided_command, list)) -    if additional_paths is not None: -        assert(isinstance(additional_paths, list)) -        assert(all([isinstance(path, str) for path in additional_paths])) -    assert(all(isinstance(path, str) for path in additional_paths)) -    assert(isinstance(path_last, bool)) -     -    if not user_provided_command: -        command = find_tool(tool, additional_paths, path_last)  -        if not command and __debug_configuration: -            print "warning: toolset", toolset, "initialization: can't find tool, tool" -            #FIXME -            #print "warning: initialized from" [ errors.nearest-user-location ] ; -    else: -        command = check_tool(user_provided_command) -        if not command and __debug_configuration: -            print "warning: toolset", toolset, "initialization:" -            print "warning: can't find user-provided command", user_provided_command -            #FIXME -            #ECHO "warning: initialized from" [ errors.nearest-user-location ] - -    assert(isinstance(command, str)) -     -    return command - -# ported from trunk@47174 -def get_invocation_command(toolset, tool, user_provided_command = [], -                           additional_paths = [], path_last = False): -    """ Same as get_invocation_command_nodefault, except that if no tool is found, -        returns either the user-provided-command, if present, or the 'tool' parameter. -    """ - -    assert(isinstance(toolset, str)) -    assert(isinstance(tool, str)) -    assert(isinstance(user_provided_command, list)) -    if additional_paths is not None: -        assert(isinstance(additional_paths, list)) -        assert(all([isinstance(path, str) for path in additional_paths])) -    assert(isinstance(path_last, bool)) - -    result = get_invocation_command_nodefault(toolset, tool, -                                              user_provided_command, -                                              additional_paths, -                                              path_last) - -    if not result: -        if user_provided_command: -            result = user_provided_command[0] -        else: -            result = tool - -    assert(isinstance(result, str)) -     -    return result - -# ported from trunk@47281 -def get_absolute_tool_path(command): -    """ -        Given an invocation command, -        return the absolute path to the command. This works even if commnad -        has not path element and is present in PATH. -    """ -    if os.path.dirname(command): -        return os.path.dirname(command) -    else: -        programs = path.programs_path() -        m = path.glob(programs, [command, command + '.exe' ]) -        if not len(m): -            print "Could not find:", command, "in", programs -        return os.path.dirname(m[0]) - -# ported from trunk@47174 -def find_tool(name, additional_paths = [], path_last = False): -    """ Attempts to find tool (binary) named 'name' in PATH and in -        'additional-paths'.  If found in path, returns 'name'.  If -        found in additional paths, returns full name.  If the tool -        is found in several directories, returns the first path found. -        Otherwise, returns the empty string.  If 'path_last' is specified, -        path is checked after 'additional_paths'. 
-    """ -    assert(isinstance(name, str)) -    assert(isinstance(additional_paths, list)) -    assert(isinstance(path_last, bool)) - -    programs = path.programs_path() -    match = path.glob(programs, [name, name + '.exe']) -    additional_match = path.glob(additional_paths, [name, name + '.exe']) - -    result = [] -    if path_last: -        result = additional_match -        if not result and match: -            result = match - -    else: -        if match: -            result = match - -        elif additional_match: -            result = additional_match - -    if result: -        return path.native(result[0]) -    else: -        return '' - -#ported from trunk@47281 -def check_tool_aux(command): -    """ Checks if 'command' can be found either in path -        or is a full name to an existing file. -    """ -    assert(isinstance(command, str)) -    dirname = os.path.dirname(command) -    if dirname: -        if os.path.exists(command): -            return command -        # Both NT and Cygwin will run .exe files by their unqualified names. -        elif on_windows() and os.path.exists(command + '.exe'): -            return command -        # Only NT will run .bat files by their unqualified names. -        elif os_name() == 'NT' and os.path.exists(command + '.bat'): -            return command -    else: -        paths = path.programs_path() -        if path.glob(paths, [command]): -            return command - -# ported from trunk@47281 -def check_tool(command): -    """ Checks that a tool can be invoked by 'command'.  -        If command is not an absolute path, checks if it can be found in 'path'. -        If comand is absolute path, check that it exists. Returns 'command' -        if ok and empty string otherwise. -    """ -    assert(isinstance(command, list)) -    assert(all(isinstance(c, str) for c in command)) -    #FIXME: why do we check the first and last elements???? 
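# Presumably both ends are checked because a user-provided command may wrap
# the real tool, e.g. ['setup.bat', '&&', 'mwcc.exe'] (the cw.jam init further
# down takes $(command[-1]) as the tool path), so the executable can sit at
# either end of the list.  A rough standalone illustration of that check
# (the file names below are made up):
import os

def _looks_like_tool(entry):
    # Simplified stand-in for check_tool_aux: a path-qualified entry must exist.
    return bool(os.path.dirname(entry)) and os.path.exists(entry)

_command = ['./setup.sh', '&&', './bin/compiler']   # illustrative only
_ok = _looks_like_tool(_command[0]) or _looks_like_tool(_command[-1])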
-    if check_tool_aux(command[0]) or check_tool_aux(command[-1]): -        return command - -# ported from trunk@47281 -def handle_options(tool, condition, command, options): -    """ Handle common options for toolset, specifically sets the following -        flag variables: -        - CONFIG_COMMAND to 'command' -        - OPTIOns for compile to the value of <compileflags> in options -        - OPTIONS for compile.c to the value of <cflags> in options -        - OPTIONS for compile.c++ to the value of <cxxflags> in options -        - OPTIONS for compile.fortran to the value of <fflags> in options -        - OPTIONs for link to the value of <linkflags> in options -    """ -    from b2.build import toolset - -    assert(isinstance(tool, str)) -    assert(isinstance(condition, list)) -    assert(isinstance(command, str)) -    assert(isinstance(options, list)) -    assert(command) -    toolset.flags(tool, 'CONFIG_COMMAND', condition, [command]) -    toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options)) -    toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options)) -    toolset.flags(tool + '.compile.c++', 'OPTIONS', condition, feature.get_values('<cxxflags>', options)) -    toolset.flags(tool + '.compile.fortran', 'OPTIONS', condition, feature.get_values('<fflags>', options)) -    toolset.flags(tool + '.link', 'OPTIONS', condition, feature.get_values('<linkflags>', options)) - -# ported from trunk@47281 -def get_program_files_dir(): -    """ returns the location of the "program files" directory on a windows -        platform -    """ -    ProgramFiles = bjam.variable("ProgramFiles") -    if ProgramFiles: -        ProgramFiles = ' '.join(ProgramFiles) -    else: -        ProgramFiles = "c:\\Program Files" -    return ProgramFiles - -# ported from trunk@47281 -def rm_command(): -    return __RM - -# ported from trunk@47281 -def copy_command(): -    return __CP - -# ported from trunk@47281 -def variable_setting_command(variable, value): -    """ -        Returns the command needed to set an environment variable on the current -        platform. The variable setting persists through all following commands and is -        visible in the environment seen by subsequently executed commands. In other -        words, on Unix systems, the variable is exported, which is consistent with the -        only possible behavior on Windows systems. -    """ -    assert(isinstance(variable, str)) -    assert(isinstance(value, str)) - -    if os_name() == 'NT': -        return "set " + variable + "=" + value + os.linesep -    else: -        # (todo) -        #   The following does not work on CYGWIN and needs to be fixed. On -        # CYGWIN the $(nl) variable holds a Windows new-line \r\n sequence that -        # messes up the executed export command which then reports that the -        # passed variable name is incorrect. This is most likely due to the -        # extra \r character getting interpreted as a part of the variable name. -        # -        #   Several ideas pop to mind on how to fix this: -        #     * One way would be to separate the commands using the ; shell -        #       command separator. This seems like the quickest possible -        #       solution but I do not know whether this would break code on any -        #       platforms I I have no access to. 
-        #     * Another would be to not use the terminating $(nl) but that would -        #       require updating all the using code so it does not simply -        #       prepend this variable to its own commands. -        #     * I guess the cleanest solution would be to update Boost Jam to -        #       allow explicitly specifying \n & \r characters in its scripts -        #       instead of always relying only on the 'current OS native newline -        #       sequence'. -        # -        #   Some code found to depend on this behaviour: -        #     * This Boost Build module. -        #         * __test__ rule. -        #         * path-variable-setting-command rule. -        #     * python.jam toolset. -        #     * xsltproc.jam toolset. -        #     * fop.jam toolset. -        #                                     (todo) (07.07.2008.) (Jurko) -        # -        # I think that this works correctly in python -- Steven Watanabe -        return variable + "=" + value + os.linesep + "export " + variable + os.linesep - -def path_variable_setting_command(variable, paths): -    """ -        Returns a command to sets a named shell path variable to the given NATIVE -        paths on the current platform. -    """ -    assert(isinstance(variable, str)) -    assert(isinstance(paths, list)) -    sep = os.path.pathsep -    return variable_setting_command(variable, sep.join(paths)) - -def prepend_path_variable_command(variable, paths): -    """ -        Returns a command that prepends the given paths to the named path variable on -        the current platform. -    """     -    return path_variable_setting_command(variable, -        paths + os.environ.get(variable, "").split(os.pathsep)) - -def file_creation_command(): -    """ -        Return a command which can create a file. If 'r' is result of invocation, then -        'r foobar' will create foobar with unspecified content. What happens if file -        already exists is unspecified. -    """ -    if os_name() == 'NT': -        return "echo. > " -    else: -        return "touch " - -#FIXME: global variable -__mkdir_set = set() -__re_windows_drive = re.compile(r'^.*:\$') - -def mkdir(engine, target): -    # If dir exists, do not update it. Do this even for $(DOT). -    bjam.call('NOUPDATE', target) - -    global __mkdir_set - -    # FIXME: Where is DOT defined? -    #if $(<) != $(DOT) && ! $($(<)-mkdir): -    if target != '.' and target not in __mkdir_set: -        # Cheesy gate to prevent multiple invocations on same dir. -        __mkdir_set.add(target) - -        # Schedule the mkdir build action. -        if os_name() == 'NT': -            engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, []) -        else: -            engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, []) - -        # Prepare a Jam 'dirs' target that can be used to make the build only -        # construct all the target directories. -        engine.add_dependency('dirs', target) - -        # Recursively create parent directories. $(<:P) = $(<)'s parent & we -        # recurse until root. 
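# Standalone illustration of the parent walk performed below ('build/bin/obj'
# is a made-up path; the real code also stops early at a Windows drive root
# via __re_windows_drive):
import os.path

_d = 'build/bin/obj'
_chain = []
while _d and os.path.dirname(_d) != _d:
    _chain.append(_d)
    _d = os.path.dirname(_d)
# _chain == ['build/bin/obj', 'build/bin', 'build']; each directory target is
# made to depend on its parent, so the outermost directory is created first.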
- -        s = os.path.dirname(target) -        if os_name() == 'NT': -            if(__re_windows_drive.match(s)): -                s = '' -                 -        if s: -            if s != target: -                engine.add_dependency(target, s) -                mkdir(engine, s) -            else: -                bjam.call('NOTFILE', s) - -__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)') - -def format_name(format, name, target_type, prop_set): -    """ Given a target, as given to a custom tag rule, returns a string formatted -        according to the passed format. Format is a list of properties that is -        represented in the result. For each element of format the corresponding target -        information is obtained and added to the result string. For all, but the -        literal, the format value is taken as the as string to prepend to the output -        to join the item to the rest of the result. If not given "-" is used as a -        joiner. - -        The format options can be: - -          <base>[joiner] -              ::  The basename of the target name. -          <toolset>[joiner] -              ::  The abbreviated toolset tag being used to build the target. -          <threading>[joiner] -              ::  Indication of a multi-threaded build. -          <runtime>[joiner] -              ::  Collective tag of the build runtime. -          <version:/version-feature | X.Y[.Z]/>[joiner] -              ::  Short version tag taken from the given "version-feature" -                  in the build properties. Or if not present, the literal -                  value as the version number. -          <property:/property-name/>[joiner] -              ::  Direct lookup of the given property-name value in the -                  build properties. /property-name/ is a regular expression. -                  e.g. <property:toolset-.*:flavor> will match every toolset. -          /otherwise/ -              ::  The literal value of the format argument. - -        For example this format: - -          boost_ <base> <toolset> <threading> <runtime> <version:boost-version> - -        Might return: - -          boost_thread-vc80-mt-gd-1_33.dll, or -          boost_regex-vc80-gd-1_33.dll - -        The returned name also has the target type specific prefix and suffix which -        puts it in a ready form to use as the value from a custom tag rule. 
-    """ -    assert(isinstance(format, list)) -    assert(isinstance(name, str)) -    assert(isinstance(target_type, str) or not type) -    # assert(isinstance(prop_set, property_set.PropertySet)) -    if type.is_derived(target_type, 'LIB'): -        result = "" ; -        for f in format: -            grist = get_grist(f) -            if grist == '<base>': -                result += os.path.basename(name) -            elif grist == '<toolset>': -                result += join_tag(ungrist(f),  -                    toolset_tag(name, target_type, prop_set)) -            elif grist == '<threading>': -                result += join_tag(ungrist(f), -                    threading_tag(name, target_type, prop_set)) -            elif grist == '<runtime>': -                result += join_tag(ungrist(f), -                    runtime_tag(name, target_type, prop_set)) -            elif grist.startswith('<version:'): -                key = grist[len('<version:'):-1] -                version = prop_set.get('<' + key + '>') -                if not version: -                    version = key -                version = __re_version.match(version) -                result += join_tag(ungrist(f), version[1] + '_' + version[2]) -            elif grist.startswith('<property:'): -                key = grist[len('<property:'):-1] -                property_re = re.compile('<(' + key + ')>') -                p0 = None -                for prop in prop_set.raw(): -                    match = property_re.match(prop) -                    if match: -                        p0 = match[1] -                        break -                if p0: -                    p = prop_set.get('<' + p0 + '>') -                    if p: -                        assert(len(p) == 1) -                        result += join_tag(ungrist(f), p) -            else: -                result += ungrist(f) - -        result = virtual_target.add_prefix_and_suffix( -            ''.join(result), target_type, prop_set) -        return result - -def join_tag(joiner, tag): -    if not joiner: joiner = '-' -    return joiner + tag - -__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)") - -def toolset_tag(name, target_type, prop_set): -    tag = '' - -    properties = prop_set.raw() -    tools = prop_set.get('<toolset>') -    assert(len(tools) == 0) -    tools = tools[0] -    if tools.startswith('borland'): tag += 'bcb' -    elif tools.startswith('como'): tag += 'como' -    elif tools.startswith('cw'): tag += 'cw' -    elif tools.startswith('darwin'): tag += 'xgcc' -    elif tools.startswith('edg'): tag += edg -    elif tools.startswith('gcc'): -        flavor = prop_set.get('<toolset-gcc:flavor>') -        ''.find -        if flavor.find('mingw') != -1: -            tag += 'mgw' -        else: -            tag += 'gcc' -    elif tools == 'intel': -        if prop_set.get('<toolset-intel:platform>') == ['win']: -            tag += 'iw' -        else: -            tag += 'il' -    elif tools.startswith('kcc'): tag += 'kcc' -    elif tools.startswith('kylix'): tag += 'bck' -    #case metrowerks* : tag += cw ; -    #case mingw* : tag += mgw ; -    elif tools.startswith('mipspro'): tag += 'mp' -    elif tools.startswith('msvc'): tag += 'vc' -    elif tools.startswith('sun'): tag += 'sw' -    elif tools.startswith('tru64cxx'): tag += 'tru' -    elif tools.startswith('vacpp'): tag += 'xlc' - -    for prop in properties: -        match = __re_toolset_version.match(prop) -        if(match): -            version = match -            break -    
version_string = None -    # For historical reasons, vc6.0 and vc7.0 use different naming. -    if tag == 'vc': -        if version.group(1) == '6': -            # Cancel minor version. -            version_string = '6' -        elif version.group(1) == '7' and version.group(2) == '0': -            version_string = '7' - -    # On intel, version is not added, because it does not matter and it's the -    # version of vc used as backend that matters. Ideally, we'd encode the -    # backend version but that would break compatibility with V1. -    elif tag == 'iw': -        version_string = '' - -    # On borland, version is not added for compatibility with V1. -    elif tag == 'bcb': -        version_string = '' - -    if version_string is None: -        version = version.group(1) + version.group(2) - -    tag += version - -    return tag - - -def threading_tag(name, target_type, prop_set): -    tag = '' -    properties = prop_set.raw() -    if '<threading>multi' in properties: tag = 'mt' - -    return tag - - -def runtime_tag(name, target_type, prop_set ): -    tag = '' - -    properties = prop_set.raw() -    if '<runtime-link>static' in properties: tag += 's' - -    # This is an ugly thing. In V1, there's a code to automatically detect which -    # properties affect a target. So, if <runtime-debugging> does not affect gcc -    # toolset, the tag rules won't even see <runtime-debugging>. Similar -    # functionality in V2 is not implemented yet, so we just check for toolsets -    # which are known to care about runtime debug. -    if '<toolset>msvc' in properties \ -       or '<stdlib>stlport' in properties \ -       or '<toolset-intel:platform>win' in properties: -        if '<runtime-debugging>on' in properties: tag += 'g' - -    if '<python-debugging>on' in properties: tag += 'y' -    if '<variant>debug' in properties: tag += 'd' -    if '<stdlib>stlport' in properties: tag += 'p' -    if '<stdlib-stlport:iostream>hostios' in properties: tag += 'n' - -    return tag - - -## TODO: -##rule __test__ ( ) -##{ -##    import assert ; -## -##    local nl = " -##" ; -## -##    local save-os = [ modules.peek os : .name ] ; -## -##    modules.poke os : .name : LINUX ; -## -##    assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)" -##        : path-variable-setting-command PATH : foo bar baz ; -## -##    assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)" -##        : prepend-path-variable-command PATH : foo bar ; -## -##    modules.poke os : .name : NT ; -## -##    assert.result "set PATH=foo;bar;baz$(nl)" -##        : path-variable-setting-command PATH : foo bar baz ; -## -##    assert.result "set PATH=foo;bar;%PATH%$(nl)" -##        : prepend-path-variable-command PATH : foo bar ; -## -##    modules.poke os : .name : $(save-os) ; -##} - -def init(manager): -    engine = manager.engine() - -    engine.register_action("common.MkDir1-quick-fix-for-unix", 'mkdir -p "$(<)"') -    engine.register_action("common.MkDir1-quick-fix-for-windows", 'if not exist "$(<)\\" mkdir "$(<)"') - -    import b2.tools.make -    import b2.build.alias - -    global __RM, __CP, __IGNORE, __LN -    # ported from trunk@47281 -    if os_name() == 'NT': -        __RM = 'del /f /q' -        __CP = 'copy' -        __IGNORE = '2>nul >nul & setlocal' -        __LN = __CP -        #if not __LN: -        #    __LN = CP -    else: -        __RM = 'rm -f' -        __CP = 'cp' -        __IGNORE = '' -        __LN = 'ln' -         -    engine.register_action("common.Clean", __RM + ' "$(>)"', -                           
flags=['piecemeal', 'together', 'existing']) -    engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"') -    engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE, -                           flags=['quietly', 'updated', 'piecemeal', 'together']) - -    engine.register_action("common.hard-link",  -        __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep + -        __LN + ' "$(>)" "$(<)" $(NULL_OUT)') diff --git a/jam-files/boost-build/tools/como-linux.jam b/jam-files/boost-build/tools/como-linux.jam deleted file mode 100644 index 5c554c8f..00000000 --- a/jam-files/boost-build/tools/como-linux.jam +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# The following #// line will be used by the regression test table generation -# program as the column heading for HTML tables. Must not include a version -# number. -#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a> - -import toolset ; -import feature ; -import toolset : flags ; -import common ; -import generators ; - -import unix ; -import como ; - -feature.extend-subfeature toolset como : platform : linux ; - -toolset.inherit-generators como-linux -     <toolset>como <toolset-como:platform>linux : unix ; -generators.override como-linux.prebuilt : builtin.lib-generator ; -generators.override como-linux.searched-lib-generator : searched-lib-generator ; -toolset.inherit-flags como-linux : unix ; -toolset.inherit-rules como-linux : gcc ; - -generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ -    : <toolset>como <toolset-como:platform>linux ; -generators.register-c-compiler como-linux.compile.c : C : OBJ -    : <toolset>como <toolset-como:platform>linux ; - - -rule init ( version ? 
: command * : options * ) -{ -    local condition = [ common.check-init-parameters como-linux -        : version $(version) ] ; - -    command = [ common.get-invocation-command como-linux : como -        : $(command) ] ; - -    common.handle-options como-linux : $(condition) : $(command) : $(options) ; -} - - -flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ; -flags como-linux C++FLAGS <exception-handling>on : --exceptions ; - -flags como-linux CFLAGS <inlining>off : --no_inlining ; -flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ; - -flags como-linux CFLAGS <optimization>off : -O0 ; -flags como-linux CFLAGS <optimization>speed : -O3 ; -flags como-linux CFLAGS <optimization>space : -Os ; - -flags como-linux CFLAGS <debug-symbols>on : -g ; -flags como-linux LINKFLAGS <debug-symbols>on : -g ; - -flags como-linux FINDLIBS : m ; -flags como-linux FINDLIBS : rt ; - -flags como-linux CFLAGS <cflags> ; -flags como-linux C++FLAGS <cxxflags> ; -flags como-linux DEFINES <define> ; -flags como-linux UNDEFS <undef> ; -flags como-linux HDRS <include> ; -flags como-linux STDHDRS <sysinclude> ; -flags como-linux LINKFLAGS <linkflags> ; -flags como-linux ARFLAGS <arflags> ; - -flags como-linux.link LIBRARIES <library-file> ; -flags como-linux.link LINKPATH <library-path> ; -flags como-linux.link FINDLIBS-ST <find-static-library> ; -flags como-linux.link FINDLIBS-SA <find-shared-library> ; - -flags como-linux.link RPATH <dll-path> ; -flags como-linux.link RPATH_LINK <xdll-path> ; - - -actions link bind LIBRARIES -{ -    $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)"  "$(LIBRARIES)"  "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1 -} - -actions link.dll bind LIBRARIES -{ -    $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)"  "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1 -} - -actions compile.c -{ -    $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1 -} - -actions compile.c++ -{ -    $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)"  -o "$(<)" "$(>)" 2>&1 -} - -actions archive -{ -    ar rcu $(<) $(>) -} diff --git a/jam-files/boost-build/tools/como-win.jam b/jam-files/boost-build/tools/como-win.jam deleted file mode 100644 index d21a70d6..00000000 --- a/jam-files/boost-build/tools/como-win.jam +++ /dev/null @@ -1,117 +0,0 @@ -# (C) Copyright David Abrahams 2001. -# (C) Copyright MetaCommunications, Inc. 2004. - -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# The following #// line will be used by the regression test table generation -# program as the column heading for HTML tables. Must not include a version -# number. -#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a> - -import common ; -import como ; -import feature ; -import generators ; -import toolset : flags ; - -feature.extend-subfeature toolset como : platform : win ; - - -# Initializes the Comeau toolset for windows. The command is the command which -# invokes the compiler. 
You should either set environment variable -# COMO_XXX_INCLUDE where XXX is the used backend (as described in the -# documentation), or pass that as part of command, e.g: -# -#   using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ; -# -rule init ( version ? : command * : options * ) -{ -    local condition = [  common.check-init-parameters como-win -        : version $(version) ] ; - -    command = [ common.get-invocation-command como-win : como.exe : -        $(command) ] ; - -    common.handle-options como-win : $(condition) : $(command) : $(options) ; -} - -generators.register-c-compiler como-win.compile.c++ : CPP : OBJ -    : <toolset>como <toolset-como:platform>win ; -generators.register-c-compiler como-win.compile.c : C : OBJ -    : <toolset>como <toolset-como:platform>win ; - - -generators.register-linker como-win.link -    : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -    : EXE -    : <toolset>como <toolset-como:platform>win ; - -# Note that status of shared libraries support is not clear, so we do not define -# the link.dll generator. -generators.register-archiver como-win.archive -    : OBJ : STATIC_LIB -    : <toolset>como <toolset-como:platform>win ; - - -flags como-win C++FLAGS <exception-handling>off : --no_exceptions ; -flags como-win C++FLAGS <exception-handling>on : --exceptions ; - -flags como-win CFLAGS <inlining>off : --no_inlining ; -flags como-win CFLAGS <inlining>on <inlining>full : --inlining ; - - -# The following seems to be VC-specific options. At least, when I uncomment -# then, Comeau with bcc as backend reports that bcc32 invocation failed. -# -#flags como-win CFLAGS <debug-symbols>on : /Zi ; -#flags como-win CFLAGS <optimization>off : /Od ; - - -flags como-win CFLAGS <cflags> ; -flags como-win CFLAGS : -D_WIN32 ;  # Make sure that we get the Boost Win32 platform config header. -flags como-win CFLAGS <threading>multi : -D_MT ;  # Make sure that our config knows that threading is on. -flags como-win C++FLAGS <cxxflags> ; -flags como-win DEFINES <define> ; -flags como-win UNDEFS <undef> ; -flags como-win HDRS <include> ; -flags como-win SYSHDRS <sysinclude> ; -flags como-win LINKFLAGS <linkflags> ; -flags como-win ARFLAGS <arflags> ; -flags como-win NO_WARN <no-warn> ; - -#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ; -#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ; - -flags como-win LIBPATH <library-path> ; -flags como-win LIBRARIES <library-file> ; -flags como-win FINDLIBS <find-shared-library> ; -flags como-win FINDLIBS <find-static-library> ; - -nl = " -" ; - - -# For como, we repeat all libraries so that dependencies are always resolved. 
-# -actions link bind LIBRARIES -{ -    $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)" -} - -actions compile.c -{ -    $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)" -} - -actions compile.c++ -{ -    $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)" -} - -actions archive -{ -    $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" -    lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" -} diff --git a/jam-files/boost-build/tools/como.jam b/jam-files/boost-build/tools/como.jam deleted file mode 100644 index 04a05a94..00000000 --- a/jam-files/boost-build/tools/como.jam +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This is a generic 'como' toolset. Depending on the current system, it -# forwards either to 'como-linux' or 'como-win' modules. - -import feature ; -import os ; -import toolset ; - -feature.extend toolset : como ; -feature.subfeature toolset como : platform : : propagated link-incompatible ; - -rule init ( * : * ) -{ -    if [ os.name ] = LINUX -    { -        toolset.using como-linux :  -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -    else -    { -        toolset.using como-win : -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; - -    }         -} diff --git a/jam-files/boost-build/tools/convert.jam b/jam-files/boost-build/tools/convert.jam deleted file mode 100644 index ac1d7010..00000000 --- a/jam-files/boost-build/tools/convert.jam +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) 2009 Vladimir Prus -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Implements 'convert' target that takes a bunch of source and -# tries to convert each one to the specified type. -# -# For example: -# -#   convert objects obj : a.cpp b.cpp ; -# - -import targets ; -import generators ; -import project ; -import type ; -import "class" : new ; - -class convert-target-class : typed-target -{ -    rule __init__ ( name : project : type -    : sources * : requirements * : default-build * : usage-requirements * ) -    { -        typed-target.__init__ $(name) : $(project) : $(type) -          : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ; -    } - -    rule construct ( name : source-targets * : property-set ) -    { -        local r = [ generators.construct $(self.project) : $(self.type) -          : [ property-set.create [ $(property-set).raw ] # [ feature.expand -              <main-target-type>$(self.type) ] -          # ] -            : $(source-targets) ] ; -        if ! 
$(r) -        { -            errors.error "unable to construct" [ full-name ] ; -        } - -        return $(r) ; -    } - -} - -rule convert ( name type : sources * : requirements * : default-build * -    : usage-requirements * ) -{ -    local project = [ project.current ] ; - -    # This is a circular module dependency, so it must be imported here -    modules.import targets ; -    targets.main-target-alternative -      [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ] -        : [ targets.main-target-sources $(sources) : $(name) ] -        : [ targets.main-target-requirements $(requirements) : $(project) ] -        : [ targets.main-target-default-build $(default-build) : $(project) ] -        : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] -      ] ; -} -IMPORT $(__name__) : convert : : convert ; diff --git a/jam-files/boost-build/tools/cw-config.jam b/jam-files/boost-build/tools/cw-config.jam deleted file mode 100644 index 1211b7c0..00000000 --- a/jam-files/boost-build/tools/cw-config.jam +++ /dev/null @@ -1,34 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for CodeWarrior toolset. To use, just import this module. - -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ -    for local R in 9 8 7 -    { -        local cw-path = [ W32_GETREG -            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)" -            : "PATH" ] ; -        local cw-version = [ W32_GETREG -            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)" -            : "VERSION" ] ; -        cw-path ?= [ W32_GETREG -            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0" -            : "PATH" ] ; -        cw-version ?= $(R).0 ; -         -        if $(cw-path) -        { -            if --debug-configuration in [ modules.peek : ARGV ] -            { -                ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ; -            } -            using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ; -        } -    } -} diff --git a/jam-files/boost-build/tools/cw.jam b/jam-files/boost-build/tools/cw.jam deleted file mode 100644 index ddcbfeb2..00000000 --- a/jam-files/boost-build/tools/cw.jam +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright (C) Reece H Dunn 2004 -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# based on the msvc.jam toolset - -import property ; -import generators ; -import os ; -import type ; -import toolset : flags ; -import errors : error ; -import feature : feature get-values ; -import path ; -import sequence : unique ; -import common ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -   .debug-configuration = true ; -} - -feature.extend toolset : cw ; - -toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ; - -nl = " -" ; - -rule init ( version ? 
: command * : options * ) -{ -   # TODO: fix the $(command[1]) = $(compiler) issue -     -    setup = [ get-values <setup> : $(options) ] ; -    setup    ?= cwenv.bat ; -    compiler = [ get-values <compiler> : $(options) ] ; -    compiler ?= mwcc ; -    linker = [ get-values <linker> : $(options) ] ; -    linker   ?= mwld ; - -    local condition = [ common.check-init-parameters cw : -        version $(version) ] ; - -    command = [ common.get-invocation-command cw : mwcc.exe : $(command) : -        [ default-paths $(version) ] ] ; -     -    common.handle-options cw : $(condition) : $(command) : $(options) ; - -    local root = [ feature.get-values <root> : $(options) ] ;     -    if $(command) -    { -        command = [ common.get-absolute-tool-path $(command[-1]) ] ; -    } -    local tool-root = $(command) ; - -    setup = $(tool-root)\\$(setup) ; - -   # map the batch file in setup so it can be executed - -    other-tools = $(tool-root:D) ; -    root ?= $(other-tools:D) ; -     -    flags cw.link RUN_PATH $(condition) :  -        "$(root)\\Win32-x86 Support\\Libraries\\Runtime" -        "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ;       -     -    setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ; - -   if [ os.name ] = NT -   { -      setup = $(setup)" -" ; -   } -   else -   { -      setup = "cmd /S /C "$(setup)" \"&&\" " ; -   } - -   # bind the setup command to the tool so it can be executed before the -   # command - -   local prefix = $(setup) ; - -   flags cw.compile .CC $(condition) : $(prefix)$(compiler) ; -   flags cw.link .LD $(condition) : $(prefix)$(linker) ; -   flags cw.archive .LD $(condition) : $(prefix)$(linker) ; - -    if [ MATCH ^([89]\\.) : $(version) ]    -    { -        if [ os.name ] = NT -        { -        # The runtime libraries -        flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ; -        flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ; -         -        flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ; -        flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ; -         -        flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ; -        flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ; -        }         -    }     -} - - -local rule default-paths ( version ? 
)  # FIXME -{ -   local possible-paths ; -   local ProgramFiles = [ common.get-program-files-dir ] ; - -   # TODO: add support for cw8 and cw9 detection - -   local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ; -   possible-paths += $(version-6-path) ; - -   # perform post-processing - -   possible-paths -      = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ; - -   possible-paths += [ modules.peek : PATH Path path ] ; - -   return $(possible-paths) ; -} - - - - -## declare generators - -generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ; -generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ; - -generators.register-linker cw.link -   : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -   : EXE -   : <toolset>cw -   ; -generators.register-linker cw.link.dll -   : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -   : SHARED_LIB IMPORT_LIB -   : <toolset>cw -   ; - -generators.register-archiver cw.archive -   : OBJ -   : STATIC_LIB -   : <toolset>cw -   ; - -## compilation phase - -flags cw WHATEVER <toolset-cw:version> ; - -flags cw.compile CFLAGS <debug-symbols>on : -g ; -flags cw.compile CFLAGS <optimization>off : -O0 ; -flags cw.compile CFLAGS <optimization>speed : -O4,p ; -flags cw.compile CFLAGS <optimization>space : -O4,s ; -flags cw.compile CFLAGS <inlining>off : -inline off ; -flags cw.compile CFLAGS <inlining>on : -inline on ; -flags cw.compile CFLAGS <inlining>full : -inline all ; -flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ; - - -flags cw.compile CFLAGS <rtti>on : -RTTI on ; -flags cw.compile CFLAGS <rtti>off : -RTTI off ; - -flags cw.compile CFLAGS <warnings>on : -w on ; -flags cw.compile CFLAGS <warnings>off : -w off ; -flags cw.compile CFLAGS <warnings>all : -w all ; -flags cw.compile CFLAGS <warnings-as-errors>on : -w error ; - -flags cw.compile USER_CFLAGS <cflags> : ; -flags cw.compile.c++ USER_CFLAGS <cxxflags> : ; - -flags cw.compile DEFINES <define> ; -flags cw.compile UNDEFS <undef> ; -flags cw.compile INCLUDES <include> ; - -actions compile.c -{ -   $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")" -} -actions compile.c++ -{ -   $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")" -} - -## linking phase - -flags cw.link DEF_FILE <def-file> ; - -flags cw LINKFLAGS : -search ; -flags cw LINKFLAGS <debug-symbols>on : -g ; -flags cw LINKFLAGS <user-interface>console : -subsystem console ; -flags cw LINKFLAGS <user-interface>gui : -subsystem windows ; -flags cw LINKFLAGS <user-interface>wince : -subsystem wince ; -flags cw LINKFLAGS <user-interface>native : -subsystem native ; -flags cw LINKFLAGS <user-interface>auto : -subsystem auto ; - -flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ; - -flags cw.link USER_LINKFLAGS <linkflags> ; -flags cw.link LINKPATH <library-path> ; - -flags cw.link FINDLIBS_ST <find-static-library> ; -flags cw.link FINDLIBS_SA <find-shared-library> ; -flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ; -flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ; - -rule link.dll ( targets + : sources * : properties * ) -{ -    DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ; -} - -if [ os.name ] in NT -{ -   actions archive -   { -      if exist "$(<[1])" DEL "$(<[1])" -      $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" 
$(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -   } -} -else # cygwin -{ -   actions archive -   { -      _bbv2_out_="$(<)" -      if test -f "$_bbv2_out_" ; then -         _bbv2_existing_="$(<:W)" -      fi -      $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -   } -} - -actions link bind DEF_FILE -{ -   $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -} - -actions link.dll bind DEF_FILE -{ -   $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" -} - diff --git a/jam-files/boost-build/tools/darwin.jam b/jam-files/boost-build/tools/darwin.jam deleted file mode 100644 index bb6dd45e..00000000 --- a/jam-files/boost-build/tools/darwin.jam +++ /dev/null @@ -1,568 +0,0 @@ -# Copyright 2003 Christopher Currie -# Copyright 2006 Dave Abrahams  -# Copyright 2003, 2004, 2005, 2006 Vladimir Prus  -# Copyright 2005-2007 Mat Marcus -# Copyright 2005-2007 Adobe Systems Incorporated -# Copyright 2007-2010 Rene Rivera -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -#  Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ -#  for explanation why it's a separate toolset. - -import feature : feature ; -import toolset : flags ; -import type ; -import common ; -import generators ; -import path : basename ; -import version ; -import property-set ; -import regex ; -import errors ; - -## Use a framework. -feature framework : : free ; - -## The MacOSX version to compile for, which maps to the SDK to use (sysroot). -feature macosx-version : : propagated link-incompatible symmetric optional ; - -## The minimal MacOSX version to target. -feature macosx-version-min : : propagated optional ; - -## A dependency, that is forced to be included in the link. -feature force-load : : free dependency incidental ; - -############################################################################# - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} - -feature.extend toolset : darwin ; -import gcc ; -toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ; - -generators.override darwin.prebuilt : builtin.prebuilt ; -generators.override darwin.searched-lib-generator : searched-lib-generator ; - -# Override default do-nothing generators. 
-generators.override darwin.compile.c.pch   : pch.default-c-pch-generator   ; -generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ; - -type.set-generated-target-suffix PCH : <toolset>darwin : gch ; - -toolset.inherit-rules darwin : gcc : localize ; -toolset.inherit-flags darwin : gcc      -  :  <runtime-link>static -     <architecture>arm/<address-model>32 -     <architecture>arm/<address-model>64 -     <architecture>arm/<instruction-set> -     <architecture>x86/<address-model>32 -     <architecture>x86/<address-model>64 -     <architecture>x86/<instruction-set> -     <architecture>power/<address-model>32 -     <architecture>power/<address-model>64 -     <architecture>power/<instruction-set>  ; - -# Options: -# -#   <root>PATH -#       Platform root path. The common autodetection will set this to -#       "/Developer". And when a command is given it will be set to -#       the corresponding "*.platform/Developer" directory. -# -rule init ( version ? : command * : options * : requirement * ) -{ -    # First time around, figure what is host OSX version -    if ! $(.host-osx-version)   -    { -        .host-osx-version = [ MATCH "^([0-9.]+)" -          : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ; -        if $(.debug-configuration) -        { -            ECHO notice: OSX version on this machine is $(.host-osx-version) ; -        } -    } -                       -    # - The root directory of the tool install. -    local root = [ feature.get-values <root> : $(options) ] ; -     -    # - The bin directory where to find the commands to execute. -    local bin ; -     -    # - The configured compile driver command. -    local command = [ common.get-invocation-command darwin : g++ : $(command) ] ; -     -    # The version as reported by the compiler -    local real-version ; -     -    # - Autodetect the root and bin dir if not given. -    if $(command) -    { -        bin ?= [ common.get-absolute-tool-path $(command[1]) ] ; -        if $(bin) = "/usr/bin" -        { -            root ?= /Developer ; -        } -        else -        { -            local r = $(bin:D) ; -            r = $(r:D) ; -            root ?= $(r) ; -        } -    } -     -    # - Autodetect the version if not given. -    if $(command) -    { -        # - The 'command' variable can have multiple elements. When calling -        #   the SHELL builtin we need a single string. -        local command-string = $(command:J=" ") ; -        real-version = [ MATCH "^([0-9.]+)" -            : [ SHELL "$(command-string) -dumpversion" ] ] ; -        version ?= $(real-version) ; -    } -     -    .real-version.$(version) = $(real-version) ; -     -    # - Define the condition for this toolset instance. -    local condition = -        [ common.check-init-parameters darwin $(requirement) : version $(version) ] ; -     -    # - Set the toolset generic common options. -    common.handle-options darwin : $(condition) : $(command) : $(options) ; -     -    # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates. -    if $(real-version) < "4.0.0" -    { -        flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ; -    } -    # - GCC 4.2 and higher in Darwin does not have -Wno-long-double. -    if $(real-version) < "4.2.0" -    { -        flags darwin.compile OPTIONS $(condition) : -Wno-long-double ; -    } - -    # - Set the link flags common with the GCC toolset. -    gcc.init-link-flags darwin darwin $(condition) ; - -    # - The symbol strip program. 
-    local strip ; -    if <striper> in $(options) -    { -        # We can turn off strip by specifying it as empty. In which -        # case we switch to using the linker to do the strip. -        flags darwin.link.dll OPTIONS -            $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ; -        flags darwin.link.dll OPTIONS -            $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ; -        flags darwin.link OPTIONS -            $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ; -        flags darwin.link OPTIONS -            $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ; -    } -    else -    { -        # Otherwise we need to find a strip program to use. And hence -        # also tell the link action that we need to use a strip -        # post-process. -        flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ; -        strip = -            [ common.get-invocation-command darwin -                : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ; -        flags darwin.link .STRIP $(condition) : $(strip[1]) ; -        if $(.debug-configuration) -        { -            ECHO notice: using strip for $(condition) at $(strip[1]) ; -        } -    } - -    # - The archive builder (libtool is the default as creating -    #   archives in darwin is complicated. -    local archiver = -        [ common.get-invocation-command darwin -            : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ; -    flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ; -    if $(.debug-configuration) -    { -        ECHO notice: using archiver for $(condition) at $(archiver[1]) ; -    } -     -    # - Initialize the SDKs available in the root for this tool. -    local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ; -     -    #~ ECHO --- ; -    #~ ECHO --- bin :: $(bin) ; -    #~ ECHO --- root :: $(root) ; -    #~ ECHO --- version :: $(version) ; -    #~ ECHO --- condition :: $(condition) ; -    #~ ECHO --- strip :: $(strip) ; -    #~ ECHO --- archiver :: $(archiver) ; -    #~ ECHO --- sdks :: $(sdks) ; -    #~ ECHO --- ; -    #~ EXIT ; -} - -# Add and set options for a discovered SDK version. -local rule init-sdk ( condition * : root ? : version + : version-feature ? ) -{ -    local rule version-to-feature ( version + ) -    { -        switch $(version[1]) -        { -            case iphone* : -            { -                return $(version[1])-$(version[2-]:J=.) ; -            } -            case mac* : -            { -                return $(version[2-]:J=.) ; -            } -            case * : -            { -                return $(version:J=.) ; -            } -        } -    } -     -    if $(version-feature) -    { -        if $(.debug-configuration) -        { -            ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ; -        } -         -        # Add the version to the features for specifying them. -        if ! $(version-feature) in [ feature.values macosx-version ] -        { -            feature.extend macosx-version : $(version-feature) ; -        } -        if ! $(version-feature) in [ feature.values macosx-version-min ] -        { -            feature.extend macosx-version-min : $(version-feature) ; -        } -         -        # Set the flags the version needs to compile with, first -        # generic options. 
-        flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature) -            : -isysroot $(sdk) ; -        flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature) -            : -isysroot $(sdk) ; -         -        # Then device variation options. -        switch $(version[1]) -        { -            case iphonesim* : -            { -                local N = $(version[2]) ; -                if ! $(version[3]) { N += 00 ; } -                else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; } -                else { N += 0$(version[3]) ; } -                if ! $(version[4]) { N += 00 ; } -                else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; } -                else { N += 0$(version[4]) ; } -                N = $(N:J=) ; -                flags darwin.compile OPTIONS <macosx-version-min>$(version-feature) -                    : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; -                flags darwin.link OPTIONS <macosx-version-min>$(version-feature) -                    : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; -            } -             -            case iphone* : -            { -                flags darwin.compile OPTIONS <macosx-version-min>$(version-feature) -                    : -miphoneos-version-min=$(version[2-]:J=.) ; -                flags darwin.link OPTIONS <macosx-version-min>$(version-feature) -                    : -miphoneos-version-min=$(version[2-]:J=.) ; -            } -             -            case mac* : -            { -                flags darwin.compile OPTIONS <macosx-version-min>$(version-feature) -                    : -mmacosx-version-min=$(version[2-]:J=.) ; -                flags darwin.link OPTIONS <macosx-version-min>$(version-feature) -                    : -mmacosx-version-min=$(version[2-]:J=.) ; -            } -        } -         -        return $(version-feature) ; -    } -    else if $(version[4]) -    { -        # We have a patch version of an SDK. We want to set up -        # both the specific patch version, and the minor version. -        # So we recurse to set up the minor version. Plus the minor version. -        return -            [ init-sdk $(condition) : $(root) -                : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ] -            [ init-sdk $(condition) : $(root) -                : $(version) : [ version-to-feature $(version) ] ] ; -    } -    else -    { -        # Yes, this is intentionally recursive. -        return -            [ init-sdk $(condition) : $(root) -                : $(version) : [ version-to-feature $(version) ] ] ; -    } -} - -# Determine the MacOSX SDK versions installed and their locations. -local rule init-available-sdk-versions ( condition * : root ? ) -{ -    root ?= /Developer ; -    local sdks-root = $(root)/SDKs ; -    local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ; -    local result ; -    for local sdk in $(sdks) -    { -        local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? 
: $(sdk:D=) ] ; -        local sdk-platform = $(sdk-match[1]:L) ; -        local sdk-version = $(sdk-match[2-]) ; -        if $(sdk-version) -        { -            switch $(sdk-platform) -            { -                case macosx : -                { -                    sdk-version = mac $(sdk-version) ; -                } -                case iphoneos : -                { -                    sdk-version = iphone $(sdk-version) ; -                } -                case iphonesimulator : -                { -                    sdk-version = iphonesim $(sdk-version) ; -                } -                case * : -                { -                    sdk-version = $(sdk-version:J=-) ; -                } -            } -            result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ; -        } -    } -    return $(result) ; -} - -# Generic options. -flags darwin.compile OPTIONS <flags> ; - -# The following adds objective-c support to darwin. -# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759 - -generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ; -generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ; - -rule setup-address-model ( targets * : sources * : properties * ) -{ -    local ps = [ property-set.create $(properties) ] ; -    local arch = [ $(ps).get <architecture> ] ; -    local address-model = [ $(ps).get <address-model> ] ; -    local osx-version = [ $(ps).get <macosx-version> ] ; -    local gcc-version = [ $(ps).get <toolset-darwin:version> ] ; -    gcc-version = $(.real-version.$(gcc-version)) ; -    local options ; -     -    local support-ppc64 = 1 ; -     -    osx-version ?= $(.host-osx-version) ; - -    switch $(osx-version) -    { -        case iphone* : -        { -            support-ppc64 = ; -        } -         -        case * : -        if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ] -        { -            # When targeting 10.6: -            # - gcc 4.2 will give a compiler errir if ppc64 compilation is requested -            # - gcc 4.0 will compile fine, somehow, but then fail at link time -            support-ppc64 = ; -        } -    } -    switch $(arch) -    { -        case combined :  -        { -            if $(address-model) = 32_64 { -                if $(support-ppc64) { -                    options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;                     -                } else { -                    # Build 3-way binary -                    options = -arch i386 -arch ppc -arch x86_64 ; -                }                                 -            } else if $(address-model) = 64 { -                if $(support-ppc64) { -                    options = -arch x86_64 -arch ppc64 ; -                } else { -                    errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ; -                }                 -            } else { -                options = -arch i386 -arch ppc ; -            } -        } -         -        case x86 :  -        { -            if $(address-model) = 32_64 { -                options = -arch i386 -arch x86_64 ; -            } else if $(address-model) = 64 { -                options = -arch x86_64 ; -            } else { -                options = -arch i386 ; -            } -        }         -         -        case power : -        { -            if ! 
$(support-ppc64)  -              && (  $(address-model) = 32_64 || $(address-model) = 64 ) -            { -                errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ; -            } -             -            if $(address-model) = 32_64 { -                options = -arch ppc -arch ppc64 ; -            } else if $(address-model) = 64 { -                options = -arch ppc64 ; -            } else { -                options = -arch ppc ; -            } -        } -         -        case arm : -        { -            options = -arch armv6 ; -        }         -    } -     -    if $(options) -    { -        OPTIONS on $(targets) += $(options) ; -    }             -} - -rule setup-threading ( targets * : sources * : properties * ) -{ -    gcc.setup-threading $(targets) : $(sources) : $(properties) ; -} - -rule setup-fpic ( targets * : sources * : properties * ) -{ -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -} - -rule compile.m ( targets * : sources * : properties * ) -{ -    LANG on $(<) = "-x objective-c" ; -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.m -{ -    "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.mm  ( targets * : sources * : properties * ) -{ -    LANG on $(<) = "-x objective-c++" ; -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.mm -{ -    "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Set the max header padding to allow renaming of libs for installation. -flags darwin.link.dll OPTIONS : -headerpad_max_install_names ; - -# To link the static runtime we need to link to all the core runtime libraries. -flags darwin.link OPTIONS <runtime-link>static -    : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ; - -# Strip as much as possible when optimizing. -flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ; -flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ; - -# Dynamic/shared linking. -flags darwin.compile OPTIONS <link>shared : -dynamic ; - -# Misc options. -flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ; -#~ flags darwin.link OPTIONS : -fexceptions ; - -# Add the framework names to use. -flags darwin.link FRAMEWORK <framework> ; - -# -flags darwin.link FORCE_LOAD <force-load> ; - -# This is flag is useful for debugging the link step -# uncomment to see what libtool is doing under the hood -#~ flags darwin.link.dll OPTIONS : -Wl,-v ; - -_ = " " ; - -# set up the -F option to include the paths to any frameworks used. -local rule prepare-framework-path ( target + ) -{ -    # The -framework option only takes basename of the framework. -    # The -F option specifies the directories where a framework -    # is searched for.  So, if we find <framework> feature -    # with some path, we need to generate property -F option. -    local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ; - -    # Be sure to generate no -F if there's no path. 
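As the comment above explains, gcc's -framework option only accepts the framework's basename, while -F supplies the directories to search, so a <framework> value given with a path has to contribute both flags. A small standalone Python sketch of that split (the function name and sample paths are illustrative, not part of the toolset):

import os

def framework_flags(frameworks):
    # Split each framework value into a -F search directory (only when a
    # path was given) and the basename that -framework expects.
    flags = []
    for fw in frameworks:
        directory, name = os.path.split(fw)
        if directory:
            flags.append("-F" + directory)
        if name.endswith(".framework"):
            name = name[:-len(".framework")]
        flags += ["-framework", name]
    return flags

print(framework_flags(["/Library/Frameworks/SDL.framework", "Cocoa"]))
# ['-F/Library/Frameworks', '-framework', 'SDL', '-framework', 'Cocoa']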
-    for local framework-path in $(framework-paths) -    { -        if $(framework-path) != "" -        { -            FRAMEWORK_PATH on $(target) += -F$(framework-path) ; -        } -    } -} - -rule link ( targets * : sources * : properties * ) -{ -    DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -    prepare-framework-path $(<) ; -} - -# Note that using strip without any options was reported to result in broken -# binaries, at least on OS X 10.5.5, see: -#    http://svn.boost.org/trac/boost/ticket/2347 -# So we pass -S -x. -actions link bind LIBRARIES FORCE_LOAD -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS) -    $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)" -} - -rule link.dll ( targets * : sources * : properties * ) -{ -    setup-address-model $(targets) : $(sources) : $(properties) ; -    prepare-framework-path $(<) ; -} - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS) -} - -# We use libtool instead of ar to support universal binary linking -# TODO: Find a way to use the underlying tools, i.e. lipo, to do this. -actions piecemeal archive -{ -    "$(.LIBTOOL)" -static -o "$(<:T)"  $(ARFLAGS)  "$(>:T)" -} diff --git a/jam-files/boost-build/tools/darwin.py b/jam-files/boost-build/tools/darwin.py deleted file mode 100644 index c2919606..00000000 --- a/jam-files/boost-build/tools/darwin.py +++ /dev/null @@ -1,57 +0,0 @@ -#  Copyright (C) Christopher Currie 2003. Permission to copy, use, -#  modify, sell and distribute this software is granted provided this -#  copyright notice appears in all copies. This software is provided -# "as is" without express or implied warranty, and with no claim as to -#  its suitability for any purpose. - -#  Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ -#  for explanation why it's a separate toolset. 
- -import common, gcc, builtin -from b2.build import feature, toolset, type, action, generators -from b2.util.utility import * - -toolset.register ('darwin') - -toolset.inherit_generators ('darwin', [], 'gcc') -toolset.inherit_flags ('darwin', 'gcc') -toolset.inherit_rules ('darwin', 'gcc') - -def init (version = None, command = None, options = None): -    options = to_seq (options) - -    condition = common.check_init_parameters ('darwin', None, ('version', version)) -     -    command = common.get_invocation_command ('darwin', 'g++', command) - -    common.handle_options ('darwin', condition, command, options) -     -    gcc.init_link_flags ('darwin', 'darwin', condition) - -# Darwin has a different shared library suffix -type.set_generated_target_suffix ('SHARED_LIB', ['<toolset>darwin'], 'dylib') - -# we need to be able to tell the type of .dylib files -type.register_suffixes ('dylib', 'SHARED_LIB') - -feature.feature ('framework', [], ['free']) - -toolset.flags ('darwin.compile', 'OPTIONS', '<link>shared', ['-dynamic']) -toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp']) -toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates']) - -toolset.flags ('darwin.link', 'FRAMEWORK', '<framework>') - -# This is flag is useful for debugging the link step -# uncomment to see what libtool is doing under the hood -# toolset.flags ('darwin.link.dll', 'OPTIONS', None, '[-Wl,-v']) - -action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) - -# TODO: how to set 'bind LIBRARIES'? -action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) - -def darwin_archive (manager, targets, sources, properties): -    pass - -action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"']) diff --git a/jam-files/boost-build/tools/dmc.jam b/jam-files/boost-build/tools/dmc.jam deleted file mode 100644 index 8af8725a..00000000 --- a/jam-files/boost-build/tools/dmc.jam +++ /dev/null @@ -1,134 +0,0 @@ -# Digital Mars C++ - -# (C) Copyright Christof Meerwald 2003. -# (C) Copyright Aleksey Gurtovoy 2004. -# (C) Copyright Arjan Knepper 2006. -# -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# The following #// line will be used by the regression test table generation -# program as the column heading for HTML tables. Must not include version number. -#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a> - -import feature generators common ; -import toolset : flags ; -import sequence regex ; - -feature.extend toolset : dmc ; - -rule init ( version ? 
: command * : options * ) -{ -    local condition = [ common.check-init-parameters dmc : version $(version) ] ; -     -    local command = [ common.get-invocation-command dmc : dmc : $(command) ] ; -    command ?= dmc ; - -    common.handle-options dmc : $(condition) : $(command) : $(options) ;    -     -    if $(command) -    { -        command = [ common.get-absolute-tool-path $(command[-1]) ] ; -    }    -    root = $(command:D) ;     -     -    if $(root) -    {         -        # DMC linker is sensitive the the direction of slashes, and -        # won't link if forward slashes are used in command. -        root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ; -        flags dmc .root $(condition) : $(root)\\bin\\ ; -    } -    else -    { -        flags dmc .root $(condition) : "" ; -    }     -} - - -# Declare generators -generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ; -generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ; - -generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ; -generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ; -generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ; - - -# Declare flags -# dmc optlink has some limitation on the amount of debug-info included. Therefore only linenumbers are enabled in debug builds. -# flags dmc.compile OPTIONS <debug-symbols>on : -g ; -flags dmc.compile OPTIONS <debug-symbols>on : -gl ; -flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ; -flags dmc.link OPTIONS <debug-symbols>off : /PACKF ; - -flags dmc.compile OPTIONS <optimization>off : -S -o+none ; -flags dmc.compile OPTIONS <optimization>speed : -o+time ; -flags dmc.compile OPTIONS <optimization>space : -o+space ; -flags dmc.compile OPTIONS <exception-handling>on : -Ae ; -flags dmc.compile OPTIONS <rtti>on : -Ar ; -# FIXME: -# Compiling sources to be linked into a shared lib (dll) the -WD cflag should be used -# Compiling sources to be linked into a static lib (lib) or executable the -WA cflag should be used -# But for some reason the -WD cflag is always in use. -# flags dmc.compile OPTIONS <link>shared : -WD ; -# flags dmc.compile OPTIONS <link>static : -WA ; - -# Note that these two options actually imply multithreading support on DMC -# because there is no single-threaded dynamic runtime library. Specifying -# <threading>multi would be a bad idea, though, because no option would be -# matched when the build uses the default settings of <runtime-link>dynamic -# and <threading>single. 
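The conditional flags declared below select options by matching composite property conditions such as <runtime-link>static/<threading>multi. A rough standalone Python illustration of how such condition matching behaves (this is not Boost.Build's real flag machinery, and the table is abbreviated to two entries):

# Illustrative only: each entry maps a property condition (every listed
# property must match) to the DMC options it contributes.
FLAG_TABLE = [
    ({"runtime-link": "shared"}, ["-ND"]),
    ({"runtime-link": "static", "threading": "multi"}, ["-D_MT"]),
]

def options_for(properties):
    # Collect options whose condition is satisfied by the build properties.
    result = []
    for condition, opts in FLAG_TABLE:
        if all(properties.get(k) == v for k, v in condition.items()):
            result.extend(opts)
    return result

print(options_for({"runtime-link": "shared", "threading": "single"}))  # ['-ND']
print(options_for({"runtime-link": "static", "threading": "multi"}))   # ['-D_MT']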
-flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ; -flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ; - -flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ; -flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ; -flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ; -flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ; - -flags dmc.compile OPTIONS : <cflags> ; -flags dmc.compile.c++ OPTIONS : <cxxflags> ; - -flags dmc.compile DEFINES : <define> ; -flags dmc.compile INCLUDES : <include> ; - -flags dmc.link <linkflags> ; -flags dmc.archive OPTIONS <arflags> ; - -flags dmc LIBPATH <library-path> ; -flags dmc LIBRARIES <library-file> ; -flags dmc FINDLIBS <find-library-sa> ; -flags dmc FINDLIBS <find-library-st> ; - -actions together link bind LIBRARIES -{ -    "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def" -} - -actions together link.dll bind LIBRARIES -{ -    echo LIBRARY "$(<[1])" > $(<[2]:B).def -    echo DESCRIPTION 'A Library' >> $(<[2]:B).def -    echo EXETYPE NT >> $(<[2]:B).def -    echo SUBSYSTEM WINDOWS >> $(<[2]:B).def -    echo CODE	EXECUTE READ >> $(<[2]:B).def -    echo DATA	READ WRITE >> $(<[2]:B).def -    "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def" -} - -actions compile.c -{ -    "$(.root)dmc"  -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)" -} - -actions together piecemeal archive -{ -    "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/docutils.jam b/jam-files/boost-build/tools/docutils.jam deleted file mode 100644 index bf061617..00000000 --- a/jam-files/boost-build/tools/docutils.jam +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -# Support for docutils ReStructuredText processing. - -import type ; -import scanner ; -import generators ; -import os ; -import common ; -import toolset ; -import path ; -import feature : feature ; -import property ; - -.initialized = ; - -type.register ReST : rst ; - -class rst-scanner : common-scanner -{ -    rule __init__ ( paths * ) -    { -        common-scanner.__init__ . $(paths) ; -    } -     -    rule pattern ( ) -    { -         return "^[ 	]*\\.\\.[ 	]+include::[ 	]+([^ -]+)" -        "^[ 	]*\\.\\.[ 	]+image::[ 	]+([^ -]+)" -        "^[ 	]*\\.\\.[ 	]+figure::[ 	]+([^ -]+)" -        ; -    } -} - -scanner.register rst-scanner : include ; -type.set-scanner ReST : rst-scanner ; - -generators.register-standard docutils.html : ReST : HTML ; - -rule init ( docutils-dir ? : tools-dir ? ) -{ -    docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ; -    tools-dir ?= $(docutils-dir)/tools ; - -    if ! 
$(.initialized) -    { -        .initialized = true ; -        .docutils-dir = $(docutils-dir) ; -        .tools-dir = $(tools-dir:R="") ; -        -        .setup = [  -          common.prepend-path-variable-command PYTHONPATH  -            : $(.docutils-dir) $(.docutils-dir)/extras ] ; -    } -} - -rule html ( target : source : properties *  ) -{ -    if ! [ on $(target) return $(RST2XXX) ] -    { -        local python-cmd = [ property.select <python.interpreter> : $(properties) ] ; -        RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ; -    } -} - - -feature docutils : : free ; -feature docutils-html : : free ; -feature docutils-cmd : : free ; -toolset.flags docutils COMMON-FLAGS : <docutils> ; -toolset.flags docutils HTML-FLAGS : <docutils-html> ; -toolset.flags docutils RST2XXX : <docutils-cmd> ; -   -actions html -{ -    $(.setup) -    "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<) -} - diff --git a/jam-files/boost-build/tools/doxproc.py b/jam-files/boost-build/tools/doxproc.py deleted file mode 100644 index 4cbd5edd..00000000 --- a/jam-files/boost-build/tools/doxproc.py +++ /dev/null @@ -1,859 +0,0 @@ -#!/usr/bin/python -# Copyright 2006 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -''' -Processing of Doxygen generated XML. -''' - -import os -import os.path -import sys -import time -import string -import getopt -import glob -import re -import xml.dom.minidom - -     -def usage(): -    print ''' -Usage: -    %s options - -Options: -    --xmldir        Directory with the Doxygen xml result files. -    --output        Write the output BoostBook to the given location. -    --id            The ID of the top level BoostBook section. -    --title         The title of the top level BoostBook section. -    --enable-index  Generate additional index sections for classes and -                    types. -''' % ( sys.argv[0] ) - - -def get_args( argv = sys.argv[1:] ): -    spec = [ -        'xmldir=', -        'output=', -        'id=', -        'title=', -        'enable-index', -        'help' ] -    options = { -        '--xmldir' : 'xml', -        '--output' : None, -        '--id' : 'dox', -        '--title' : 'Doxygen' -        } -    ( option_pairs, other ) = getopt.getopt( argv, '', spec ) -    map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs ) -     -    if options.has_key( '--help' ): -        usage() -        sys.exit(1) -     -    return { -        'xmldir' : options['--xmldir'], -        'output' : options['--output'], -        'id' : options['--id'], -        'title' : options['--title'], -        'index' : options.has_key('--enable-index') -        } - -def if_attribute(node, attribute, true_value, false_value=None): -    if node.getAttribute(attribute) == 'yes': -        return true_value -    else: -        return false_value - -class Doxygen2BoostBook: -     -    def __init__( self, **kwargs ): -        ## -        self.args = kwargs -        self.args.setdefault('id','') -        self.args.setdefault('title','') -        self.args.setdefault('last_revision', time.asctime()) -        self.args.setdefault('index', False) -        self.id = '%(id)s.reference' % self.args -        self.args['id'] = self.id -        #~ This is our template BoostBook document we insert the generated content into. 
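For readers more used to argparse, the getopt-based get_args in doxproc.py above is roughly equivalent to the following standalone sketch (same option names and defaults; this helper is an illustration, not part of the original script):

import argparse

def get_args(argv=None):
    # Rough argparse equivalent of doxproc.py's getopt-based option parsing.
    parser = argparse.ArgumentParser(description="Process Doxygen XML into BoostBook.")
    parser.add_argument("--xmldir", default="xml",
                        help="directory with the Doxygen XML result files")
    parser.add_argument("--output", default=None,
                        help="write the output BoostBook to the given location")
    parser.add_argument("--id", default="dox",
                        help="ID of the top level BoostBook section")
    parser.add_argument("--title", default="Doxygen",
                        help="title of the top level BoostBook section")
    parser.add_argument("--enable-index", dest="index", action="store_true",
                        help="generate additional index sections")
    return vars(parser.parse_args(argv))

print(get_args(["--xmldir", "xml", "--enable-index"]))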
-        self.boostbook = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?> -<section id="%(id)s" name="%(title)s" last-revision="%(last_revision)s"> -    <title>%(title)s</title> -    <library-reference id="%(id)s.headers"> -        <title>Headers</title> -    </library-reference> -    <index id="%(id)s.classes"> -        <title>Classes</title> -    </index> -    <index id="%(id)s.index"> -        <title>Index</title> -    </index> -</section> -''' % self.args ) -        self.section = { -            'headers' : self._getChild('library-reference',id='%(id)s.headers' % self.args), -            'classes' : self._getChild('index',id='%(id)s.classes' % self.args), -            'index' : self._getChild('index',id='%(id)s.index' % self.args) -            } -        #~ Remove the index sections if we aren't generating it. -        if not self.args['index']: -            self.section['classes'].parentNode.removeChild(self.section['classes']) -            self.section['classes'].unlink() -            del self.section['classes'] -            self.section['index'].parentNode.removeChild(self.section['index']) -            self.section['index'].unlink() -            del self.section['index'] -        #~ The symbols, per Doxygen notion, that we translated. -        self.symbols = {} -        #~ Map of Doxygen IDs and BoostBook IDs, so we can translate as needed. -        self.idmap = {} -        #~ Marks generation, to prevent redoing it. -        self.generated = False -     -    #~ Add an Doxygen generated XML document to the content we are translating. -    def addDox( self, document ): -        self._translateNode(document.documentElement) -     -    #~ Turns the internal XML tree into an output UTF-8 string. -    def tostring( self ): -        self._generate() -        #~ return self.boostbook.toprettyxml('  ') -        return self.boostbook.toxml('utf-8') -     -    #~ Does post-processing on the partial generated content to generate additional info -    #~ now that we have the complete source documents. -    def _generate( self ): -        if not self.generated: -            self.generated = True -            symbols = self.symbols.keys() -            symbols.sort() -            #~ Populate the header section. -            for symbol in symbols: -                if self.symbols[symbol]['kind'] in ('header'): -                    self.section['headers'].appendChild(self.symbols[symbol]['dom']) -            for symbol in symbols: -                if self.symbols[symbol]['kind'] not in ('namespace', 'header'): -                    container = self._resolveContainer(self.symbols[symbol], -                        self.symbols[self.symbols[symbol]['header']]['dom']) -                    if container.nodeName != 'namespace': -                        ## The current BoostBook to Docbook translation doesn't -                        ## respect, nor assign, IDs to inner types of any kind. -                        ## So nuke the ID entry so as not create bogus links. -                        del self.idmap[self.symbols[symbol]['id']] -                    container.appendChild(self.symbols[symbol]['dom']) -            self._rewriteIDs(self.boostbook.documentElement) -     -    #~ Rewrite the various IDs from Doxygen references to the newly created -    #~ BoostBook references. -    def _rewriteIDs( self, node ): -        if node.nodeName in ('link'): -            if (self.idmap.has_key(node.getAttribute('linkend'))): -                #~ A link, and we have someplace to repoint it at. 
-                node.setAttribute('linkend',self.idmap[node.getAttribute('linkend')]) -            else: -                #~ A link, but we don't have a generated target for it. -                node.removeAttribute('linkend') -        elif hasattr(node,'hasAttribute') and node.hasAttribute('id') and self.idmap.has_key(node.getAttribute('id')): -            #~ Simple ID, and we have a translation. -            node.setAttribute('id',self.idmap[node.getAttribute('id')]) -        #~ Recurse, and iterate, depth-first traversal which turns out to be -        #~ left-to-right and top-to-bottom for the document. -        if node.firstChild: -            self._rewriteIDs(node.firstChild) -        if node.nextSibling: -            self._rewriteIDs(node.nextSibling) -     -    def _resolveContainer( self, cpp, root ): -        container = root -        for ns in cpp['namespace']: -            node = self._getChild('namespace',name=ns,root=container) -            if not node: -                node = container.appendChild( -                    self._createNode('namespace',name=ns)) -            container = node -        for inner in cpp['name'].split('::'): -            node = self._getChild(name=inner,root=container) -            if not node: -                break -            container = node -        return container -     -    def _setID( self, id, name ): -        self.idmap[id] = name.replace('::','.').replace('/','.') -        #~ print '--| setID:',id,'::',self.idmap[id] -     -    #~ Translate a given node within a given context. -    #~ The translation dispatches to a local method of the form -    #~ "_translate[_context0,...,_contextN]", and the keyword args are -    #~ passed along. If there is no translation handling method we -    #~ return None. -    def _translateNode( self, *context, **kwargs ): -        node = None -        names = [ ] -        for c in context: -            if c: -                if not isinstance(c,xml.dom.Node): -                    suffix = '_'+c.replace('-','_') -                else: -                    suffix = '_'+c.nodeName.replace('-','_') -                    node = c -                names.append('_translate') -                names = map(lambda x: x+suffix,names) -        if node: -            for name in names: -                if hasattr(self,name): -                    return getattr(self,name)(node,**kwargs) -        return None -     -    #~ Translates the children of the given parent node, appending the results -    #~ to the indicated target. For nodes not translated by the translation method -    #~ it copies the child over and recurses on that child to translate any -    #~ possible interior nodes. Hence this will translate the entire subtree. -    def _translateChildren( self, parent, **kwargs ): -        target = kwargs['target'] -        for n in parent.childNodes: -            child = self._translateNode(n,target=target) -            if child: -                target.appendChild(child) -            else: -                child = n.cloneNode(False) -                if hasattr(child,'data'): -                    child.data = re.sub(r'\s+',' ',child.data) -                target.appendChild(child) -                self._translateChildren(n,target=child) -     -    #~ Translate the given node as a description, into the description subnode -    #~ of the target. If no description subnode is present in the target it -    #~ is created. 
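The description handling relies on a get-or-create pattern over minidom children: look the tag up under the target and append a fresh element only when it is missing. A minimal standalone sketch of that pattern (helper name and sample XML are illustrative):

import xml.dom.minidom

def get_or_create_child(doc, parent, tag):
    # Return the first child element named `tag`, creating it if missing.
    for node in parent.childNodes:
        if node.nodeName == tag:
            return node
    return parent.appendChild(doc.createElement(tag))

doc = xml.dom.minidom.parseString("<method name='f'/>")
target = doc.documentElement
desc = get_or_create_child(doc, target, "description")
desc.appendChild(doc.createTextNode("Does something."))
print(doc.toxml())
# <?xml version="1.0" ?><method name="f"><description>Does something.</description></method>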
-    def _translateDescription( self, node, target=None, tag='description', **kwargs ): -        description = self._getChild(tag,root=target) -        if not description: -            description = target.appendChild(self._createNode(tag)) -        self._translateChildren(node,target=description) -        return description -     -    #~ Top level translation of: <doxygen ...>...</doxygen>, -    #~ translates the children. -    def _translate_doxygen( self, node ): -        #~ print '_translate_doxygen:', node.nodeName -        result = [] -        for n in node.childNodes: -            newNode = self._translateNode(n) -            if newNode: -                result.append(newNode) -        return result -     -    #~ Top level translation of: -    #~ <doxygenindex ...> -    #~   <compound ...> -    #~     <member ...> -    #~       <name>...</name> -    #~     </member> -    #~     ... -    #~   </compound> -    #~   ... -    #~ </doxygenindex> -    #~ builds the class and symbol sections, if requested. -    def _translate_doxygenindex( self, node ): -        #~ print '_translate_doxygenindex:', node.nodeName -        if self.args['index']: -            entries = [] -            classes = [] -            #~ Accumulate all the index entries we care about. -            for n in node.childNodes: -                if n.nodeName == 'compound': -                    if n.getAttribute('kind') not in ('file','dir','define'): -                        cpp = self._cppName(self._getChildData('name',root=n)) -                        entry = { -                            'name' : cpp['name'], -                            'compoundname' : cpp['compoundname'], -                            'id' : n.getAttribute('refid') -                            } -                        if n.getAttribute('kind') in ('class','struct'): -                            classes.append(entry) -                        entries.append(entry) -                        for m in n.childNodes: -                            if m.nodeName == 'member': -                                cpp = self._cppName(self._getChildData('name',root=m)) -                                entry = { -                                    'name' : cpp['name'], -                                    'compoundname' : cpp['compoundname'], -                                    'id' : n.getAttribute('refid') -                                    } -                                if hasattr(m,'getAttribute') and m.getAttribute('kind') in ('class','struct'): -                                    classes.append(entry) -                                entries.append(entry) -            #~ Put them in a sensible order. -            entries.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower())) -            classes.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower())) -            #~ And generate the BoostBook for them. -            self._translate_index_(entries,target=self.section['index']) -            self._translate_index_(classes,target=self.section['classes']) -        return None -     -    #~ Translate a set of index entries in the BoostBook output. The output -    #~ is grouped into groups of the first letter of the entry names. 
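The grouping by first letter that the index generation performs can be pictured with itertools.groupby over case-insensitively sorted names. A small illustration (the sample entries are made up):

from itertools import groupby

entries = ["any", "array", "bind", "Bimap", "function"]

# Sort case-insensitively, then group by the uppercased first letter,
# one group per indexdiv in the generated BoostBook index.
entries.sort(key=str.lower)
for letter, group in groupby(entries, key=lambda name: name[0].upper()):
    print(letter, list(group))
# A ['any', 'array']
# B ['Bimap', 'bind']
# F ['function']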
-    def _translate_index_(self, entries, target=None, **kwargs ): -        i = 0 -        targetID = target.getAttribute('id') -        while i < len(entries): -            dividerKey = entries[i]['name'][0].upper() -            divider = target.appendChild(self._createNode('indexdiv',id=targetID+'.'+dividerKey)) -            divider.appendChild(self._createText('title',dividerKey)) -            while i < len(entries) and dividerKey == entries[i]['name'][0].upper(): -                iename = entries[i]['name'] -                ie = divider.appendChild(self._createNode('indexentry')) -                ie = ie.appendChild(self._createText('primaryie',iename)) -                while i < len(entries) and entries[i]['name'] == iename: -                    ie.appendChild(self.boostbook.createTextNode(' (')) -                    ie.appendChild(self._createText( -                        'link',entries[i]['compoundname'],linkend=entries[i]['id'])) -                    ie.appendChild(self.boostbook.createTextNode(')')) -                    i += 1 -     -    #~ Translate a <compounddef ...>...</compounddef>, -    #~ by retranslating with the "kind" of compounddef. -    def _translate_compounddef( self, node, target=None, **kwargs ): -        return self._translateNode(node,node.getAttribute('kind')) -     -    #~ Translate a <compounddef kind="namespace"...>...</compounddef>. For -    #~ namespaces we just collect the information for later use as there is no -    #~ currently namespaces are not included in the BoostBook format. In the future -    #~ it might be good to generate a namespace index. -    def _translate_compounddef_namespace( self, node, target=None, **kwargs ): -        namespace = { -            'id' : node.getAttribute('id'), -            'kind' : 'namespace', -            'name' : self._getChildData('compoundname',root=node), -            'brief' : self._getChildData('briefdescription',root=node), -            'detailed' : self._getChildData('detaileddescription',root=node), -            'parsed' : False -            } -        if self.symbols.has_key(namespace['name']): -            if not self.symbols[namespace['name']]['parsed']: -                self.symbols[namespace['name']]['parsed'] = True -                #~ for n in node.childNodes: -                    #~ if hasattr(n,'getAttribute'): -                        #~ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) -        else: -            self.symbols[namespace['name']] = namespace -            #~ self._setID(namespace['id'],namespace['name']) -        return None -     -    #~ Translate a <compounddef kind="class"...>...</compounddef>, which -    #~ forwards to the kind=struct as they are the same. -    def _translate_compounddef_class( self, node, target=None, **kwargs ): -        return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs) -     -    #~ Translate a <compounddef kind="struct"...>...</compounddef> into: -    #~ <header id="?" name="?"> -    #~   <struct name="?"> -    #~     ... -    #~   </struct> -    #~ </header> -    def _translate_compounddef_struct( self, node, tag='struct', target=None, **kwargs ): -        result = None -        includes = self._getChild('includes',root=node) -        if includes: -            ## Add the header into the output table. -            self._translate_compounddef_includes_(includes,includes,**kwargs) -            ## Compounds are the declared symbols, classes, types, etc. 
-            ## We add them to the symbol table, along with the partial DOM for them -            ## so that they can be organized into the output later. -            compoundname = self._getChildData('compoundname',root=node) -            compoundname = self._cppName(compoundname) -            self._setID(node.getAttribute('id'),compoundname['compoundname']) -            struct = self._createNode(tag,name=compoundname['name'].split('::')[-1]) -            self.symbols[compoundname['compoundname']] = { -                'header' : includes.firstChild.data, -                'namespace' : compoundname['namespace'], -                'id' : node.getAttribute('id'), -                'kind' : tag, -                'name' : compoundname['name'], -                'dom' : struct -                } -            ## Add the children which will be the members of the struct. -            for n in node.childNodes: -                self._translateNode(n,target=struct,scope=compoundname['compoundname']) -            result = struct -        return result -     -    #~ Translate a <compounddef ...><includes ...>...</includes></compounddef>, -    def _translate_compounddef_includes_( self, node, target=None, **kwargs ): -        name = node.firstChild.data -        if not self.symbols.has_key(name): -            self._setID(node.getAttribute('refid'),name) -            self.symbols[name] = { -                'kind' : 'header', -                'id' : node.getAttribute('refid'), -                'dom' : self._createNode('header', -                    id=node.getAttribute('refid'), -                    name=name) -                } -        return None -     -    #~ Translate a <basecompoundref...>...</basecompoundref> into: -    #~ <inherit access="?"> -    #~   ... -    #~ </inherit> -    def _translate_basecompoundref( self, ref, target=None, **kwargs ): -        inherit = target.appendChild(self._createNode('inherit', -            access=ref.getAttribute('prot'))) -        self._translateChildren(ref,target=inherit) -        return -     -    #~ Translate: -    #~   <templateparamlist> -    #~     <param> -    #~       <type>...</type> -    #~       <declname>...</declname> -    #~       <defname>...</defname> -    #~       <defval>...</defval> -    #~     </param> -    #~     ... -    #~   </templateparamlist> -    #~ Into: -    #~   <template> -    #~     <template-type-parameter name="?" 
/> -    #~     <template-nontype-parameter name="?"> -    #~       <type>?</type> -    #~       <default>?</default> -    #~     </template-nontype-parameter> -    #~   </template> -    def _translate_templateparamlist( self, templateparamlist, target=None, **kwargs ): -        template = target.appendChild(self._createNode('template')) -        for param in templateparamlist.childNodes: -            if param.nodeName == 'param': -                type = self._getChildData('type',root=param) -                defval = self._getChild('defval',root=param) -                paramKind = None -                if type in ('class','typename'): -                    paramKind = 'template-type-parameter' -                else: -                    paramKind = 'template-nontype-parameter' -                templateParam = template.appendChild( -                    self._createNode(paramKind, -                        name=self._getChildData('declname',root=param))) -                if paramKind == 'template-nontype-parameter': -                    template_type = templateParam.appendChild(self._createNode('type')) -                    self._translate_type( -                        self._getChild('type',root=param),target=template_type) -                if defval: -                    value = self._getChildData('ref',root=defval.firstChild) -                    if not value: -                        value = self._getData(defval) -                    templateParam.appendChild(self._createText('default',value)) -        return template -     -    #~ Translate: -    #~   <briefdescription>...</briefdescription> -    #~ Into: -    #~   <purpose>...</purpose> -    def _translate_briefdescription( self, brief, target=None, **kwargs ): -        self._translateDescription(brief,target=target,**kwargs) -        return self._translateDescription(brief,target=target,tag='purpose',**kwargs) -     -    #~ Translate: -    #~   <detaileddescription>...</detaileddescription> -    #~ Into: -    #~   <description>...</description> -    def _translate_detaileddescription( self, detailed, target=None, **kwargs ): -        return self._translateDescription(detailed,target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="?">...</sectiondef> -    #~ With kind specific translation. -    def _translate_sectiondef( self, sectiondef, target=None, **kwargs ): -        self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs) -     -    #~ Translate non-function sections. -    def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ): -        for n in sectiondef.childNodes: -            if hasattr(n,'getAttribute'): -                self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) -        return None -     -    #~ Translate: -    #~   <sectiondef kind="public-type">...</sectiondef> -    def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ): -        return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="public-sttrib">...</sectiondef> -    def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs): -        return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="?-func">...</sectiondef> -    #~ All the various function group translations end up here for which -    #~ they are translated into: -    #~   <method-group name="?"> -    #~   ... 
-    #~   </method-group> -    def _translate_sectiondef_func_( self, sectiondef, name='functions', target=None, **kwargs ): -        members = target.appendChild(self._createNode('method-group',name=name)) -        for n in sectiondef.childNodes: -            if hasattr(n,'getAttribute'): -                self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs) -        return members -     -    #~ Translate: -    #~   <sectiondef kind="public-func">...</sectiondef> -    def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ): -        return self._translate_sectiondef_func_(sectiondef, -            name='public member functions',target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="public-static-func">...</sectiondef> -    def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs): -        return self._translate_sectiondef_func_(sectiondef, -            name='public static functions',target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="protected-func">...</sectiondef> -    def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ): -        return self._translate_sectiondef_func_(sectiondef, -            name='protected member functions',target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="private-static-func">...</sectiondef> -    def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs): -        return self._translate_sectiondef_func_(sectiondef, -            name='private static functions',target=target,**kwargs) -     -    #~ Translate: -    #~   <sectiondef kind="public-func">...</sectiondef> -    def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ): -        return self._translate_sectiondef_func_(sectiondef, -            name='private member functions',target=target,**kwargs) - -    #~ Translate: -    #~   <sectiondef kind="user-defined"><header>...</header>...</sectiondef> -    def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ): -        return self._translate_sectiondef_func_(sectiondef, -            name=self._getChildData('header', root=sectiondef),target=target,**kwargs) -     -    #~ Translate: -    #~   <memberdef kind="typedef" id="?"> -    #~     <name>...</name> -    #~   </memberdef> -    #~ To: -    #~   <typedef id="?" name="?"> -    #~     <type>...</type> -    #~   </typedef> -    def _translate_memberdef_typedef( self, memberdef, target=None, scope=None, **kwargs ): -        self._setID(memberdef.getAttribute('id'), -            scope+'::'+self._getChildData('name',root=memberdef)) -        typedef = target.appendChild(self._createNode('typedef', -            id=memberdef.getAttribute('id'), -            name=self._getChildData('name',root=memberdef))) -        typedef_type = typedef.appendChild(self._createNode('type')) -        self._translate_type(self._getChild('type',root=memberdef),target=typedef_type) -        return typedef -     -    #~ Translate: -    #~   <memberdef kind="function" id="?" const="?" static="?" explicit="?" inline="?"> -    #~     <name>...</name> -    #~   </memberdef> -    #~ To: -    #~   <method name="?" cv="?" specifiers="?"> -    #~     ... 
-    #~   </method> -    def _translate_memberdef_function( self, memberdef, target=None, scope=None, **kwargs ): -        name = self._getChildData('name',root=memberdef) -        self._setID(memberdef.getAttribute('id'),scope+'::'+name) -        ## Check if we have some specific kind of method. -        if name == scope.split('::')[-1]: -            kind = 'constructor' -            target = target.parentNode -        elif name == '~'+scope.split('::')[-1]: -            kind = 'destructor' -            target = target.parentNode -        elif name == 'operator=': -            kind = 'copy-assignment' -            target = target.parentNode -        else: -            kind = 'method' -        method = target.appendChild(self._createNode(kind, -            # id=memberdef.getAttribute('id'), -            name=name, -            cv=' '.join([ -                if_attribute(memberdef,'const','const','').strip() -                ]), -            specifiers=' '.join([ -                if_attribute(memberdef,'static','static',''), -                if_attribute(memberdef,'explicit','explicit',''), -                if_attribute(memberdef,'inline','inline','') -                ]).strip() -            )) -        ## We iterate the children to translate each part of the function. -        for n in memberdef.childNodes: -            self._translateNode(memberdef,'function',n,target=method) -        return method -     -    #~ Translate: -    #~   <memberdef kind="function"...><templateparamlist>...</templateparamlist></memberdef> -    def _translate_memberdef_function_templateparamlist( -        self, templateparamlist, target=None, **kwargs ): -        return self._translate_templateparamlist(templateparamlist,target=target,**kwargs) -     -    #~ Translate: -    #~   <memberdef kind="function"...><type>...</type></memberdef> -    #~ To: -    #~   ...<type>?</type> -    def _translate_memberdef_function_type( self, resultType, target=None, **kwargs ): -        methodType = self._createNode('type') -        self._translate_type(resultType,target=methodType) -        if methodType.hasChildNodes(): -            target.appendChild(methodType) -        return methodType -     -    #~ Translate: -    #~   <memberdef kind="function"...><briefdescription>...</briefdescription></memberdef> -    def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ): -        result = self._translateDescription(description,target=target,**kwargs) -        ## For functions if we translate the brief docs to the purpose they end up -        ## right above the regular description. And since we just added the brief to that -        ## on the previous line, don't bother with the repetition. 
-        # result = self._translateDescription(description,target=target,tag='purpose',**kwargs) -        return result -     -    #~ Translate: -    #~   <memberdef kind="function"...><detaileddescription>...</detaileddescription></memberdef> -    def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ): -        return self._translateDescription(description,target=target,**kwargs) -     -    #~ Translate: -    #~   <memberdef kind="function"...><inbodydescription>...</inbodydescription></memberdef> -    def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ): -        return self._translateDescription(description,target=target,**kwargs) -     -    #~ Translate: -    #~   <memberdef kind="function"...><param>...</param></memberdef> -    def _translate_memberdef_function_param( self, param, target=None, **kwargs ): -        return self._translate_param(param,target=target,**kwargs) -     -    #~ Translate: -    #~   <memberdef kind="variable" id="?"> -    #~     <name>...</name> -    #~     <type>...</type> -    #~   </memberdef> -    #~ To: -    #~   <data-member id="?" name="?"> -    #~     <type>...</type> -    #~   </data-member> -    def _translate_memberdef_variable( self, memberdef, target=None, scope=None, **kwargs ): -        self._setID(memberdef.getAttribute('id'), -            scope+'::'+self._getChildData('name',root=memberdef)) -        data_member = target.appendChild(self._createNode('data-member', -            id=memberdef.getAttribute('id'), -            name=self._getChildData('name',root=memberdef))) -        data_member_type = data_member.appendChild(self._createNode('type')) -        self._translate_type(self._getChild('type',root=memberdef),target=data_member_type) -     -    #~ Translate: -    #~   <memberdef kind="enum" id="?"> -    #~     <name>...</name> -    #~     ... -    #~   </memberdef> -    #~ To: -    #~   <enum id="?" name="?"> -    #~     ... -    #~   </enum> -    def _translate_memberdef_enum( self, memberdef, target=None, scope=None, **kwargs ): -        self._setID(memberdef.getAttribute('id'), -            scope+'::'+self._getChildData('name',root=memberdef)) -        enum = target.appendChild(self._createNode('enum', -            id=memberdef.getAttribute('id'), -            name=self._getChildData('name',root=memberdef))) -        for n in memberdef.childNodes: -            self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs) -        return enum -     -    #~ Translate: -    #~   <memberdef kind="enum"...> -    #~     <enumvalue id="?"> -    #~       <name>...</name> -    #~       <initializer>...</initializer> -    #~     </enumvalue> -    #~   </memberdef> -    #~ To: -    #~   <enumvalue id="?" 
name="?"> -    #~     <default>...</default> -    #~   </enumvalue> -    def _translate_memberdef_enum_enumvalue( self, enumvalue, target=None, scope=None, **kwargs ): -        self._setID(enumvalue.getAttribute('id'), -            scope+'::'+self._getChildData('name',root=enumvalue)) -        value = target.appendChild(self._createNode('enumvalue', -            id=enumvalue.getAttribute('id'), -            name=self._getChildData('name',root=enumvalue))) -        initializer = self._getChild('initializer',root=enumvalue) -        if initializer: -            self._translateChildren(initializer, -                target=target.appendChild(self._createNode('default'))) -        return value -     -    #~ Translate: -    #~   <param> -    #~     <type>...</type> -    #~     <declname>...</declname> -    #~     <defval>...</defval> -    #~   </param> -    #~ To: -    #~   <parameter name="?"> -    #~     <paramtype>...</paramtype> -    #~     ... -    #~   </parameter> -    def _translate_param( self, param, target=None, **kwargs): -        parameter = target.appendChild(self._createNode('parameter', -            name=self._getChildData('declname',root=param))) -        paramtype = parameter.appendChild(self._createNode('paramtype')) -        self._translate_type(self._getChild('type',root=param),target=paramtype) -        defval = self._getChild('defval',root=param) -        if defval: -            self._translateChildren(self._getChild('defval',root=param),target=parameter) -        return parameter -     -    #~ Translate: -    #~   <ref kindref="?" ...>...</ref> -    def _translate_ref( self, ref, **kwargs ): -        return self._translateNode(ref,ref.getAttribute('kindref')) -     -    #~ Translate: -    #~   <ref refid="?" kindref="compound">...</ref> -    #~ To: -    #~   <link linkend="?"><classname>...</classname></link> -    def _translate_ref_compound( self, ref, **kwargs ): -        result = self._createNode('link',linkend=ref.getAttribute('refid')) -        classname = result.appendChild(self._createNode('classname')) -        self._translateChildren(ref,target=classname) -        return result -     -    #~ Translate: -    #~   <ref refid="?" kindref="member">...</ref> -    #~ To: -    #~   <link linkend="?">...</link> -    def _translate_ref_member( self, ref, **kwargs ): -        result = self._createNode('link',linkend=ref.getAttribute('refid')) -        self._translateChildren(ref,target=result) -        return result -     -    #~ Translate: -    #~   <type>...</type> -    def _translate_type( self, type, target=None, **kwargs ): -        result = self._translateChildren(type,target=target,**kwargs) -        #~ Filter types to clean up various readability problems, most notably -        #~ with really long types. 
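The filter mentioned above replaces unreadable metaprogramming types with the word "unspecified" when the serialized type matches a few known prefixes. A standalone sketch of that heuristic on plain strings (the function name is illustrative; the real code operates on the serialized <type> node):

import re

UNSPECIFIED_PREFIXES = (
    "boost::mpl::",
    "BOOST_PP_",
)

def simplify_type(type_text):
    # Hide hard-to-read metaprogramming machinery behind "unspecified".
    if type_text.startswith(UNSPECIFIED_PREFIXES):
        return "unspecified"
    if re.match(r"boost::(lazy_)?(enable|disable)_if", type_text):
        return "unspecified"
    return type_text

print(simplify_type("boost::mpl::if_<C, int, long>::type"))  # unspecified
print(simplify_type("boost::enable_if<C, int>::type"))       # unspecified
print(simplify_type("std::string"))                          # std::string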
-        xml = target.toxml('utf-8'); -        if ( -            xml.startswith('<type>boost::mpl::') or -            xml.startswith('<type>BOOST_PP_') or -            re.match('<type>boost::(lazy_)?(enable|disable)_if',xml) -            ): -            while target.firstChild: -                target.removeChild(target.firstChild) -            target.appendChild(self._createText('emphasis','unspecified')) -        return result -     -    def _getChild( self, tag = None, id = None, name = None, root = None ): -        if not root: -            root = self.boostbook.documentElement -        for n in root.childNodes: -            found = True -            if tag and found: -                found = found and tag == n.nodeName -            if id and found: -                if n.hasAttribute('id'): -                    found = found and n.getAttribute('id') == id -                else: -                    found = found and n.hasAttribute('id') and n.getAttribute('id') == id -            if name and found: -                found = found and n.hasAttribute('name') and n.getAttribute('name') == name -            if found: -                #~ print '--|', n -                return n -        return None -     -    def _getChildData( self, tag, **kwargs ): -        return self._getData(self._getChild(tag,**kwargs),**kwargs) -     -    def _getData( self, node, **kwargs ): -        if node: -            text = self._getChild('#text',root=node) -            if text: -                return text.data.strip() -        return '' -     -    def _cppName( self, type ): -        parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':')) -        result = { -            'compoundname' : parts.group(1), -            'namespace' : parts.group(1).split('::')[0:-1], -            'name' : parts.group(1).split('::')[-1], -            'specialization' : parts.group(2) -            } -        if result['namespace'] and len(result['namespace']) > 0: -            namespace = '::'.join(result['namespace']) -            while ( -                len(result['namespace']) > 0 and ( -                    not self.symbols.has_key(namespace) or -                    self.symbols[namespace]['kind'] != 'namespace') -                ): -                result['name'] = result['namespace'].pop()+'::'+result['name'] -                namespace = '::'.join(result['namespace']) -        return result -     -    def _createNode( self, tag, **kwargs ): -        result = self.boostbook.createElement(tag) -        for k in kwargs.keys(): -            if kwargs[k] != '': -                if k == 'id': -                    result.setAttribute('id',kwargs[k]) -                else: -                    result.setAttribute(k,kwargs[k]) -        return result -     -    def _createText( self, tag, data, **kwargs ): -        result = self._createNode(tag,**kwargs) -        data = data.strip() -        if len(data) > 0: -            result.appendChild(self.boostbook.createTextNode(data)) -        return result - - -def main( xmldir=None, output=None, id=None, title=None, index=False ): -    #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output) -     -    input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) ) -    input.sort -    translator = Doxygen2BoostBook(id=id, title=title, index=index) -    #~ Feed in the namespaces first to build up the set of namespaces -    #~ and definitions so that lookup is unambiguous when reading in the definitions. 
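That ordering requirement, namespace files first and leading-underscore files skipped, amounts to a simple partition of the glob results. An illustrative standalone sketch (the helper name is made up):

import glob
import os

def ordered_dox_files(xmldir):
    # Return Doxygen XML files with namespace*.xml first, so namespaces are
    # known before the declarations that refer to them are read.
    files = sorted(glob.glob(os.path.join(xmldir, "*.xml")))
    namespaces = [f for f in files if os.path.basename(f).startswith("namespace")]
    declarations = [f for f in files
                    if not os.path.basename(f).startswith(("namespace", "_"))]
    return namespaces + declarations

print(ordered_dox_files("xml"))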
-    namespace_files = filter( -        lambda x: -            os.path.basename(x).startswith('namespace'), -        input) -    decl_files = filter( -        lambda x: -            not os.path.basename(x).startswith('namespace') and not os.path.basename(x).startswith('_'), -        input) -    for dox in namespace_files: -        #~ print '--|',os.path.basename(dox) -        translator.addDox(xml.dom.minidom.parse(dox)) -    for dox in decl_files: -        #~ print '--|',os.path.basename(dox) -        translator.addDox(xml.dom.minidom.parse(dox)) -     -    if output: -        output = open(output,'w') -    else: -        output = sys.stdout -    if output: -        output.write(translator.tostring()) - - -main( **get_args() ) diff --git a/jam-files/boost-build/tools/doxygen-config.jam b/jam-files/boost-build/tools/doxygen-config.jam deleted file mode 100644 index 2cd2ccae..00000000 --- a/jam-files/boost-build/tools/doxygen-config.jam +++ /dev/null @@ -1,11 +0,0 @@ -#~ Copyright 2005, 2006 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for Doxygen tools. To use, just import this module. - -import toolset : using ; - -ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ; - -using doxygen ; diff --git a/jam-files/boost-build/tools/doxygen.jam b/jam-files/boost-build/tools/doxygen.jam deleted file mode 100644 index 8394848d..00000000 --- a/jam-files/boost-build/tools/doxygen.jam +++ /dev/null @@ -1,776 +0,0 @@ -# Copyright 2003, 2004 Douglas Gregor -# Copyright 2003, 2004, 2005 Vladimir Prus -# Copyright 2006 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines rules to handle generation of various outputs from source -# files documented with doxygen comments. The supported transformations are: -# -# * Source -> Doxygen XML -> BoostBook XML -# * Source -> Doxygen HTML -# -# The type of transformation is selected based on the target requested. For -# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an -# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen -# HTML specifying a target with an ".html" suffix will produce a directory -# <target> with the Doxygen html files, and a <target>.html file redirecting to -# that directory. - -import "class" : new ; -import targets ; -import feature ; -import property ; -import generators ; -import boostbook ; -import type ; -import path ; -import print ; -import regex ; -import stage ; -import project ; -import xsltproc ; -import make ; -import os ; -import toolset : flags ; -import alias ; -import common ; -import modules ; -import project ; -import utility ; -import errors ; - - -# Use to specify extra configuration paramters. These get translated -# into a doxyfile which configures the building of the docs. -feature.feature doxygen:param : : free ; - -# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option. -feature.feature prefix : : free ; - -# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option. -feature.feature reftitle : : free ; - -# Which processor to use for various translations from Doxygen. -feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ; - -# To generate, or not, index sections. 
-feature.feature doxygen.doxproc.index : no yes : propagated incidental ; - -# The ID for the resulting BoostBook reference section. -feature.feature doxygen.doxproc.id : : free ; - -# The title for the resulting BoostBook reference section. -feature.feature doxygen.doxproc.title : : free ; - -# Location for images when generating XML -feature.feature doxygen:xml-imagedir : : free ; - -# Indicates whether the entire directory should be deleted -feature.feature doxygen.rmdir : off on : optional incidental ; - -# Doxygen configuration input file. -type.register DOXYFILE : doxyfile ; - -# Doxygen XML multi-file output. -type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ; - -# Doxygen XML coallesed output. -type.register DOXYGEN_XML : doxygen : XML ; - -# Doxygen HTML multifile directory. -type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ; - -# Redirection HTML file to HTML multifile directory. -type.register DOXYGEN_HTML : : HTML ; - -type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ; - -# Initialize the Doxygen module. Parameters are: -#   name: the name of the 'doxygen' executable. If not specified, the name -#         'doxygen' will be used -# -rule init ( name ? ) -{ -    if ! $(.initialized) -    { -        .initialized = true ; - -        .doxproc = [ modules.binding $(__name__) ] ; -        .doxproc = $(.doxproc:D)/doxproc.py ; - -        generators.register-composing doxygen.headers-to-doxyfile -            : H HPP CPP : DOXYFILE ; -        generators.register-standard doxygen.run -            : DOXYFILE : DOXYGEN_XML_MULTIFILE ; -        generators.register-standard doxygen.xml-dir-to-boostbook -            : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ; -        generators.register-standard doxygen.xml-to-boostbook -            : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ; -        generators.register-standard doxygen.collect -            : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ; -        generators.register-standard doxygen.run -            : DOXYFILE : DOXYGEN_HTML_MULTIFILE ; -        generators.register-standard doxygen.html-redirect -            : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ; -        generators.register-standard doxygen.copy-latex-pngs -            : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ; - -        IMPORT $(__name__) : doxygen : : doxygen ; -    } - -    if $(name) -    { -        modify-config ; -        .doxygen = $(name) ; -        check-doxygen ; -    } - -    if ! $(.doxygen) -    { -        check-doxygen ; -    } -} - -rule freeze-config ( ) -{ -    if ! $(.initialized) -    { -        errors.user-error "doxygen must be initialized before it can be used." ; -    } -    if ! $(.config-frozen) -    { -        .config-frozen = true ; - -        if [ .is-cygwin ] -        { -            .is-cygwin = true ; -        } -    } -} - -rule modify-config ( ) -{ -    if $(.config-frozen) -    { -        errors.user-error "Cannot change doxygen after it has been used." 
; -    } -} - -rule check-doxygen ( ) -{ -    if --debug-configuration in [ modules.peek : ARGV ] -    { -        ECHO "notice:" using doxygen ":" $(.doxygen) ; -    } -    local extra-paths ; -    if [ os.name ] = NT -    { -        local ProgramFiles = [ modules.peek : ProgramFiles ] ; -        if $(ProgramFiles) -        { -            extra-paths = "$(ProgramFiles:J= )" ; -        } -        else -        { -            extra-paths = "C:\\Program Files" ; -        } -    } -    .doxygen = [ common.get-invocation-command doxygen : -                 doxygen : $(.doxygen) : $(extra-paths) ] ; -} - -rule name ( ) -{ -    freeze-config ; -    return $(.doxygen) ; -} - -rule .is-cygwin ( ) -{ -    if [ os.on-windows ] -    { -        local file = [ path.make [ modules.binding $(__name__) ] ] ; -        local dir = [ path.native -                      [ path.join [ path.parent $(file) ] doxygen ] ] ; -        local command = -         "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ; -        result = [ SHELL $(command) ] ; -        if [ MATCH "(Parsing file /)" : $(result) ] -        { -            return true ; -        } -    } -} - -# Runs Doxygen on the given Doxygen configuration file (the source) to generate -# the Doxygen files. The output is dumped according to the settings in the -# Doxygen configuration file, not according to the target! Because of this, we -# essentially "touch" the target file, in effect making it look like we have -# really written something useful to it. Anyone that uses this action must deal -# with this behavior. -# -actions doxygen-action -{ -    $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)" -} - - -# Runs the Python doxproc XML processor. -# -actions doxproc -{ -    python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)" -} - - -rule translate-path ( path ) -{ -    freeze-config ; -    if [ os.on-windows ] -    { -        if [ os.name ] = CYGWIN -        { -            if $(.is-cygwin) -            { -                return $(path) ; -            } -            else -            { -                return $(path:W) ; -            } -        } -        else -        { -            if $(.is-cygwin) -            { -                match = [ MATCH ^(.):(.*) : $(path) ] ; -                if $(match) -                { -                    return /cygdrive/$(match[1])$(match[2]:T) ; -                } -                else -                { -                    return $(path:T) ; -                } -            } -            else -            { -                return $(path) ; -            } -        } -    } -    else -    { -        return $(path) ; -    } -} - - -# Generates a doxygen configuration file (doxyfile) given a set of C++ sources -# and a property list that may contain <doxygen:param> features. -# -rule headers-to-doxyfile ( target : sources * : properties * ) -{ -    local text "# Generated by Boost.Build version 2" ; - -    local output-dir ; - -    # Translate <doxygen:param> into command line flags. 
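    # (Editor's sketch, not part of the original module; the parameter value is
    # hypothetical.) As an illustration, a requirement such as
    #
    #   <doxygen:param>EXTRACT_ALL=YES
    #
    # would be expected to come out of the loop below as the doxyfile line
    #
    #   EXTRACT_ALL = YES
    #
    # while OUTPUT_DIRECTORY is special-cased so that its value is path-translated
    # and quoted.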
-    for local param in [ feature.get-values <doxygen:param> : $(properties) ] -    { -        local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ; -        if $(namevalue[1]) = OUTPUT_DIRECTORY -        { -            output-dir = [ translate-path -                           [ utility.unquote $(namevalue[2]) ] ] ; -            text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ; -        } -        else -        { -            text += "$(namevalue[1]) = $(namevalue[2])" ; -        } -    } - -    if ! $(output-dir) -    { -        output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ; -        text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ; -    } - -    local headers = ; -    for local header in $(sources:G=) -    { -        header = [ translate-path $(header) ] ; -        headers += \"$(header)\" ; -    } - -    # Doxygen generates LaTex by default. So disable it unconditionally, or at -    # least until someone needs, and hence writes support for, LaTex output. -    text += "GENERATE_LATEX = NO" ; -    text += "INPUT = $(headers:J= )" ; -    print.output $(target) plain ; -    print.text $(text) : true ; -} - - -# Run Doxygen. See doxygen-action for a description of the strange properties of -# this rule. -# -rule run ( target : source : properties * ) -{ -    freeze-config ; -    if <doxygen.rmdir>on in $(properties) -    { -        local output-dir = -            [ path.make -                [ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) : -                  $(properties) ] ] ; -        local html-dir = -            [ path.make -                [ MATCH <doxygen:param>HTML_OUTPUT=(.*) : -                  $(properties) ] ] ; -        if $(output-dir) && $(html-dir) && -            [ path.glob $(output-dir) : $(html-dir) ] -        { -            HTMLDIR on $(target) = -                [ path.native [ path.join $(output-dir) $(html-dir) ] ] ; -            rm-htmldir $(target) ; -        } -    } -    doxygen-action $(target) : $(source) ; -    NAME on $(target) = $(.doxygen) ; -    RM on $(target) = [ modules.peek common : RM ] ; -    *.XML on $(target) = -        [ path.native -            [ path.join -                [ path.make [ on $(target) return $(LOCATE) ] ] -                $(target:B:S=) -                *.xml ] ] ; -} - -if [ os.name ] = NT -{ -    RMDIR = rmdir /s /q ; -} -else -{ -    RMDIR = rm -rf ; -} - -actions quietly rm-htmldir -{ -    $(RMDIR) $(HTMLDIR) -} - -# The rules below require Boost.Book stylesheets, so we need some code to check -# that the boostbook module has actualy been initialized. -# -rule check-boostbook ( ) -{ -    if ! [ modules.peek boostbook : .initialized ] -    { -        ECHO "error: the boostbook module is not initialized" ; -        ECHO "error: you've attempted to use the 'doxygen' toolset, " ; -        ECHO "error: which requires Boost.Book," ; -        ECHO "error: but never initialized Boost.Book." ; -        EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ; -    } -} - - -# Collect the set of Doxygen XML files into a single XML source file that can be -# handled by an XSLT processor. The source is completely ignored (see -# doxygen-action), because this action picks up the Doxygen XML index file -# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL -# program and take a "-o output.xml" argument (grrrr). The target of the -# collection will be a single Doxygen XML file. 
-# -rule collect ( target : source : properties * ) -{ -    check-boostbook ; -    local collect-xsl-dir -        = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ; -    local source-path -        = [ path.make [ on $(source) return $(LOCATE) ] ] ; -    local collect-path -        = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ; -    local native-path -        = [ path.native $(collect-path) ] ; -    local real-source -        = [ path.native [ path.join $(collect-path) index.xml ] ] ; -    xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl) -        : <xsl:param>doxygen.xml.path=$(native-path) ; -} - - -# Translate Doxygen XML into BoostBook. -# -rule xml-to-boostbook ( target : source : properties * ) -{ -    check-boostbook ; -    local xsl-dir = [ boostbook.xsl-dir ] ; -    local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen -        doxygen2boostbook.xsl ] ] ; - -    local xslt-properties = $(properties) ; -    for local prefix in [ feature.get-values <prefix> : $(properties) ] -    { -        xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ; -    } -    for local title in [ feature.get-values <reftitle> : $(properties) ] -    { -        xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ; -    } - -    xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ; -} - - -flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ; -flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ; -flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ; - - -rule xml-dir-to-boostbook ( target : source : properties * ) -{ -    DOXPROC on $(target) = $(.doxproc) ; - -    LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ; - -    doxygen.doxproc $(target) : $(source:S=) ; -} - - -# Generate the HTML redirect to HTML dir index.html file. -# -rule html-redirect ( target : source : properties * ) -{ -    local uri = "$(target:B)/index.html" ; -    print.output $(target) plain ; -    print.text -"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" -    \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\"> -<html xmlns=\"http://www.w3.org/1999/xhtml\"> -<head> -  <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" /> - -  <title></title> -</head> - -<body> -  Automatic redirection failed, please go to <a href= -  \"$(uri)\">$(uri)</a>. -</body> -</html> -" -        : true ; -} - -rule copy-latex-pngs ( target : source : requirements * ) -{ -    local directory = [ path.native -                        [ feature.get-values <doxygen:xml-imagedir> : -                          $(requirements) ] ] ; - -    local location = [ on $(target) return $(LOCATE) ] ; - -    local pdf-location = -        [ path.native -            [ path.join -                [ path.make $(location) ] -                [ path.make $(directory) ] ] ] ; -    local html-location = -        [ path.native -            [ path.join -                . 
-                html -                [ path.make $(directory) ] ] ] ; - -    common.MkDir $(pdf-location) ; -    common.MkDir $(html-location) ; - -    DEPENDS $(target) : $(pdf-location) $(html-location) ; - -    if [ os.name ] = NT -    { -        CP on $(target) = copy /y ; -        FROM on $(target) = \\*.png ; -        TOHTML on $(target) = .\\html\\$(directory) ; -        TOPDF on $(target) = \\$(directory) ; -    } -    else -    { -        CP on $(target) = cp ; -        FROM on $(target) = /*.png ; -        TOHTML on $(target) = ./html/$(directory) ; -        TOPDF on $(target) = $(target:D)/$(directory) ; -    } -} - -actions copy-latex-pngs -{ -    $(CP) $(>:S=)$(FROM) $(TOHTML) -    $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF) -    echo "Stamped" > "$(<)" -} - -# building latex images for doxygen XML depends -# on latex, dvips, and ps being in your PATH. -# This is true for most Unix installs, but -# not on Win32, where you will need to install -# MkTex and Ghostscript and add these tools -# to your path. - -actions check-latex -{ -    latex -version >$(<) -} - -actions check-dvips -{ -    dvips -version >$(<) -} - -if [ os.name ] = "NT" -{ -   actions check-gs -   { -       gswin32c -version >$(<) -   } -} -else -{ -   actions check-gs -   { -       gs -version >$(<) -   } -} - -rule check-tools ( ) -{ -    if ! $(.check-tools-targets) -    { -        # Find the root project. -        local root-project = [ project.current ] ; -        root-project = [ $(root-project).project-module ] ; -        while -            [ project.attribute $(root-project) parent-module ] && -            [ project.attribute $(root-project) parent-module ] != user-config -        { -            root-project = -                [ project.attribute $(root-project) parent-module ] ; -        } - -        .latex.check = [ new file-target latex.check -            : -            : [ project.target $(root-project) ] -            : [ new action : doxygen.check-latex ] -            : -            ] ; -        .dvips.check = [ new file-target dvips.check -            : -            : [ project.target $(root-project) ] -            : [ new action : doxygen.check-dvips ] -            : -            ] ; -        .gs.check = [ new file-target gs.check -            : -            : [ project.target $(root-project) ] -            : [ new action : doxygen.check-gs ] -            : -            ] ; -        .check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ; -    } -    return $(.check-tools-targets) ; -} - -project.initialize $(__name__) ; -project doxygen ; - -class doxygen-check-tools-target-class : basic-target -{ -    import doxygen ; -    rule construct ( name : sources * : property-set ) -    { -        return [ property-set.empty ] [ doxygen.check-tools ] ; -    } -} - -local project = [ project.current ] ; - -targets.main-target-alternative -    [ new doxygen-check-tools-target-class check-tools : $(project) -        : [ targets.main-target-sources : check-tools : no-renaming ] -        : [ targets.main-target-requirements : $(project) ] -        : [ targets.main-target-default-build : $(project) ] -        : [ targets.main-target-usage-requirements : $(project) ] -    ] ; - -# User-level rule to generate BoostBook XML from a set of headers via Doxygen. 
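# (Editor's sketch, not part of the original module; the target and source names
# are hypothetical and assume 'using doxygen ;' has been configured.) A Jamfile
# would typically invoke this rule roughly as follows:
#
#   # BoostBook XML output (empty or ".xml" target suffix):
#   doxygen autodoc : mylib.hpp : <doxygen:param>EXTRACT_ALL=YES ;
#
#   # Doxygen HTML output (".html" suffix produces an html directory plus a
#   # redirecting autodoc.html file):
#   doxygen autodoc.html : mylib.hpp ;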
-# -rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * ) -{ -    freeze-config ; -    local project = [ project.current ] ; - -    if $(target:S) = .html -    { -        # Build an HTML directory from the sources. -        local html-location = [ feature.get-values <location> : $(requirements) ] ; -        local output-dir ; -        if [ $(project).get build-dir ]  -        { -            # Explicitly specified build dir. Add html at the end. -            output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ; -        } -        else -        { -            # Trim 'bin' from implicit build dir, for no other reason that backward -            # compatibility. -            output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]  -              $(html-location:E=html) ] ; -        } -        output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;         -        local output-dir-native = [ path.native $(output-dir) ] ; -        requirements = [ property.change $(requirements) : <location> ] ; - -        ## The doxygen configuration file. -        targets.main-target-alternative -            [ new typed-target $(target:S=.tag) : $(project) : DOXYFILE -                : [ targets.main-target-sources $(sources) : $(target:S=.tag) ] -                : [ targets.main-target-requirements $(requirements) -                    <doxygen:param>GENERATE_HTML=YES -                    <doxygen:param>GENERATE_XML=NO -                    <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\"" -                    <doxygen:param>HTML_OUTPUT=$(target:B) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -            ] ; -        $(project).mark-target-as-explicit $(target:S=.tag) ; - -        ## The html directory to generate by running doxygen. -        targets.main-target-alternative -            [ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE -                : $(target:S=.tag) -                : [ targets.main-target-requirements $(requirements) -                    <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\"" -                    <doxygen:param>HTML_OUTPUT=$(target:B) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -                ] ; -        $(project).mark-target-as-explicit $(target:S=.dir) ; - -        ## The redirect html file into the generated html. -        targets.main-target-alternative -            [ new typed-target $(target) : $(project) : DOXYGEN_HTML -                : $(target:S=.dir) -                : [ targets.main-target-requirements $(requirements) -                    <location>$(output-dir) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -                ] ; -    } -    else -    { -        # Build a BoostBook XML file from the sources. 
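        # (Editor's sketch, not part of the original module.) For a hypothetical
        # request 'doxygen autodoc : mylib.hpp ;', the chain of targets set up
        # below is roughly:
        #
        #   autodoc-xml.tag   (DOXYFILE)               generated from the headers
        #   autodoc-xml.dir   (DOXYGEN_XML_MULTIFILE)  doxygen run over the doxyfile
        #   autodoc-xml       (BOOSTBOOK)              xsltproc or doxproc output
        #   autodoc.xml       (installed copy of the BoostBook file)
        #   autodoc           (alias depending on autodoc.xml)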
-        local location-xml = [ feature.get-values <location> : $(requirements) ] ; -        requirements = [ property.change $(requirements) : <location> ] ; -        local target-xml = $(target:B=$(target:B)-xml) ; - -        # Check whether we need to build images -        local images-location = -            [ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ; -        if $(images-location) -        { -            doxygen $(target).doxygen-xml-images.html : $(sources) -                : $(requirements) -                  <doxygen.rmdir>on -                  <doxygen:param>QUIET=YES -                  <doxygen:param>WARNINGS=NO -                  <doxygen:param>WARN_IF_UNDOCUMENTED=NO -                  <dependency>/doxygen//check-tools ; -            $(project).mark-target-as-explicit -                $(target).doxygen-xml-images.html ; - -            targets.main-target-alternative -                [ new typed-target $(target).doxygen-xml-images -                    : $(project) : DOXYGEN_XML_IMAGES -                    : $(target).doxygen-xml-images.html -                    : [ targets.main-target-requirements $(requirements) -                        : $(project) ] -                    : [ targets.main-target-default-build $(default-build) -                        : $(project) ] -                ] ; - -            $(project).mark-target-as-explicit -                $(target).doxygen-xml-images ; - -            if ! [ regex.match "^(.*/)$" : $(images-location) ] -            { -                images-location = $(images-location)/ ; -            } - -            requirements += -                <dependency>$(target).doxygen-xml-images -                <xsl:param>boost.doxygen.formuladir=$(images-location) ; -        } - -        ## The doxygen configuration file. -        targets.main-target-alternative -            [ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE -                : [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ] -                : [ targets.main-target-requirements $(requirements) -                    <doxygen:param>GENERATE_HTML=NO -                    <doxygen:param>GENERATE_XML=YES -                    <doxygen:param>XML_OUTPUT=$(target-xml) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -            ] ; -        $(project).mark-target-as-explicit $(target-xml:S=.tag) ; - -        ## The Doxygen XML directory of the processed source files. -        targets.main-target-alternative -            [ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE -                : $(target-xml:S=.tag) -                : [ targets.main-target-requirements $(requirements) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -            ] ; -        $(project).mark-target-as-explicit $(target-xml:S=.dir) ; - -        ## The resulting BoostBook file is generated by the processor tool. The -        ## tool can be either the xsltproc plus accompanying XSL scripts. Or it -        ## can be the python doxproc.py script. 
-        targets.main-target-alternative -            [ new typed-target $(target-xml) : $(project) : BOOSTBOOK -                :  $(target-xml:S=.dir) -                : [ targets.main-target-requirements $(requirements) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -            ] ; -        $(project).mark-target-as-explicit $(target-xml) ; - -        targets.main-target-alternative -            [ new install-target-class $(target:S=.xml) : $(project) -                : $(target-xml) -                : [ targets.main-target-requirements $(requirements) -                    <location>$(location-xml:E=.) -                    <name>$(target:S=.xml) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -            ] ; -        $(project).mark-target-as-explicit $(target:S=.xml) ; - -        targets.main-target-alternative -            [ new alias-target-class $(target) : $(project) -                : -                : [ targets.main-target-requirements $(requirements) -                    : $(project) ] -                : [ targets.main-target-default-build $(default-build) : $(project) ] -                : [ targets.main-target-usage-requirements $(usage-requirements) -                    <dependency>$(target:S=.xml) -                    : $(project) ] -            ] ; -    } -} diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile deleted file mode 100644 index 9b969df9..00000000 --- a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile +++ /dev/null @@ -1,3 +0,0 @@ -INPUT = windows-paths-check.hpp -GENERATE_HTML = NO -GENERATE_LATEX = NO diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp deleted file mode 100644 index e69de29b..00000000 --- a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp +++ /dev/null diff --git a/jam-files/boost-build/tools/fop.jam b/jam-files/boost-build/tools/fop.jam deleted file mode 100644 index c24b8725..00000000 --- a/jam-files/boost-build/tools/fop.jam +++ /dev/null @@ -1,69 +0,0 @@ -#  Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed -# under the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) -# -#  This module defines rules to handle generation of PDF and -#  PostScript files from XSL Formatting Objects via Apache FOP - -import generators ; -import common ; -import boostbook ; - -generators.register-standard fop.render.pdf : FO : PDF ; -generators.register-standard fop.render.ps : FO : PS ; - -# Initializes the fop toolset. -# -rule init ( fop-command ? : java-home ? : java ? ) -{ -    local has-command = $(.has-command) ; - -    if $(fop-command) -    { -        .has-command = true ; -    } - -    if $(fop-command) || ! $(has-command) -    { -        fop-command = [ common.get-invocation-command fop : fop : $(fop-command)  -          : [ modules.peek : FOP_DIR ] ] ; -    } -     -    if $(fop-command) -    { -        .FOP_COMMAND = $(fop-command) ; -    } - -    if $(java-home) || $(java) -    { -        .FOP_SETUP = ; - -     -        # JAVA_HOME is the location that java was installed to. 
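        # (Editor's sketch, not part of the original module; the paths are
        # hypothetical.) A user configuration might initialize this toolset
        # roughly like:
        #
        #   using fop : /usr/local/bin/fop : /usr/lib/jvm/java-8-openjdk : /usr/bin/java ;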
-     -        if $(java-home) -        { -            .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ; -        } - -        # JAVACMD is the location that of the java executable, useful for a -        # non-standard java installation, where the executable isn't at -        # $JAVA_HOME/bin/java. -     -        if $(java) -        { -            .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ; -        } -    } -} - -actions render.pdf -{ -    $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<) -} - -actions render.ps -{ -    $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<) -} diff --git a/jam-files/boost-build/tools/fortran.jam b/jam-files/boost-build/tools/fortran.jam deleted file mode 100644 index 37665825..00000000 --- a/jam-files/boost-build/tools/fortran.jam +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -# -# This file contains common settings for all fortran tools -# - -import "class" : new ; -import feature : feature ; - -import type ; -import generators ; -import common ; - -type.register FORTRAN : f F for f77 ; -type.register FORTRAN90 : f90 F90 ; - -feature fortran : : free ; -feature fortran90 : : free ; - -class fortran-compiling-generator : generator -{ -    rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * ) -    { -        generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ; -    } -} - -rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * ) -{ -    local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ; -    generators.register $(g) ; -} - -class fortran90-compiling-generator : generator -{ -    rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * ) -    { -        generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ; -    } -} - -rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * ) -{ -    local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ; -    generators.register $(g) ; -} - -# FIXME: this is ugly, should find a better way (we'd want client code to -# register all generators as "generator.some-rule", not with "some-module.some-rule".) -IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ; -IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ; diff --git a/jam-files/boost-build/tools/gcc.jam b/jam-files/boost-build/tools/gcc.jam deleted file mode 100644 index f7b0da54..00000000 --- a/jam-files/boost-build/tools/gcc.jam +++ /dev/null @@ -1,1185 +0,0 @@ -# Copyright 2001 David Abrahams. -# Copyright 2002-2006 Rene Rivera. -# Copyright 2002-2003 Vladimir Prus. -#  Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov. -# Copyright 2007 Roland Schwarz -# Copyright 2007 Boris Gubenko. -# -# Distributed under the Boost Software License, Version 1.0. 
-#    (See accompanying file LICENSE_1_0.txt or copy at -#          http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import common ; -import errors ; -import feature ; -import generators ; -import os ; -import pch ; -import property ; -import property-set ; -import toolset ; -import type ; -import rc ; -import regex ; -import set ; -import unix ; -import fortran ; - - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} - - -feature.extend toolset : gcc ; -# feature.subfeature toolset gcc : flavor : : optional ; - -toolset.inherit-generators gcc : unix : unix.link unix.link.dll ; -toolset.inherit-flags gcc : unix ; -toolset.inherit-rules gcc : unix ; - -generators.override gcc.prebuilt : builtin.prebuilt ; -generators.override gcc.searched-lib-generator : searched-lib-generator ; - -# Make gcc toolset object files use the "o" suffix on all platforms. -type.set-generated-target-suffix OBJ : <toolset>gcc : o ; -type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ; -type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ; - -# Initializes the gcc toolset for the given version. If necessary, command may -# be used to specify where the compiler is located. The parameter 'options' is a -# space-delimited list of options, each one specified as -# <option-name>option-value. Valid option names are: cxxflags, linkflags and -# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or -# sun and the default value will be selected based on the current OS. -# Example: -#   using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ; -# -# The compiler command to use is detected in a three step manner: -# 1) If an explicit command is specified by the user, it will be used and must available. -# 2) If only a certain version is specified, it is enforced: -#    - either a command 'g++-VERSION' must be available -#    - or the default command 'g++' must be available and match the exact version. -# 3) Without user-provided restrictions use default 'g++' -rule init ( version ? : command * : options * ) -{ -    #1): use user-provided command -    local tool-command = ; -    if $(command) -    { -       tool-command  =  [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ; -       if ! $(tool-command) -       { -           errors.error "toolset gcc initialization:" : -                        "provided command '$(command)' not found" : -                        "initialized from" [ errors.nearest-user-location ] ; -       } -    } -    #2): enforce user-provided version -    else if $(version) -    { -        tool-command  =  [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ; -         -        #2.1) fallback: check whether "g++" reports the requested version -        if ! $(tool-command) -        { -            tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ; -            if $(tool-command) -            { -                local tool-command-string = $(tool-command:J=" ") ; -                local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ; -                if $(tool-version) != $(version) -                { -                    # Permit a match betwen two-digit version specified by the user -                    # (e.g. 4.4) and 3-digit version reported by gcc. 
-                    # Since only two digits are present in binary name anyway, -                    # insisting that user specify 3-digit version when -                    # configuring Boost.Build while it's not required on  -                    # command like would be strange. -                    local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ; -                    if $(stripped) != $(version) -                    {                                             -                        errors.error "toolset gcc initialization:" : -                          "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" : -                            "initialized from" [ errors.nearest-user-location ] ; -                        tool-command = ; -                    } -                    # Use full 3-digit version to be compatible with the 'using gcc ;' case -                    version = $(tool-version) ; -                } -            } -            else -            { -                errors.error "toolset gcc initialization:" : -                             "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" : -                             "initialized from" [ errors.nearest-user-location ] ; -            } -        } -    } -    #3) default: no command and no version specified, try using default command "g++" -    else -    { -        tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ; -        if ! $(tool-command) -        { -            errors.error "toolset gcc initialization:" : -                         "no command provided, default command 'g++' not found" : -                         "initialized from" [ errors.nearest-user-location ] ; -        } -    } -     -     -    # Information about the gcc command... -    #   The command. -    local command = $(tool-command) ; -    #   The root directory of the tool install. -    local root = [ feature.get-values <root> : $(options) ] ; -    #   The bin directory where to find the command to execute. -    local bin ; -    #   The flavor of compiler. -    local flavor = [ feature.get-values <flavor> : $(options) ] ; -    #   Autodetect the root and bin dir if not given. -    if $(command) -    { -        bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; -        root ?= $(bin:D) ; -    } -    # The 'command' variable can have multiple elements. When calling -    # the SHELL builtin we need a single string. -    local command-string = $(command:J=" ") ; -    #   Autodetect the version and flavor if not given. 
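    # (Editor's sketch, not part of the original module; the outputs shown are
    # hypothetical.) The detection below relies on the compiler's own queries,
    # for example:
    #
    #   g++ -dumpmachine   ->  x86_64-w64-mingw32   (machine string; selects the mingw flavor)
    #   g++ -dumpversion   ->  4.7.2                (used as the version)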
-    if $(command) -    {     -        local machine = [ MATCH "^([^ ]+)" -            : [ SHELL "$(command-string) -dumpmachine" ] ] ; -        version ?= [ MATCH "^([0-9.]+)" -            : [ SHELL "$(command-string) -dumpversion" ] ] ; -        switch $(machine:L) -        { -            case *mingw* : flavor ?= mingw ; -        } -    } - -    local condition ; -    if $(flavor) -    { -        condition = [ common.check-init-parameters gcc -            : version $(version) -            : flavor $(flavor) -            ] ; -    } -    else -    { -        condition = [ common.check-init-parameters gcc -            : version $(version) -            ] ; -        condition = $(condition) ;  #/<toolset-gcc:flavor> ; -    } - -    common.handle-options gcc : $(condition) : $(command) : $(options) ; - -    local linker = [ feature.get-values <linker-type> : $(options) ] ; -    # The logic below should actually be keyed on <target-os> -    if ! $(linker) -    { -        if [ os.name ] = OSF -        { -            linker = osf ; -        } -        else if [ os.name ] = HPUX -        { -            linker = hpux ; -        } -        else if [ os.name ] = AIX -        { -            linker = aix ; -        } -        else if [ os.name ] = SOLARIS -        { -            linker = sun ; -        }         -        else -        { -            linker = gnu ; -        } -    } -    init-link-flags gcc $(linker) $(condition) ; - - -    # If gcc is installed in non-standard location, we'd need to add -    # LD_LIBRARY_PATH when running programs created with it (for unit-test/run -    # rules). -    if $(command) -    { -        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries -        # and all must be added to LD_LIBRARY_PATH. The linker will pick the -        # right onces. Note that we don't provide a clean way to build 32-bit -        # binary with 64-bit compiler, but user can always pass -m32 manually. -        local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ; -        if $(.debug-configuration) -        { -            ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ; -        } -        toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ; -    } - -    # If it's not a system gcc install we should adjust the various programs as -    # needed to prefer using the install specific versions. This is essential -    # for correct use of MinGW and for cross-compiling. -     -    local nl = " -" ; - -    # - The archive builder. -    local archiver = [ common.get-invocation-command gcc -            : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ar" ] ] ] -            : [ feature.get-values <archiver> : $(options) ]  -            : $(bin)  -            : search-path ] ; -    toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ; -    if $(.debug-configuration) -    { -        ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ; -    } - -    # - Ranlib -    local ranlib = [ common.get-invocation-command gcc -            : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ] -            : [ feature.get-values <ranlib> : $(options) ]  -            : $(bin)  -            : search-path ] ; -    toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ; -    if $(.debug-configuration) -    { -        ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ; -    } - - -    # - The resource compiler. 
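    # (Editor's sketch, not part of the original module; the command names are
    # hypothetical.) When cross-compiling with MinGW, the resource compiler can
    # be pointed at explicitly through the initialization options, for example:
    #
    #   using gcc : 4.7 : i686-w64-mingw32-g++ : <rc>i686-w64-mingw32-windres ;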
-    local rc = -        [ common.get-invocation-command-nodefault gcc -            : windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ; -    local rc-type = -        [ feature.get-values <rc-type> : $(options) ] ; -    rc-type ?= windres ; -    if ! $(rc) -    { -        # If we can't find an RC compiler we fallback to a null RC compiler that -        # creates empty object files. This allows the same Jamfiles to work -        # across the board. The null RC uses the assembler to create the empty -        # objects, so configure that. -        rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ; -        rc-type = null ; -    } -    rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ; -} - -if [ os.name ] = NT -{ -    # This causes single-line command invocation to not go through .bat files, -    # thus avoiding command-line length limitations. -    JAMSHELL = % ; -} - -generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ; -generators.register-c-compiler gcc.compile.c.preprocess   : C   : PREPROCESSED_C   : <toolset>gcc ; -generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ; -generators.register-c-compiler gcc.compile.c   : C   : OBJ : <toolset>gcc ; -generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ; -generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ; - -# pch support - -# The compiler looks for a precompiled header in each directory just before it -# looks for the include file in that directory. The name searched for is the -# name specified in the #include directive with ".gch" suffix appended. The -# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to -# full name of the header. - -type.set-generated-target-suffix PCH : <toolset>gcc : gch ; - -# GCC-specific pch generator. -class gcc-pch-generator : pch-generator -{ -    import project ; -    import property-set ; -    import type ; - -    rule run-pch ( project name ? : property-set : sources + ) -    { -        # Find the header in sources. Ignore any CPP sources. -        local header ; -        for local s in $(sources) -        { -            if [ type.is-derived [ $(s).type ] H ] -            { -                header = $(s) ; -            } -        } - -        # Error handling: Base header file name should be the same as the base -        # precompiled header name. -        local header-name = [ $(header).name ] ; -        local header-basename = $(header-name:B) ; -        if $(header-basename) != $(name) -        { -            local location = [ $(project).project-module ] ; -            errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ; -        } - -        local pch-file = [ generator.run $(project) $(name) : $(property-set) -            : $(header) ] ; - -        # return result of base class and pch-file property as usage-requirements -        return -            [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ] -            $(pch-file) -          ; -    } - -    # Calls the base version specifying source's name as the name of the created -    # target. As result, the PCH will be named whatever.hpp.gch, and not -    # whatever.gch. -    rule generated-targets ( sources + : property-set : project name ? 
) -    { -        name = [ $(sources[1]).name ] ; -        return [ generator.generated-targets $(sources) -          : $(property-set) : $(project) $(name) ] ; -    } -} - -# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The -# latter have HPP type, but HPP type is derived from H. The type of compilation -# is determined entirely by the destination type. -generators.register [ new gcc-pch-generator gcc.compile.c.pch   : H :   C_PCH : <pch>on <toolset>gcc ] ; -generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ; - -# Override default do-nothing generators. -generators.override gcc.compile.c.pch   : pch.default-c-pch-generator   ; -generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ; - -toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ; - -# Declare flags and action for compilation. -toolset.flags gcc.compile OPTIONS <optimization>off   : -O0 ; -toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags gcc.compile OPTIONS <optimization>space : -Os ; - -toolset.flags gcc.compile OPTIONS <inlining>off  : -fno-inline ; -toolset.flags gcc.compile OPTIONS <inlining>on   : -Wno-inline ; -toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ; - -toolset.flags gcc.compile OPTIONS <warnings>off : -w ; -toolset.flags gcc.compile OPTIONS <warnings>on  : -Wall ; -toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ; -toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ; - -toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ; -toolset.flags gcc.compile OPTIONS <profiling>on : -pg ; -toolset.flags gcc.compile OPTIONS <rtti>off : -fno-rtti ; - -rule setup-fpic ( targets * : sources * : properties * ) -{ -    local link = [ feature.get-values link : $(properties) ] ; -    if $(link) = shared -    {         -        local target = [ feature.get-values target-os : $(properties) ] ; -         -        # This logic will add -fPIC for all compilations: -        # -        # lib a : a.cpp b ; -        # obj b : b.cpp ; -        # exe c : c.cpp a d ; -        # obj d : d.cpp ; -        # -        # This all is fine, except that 'd' will be compiled with -fPIC even though -        # it is not needed, as 'd' is used only in exe. However, it is hard to -        # detect where a target is going to be used. Alternatively, we can set -fPIC -        # only when main target type is LIB but than 'b' would be compiled without -        # -fPIC which would lead to link errors on x86-64. So, compile everything -        # with -fPIC. -        # -        # Yet another alternative would be to create a propagated <sharedable> -        # feature and set it when building shared libraries, but that would be hard -        # to implement and would increase the target path length even more. -         -        # On Windows, fPIC is default, specifying -fPIC explicitly leads to -        # a warning. 
-        if $(target) != cygwin && $(target) != windows -        { -            OPTIONS on $(targets) += -fPIC ; -        }         -    } -} - -rule setup-address-model ( targets * : sources * : properties * ) -{ -    local model = [ feature.get-values address-model : $(properties) ] ; -    if $(model) -    { -        local option ; -        local os = [ feature.get-values target-os : $(properties) ] ; -        if $(os) = aix -        { -            if $(model) = 32 -            { -                option = -maix32 ; -            } -            else -            { -                option = -maix64 ; -            } -        } -        else if $(os) = hpux -        { -            if $(model) = 32 -            { -                option = -milp32 ; -            } -            else -            { -                option = -mlp64 ; -            } -        } -        else -        { -            if $(model) = 32 -            { -                option = -m32 ; -            } -            else if $(model) = 64 -            { -                option = -m64 ; -            } -            # For darwin, the model can be 32_64. darwin.jam will handle that -            # on its own. -        } -        OPTIONS on $(targets) += $(option) ; -    }     -} - - -# FIXME: this should not use os.name. -if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX && [ os.name ] != AIX -{ -    # OSF does have an option called -soname but it does not seem to work as -    # expected, therefore it has been disabled. -    HAVE_SONAME   = "" ; -    SONAME_OPTION = -h ; -} - -# HPUX, for some reason, seem to use '+h', not '-h'. -if [ os.name ] = HPUX -{ -    HAVE_SONAME   = "" ; -    SONAME_OPTION = +h ; -} - -toolset.flags gcc.compile USER_OPTIONS <cflags> ; -toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ; -toolset.flags gcc.compile DEFINES <define> ; -toolset.flags gcc.compile INCLUDES <include> ; -toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; -toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ; - -rule compile.c++.pch ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c++.pch -{ -    "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.c.pch ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ;     -} - -actions compile.c.pch -{ -    "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.c++.preprocess ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -     -    # Some extensions are compiled as C++ by default. For others, we need to -    # pass -x c++. We could always pass -x c++ but distcc does not work with it. -    if ! 
$(>:S) in .cc .cp .cxx .cpp .c++ .C -    { -        LANG on $(<) = "-x c++" ; -    } -    DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -rule compile.c.preprocess ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -     -    # If we use the name g++ then default file suffix -> language mapping does -    # not work. So have to pass -x option. Maybe, we can work around this by -    # allowing the user to specify both C and C++ compiler names. -    #if $(>:S) != .c -    #{ -        LANG on $(<) = "-x c" ; -    #} -    DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -rule compile.c++ ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -     -    # Some extensions are compiled as C++ by default. For others, we need to -    # pass -x c++. We could always pass -x c++ but distcc does not work with it. -    if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C -    { -        LANG on $(<) = "-x c++" ; -    } -    DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -     -    # Here we want to raise the template-depth parameter value to something -    # higher than the default value of 17. Note that we could do this using the -    # feature.set-default rule but we do not want to set the default value for -    # all toolsets as well. -    # -    # TODO: This 'modified default' has been inherited from some 'older Boost -    # Build implementation' and has most likely been added to make some Boost -    # library parts compile correctly. We should see what exactly prompted this -    # and whether we can get around the problem more locally. -    local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ; -    if ! $(template-depth) -    { -        TEMPLATE_DEPTH on $(<) = 128 ; -    } -} - -rule compile.c ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -     -    # If we use the name g++ then default file suffix -> language mapping does -    # not work. So have to pass -x option. Maybe, we can work around this by -    # allowing the user to specify both C and C++ compiler names. 
-    #if $(>:S) != .c -    #{ -        LANG on $(<) = "-x c" ; -    #} -    DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -rule compile.fortran ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-fpic $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -} - -actions compile.c++ bind PCH_FILE -{ -    "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)" -} - -actions compile.c bind PCH_FILE -{ -    "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++.preprocess bind PCH_FILE -{ -    "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)" -} - -actions compile.c.preprocess bind PCH_FILE -{ -    "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<) -} - -actions compile.fortran -{ -  "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"  -} - -rule compile.asm ( targets * : sources * : properties * ) -{ -    setup-fpic $(targets) : $(sources) : $(properties) ;     -    setup-address-model $(targets) : $(sources) : $(properties) ; -    LANG on $(<) = "-x assembler-with-cpp" ; -} - -actions compile.asm -{ -    "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# The class which check that we don't try to use the <runtime-link>static -# property while creating or using shared library, since it's not supported by -# gcc/libc. -class gcc-linking-generator : unix-linking-generator -{ -    rule run ( project name ? : property-set : sources + ) -    { -        # TODO: Replace this with the use of a target-os property. -        local no-static-link = ; -        if [ modules.peek : UNIX ] -        { -            switch [ modules.peek : JAMUNAME ] -            { -                case * : no-static-link = true ; -            } -        } - -        local properties = [ $(property-set).raw ] ; -        local reason ; -        if $(no-static-link) && <runtime-link>static in $(properties) -        { -            if <link>shared in $(properties) -            { -                reason = -                    "On gcc, DLL can't be build with '<runtime-link>static'." ; -            } -            else if [ type.is-derived $(self.target-types[1]) EXE ] -            { -                for local s in $(sources) -                { -                    local type = [ $(s).type ] ; -                    if $(type) &&  [ type.is-derived $(type) SHARED_LIB ] -                    { -                        reason = -                            "On gcc, using DLLS together with the" -                            "<runtime-link>static options is not possible " ; -                    } -                } -            } -        } -        if $(reason) -        { -            ECHO warning: -                $(reason) ; -            ECHO warning: -                "It is suggested to use '<runtime-link>static' together" -                "with '<link>static'." 
; -            return ; -        } -        else -        { -            local generated-targets = [ unix-linking-generator.run $(project) -                $(name) : $(property-set) : $(sources) ] ; -            return $(generated-targets) ; -        } -    } -} - -# The set of permissible input types is different on mingw. -# So, define two sets of generators, with mingw generators -# selected when target-os=windows. - -local g ; -g = [ new gcc-linking-generator gcc.mingw.link -      : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -      : EXE -      : <toolset>gcc <target-os>windows ] ; -$(g).set-rule-name gcc.link ; -generators.register $(g) ; - -g = [ new gcc-linking-generator gcc.mingw.link.dll -      : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -      : IMPORT_LIB SHARED_LIB -      : <toolset>gcc <target-os>windows ] ; -$(g).set-rule-name gcc.link.dll ; -generators.register $(g) ; - -generators.register -  [ new gcc-linking-generator gcc.link -      : LIB OBJ -      : EXE -      : <toolset>gcc ] ; -generators.register -  [ new gcc-linking-generator gcc.link.dll -      : LIB OBJ -      : SHARED_LIB -      : <toolset>gcc ] ; - -generators.override gcc.mingw.link : gcc.link ; -generators.override gcc.mingw.link.dll : gcc.link.dll ; - -# Cygwin is similar to msvc and mingw in that it uses import libraries. -# While in simple cases, it can directly link to a shared library, -# it is believed to be slower, and not always possible. Define cygwin-specific -# generators here. - -g = [ new gcc-linking-generator gcc.cygwin.link -      : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -      : EXE -      : <toolset>gcc <target-os>cygwin ] ; -$(g).set-rule-name gcc.link ; -generators.register $(g) ; - -g = [ new gcc-linking-generator gcc.cygwin.link.dll -      : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB -      : IMPORT_LIB SHARED_LIB -      : <toolset>gcc <target-os>cygwin ] ; -$(g).set-rule-name gcc.link.dll ; -generators.register $(g) ; - -generators.override gcc.cygwin.link : gcc.link ; -generators.override gcc.cygwin.link.dll : gcc.link.dll ; - -# Declare flags for linking. -# First, the common flags. -toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ; -toolset.flags gcc.link OPTIONS <profiling>on : -pg ; -toolset.flags gcc.link USER_OPTIONS <linkflags> ; -toolset.flags gcc.link LINKPATH <library-path> ; -toolset.flags gcc.link FINDLIBS-ST <find-static-library> ; -toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ; -toolset.flags gcc.link LIBRARIES <library-file> ; - -toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ; -toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ; - -# For <runtime-link>static we made sure there are no dynamic libraries in the -# link. On HP-UX not all system libraries exist as archived libraries (for -# example, there is no libunwind.a), so, on this platform, the -static option -# cannot be specified. -if [ os.name ] != HPUX -{ -    toolset.flags gcc.link OPTIONS <runtime-link>static : -static ; -} - -# Now, the vendor specific flags. -# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun. -rule init-link-flags ( toolset linker condition ) -{ -    switch $(linker) -    { -    case aix : -        { -        # -        # On AIX we *have* to use the native linker. -        # -        # Using -brtl, the AIX linker will look for libraries with both the .a -        # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the -        # AIX linker looks only for libfoo.a. 
Note that libfoo.a is an archived -        # file that may contain shared objects and is different from static libs -        # as on Linux. -        # -        # The -bnoipath strips the prepending (relative) path of libraries from -        # the loader section in the target library or executable. Hence, during -        # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded -        # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without -        # this option, the prepending (relative) path + library name is -        # hard-coded in the loader section, causing *only* this path to be -        # searched during load-time. Note that the AIX linker does not have an -        # -soname equivalent, this is as close as it gets. -        # -        # The above options are definately for AIX 5.x, and most likely also for -        # AIX 4.x and AIX 6.x. For details about the AIX linker see: -        # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf -        # - -        toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath -            : unchecked ; -        } - -    case darwin : -        { -        # On Darwin, the -s option to ld does not work unless we pass -static, -        # and passing -static unconditionally is a bad idea. So, don't pass -s. -        # at all, darwin.jam will use separate 'strip' invocation. -        toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ; -        toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ; -        } - -    case gnu : -        { -        # Strip the binary when no debugging is needed. We use --strip-all flag -        # as opposed to -s since icc (intel's compiler) is generally -        # option-compatible with and inherits from the gcc toolset, but does not -        # support -s. -        toolset.flags $(toolset).link OPTIONS     $(condition)/<strip>on          : -Wl,--strip-all   : unchecked ; -        toolset.flags $(toolset).link RPATH       $(condition)                    : <dll-path>        : unchecked ; -        toolset.flags $(toolset).link RPATH_LINK  $(condition)                    : <xdll-path>       : unchecked ; -        toolset.flags $(toolset).link START-GROUP $(condition)                    : -Wl,--start-group : unchecked ; -        toolset.flags $(toolset).link END-GROUP   $(condition)                    : -Wl,--end-group   : unchecked ; - -        # gnu ld has the ability to change the search behaviour for libraries -        # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic -        # and change search for -l switches that follow them. The following list -        # shows the tried variants. -        # The search stops at the first variant that has a match. -        # *nix: -Bstatic -lxxx -        #    libxxx.a -        # -        # *nix: -Bdynamic -lxxx -        #    libxxx.so -        #    libxxx.a -        # -        # windows (mingw,cygwin) -Bstatic -lxxx -        #    libxxx.a -        #    xxx.lib -        # -        # windows (mingw,cygwin) -Bdynamic -lxxx -        #    libxxx.dll.a -        #    xxx.dll.a -        #    libxxx.a -        #    xxx.lib -        #    cygxxx.dll (*) -        #    libxxx.dll -        #    xxx.dll -        #    libxxx.a -        # -        # (*) This is for cygwin -        # Please note that -Bstatic and -Bdynamic are not a guarantee that a -        # static or dynamic lib indeed gets linked in. The switches only change -        # search patterns! 
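        # (Editor's sketch, not part of the original module; the library names
        # are hypothetical.) With the prefixes declared below, a link command
        # ends up containing fragments such as:
        #
        #   ... -Wl,-Bstatic -lmystaticlib -Wl,-Bdynamic -lpthread ...
        #
        # i.e. FINDLIBS-ST libraries are searched with the static pattern and
        # FINDLIBS-SA libraries with the dynamic one.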
- -        # On *nix mixing shared libs with static runtime is not a good idea. -        toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared -            : -Wl,-Bstatic : unchecked ; -        toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared -            : -Wl,-Bdynamic : unchecked ; - -        # On windows allow mixing of static and dynamic libs with static -        # runtime. -        toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows -            : -Wl,-Bstatic : unchecked ; -        toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows -            : -Wl,-Bdynamic : unchecked ; -        toolset.flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows -            : -Wl,-Bstatic : unchecked ; -        } - -    case hpux : -        { -        toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on -            : -Wl,-s : unchecked ; -        toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared -            : -fPIC : unchecked ; -        } - -    case osf : -        { -        # No --strip-all, just -s. -        toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on -            : -Wl,-s : unchecked ; -        toolset.flags $(toolset).link RPATH $(condition) : <dll-path> -            : unchecked ; -        # This does not supports -R. -        toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath -            : unchecked ; -        # -rpath-link is not supported at all. -        } - -    case sun : -        { -        toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on -            : -Wl,-s : unchecked ; -        toolset.flags $(toolset).link RPATH $(condition) : <dll-path> -            : unchecked ; -        # Solaris linker does not have a separate -rpath-link, but allows to use -        # -L for the same purpose. -        toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path> -            : unchecked ; - -        # This permits shared libraries with non-PIC code on Solaris. -        # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the -        # following is not needed. Whether -fPIC should be hardcoded, is a -        # separate question. -        # AH, 2004/10/16: it is still necessary because some tests link against -        # static libraries that were compiled without PIC. -        toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared -            : -mimpure-text : unchecked ; -        } - -    case * : -        { -        errors.user-error -            "$(toolset) initialization: invalid linker '$(linker)'" : -            "The value '$(linker)' specified for <linker> is not recognized." : -            "Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'" ; -        } -    } -} - -# Enclose the RPATH variable on 'targets' in (double) quotes, -# unless it's already enclosed in single quotes. -# This special casing is done because it's common to pass -# '$ORIGIN' to linker -- and it has to have single quotes -# to prevent expansion by shell -- and if we add double -# quotes then preventing properties of single quotes disappear. -rule quote-rpath ( targets * ) -{ -    local r = [ on $(targets[1]) return $(RPATH) ] ; -    if ! [ MATCH "('.*')" : $(r) ]  -    { -        r = "\"$(r)\"" ; -    } -    RPATH on $(targets) = $(r) ; -} - -# Declare actions for linking. 
-rule link ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -    SPACE on $(targets) = " " ; -    # Serialize execution of the 'link' action, since running N links in -    # parallel is just slower. For now, serialize only gcc links, it might be a -    # good idea to serialize all links. -    JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ; -    quote-rpath $(targets) ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) - -} - -# Default value. Mostly for the sake of intel-linux that inherits from gcc, but -# does not have the same logic to set the .AR variable. We can put the same -# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is -# always available. -.AR = ar ; -.RANLIB = ranlib ; - -toolset.flags gcc.archive AROPTIONS <archiveflags> ; - -rule archive ( targets * : sources * : properties * ) -{ -    # Always remove archive and start again. Here is the rationale from -    # -    # Andre Hentz: -    # -    # I had a file, say a1.c, that was included into liba.a. I moved a1.c to -    # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd -    # errors. After some debugging I traced it back to the fact that a1.o was -    # *still* in liba.a -    # -    # Rene Rivera: -    # -    # Originally removing the archive was done by splicing an RM onto the -    # archive action. That makes archives fail to build on NT when they have -    # many files because it will no longer execute the action directly and blow -    # the line length limit. Instead we remove the file in a different action, -    # just before building the archive. -    # -    local clean.a = $(targets[1])(clean) ; -    TEMPORARY $(clean.a) ; -    NOCARE $(clean.a) ; -    LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; -    DEPENDS $(clean.a) : $(sources) ; -    DEPENDS $(targets) : $(clean.a) ; -    common.RmTemps $(clean.a) : $(targets) ; -} - -# Declare action for creating static libraries. -# The letter 'r' means to add files to the archive with replacement. Since we -# remove archive, we don't care about replacement, but there's no option "add -# without replacement". -# The letter 'c' suppresses the warning in case the archive does not exists yet. -# That warning is produced only on some platforms, for whatever reasons. -actions piecemeal archive -{ -    "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" -    "$(.RANLIB)" "$(<)" -} - -rule link.dll ( targets * : sources * : properties * ) -{ -    setup-threading $(targets) : $(sources) : $(properties) ; -    setup-address-model $(targets) : $(sources) : $(properties) ; -    SPACE on $(targets) = " " ; -    JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ; -    quote-rpath $(targets) ; -} - -# Differs from 'link' above only by -shared. 
-actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) -} - -rule setup-threading ( targets * : sources * : properties * ) -{ -    local threading = [ feature.get-values threading : $(properties) ] ; -    if $(threading) = multi -    {         -        local target = [ feature.get-values target-os : $(properties) ] ; -        local option ; -        local libs ; -         -        switch $(target) -        { -            case windows : -            { -                option = -mthreads ; -            } -            case cygwin : -            { -                option = -mthreads ; -            } -            case solaris : -            { -                option = -pthreads ; -                libs = rt ; -            } -            case beos : -            {             -                # BeOS has no threading options, so do not set anything here. -            }         -            case *bsd : -            { -                option = -pthread ; -                # There is no -lrt on BSD. -            } -            case sgi : -            { -                # gcc on IRIX does not support multi-threading so do not set anything -                # here. -            } -            case darwin : -            { -                # Darwin has no threading options so do not set anything here. -            } -            case * : -            { -                option = -pthread ; -                libs = rt ; -            } -        } -     -        if $(option) -        { -            OPTIONS on $(targets) += $(option) ; -        } -        if $(libs) -        { -            FINDLIBS-SA on $(targets) += $(libs) ; -        } -    }     -} - -local rule cpu-flags ( toolset variable : architecture : instruction-set + : values + : default ? ) -{ -    if $(default) -    { -        toolset.flags $(toolset) $(variable) -            <architecture>$(architecture)/<instruction-set> -            : $(values) ; -    } -    toolset.flags $(toolset) $(variable) -        <architecture>/<instruction-set>$(instruction-set) -        <architecture>$(architecture)/<instruction-set>$(instruction-set) -        : $(values) ; -} - -# Set architecture/instruction-set options. -# -# x86 and compatible -# The 'native' option appeared in gcc 4.2 so we cannot safely use it -# as default. Use conservative i386 instead. 
-cpu-flags gcc OPTIONS : x86 : native : -march=native ; -cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 : default ; -cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ; -cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ; -cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ; -cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ; -cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ; -cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ; -cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ; -cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ; -cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ; -cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ; -cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ; -cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ; -cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ; -cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ; -cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ; -cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ; -cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ; -cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ; -cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ; -cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ; -cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ; -cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ; -cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ; -## -cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ; -cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ; -cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ; -cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ; -cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ; -cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ; -cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ; -cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ; -# Sparc -cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ; -cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ; -cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ; -cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ; -cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ; -cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ; -cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ; -cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ; -cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ; -cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ; -cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ; -cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ; -cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ; -cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ; -cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ; -# RS/6000 & PowerPC -cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ; -cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ; -cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ; -cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ; -cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ; -cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ; -cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ; -cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ; -cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ; -cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ; -cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ; -cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ; -cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ; -cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ; 
-cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ; -cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ; -cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ; -cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ; -cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ; -cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ; -cpu-flags gcc OPTIONS : power : power : -mcpu=power ; -cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ; -cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ; -cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ; -cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ; -cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ; -cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ; -cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ; -cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ; -cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ; -cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ; -cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ; -# AIX variant of RS/6000 & PowerPC -toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X 64" ; diff --git a/jam-files/boost-build/tools/gcc.py b/jam-files/boost-build/tools/gcc.py deleted file mode 100644 index 2a3e675e..00000000 --- a/jam-files/boost-build/tools/gcc.py +++ /dev/null @@ -1,796 +0,0 @@ -# Status: being ported by Steven Watanabe -# Base revision: 47077 -# TODO: common.jam needs to be ported -# TODO: generators.jam needs to have register_c_compiler. -# -# Copyright 2001 David Abrahams. -# Copyright 2002-2006 Rene Rivera. -# Copyright 2002-2003 Vladimir Prus. -#  Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov. -# Copyright 2007 Roland Schwarz -# Copyright 2007 Boris Gubenko. -# Copyright 2008 Steven Watanabe -# -# Distributed under the Boost Software License, Version 1.0. -#    (See accompanying file LICENSE_1_0.txt or copy at -#          http://www.boost.org/LICENSE_1_0.txt) - -import os -import subprocess -import re - -import bjam - -from b2.tools import unix, common, rc, pch, builtin -from b2.build import feature, type, toolset, generators -from b2.util.utility import os_name, on_windows -from b2.manager import get_manager -from b2.build.generators import Generator -from b2.build.toolset import flags -from b2.util.utility import to_seq - -__debug = None - -def debug(): -    global __debug -    if __debug is None: -        __debug = "--debug-configuration" in bjam.variable("ARGV")         -    return __debug - -feature.extend('toolset', ['gcc']) - - -toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll']) -toolset.inherit_flags('gcc', 'unix') -toolset.inherit_rules('gcc', 'unix') - -generators.override('gcc.prebuilt', 'builtin.prebuilt') -generators.override('gcc.searched-lib-generator', 'searched-lib-generator') - -# Target naming is determined by types/lib.jam and the settings below this -# comment. -# -# On *nix: -#     libxxx.a     static library -#     libxxx.so    shared library -# -# On windows (mingw): -#     libxxx.lib   static library -#     xxx.dll      DLL -#     xxx.lib      import library -# -# On windows (cygwin) i.e. <target-os>cygwin -#     libxxx.a     static library -#     xxx.dll      DLL -#     libxxx.dll.a import library -# -# Note: user can always override by using the <tag>@rule -#       This settings have been choosen, so that mingw -#       is in line with msvc naming conventions. For -#       cygwin the cygwin naming convention has been choosen. 
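The naming table in the comment above can be made concrete with a small sketch. The snippet below is illustrative only; it is not part of gcc.py or of the Boost.Build API, and the dictionary keys and helper name are assumptions made for this example. It simply restates the prefix/suffix conventions listed above.

# Illustrative sketch (not Boost.Build code): the gcc target-naming
# conventions described in the comment above, expressed as a lookup table.
_NAMING = {
    # platform: {target type: (prefix, suffix)}
    'unix':   {'STATIC_LIB': ('lib', '.a'),   'SHARED_LIB': ('lib', '.so')},
    'mingw':  {'STATIC_LIB': ('lib', '.lib'), 'SHARED_LIB': ('', '.dll'),
               'IMPORT_LIB': ('', '.lib')},
    'cygwin': {'STATIC_LIB': ('lib', '.a'),   'SHARED_LIB': ('', '.dll'),
               'IMPORT_LIB': ('lib', '.dll.a')},
}

def _expected_name(platform, target_type, basename):
    # Return the file name a gcc-built target of this type is expected to get.
    prefix, suffix = _NAMING[platform][target_type]
    return prefix + basename + suffix

# For example, the cygwin import library for 'foo' is named 'libfoo.dll.a':
assert _expected_name('cygwin', 'IMPORT_LIB', 'foo') == 'libfoo.dll.a'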
- -# Make the "o" suffix used for gcc toolset on all -# platforms -type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o') -type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a') - -type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a') -type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib') - -__machine_match = re.compile('^([^ ]+)') -__version_match = re.compile('^([0-9.]+)') - -def init(version = None, command = None, options = None): -    """ -        Initializes the gcc toolset for the given version. If necessary, command may -        be used to specify where the compiler is located. The parameter 'options' is a -        space-delimited list of options, each one specified as -        <option-name>option-value. Valid option names are: cxxflags, linkflags and -        linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun -        and the default value will be selected based on the current OS. -        Example: -          using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ; -    """ - -    options = to_seq(options) -    command = to_seq(command) - -    # Information about the gcc command... -    #   The command. -    command = to_seq(common.get_invocation_command('gcc', 'g++', command)) -    #   The root directory of the tool install. -    root = feature.get_values('<root>', options) ; -    #   The bin directory where to find the command to execute. -    bin = None -    #   The flavor of compiler. -    flavor = feature.get_values('<flavor>', options) -    #   Autodetect the root and bin dir if not given. -    if command: -        if not bin: -            bin = common.get_absolute_tool_path(command[-1]) -        if not root: -            root = os.path.dirname(bin) -    #   Autodetect the version and flavor if not given. -    if command: -        machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0] -        machine = __machine_match.search(machine_info).group(1) - -        version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0] -        version = __version_match.search(version_info).group(1) -        if not flavor and machine.find('mingw') != -1: -            flavor = 'mingw' - -    condition = None -    if flavor: -        condition = common.check_init_parameters('gcc', None, -            ('version', version), -            ('flavor', flavor)) -    else: -        condition = common.check_init_parameters('gcc', None, -            ('version', version)) - -    if command: -        command = command[0] - -    common.handle_options('gcc', condition, command, options) - -    linker = feature.get_values('<linker-type>', options) -    if not linker: -        if os_name() == 'OSF': -            linker = 'osf' -        elif os_name() == 'HPUX': -            linker = 'hpux' ; -        else: -            linker = 'gnu' - -    init_link_flags('gcc', linker, condition) - -    # If gcc is installed in non-standard location, we'd need to add -    # LD_LIBRARY_PATH when running programs created with it (for unit-test/run -    # rules). -    if command: -        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries -        # and all must be added to LD_LIBRARY_PATH. The linker will pick the -        # right onces. 
Note that we don't provide a clean way to build 32-bit -        # binary with 64-bit compiler, but user can always pass -m32 manually. -        lib_path = [os.path.join(root, 'bin'), -                    os.path.join(root, 'lib'), -                    os.path.join(root, 'lib32'), -                    os.path.join(root, 'lib64')] -        if debug(): -            print 'notice: using gcc libraries ::', condition, '::', lib_path -        toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path) - -    # If it's not a system gcc install we should adjust the various programs as -    # needed to prefer using the install specific versions. This is essential -    # for correct use of MinGW and for cross-compiling. - -    # - The archive builder. -    archiver = common.get_invocation_command('gcc', -            'ar', feature.get_values('<archiver>', options), [bin], path_last=True) -    toolset.flags('gcc.archive', '.AR', condition, [archiver]) -    if debug(): -        print 'notice: using gcc archiver ::', condition, '::', archiver - -    # - The resource compiler. -    rc_command = common.get_invocation_command_nodefault('gcc', -            'windres', feature.get_values('<rc>', options), [bin], path_last=True) -    rc_type = feature.get_values('<rc-type>', options) - -    if not rc_type: -        rc_type = 'windres' - -    if not rc_command: -        # If we can't find an RC compiler we fallback to a null RC compiler that -        # creates empty object files. This allows the same Jamfiles to work -        # across the board. The null RC uses the assembler to create the empty -        # objects, so configure that. -        rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True) -        rc_type = 'null' -    rc.configure(rc_command, condition, '<rc-type>' + rc_type) - -###if [ os.name ] = NT -###{ -###    # This causes single-line command invocation to not go through .bat files, -###    # thus avoiding command-line length limitations. -###    JAMSHELL = % ; -###} - -#FIXME: when register_c_compiler is moved to -# generators, these should be updated -builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc']) -builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc']) -builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc']) - -# pch support - -# The compiler looks for a precompiled header in each directory just before it -# looks for the include file in that directory. The name searched for is the -# name specified in the #include directive with ".gch" suffix appended. The -# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to -# full name of the header. - -type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch') - -# GCC-specific pch generator. -class GccPchGenerator(pch.PchGenerator): - -    # Inherit the __init__ method - -    def run_pch(self, project, name, prop_set, sources): -        # Find the header in sources. Ignore any CPP sources. -        header = None -        for s in sources: -            if type.is_derived(s.type, 'H'): -                header = s - -        # Error handling: Base header file name should be the same as the base -        # precompiled header name. 
-        header_name = header.name -        header_basename = os.path.basename(header_name).rsplit('.', 1)[0] -        if header_basename != name: -            location = project.project_module -            ###FIXME: -            raise Exception() -            ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ; - -        pch_file = Generator.run(self, project, name, prop_set, [header]) - -        # return result of base class and pch-file property as usage-requirements -        # FIXME: what about multiple results from generator.run? -        return (property_set.create('<pch-file>' + pch_file[0], '<cflags>-Winvalid-pch'), -                pch_file) - -    # Calls the base version specifying source's name as the name of the created -    # target. As result, the PCH will be named whatever.hpp.gch, and not -    # whatever.gch. -    def generated_targets(self, sources, prop_set, project, name = None): -        name = sources[0].name -        return Generator.generated_targets(self, sources, -            prop_set, project, name) - -# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The -# latter have HPP type, but HPP type is derived from H. The type of compilation -# is determined entirely by the destination type. -generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ])) -generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ])) - -# Override default do-nothing generators. -generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator') -generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator') - -flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>']) - -# Declare flags and action for compilation -flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0']) -flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3']) -flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os']) - -flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline']) -flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline']) -flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline']) - -flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w']) -flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall']) -flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic']) -flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror']) - -flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g']) -flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg']) -flags('gcc.compile', 'OPTIONS', ['<rtti>off'], ['-fno-rtti']) - -# On cygwin and mingw, gcc generates position independent code by default, and -# warns if -fPIC is specified. This might not be the right way of checking if -# we're using cygwin. For example, it's possible to run cygwin gcc from NT -# shell, or using crosscompiling. But we'll solve that problem when it's time. -# In that case we'll just add another parameter to 'init' and move this login -# inside 'init'. -if not os_name () in ['CYGWIN', 'NT']: -    # This logic will add -fPIC for all compilations: -    # -    # lib a : a.cpp b ; -    # obj b : b.cpp ; -    # exe c : c.cpp a d ; -    # obj d : d.cpp ; -    # -    # This all is fine, except that 'd' will be compiled with -fPIC even though -    # it's not needed, as 'd' is used only in exe. 
However, it's hard to detect -    # where a target is going to be used. Alternative, we can set -fPIC only -    # when main target type is LIB but than 'b' will be compiled without -fPIC. -    # In x86-64 that will lead to link errors. So, compile everything with -    # -fPIC. -    # -    # Yet another alternative would be to create propagated <sharedable> -    # feature, and set it when building shared libraries, but that's hard to -    # implement and will increase target path length even more. -    flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC']) - -if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX': -    # OSF does have an option called -soname but it doesn't seem to work as -    # expected, therefore it has been disabled. -    HAVE_SONAME   = '' -    SONAME_OPTION = '-h' - - -flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>']) -flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>']) -flags('gcc.compile', 'DEFINES', [], ['<define>']) -flags('gcc.compile', 'INCLUDES', [], ['<include>']) - -engine = get_manager().engine() - -engine.register_action('gcc.compile.c++.pch',  -    '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') - -engine.register_action('gcc.compile.c.pch', -    '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') - - -def gcc_compile_cpp(targets, sources, properties): -    # Some extensions are compiled as C++ by default. For others, we need to -    # pass -x c++. We could always pass -x c++ but distcc does not work with it. -    extension = os.path.splitext (sources [0]) [1] -    lang = '' -    if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']: -        lang = '-x c++' -    get_manager().engine().set_target_variable (targets, 'LANG', lang) -    engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) - -def gcc_compile_c(targets, sources, properties): -    engine = get_manager().engine() -    # If we use the name g++ then default file suffix -> language mapping does -    # not work. So have to pass -x option. Maybe, we can work around this by -    # allowing the user to specify both C and C++ compiler names. -    #if $(>:S) != .c -    #{ -    engine.set_target_variable (targets, 'LANG', '-x c') -    #} -    engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) -     -engine.register_action( -    'gcc.compile.c++', -    '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' + -        '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' + -        '-c -o "$(<:W)" "$(>:W)"', -    function=gcc_compile_cpp, -    bound_list=['PCH_FILE']) - -engine.register_action( -    'gcc.compile.c', -    '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' + -        '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"', -    function=gcc_compile_c, -    bound_list=['PCH_FILE']) - -def gcc_compile_asm(targets, sources, properties): -    get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp') - -engine.register_action( -    'gcc.compile.asm', -    '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"', -    function=gcc_compile_asm) - - -class GccLinkingGenerator(unix.UnixLinkingGenerator): -    """ -        The class which check that we don't try to use the <runtime-link>static -        property while creating or using shared library, since it's not supported by -        gcc/libc. 
-    """ -    def run(self, project, name, ps, sources): -        # TODO: Replace this with the use of a target-os property. - -        no_static_link = False -        if bjam.variable('UNIX'): -            no_static_link = True; -        ##FIXME: what does this mean? -##        { -##            switch [ modules.peek : JAMUNAME ] -##            { -##                case * : no-static-link = true ; -##            } -##        } - -        reason = None -        if no_static_link and ps.get('runtime-link') == 'static': -            if ps.get('link') == 'shared': -                reason = "On gcc, DLL can't be build with '<runtime-link>static'." -            elif type.is_derived(self.target_types[0], 'EXE'): -                for s in sources: -                    source_type = s.type() -                    if source_type and type.is_derived(source_type, 'SHARED_LIB'): -                        reason = "On gcc, using DLLS together with the " +\ -                                 "<runtime-link>static options is not possible " -        if reason: -            print 'warning:', reason -            print 'warning:',\ -                "It is suggested to use '<runtime-link>static' together",\ -                "with '<link>static'." ; -            return -        else: -            generated_targets = unix.UnixLinkingGenerator.run(self, project, -                name, ps, sources) -            return generated_targets - -if on_windows(): -    flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,']) -    generators.register( -        GccLinkingGenerator('gcc.link', True, -            ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], -            [ 'EXE' ], -            [ '<toolset>gcc' ])) -    generators.register( -        GccLinkingGenerator('gcc.link.dll', True, -            ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], -            ['IMPORT_LIB', 'SHARED_LIB'], -            ['<toolset>gcc'])) -else: -    generators.register( -        GccLinkingGenerator('gcc.link', True, -            ['LIB', 'OBJ'], -            ['EXE'], -            ['<toolset>gcc'])) -    generators.register( -        GccLinkingGenerator('gcc.link.dll', True, -            ['LIB', 'OBJ'], -            ['SHARED_LIB'], -            ['<toolset>gcc'])) - -# Declare flags for linking. -# First, the common flags. -flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g']) -flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg']) -flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>']) -flags('gcc.link', 'LINKPATH', [], ['<library-path>']) -flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>']) -flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>']) -flags('gcc.link', 'LIBRARIES', [], ['<library-file>']) - -# For <runtime-link>static we made sure there are no dynamic libraries in the -# link. On HP-UX not all system libraries exist as archived libraries (for -# example, there is no libunwind.a), so, on this platform, the -static option -# cannot be specified. -if os_name() != 'HPUX': -    flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static']) - -# Now, the vendor specific flags. -# The parameter linker can be either gnu, darwin, osf, hpux or sun. -def init_link_flags(toolset, linker, condition): -    """ -        Now, the vendor specific flags. -        The parameter linker can be either gnu, darwin, osf, hpux or sun. -    """ -    toolset_link = toolset + '.link' -    if linker == 'gnu': -        # Strip the binary when no debugging is needed. 
We use --strip-all flag -        # as opposed to -s since icc (intel's compiler) is generally -        # option-compatible with and inherits from the gcc toolset, but does not -        # support -s. - -        # FIXME: what does unchecked translate to? -        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all'])  # : unchecked ; -        flags(toolset_link, 'RPATH',       condition,                      ['<dll-path>'])       # : unchecked ; -        flags(toolset_link, 'RPATH_LINK',  condition,                      ['<xdll-path>'])      # : unchecked ; -        flags(toolset_link, 'START-GROUP', condition,                      ['-Wl,--start-group'])# : unchecked ; -        flags(toolset_link, 'END-GROUP',   condition,                      ['-Wl,--end-group'])  # : unchecked ; - -        # gnu ld has the ability to change the search behaviour for libraries -        # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic -        # and change search for -l switches that follow them. The following list -        # shows the tried variants. -        # The search stops at the first variant that has a match. -        # *nix: -Bstatic -lxxx -        #    libxxx.a -        # -        # *nix: -Bdynamic -lxxx -        #    libxxx.so -        #    libxxx.a -        # -        # windows (mingw,cygwin) -Bstatic -lxxx -        #    libxxx.a -        #    xxx.lib -        # -        # windows (mingw,cygwin) -Bdynamic -lxxx -        #    libxxx.dll.a -        #    xxx.dll.a -        #    libxxx.a -        #    xxx.lib -        #    cygxxx.dll (*) -        #    libxxx.dll -        #    xxx.dll -        #    libxxx.a -        # -        # (*) This is for cygwin -        # Please note that -Bstatic and -Bdynamic are not a guarantee that a -        # static or dynamic lib indeed gets linked in. The switches only change -        # search patterns! - -        # On *nix mixing shared libs with static runtime is not a good idea. -        flags(toolset_link, 'FINDLIBS-ST-PFX', -              map(lambda x: x + '/<runtime-link>shared', condition), -            ['-Wl,-Bstatic']) # : unchecked ; -        flags(toolset_link, 'FINDLIBS-SA-PFX', -              map(lambda x: x + '/<runtime-link>shared', condition), -            ['-Wl,-Bdynamic']) # : unchecked ; - -        # On windows allow mixing of static and dynamic libs with static -        # runtime. -        flags(toolset_link, 'FINDLIBS-ST-PFX', -              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition), -              ['-Wl,-Bstatic']) # : unchecked ; -        flags(toolset_link, 'FINDLIBS-SA-PFX', -              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition), -              ['-Wl,-Bdynamic']) # : unchecked ; -        flags(toolset_link, 'OPTIONS', -              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition), -              ['-Wl,-Bstatic']) # : unchecked ; - -    elif linker == 'darwin': -        # On Darwin, the -s option to ld does not work unless we pass -static, -        # and passing -static unconditionally is a bad idea. So, don't pass -s. -        # at all, darwin.jam will use separate 'strip' invocation. -        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; -        flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ; - -    elif linker == 'osf': -        # No --strip-all, just -s. 
-        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s']) -            # : unchecked ; -        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; -        # This does not supports -R. -        flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ; -        # -rpath-link is not supported at all. - -    elif linker == 'sun': -        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s']) -            # : unchecked ; -        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ; -        # Solaris linker does not have a separate -rpath-link, but allows to use -        # -L for the same purpose. -        flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ; - -        # This permits shared libraries with non-PIC code on Solaris. -        # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the -        # following is not needed. Whether -fPIC should be hardcoded, is a -        # separate question. -        # AH, 2004/10/16: it is still necessary because some tests link against -        # static libraries that were compiled without PIC. -        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text']) -            # : unchecked ; - -    elif linker == 'hpux': -        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), -            ['-Wl,-s']) # : unchecked ; -        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), -            ['-fPIC']) # : unchecked ; - -    else: -        # FIXME: -        errors.user_error( -        "$(toolset) initialization: invalid linker '$(linker)' " + -        "The value '$(linker)' specified for <linker> is not recognized. " + -        "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'") - -# Declare actions for linking. -def gcc_link(targets, sources, properties): -    engine = get_manager().engine() -    engine.set_target_variable(targets, 'SPACE', ' ') -    # Serialize execution of the 'link' action, since running N links in -    # parallel is just slower. For now, serialize only gcc links, it might be a -    # good idea to serialize all links. -    engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore') - -engine.register_action( -    'gcc.link', -    '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' + -        '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' + -        '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' + -        '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' + -        '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' + -        '$(OPTIONS) $(USER_OPTIONS)', -    function=gcc_link, -    bound_list=['LIBRARIES']) - -# Default value. Mostly for the sake of intel-linux that inherits from gcc, but -# does not have the same logic to set the .AR variable. We can put the same -# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is -# always available. -__AR = 'ar' - -flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>']) - -def gcc_archive(targets, sources, properties): -    # Always remove archive and start again. Here's rationale from -    # -    # Andre Hentz: -    # -    # I had a file, say a1.c, that was included into liba.a. I moved a1.c to -    # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd -    # errors. 
After some debugging I traced it back to the fact that a1.o was -    # *still* in liba.a -    # -    # Rene Rivera: -    # -    # Originally removing the archive was done by splicing an RM onto the -    # archive action. That makes archives fail to build on NT when they have -    # many files because it will no longer execute the action directly and blow -    # the line length limit. Instead we remove the file in a different action, -    # just before building the archive. -    clean = targets[0] + '(clean)' -    bjam.call('TEMPORARY', clean) -    bjam.call('NOCARE', clean) -    engine = get_manager().engine() -    engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE')) -    engine.add_dependency(clean, sources) -    engine.add_dependency(targets, clean) -    engine.set_update_action('common.RmTemps', clean, targets) - -# Declare action for creating static libraries. -# The letter 'r' means to add files to the archive with replacement. Since we -# remove archive, we don't care about replacement, but there's no option "add -# without replacement". -# The letter 'c' suppresses the warning in case the archive does not exists yet. -# That warning is produced only on some platforms, for whatever reasons. -engine.register_action('gcc.archive', -                       '"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"', -                       function=gcc_archive, -                       flags=['piecemeal']) - -def gcc_link_dll(targets, sources, properties): -    engine = get_manager().engine() -    engine.set_target_variable(targets, 'SPACE', ' ') -    engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore') -    engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME) -    engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION) - -engine.register_action( -    'gcc.link.dll', -    # Differ from 'link' above only by -shared. -    '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' + -        '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' + -        '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' + -        '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' + -        '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' + -        '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' + -        '$(OPTIONS) $(USER_OPTIONS)', -    function = gcc_link_dll, -    bound_list=['LIBRARIES']) - -# Set up threading support. It's somewhat contrived, so perform it at the end, -# to avoid cluttering other code. - -if on_windows(): -    flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads']) -elif bjam.variable('UNIX'): -    jamuname = bjam.variable('JAMUNAME') -    host_os_name = jamuname[0] -    if host_os_name.startswith('SunOS'): -        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads']) -        flags('gcc', 'FINDLIBS-SA', [], ['rt']) -    elif host_os_name == 'BeOS': -        # BeOS has no threading options, don't set anything here. -        pass -    elif host_os_name.endswith('BSD'): -        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread']) -        # there is no -lrt on BSD -    elif host_os_name == 'DragonFly': -        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread']) -        # there is no -lrt on BSD - DragonFly is a FreeBSD variant, -        # which anoyingly doesn't say it's a *BSD. -    elif host_os_name == 'IRIX': -        # gcc on IRIX does not support multi-threading, don't set anything here. 
-        pass -    elif host_os_name == 'Darwin': -        # Darwin has no threading options, don't set anything here. -        pass -    else: -        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread']) -        flags('gcc', 'FINDLIBS-SA', [], ['rt']) - -def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None): -    #FIXME: for some reason this fails.  Probably out of date feature code -##    if default: -##        flags(toolset, variable, -##              ['<architecture>' + architecture + '/<instruction-set>'], -##              values) -    flags(toolset, variable, -          #FIXME: same as above -          [##'<architecture>/<instruction-set>' + instruction_set, -           '<architecture>' + architecture + '/<instruction-set>' + instruction_set], -          values) - -# Set architecture/instruction-set options. -# -# x86 and compatible -flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32']) -flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i386', ['-march=i386'], default=True) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp']) -## -cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3']) -cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2']) -# Sparc -flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32']) -flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8']) 
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3']) -# RS/6000 & PowerPC -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32']) -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403']) -cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505']) -cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601']) -cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602']) -cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603']) -cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e']) -cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604']) -cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e']) -cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620']) -cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630']) -cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740']) -cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400']) -cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450']) -cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750']) -cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801']) -cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821']) -cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823']) -cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860']) -cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970']) -cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4']) -cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5']) -cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc']) -cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc']) -cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64']) -# AIX variant of RS/6000 & PowerPC -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32']) -flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64']) -flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X 64']) diff --git a/jam-files/boost-build/tools/generate.jam b/jam-files/boost-build/tools/generate.jam deleted file mode 100644 index 6732fa35..00000000 --- a/jam-files/boost-build/tools/generate.jam +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2006 Vladimir Prus -# Distributed under the Boost 
Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Declares main target 'generate' used to produce targets by calling a -# user-provided rule that takes and produces virtual targets. - -import "class" : new ; -import errors ; -import feature ; -import project ; -import property ; -import property-set ; -import targets ; -import regex ; - - -feature.feature generating-rule : : free ; - - -class generated-target-class : basic-target -{ -    import errors ; -    import indirect ; -    import virtual-target ; - -    rule __init__ ( name : project : sources * : requirements * -        : default-build * : usage-requirements * ) -    { -        basic-target.__init__ $(name) : $(project) : $(sources) -            : $(requirements) : $(default-build) : $(usage-requirements) ; - -        if ! [ $(self.requirements).get <generating-rule> ] -        { -            errors.user-error "The generate rule requires the <generating-rule>" -                "property to be set" ; -        } -    } - -    rule construct ( name : sources * : property-set ) -    { -        local result ; -        local gr = [ $(property-set).get <generating-rule> ] ; - -        # FIXME: this is a copy-paste from virtual-target.jam. We should add a -        # utility rule to call a rule like this. -        local rule-name = [ MATCH ^@(.*) : $(gr) ] ; -        if $(rule-name) -        { -            if $(gr[2]) -            { -                local target-name = [ full-name ] ; -                errors.user-error "Multiple <generating-rule> properties" -                    "encountered for target $(target-name)." ; -            } - -            result = [ indirect.call $(rule-name) $(self.project) $(name) -                : $(property-set) : $(sources) ] ; - -            if ! $(result) -            { -                ECHO "warning: Unable to construct" [ full-name ] ; -            } -        } - -        local ur ; -        local targets ; - -        if $(result) -        { -            if  [ class.is-a $(result[1]) : property-set ] -            { -                ur = $(result[1]) ; -                targets = $(result[2-]) ; -            } -            else -            { -                ur = [ property-set.empty ] ; -                targets = $(result) ; -            } -        } -        # FIXME: the following loop should be doable using sequence.transform or -        # some similar utility rule. 
-        local rt ; -        for local t in $(targets) -        { -            rt += [ virtual-target.register $(t) ] ; -        } -        return $(ur) $(rt) ; -    } -} - - -rule generate ( name : sources * : requirements * : default-build * -    : usage-requirements * ) -{ -    local project = [ project.current ] ; - -    targets.main-target-alternative -        [ new generated-target-class $(name) : $(project) -            : [ targets.main-target-sources $(sources) : $(name) ] -            : [ targets.main-target-requirements $(requirements) : $(project) ] -            : [ targets.main-target-default-build $(default-build) : $(project) ] -            : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] -        ] ; -} - -IMPORT $(__name__) : generate : : generate ; diff --git a/jam-files/boost-build/tools/gettext.jam b/jam-files/boost-build/tools/gettext.jam deleted file mode 100644 index 99a43ffe..00000000 --- a/jam-files/boost-build/tools/gettext.jam +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -#  This module supports GNU gettext internationalization utilities. -# -#  It provides two main target rules: 'gettext.catalog', used for -#  creating machine-readable catalogs from translation files, and -#  'gettext.update', used to update translation files from modified -#  sources. -# -#  To add i18n support to your application, you should follow these -#  steps. -# -#  - Decide on a file name which will contain translations and -#  what main target name will be used to update it. For example:: -# -#    gettext.update update-russian : russian.po a.cpp my_app ; -# -#  - Create the initial translation file by running:: -# -#    bjam update-russian -# -#  - Edit russian.po. For example, you might change fields like LastTranslator. -# -#  - Create a main target for the final message catalog:: -# -#    gettext.catalog russian : russian.po ; -# -#  The machine-readable catalog will be updated whenever you update -#  "russian.po". The "russian.po" file will be updated only on explicit -#  request. When you're ready to update translations, you should -# -#  - Run:: -# -#    bjam update-russian -# -#  - Edit "russian.po" in an appropriate editor. -# -#  The next bjam run will convert "russian.po" into machine-readable form. -# -#  By default, translations are marked by the 'i18n' call. The 'gettext.keyword' -#  feature can be used to alter this. - - -import targets ; -import property-set ; -import virtual-target ; -import "class" : new ; -import project ; -import type ; -import generators ; -import errors ; -import feature : feature ; -import toolset : flags ; -import regex ; - -.path = "" ; - -# Initializes the gettext module. -rule init ( path ? # Path where all tools are located. If not specified, -                   # they should be in PATH. -          ) -{ -    if $(.initialized) && $(.path) != $(path) -    { -        errors.error "Attempt to reconfigure with different path" ; -    } -    .initialized = true ; -    if $(path) -    { -        .path = $(path)/ ; -    } -} - -# Creates a main target 'name', which, when updated, will cause -# file 'existing-translation' to be updated with translations -# extracted from 'sources'.
It's possible to specify main target -# in sources --- it which case all target from dependency graph -# of those main targets will be scanned, provided they are of -# appropricate type. The 'gettext.types' feature can be used to -# control the types. -# -# The target will be updated only if explicitly requested on the -# command line. -rule update ( name : existing-translation sources + : requirements * ) -{ -    local project = [ project.current ] ; - -    targets.main-target-alternative -      [ new typed-target $(name) : $(project) : gettext.UPDATE : -        $(existing-translation) $(sources) -        : [ targets.main-target-requirements $(requirements) : $(project) ] -      ] ; -    $(project).mark-target-as-explicit $(name) ; -} - - -# The human editable source, containing translation. -type.register gettext.PO : po ; -# The machine readable message catalog. -type.register gettext.catalog : mo ; -# Intermediate type produce by extracting translations from -# sources. -type.register gettext.POT : pot ; -# Pseudo type used to invoke update-translations generator -type.register gettext.UPDATE ; - -# Identifies the keyword that should be used when scanning sources. -# Default: i18n -feature gettext.keyword : : free ; -# Contains space-separated list of sources types which should be scanned. -# Default: "C CPP" -feature gettext.types : : free ; - -generators.register-standard gettext.compile : gettext.PO : gettext.catalog ; - -class update-translations-generator : generator -{ -    import regex : split ; -    import property-set ; - -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    # The rule should be called with at least two sources. The first source -    # is the translation (.po) file to update. The remaining sources are targets -    # which should be scanned for new messages. All sources files for those targets -    # will be found and passed to the 'xgettext' utility, which extracts the -    # messages for localization. Those messages will be merged to the .po file. -    rule run ( project name ? : property-set : sources * : multiple ? ) -    { -        local types = [ $(property-set).get <gettext.types> ] ; -        types ?= "C CPP" ; -        types = [ regex.split $(types) " " ] ; - -        local keywords = [ $(property-set).get <gettext.keyword> ] ; -        property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ; - -        # First deterime the list of sources that must be scanned for -        # messages. -        local all-sources ; -        # CONSIDER: I'm not sure if the logic should be the same as for 'stage': -        # i.e. following dependency properties as well. -        for local s in $(sources[2-]) -        { -            all-sources += [ virtual-target.traverse $(s) : : include-sources ] ; -        } -        local right-sources ; -        for local s in $(all-sources) -        { -            if [ $(s).type ] in $(types) -            { -                right-sources += $(s) ; -            } -        } - -        local .constructed ; -        if $(right-sources) -        { -            # Create the POT file, which will contain list of messages extracted -            # from the sources. 
-            local extract = -              [ new action $(right-sources) : gettext.extract : $(property-set) ] ; -            local new-messages = [ new file-target $(name) : gettext.POT -              : $(project) : $(extract) ] ; - -            # Create a notfile target which will update the existing translation file -            # with new messages. -            local a = [ new action $(sources[1]) $(new-messages) -              : gettext.update-po-dispatch ] ; -            local r = [ new notfile-target $(name) : $(project) : $(a) ] ; -            .constructed = [ virtual-target.register $(r) ] ; -        } -        else -        { -            errors.error "No source could be scanned by gettext tools" ; -        } -        return $(.constructed) ; -    } -} -generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ; - -flags gettext.extract KEYWORD <gettext.keyword> ; -actions extract -{ -    $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>) -} - -# Does realy updating of po file. The tricky part is that -# we're actually updating one of the sources: -# $(<) is the NOTFILE target we're updating -# $(>[1]) is the PO file to be really updated. -# $(>[2]) is the PO file created from sources. -# -# When file to be updated does not exist (during the -# first run), we need to copy the file created from sources. -# In all other cases, we need to update the file. -rule update-po-dispatch -{ -    NOCARE $(>[1]) ; -    gettext.create-po $(<) : $(>) ; -    gettext.update-po $(<) : $(>) ; -    _ on $(<) = " " ; -    ok on $(<) = "" ; -    EXISTING_PO on $(<) = $(>[1]) ; -} - -# Due to fancy interaction of existing and updated, this rule can be called with -# one source, in which case we copy the lonely source into EXISTING_PO, or with -# two sources, in which case the action body expands to nothing. I'd really like -# to have "missing" action modifier. -actions quietly existing updated create-po bind EXISTING_PO -{ -    cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok)) -} - -actions updated update-po bind EXISTING_PO -{ -    $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])" -} - -actions gettext.compile -{ -    $(.path)msgfmt -o $(<) $(>) -} - -IMPORT $(__name__) : update : : gettext.update ; diff --git a/jam-files/boost-build/tools/gfortran.jam b/jam-files/boost-build/tools/gfortran.jam deleted file mode 100644 index 0aa69b85..00000000 --- a/jam-files/boost-build/tools/gfortran.jam +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? 
: command * : options * ) -{ -} - -# Declare flags and action for compilation -flags gfortran OPTIONS <fflags> ; - -flags gfortran OPTIONS <optimization>off : -O0 ; -flags gfortran OPTIONS <optimization>speed : -O3 ; -flags gfortran OPTIONS <optimization>space : -Os ; - -flags gfortran OPTIONS <debug-symbols>on : -g ; -flags gfortran OPTIONS <profiling>on : -pg ; - -flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ; - -flags gfortran DEFINES <define> ; -flags gfortran INCLUDES <include> ; - -rule compile.fortran -{ -} - -actions compile.fortran -{ -  gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"  -} - -generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ;  diff --git a/jam-files/boost-build/tools/hp_cxx.jam b/jam-files/boost-build/tools/hp_cxx.jam deleted file mode 100644 index 86cd783e..00000000 --- a/jam-files/boost-build/tools/hp_cxx.jam +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright 2001 David Abrahams. -# Copyright 2004, 2005 Markus Schoepflin. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# -# HP CXX compiler -# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN -# -# -# Notes on this toolset: -# -# - Because of very subtle issues with the default ansi mode, strict_ansi mode -#   is used for compilation. One example of things that don't work correctly in -#   the default ansi mode is overload resolution of function templates when -#   mixed with non-template functions. -# -# - For template instantiation "-timplicit_local" is used. Previously, -#   "-tlocal" has been tried to avoid the need for a template repository -#   but this doesn't work with manually instantiated templates. "-tweak" -#   has not been used to avoid the stream of warning messages issued by -#   ar or ld when creating a library or linking an application. -# -# - Debug symbols are generated with "-g3", as this works both in debug and -#   release mode. When compiling C++ code without optimization, we additionally -#   use "-gall", which generates full symbol table information for all classes, -#   structs, and unions. As this turns off optimization, it can't be used when -#   optimization is needed. -# - -import feature generators common ; -import toolset : flags ; - -feature.extend toolset : hp_cxx ; -feature.extend c++abi : cxxarm ; - -# Inherit from Unix toolset to get library ordering magic. -toolset.inherit  hp_cxx : unix ; - -generators.override hp_cxx.prebuilt : builtin.lib-generator ; -generators.override hp_cxx.prebuilt : builtin.prebuilt ; -generators.override hp_cxx.searched-lib-generator : searched-lib-generator ; - - -rule init ( version ? 
: command * : options * ) -{ -    local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ; -     -    local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ; -     -    if $(command) -    { -        local root = [ common.get-absolute-tool-path $(command[-1]) ] ; - -        if $(root) -        { -            flags hp_cxx .root $(condition) : "\"$(root)\"/" ; -        }         -    }       -    # If we can't find 'cxx' anyway, at least show 'cxx' in the commands -    command ?= cxx ; -         -    common.handle-options hp_cxx : $(condition) : $(command) : $(options) ;                -} - -generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ; -generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ; - - - -# No static linking as far as I can tell. -# flags cxx LINKFLAGS <runtime-link>static : -bstatic ; -flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ; -flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ; -flags hp_cxx.link OPTIONS <debug-symbols>on : -g ; -flags hp_cxx.link OPTIONS <debug-symbols>off : -s ; - -flags hp_cxx.compile OPTIONS <optimization>off : -O0 ; -flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ; -flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ; - -# This (undocumented) macro needs to be defined to get all C function -# overloads required by the C++ standard. -flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ; - -# Added for threading support -flags hp_cxx.compile OPTIONS <threading>multi : -pthread ; -flags hp_cxx.link OPTIONS <threading>multi : -pthread ; - -flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ; -flags hp_cxx.compile OPTIONS <optimization>space : -O1 ; -flags hp_cxx.compile OPTIONS <inlining>off : -inline none ; - -# The compiler versions tried (up to V6.5-040) hang when compiling Boost code -# with full inlining enabled. So leave it at the default level for now. -# -# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ; - -flags hp_cxx.compile OPTIONS <profiling>on : -pg ; -flags hp_cxx.link OPTIONS <profiling>on : -pg ; - -# Selection of the object model. This flag is needed on both the C++ compiler -# and linker command line. - -# Unspecified ABI translates to '-model ansi' as most -# standard-conforming. -flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ; -flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ; -flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ; -flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ; - -# Display a descriptive tag together with each compiler message. This tag can -# be used by the user to explicitely suppress the compiler message. 
-flags hp_cxx.compile OPTIONS : -msg_display_tag ; - -flags hp_cxx.compile OPTIONS <cflags> ; -flags hp_cxx.compile.c++ OPTIONS <cxxflags> ; -flags hp_cxx.compile DEFINES <define> ; -flags hp_cxx.compile INCLUDES <include> ; -flags hp_cxx.link OPTIONS <linkflags> ; - -flags hp_cxx.link LIBPATH <library-path> ; -flags hp_cxx.link LIBRARIES <library-file> ; -flags hp_cxx.link FINDLIBS-ST <find-static-library> ; -flags hp_cxx.link FINDLIBS-SA <find-shared-library> ; - -flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; - -actions link bind LIBRARIES -{ -    $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm -} - -# When creating dynamic libraries, we don't want to be warned about unresolved -# symbols, therefore all unresolved symbols are marked as expected by -# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool -# chain. - -actions link.dll bind LIBRARIES -{ -    $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH)  "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm -} - - -# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI -# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std -# is the default, no special flag is needed. -actions compile.c -{ -    $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" -} - -# Note: The compiler is forced to compile the files as C++ (-x cxx) because -# otherwise it will silently ignore files with no file extension. -# -# Note: We deliberately don't suppress any warnings on the compiler command -# line, the user can always do this in a customized toolset later on. - -rule compile.c++ -{ -    # We preprocess the TEMPLATE_DEPTH command line option here because we found -    # no way to do it correctly in the actual action code. There we either get -    # the -pending_instantiations parameter when no c++-template-depth property -    # has been specified or we get additional quotes around -    # "-pending_instantiations ". -    local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ; -    TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ; -} - -actions compile.c++ -{ -    $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" -} - -# Always create archive from scratch. See the gcc toolet for rationale. -RM = [ common.rm-command ] ; -actions together piecemeal archive -{ -  $(RM) "$(<)" -  ar rc $(<) $(>) -} diff --git a/jam-files/boost-build/tools/hpfortran.jam b/jam-files/boost-build/tools/hpfortran.jam deleted file mode 100644 index 96e8d18b..00000000 --- a/jam-files/boost-build/tools/hpfortran.jam +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? 
: command * : options * ) -{ -} - -# Declare flags and action for compilation -flags hpfortran OPTIONS <optimization>off : -O0 ; -flags hpfortran OPTIONS <optimization>speed : -O3 ; -flags hpfortran OPTIONS <optimization>space : -O1 ; - -flags hpfortran OPTIONS <debug-symbols>on : -g ; -flags hpfortran OPTIONS <profiling>on : -pg ; - -flags hpfortran DEFINES <define> ; -flags hpfortran INCLUDES <include> ; - -rule compile.fortran -{ -} - -actions compile.fortran -{ -  f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"  -} - -generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ;  diff --git a/jam-files/boost-build/tools/ifort.jam b/jam-files/boost-build/tools/ifort.jam deleted file mode 100644 index eb7c1988..00000000 --- a/jam-files/boost-build/tools/ifort.jam +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? : command * : options * ) -{ -} - -# Declare flags and action for compilation -flags ifort OPTIONS <fflags> ; - -flags ifort OPTIONS <optimization>off : /Od ; -flags ifort OPTIONS <optimization>speed : /O3 ; -flags ifort OPTIONS <optimization>space : /O1 ; - -flags ifort OPTIONS <debug-symbols>on : /debug:full ; -flags ifort OPTIONS <profiling>on : /Qprof_gen ; - -flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ; -flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ; -flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ; -flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ; -flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ; -flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ; - -flags ifort DEFINES <define> ; -flags ifort INCLUDES <include> ; - -rule compile.fortran -{ -} - -actions compile.fortran -{ -  ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)"  -} - -generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ;  diff --git a/jam-files/boost-build/tools/intel-darwin.jam b/jam-files/boost-build/tools/intel-darwin.jam deleted file mode 100644 index aa0fd8fb..00000000 --- a/jam-files/boost-build/tools/intel-darwin.jam +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Noel Belcourt 2007. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -import intel ; -import feature : feature ; -import os ; -import toolset ; -import toolset : flags ; -import gcc ; -import common ; -import errors ; -import generators ; - -feature.extend-subfeature toolset intel : platform : darwin ; - -toolset.inherit-generators intel-darwin  -  <toolset>intel <toolset-intel:platform>darwin  -  : gcc  -  # Don't inherit PCH generators. They were not tested, and probably -  # don't work for this compiler. 
-  : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch -  ; - -generators.override intel-darwin.prebuilt : builtin.lib-generator ; -generators.override intel-darwin.prebuilt : builtin.prebuilt ; -generators.override intel-darwin.searched-lib-generator : searched-lib-generator ; - -toolset.inherit-rules intel-darwin : gcc ; -toolset.inherit-flags intel-darwin : gcc  -        : <inlining>off <inlining>on <inlining>full <optimization>space  -          <warnings>off <warnings>all <warnings>on -          <architecture>x86/<address-model>32 -          <architecture>x86/<address-model>64 -        ; -         -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} -                        -# vectorization diagnostics -feature vectorize : off on full ; - -# Initializes the intel-darwin toolset -#   version in mandatory -#   name (default icc) is used to invoke the specified intel complier -#   compile and link options allow you to specify addition command line options for each version -rule init ( version ? :  command * : options * ) -{ -    local condition = [ common.check-init-parameters intel-darwin -        : version $(version) ] ; -     -    command = [ common.get-invocation-command intel-darwin : icc  -        : $(command) : /opt/intel_cc_80/bin ] ; - -    common.handle-options intel-darwin : $(condition) : $(command) : $(options) ; - -    gcc.init-link-flags intel-darwin darwin $(condition) ; - -    # handle <library-path> -    # local library-path = [ feature.get-values <library-path> : $(options) ] ; -    # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ; - -    local root = [ feature.get-values <root> : $(options) ] ; -    local bin ; -    if $(command) || $(root) -    { -        bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; -        root ?= $(bin:D) ; -     -        if $(root) -        { -            # Libraries required to run the executable may be in either -            # $(root)/lib (10.1 and earlier)  -            #     or  -            # $(root)/lib/architecture-name (11.0 and later: -            local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ; -            if $(.debug-configuration) -            { -                ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ; -            } -            flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ; -        }    -    } - -    local m = [ MATCH (..).* : $(version) ] ; -    local n = [ MATCH (.)\\. 
: $(m) ] ; -    if $(n) { -      m = $(n) ; -    } - -    local major = $(m) ; -     -    if $(major) = "9" { -      flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ; -      flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1  ; -      flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2  ; -      flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ; -      flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ; -      flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ; -      flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ; -      flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ; -    } -    else { -      flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ; -      flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1  ; -      flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2  ; -      flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ; -      flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ; -      flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ; -      flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ; -      flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ; -    } - -    local minor = [ MATCH ".*\\.(.).*" : $(version) ] ; - -    # wchar_t char_traits workaround for compilers older than 10.2 -    if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) { -        flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ; -    } -} - -SPACE = " " ; - -flags intel-darwin.compile OPTIONS <cflags> ; -flags intel-darwin.compile OPTIONS <cxxflags> ; -# flags intel-darwin.compile INCLUDES <include> ; - -flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc - -#  -cpu-type-em64t = prescott nocona ; -flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ; -flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ; - -flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ; -flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ; -flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ; - -flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ; -flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ; -flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ; - -actions compile.c -{ -    "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -flags intel-darwin ARFLAGS <archiveflags> ; - -# Default value. Mostly for the sake of intel-linux -# that inherits from gcc, but does not has the same -# logic to set the .AR variable. We can put the same -# logic in intel-linux, but that's hardly worth the trouble -# as on Linux, 'ar' is always available. -.AR = ar ; - -rule archive ( targets * : sources * : properties * ) -{ -  # Always remove archive and start again. 
Here's rationale from -  # Andre Hentz: -  # -  # I had a file, say a1.c, that was included into liba.a.  -  # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.  -  # My program was crashing with absurd errors.  -  # After some debugging I traced it back to the fact that a1.o was *still*  -  # in liba.a  -  # -  # Rene Rivera: -  # -  # Originally removing the archive was done by splicing an RM -  # onto the archive action. That makes archives fail to build on NT -  # when they have many files because it will no longer execute the -  # action directly and blow the line length limit. Instead we -  # remove the file in a different action, just before the building -  # of the archive. -  # -  local clean.a = $(targets[1])(clean) ; -  TEMPORARY $(clean.a) ; -  NOCARE $(clean.a) ; -  LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; -  DEPENDS $(clean.a) : $(sources) ; -  DEPENDS $(targets) : $(clean.a) ; -  common.RmTemps $(clean.a) : $(targets) ; -} - -actions piecemeal archive -{ -  "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" -  "ranlib" -cs "$(<)" -} - -flags intel-darwin.link USER_OPTIONS <linkflags> ; - -# Declare actions for linking -rule link ( targets * : sources * : properties * ) -{ -  SPACE on $(targets) = " " ; -  # Serialize execution of the 'link' action, since -  # running N links in parallel is just slower. -  JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)"  "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) -} diff --git a/jam-files/boost-build/tools/intel-linux.jam b/jam-files/boost-build/tools/intel-linux.jam deleted file mode 100644 index d9164add..00000000 --- a/jam-files/boost-build/tools/intel-linux.jam +++ /dev/null @@ -1,250 +0,0 @@ -#  Copyright (c) 2003 Michael Stevens -#  Copyright (c) 2011 Bryce Lelbach -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import toolset ; -import feature ; -import toolset : flags ; - -import intel ; -import gcc ; -import common ; -import errors ; -import generators ; -import type ; -import numbers ; - -feature.extend-subfeature toolset intel : platform : linux ; - -toolset.inherit-generators intel-linux  -     <toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ; -generators.override intel-linux.prebuilt : builtin.lib-generator ; -generators.override intel-linux.prebuilt : builtin.prebuilt ; -generators.override intel-linux.searched-lib-generator : searched-lib-generator ; - -# Override default do-nothing generators. 
-generators.override intel-linux.compile.c.pch   : pch.default-c-pch-generator   ; -generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ; -  -type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ; - -toolset.inherit-rules intel-linux : gcc ; -toolset.inherit-flags intel-linux : gcc  -        : <inlining>off <inlining>on <inlining>full -          <optimization>space <optimization>speed -          <warnings>off <warnings>all <warnings>on -        ; -         -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} -                        -# Initializes the intel-linux toolset -#   version in mandatory -#   name (default icpc) is used to invoke the specified intel-linux complier -#   compile and link options allow you to specify addition command line options for each version -rule init ( version ? :  command * : options * ) -{ -    local condition = [ common.check-init-parameters intel-linux -        : version $(version) ] ; -     -    if $(.debug-configuration) -    { -        ECHO "notice: intel-linux version is" $(version) ; -    } - -    local default_path ; - -    # Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0, -    # aka intel-linux-12.0. In this version, Intel thankfully decides to install -    # to a sane 'intel' folder in /opt. -    if [ MATCH "(12[.]0|12)" : $(version) ] -        { default_path = /opt/intel/bin ; } -    # Intel C++ Compiler 11.1.  -    else if [ MATCH "(11[.]1)" : $(version) ] -        { default_path = /opt/intel_cce_11.1.064.x86_64/bin ; } -    # Intel C++ Compiler 11.0.  -    else if [ MATCH "(11[.]0|11)" : $(version) ] -        { default_path = /opt/intel_cce_11.0.074.x86_64/bin ; } -    # Intel C++ Compiler 10.1.  -    else if [ MATCH "(10[.]1)" : $(version) ] -        { default_path = /opt/intel_cce_10.1.013_x64/bin ; } -    # Intel C++ Compiler 9.1.  -    else if [ MATCH "(9[.]1)" : $(version) ] -        { default_path = /opt/intel_cc_91/bin ; } -    # Intel C++ Compiler 9.0.  -    else if [ MATCH "(9[.]0|9)" : $(version) ] -        { default_path = /opt/intel_cc_90/bin ; } -    # Intel C++ Compiler 8.1.  -    else if [ MATCH "(8[.]1)" : $(version) ] -        { default_path = /opt/intel_cc_81/bin ; } -    # Intel C++ Compiler 8.0 - this used to be the default, so now it's the -    # fallback.  -    else  -        { default_path = /opt/intel_cc_80/bin ; } -             -    if $(.debug-configuration) -    { -        ECHO "notice: default search path for intel-linux is" $(default_path) ; -    } - -    command = [ common.get-invocation-command intel-linux : icpc  -        : $(command) : $(default_path) ] ; -                 -    common.handle-options intel-linux : $(condition) : $(command) : $(options) ; - -    gcc.init-link-flags intel-linux gnu $(condition) ; -     -    local root = [ feature.get-values <root> : $(options) ] ; -    local bin ; -    if $(command) || $(root) -    { -        bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; -        root ?= $(bin:D) ; -         -        local command-string = $(command:J=" ") ; -        local version-output = [ SHELL "$(command-string) --version" ] ; -        local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ; -        local major = [ MATCH "([0-9]+).*" : $(real-version) ] ; -         -        # If we failed to determine major version, use the behaviour for -        # the current compiler. 
-        if $(major) && [ numbers.less $(major) 10 ] -        { -            flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ; -            flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ; -            flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ;             -            flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ; -            flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ; -        } -        else if $(major) && [ numbers.less $(major) 11 ] -        { -            flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;  -            flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;  -            flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;                 -            flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ; -            flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ; -        } -        else # newer version of intel do have -Os (at least 11+, don't know about 10) -        {                         -            flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;  -            flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;  -            flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;                 -            flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ; -            flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ; -        }         -            -        if $(root) -        { -            # Libraries required to run the executable may be in either -            # $(root)/lib (10.1 and earlier)  -            #     or  -            # $(root)/lib/architecture-name (11.0 and later: -            local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ; -            if $(.debug-configuration) -            { -                ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ; -            } -            flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ; -        }    -    } -} - -SPACE = " " ; - -flags intel-linux.compile OPTIONS <warnings>off : -w0 ; -flags intel-linux.compile OPTIONS <warnings>on : -w1 ; -flags intel-linux.compile OPTIONS <warnings>all : -w2 ; - -rule compile.c++ ( targets * : sources * : properties * ) -{ -    gcc.setup-threading $(targets) : $(sources) : $(properties) ; -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -    gcc.setup-address-model $(targets) : $(sources) : $(properties) ; -    DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -actions compile.c++ bind PCH_FILE -{ -    "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)"  -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)" -} - -rule compile.c ( targets * : sources * : properties * ) -{ -    gcc.setup-threading $(targets) : $(sources) : $(properties) ; -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -    gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -    DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; -} - -actions compile.c bind PCH_FILE -{ -    "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)" -} - -rule compile.c++.pch ( targets * : sources * : properties * ) -{ -    
gcc.setup-threading $(targets) : $(sources) : $(properties) ; -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -    gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -} -# -# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler -# won't over-write an existing pch: instead it creates filename$1.pchi, filename$2.pchi -# etc - which appear not to do anything except take up disk space :-( -# -actions compile.c++.pch -{ -    rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)" -} - -actions compile.fortran -{ -    "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.c.pch ( targets * : sources * : properties * ) -{ -    gcc.setup-threading $(targets) : $(sources) : $(properties) ; -    gcc.setup-fpic $(targets) : $(sources) : $(properties) ; -    gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -} - -actions compile.c.pch -{ -    rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)" -} - -rule link ( targets * : sources * : properties * ) -{ -    gcc.setup-threading $(targets) : $(sources) : $(properties) ; -    gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -    SPACE on $(targets) = " " ; -    JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - -rule link.dll ( targets * : sources * : properties * ) -{ -    gcc.setup-threading $(targets) : $(sources) : $(properties) ; -    gcc.setup-address-model $(targets) : $(sources) : $(properties) ;     -    SPACE on $(targets) = " " ; -    JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ; -} - -# Differ from 'link' above only by -shared. -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)"  "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) -} - - - diff --git a/jam-files/boost-build/tools/intel-win.jam b/jam-files/boost-build/tools/intel-win.jam deleted file mode 100644 index 691b5dce..00000000 --- a/jam-files/boost-build/tools/intel-win.jam +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# Importing common is needed because the rules we inherit here depend on it. -# That is nasty. -import common ; -import errors ; -import feature ; -import intel ; -import msvc ; -import os ; -import toolset ; -import generators ; -import type ; - -feature.extend-subfeature toolset intel : platform : win ; - -toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ; -toolset.inherit-flags intel-win : msvc : : YLOPTION ; -toolset.inherit-rules intel-win : msvc ; - -# Override default do-nothing generators. 
-generators.override intel-win.compile.c.pch   : pch.default-c-pch-generator   ; -generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ; -generators.override intel-win.compile.rc : rc.compile.resource ; -generators.override intel-win.compile.mc : mc.compile ; - -toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ; - -toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ; - -# Initializes the intel toolset for windows -rule init ( version ? :     # the compiler version -            command * :     # the command to invoke the compiler itself -            options *       # Additional option: <compatibility> -                            # either 'vc6', 'vc7', 'vc7.1' -                            # or 'native'(default). -          ) -{ -    local compatibility = -      [ feature.get-values <compatibility> : $(options) ] ; -    local condition = [  common.check-init-parameters intel-win -        : version $(version) : compatibility $(compatibility) ] ; - -    command = [ common.get-invocation-command intel-win : icl.exe : -        $(command) ] ; - -    common.handle-options intel-win : $(condition) : $(command) : $(options) ; - -    local root ; -    if $(command) -    { -        root = [ common.get-absolute-tool-path $(command[-1]) ] ; -        root = $(root)/ ; -    } - -    local setup ; -    setup = [ GLOB $(root) : iclvars_*.bat ] ; -    if ! $(setup) -    { -       setup = $(root)/iclvars.bat ; -    } -    setup = "call \""$(setup)"\" > nul " ; - -    if [ os.name ] = NT -    { -        setup = $(setup)" -" ; -    } -    else -    { -        setup = "cmd /S /C "$(setup)" \"&&\" " ; -    } - -    toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ; -    toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ; -    toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ; -    toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ; -    toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ; -    toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ; - -    local m = [ MATCH (.).* : $(version) ] ; -    local major = $(m[1]) ; - -    local C++FLAGS ; - -    C++FLAGS += /nologo ; - -    # Reduce the number of spurious error messages -    C++FLAGS += /Qwn5 /Qwd985 ; - -    # Enable ADL -    C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too - -    # Disable Microsoft "secure" overloads in Dinkumware libraries since they -    # cause compile errors with Intel versions 9 and 10. -    C++FLAGS += -D_SECURE_SCL=0 ; - -    if $(major) > 5 -    { -        C++FLAGS += /Zc:forScope ;  # Add support for correct for loop scoping. -    } - -    # Add options recognized only by intel7 and above. -    if $(major) >= 7 -    { -        C++FLAGS += /Qansi_alias ; -    } - -    if $(compatibility) = vc6 -    { -        C++FLAGS += -          # Emulate VC6 -          /Qvc6 - -          # No wchar_t support in vc6 dinkum library.  Furthermore, in vc6 -          # compatibility-mode, wchar_t is not a distinct type from unsigned -          # short. -          -DBOOST_NO_INTRINSIC_WCHAR_T -          ; -    } -    else -    { -        if $(major) > 5 -        { -            # Add support for wchar_t -            C++FLAGS += /Zc:wchar_t -              # Tell the dinkumware library about it. 
-              -D_NATIVE_WCHAR_T_DEFINED -              ; -        } -    } - -    if $(compatibility) && $(compatibility) != native -    { -        C++FLAGS += /Q$(base-vc) ; -    } -    else -    { -        C++FLAGS += -          -Qoption,cpp,--arg_dep_lookup -          # The following options were intended to disable the Intel compiler's -          # 'bug-emulation' mode, but were later reported to be causing ICE with -          # Intel-Win 9.0. It is not yet clear which options can be safely used. -          # -Qoption,cpp,--const_string_literals -          # -Qoption,cpp,--new_for_init -          # -Qoption,cpp,--no_implicit_typename -          # -Qoption,cpp,--no_friend_injection -          # -Qoption,cpp,--no_microsoft_bugs -          ; -    } - -    toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ; -    # By default, when creating PCH, intel adds 'i' to the explicitly -    # specified name of the PCH file. Of course, Boost.Build is not -    # happy when compiler produces not the file it was asked for. -    # The option below stops this behaviour. -    toolset.flags intel-win CFLAGS : -Qpchi- ; - -    if ! $(compatibility) -    { -        # If there's no backend version, assume 7.1. -        compatibility = vc7.1 ; -    } - -    local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ; -    if ! $(extract-version) -    { -        errors.user-error "Invalid value for compatibility option:" -            $(compatibility) ; -    } - -    # Depending on the settings, running of tests require some runtime DLLs. -    toolset.flags intel-win RUN_PATH $(condition) : $(root) ; - -    msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ; -} - -toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ; - -toolset.flags intel-win YLOPTION ; - diff --git a/jam-files/boost-build/tools/intel.jam b/jam-files/boost-build/tools/intel.jam deleted file mode 100644 index 67038aa2..00000000 --- a/jam-files/boost-build/tools/intel.jam +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This is a generic 'intel' toolset. Depending on the current -# system, it forwards either to 'intel-linux' or 'intel-win' -# modules. - -import feature ; -import os ; -import toolset ; - -feature.extend toolset : intel ; -feature.subfeature toolset intel : platform : : propagated link-incompatible ; - -rule init ( * : * ) -{ -    if [ os.name ] = LINUX -    { -        toolset.using intel-linux :  -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -    else if [ os.name ] = MACOSX -    { -        toolset.using intel-darwin :  -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -    else -    { -        toolset.using intel-win : -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    }         -} diff --git a/jam-files/boost-build/tools/lex.jam b/jam-files/boost-build/tools/lex.jam deleted file mode 100644 index 75d64131..00000000 --- a/jam-files/boost-build/tools/lex.jam +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2003 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -import type ; -import generators ; -import feature ; -import property ; - - -feature.feature flex.prefix : : free ; -type.register LEX : l ; -type.register LEX++ : ll ; -generators.register-standard lex.lex : LEX : C ; -generators.register-standard lex.lex : LEX++ : CPP ; - -rule init ( ) -{ -} - -rule lex ( target : source : properties * ) -{    -    local r = [ property.select flex.prefix : $(properties) ] ; -    if $(r) -    { -        PREFIX on $(<) = $(r:G=) ; -    } -} - -actions lex  -{ -    flex -P$(PREFIX) -o$(<) $(>)     -} diff --git a/jam-files/boost-build/tools/make.jam b/jam-files/boost-build/tools/make.jam deleted file mode 100644 index 08567285..00000000 --- a/jam-files/boost-build/tools/make.jam +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2003 Douglas Gregor -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -#  This module defines the 'make' main target rule. - -import "class" : new ; -import errors : error ; -import project ; -import property ; -import property-set ; -import regex ; -import targets ; - - -class make-target-class : basic-target -{ -    import type regex virtual-target ; -    import "class" : new ; - -    rule __init__ ( name : project : sources * : requirements * -        : default-build * : usage-requirements * ) -    { -        basic-target.__init__ $(name) : $(project) : $(sources) : -            $(requirements) : $(default-build) : $(usage-requirements) ; -    } - -    rule construct ( name : source-targets * : property-set ) -    { -        local action-name = [ $(property-set).get <action> ] ; -        # 'm' will always be set -- we add '@' ourselves in the 'make' rule -        # below. -        local m = [ MATCH ^@(.*) : $(action-name) ] ; - -        local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ; -        local t = [ new file-target $(self.name) exact : [ type.type -            $(self.name) ] : $(self.project) : $(a) ] ; -        return [ property-set.empty ] [ virtual-target.register $(t) ] ; -    } -} - - -# Declares the 'make' main target. -# -rule make ( target-name : sources * : generating-rule + : requirements * : -    usage-requirements * ) -{ -    local project = [ project.current ] ; - -    # The '@' sign causes the feature.jam module to qualify rule name with the -    # module name of current project, if needed. -    local m = [ MATCH ^(@).* : $(generating-rule) ] ; -    if ! $(m) -    { -        generating-rule = @$(generating-rule) ; -    } -    requirements += <action>$(generating-rule) ; - -    targets.main-target-alternative -        [ new make-target-class $(target-name) : $(project) -            : [ targets.main-target-sources $(sources) : $(target-name) ] -            : [ targets.main-target-requirements $(requirements) : $(project) ] -            : [ targets.main-target-default-build : $(project) ] -            : [ targets.main-target-usage-requirements $(usage-requirements) : -                $(project) ] ] ; -} - - -IMPORT $(__name__) : make : : make ; diff --git a/jam-files/boost-build/tools/make.py b/jam-files/boost-build/tools/make.py deleted file mode 100644 index 10baa1cb..00000000 --- a/jam-files/boost-build/tools/make.py +++ /dev/null @@ -1,59 +0,0 @@ -# Status: ported. 
-# Base revision: 64068 - -# Copyright 2003 Dave Abrahams  -# Copyright 2003 Douglas Gregor  -# Copyright 2006 Rene Rivera  -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -#  This module defines the 'make' main target rule. - -from b2.build.targets import BasicTarget -from b2.build.virtual_target import Action, FileTarget -from b2.build import type -from b2.manager import get_manager -import b2.build.property_set - - -class MakeTarget(BasicTarget): -   -    def construct(self, name, source_targets, property_set): - -        action_name = property_set.get("<action>")[0]             -        action = Action(get_manager(), source_targets, action_name[1:], property_set) -        target = FileTarget(self.name(), type.type(self.name()), -                            self.project(), action, exact=True)     -        return [ b2.build.property_set.empty(), -                 [self.project().manager().virtual_targets().register(target)]] - -def make (target_name, sources, generating_rule, -          requirements=None, usage_requirements=None): - -    target_name = target_name[0] -    generating_rule = generating_rule[0] -    if generating_rule[0] != '@': -        generating_rule = '@' + generating_rule - -    if not requirements: -        requirements = [] - -         -    requirements.append("<action>%s" % generating_rule) -     -    m = get_manager() -    targets = m.targets() -    project = m.projects().current() -    engine = m.engine() -    engine.register_bjam_action(generating_rule) - -    targets.main_target_alternative(MakeTarget( -        target_name, project, -        targets.main_target_sources(sources, target_name), -        targets.main_target_requirements(requirements, project), -        targets.main_target_default_build([], project), -        targets.main_target_usage_requirements(usage_requirements or [], project))) - -get_manager().projects().add_rule("make", make) - diff --git a/jam-files/boost-build/tools/mc.jam b/jam-files/boost-build/tools/mc.jam deleted file mode 100644 index 57837773..00000000 --- a/jam-files/boost-build/tools/mc.jam +++ /dev/null @@ -1,44 +0,0 @@ -#~ Copyright 2005 Alexey Pakhunov. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -#  Support for Microsoft message compiler tool. -#  Notes: -#  - there's just message compiler tool, there's no tool for  -#    extracting message strings from sources -#  - This file allows to use Microsoft message compiler -#    with any toolset. In msvc.jam, there's more specific -#    message compiling action. 
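# A minimal usage sketch, assuming hypothetical target and file names: once
# this module is loaded, an .mc source listed in a Jamfile is handled by the
# mc.compile generator registered below, which produces the generated header
# and resource script::
#
#    exe my_service
#        : main.cpp messages.mc
#        : <mc-input-encoding>unicode
#        ;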
- -import common ; -import generators ; -import feature : feature get-values ; -import toolset : flags ; -import type ; -import rc ; - -rule init ( ) -{ -} - -type.register MC : mc ; - - -# Command line options -feature mc-input-encoding : ansi unicode : free ; -feature mc-output-encoding : unicode ansi : free ; -feature mc-set-customer-bit : no yes : free ; - -flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ; -flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ; -flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ; -flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ; -flags mc.compile MCFLAGS <mc-set-customer-bit>no : ; -flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ; - -generators.register-standard mc.compile : MC : H RC ; - -actions compile -{ -    mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)" -} diff --git a/jam-files/boost-build/tools/message.jam b/jam-files/boost-build/tools/message.jam deleted file mode 100644 index 212d8542..00000000 --- a/jam-files/boost-build/tools/message.jam +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2008 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target type 'message', that prints a message when built for the -# first time. - -import project ; -import "class" : new ; -import targets ; -import property-set ; - -class message-target-class : basic-target -{ -    rule __init__ ( name-and-dir : project : * ) -    { -        basic-target.__init__ $(name-and-dir) : $(project) ; -        self.3 = $(3) ; -        self.4 = $(4) ; -        self.5 = $(5) ; -        self.6 = $(6) ; -        self.7 = $(7) ; -        self.8 = $(8) ;         -        self.9 = $(9) ;         -        self.built = ; -    } -     -    rule construct ( name : source-targets * : property-set ) -    { -        if ! $(self.built) -        { -            for i in 3 4 5 6 7 8 9 -            { -                if $(self.$(i)) -                { -                    ECHO $(self.$(i)) ; -                } -            } -            self.built = 1 ; -        } -         -        return [ property-set.empty ] ; -    } -} - - -rule message ( name : * ) -{ -    local project = [ project.current ] ; - -    targets.main-target-alternative -      [ new message-target-class $(name) : $(project)  -        : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ; -} -IMPORT $(__name__) : message : : message ;
\ No newline at end of file diff --git a/jam-files/boost-build/tools/message.py b/jam-files/boost-build/tools/message.py deleted file mode 100644 index cc0b946f..00000000 --- a/jam-files/boost-build/tools/message.py +++ /dev/null @@ -1,46 +0,0 @@ -# Status: ported. -# Base revision: 64488. -# -# Copyright 2008, 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Defines main target type 'message', that prints a message when built for the -# first time. - -import b2.build.targets as targets -import b2.build.property_set as property_set - -from b2.manager import get_manager - -class MessageTargetClass(targets.BasicTarget): - -    def __init__(self, name, project, *args): - -        targets.BasicTarget.__init__(self, name, project, []) -        self.args = args -        self.built = False - -    def construct(self, name, sources, ps): - -        if not self.built: -            for arg in self.args: -                if type(arg) == type([]): -                    arg = " ".join(arg)                 -                print arg -            self.built = True - -        return (property_set.empty(), []) - -def message(name, *args): - -    if type(name) == type([]): -        name = name[0] - -    t = get_manager().targets() -     -    project = get_manager().projects().current() -         -    return t.main_target_alternative(MessageTargetClass(*((name, project) + args))) - -get_manager().projects().add_rule("message", message) diff --git a/jam-files/boost-build/tools/midl.jam b/jam-files/boost-build/tools/midl.jam deleted file mode 100644 index 0aa5dda3..00000000 --- a/jam-files/boost-build/tools/midl.jam +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (c) 2005 Alexey Pakhunov. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Microsoft Interface Definition Language (MIDL) related routines - -import common ; -import generators ; -import feature : feature get-values ; -import os ; -import scanner ; -import toolset : flags ; -import type ; - -rule init ( ) -{ -} - -type.register IDL : idl ; - -# A type library (.tlb) is generated by MIDL compiler and can be included -# to resources of an application (.rc). In order to be found by a resource  -# compiler its target type should be derived from 'H' - otherwise -# the property '<implicit-dependency>' will be ignored. 
-type.register MSTYPELIB : tlb : H ; - - -# Register scanner for MIDL files -class midl-scanner : scanner  -{ -    import path property-set regex scanner type virtual-target ; -     -    rule __init__ ( includes * ) -    { -        scanner.__init__ ; -     -        self.includes = $(includes) ; - -        # List of quoted strings  -        self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ; - -        # 'import' and 'importlib' directives  -        self.re-import    = "import"$(self.re-strings)"[ \t]*;" ; -        self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ; - -        # C preprocessor 'include' directive -        self.re-include-angle  = "#[ \t]*include[ \t]*<(.*)>" ; -        self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ; -    }     - -    rule pattern ( ) -    { -        # Match '#include', 'import' and 'importlib' directives -        return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ; -    } - -    rule process ( target : matches * : binding ) -    { -        local included-angle  = [ regex.transform $(matches) : $(self.re-include-angle)  : 1 ] ; -        local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ; -        local imported        = [ regex.transform $(matches) : $(self.re-import)         : 1 3 ] ; -        local imported_tlbs   = [ regex.transform $(matches) : $(self.re-importlib)      : 1 3 ] ; - -        # CONSIDER: the new scoping rule seem to defeat "on target" variables. -        local g = [ on $(target) return $(HDRGRIST) ] ;   -        local b = [ NORMALIZE_PATH $(binding:D) ] ; - -        # Attach binding of including file to included targets. -        # When target is directly created from virtual target -        # this extra information is unnecessary. But in other -        # cases, it allows to distinguish between two headers of the  -        # same name included from different places.       
-        local g2 = $(g)"#"$(b) ; - -        included-angle = $(included-angle:G=$(g)) ; -        included-quoted = $(included-quoted:G=$(g2)) ; -        imported = $(imported:G=$(g2)) ; -        imported_tlbs = $(imported_tlbs:G=$(g2)) ; - -        local all = $(included-angle) $(included-quoted) $(imported) ; - -        INCLUDES $(target) : $(all) ; -        DEPENDS $(target) : $(imported_tlbs) ; -        NOCARE $(all) $(imported_tlbs) ; -        SEARCH on $(included-angle)  = $(self.includes:G=) ; -        SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ; -        SEARCH on $(imported)        = $(b) $(self.includes:G=) ; -        SEARCH on $(imported_tlbs)   = $(b) $(self.includes:G=) ; -         -        scanner.propagate  -            [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] :  -            $(included-angle) $(included-quoted) : $(target) ; - -        scanner.propagate $(__name__) : $(imported) : $(target) ; -    }         -} - -scanner.register midl-scanner : include ; -type.set-scanner IDL : midl-scanner ; - - -# Command line options -feature midl-stubless-proxy : yes no : propagated ; -feature midl-robust : yes no : propagated ; - -flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ; -flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ; -flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ; -flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ; - -# Architecture-specific options -architecture-x86 = <architecture> <architecture>x86 ; -address-model-32 = <address-model> <address-model>32 ; -address-model-64 = <address-model> <address-model>64 ; - -flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ; -flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ; -flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ; - - -flags midl.compile.idl DEFINES <define> ; -flags midl.compile.idl UNDEFS <undef> ; -flags midl.compile.idl INCLUDES <include> ; - - -generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ; - - -# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior  -# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures -# that both files will be created so bjam will not try to recreate them  -# constantly. -TOUCH_FILE = [ common.file-touch-command ] ; - -actions compile.idl -{ -    midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")" -    $(TOUCH_FILE) "$(<[4]:W)"   -    $(TOUCH_FILE) "$(<[5]:W)"   -} diff --git a/jam-files/boost-build/tools/mipspro.jam b/jam-files/boost-build/tools/mipspro.jam deleted file mode 100644 index 417eaefc..00000000 --- a/jam-files/boost-build/tools/mipspro.jam +++ /dev/null @@ -1,145 +0,0 @@ -#  Copyright Noel Belcourt 2007. -#  Distributed under the Boost Software License, Version 1.0. 
-#    (See accompanying file LICENSE_1_0.txt or copy at -#          http://www.boost.org/LICENSE_1_0.txt) - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature ; -import fortran ; -import type ; -import common ; - -feature.extend toolset : mipspro ; -toolset.inherit mipspro : unix ; -generators.override mipspro.prebuilt : builtin.lib-generator ; -generators.override mipspro.searched-lib-generator : searched-lib-generator ; - -#  Documentation and toolchain description located -#  http://www.sgi.com/products/software/irix/tools/ - -rule init ( version ? : command * : options * )  -{ -  local condition = [  -    common.check-init-parameters mipspro : version $(version) ] ; - -  command = [ common.get-invocation-command mipspro : CC : $(command) ] ; - -  common.handle-options mipspro : $(condition) : $(command) : $(options) ; -     -  command_c = $(command_c[1--2]) $(command[-1]:B=cc) ; - -  toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ; - -  # fortran support -  local command = [  -    common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ; - -  command_f = $(command_f[1--2]) $(command[-1]:B=f77) ; -  toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ; - -  # set link flags -  flags mipspro.link FINDLIBS-ST : [  -    feature.get-values <find-static-library> : $(options) ] : unchecked ; - -  flags mipspro.link FINDLIBS-SA : [  -    feature.get-values <find-shared-library> : $(options) ] : unchecked ; -} - -# Declare generators -generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ; -generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ; -generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ; - -cpu-arch-32 = -  <architecture>/<address-model> -  <architecture>/<address-model>32 ; - -cpu-arch-64 = -  <architecture>/<address-model>64 ; - -flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ; -flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ; - -# Declare flags and actions for compilation -flags mipspro.compile OPTIONS <debug-symbols>on : -g ;  -# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ; -flags mipspro.compile OPTIONS <warnings>off : -w ; -flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning -flags mipspro.compile OPTIONS <warnings>all : -fullwarn ; -flags mipspro.compile OPTIONS <optimization>speed : -Ofast ; -flags mipspro.compile OPTIONS <optimization>space : -O2 ; -flags mipspro.compile OPTIONS <cflags> : -LANG:std ; -flags mipspro.compile.c++ OPTIONS <inlining>off : -INLINE:none ; -flags mipspro.compile.c++ OPTIONS <cxxflags> ; -flags mipspro.compile DEFINES <define> ; -flags mipspro.compile INCLUDES <include> ; - - -flags mipspro.compile.fortran OPTIONS <fflags> ; - -actions compile.c -{ -    "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.fortran -{ -    "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags mipspro.link OPTIONS <debug-symbols>on : -g ; -# Strip the binary when no debugging is needed -# flags mipspro.link OPTIONS <debug-symbols>off : -s ; -# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ; -# 
flags mipspro.link OPTIONS <threading>multi : -mt ; - -flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ; -flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ; - -flags mipspro.link OPTIONS <optimization>speed : -Ofast ; -flags mipspro.link OPTIONS <optimization>space : -O2 ; -flags mipspro.link OPTIONS <linkflags> ; -flags mipspro.link LINKPATH <library-path> ; -flags mipspro.link FINDLIBS-ST <find-static-library> ; -flags mipspro.link FINDLIBS-SA <find-shared-library> ; -flags mipspro.link FINDLIBS-SA <threading>multi : pthread ; -flags mipspro.link LIBRARIES <library-file> ; -flags mipspro.link LINK-RUNTIME <runtime-link>static : static ; -flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags mipspro.link RPATH <dll-path> ; - -rule link ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -# Declare action for creating static libraries -actions piecemeal archive -{ -    ar -cr "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/mpi.jam b/jam-files/boost-build/tools/mpi.jam deleted file mode 100644 index 0fe490be..00000000 --- a/jam-files/boost-build/tools/mpi.jam +++ /dev/null @@ -1,583 +0,0 @@ -# Support for the Message Passing Interface (MPI) -# -# (C) Copyright 2005, 2006 Trustees of Indiana University -# (C) Copyright 2005 Douglas Gregor -# -# Distributed under the Boost Software License, Version 1.0. (See accompanying  -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.) -# -# Authors: Douglas Gregor -#          Andrew Lumsdaine -# -# ==== MPI Configuration ==== -#  -# For many users, MPI support can be enabled simply by adding the following  -# line to your user-config.jam file: -# -#   using mpi ; -# -# This should auto-detect MPI settings based on the MPI wrapper compiler in  -# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or -# has a different name, you can pass the name of the wrapper compiler as the -# first argument to the mpi module: -# -#   using mpi : /opt/mpich2-1.0.4/bin/mpiCC ; -# -# If your MPI implementation does not have a wrapper compiler, or the MPI  -# auto-detection code does not work with your MPI's wrapper compiler, -# you can pass MPI-related options explicitly via the second parameter to the  -# mpi module: -# -#    using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++ -#                  <find-shared-library>mpi <find-shared-library>lam  -#                  <find-shared-library>dl ; -# -# To see the results of MPI auto-detection, pass "--debug-configuration" on -# the bjam command line. -# -# The (optional) fourth argument configures Boost.MPI for running -# regression tests. These parameters specify the executable used to -# launch jobs (default: "mpirun") followed by any necessary arguments -# to this to run tests and tell the program to expect the number of -# processors to follow (default: "-np").  
With the default parameters, -# for instance, the test harness will execute, e.g., -#   -#    mpirun -np 4 all_gather_test -# -# ==== Linking Against the MPI Libraries === -# -# To link against the MPI libraries, import the "mpi" module and add the  -# following requirement to your target: -#  -#   <library>/mpi//mpi  -# -# Since MPI support is not always available, you should check  -# "mpi.configured" before trying to link against the MPI libraries. - -import "class" : new ; -import common ; -import feature : feature ; -import generators ; -import os ; -import project ; -import property ; -import testing ; -import toolset ; -import type ; -import path ; - -# Make this module a project -project.initialize $(__name__) ; -project mpi ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -  .debug-configuration = true ; -} - -# Assuming the first part of the command line is the given prefix -# followed by some non-empty value, remove the first argument. Returns -# either nothing (if there was no prefix or no value) or a pair -# -#   <name>value rest-of-cmdline -# -# This is a subroutine of cmdline_to_features -rule add_feature ( prefix name cmdline )  -{ -    local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; - -    # If there was no value associated with the prefix, abort -    if ! $(match) { -      return ; -    } - -    local value = $(match[1]) ; - -    if [ MATCH " +" : $(value) ] { -      value = "\"$(value)\"" ; -    } - -    return "<$(name)>$(value)" $(match[2]) ; -} - -# Strip any end-of-line characters off the given string and return the -# result. -rule strip-eol ( string ) -{ -  local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ; - -  if $(match) -  { -    return $(match[1]) ; -  } -  else -  { -    return $(string) ; -  } -} - -# Split a command-line into a set of features. Certain kinds of -# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced -# with their Boost.Build equivalents (e.g., <include>, <define>, -# <library-path>, <find-library>). All other arguments are introduced -# using the features in the unknown-features parameter, because we -# don't know how to deal with them. For instance, if your compile and -# correct. The incoming command line should be a string starting with -# an executable (e.g., g++ -I/include/path") and may contain any -# number of command-line arguments thereafter. The result is a list of -# features corresponding to the given command line, ignoring the -# executable. -rule cmdline_to_features ( cmdline : unknown-features ? ) -{ -    local executable ; -    local features ; -    local otherflags ; -    local result ; - -    unknown-features ?= <cxxflags> <linkflags> ; - -    # Pull the executable out of the command line. At this point, the -    # executable is just thrown away. -    local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; -    executable = $(match[1]) ; -    cmdline = $(match[2]) ; - -    # List the prefix/feature pairs that we will be able to transform.  -    # Every kind of parameter not mentioned here will be placed in both -    # cxxflags and linkflags, because we don't know where they should go. -    local feature_kinds-D = "define" ; -    local feature_kinds-I = "include" ; -    local feature_kinds-L = "library-path" ; -    local feature_kinds-l = "find-shared-library" ; - -    while $(cmdline) { - -        # Check for one of the feature prefixes we know about. 
If we -        # find one (and the associated value is nonempty), convert it -        # into a feature. -        local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ; -        local matched ; -        if $(match) && $(match[2]) { -           local prefix = $(match[1]) ; -           if $(feature_kinds$(prefix)) { -               local name = $(feature_kinds$(prefix)) ; -               local add = [ add_feature $(prefix) $(name) $(cmdline) ] ; - -               if $(add) { - -                  if $(add[1]) = <find-shared-library>pthread -                  { -                      # Uhm. It's not really nice that this MPI implementation -                      # uses -lpthread as opposed to -pthread. We do want to -                      # set <threading>multi, instead of -lpthread. -                      result += "<threading>multi" ; -                      MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;                       -                  } -                  else -                  {                       -                      result += $(add[1]) ;                   -                  } -                                      -                  cmdline = $(add[2]) ; -                  matched = yes ; -               } -           } -        } - -        # If we haven't matched a feature prefix, just grab the command-line -        # argument itself. If we can map this argument to a feature -        # (e.g., -pthread -> <threading>multi), then do so; otherwise, -        # and add it to the list of "other" flags that we don't -        # understand. -        if ! $(matched) { -           match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; -           local value = $(match[1]) ; -           cmdline = $(match[2]) ; - -           # Check for multithreading support -           if $(value) = "-pthread" || $(value) = "-pthreads" -           { -             result += "<threading>multi" ; - -             # DPG: This is a hack intended to work around a BBv2 bug where -             # requirements propagated from libraries are not checked for -             # conflicts when BBv2 determines which "common" properties to -             # apply to a target. In our case, the <threading>single property -             # gets propagated from the common properties to Boost.MPI -             # targets, even though <threading>multi is in the usage  -             # requirements of <library>/mpi//mpi. -             MPI_EXTRA_REQUIREMENTS += "<threading>multi" ; -           } -           else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] { -              otherflags += $(value) ; -           } -        } -    } - -    # If there are other flags that we don't understand, add them to the -    # result as both <cxxflags> and <linkflags> -    if $(otherflags) { -       for unknown in $(unknown-features) -       { -         result += "$(unknown)$(otherflags:J= )" ; -       } -    } - -    return $(result) ; -} - -# Determine if it is safe to execute the given shell command by trying -# to execute it and determining whether the exit code is zero or -# not. Returns true for an exit code of zero, false otherwise. -local rule safe-shell-command ( cmdline ) -{ -  local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ; -  return [ MATCH ".*(SSCOK).*" : $(result) ] ; -} - -# Initialize the MPI module.   -rule init ( mpicxx ? : options * : mpirun-with-options * ) -{ -  if ! 
$(options) && $(.debug-configuration) -  { -    ECHO "===============MPI Auto-configuration===============" ; -  } -     -  if ! $(mpicxx) && [ os.on-windows ] -  {   -    # Try to auto-configure to the Microsoft Compute Cluster Pack -    local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ; -    local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ; -    if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ] -    { -      if $(.debug-configuration) -      { -        ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ; -      } -       -      # Pick up either the 32-bit or 64-bit library, depending on which address -      # model the user has selected. Default to 32-bit. -      options = <include>$(cluster_pack_path)/Include  -                <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64 -                <library-path>$(cluster_pack_path)/Lib/i386 -                <find-static-library>msmpi -                <toolset>msvc:<define>_SECURE_SCL=0 -              ; -               -      # Setup the "mpirun" equivalent (mpiexec) -      .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ; -      .mpirun_flags = -n ; -    } -    else if $(.debug-configuration) -    { -      ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ; -    } -  }  -    -  if ! $(options) -  {  -    # Try to auto-detect options based on the wrapper compiler -    local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ; - -    if ! $(mpicxx) && ! $(command)  -    { -      # Try "mpiCC", which is used by MPICH  -      command = [ common.get-invocation-command mpi : mpiCC ] ; -    } - -    if ! $(mpicxx) && ! $(command)  -    { -      # Try "mpicxx", which is used by OpenMPI and MPICH2 -      command = [ common.get-invocation-command mpi : mpicxx ] ; -    } - -    local result ; -    local compile_flags ; -    local link_flags ; - -    if ! $(command) -    {  -      # Do nothing: we'll complain later -    } -    # OpenMPI and newer versions of LAM-MPI have -showme:compile and  -    # -showme:link. -    else if [ safe-shell-command "$(command) -showme:compile" ] && -              [ safe-shell-command "$(command) -showme:link" ] -    { -      if $(.debug-configuration) -      { -        ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ; -      } - -      compile_flags = [ SHELL "$(command) -showme:compile" ] ; -      link_flags = [ SHELL "$(command) -showme:link" ] ; -    -      # Prepend COMPILER as the executable name, to match the format of  -      # other compilation commands. 
-      compile_flags = "COMPILER $(compile_flags)" ; -      link_flags = "COMPILER $(link_flags)" ; -    } -    # Look for LAM-MPI's -showme -    else if [ safe-shell-command "$(command) -showme" ] -    { -      if $(.debug-configuration) -      { -        ECHO "Found older LAM-MPI wrapper compiler: $(command)" ; -      } - -      result = [ SHELL "$(command) -showme" ] ; -    } -    # Look for MPICH -    else if [ safe-shell-command "$(command) -show" ] -    { -      if $(.debug-configuration) -      { -        ECHO "Found MPICH wrapper compiler: $(command)" ; -      } -      compile_flags = [ SHELL "$(command) -compile_info" ] ; -      link_flags = [ SHELL "$(command) -link_info" ] ; -    } -    # Sun HPC and Ibm POE -    else if [ SHELL "$(command) -v 2>/dev/null" ] -    { -      compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ; - -      local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ; -      if $(back) -      { -        # Sun HPC -        if $(.debug-configuration) -        { -          ECHO "Found Sun MPI wrapper compiler: $(command)" ; -        } - -        compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ; -        compile_flags = [ MATCH "(.*)-v" :  $(compile_flags) ] ; -        link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ; -        link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ; -        link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ; - -        # strip out -v from compile options -        local front = [ MATCH "(.*)-v" :  $(link_flags) ] ; -        local back = [ MATCH "-v(.*)" :  $(link_flags) ] ; -        link_flags = "$(front) $(back)" ; -        front = [ MATCH "(.*)-xtarget=native64" :  $(link_flags) ] ; -        back = [ MATCH "-xtarget=native64(.*)" :  $(link_flags) ] ; -        link_flags = "$(front) $(back)" ; -      } -      else -      { -        # Ibm POE -        if $(.debug-configuration) -        { -          ECHO "Found IBM MPI wrapper compiler: $(command)" ; -        } - -        #  -        compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ; -        compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ; -        local front = [ MATCH "(.*)-v" :  $(compile_flags) ] ; -        local back = [ MATCH "-v(.*)" :  $(compile_flags) ] ; -        compile_flags = "$(front) $(back)" ; -        front = [ MATCH "(.*)-c" :  $(compile_flags) ] ; -        back = [ MATCH "-c(.*)" :  $(compile_flags) ] ; -        compile_flags = "$(front) $(back)" ; -        link_flags = $(compile_flags) ; - -        # get location of mpif.h from mpxlf -        local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ; -        f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ; -        front = [ MATCH "(.*)-v" :  $(f_flags) ] ; -        back = [ MATCH "-v(.*)" :  $(f_flags) ] ; -        f_flags = "$(front) $(back)" ; -        f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ; -        f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ; -        compile_flags = [ strip-eol $(compile_flags) ] ; -        compile_flags = "$(compile_flags) $(f_flags)" ; -      } -    } - -    if $(result) || $(compile_flags) && $(link_flags) -    { -      if $(result) -      { -         result = [ strip-eol $(result) ] ; -         options = [ cmdline_to_features $(result) ] ; -      } -      else  -      {  -         compile_flags = [ strip-eol $(compile_flags) ] ; -         link_flags = [ strip-eol $(link_flags) ] ; - -         # Separately process compilation and link 
features, then combine -         # them at the end. -         local compile_features = [ cmdline_to_features $(compile_flags)  -                                                        : "<cxxflags>" ] ;  -         local link_features = [ cmdline_to_features $(link_flags)  -                                                     : "<linkflags>" ] ;  -         options = $(compile_features) $(link_features) ; -      } - -      # If requested, display MPI configuration information. -      if $(.debug-configuration) -      { -        if $(result) -        { -          ECHO "  Wrapper compiler command line: $(result)" ; -        } -        else -        { -	  local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"  -                                : $(compile_flags) ] ; -          ECHO "MPI compilation flags: $(match[2])" ; -	  local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"  -                                : $(link_flags) ] ; -          ECHO "MPI link flags: $(match[2])" ; -        } -      } -    }  -    else  -    { -      if $(command) -      { -        ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ; -        ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ; -      }      -      else if $(mpicxx) -      { -        ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ; -      }  -      else -      { -        ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ; -      } -      ECHO "You will need to manually configure MPI support." ; -    } -  -  } - -  # Find mpirun (or its equivalent) and its flags -  if ! $(.mpirun) -  { -    .mpirun =  -        [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ; -    .mpirun_flags = $(mpirun-with-options[2-]) ; -    .mpirun_flags ?= -np ; -  } -   -  if $(.debug-configuration) -  { -    if $(options) -    { -      echo "MPI build features: " ; -      ECHO $(options) ; -    } - -    if $(.mpirun) -    { -      echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ; -    } -         -    ECHO "====================================================" ; -  } - -  if $(options)   -  { -    .configured = true ; - -    # Set up the "mpi" alias  -    alias mpi : : : : $(options) ; -  } -} - -# States whether MPI has bee configured -rule configured ( ) -{ -  return $(.configured) ; -} - -# Returs the "extra" requirements needed to build MPI. These requirements are -# part of the /mpi//mpi library target, but they need to be added to anything -# that uses MPI directly to work around bugs in BBv2's propagation of -# requirements. -rule extra-requirements ( ) -{ -  return $(MPI_EXTRA_REQUIREMENTS) ; -} - -# Support for testing; borrowed from Python -type.register RUN_MPI_OUTPUT ; -type.register RUN_MPI : : TEST ; - -class mpi-test-generator : generator -{ -    import property-set ; - -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -        self.composing = true ; -    } - -    rule run ( project name ? : property-set : sources * : multiple ? ) -    {   -      # Generate an executable from the sources. This is the executable we will run. 
-      local executable =  -        [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ; - -      result =  -        [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ; -    } -} - -# Use mpi-test-generator to generate MPI tests from sources -generators.register  -  [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ; - -generators.register-standard testing.expect-success  -  : RUN_MPI_OUTPUT : RUN_MPI ; - -# The number of processes to spawn when executing an MPI test. -feature mpi:processes : : free incidental ; - -# The flag settings on testing.capture-output do not -# apply to mpi.capture output at the moment. -# Redo this explicitly. -toolset.flags mpi.capture-output ARGS <testing.arg> ; -rule capture-output ( target : sources * : properties * ) -{ -    # Use the standard capture-output rule to run the tests -    testing.capture-output $(target) : $(sources[1]) : $(properties) ; - -    # Determine the number of processes we should run on. -    local num_processes = [ property.select <mpi:processes> : $(properties) ] ; -    num_processes = $(num_processes:G=) ; - -    # serialize the MPI tests to avoid overloading systems -    JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ; - -    # We launch MPI processes using the "mpirun" equivalent specified by the user. -    LAUNCHER on $(target) =   -      [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ; -} - -# Creates a set of test cases to be run through the MPI launcher. The name, sources,  -# and requirements are the same as for any other test generator. However, schedule is  -# a list of numbers, which indicates how many processes each test run will use. For  -# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then 7 -# 7 processes. The name provided is just the base name: the actual tests will be  -# the name followed by a hypen, then the number of processes.  -rule mpi-test ( name : sources * : requirements * : schedule * ) -{         -    sources ?= $(name).cpp ; -    schedule ?= 1 2 3 4 7 8 13 17 ; - -    local result ; -    for processes in $(schedule) -    {   -      result += [ testing.make-test  -        run-mpi : $(sources) /boost/mpi//boost_mpi -          : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ; -    } -    return $(result) ; -} diff --git a/jam-files/boost-build/tools/msvc-config.jam b/jam-files/boost-build/tools/msvc-config.jam deleted file mode 100644 index 6c71e3b0..00000000 --- a/jam-files/boost-build/tools/msvc-config.jam +++ /dev/null @@ -1,12 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for VisualStudio toolset. To use, just import this module. - -import toolset : using ; - -ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ; - -using msvc : all ; - diff --git a/jam-files/boost-build/tools/msvc.jam b/jam-files/boost-build/tools/msvc.jam deleted file mode 100644 index e33a66d2..00000000 --- a/jam-files/boost-build/tools/msvc.jam +++ /dev/null @@ -1,1392 +0,0 @@ -# Copyright (c) 2003 David Abrahams. -# Copyright (c) 2005 Vladimir Prus. -# Copyright (c) 2005 Alexey Pakhunov. -# Copyright (c) 2006 Bojan Resnik. -# Copyright (c) 2006 Ilya Sokolov. 
-# Copyright (c) 2007 Rene Rivera -# Copyright (c) 2008 Jurko Gospodnetic -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -################################################################################ -# -# MSVC Boost Build toolset module. -# -------------------------------- -# -# All toolset versions need to have their location either auto-detected or -# explicitly specified except for the special 'default' version that expects the -# environment to find the needed tools or report an error. -# -################################################################################ - -import "class" : new ; -import common ; -import errors ; -import feature ; -import generators ; -import mc ; -import midl ; -import os ; -import path ; -import pch ; -import property ; -import rc ; -import toolset ; -import type ; - - -type.register MANIFEST : manifest ; -feature.feature embed-manifest : on off : incidental propagated ; - -type.register PDB : pdb ; - -################################################################################ -# -# Public rules. -# -################################################################################ - -# Initialize a specific toolset version configuration. As the result, path to -# compiler and, possible, program names are set up, and will be used when that -# version of compiler is requested. For example, you might have: -# -#    using msvc : 6.5 : cl.exe ; -#    using msvc : 7.0 : Y:/foo/bar/cl.exe ; -# -# The version parameter may be ommited: -# -#    using msvc : : Z:/foo/bar/cl.exe ; -# -# The following keywords have special meanings when specified as versions: -#   - all     - all detected but not yet used versions will be marked as used -#               with their default options. -#   - default - this is an equivalent to an empty version. -# -# Depending on a supplied version, detected configurations and presence 'cl.exe' -# in the path different results may be achieved. The following table describes -# the possible scenarios: -# -#                                      Nothing            "x.y" -# Passed   Nothing       "x.y"         detected,          detected, -# version  detected      detected      cl.exe in path     cl.exe in path -# -# default  Error         Use "x.y"     Create "default"   Use "x.y" -# all      None          Use all       None               Use all -# x.y      -             Use "x.y"     -                  Use "x.y" -# a.b      Error         Error         Create "a.b"       Create "a.b" -# -# "x.y" - refers to a detected version; -# "a.b" - refers to an undetected version. -# -# FIXME: Currently the command parameter and the <compiler> property parameter -# seem to overlap in duties. Remove this duplication. This seems to be related -# to why someone started preparing to replace init with configure rules. -# -rule init ( -    # The msvc version being configured. When omitted the tools invoked when no -    # explicit version is given will be configured. -    version ? - -    # The command used to invoke the compiler. If not specified: -    #   - if version is given, default location for that version will be -    #     searched -    # -    #   - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0 -    #     and 6.* will be searched -    # -    #   - if compiler is not found in the default locations, PATH will be -    #     searched. 
-    : command * - -    # Options may include: -    # -    #     All options shared by multiple toolset types as handled by the -    #   common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>, -    #   <fflags> & <linkflags>. -    # -    #   <assembler> -    #   <compiler> -    #   <idl-compiler> -    #   <linker> -    #   <mc-compiler> -    #   <resource-compiler> -    #       Exact tool names to be used by this msvc toolset configuration. -    # -    #   <compiler-filter> -    #       Command through which to pipe the output of running the compiler. -    #     For example to pass the output to STLfilt. -    # -    #   <setup> -    #       Global setup command to invoke before running any of the msvc tools. -    #     It will be passed additional option parameters depending on the actual -    #     target platform. -    # -    #   <setup-amd64> -    #   <setup-i386> -    #   <setup-ia64> -    #       Platform specific setup command to invoke before running any of the -    #     msvc tools used when builing a target for a specific platform, e.g. -    #     when building a 32 or 64 bit executable. -    : options * -) -{ -    if $(command) -    { -        options += <command>$(command) ; -    } -    configure $(version) : $(options) ; -} - - -# 'configure' is a newer version of 'init'. The parameter 'command' is passed as -# a part of the 'options' list. See the 'init' rule comment for more detailed -# information. -# -rule configure ( version ? : options * ) -{ -    switch $(version) -    { -        case "all" : -            if $(options) -            { -                errors.error "MSVC toolset configuration: options should be" -                    "empty when '$(version)' is specified." ; -            } - -            # Configure (i.e. mark as used) all registered versions. -            local all-versions = [ $(.versions).all ] ; -            if ! $(all-versions) -            { -                if $(.debug-configuration) -                { -                    ECHO "notice: [msvc-cfg] Asked to configure all registered" -                        "msvc toolset versions when there are none currently" -                        "registered." ; -                } -            } -            else -            { -                for local v in $(all-versions) -                { -                    # Note that there is no need to skip already configured -                    # versions here as this will request configure-really rule -                    # to configure the version using default options which will -                    # in turn cause it to simply do nothing in case the version -                    # has already been configured. -                    configure-really $(v) ; -                } -            } - -        case "default" : -            configure-really : $(options) ; - -        case * : -            configure-really $(version) : $(options) ; -    } -} - - -# Sets up flag definitions dependent on the compiler version used. -# - 'version' is the version of compiler in N.M format. -# - 'conditions' is the property set to be used as flag conditions. -# - 'toolset' is the toolset for which flag settings are to be defined. -#   This makes the rule reusable for other msvc-option-compatible compilers. 
-# -rule configure-version-specific ( toolset : version : conditions ) -{ -    toolset.push-checking-for-flags-module unchecked ; -    # Starting with versions 7.0, the msvc compiler have the /Zc:forScope and -    # /Zc:wchar_t options that improve C++ standard conformance, but those -    # options are off by default. If we are sure that the msvc version is at -    # 7.*, add those options explicitly. We can be sure either if user specified -    # version 7.* explicitly or if we auto-detected the version ourselves. -    if ! [ MATCH ^(6\\.) : $(version) ] -    { -        toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ; -        toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ; - -        # Explicitly disable the 'function is deprecated' warning. Some msvc -        # versions have a bug, causing them to emit the deprecation warning even -        # with /W0. -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ; - -        if [ MATCH ^([78]\\.) : $(version) ] -        { -            # 64-bit compatibility warning deprecated since 9.0, see -            # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx -            toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ; -        } -    } - -    # -    # Processor-specific optimization. -    # - -    if [ MATCH ^([67]) : $(version) ] -    { -        # 8.0 deprecates some of the options. -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ; - -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i386 : /G3 ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ; - -        # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math" -        # tests will fail. -        toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ; - -        # 7.1 and below have single-threaded static RTL. -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ; -    } -    else -    { -        # 8.0 and above adds some more options. -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ; - -        # 8.0 and above only has multi-threaded static RTL. 
-        toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ; -        toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ; - -        # Specify target machine type so the linker will not need to guess. -        toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ; -        toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386)  : /MACHINE:X86 ; -        toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64)  : /MACHINE:IA64 ; -         -        # Make sure that manifest will be generated even if there is no -        # dependencies to put there. -        toolset.flags $(toolset).link LINKFLAGS $(conditions)/<embed-manifest>off : /MANIFEST ; -    } -    toolset.pop-checking-for-flags-module ; -} - - -# Registers this toolset including all of its flags, features & generators. Does -# nothing on repeated calls. -# -rule register-toolset ( ) -{ -    if ! msvc in [ feature.values toolset ] -    { -        register-toolset-really ; -    } -} - - -# Declare action for creating static libraries. If library exists, remove it -# before adding files. See -# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale. -if [ os.name ] in NT -{ -    # The 'DEL' command would issue a message to stdout if the file does not -    # exist, so need a check. -    actions archive -    { -        if exist "$(<[1])" DEL "$(<[1])" -        $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" -    } -} -else -{ -    actions archive -    { -        $(.RM) "$(<[1])" -        $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" -    } -} - - -# For the assembler the following options are turned on by default: -# -#   -Zp4   align structures to 4 bytes -#   -Cp    preserve case of user identifiers -#   -Cx    preserve case in publics, externs -# -actions compile.asm -{ -    $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)" -} - - -rule compile.c ( targets + : sources * : properties * ) -{ -    C++FLAGS on $(targets[1]) = ; -    get-rspline $(targets) : -TC ; -    compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - - -rule compile.c.preprocess ( targets + : sources * : properties * ) -{ -    C++FLAGS on $(targets[1]) = ; -    get-rspline $(targets) : -TC ; -    preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - - -rule compile.c.pch ( targets + : sources * : properties * ) -{ -    C++FLAGS on $(targets[1]) = ; -    get-rspline $(targets[1]) : -TC ; -    get-rspline $(targets[2]) : -TC ; -    local pch-source = [ on $(<) return $(PCH_SOURCE) ] ; -    if $(pch-source) -    { -        DEPENDS $(<) : $(pch-source) ; -        compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ; -    } -    else -    { -        compile-c-c++-pch $(targets) : $(sources) ; -    } -} - -toolset.flags msvc YLOPTION : "-Yl" ; - -# Action for running the C/C++ compiler without using precompiled headers. 
-# -# WARNING: Synchronize any changes this in action with intel-win -# -# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database -# -# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty -# -# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++.  -#    The linker will pull these into the executable's PDB -# -# 3. When compiling library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,  -#    as in this case the compiler must be used to create a single PDB for our library. -# -actions compile-c-c++ bind PDB_NAME -{ -    $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER) -} - -actions preprocess-c-c++ bind PDB_NAME -{ -    $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)" -} - -rule compile-c-c++ ( targets + : sources * ) -{ -    DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ; -    DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ; -    PDB_NAME on $(<) = $(<:S=.pdb) ; -} - -rule preprocess-c-c++ ( targets + : sources * ) -{ -    DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ; -    DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ; -    PDB_NAME on $(<) = $(<:S=.pdb) ; -} - -# Action for running the C/C++ compiler using precompiled headers. In addition -# to whatever else it needs to compile, this action also adds a temporary source -# .cpp file used to compile the precompiled headers themselves. -# -# The global .escaped-double-quote variable is used to avoid messing up Emacs -# syntax highlighting in the messy N-quoted code below. -actions compile-c-c++-pch -{ -    $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER) -} - - -# Action for running the C/C++ compiler using precompiled headers. An already -# built source file for compiling the precompiled headers is expected to be -# given as one of the source parameters. -actions compile-c-c++-pch-s -{ -    $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER) -} - - -rule compile.c++ ( targets + : sources * : properties * ) -{ -    get-rspline $(targets) : -TP ; -    compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - -rule compile.c++.preprocess ( targets + : sources * : properties * ) -{ -    get-rspline $(targets) : -TP ; -    preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; -} - - -rule compile.c++.pch ( targets + : sources * : properties * ) -{ -    get-rspline $(targets[1]) : -TP ; -    get-rspline $(targets[2]) : -TP ; -    local pch-source = [ on $(<) return $(PCH_SOURCE) ] ; -    if $(pch-source) -    { -        DEPENDS $(<) : $(pch-source) ; -        compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ; -    } -    else -    { -        compile-c-c++-pch $(targets) : $(sources) ; -    } -} - - -# See midl.jam for details. 
-# -actions compile.idl -{ -    $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")" -    $(.TOUCH_FILE) "$(<[4]:W)" -    $(.TOUCH_FILE) "$(<[5]:W)" -} - - -actions compile.mc -{ -    $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)" -} - - -actions compile.rc -{ -    $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)" -} - - -rule link ( targets + : sources * : properties * ) -{ -    if <embed-manifest>on in $(properties) -    {         -        msvc.manifest $(targets) : $(sources) : $(properties) ; -    }     -} - -rule link.dll ( targets + : sources * : properties * ) -{ -    DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ; -    if <embed-manifest>on in $(properties) -    {         -        msvc.manifest.dll $(targets) : $(sources) : $(properties) ; -    }     -} - -# Incremental linking a DLL causes no end of problems: if the actual exports do -# not change, the import .lib file is never updated. Therefore, the .lib is -# always out-of-date and gets rebuilt every time. I am not sure that incremental -# linking is such a great idea in general, but in this case I am sure we do not -# want it. - -# Windows manifest is a new way to specify dependencies on managed DotNet -# assemblies and Windows native DLLs. The manifests are embedded as resources -# and are useful in any PE target (both DLL and EXE). - -if [ os.name ] in NT -{ -    actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE -    { -        $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" -        if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% -    } -     -    actions manifest -    {         -        if exist "$(<[1]).manifest" ( -            $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1" -        ) -    } -     -    actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE -    { -        $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" -        if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% -    } - -    actions manifest.dll -    {         -        if exist "$(<[1]).manifest" ( -            $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2" -        ) -    }     -} -else -{         -    actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE -    { -        $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" -    } - -    actions manifest -    {         -        if test -e "$(<[1]).manifest"; then -            $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1" -        fi -    } -     -    actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE -    { -        $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" 
$(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" -    } -     -    actions manifest.dll -    { -        if test -e "$(<[1]).manifest"; then -          $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2" -        fi -    }     -} - -# this rule sets up the pdb file that will be used when generating static  -# libraries and the debug-store option is database, so that the compiler  -# puts all debug info into a single .pdb file named after the library -# -# Poking at source targets this way is probably not clean, but it's the -# easiest approach. -rule archive ( targets + : sources * : properties * ) -{ -    PDB_NAME on $(>) = $(<:S=.pdb) ; -} - -################################################################################ -# -# Classes. -# -################################################################################ - -class msvc-pch-generator : pch-generator -{ -    import property-set ; - -    rule run-pch ( project name ? : property-set : sources * ) -    { -        # Searching for the header and source file in the sources. -        local pch-header ; -        local pch-source ; -        for local s in $(sources) -        { -            if [ type.is-derived [ $(s).type ] H ] -            { -                pch-header = $(s) ; -            } -            else if -                [ type.is-derived [ $(s).type ] CPP ] || -                [ type.is-derived [ $(s).type ] C ] -            { -                pch-source = $(s) ; -            } -        } - -        if ! $(pch-header) -        { -            errors.user-error "can not build pch without pch-header" ; -        } - -        # If we do not have the PCH source - that is fine. We will just create a -        # temporary .cpp file in the action. - -        local generated = [ generator.run $(project) $(name) -            : [ property-set.create -                # Passing of <pch-source> is a dirty trick, needed because -                # non-composing generators with multiple inputs are subtly -                # broken. For more detailed information see: -                # https://zigzag.cs.msu.su:7813/boost.build/ticket/111 -                <pch-source>$(pch-source) -                [ $(property-set).raw ] ] -            : $(pch-header) ] ; - -        local pch-file ; -        for local g in $(generated) -        { -            if [ type.is-derived [ $(g).type ] PCH ] -            { -                pch-file = $(g) ; -            } -        } - -        return [ property-set.create <pch-header>$(pch-header) -            <pch-file>$(pch-file) ] $(generated) ; -    } -} - - -################################################################################ -# -# Local rules. -# -################################################################################ - -# Detects versions listed as '.known-versions' by checking registry information, -# environment variables & default paths. Supports both native Windows and -# Cygwin. -# -local rule auto-detect-toolset-versions ( ) -{ -    if [ os.name ] in NT CYGWIN -    { -        # Get installation paths from the registry. 
-        for local i in $(.known-versions) -        { -            if $(.version-$(i)-reg) -            { -                local vc-path ; -                for local x in "" "Wow6432Node\\" -                { -                    vc-path += [ W32_GETREG -                        "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg) -                        : "ProductDir" ] ; -                } - -                if $(vc-path) -                { -                    vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ; -                    register-configuration $(i) : [ path.native $(vc-path[1]) ] ; -                } -            } -        } -    } - -    # Check environment and default installation paths. -    for local i in $(.known-versions) -    { -        if ! $(i) in [ $(.versions).all ] -        { -            register-configuration $(i) : [ default-path $(i) ] ; -        } -    } -} - - -# Worker rule for toolset version configuration. Takes an explicit version id or -# nothing in case it should configure the default toolset version (the first -# registered one or a new 'default' one in case no toolset versions have been -# registered yet). -# -local rule configure-really ( version ? : options * ) -{ -    local v = $(version) ; - -    # Decide what the 'default' version is. -    if ! $(v) -    { -        # Take the first registered (i.e. auto-detected) version. -        version = [ $(.versions).all ] ; -        version = $(version[1]) ; -        v = $(version) ; - -        # Note: 'version' can still be empty at this point if no versions have -        # been auto-detected. -        version ?= "default" ; -    } - -    # Version alias -> real version number. -    if $(.version-alias-$(version)) -    { -        version = $(.version-alias-$(version)) ; -    } - -    # Check whether the selected configuration is already in use. -    if $(version) in [ $(.versions).used ] -    { -        # Allow multiple 'toolset.using' calls for the same configuration if the -        # identical sets of options are used. -        if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] ) -        { -            errors.error "MSVC toolset configuration: Toolset version" -                "'$(version)' already configured." ; -        } -    } -    else -    { -        # Register a new configuration. -        $(.versions).register $(version) ; - -        # Add user-supplied to auto-detected options. -        options = [ $(.versions).get $(version) : options ] $(options) ; - -        # Mark the configuration as 'used'. -        $(.versions).use $(version) ; - -        # Generate conditions and save them. -        local conditions = [ common.check-init-parameters msvc : version $(v) ] -            ; - -        $(.versions).set $(version) : conditions : $(conditions) ; - -        local command = [ feature.get-values <command> : $(options) ] ; - -        # If version is specified, we try to search first in default paths, and -        # only then in PATH. -        command = [ common.get-invocation-command msvc : cl.exe : $(command) : -            [ default-paths $(version) ] : $(version) ] ; - -        common.handle-options msvc : $(conditions) : $(command) : $(options) ; - -        if ! $(version) -        { -            # Even if version is not explicitly specified, try to detect the -            # version from the path. -            # FIXME: We currently detect both Microsoft Visual Studio 9.0 and -            # 9.0express as 9.0 here. 
-            if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ] -            { -                version = 10.0 ; -            } -            else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ] -            { -                version = 9.0 ; -            } -            else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ] -            { -                version = 8.0 ; -            } -            else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ] -            { -                version = 7.1 ; -            } -            else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" : -                $(command) ] -            { -                version = 7.1toolkit ; -            } -            else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ] -            { -                version = 7.0 ; -            } -            else -            { -                version = 6.0 ; -            } -        } - -        # Generate and register setup command. - -        local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ; - -        local cpu = i386 amd64 ia64 ; -        if $(below-8.0) -        { -            cpu = i386 ; -        } - -        local setup-amd64 ; -        local setup-i386 ; -        local setup-ia64 ; - -        if $(command) -        { -            # TODO: Note that if we specify a non-existant toolset version then -            # this rule may find and use a corresponding compiler executable -            # belonging to an incorrect toolset version. For example, if you -            # have only MSVC 7.1 installed, have its executable on the path and -            # specify you want Boost Build to use MSVC 9.0, then you want Boost -            # Build to report an error but this may cause it to silently use the -            # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0 -            # toolset version. -            command = [ common.get-absolute-tool-path $(command[-1]) ] ; -        } -         -        if $(command) -        {             -            local parent = [ path.make $(command) ] ; -            parent = [ path.parent $(parent) ] ; -            parent = [ path.native $(parent) ] ; - -            # Setup will be used if the command name has been specified. If -            # setup is not specified explicitly then a default setup script will -            # be used instead. Setup scripts may be global or arhitecture/ -            # /platform/cpu specific. Setup options are used only in case of -            # global setup scripts. - -            # Default setup scripts provided with different VC distributions: -            # -            #   VC 7.1 had only the vcvars32.bat script specific to 32 bit i386 -            # builds. It was located in the bin folder for the regular version -            # and in the root folder for the free VC 7.1 tools. -            # -            #   Later 8.0 & 9.0 versions introduce separate platform specific -            # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium) -            # located in or under the bin folder. Most also include a global -            # vcvarsall.bat helper script located in the root folder which runs -            # one of the aforementioned vcvars*.bat scripts based on the options -            # passed to it. 
So far only the version coming with some PlatformSDK -            # distributions does not include this top level script but to -            # support those we need to fall back to using the worker scripts -            # directly in case the top level script can not be found. - -            local global-setup = [ feature.get-values <setup> : $(options) ] ; -            global-setup = $(global-setup[1]) ; -            if ! $(below-8.0) -            { -                global-setup ?= [ locate-default-setup $(command) : $(parent) : -                    vcvarsall.bat ] ; -            } - -            local default-setup-amd64 = vcvarsx86_amd64.bat ; -            local default-setup-i386  = vcvars32.bat ; -            local default-setup-ia64  = vcvarsx86_ia64.bat ; - -            # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and -            # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx -            # mention an x86_IPF option, that seems to be a documentation bug -            # and x86_ia64 is the correct option. -            local default-global-setup-options-amd64 = x86_amd64 ; -            local default-global-setup-options-i386  = x86 ; -            local default-global-setup-options-ia64  = x86_ia64 ; - -            # When using 64-bit Windows, and targeting 64-bit, it is possible to -            # use a native 64-bit compiler, selected by the "amd64" & "ia64" -            # parameters to vcvarsall.bat. There are two variables we can use -- -            # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is -            # 'x86' when running 32-bit Windows, no matter which processor is -            # used, and 'AMD64' on 64-bit windows on x86 (either AMD64 or EM64T) -            # Windows. -            # -            if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ] -            { -                default-global-setup-options-amd64 = amd64 ; -            } -            # TODO: The same 'native compiler usage' should be implemented for -            # the Itanium platform by using the "ia64" parameter. For this -            # though we need someone with access to this platform who can find -            # out how to correctly detect this case. -            else if $(somehow-detect-the-itanium-platform) -            { -                default-global-setup-options-ia64 = ia64 ; -            } - -            local setup-prefix = "call " ; -            local setup-suffix = " >nul"$(.nl) ; -            if ! [ os.name ] in NT -            { -                setup-prefix = "cmd.exe /S /C call " ; -                setup-suffix = " \">nul\" \"&&\" " ; -            } - -            for local c in $(cpu) -            { -                local setup-options ; - -                setup-$(c) = [ feature.get-values <setup-$(c)> : $(options) ] ; - -                if ! 
$(setup-$(c))-is-not-empty -                { -                    if $(global-setup)-is-not-empty -                    { -                        setup-$(c) = $(global-setup) ; - -                        # If needed we can easily add using configuration flags -                        # here for overriding which options get passed to the -                        # global setup command for which target platform: -                        # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ; - -                        setup-options ?= $(default-global-setup-options-$(c)) ; -                    } -                    else -                    { -                        setup-$(c) = [ locate-default-setup $(command) : $(parent) : $(default-setup-$(c)) ] ; -                    } -                } - -                # Cygwin to Windows path translation. -                setup-$(c) = "\""$(setup-$(c):W)"\"" ; - -                # Append setup options to the setup name and add the final setup -                # prefix & suffix. -                setup-options ?= "" ; -                setup-$(c) = $(setup-prefix)$(setup-$(c):J=" ")" "$(setup-options:J=" ")$(setup-suffix) ; -            } -        } - -        # Get tool names (if any) and finish setup. - -        compiler = [ feature.get-values <compiler> : $(options) ] ; -        compiler ?= cl ; - -        linker = [ feature.get-values <linker> : $(options) ] ; -        linker ?= link ; - -        resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ; -        resource-compiler ?= rc ; - -        # Turn on some options for i386 assembler -        #  -coff  generate COFF format object file (compatible with cl.exe output) -        local default-assembler-amd64 = ml64 ; -        local default-assembler-i386  = "ml -coff" ; -        local default-assembler-ia64  = ias ; - -        assembler = [ feature.get-values <assembler> : $(options) ] ; - -        idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ; -        idl-compiler ?= midl ; - -        mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ; -        mc-compiler ?= mc ; - -        manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ; -        manifest-tool ?= mt ; -         -        local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ; - -        for local c in $(cpu) -        { -            # Setup script is not required in some configurations. 
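            # Where it is required, the value assembled above ends up looking
            # something like (the installation path is only a placeholder):
            #
            #   call "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" x86 >nul
            #
            # and the toolset.flags calls below glue it in front of each tool name,
            # so the compile command effectively starts with that setup call followed
            # by, e.g., cl /Zm800 -nologo.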
-            setup-$(c) ?= "" ; - -            local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ; - -            if $(.debug-configuration) -            { -                for local cpu-condition in $(cpu-conditions) -                { -                    ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ; -                } -            } - -            local cpu-assembler = $(assembler) ; -            cpu-assembler ?= $(default-assembler-$(c)) ; - -            toolset.flags msvc.compile .CC  $(cpu-conditions) : $(setup-$(c))$(compiler) /Zm800 -nologo ; -            toolset.flags msvc.compile .RC  $(cpu-conditions) : $(setup-$(c))$(resource-compiler) ; -            toolset.flags msvc.compile .ASM $(cpu-conditions) : $(setup-$(c))$(cpu-assembler) -nologo ; -            toolset.flags msvc.link    .LD  $(cpu-conditions) : $(setup-$(c))$(linker) /NOLOGO /INCREMENTAL:NO ; -            toolset.flags msvc.archive .LD  $(cpu-conditions) : $(setup-$(c))$(linker) /lib /NOLOGO  ; -            toolset.flags msvc.compile .IDL $(cpu-conditions) : $(setup-$(c))$(idl-compiler) ; -            toolset.flags msvc.compile .MC  $(cpu-conditions) : $(setup-$(c))$(mc-compiler) ; - -            toolset.flags msvc.link .MT $(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ; - -            if $(cc-filter) -            { -                toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ; -            } -        } - -        # Set version-specific flags. -        configure-version-specific msvc : $(version) : $(conditions) ; -    } -} - - -# Returns the default installation path for the given version. -# -local rule default-path ( version ) -{ -    # Use auto-detected path if possible. -    local path = [ feature.get-values <command> : [ $(.versions).get $(version) -        : options ] ] ; - -    if $(path) -    { -        path = $(path:D) ; -    } -    else -    { -        # Check environment. -        if $(.version-$(version)-env) -        { -            local vc-path = [ os.environ $(.version-$(version)-env) ] ; -            if $(vc-path) -            { -                vc-path = [ path.make $(vc-path) ] ; -                vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ; -                vc-path = [ path.native $(vc-path) ] ; - -                path = $(vc-path) ; -            } -        } - -        # Check default path. -        if ! $(path) && $(.version-$(version)-path) -        { -            path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ; -        } -    } - -    return $(path) ; -} - - -# Returns either the default installation path (if 'version' is not empty) or -# list of all known default paths (if no version is given) -# -local rule default-paths ( version ? ) -{ -    local possible-paths ; - -    if $(version) -    { -        possible-paths += [ default-path $(version) ] ; -    } -    else -    { -        for local i in $(.known-versions) -        { -            possible-paths += [ default-path $(i) ] ; -        } -    } - -    return $(possible-paths) ; -} - - -rule get-rspline ( target : lang-opt ) -{ -    CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS) -        $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES) -        $(.nl)\"-I$(INCLUDES:W)\" ] ; -} - -class msvc-linking-generator : linking-generator -{ -    # Calls the base version.  
If necessary, also creates a target for the
-    # manifest file.
-    rule generated-targets ( sources + : property-set : project name ? )
-    {
-        local result = [ linking-generator.generated-targets $(sources)
-          : $(property-set) : $(project) $(name) ] ;
-
-        if $(result)
-        {
-            local name-main = [ $(result[0]).name ] ;
-            local action = [ $(result[0]).action ] ;
-
-            if [ $(property-set).get <debug-symbols> ] = "on"
-            {
-                # We force an exact name on the PDB. The reason is tagging -- the tag
-                # rule may reasonably special-case some target types, like SHARED_LIB.
-                # The tag rule will not catch PDB, and it cannot even easily figure out
-                # whether the PDB is paired with a SHARED_LIB, an EXE or something else.
-                # Because the PDB always gets the same name as the main target, with
-                # .pdb as the extension, just force it.
-                local target = [ class.new file-target $(name-main:S=.pdb) exact : PDB : $(project) : $(action) ] ;
-                local registered-target = [ virtual-target.register $(target) ] ;
-                if $(target) != $(registered-target)
-                {
-                    $(action).replace-targets $(target) : $(registered-target) ;
-                }
-                result += $(registered-target) ;
-            }
-
-            if [ $(property-set).get <embed-manifest> ] = "off"
-            {
-                # The manifest is an evil target: it has .manifest appended to the
-                # name of the main target, including the extension, e.g.
-                # a.exe.manifest. We use an 'exact' name to achieve this effect.
-                local target = [ class.new file-target $(name-main).manifest exact : MANIFEST : $(project) : $(action) ] ;
-                local registered-target = [ virtual-target.register $(target) ] ;
-                if $(target) != $(registered-target)
-                {
-                    $(action).replace-targets $(target) : $(registered-target) ;
-                }
-                result += $(registered-target) ;
-            }
-        }
-        return $(result) ;
-    }
-}
-
-
-
-# Unsafe worker rule for the register-toolset() rule. Must not be called
-# multiple times.
-#
-local rule register-toolset-really ( )
-{
-    feature.extend toolset : msvc ;
-
-    # Intel and msvc supposedly have link-compatible objects.
-    feature.subfeature toolset msvc : vendor : intel : propagated optional ;
-
-    # Inherit MIDL flags.
-    toolset.inherit-flags msvc : midl ;
-
-    # Inherit MC flags.
-    toolset.inherit-flags msvc : mc ;
-
-    # Dynamic runtime comes only in MT flavour.
-    toolset.add-requirements
-        <toolset>msvc,<runtime-link>shared:<threading>multi ;
-
-    # Declare msvc toolset specific features.
-    {
-        feature.feature debug-store : object database : propagated ;
-        feature.feature pch-source  :                 : dependency free ;
-    }
-
-    # Declare generators.
-    {
-        # TODO: Is it possible to combine these? Make the generators
-        # non-composing so that they do not convert each source into a separate
-        # .rsp file.
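        # As a usage-level illustration (target and source names here are made up):
        # with
        #
        #   exe app : app.cpp : <debug-symbols>on <embed-manifest>off ;
        #
        # the linking generator registered below produces app.exe together with an
        # app.pdb file target, and an app.exe.manifest file target instead of an
        # embedded manifest.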
-        generators.register [ new msvc-linking-generator  -            msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ; -        generators.register [ new msvc-linking-generator  -            msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ] ; - -        generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ; -        generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ; -        generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ; -        generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ; -        generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ; - -        # Using 'register-c-compiler' adds the build directory to INCLUDES. -        generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ; -        generators.override msvc.compile.rc : rc.compile.resource ; -        generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ; - -        generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ; -        generators.override msvc.compile.idl : midl.compile.idl ; - -        generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ; -        generators.override msvc.compile.mc : mc.compile ; - -        # Note: the 'H' source type will catch both '.h' and '.hpp' headers as -        # the latter have their HPP type derived from H. The type of compilation -        # is determined entirely by the destination type. -        generators.register [ new msvc-pch-generator msvc.compile.c.pch   : H :   C_PCH OBJ : <pch>on <toolset>msvc ] ; -        generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ; - -        generators.override msvc.compile.c.pch   : pch.default-c-pch-generator ; -        generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ; -    } - -    toolset.flags msvc.compile PCH_FILE   <pch>on : <pch-file>   ; -    toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ; -    toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ; - -    # -    # Declare flags for compilation. 
-    # - -    toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ; -    toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ; - -    toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ; -    toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ; - -    toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ; -    toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ; -    toolset.flags msvc.compile CFLAGS <optimization>off : /Od ; -    toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ; -    toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ; -    toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ; - -    toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ; -    toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ; -    toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ; -    toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ; - -    toolset.flags msvc.compile C++FLAGS  <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ; -    toolset.flags msvc.compile C++FLAGS  <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ; -    toolset.flags msvc.compile C++FLAGS  <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ; -    toolset.flags msvc.compile C++FLAGS  <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ; - -    # By default 8.0 enables rtti support while prior versions disabled it. We -    # simply enable or disable it explicitly so we do not have to depend on this -    # default behaviour. -    toolset.flags msvc.compile CFLAGS <rtti>on : /GR ; -    toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ; -    toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ; -    toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ; - -    toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ; -    toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ; - -    toolset.flags msvc.compile OPTIONS <cflags> : ; -    toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ; - -    toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ; - -    toolset.flags msvc.compile DEFINES <define> ; -    toolset.flags msvc.compile UNDEFS <undef> ; -    toolset.flags msvc.compile INCLUDES <include> ; - -    # Declare flags for the assembler. -    toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ; - -    toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ; - -    toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ; -    toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ; -    toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ; -    toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ; - -    toolset.flags msvc.compile.asm DEFINES <define> ; - -    # Declare flags for linking. -    { -        toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ;  # not used yet -        toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ; -        toolset.flags msvc.link DEF_FILE <def-file> ; - -        # The linker disables the default optimizations when using /DEBUG so we -        # have to enable them manually for release builds with debug symbols. 
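        # For instance, a release build that keeps debug symbols,
        #
        #   exe app : app.cpp : <optimization>speed <debug-symbols>on <runtime-debugging>off ;
        #
        # gets /O2 plus /Z7 or /Zi (depending on <debug-store>) from the compile
        # flags above, and /DEBUG plus the /OPT:REF,ICF re-enabling below at link
        # time.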
-        toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ; - -        toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ; -        toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ; -        toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ; -        toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ; -        toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ; - -        toolset.flags msvc.link OPTIONS <linkflags> ; -        toolset.flags msvc.link LINKPATH <library-path> ; - -        toolset.flags msvc.link FINDLIBS_ST <find-static-library> ; -        toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ; -        toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ; -        toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ; -    } - -    toolset.flags msvc.archive AROPTIONS <archiveflags> ; -} - - -# Locates the requested setup script under the given folder and returns its full -# path or nothing in case the script can not be found. In case multiple scripts -# are found only the first one is returned. -# -# TODO: There used to exist a code comment for the msvc.init rule stating that -# we do not correctly detect the location of the vcvars32.bat setup script for -# the free VC7.1 tools in case user explicitly provides a path. This should be -# tested or simply remove this whole comment in case this toolset version is no -# longer important. -# -local rule locate-default-setup ( command : parent : setup-name ) -{ -    local result = [ GLOB $(command) $(parent) : $(setup-name) ] ; -    if $(result[1]) -    { -        return $(result[1]) ; -    } -} - - -# Validates given path, registers found configuration and prints debug -# information about it. -# -local rule register-configuration ( version : path ? ) -{ -    if $(path) -    { -        local command = [ GLOB $(path) : cl.exe ] ; - -        if $(command) -        { -            if $(.debug-configuration) -            { -                ECHO "notice: [msvc-cfg] msvc-$(version) detected, command: '$(command)'" ; -            } - -            $(.versions).register $(version) ; -            $(.versions).set $(version) : options : <command>$(command) ; -        } -    } -} - - -################################################################################ -# -#   Startup code executed when loading this module. -# -################################################################################ - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} - -# Miscellaneous constants. -.RM = [ common.rm-command ] ; -.nl = " -" ; -.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ; -.escaped-double-quote = "\"" ; -.TOUCH_FILE = [ common.file-touch-command ] ; - -# List of all registered configurations. -.versions = [ new configurations ] ; - -# Supported CPU architectures. -.cpu-arch-i386 = -    <architecture>/<address-model> -    <architecture>/<address-model>32 -    <architecture>x86/<address-model> -    <architecture>x86/<address-model>32 ; - -.cpu-arch-amd64 = -    <architecture>/<address-model>64 -    <architecture>x86/<address-model>64 ; - -.cpu-arch-ia64 = -    <architecture>ia64/<address-model> -    <architecture>ia64/<address-model>64 ; - - -# Supported CPU types (only Itanium optimization options are supported from -# VC++ 2005 on). 
See -# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more -# detailed information. -.cpu-type-g5       = i586 pentium pentium-mmx ; -.cpu-type-g6       = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6 -                     k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ; -.cpu-type-em64t    = prescott nocona conroe conroe-xe conroe-l allendale mermon -                     mermon-xe kentsfield kentsfield-xe penryn wolfdale -                     yorksfield nehalem ; -.cpu-type-amd64    = k8 opteron athlon64 athlon-fx ; -.cpu-type-g7       = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp -                     athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ; -.cpu-type-itanium  = itanium itanium1 merced ; -.cpu-type-itanium2 = itanium2 mckinley ; - - -# Known toolset versions, in order of preference. -.known-versions = 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ; - -# Version aliases. -.version-alias-6 = 6.0 ; -.version-alias-6.5 = 6.0 ; -.version-alias-7 = 7.0 ; -.version-alias-8 = 8.0 ; -.version-alias-9 = 9.0 ; -.version-alias-10 = 10.0 ; - -# Names of registry keys containing the Visual C++ installation path (relative -# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft"). -.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ; -.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ; -.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ; -.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ; -.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ; -.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ; -.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ; -.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ; -.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ; - -# Visual C++ Toolkit 2003 does not store its installation path in the registry. -# The environment variable 'VCToolkitInstallDir' and the default installation -# path will be checked instead. -.version-7.1toolkit-path    = "Microsoft Visual C++ Toolkit 2003" "bin" ; -.version-7.1toolkit-env     = VCToolkitInstallDir ; - -# Path to the folder containing "cl.exe" relative to the value of the -# corresponding environment variable. -.version-7.1toolkit-envpath = "bin" ; - - -# Auto-detect all the available msvc installations on the system. -auto-detect-toolset-versions ; - - -# And finally trigger the actual Boost Build toolset registration. -register-toolset ; diff --git a/jam-files/boost-build/tools/notfile.jam b/jam-files/boost-build/tools/notfile.jam deleted file mode 100644 index 97a5b0e8..00000000 --- a/jam-files/boost-build/tools/notfile.jam +++ /dev/null @@ -1,74 +0,0 @@ -#  Copyright (c) 2005 Vladimir Prus. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import generators ; -import project ; -import targets ; -import toolset ; -import type ; - - -type.register NOTFILE_MAIN ; - - -class notfile-generator : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * : multiple ? 
) -    { -        local action ; -        local action-name = [ $(property-set).get <action> ] ; - -        local m = [ MATCH ^@(.*) : $(action-name) ] ; - -        if $(m) -        { -            action = [ new action $(sources) : $(m[1]) -              : $(property-set) ] ; -        } -        else -        { -            action = [ new action $(sources) : notfile.run -              : $(property-set) ] ; -        } -        return [ virtual-target.register  -            [ new notfile-target $(name) : $(project) : $(action) ] ] ; -    } -} - - -generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ; - - -toolset.flags notfile.run ACTION : <action> ; - - -actions run -{ -    $(ACTION) -} - - -rule notfile ( target-name : action + : sources * : requirements * : default-build * ) -{ -    local project = [ project.current ] ; - -    requirements += <action>$(action) ; - -    targets.main-target-alternative -        [ new typed-target $(target-name) : $(project) : NOTFILE_MAIN -            : [ targets.main-target-sources $(sources) : $(target-name) ] -            : [ targets.main-target-requirements $(requirements) : $(project) ] -            : [ targets.main-target-default-build $(default-build) : $(project) ] -        ] ; -} - -IMPORT $(__name__) : notfile : : notfile ; diff --git a/jam-files/boost-build/tools/notfile.py b/jam-files/boost-build/tools/notfile.py deleted file mode 100644 index afbf68fb..00000000 --- a/jam-files/boost-build/tools/notfile.py +++ /dev/null @@ -1,51 +0,0 @@ -# Status: ported. -# Base revision: 64429. -# -#  Copyright (c) 2005-2010 Vladimir Prus. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - - -import b2.build.type as type -import b2.build.generators as generators -import b2.build.virtual_target as virtual_target -import b2.build.toolset as toolset -import b2.build.targets as targets - -from b2.manager import get_manager -from b2.util import bjam_signature - -type.register("NOTFILE_MAIN") - -class NotfileGenerator(generators.Generator): - -    def run(self, project, name, ps, sources): -        pass -        action_name = ps.get('action')[0] -        if action_name[0] == '@': -            action = virtual_target.Action(get_manager(), sources, action_name[1:], ps) -        else: -            action = virtual_target.Action(get_manager(), sources, "notfile.run", ps) - -        return [get_manager().virtual_targets().register( -            virtual_target.NotFileTarget(name, project, action))] - -generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"])) - -toolset.flags("notfile.run", "ACTION", [], ["<action>"]) - -get_manager().engine().register_action("notfile.run", "$(ACTION)") - -@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"], -                 ["default_build", "*"])) -def notfile(target_name, action, sources, requirements, default_build): - -    requirements.append("<action>" + action) - -    return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements, -                                           default_build, []) - - -get_manager().projects().add_rule("notfile", notfile) diff --git a/jam-files/boost-build/tools/package.jam b/jam-files/boost-build/tools/package.jam deleted file mode 100644 index 198c2231..00000000 --- a/jam-files/boost-build/tools/package.jam +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (c) 2005 
Vladimir Prus. -# Copyright 2006 Rene Rivera. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Provides mechanism for installing whole packages into a specific directory -# structure. This is opposed to the 'install' rule, that installs a number of -# targets to a single directory, and does not care about directory structure at -# all. - -# Example usage: -# -#   package.install boost : <properties> -#                         : <binaries> -#                         : <libraries> -#                         : <headers> -#                         ; -# -# This will install binaries, libraries and headers to the 'proper' location, -# given by command line options --prefix, --exec-prefix, --bindir, --libdir and -# --includedir. -# -# The rule is just a convenient wrapper, avoiding the need to define several -# 'install' targets. -# -# The only install-related feature is <install-source-root>. It will apply to -# headers only and if present, paths of headers relatively to source root will -# be retained after installing. If it is not specified, then "." is assumed, so -# relative paths in headers are always preserved. - -import "class" : new ; -import option ; -import project ; -import feature ; -import property ; -import stage ; -import targets ; -import modules ; - -feature.feature install-default-prefix : : free incidental ; - -rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * ) -{ -    package-name ?= $(name) ; -    if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ] -    { -        # If --prefix is explicitly specified on the command line, -        # then we need wipe away any settings of libdir/includir that -        # is specified via options in config files. -        option.set bindir : ; -        option.set libdir : ; -        option.set includedir : ; -    } -             -    # If <install-source-root> is not specified, all headers are installed to -    # prefix/include, no matter what their relative path is. Sometimes that is -    # what is needed. -    local install-source-root = [ property.select <install-source-root> : -        $(requirements) ] ; -    install-source-root = $(install-source-root:G=) ; -    requirements = [ property.change $(requirements) : <install-source-root> ] ; - -    local install-header-subdir = [ property.select <install-header-subdir> : -        $(requirements) ] ; -    install-header-subdir = /$(install-header-subdir:G=) ; -    install-header-subdir ?= "" ; -    requirements = [ property.change $(requirements) : <install-header-subdir> ] -        ; - -    # First, figure out all locations. Use the default if no prefix option -    # given. -    local prefix = [ get-prefix $(name) : $(requirements) ] ; - -    # Architecture dependent files. -    local exec-locate = [ option.get exec-prefix : $(prefix) ] ; - -    # Binaries. -    local bin-locate = [ option.get bindir : $(prefix)/bin ] ; - -    # Object code libraries. -    local lib-locate = [ option.get libdir : $(prefix)/lib ] ; - -    # Source header files. 
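    # Taken together, when none of the directory options are given on the command
    # line, the layout works out as:
    #
    #   binaries  -> $(prefix)/bin       (--bindir)
    #   libraries -> $(prefix)/lib       (--libdir)
    #   headers   -> $(prefix)/include   (--includedir)
    #
    # with $(prefix) itself coming from --prefix or <install-default-prefix>.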
-    local include-locate = [ option.get includedir : $(prefix)/include ] ; - -    stage.install $(name)-bin : $(binaries) : $(requirements) -        <location>$(bin-locate) ; -    alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ; -     -    # Since the install location of shared libraries differs on universe -    # and cygwin, use target alternatives to make different targets. -    # We should have used indirection conditioanl requirements, but it's -    # awkward to pass bin-locate and lib-locate from there to another rule. -    alias $(name)-lib-shared : $(name)-lib-shared-universe ; -    alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ; -     -    # For shared libraries, we install both explicitly specified one and the -    # shared libraries that the installed executables depend on. -    stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements) -      <location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ; -    stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements) -      <location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ; - -    # For static libraries, we do not care about executable dependencies, since -    # static libraries are already incorporated into them. -    stage.install $(name)-lib-static : $(libraries) : $(requirements) -        <location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ; -    stage.install $(name)-headers : $(headers) : $(requirements) -        <location>$(include-locate)$(install-header-subdir) -        <install-source-root>$(install-source-root) ; -    alias $(name) : $(name)-bin $(name)-lib $(name)-headers ; - -    local c = [ project.current ] ; -    local project-module = [ $(c).project-module ] ; -    module $(project-module) -    { -        explicit $(1)-bin $(1)-lib $(1)-headers $(1) $(1)-lib-shared $(1)-lib-static  -          $(1)-lib-shared-universe $(1)-lib-shared-cygwin ; -    } -} - -rule install-data ( target-name : package-name : data * : requirements * ) -{ -    package-name ?= target-name ; -    if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ] -    { -        # If --prefix is explicitly specified on the command line, -        # then we need wipe away any settings of datarootdir -        option.set datarootdir : ; -    }    -     -    local prefix = [ get-prefix $(package-name) : $(requirements) ] ; -    local datadir = [ option.get datarootdir : $(prefix)/share ] ; - -    stage.install $(target-name)  -        : $(data) -        : $(requirements) <location>$(datadir)/$(package-name) -        ; -     -    local c = [ project.current ] ; -    local project-module = [ $(c).project-module ] ; -    module $(project-module) -    { -        explicit $(1) ; -    } -} - -local rule get-prefix ( package-name : requirements * ) -{ -    local prefix = [ option.get prefix : [ property.select -        <install-default-prefix> : $(requirements) ] ] ; -    prefix = $(prefix:G=) ; -    requirements = [ property.change $(requirements) : <install-default-prefix> -        ] ; -    # Or some likely defaults if neither is given. -    if ! 
$(prefix) -    { -        if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; } -        else if [ modules.peek : UNIX ] { prefix = /usr/local ; }         -    } -    return $(prefix) ; -}            - diff --git a/jam-files/boost-build/tools/package.py b/jam-files/boost-build/tools/package.py deleted file mode 100644 index aa081b4f..00000000 --- a/jam-files/boost-build/tools/package.py +++ /dev/null @@ -1,168 +0,0 @@ -# Status: ported -# Base revision: 64488 -# -# Copyright (c) 2005, 2010 Vladimir Prus. -# Copyright 2006 Rene Rivera. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Provides mechanism for installing whole packages into a specific directory -# structure. This is opposed to the 'install' rule, that installs a number of -# targets to a single directory, and does not care about directory structure at -# all. - -# Example usage: -# -#   package.install boost : <properties> -#                         : <binaries> -#                         : <libraries> -#                         : <headers> -#                         ; -# -# This will install binaries, libraries and headers to the 'proper' location, -# given by command line options --prefix, --exec-prefix, --bindir, --libdir and -# --includedir. -# -# The rule is just a convenient wrapper, avoiding the need to define several -# 'install' targets. -# -# The only install-related feature is <install-source-root>. It will apply to -# headers only and if present, paths of headers relatively to source root will -# be retained after installing. If it is not specified, then "." is assumed, so -# relative paths in headers are always preserved. - -import b2.build.feature as feature -import b2.build.property as property -import b2.util.option as option -import b2.tools.stage as stage - -from b2.build.alias import alias - -from b2.manager import get_manager - -from b2.util import bjam_signature -from b2.util.utility import ungrist - - -import os - -feature.feature("install-default-prefix", [], ["free", "incidental"]) - -@bjam_signature((["name", "package_name", "?"], ["requirements", "*"], -                 ["binaries", "*"], ["libraries", "*"], ["headers", "*"])) -def install(name, package_name=None, requirements=[], binaries=[], libraries=[], headers=[]): - -    requirements = requirements[:] -    binaries = binaries[:] -    libraries - -    if not package_name: -        package_name = name - -    if option.get("prefix"): -        # If --prefix is explicitly specified on the command line, -        # then we need wipe away any settings of libdir/includir that -        # is specified via options in config files. -        option.set("bindir", None) -        option.set("libdir", None) -        option.set("includedir", None) -             -    # If <install-source-root> is not specified, all headers are installed to -    # prefix/include, no matter what their relative path is. Sometimes that is -    # what is needed. 
-    install_source_root = property.select('install-source-root', requirements) -    if install_source_root: -        requirements = property.change(requirements, 'install-source-root', None) -             -    install_header_subdir = property.select('install-header-subdir', requirements) -    if install_header_subdir: -        install_header_subdir = ungrist(install_header_subdir[0]) -        requirements = property.change(requirements, 'install-header-subdir', None) - -    # First, figure out all locations. Use the default if no prefix option -    # given. -    prefix = get_prefix(name, requirements) - -    # Architecture dependent files. -    exec_locate = option.get("exec-prefix", prefix) - -    # Binaries. -    bin_locate = option.get("bindir", os.path.join(prefix, "bin")) - -    # Object code libraries. -    lib_locate = option.get("libdir", os.path.join(prefix, "lib")) - -    # Source header files. -    include_locate = option.get("includedir", os.path.join(prefix, "include")) - -    stage.install(name + "-bin", binaries, requirements + ["<location>" + bin_locate]) -     -    alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"]) -     -    # Since the install location of shared libraries differs on universe -    # and cygwin, use target alternatives to make different targets. -    # We should have used indirection conditioanl requirements, but it's -    # awkward to pass bin-locate and lib-locate from there to another rule. -    alias(name + "-lib-shared", [name + "-lib-shared-universe"]) -    alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["<target-os>cygwin"]) -     -    # For shared libraries, we install both explicitly specified one and the -    # shared libraries that the installed executables depend on. -    stage.install(name + "-lib-shared-universe", binaries + libraries, -                  requirements + ["<location>" + lib_locate, "<install-dependencies>on", -                                  "<install-type>SHARED_LIB"]) -    stage.install(name + "-lib-shared-cygwin", binaries + libraries, -                  requirements + ["<location>" + bin_locate, "<install-dependencies>on", -                                  "<install-type>SHARED_LIB"]) - -    # For static libraries, we do not care about executable dependencies, since -    # static libraries are already incorporated into them. 
-    stage.install(name + "-lib-static", libraries, requirements + -                  ["<location>" + lib_locate, "<install-dependencies>on", "<install-type>STATIC_LIB"]) -    stage.install(name + "-headers", headers, requirements \ -                  + ["<location>" + os.path.join(include_locate, s) for s in install_header_subdir] -                  + install_source_root) - -    alias(name, [name + "-bin", name + "-lib", name + "-headers"]) - -    pt = get_manager().projects().current() - -    for subname in ["bin", "lib", "headers", "lib-shared", "lib-static", "lib-shared-universe", "lib-shared-cygwin"]: -        pt.mark_targets_as_explicit([name + "-" + subname]) - -@bjam_signature((["target_name"], ["package_name"], ["data", "*"], ["requirements", "*"])) -def install_data(target_name, package_name, data, requirements): -    if not package_name: -        package_name = target_name - -    if option.get("prefix"): -        # If --prefix is explicitly specified on the command line, -        # then we need wipe away any settings of datarootdir -        option.set("datarootdir", None) -     -    prefix = get_prefix(package_name, requirements) -    datadir = option.get("datarootdir", os.path.join(prefix, "share")) - -    stage.install(target_name, data, -                  requirements + ["<location>" + os.path.join(datadir, package_name)]) - -    get_manager().projects().current().mark_targets_as_explicit([target_name]) - -def get_prefix(package_name, requirements): - -    specified = property.select("install-default-prefix", requirements) -    if specified: -        specified = ungrist(specified[0]) -    prefix = option.get("prefix", specified) -    requirements = property.change(requirements, "install-default-prefix", None)     -    # Or some likely defaults if neither is given. -    if not prefix: -        if os.name == "nt": -            prefix = "C:\\" + package_name -        elif os.name == "posix": -            prefix = "/usr/local" - -    return prefix - diff --git a/jam-files/boost-build/tools/pathscale.jam b/jam-files/boost-build/tools/pathscale.jam deleted file mode 100644 index 454e3454..00000000 --- a/jam-files/boost-build/tools/pathscale.jam +++ /dev/null @@ -1,168 +0,0 @@ -#  Copyright 2006 Noel Belcourt -#  Distributed under the Boost Software License, Version 1.0. -#    (See accompanying file LICENSE_1_0.txt or copy at -#          http://www.boost.org/LICENSE_1_0.txt) - -import property ; -import generators ; -import toolset : flags ; -import feature ; -import type ; -import common ; -import fortran ; - -feature.extend toolset : pathscale ; -toolset.inherit pathscale : unix ; -generators.override pathscale.prebuilt : builtin.prebuilt ; -generators.override pathscale.searched-lib-generator : searched-lib-generator ; - -# Documentation and toolchain description located -# http://www.pathscale.com/docs.html - -rule init ( version ? 
: command * : options * )  -{ -  command = [ common.get-invocation-command pathscale : pathCC : $(command)  -    : /opt/ekopath/bin ] ; -   -  # Determine the version -  local command-string = $(command:J=" ") ; -  if $(command) -  {     -      version ?= [ MATCH "^([0-9.]+)" -          : [ SHELL "$(command-string) -dumpversion" ] ] ; -  } -   -  local condition = [ common.check-init-parameters pathscale -    : version $(version) ] ; - -  common.handle-options pathscale : $(condition) : $(command) : $(options) ; -     -  toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) : -    [ feature.get-values <fflags> : $(options) ] : unchecked ; - -  command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ; - -  toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ; - -  # fortran support -  local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ; -  local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ; -  local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ; - -  toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ; -  toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ; - -  # always link lib rt to resolve clock_gettime() -  flags pathscale.link FINDLIBS-SA : rt : unchecked ; -} - -# Declare generators -generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ; -generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ; -generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ; -generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ; - -# Declare flags and actions for compilation -flags pathscale.compile OPTIONS <optimization>off   : -O0 ; -flags pathscale.compile OPTIONS <optimization>speed : -O3 ; -flags pathscale.compile OPTIONS <optimization>space : -Os ; - -flags pathscale.compile OPTIONS <inlining>off  : -noinline ; -flags pathscale.compile OPTIONS <inlining>on   : -inline ; -flags pathscale.compile OPTIONS <inlining>full : -inline ; - -flags pathscale.compile OPTIONS <warnings>off           : -woffall ; -flags pathscale.compile OPTIONS <warnings>on            : -Wall ; -flags pathscale.compile OPTIONS <warnings>all           : -Wall -pedantic ; -flags pathscale.compile OPTIONS <warnings-as-errors>on  : -Werror ; - -flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ; -flags pathscale.compile OPTIONS <profiling>on     : -pg ; -flags pathscale.compile OPTIONS <link>shared      : -fPIC ; -flags pathscale.compile OPTIONS <address-model>32 : -m32 ; -flags pathscale.compile OPTIONS <address-model>64 : -m64 ; - -flags pathscale.compile USER_OPTIONS <cflags> ; -flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ; -flags pathscale.compile DEFINES <define> ; -flags pathscale.compile INCLUDES <include> ; - -flags pathscale.compile.fortran USER_OPTIONS <fflags> ; -flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ; - -actions compile.c -{ -    "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.fortran -{ -    "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -rule compile.fortran90 ( targets * : sources * : properties * ) -{ -  # the space rule inserts spaces between targets 
and it's necessary -  SPACE on $(targets) = " " ; -  # Serialize execution of the compile.fortran90 action -  # F90 source must be compiled in a particular order so we -  # serialize the build as a parallel F90 compile might fail -  JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ; -} - -actions compile.fortran90 -{ -    "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ; -# Strip the binary when no debugging is needed -flags pathscale.link OPTIONS <debug-symbols>off : -g0 ; -flags pathscale.link OPTIONS <profiling>on : -pg ; -flags pathscale.link USER_OPTIONS <linkflags> ; -flags pathscale.link LINKPATH <library-path> ; -flags pathscale.link FINDLIBS-ST <find-static-library> ; -flags pathscale.link FINDLIBS-SA <find-shared-library> ; -flags pathscale.link FINDLIBS-SA <threading>multi : pthread ; -flags pathscale.link LIBRARIES <library-file> ; -flags pathscale.link LINK-RUNTIME <runtime-link>static : static ; -flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags pathscale.link RPATH <dll-path> ; -# On gcc, there are separate options for dll path at runtime and -# link time. On Solaris, there's only one: -R, so we have to use -# it, even though it's bad idea. -flags pathscale.link RPATH <xdll-path> ; - -rule link ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -} - -# Declare action for creating static libraries -# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)" -actions piecemeal archive -{ -    ar $(ARFLAGS) ru "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/pch.jam b/jam-files/boost-build/tools/pch.jam deleted file mode 100644 index 0c6e98fa..00000000 --- a/jam-files/boost-build/tools/pch.jam +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -##### Using Precompiled Headers (Quick Guide) ##### -# -# Make precompiled mypch.hpp: -# -#    import pch ; -# -#    cpp-pch mypch -#      : # sources -#        mypch.hpp -#      : # requiremnts -#        <toolset>msvc:<source>mypch.cpp -#      ; -# -# Add cpp-pch to sources: -# -#    exe hello -#      : main.cpp hello.cpp mypch -#      ; - -import "class" : new ; -import type ; -import feature ; -import generators ; - -type.register     PCH : pch ; - -type.register   C_PCH : : PCH ; -type.register CPP_PCH : : PCH ; - -# Control precompiled header (PCH) generation. -feature.feature pch : -      on -      off -    : propagated ; - - -feature.feature pch-header : : free dependency ; -feature.feature pch-file   : : free dependency ; - -# Base PCH generator. 
The 'run' method has the logic to prevent this generator -# from being run unless it's being used for a top-level PCH target. -class pch-generator : generator -{ -    import property-set ; - -    rule action-class ( ) -    { -        return compile-action ; -    } - -    rule run ( project name ? : property-set : sources + ) -    { -        if ! $(name) -        { -            # Unless this generator is invoked as the top-most generator for a -            # main target, fail. This allows using 'H' type as input type for -            # this generator, while preventing Boost.Build to try this generator -            # when not explicitly asked for. -            # -            # One bad example is msvc, where pch generator produces both PCH -            # target and OBJ target, so if there's any header generated (like by -            # bison, or by msidl), we'd try to use pch generator to get OBJ from -            # that H, which is completely wrong. By restricting this generator -            # only to pch main target, such problem is solved. -        } -        else -        { -            local r = [ run-pch $(project) $(name) -              : [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ] -              : $(sources) ] ; -            return [ generators.add-usage-requirements $(r) -              : <define>BOOST_BUILD_PCH_ENABLED ] ; -        } -    } - -    # This rule must be overridden by the derived classes. -    rule run-pch ( project name ? : property-set : sources + ) -    { -    } -} - - -# NOTE: requirements are empty, default pch generator can be applied when -# pch=off. -generators.register -    [ new dummy-generator pch.default-c-pch-generator   : :   C_PCH ] ; -generators.register -    [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ; diff --git a/jam-files/boost-build/tools/pch.py b/jam-files/boost-build/tools/pch.py deleted file mode 100644 index 21d3db09..00000000 --- a/jam-files/boost-build/tools/pch.py +++ /dev/null @@ -1,83 +0,0 @@ -# Status: Being ported by Steven Watanabe -# Base revision: 47077 -# -# Copyright (c) 2005 Reece H. Dunn. -# Copyright 2006 Ilya Sokolov -# Copyright (c) 2008 Steven Watanabe -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -##### Using Precompiled Headers (Quick Guide) ##### -# -# Make precompiled mypch.hpp: -# -#    import pch ; -# -#    cpp-pch mypch -#      : # sources -#        mypch.hpp -#      : # requiremnts -#        <toolset>msvc:<source>mypch.cpp -#      ; -# -# Add cpp-pch to sources: -# -#    exe hello -#      : main.cpp hello.cpp mypch -#      ; - -from b2.build import type, feature, generators - -type.register('PCH', ['pch']) -type.register('C_PCH', [], 'PCH') -type.register('CPP_PCH', [], 'PCH') - -# Control precompiled header (PCH) generation. -feature.feature('pch', -                ['on', 'off'], -                ['propagated']) - -feature.feature('pch-header', [], ['free', 'dependency']) -feature.feature('pch-file', [], ['free', 'dependency']) - -class PchGenerator(generators.Generator): -    """ -        Base PCH generator. The 'run' method has the logic to prevent this generator -        from being run unless it's being used for a top-level PCH target. 
-    """ -    def action_class(self): -        return 'compile-action' - -    def run(self, project, name, prop_set, sources): -        if not name: -            # Unless this generator is invoked as the top-most generator for a -            # main target, fail. This allows using 'H' type as input type for -            # this generator, while preventing Boost.Build to try this generator -            # when not explicitly asked for. -            # -            # One bad example is msvc, where pch generator produces both PCH -            # target and OBJ target, so if there's any header generated (like by -            # bison, or by msidl), we'd try to use pch generator to get OBJ from -            # that H, which is completely wrong. By restricting this generator -            # only to pch main target, such problem is solved. -            pass -        else: -            r = self.run_pch(project, name, -                 prop_set.add_raw('<define>BOOST_BUILD_PCH_ENABLED'), -                 sources) -            return generators.add_usage_requirements( -                r, ['<define>BOOST_BUILD_PCH_ENABLED']) - -    # This rule must be overridden by the derived classes. -    def run_pch(self, project, name, prop_set, sources): -        pass - -#FIXME: dummy-generator in builtins.jam needs to be ported. -# NOTE: requirements are empty, default pch generator can be applied when -# pch=off. -###generators.register( -###    [ new dummy-generator pch.default-c-pch-generator   : :   C_PCH ] ; -###generators.register -###    [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ; diff --git a/jam-files/boost-build/tools/pgi.jam b/jam-files/boost-build/tools/pgi.jam deleted file mode 100644 index 3a35c644..00000000 --- a/jam-files/boost-build/tools/pgi.jam +++ /dev/null @@ -1,147 +0,0 @@ -#  Copyright Noel Belcourt 2007. -#  Distributed under the Boost Software License, Version 1.0. -#    (See accompanying file LICENSE_1_0.txt or copy at -#          http://www.boost.org/LICENSE_1_0.txt) - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature ; -import fortran ; -import type ; -import common ; -import gcc ; - -feature.extend toolset : pgi ; -toolset.inherit pgi : unix ; -generators.override pgi.prebuilt : builtin.lib-generator ; -generators.override pgi.searched-lib-generator : searched-lib-generator ; - -# Documentation and toolchain description located -# http://www.pgroup.com/resources/docs.htm - -rule init ( version ? 
: command * : options * )  -{ -  local condition = [ common.check-init-parameters pgi : version $(version) ] ; - -  local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ; - -  common.handle-options pgi : $(condition) : $(l_command) : $(options) ; -     -  command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ; - -  toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ; - -  flags pgi.compile DEFINES $(condition) : -    [ feature.get-values <define> : $(options) ] : unchecked ; - -  # IOV_MAX support -  flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ; - -  # set link flags -  flags pgi.link FINDLIBS-ST : [ -    feature.get-values <find-static-library> : $(options) ] : unchecked ; - -  # always link lib rt to resolve clock_gettime() -  flags pgi.link FINDLIBS-SA : rt [ -    feature.get-values <find-shared-library> : $(options) ] : unchecked ; - -  gcc.init-link-flags pgi gnu $(condition) ; -} - -# Declare generators -generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ; -generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ; -generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ; - -# Declare flags and actions for compilation -flags pgi.compile OPTIONS : -Kieee ; -flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ; -flags pgi.compile OPTIONS <debug-symbols>on : -gopt ; -flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ; -flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ; -flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ; -# flags pgi.compile OPTIONS <threading>multi : -mt ; - -flags pgi.compile OPTIONS <warnings>off : -Minform=severe ; -flags pgi.compile OPTIONS <warnings>on : -Minform=warn ; - -flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ; - -flags pgi.compile OPTIONS <cflags> ; -flags pgi.compile.c++ OPTIONS <cxxflags> ; -flags pgi.compile DEFINES <define> ; -flags pgi.compile INCLUDES <include> ; - -flags pgi.compile.fortran OPTIONS <fflags> ; - -actions compile.c -{ -    "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.fortran -{ -    "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags pgi.link OPTIONS <debug-symbols>on : -gopt ; -# Strip the binary when no debugging is needed -flags pgi.link OPTIONS <debug-symbols>off : -s ; -flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ; -flags pgi.link OPTIONS <linkflags> ; -flags pgi.link OPTIONS <link>shared : -fpic -fPIC ; -flags pgi.link LINKPATH <library-path> ; -flags pgi.link FINDLIBS-ST <find-static-library> ; -flags pgi.link FINDLIBS-SA <find-shared-library> ; -flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ; -flags pgi.link LIBRARIES <library-file> ; -flags pgi.link LINK-RUNTIME <runtime-link>static : static ; -flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags pgi.link RPATH <dll-path> ; - -# On gcc, there are separate options for dll path at runtime and -# link time. On Solaris, there's only one: -R, so we have to use -# it, even though it's bad idea. 
-flags pgi.link RPATH <xdll-path> ; - -rule link ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -# reddish can only link statically and, somehow, the presence of -Bdynamic on the link line  -# marks the executable as a dynamically linked exec even though no dynamic libraries are supplied. -# Yod on redstorm refuses to load an executable that is dynamically linked. -# removing the dynamic link options should get us where we need to be on redstorm. -# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME) -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -actions updated together piecemeal pgi.archive -{ -    ar -rc$(ARFLAGS:E=) "$(<)" "$(>)" -} - diff --git a/jam-files/boost-build/tools/python-config.jam b/jam-files/boost-build/tools/python-config.jam deleted file mode 100644 index 40aa825b..00000000 --- a/jam-files/boost-build/tools/python-config.jam +++ /dev/null @@ -1,27 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for Python tools and librries. To use, just import this module. - -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ -    for local R in 2.4 2.3 2.2 -    { -        local python-path = [ W32_GETREG -            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ; -        local python-version = $(R) ; -         -        if $(python-path) -        { -            if --debug-configuration in [ modules.peek : ARGV ] -            { -                ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ; -            } -            using python : $(python-version) : $(python-path) ; -        } -    } -} diff --git a/jam-files/boost-build/tools/python.jam b/jam-files/boost-build/tools/python.jam deleted file mode 100644 index 97a9f9a5..00000000 --- a/jam-files/boost-build/tools/python.jam +++ /dev/null @@ -1,1267 +0,0 @@ -# Copyright 2004 Vladimir Prus. -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Support for Python and the the Boost.Python library. -# -# This module defines -# -# - a project 'python' with a target 'python' in it, that corresponds to the -#   python library -# -# - a main target rule 'python-extension' which can be used to build a python -#   extension. -# -# Extensions that use Boost.Python must explicitly link to it. 
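# Example usage (a minimal sketch; the 'greet' module name, its source file
# and the path to the Boost.Python library target are illustrative and
# depend on the local project layout):
#
#    # user-config.jam -- configure an interpreter first
#    using python : 2.3 ;
#
#    # Jamfile -- declare the extension module; note the explicit link to
#    # Boost.Python, as required above
#    import python ;
#
#    python-extension greet
#      : greet.cpp                      # extension sources
#        /boost/python//boost_python    # explicit Boost.Python link
#      ;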
- -import type ; -import testing ; -import generators ; -import project ; -import errors ; -import targets ; -import "class" : new ; -import os ; -import common ; -import toolset ; -import regex ; -import numbers ; -import string ; -import property ; -import sequence ; -import path ; -import feature ; -import set ; -import builtin ; -import version ; - - -# Make this module a project. -project.initialize $(__name__) ; -project python ; - -# Save the project so that if 'init' is called several times we define new -# targets in the python project, not in whatever project we were called by. -.project = [ project.current ] ; - -# Dynamic linker lib. Necessary to specify it explicitly on some platforms. -lib dl ; -# This contains 'openpty' function need by python. Again, on some system need to -# pass this to linker explicitly. -lib util ; -# Python uses pthread symbols. -lib pthread ; -# Extra library needed by phtread on some platforms. -lib rt ; - -# The pythonpath feature specifies additional elements for the PYTHONPATH -# environment variable, set by run-pyd. For example, pythonpath can be used to -# access Python modules that are part of the product being built, but are not -# installed in the development system's default paths. -feature.feature pythonpath : : free optional path ; - -# Initializes the Python toolset. Note that all parameters are optional. -# -# - version -- the version of Python to use. Should be in Major.Minor format, -#   for example 2.3.  Do not include the subminor version. -# -# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter. -#   Alternatively, the installation prefix for Python libraries and includes. If -#   empty, will be guessed from the version, the platform's installation -#   patterns, and the python executables that can be found in PATH. -# -# - includes: the include path to Python headers. If empty, will be guessed. -# -# - libraries: the path to Python library binaries. If empty, will be guessed. -#   On MacOS/Darwin, you can also pass the path of the Python framework. -# -# - condition: if specified, should be a set of properties that are matched -#   against the build configuration when Boost.Build selects a Python -#   configuration to use. -# -# - extension-suffix: A string to append to the name of extension modules before -#   the true filename extension.  Ordinarily we would just compute this based on -#   the value of the <python-debugging> feature. However ubuntu's python-dbg -#   package uses the windows convention of appending _d to debug-build extension -#   modules. We have no way of detecting ubuntu, or of probing python for the -#   "_d" requirement, and if you configure and build python using -#   --with-pydebug, you'll be using the standard *nix convention. Defaults to "" -#   (or "_d" when targeting windows and <python-debugging> is set). -# -# Example usage: -# -#   using python : 2.3 ; -#   using python : 2.3 : /usr/local/bin/python ; -# -rule init ( version ? : cmd-or-prefix ? : includes * : libraries ? -    : condition * : extension-suffix ? ) -{ -    project.push-current $(.project) ; - -    debug-message Configuring python... 
; -    for local v in version cmd-or-prefix includes libraries condition -    { -        if $($(v)) -        { -            debug-message "  user-specified "$(v): \"$($(v))\" ; -        } -    } - -    configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ; - -    project.pop-current ; -} - -# A simpler version of SHELL that grabs stderr as well as stdout, but returns -# nothing if there was an error. -# -local rule shell-cmd ( cmd ) -{ -    debug-message running command '$(cmd)" 2>&1"' ; -    x = [ SHELL $(cmd)" 2>&1" : exit-status ] ; -    if $(x[2]) = 0 -    { -        return $(x[1]) ; -    } -    else -    { -        return ; -    } -} - - -# Try to identify Cygwin symlinks. Invoking such a file directly as an NT -# executable from a native Windows build of bjam would be fatal to the bjam -# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove -# that those are not also symlinks. ;-) -# -# If a symlink is found returns non-empty; we try to extract the target of the -# symlink from the file and return that. -# -# Note: 1. only works on NT  2. path is a native path. -local rule is-cygwin-symlink ( path ) -{ -    local is-symlink = ; - -    # Look for a file with the given path having the S attribute set, as cygwin -    # symlinks do. /-C means "do not use thousands separators in file sizes." -    local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ; - -    if $(dir-listing) -    { -        # Escape any special regex characters in the base part of the path. -        local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ; - -        # Extract the file's size from the directory listing. -        local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ; - -        # If the file has a reasonably small size, look for the special symlink -        # identification text. -        if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ] -        { -            local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ; -            if $(link[2]) != 0 -            { -                local nl = " - -" ; -                is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ; -                if $(is-symlink) -                { -                    is-symlink = [ *nix-path-to-native $(is-symlink) ] ; -                    is-symlink = $(is-symlink:R=$(path:D)) ; -                } - -            } -        } -    } -    return $(is-symlink) ; -} - - -# Append ext to each member of names that does not contain '.'. -# -local rule default-extension ( names * : ext * ) -{ -    local result ; -    for local n in $(names) -    { -        switch $(n) -        { -            case *.* : result += $(n) ; -            case * : result += $(n)$(ext) ; -        } -    } -    return $(result) ; -} - - -# Tries to determine whether invoking "cmd" would actually attempt to launch a -# cygwin symlink. -# -# Note: only works on NT. -# -local rule invokes-cygwin-symlink ( cmd ) -{ -    local dirs = $(cmd:D) ; -    if ! $(dirs) -    { -        dirs = . [ os.executable-path ] ; -    } -    local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ; -    local paths = [ GLOB $(dirs) : $(base) ] ; -    if $(paths) -    { -        # Make sure we have not run into a Cygwin symlink. Invoking such a file -        # as an NT executable would be fatal for the bjam process. 
-        return [ is-cygwin-symlink $(paths[1]) ] ; -    } -} - - -local rule debug-message ( message * ) -{ -    if --debug-configuration in [ modules.peek : ARGV ] -    { -        ECHO notice: [python-cfg] $(message) ; -    } -} - - -# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and -# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result -# found. Also accounts for the fact that on 64-bit machines, 32-bit software has -# its own area, under SOFTWARE\Wow6432node. -# -local rule software-registry-value ( path : data ? ) -{ -    local result ; -    for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE -    { -        for local x64elt in "" Wow6432node\\ # Account for 64-bit windows -        { -            if ! $(result) -            { -                result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ; -            } -        } - -    } -    return $(result) ; -} - - -.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ; -.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ; - -.working-directory = [ PWD ] ; -.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ; -.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ; - - -local rule windows-to-cygwin-path ( path ) -{ -    # If path is rooted with a drive letter, rewrite it using the /cygdrive -    # mountpoint. -    local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ; - -    # Else if path is rooted without a drive letter, use the working directory. -    p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ; - -    # Else return the path unchanged. -    return $(p:E=$(path:T)) ; -} - - -# :W only works in Cygwin builds of bjam.  This one works on NT builds as well. -# -local rule cygwin-to-windows-path ( path ) -{ -    path = $(path:R="") ; # strip any trailing slash - -    local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ; -    if $(drive-letter) -    { -        path = $(drive-letter) ; -    } -    else if $(path:R=/x) = $(path) # already rooted? -    { -        # Look for a cygwin mount that includes each head sequence in $(path). -        local head = $(path) ; -        local tail = "" ; - -        while $(head) -        { -            local root = [ software-registry-value -                "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ; - -            if $(root) -            { -                path = $(tail:R=$(root)) ; -                head = ; -            } -            tail = $(tail:R=$(head:D=)) ; - -            if $(head) = / -            { -                head = ; -            } -            else -            { -                head = $(head:D) ; -            } -        } -    } -    return [ regex.replace $(path:R="") / \\ ] ; -} - - -# Convert a *nix path to native. -# -local rule *nix-path-to-native ( path ) -{ -    if [ os.name ] = NT -    { -        path = [ cygwin-to-windows-path $(path) ] ; -    } -    return $(path) ; -} - - -# Convert an NT path to native. -# -local rule windows-path-to-native ( path ) -{ -    if [ os.name ] = NT -    { -        return $(path) ; -    } -    else -    { -        return [ windows-to-cygwin-path $(path) ] ; -    } -} - - -# Return nonempty if path looks like a windows path, i.e. it starts with a drive -# letter or contains backslashes. 
-# -local rule guess-windows-path ( path ) -{ -    return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ; -} - - -local rule path-to-native ( paths * ) -{ -    local result ; - -    for local p in $(paths) -    { -        if [ guess-windows-path $(p) ] -        { -            result += [ windows-path-to-native $(p) ] ; -        } -        else -        { -            result += [ *nix-path-to-native $(p:T) ] ; -        } -    } -    return $(result) ; -} - - -# Validate the version string and extract the major/minor part we care about. -# -local rule split-version ( version ) -{ -    local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ; -    if ! $(major-minor[2]) || $(major-minor[3]) -    { -        ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ; - -        # Add a zero to account for the missing digit if necessary. -        major-minor += 0 ; -    } - -    return $(major-minor[1]) $(major-minor[2]) ; -} - - -# Build a list of versions from 3.0 down to 1.5. Because bjam can not enumerate -# registry sub-keys, we have no way of finding a version with a 2-digit minor -# version, e.g. 2.10 -- let us hope that never happens. -# -.version-countdown = ; -for local v in [ numbers.range 15 30 ] -{ -    .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ; -} - - -local rule windows-installed-pythons ( version ? ) -{ -    version ?= $(.version-countdown) ; -    local interpreters ; - -    for local v in $(version) -    { -        local install-path = [ -          software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ; - -        if $(install-path) -        { -            install-path = [ windows-path-to-native $(install-path) ] ; -            debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ; -        } - -        interpreters += $(:E=python:R=$(install-path)) ; -    } -    return $(interpreters) ; -} - - -local rule darwin-installed-pythons ( version ? ) -{ -    version ?= $(.version-countdown) ; - -    local prefix -      = [ GLOB /System/Library/Frameworks /Library/Frameworks -          : Python.framework ] ; - -    return $(prefix)/Versions/$(version)/bin/python ; -} - - -# Assume "python-cmd" invokes a python interpreter and invoke it to extract all -# the information we care about from its "sys" module. Returns void if -# unsuccessful. -# -local rule probe ( python-cmd ) -{ -    # Avoid invoking a Cygwin symlink on NT. -    local skip-symlink ; -    if [ os.name ] = NT -    { -        skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ; -    } - -    if $(skip-symlink) -    { -        debug-message -------------------------------------------------------------------- ; -        debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ; -        debug-message causing a bjam built for Windows to hang. ; -        debug-message ; -        debug-message If you intend to target a Cygwin build of Python, please ; -        debug-message replace the path to the link with the path to a real executable ; -        debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ; -        debug-message "in" user-config.jam or site-config.jam. 
Do not forget to escape ; -        debug-message backslashes ; -        debug-message -------------------------------------------------------------------- ; -    } -    else -    { -        # Prepare a List of Python format strings and expressions that can be -        # used to print the constants we want from the sys module. - -        # We do not really want sys.version since that is a complicated string, -        # so get the information from sys.version_info instead. -        local format = "version=%d.%d" ; -        local exprs = "version_info[0]" "version_info[1]" ; - -        for local s in $(sys-elements[2-]) -        { -            format += $(s)=%s ; -            exprs += $(s) ; -        } - -        # Invoke Python and ask it for all those values. -        if [ version.check-jam-version 3 1 17 ] || ( [ os.name ] != NT ) -        { -            # Prior to version 3.1.17 Boost Jam's SHELL command did not support -            # quoted commands correctly on Windows. This means that on that -            # platform we do not support using a Python command interpreter -            # executable whose path contains a space character. -            python-cmd = \"$(python-cmd)\" ; -        } -        local full-cmd = -            $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ; - -        local output = [ shell-cmd $(full-cmd) ] ; -        if $(output) -        { -            # Parse the output to get all the results. -            local nl = " - -" ; -            for s in $(sys-elements) -            { -                # These variables are expected to be declared local in the -                # caller, so Jam's dynamic scoping will set their values there. -                sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ; -            } -        } -        return $(output) ; -    } -} - - -# Make sure the "libraries" and "includes" variables (in an enclosing scope) -# have a value based on the information given. -# -local rule compute-default-paths ( target-os : version ? : prefix ? : -    exec-prefix ? ) -{ -    exec-prefix ?= $(prefix) ; - -    if $(target-os) = windows -    { -        # The exec_prefix is where you're supposed to look for machine-specific -        # libraries. -        local default-library-path = $(exec-prefix)\\libs ; -        local default-include-path = $(:E=Include:R=$(prefix)) ; - -        # If the interpreter was found in a directory called "PCBuild" or -        # "PCBuild8," assume we're looking at a Python built from the source -        # distro, and go up one additional level to the default root. Otherwise, -        # the default root is the directory where the interpreter was found. - -        # We ask Python itself what the executable path is in case of -        # intermediate symlinks or shell scripts. 
-        local executable-dir = $(sys.executable:D) ; - -        if [ MATCH ^(PCBuild) : $(executable-dir:D=) ] -        { -            debug-message "This Python appears to reside in a source distribution;" ; -            debug-message "prepending \""$(executable-dir)"\" to default library search path" ; - -            default-library-path = $(executable-dir) $(default-library-path) ; - -            default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ; - -            debug-message "and \""$(default-include-path[1])"\" to default #include path" ; -        } - -        libraries ?= $(default-library-path) ; -        includes ?= $(default-include-path) ; -    } -    else -    { -        includes ?= $(prefix)/include/python$(version) ; - -        local lib = $(exec-prefix)/lib ; -        libraries ?= $(lib)/python$(version)/config $(lib) ; -    } -} - -# The version of the python interpreter to use. -feature.feature python : : propagated ; -feature.feature python.interpreter : : free ; - -toolset.flags python.capture-output PYTHON : <python.interpreter> ; - -# -# Support for Python configured --with-pydebug -# -feature.feature python-debugging : off on : propagated ; -builtin.variant debug-python : debug : <python-debugging>on ; - - -# Return a list of candidate commands to try when looking for a Python -# interpreter. prefix is expected to be a native path. -# -local rule candidate-interpreters ( version ? : prefix ? : target-os ) -{ -    local bin-path = bin ; -    if $(target-os) = windows -    { -        # On Windows, look in the root directory itself and, to work with the -        # result of a build-from-source, the PCBuild directory. -        bin-path = PCBuild8 PCBuild "" ; -    } - -    bin-path = $(bin-path:R=$(prefix)) ; - -    if $(target-os) in windows darwin -    { -        return                                            # Search: -            $(:E=python:R=$(bin-path))                    #   Relative to the prefix, if any -            python                                        #   In the PATH -            [ $(target-os)-installed-pythons $(version) ] #   Standard install locations -        ; -    } -    else -    { -        # Search relative to the prefix, or if none supplied, in PATH. -        local unversioned = $(:E=python:R=$(bin-path:E=)) ; - -        # If a version was specified, look for a python with that specific -        # version appended before looking for one called, simply, "python" -        return $(unversioned)$(version) $(unversioned) ; -    } -} - - -# Compute system library dependencies for targets linking with static Python -# libraries. -# -# On many systems, Python uses libraries such as pthreads or libdl. Since static -# libraries carry no library dependency information of their own that the linker -# can extract, these extra dependencies have to be given explicitly on the link -# line of the client.  The information about these dependencies is packaged into -# the "python" target below. -# -# Even where Python itself uses pthreads, it never allows extension modules to -# be entered concurrently (unless they explicitly give up the interpreter lock). -# Therefore, extension modules do not need the efficiency overhead of threadsafe -# code as produced by <threading>multi, and we handle libpthread along with -# other libraries here. 
Note: this optimization is based on an assumption that -# the compiler generates link-compatible code in both the single- and -# multi-threaded cases, and that system libraries do not change their ABIs -# either. -# -# Returns a list of usage-requirements that link to the necessary system -# libraries. -# -local rule system-library-dependencies ( target-os ) -{ -    switch $(target-os) -    { -        case s[uo][nl]* : # solaris, sun, sunos -            # Add a librt dependency for the gcc toolset on SunOS (the sun -            # toolset adds -lrt unconditionally). While this appears to -            # duplicate the logic already in gcc.jam, it does not as long as -            # we are not forcing <threading>multi. - -            # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields -            # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem -            # to be the right list for extension modules. For example, on my -            # installation, adding -ldl causes at least one test to fail because -            # the library can not be found and removing it causes no failures. - -            # Apparently, though, we need to add -lrt for gcc. -            return <toolset>gcc:<library>rt ; - -        case osf : return  <library>pthread <toolset>gcc:<library>rt ; - -        case qnx* : return ; -        case darwin : return ; -        case windows : return ; - -        case hpux : return  <library>rt ; -        case *bsd : return  <library>pthread <toolset>gcc:<library>util ; - -        case aix : return  <library>pthread <library>dl ; - -        case * : return  <library>pthread <library>dl  -            <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ; -    } -} - - -# Declare a target to represent Python's library. -# -local rule declare-libpython-target ( version ? : requirements * ) -{ -    # Compute the representation of Python version in the name of Python's -    # library file. -    local lib-version = $(version) ; -    if <target-os>windows in $(requirements) -    { -        local major-minor = [ split-version $(version) ] ; -        lib-version = $(major-minor:J="") ; -        if <python-debugging>on in $(requirements) -        { -            lib-version = $(lib-version)_d ; -        } -    } - -    if ! $(lib-version) -    { -        ECHO *** warning: could not determine Python version, which will ; -        ECHO *** warning: probably prevent us from linking with the python ; -        ECHO *** warning: library.  Consider explicitly passing the version ; -        ECHO *** warning: to 'using python'. ; -    } - -    # Declare it. -    lib python.lib : : <name>python$(lib-version) $(requirements) ; -} - - -# Implementation of init. -local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? : -    condition * : extension-suffix ? ) -{ -    local prefix ; -    local exec-prefix ; -    local cmds-to-try ; -    local interpreter-cmd ; - -    local target-os = [ feature.get-values target-os : $(condition) ] ; -    target-os ?= [ feature.defaults target-os ] ; -    target-os = $(target-os:G=) ; - -    if $(target-os) = windows && <python-debugging>on in $(condition) -    { -        extension-suffix ?= _d ; -    } -    extension-suffix ?= "" ; - -    # Normalize and dissect any version number. -    local major-minor ; -    if $(version) -    { -        major-minor = [ split-version $(version) ] ; -        version = $(major-minor:J=.) ; -    } - -    local cmds-to-try ; - -    if ! 
$(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ] -    { -        # If the user did not pass a command, whatever we got was a prefix. -        prefix = $(cmd-or-prefix) ; -        cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ; -    } -    else -    { -        # Work with the command the user gave us. -        cmds-to-try = $(cmd-or-prefix) ; - -        # On Windows, do not nail down the interpreter command just yet in case -        # the user specified something that turns out to be a cygwin symlink, -        # which could bring down bjam if we invoke it. -        if $(target-os) != windows -        { -            interpreter-cmd = $(cmd-or-prefix) ; -        } -    } - -    # Values to use in case we can not really find anything in the system. -    local fallback-cmd = $(cmds-to-try[1]) ; -    local fallback-version ; - -    # Anything left to find or check? -    if ! ( $(interpreter-cmd) && $(includes) && $(libraries) ) -    { -        # Values to be extracted from python's sys module. These will be set by -        # the probe rule, above, using Jam's dynamic scoping. -        local sys-elements = version platform prefix exec_prefix executable ; -        local sys.$(sys-elements) ; - -        # Compute the string Python's sys.platform needs to match. If not -        # targeting Windows or cygwin we will assume only native builds can -        # possibly run, so we will not require a match and we leave sys.platform -        # blank. -        local platform ; -        switch $(target-os) -        { -            case windows : platform = win32 ; -            case cygwin : platform = cygwin ; -        } - -        while $(cmds-to-try) -        { -            # Pop top command. -            local cmd = $(cmds-to-try[1]) ; -            cmds-to-try = $(cmds-to-try[2-]) ; - -            debug-message Checking interpreter command \"$(cmd)\"... ; -            if [ probe $(cmd) ] -            { -                fallback-version ?= $(sys.version) ; - -                # Check for version/platform validity. -                for local x in version platform -                { -                    if $($(x)) && $($(x)) != $(sys.$(x)) -                    { -                        debug-message ...$(x) "mismatch (looking for" -                            $($(x)) but found $(sys.$(x))")" ; -                        cmd = ; -                    } -                } - -                if $(cmd) -                { -                    debug-message ...requested configuration matched! ; - -                    exec-prefix = $(sys.exec_prefix) ; - -                    compute-default-paths $(target-os) : $(sys.version) : -                        $(sys.prefix) : $(sys.exec_prefix) ; - -                    version = $(sys.version) ; -                    interpreter-cmd ?= $(cmd) ; -                    cmds-to-try = ;  # All done. -                } -            } -            else -            { -                debug-message ...does not invoke a working interpreter ; -            } -        } -    } - -    # Anything left to compute? -    if $(includes) && $(libraries) -    { -        .configured = true ; -    } -    else -    { -        version ?= $(fallback-version) ; -        version ?= 2.5 ; -        exec-prefix ?= $(prefix) ; -        compute-default-paths $(target-os) : $(version) : $(prefix:E=) ; -    } - -    if ! $(interpreter-cmd) -    { -        fallback-cmd ?= python ; -        debug-message No working Python interpreter found. ; -        if [ os.name ] != NT || ! 
[ invokes-cygwin-symlink $(fallback-cmd) ] -        { -            interpreter-cmd = $(fallback-cmd) ; -            debug-message falling back to \"$(interpreter-cmd)\" ; -        } -    } - -    includes = [ path-to-native $(includes) ] ; -    libraries = [ path-to-native $(libraries) ] ; - -    debug-message "Details of this Python configuration:" ; -    debug-message "  interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ; -    debug-message "  include path:" \"$(includes:E=<empty>)\" ; -    debug-message "  library path:" \"$(libraries:E=<empty>)\" ; -    if $(target-os) = windows -    { -        debug-message "  DLL search path:" \"$(exec-prefix:E=<empty>)\" ; -    } - -    # -    # End autoconfiguration sequence. -    # -    local target-requirements = $(condition) ; - -    # Add the version, if any, to the target requirements. -    if $(version) -    { -        if ! $(version) in [ feature.values python ] -        { -            feature.extend python : $(version) ; -        } -        target-requirements += <python>$(version:E=default) ; -    } -  -    target-requirements += <target-os>$(target-os) ; - -    # See if we can find a framework directory on darwin. -    local framework-directory ; -    if $(target-os) = darwin -    { -        # Search upward for the framework directory. -        local framework-directory = $(libraries[-1]) ; -        while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework -        { -            framework-directory = $(framework-directory:D) ; -        } - -        if $(framework-directory:D=) = Python.framework -        { -            debug-message framework directory is \"$(framework-directory)\" ; -        } -        else -        { -            debug-message "no framework directory found; using library path" ; -            framework-directory = ; -        } -    } - -    local dll-path = $(libraries) ; - -    # Make sure that we can find the Python DLL on Windows. -    if ( $(target-os) = windows ) && $(exec-prefix) -    { -        dll-path += $(exec-prefix) ; -    } - -    # -    # Prepare usage requirements. -    # -    local usage-requirements = [ system-library-dependencies $(target-os) ] ; -    usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ; -    if <python-debugging>on in $(condition) -    { -        if $(target-os) = windows -        { -            # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define -            # Py_DEBUG we will get multiple definition warnings. -            usage-requirements += <define>_DEBUG ; -        } -        else -        { -            usage-requirements += <define>Py_DEBUG ; -        } -    } - -    # Global, but conditional, requirements to give access to the interpreter -    # for general utilities, like other toolsets, that run Python scripts. -    toolset.add-requirements -        $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ; - -    # Register the right suffix for extensions. -    register-extension-suffix $(extension-suffix) : $(target-requirements) ; - -    # -    # Declare the "python" target. This should really be called -    # python_for_embedding. -    # - -    if $(framework-directory) -    { -        alias python -          : -          : $(target-requirements) -          : -          : $(usage-requirements) <framework>$(framework-directory) -          ; -    } -    else -    { -        declare-libpython-target $(version) : $(target-requirements) ; - -        # This is an evil hack.  
On, Windows, when Python is embedded, nothing -        # seems to set up sys.path to include Python's standard library -        # (http://article.gmane.org/gmane.comp.python.general/544986). The evil -        # here, aside from the workaround necessitated by Python's bug, is that: -        # -        # a. we're guessing the location of the python standard library from the -        #    location of pythonXX.lib -        # -        # b. we're hijacking the <testing.launcher> property to get the -        #    environment variable set up, and the user may want to use it for -        #    something else (e.g. launch the debugger). -        local set-PYTHONPATH ; -        if $(target-os) = windows -        { -            set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH : -                $(libraries:D)/Lib ] ; -        } - -        alias python -          : -          : $(target-requirements) -          : -            # Why python.lib must be listed here instead of along with the -            # system libs is a mystery, but if we do not do it, on cygwin, -            # -lpythonX.Y never appears in the command line (although it does on -            # linux). -          : $(usage-requirements) -            <testing.launcher>$(set-PYTHONPATH) -              <library-path>$(libraries) <library>python.lib -          ; -    } - -    # On *nix, we do not want to link either Boost.Python or Python extensions -    # to libpython, because the Python interpreter itself provides all those -    # symbols. If we linked to libpython, we would get duplicate symbols. So -    # declare two targets -- one for building extensions and another for -    # embedding. -    # -    # Unlike most *nix systems, Mac OS X's linker does not permit undefined -    # symbols when linking a shared library. So, we still need to link against -    # the Python framework, even when building extensions. Note that framework -    # builds of Python always use shared libraries, so we do not need to worry -    # about duplicate Python symbols. -    if $(target-os) in windows cygwin darwin -    { -        alias python_for_extensions : python : $(target-requirements) ; -    } -    # On AIX we need Python extensions and Boost.Python to import symbols from -    # the Python interpreter. Dynamic libraries opened with dlopen() do not -    # inherit the symbols from the Python interpreter. 
-    else if $(target-os) = aix -    { -        alias python_for_extensions -            : -            : $(target-requirements) -            : -            : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp -            ; -    } -    else -    { -        alias python_for_extensions -            : -            : $(target-requirements) -            : -            : $(usage-requirements) -            ; -    } -} - - -rule configured ( ) -{ -     return $(.configured) ; -} - - -type.register PYTHON_EXTENSION : : SHARED_LIB ; - - -local rule register-extension-suffix ( root : condition * ) -{ -    local suffix ; - -    switch [ feature.get-values target-os : $(condition) ] -    { -        case windows : suffix = pyd ; -        case cygwin : suffix = dll ; -        case hpux : -        { -            if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4 -            { -                suffix = sl ; -            } -            else -            { -                suffix = so ; -            } -        } -        case * : suffix = so ; -    } - -    type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ; -} - - -# Unset 'lib' prefix for PYTHON_EXTENSION -type.set-generated-target-prefix PYTHON_EXTENSION : : "" ; - - -rule python-extension ( name : sources * : requirements * : default-build * : -                        usage-requirements * ) -{ -    if [ configured ] -    { -        requirements += <use>/python//python_for_extensions ; -    } -    requirements += <suppress-import-lib>true ; - -    local project = [ project.current ] ; - -    targets.main-target-alternative -        [ new typed-target $(name) : $(project) : PYTHON_EXTENSION -            : [ targets.main-target-sources $(sources) : $(name) ] -            : [ targets.main-target-requirements $(requirements) : $(project) ] -            : [ targets.main-target-default-build $(default-build) : $(project) ] -        ] ; -} - -IMPORT python : python-extension : : python-extension ; - -rule py2to3 -{ -    common.copy $(>) $(<) ; -    2to3 $(<) ; -} - -actions 2to3 -{ -    2to3 -wn "$(<)" -    2to3 -dwn "$(<)" -} - - -# Support for testing. -type.register PY : py ; -type.register RUN_PYD_OUTPUT ; -type.register RUN_PYD : : TEST ; - - -class python-test-generator : generator -{ -    import set ; - -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -        self.composing = true ; -    } - -    rule run ( project name ? : property-set : sources * : multiple ? ) -    { -        local pyversion = [ $(property-set).get <python> ] ; -        local python ; -        local other-pythons ; -     -        # Make new target that converting Python source by 2to3 when running with Python 3. -        local rule make-2to3-source ( source ) -        { -            if $(pyversion) >= 3.0 -            { -                local a = [ new action $(source) : python.py2to3 : $(property-set) ] ; -                local t =  [ utility.basename [ $(s).name ] ] ; -                local p = [ new file-target $(t) : PY : $(project) : $(a) ] ; -                return $(p) ; -            } -            else -            { -                return $(source) ; -            } -        } - -        for local s in $(sources) -        { -            if [ $(s).type ] = PY -            { -                if ! $(python) -                { -                    # First Python source ends up on command line. 
-                    python = [ make-2to3-source $(s) ] ; - -                } -                else -                { -                    # Other Python sources become dependencies. -                    other-pythons += [ make-2to3-source $(s) ] ; -                } -            } -        } - -        local extensions ; -        for local s in $(sources) -        { -            if [ $(s).type ] = PYTHON_EXTENSION -            { -                extensions += $(s) ; -            } -        } - -        local libs ; -        for local s in $(sources) -        { -            if [ type.is-derived [ $(s).type ] LIB ] -              && ! $(s) in $(extensions) -            { -                libs += $(s) ; -            } -        } - -        local new-sources ; -        for local s in $(sources) -        { -            if [ type.is-derived [ $(s).type ] CPP ] -            { -                local name = [ utility.basename [ $(s).name ] ] ; -                if $(name) = [ utility.basename [ $(python).name ] ] -                { -                    name = $(name)_ext ; -                } -                local extension = [ generators.construct $(project) $(name) : -                  PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ; - -                # The important part of usage requirements returned from -                # PYTHON_EXTENSION generator are xdll-path properties that will -                # allow us to find the python extension at runtime. -                property-set = [ $(property-set).add $(extension[1]) ] ; - -                # Ignore usage requirements. We're a top-level generator and -                # nobody is going to use what we generate. -                new-sources += $(extension[2-]) ; -            } -        } - -        property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ; - -        result = [ construct-result $(python) $(extensions) $(new-sources) : -            $(project) $(name) : $(property-set) ] ; -    } -} - - -generators.register -  [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ; - -generators.register-standard testing.expect-success -  : RUN_PYD_OUTPUT : RUN_PYD ; - - -# There are two different ways of spelling OS names. One is used for [ os.name ] -# and the other is used for the <host-os> and <target-os> properties. Until that -# is remedied, this sets up a crude mapping from the latter to the former, that -# will work *for the purposes of cygwin/NT cross-builds only*. Could not think -# of a better name than "translate". -# -.translate-os-windows = NT ; -.translate-os-cygwin = CYGWIN ; -local rule translate-os ( src-os ) -{ -    local x = $(.translate-os-$(src-os)) [ os.name ] ; -    return $(x[1]) ; -} - - -# Extract the path to a single ".pyd" source. This is used to build the -# PYTHONPATH for running bpl tests. -# -local rule pyd-pythonpath ( source ) -{ -    return [ on $(source) return $(LOCATE) $(SEARCH) ] ; -} - - -# The flag settings on testing.capture-output do not apply to python.capture -# output at the moment. Redo this explicitly. -toolset.flags python.capture-output ARGS <testing.arg> ; - - -rule capture-output ( target : sources * : properties * ) -{ -    # Setup up a proper DLL search path. Here, $(sources[1]) is a python module -    # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to -    # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not -    # consulted. Move it over explicitly. 
-    RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ; - -    PYTHONPATH  = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ; -    PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ; - -    # After test is run, we remove the Python module, but not the Python script. -    testing.capture-output $(target) : $(sources[1]) : $(properties) : -        $(sources[2-]) ; - -    # PYTHONPATH is different; it will be interpreted by whichever Python is -    # invoked and so must follow path rules for the target os. The only OSes -    # where we can run python for other OSes currently are NT and CYGWIN so we -    # only need to handle those cases. -    local target-os = [ feature.get-values target-os : $(properties) ] ; -    # Oddly, host-os is not in properties, so grab the default value. -    local host-os = [ feature.defaults host-os ] ; -    host-os = $(host-os:G=) ; -    if $(target-os) != $(host-os) -    { -        PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path : -            $(PYTHONPATH) ] ; -    } -    local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ; -    local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH : -        $(PYTHONPATH:J=$(path-separator)) ] ; -    LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ; -} - - -rule bpl-test ( name : sources * : requirements * ) -{ -    local s ; -    sources ?= $(name).py $(name).cpp ; -    return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python -        : $(requirements) : $(name) ] ; -} - - -IMPORT $(__name__) : bpl-test : : bpl-test ; diff --git a/jam-files/boost-build/tools/qcc.jam b/jam-files/boost-build/tools/qcc.jam deleted file mode 100644 index 4f2a4fc1..00000000 --- a/jam-files/boost-build/tools/qcc.jam +++ /dev/null @@ -1,236 +0,0 @@ -#  Copyright (c) 2001 David Abrahams. -#  Copyright (c) 2002-2003 Rene Rivera. -#  Copyright (c) 2002-2003 Vladimir Prus. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import "class" : new ; -import common ; -import errors ; -import feature ; -import generators ; -import os ; -import property ; -import set ; -import toolset ; -import type ; -import unix ; - -feature.extend toolset : qcc ; - -toolset.inherit-generators qcc : unix : unix.link unix.link.dll ; -generators.override builtin.lib-generator : qcc.prebuilt ; -toolset.inherit-flags qcc : unix ; -toolset.inherit-rules qcc : unix ; - -# Initializes the qcc toolset for the given version. If necessary, command may -# be used to specify where the compiler is located. The parameter 'options' is a -# space-delimited list of options, each one being specified as -# <option-name>option-value. Valid option names are: cxxflags, linkflags and -# linker-type. Accepted values for linker-type are gnu and sun, gnu being the -# default. -# -# Example: -#   using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ; -# -rule init ( version ? 
: command * : options * ) -{ -    local condition = [ common.check-init-parameters qcc : version $(version) ] ; -    local command = [ common.get-invocation-command qcc : QCC : $(command) ] ; -    common.handle-options qcc : $(condition) : $(command) : $(options) ; -} - - -generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ; -generators.register-c-compiler qcc.compile.c   : C   : OBJ : <toolset>qcc ; -generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ; - - -# Declare flags for compilation. -toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ; - -# Declare flags and action for compilation. -toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ; -toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ; -toolset.flags qcc.compile OPTIONS <optimization>space : -Os ; - -toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ; -toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ; -toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ; - -toolset.flags qcc.compile OPTIONS <warnings>off : -w ; -toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ; -toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ; - -toolset.flags qcc.compile OPTIONS <profiling>on : -p ; - -toolset.flags qcc.compile OPTIONS <cflags> ; -toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ; -toolset.flags qcc.compile DEFINES <define> ; -toolset.flags qcc.compile INCLUDES <include> ; - -toolset.flags qcc.compile OPTIONS <link>shared : -shared ; - -toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ; - - -rule compile.c++ -{ -    # Here we want to raise the template-depth parameter value to something -    # higher than the default value of 17. Note that we could do this using the -    # feature.set-default rule but we do not want to set the default value for -    # all toolsets as well. -    # -    # TODO: This 'modified default' has been inherited from some 'older Boost -    # Build implementation' and has most likely been added to make some Boost -    # library parts compile correctly. We should see what exactly prompted this -    # and whether we can get around the problem more locally. -    local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ; -    if ! $(template-depth) -    { -        TEMPLATE_DEPTH on $(1) = 128 ; -    } -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.asm -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - - -# The class checking that we do not try to use the <runtime-link>static property -# while creating or using a shared library, since it is not supported by qcc/ -# /libc. -# -class qcc-linking-generator : unix-linking-generator -{ -    rule generated-targets ( sources + : property-set : project name ? ) -    { -        if <runtime-link>static in [ $(property-set).raw ] -        { -            local m ; -            if [ id ] = "qcc.link.dll" -            { -                m = "on qcc, DLL can't be build with <runtime-link>static" ; -            } -            if ! 
$(m) -            { -                for local s in $(sources) -                { -                    local type = [ $(s).type ] ; -                    if $(type) && [ type.is-derived $(type) SHARED_LIB ] -                    { -                        m = "on qcc, using DLLS together with the <runtime-link>static options is not possible " ; -                    } -                } -            } -            if $(m) -            { -                errors.user-error $(m) : "It is suggested to use" -                    "<runtime-link>static together with <link>static." ; -            } -        } - -        return [ unix-linking-generator.generated-targets -            $(sources) : $(property-set) : $(project) $(name) ] ; -    } -} - -generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE -    : <toolset>qcc ] ; - -generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ -    : SHARED_LIB : <toolset>qcc ] ; - -generators.override qcc.prebuilt : builtin.prebuilt ; -generators.override qcc.searched-lib-generator : searched-lib-generator ; - - -# Declare flags for linking. -# First, the common flags. -toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ; -toolset.flags qcc.link OPTIONS <profiling>on : -p ; -toolset.flags qcc.link OPTIONS <linkflags> ; -toolset.flags qcc.link LINKPATH <library-path> ; -toolset.flags qcc.link FINDLIBS-ST <find-static-library> ; -toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ; -toolset.flags qcc.link LIBRARIES <library-file> ; - -toolset.flags qcc.link FINDLIBS-SA : m ; - -# For <runtime-link>static we made sure there are no dynamic libraries in the -# link. -toolset.flags qcc.link OPTIONS <runtime-link>static : -static ; - -# Assuming this is just like with gcc. -toolset.flags qcc.link RPATH : <dll-path> : unchecked ; -toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ; - - -# Declare actions for linking. -# -rule link ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -    # Serialize execution of the 'link' action, since running N links in -    # parallel is just slower. For now, serialize only qcc links while it might -    # be a good idea to serialize all links. -    JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) -} - - -# Always remove archive and start again. Here is the rationale from Andre Hentz: -# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c, -# updated my Jamfiles and rebuilt. My program was crashing with absurd errors. -# After some debugging I traced it back to the fact that a1.o was *still* in -# liba.a -RM = [ common.rm-command ] ; -if [ os.name ] = NT -{ -    RM = "if exist \"$(<[1])\" DEL \"$(<[1])\""  ; -} - - -# Declare action for creating static libraries. The 'r' letter means to add -# files to the archive with replacement. Since we remove the archive, we do not -# care about replacement, but there is no option to "add without replacement". -# The 'c' letter suppresses warnings in case the archive does not exists yet. -# That warning is produced only on some platforms, for whatever reasons. 
-# -actions piecemeal archive -{ -    $(RM) "$(<)" -    ar rc "$(<)" "$(>)" -} - - -rule link.dll ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -    JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ; -} - - -# Differ from 'link' above only by -shared. -# -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)"  "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) -} diff --git a/jam-files/boost-build/tools/qt.jam b/jam-files/boost-build/tools/qt.jam deleted file mode 100644 index 8aa7ca26..00000000 --- a/jam-files/boost-build/tools/qt.jam +++ /dev/null @@ -1,17 +0,0 @@ -#  Copyright (c) 2006 Vladimir Prus. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -#  Forwarning toolset file to Qt GUI library. Forwards to the toolset file -#  for the current version of Qt. - -import qt4 ; - -rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * ) -{ -    qt4.init $(prefix) : $(full_bin)  : $(full_inc) : $(full_lib) : $(version) : $(condition) ; -} - - diff --git a/jam-files/boost-build/tools/qt3.jam b/jam-files/boost-build/tools/qt3.jam deleted file mode 100644 index f82cf0ac..00000000 --- a/jam-files/boost-build/tools/qt3.jam +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright 2006 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -# Support for the Qt GUI library version 3 -# (http://www.trolltech.com/products/qt3/index.html). -# For new developments, it is recommended to use Qt4 via the qt4 Boost.Build -# module. - -import modules ; -import feature ; -import errors ; -import type ; -import "class" : new ; -import generators ; -import project ; -import toolset : flags ; - -# Convert this module into a project, so that we can declare targets here. -project.initialize $(__name__) ; -project qt3 ; - - -# Initialized the QT support module. The 'prefix' parameter tells where QT is -# installed. When not given, environmental variable QTDIR should be set. -# -rule init ( prefix ? ) -{ -    if ! $(prefix) -    { -        prefix = [ modules.peek : QTDIR ] ; -        if ! $(prefix)  -        { -            errors.error  -              "QT installation prefix not given and QTDIR variable is empty" ; -        }         -    } -  -    if $(.initialized) -    { -        if $(prefix) != $(.prefix) -        { -            errors.error  -              "Attempt the reinitialize QT with different installation prefix" ; -        }         -    }  -    else -    {             -        .initialized = true ; -        .prefix = $(prefix) ; -         -        generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ; -        # Note: the OBJ target type here is fake, take a look at -        # qt4.jam/uic-h-generator for explanations that apply in this case as -        # well. -        generators.register [ new moc-h-generator-qt3  -            qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ; -         -        # The UI type is defined in types/qt.jam, and UIC_H is only used in -        # qt.jam, but not in qt4.jam, so define it here. 
-        type.register UIC_H : : H ; -         -        generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ; -         -        # The following generator is used to convert UI files to CPP. It creates -        # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also -        # returns UIC_H target, so that it can be mocced. -        class qt::uic-cpp-generator : generator -        { -            rule __init__ ( ) -            { -                generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ; -            } -                         -            rule run ( project name ? : properties * : sources + ) -            { -                # Consider this: -                #    obj test : test_a.cpp : <optimization>off ; -                # -                # This generator will somehow be called in this case, and, -                # will fail -- which is okay. However, if there are <library> -                # properties they will be converted to sources, so the size of  -                # 'sources' will be more than 1. In this case, the base generator -                # will just crash -- and that's not good. Just use a quick test -                # here. -                                 -                local result ; -                if ! $(sources[2]) -                {     -                    # Construct CPP as usual -                    result = [ generator.run $(project) $(name)  -                      : $(properties) : $(sources) ] ; -                     -                    # If OK, process UIC_H with moc. It's pretty clear that -                    # the object generated with UIC will have Q_OBJECT macro. -                    if $(result) -                    { -                        local action = [ $(result[1]).action ] ; -                        local sources = [ $(action).sources ] ; -                        local mocced = [ generators.construct $(project) $(name) -                          : CPP : $(properties) : $(sources[2]) ] ; -                        result += $(mocced[2-]) ; -                    } -                } -                             -                return $(result) ; -            }                -        } -     -        generators.register [ new qt::uic-cpp-generator ] ; -         -        # Finally, declare prebuilt target for QT library. -        local usage-requirements =  -             <include>$(.prefix)/include  -             <dll-path>$(.prefix)/lib -             <library-path>$(.prefix)/lib      -             <allow>qt3 -             ;   -        lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ; -        lib qt : : <name>qt <threading>single : : $(usage-requirements) ;         -    } -} - -class moc-h-generator-qt3 : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    {        -        if ! 
$(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP -        {                               -            name = [ $(sources[1]).name ] ; -            name = $(name:B) ; -             -            local a = [ new action $(sources[1]) : qt3.moc.cpp : -              $(property-set) ] ; -             -            local target = [  -              new file-target $(name) : MOC : $(project) : $(a) ] ; -             -            local r = [ virtual-target.register $(target) ] ;  -                                   -            # Since this generator will return a H target, the linking generator -            # won't use it at all, and won't set any dependency on it. However,  -            # we need the target to be seen by bjam, so that the dependency from -            # sources to this generated header is detected -- if Jam does not -            # know about this target, it won't do anything. -            DEPENDS all : [ $(r).actualize ] ; -             -            return $(r) ;             -        }         -    }     -} - - -# Query the installation directory. This is needed in at least two scenarios. -# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt -# plugins to the Qt-Tree. -# -rule directory  -{  -    return $(.prefix) ;  -}  - -# -f forces moc to include the processed source file. Without it, it would think -# that .qpp is not a header and would not include it from the generated file. -# -actions moc  -{ -    $(.prefix)/bin/moc -f $(>) -o $(<) -} - -# When moccing .cpp files, we don't need -f, otherwise generated code will -# include .cpp and we'll get duplicated symbols. -# -actions moc.cpp -{ -    $(.prefix)/bin/moc $(>) -o $(<) -} - - -space = " " ; - -# Sometimes it's required to make 'plugins' available during uic invocation. To -# help with this we add paths to all dependency libraries to uic commane line. -# The intention is that it's possible to write -#     -#     exe a : ... a.ui ... : <uses>some_plugin ;  -#  -# and have everything work. We'd add quite a bunch of unrelated paths but it -# won't hurt. -# -flags qt3.uic-h LIBRARY_PATH <xdll-path> ; -actions uic-h -{ -    $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH) -} - - -flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ; -# The second target is uic-generated header name. It's placed in build dir, but -# we want to include it using only basename. -actions uic-cpp -{ -    $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH) -} diff --git a/jam-files/boost-build/tools/qt4.jam b/jam-files/boost-build/tools/qt4.jam deleted file mode 100644 index 71d1b762..00000000 --- a/jam-files/boost-build/tools/qt4.jam +++ /dev/null @@ -1,724 +0,0 @@ -# Copyright 2002-2006 Vladimir Prus -# Copyright 2005 Alo Sarv -# Copyright 2005-2009 Juergen Hunold -# -# Distributed under the Boost Software License, Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -# Qt4 library support module -# -# The module attempts to auto-detect QT installation location from QTDIR -# environment variable; failing that, installation location can be passed as -# argument: -# -# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ; -# -# The module supports code generation from .ui and .qrc files, as well as -# running the moc preprocessor on headers. Note that you must list all your -# moc-able headers in sources. 
-# -# Example: -# -#     exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc -#                 /qt4//QtGui /qt4//QtNetwork ; -# -# It's also possible to run moc on cpp sources: -# -#   import cast ; -# -#   exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ; -# -# When moccing source file myapp.cpp you need to include "myapp.moc" from -# myapp.cpp. When moccing .h files, the output of moc will be automatically -# compiled and linked in, you don't need any includes. -# -# This is consistent with Qt guidelines: -# http://doc.trolltech.com/4.0/moc.html - -import modules ; -import feature ; -import errors ; -import type ; -import "class" : new ; -import generators ; -import project ; -import toolset : flags ; -import os ; -import virtual-target ; -import scanner ; - -# Qt3Support control feature -# -# Qt4 configure defaults to build Qt4 libraries with Qt3Support. -# The autodetection is missing, so we default to disable Qt3Support. -# This prevents the user from inadvertedly using a deprecated API. -# -# The Qt3Support library can be activated by adding -# "<qt3support>on" to requirements -# -# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS" -# to get warnings about deprecated Qt3 support funtions and classes. -# Files ported by the "qt3to4" conversion tool contain _tons_ of -# warnings, so this define is not set as default. -# -# Todo: Detect Qt3Support from Qt's configure data. -#       Or add more auto-configuration (like python). -feature.feature qt3support : off on : propagated link-incompatible ; - -# The Qt version used for requirements -# Valid are <qt>4.4 or <qt>4.5.0 -# Auto-detection via qmake sets '<qt>major.minor.patch' -feature.feature qt : : propagated ; - -project.initialize $(__name__) ; -project qt ; - -# Save the project so that we tolerate 'import + using' combo. -.project = [ project.current ] ; - -# Helper utils for easy debug output -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = TRUE ; -} - -local rule debug-message ( message * ) -{ -    if $(.debug-configuration) = TRUE -    { -        ECHO notice: [qt4-cfg] $(message) ; -    } -} - -# Capture qmake output line by line -local rule read-output ( content ) -{ -    local lines ; -    local nl = " -" ; -    local << = "([^$(nl)]*)[$(nl)](.*)" ; -    local line+ = [ MATCH "$(<<)" : "$(content)" ] ; -    while $(line+) -    { -        lines += $(line+[1]) ; -        line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ; -    } -    return $(lines) ; -} - -# Capture Qt version from qmake -local rule check-version ( bin_prefix ) -{ -    full-cmd = $(bin_prefix)"/qmake -v" ; -    debug-message Running '$(full-cmd)' ; -    local output = [ SHELL $(full-cmd) ] ; -    for line in [ read-output $(output) ] -    { -        # Parse the output to get all the results. -        if [ MATCH "QMake" : $(line) ] -        { -            # Skip first line of output -        } -        else -        { -            temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ; -        } -    } -    return $(temp) ; -} - -# Validate the version string and extract the major/minor part we care about. -# -local rule split-version ( version ) -{ -    local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ; -    if ! $(major-minor[2]) || $(major-minor[3]) -    { -        ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ; - -        # Add a zero to account for the missing digit if necessary. 
-        major-minor += 0 ; -    } - -    return $(major-minor[1]) $(major-minor[2]) ; -} - -# Initialize the QT support module. -# Parameters: -# - 'prefix'    parameter tells where Qt is installed. -# - 'full_bin'  optional full path to Qt binaries (qmake,moc,uic,rcc) -# - 'full_inc'  optional full path to Qt top-level include directory -# - 'full_lib'  optional full path to Qt library directory -# - 'version'   optional version of Qt, else autodetected via 'qmake -v' -# - 'condition' optional requirements -rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * ) -{ -    project.push-current $(.project) ; - -    debug-message "==== Configuring Qt ... ====" ; -    for local v in version cmd-or-prefix includes libraries condition -    { -        if $($(v)) -        { -            debug-message "  user-specified "$(v): '$($(v))' ; -        } -    } - -    # Needed as default value -    .prefix = $(prefix) ; - -    # pre-build paths to detect reinitializations changes -    local inc_prefix lib_prefix bin_prefix ; -    if $(full_inc) -    { -        inc_prefix = $(full_inc) ; -    } -    else -    { -        inc_prefix = $(prefix)/include ; -    } -    if $(full_lib) -    { -        lib_prefix = $(full_lib) ; -    } -    else -    { -        lib_prefix = $(prefix)/lib ; -    } -    if $(full_bin) -    { -        bin_prefix = $(full_bin) ; -    } -    else -    { -        bin_prefix = $(prefix)/bin ; -    } - -    # Globally needed variables -    .incprefix = $(inc_prefix) ; -    .libprefix = $(lib_prefix) ; -    .binprefix = $(bin_prefix) ; - -    if ! $(.initialized) -    { -        # Make sure this is initialised only once -        .initialized = true ; - -        # Generates cpp files from header files using "moc" tool -        generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ; - -        # The OBJ result type is a fake, 'H' will be really produced. See -        # comments on the generator class, defined below the 'init' function. -        generators.register [ new uic-generator qt4.uic : UI : OBJ : -            <allow>qt4  ] ; - -        # The OBJ result type is a fake here too. -        generators.register [ new moc-h-generator -            qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ; - -        generators.register [ new moc-inc-generator -            qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ; - -        # Generates .cpp files from .qrc files. -        generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ; - -        # dependency scanner for wrapped files. -        type.set-scanner QRC : qrc-scanner ; - -        # Save value of first occuring prefix -        .PREFIX = $(prefix) ; -    } - -    if $(version) -    { -        major-minor = [ split-version $(version) ] ; -        version = $(major-minor:J=.) ; -    } -    else -    { -        version = [ check-version $(bin_prefix) ] ; -        if $(version) -        { -            version = $(version:J=.) ; -        } -        debug-message Detected version '$(version)' ; -    } - -    local target-requirements = $(condition) ; - -    # Add the version, if any, to the target requirements. -    if $(version) -    { -        if ! $(version) in [ feature.values qt ] -        { -            feature.extend qt : $(version) ; -        } -        target-requirements += <qt>$(version:E=default) ; -    } - -    local target-os = [ feature.get-values target-os : $(condition) ] ; -    if ! 
$(target-os) -    { -        target-os ?= [ feature.defaults target-os ] ; -        target-os = $(target-os:G=) ; -        target-requirements += <target-os>$(target-os) ; -    } - -    # Build exact requirements for the tools -    local tools-requirements = $(target-requirements:J=/) ; - -    debug-message "Details of this Qt configuration:" ; -    debug-message "  prefix:      " '$(prefix:E=<empty>)' ; -    debug-message "  binary path: " '$(bin_prefix:E=<empty>)' ; -    debug-message "  include path:" '$(inc_prefix:E=<empty>)' ; -    debug-message "  library path:" '$(lib_prefix:E=<empty>)' ; -    debug-message "  target requirements:" '$(target-requirements)' ; -    debug-message "  tool requirements:  " '$(tools-requirements)' ; - -    # setup the paths for the tools -    toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; -    toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; -    toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ; - -    # TODO: 2009-02-12: Better support for directories -    # Most likely needed are separate getters for: include,libraries,binaries and sources. -    toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ; - -    # Test for a buildable Qt. -    if [ glob $(.prefix)/Jamroot ] -    { -       .bjam-qt = true - -       # this will declare QtCore (and qtmain on <target-os>windows) -       add-shared-library QtCore ; -   } -   else -   # Setup common pre-built Qt. -   # Special setup for QtCore on which everything depends -   { -       local usage-requirements = -           <include>$(.incprefix) -           <library-path>$(.libprefix) -           <dll-path>$(.libprefix) -           <threading>multi -           <allow>qt4 ; - -       local suffix ; - -       # Since Qt-4.2, debug versions on unix have to be built -       # separately and therefore have no suffix. -       .suffix_version = "" ; -       .suffix_debug = "" ; - -       # Control flag for auto-configuration of the debug libraries. -       # This setup requires Qt 'configure -debug-and-release'. -       # Only available on some platforms. -       # ToDo: 2009-02-12: Maybe throw this away and -       # require separate setup with <variant>debug as condition. -       .have_separate_debug = FALSE ; - -       # Setup other platforms -       if $(target-os) in windows cygwin -       { -           .have_separate_debug = TRUE ; - -           # On NT, the libs have "4" suffix, and "d" suffix in debug builds. -           .suffix_version = "4" ; -           .suffix_debug = "d" ; - -           # On Windows we must link against the qtmain library -           lib qtmain -               : # sources -               : # requirements -                  <name>qtmain$(.suffix_debug) -                  <variant>debug -                  $(target-requirements) -               ; - -           lib qtmain -               : # sources -               : # requirements -                   <name>qtmain -                   $(target-requirements) -               ; -       } -       else if $(target-os) = darwin -       { -           # On MacOS X, both debug and release libraries are available. 
-           .suffix_debug = "_debug" ; - -           .have_separate_debug = TRUE ; - -           alias qtmain ; -       } -       else -       { -           alias qtmain : : $(target-requirements) ; -       } - -       lib QtCore : qtmain -           : # requirements -             <name>QtCore$(.suffix_version) -             $(target-requirements) -           : # default-build -           : # usage-requirements -             <define>QT_CORE_LIB -             <define>QT_NO_DEBUG -             <include>$(.incprefix)/QtCore -             $(usage-requirements) -           ; - -       if $(.have_separate_debug) = TRUE -       { -           debug-message Configure debug libraries with suffix '$(.suffix_debug)' ; - -           lib QtCore : $(main) -               : # requirements -                 <name>QtCore$(.suffix_debug)$(.suffix_version) -                 <variant>debug -                 $(target-requirements) -               : # default-build -               : # usage-requirements -                 <define>QT_CORE_LIB -                 <include>$(.incprefix)/QtCore -                 $(usage-requirements) -               ; -        } -    } - -    # Initialising the remaining libraries is canonical -    # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include' -    # 'include' only for non-canonical include paths. -    add-shared-library QtGui     : QtCore : QT_GUI_LIB     : $(target-requirements) ; -    add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ; -    add-shared-library QtSql     : QtCore : QT_SQL_LIB     : $(target-requirements) ; -    add-shared-library QtXml     : QtCore : QT_XML_LIB     : $(target-requirements) ; - -    add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql -                                  : QT_QT3SUPPORT_LIB QT3_SUPPORT -                                  : <qt3support>on $(target-requirements) ; - -    # Dummy target to enable "<qt3support>off" and -    # "<library>/qt//Qt3Support" at the same time. This enables quick -    # switching from one to the other for test/porting purposes. 
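To make the switch described above concrete (the dummy alias itself is declared on the next line), here is a hedged sketch with hypothetical target and source names, using the /qt4 project path from the usage example at the top of this module:

    # Hypothetical porting target: flipping <qt3support> between 'on' and
    # 'off' selects either the real Qt3Support library or the dummy alias,
    # without touching the source list.
    exe porting-check : legacy.cpp /qt4//QtGui /qt4//Qt3Support : <qt3support>on ;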
-    alias Qt3Support : : <qt3support>off $(target-requirements) ; - -    # OpenGl Support -    add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ; - -    # SVG-Support (Qt 4.1) -    add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ; - -    # Test-Support (Qt 4.1) -    add-shared-library QtTest : QtCore : : $(target-requirements) ; - -    # Qt designer library -    add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ; -    add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ; - -    # Support for dynamic Widgets (Qt 4.1) -    add-static-library  QtUiTools : QtGui QtXml : $(target-requirements) ; - -    # DBus-Support (Qt 4.2) -    add-shared-library QtDBus : QtXml : : $(target-requirements) ; - -    # Script-Engine (Qt 4.3) -    add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ; - -    # Tools for the Script-Engine (Qt 4.5) -    add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ; - -    # WebKit (Qt 4.4) -    add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ; - -    # Phonon Multimedia (Qt 4.4) -    add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ; - -    # Multimedia engine (Qt 4.6) -    add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ; - -    # XmlPatterns-Engine (Qt 4.4) -    add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ; - -    # Help-Engine (Qt 4.4) -    add-shared-library QtHelp    : QtGui QtSql QtXml : : $(target-requirements) ; -    add-shared-library QtCLucene : QCore QtSql QtXml : : $(target-requirements) ; - -    # QML-Engine (Qt 4.7) -    add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ; - -    # AssistantClient Support -    # Compat library removed in 4.7.0 -    # Pre-4.4 help system, use QtHelp for new programs -    if $(version) < "4.7" -    { -       add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ; -    } -    debug-message "==== Configured Qt-$(version) ====" ; - -    project.pop-current ; -} - -rule initialized ( ) -{ -    return $(.initialized) ; -} - - - -# This custom generator is needed because in QT4, UI files are translated only -# into H files, and no C++ files are created. Further, the H files need not be -# passed via MOC. The header is used only via inclusion. If we define a standard -# UI -> H generator, Boost.Build will run MOC on H, and then compile the -# resulting cpp. It will give a warning, since output from moc will be empty. -# -# This generator is declared with a UI -> OBJ signature, so it gets invoked when -# linking generator tries to convert sources to OBJ, but it produces target of -# type H. This is non-standard, but allowed. That header won't be mocced. -# -class uic-generator : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        if ! $(name) -        { -            name = [ $(sources[0]).name ] ; -            name = $(name:B) ; -        } - -        local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ; - -        # The 'ui_' prefix is to match qmake's default behavior. 
-        local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ; - -        local r = [ virtual-target.register $(target) ] ; - -        # Since this generator will return a H target, the linking generator -        # won't use it at all, and won't set any dependency on it. However, we -        # need the target to be seen by bjam, so that dependency from sources to -        # this generated header is detected -- if jam does not know about this -        # target, it won't do anything. -        DEPENDS all : [ $(r).actualize ] ; - -        return $(r) ; -    } -} - - -class moc-h-generator : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP -        { -            name = [ $(sources[0]).name ] ; -            name = $(name:B) ; - -            local a = [ new action $(sources[1]) : qt4.moc.inc : -                $(property-set) ] ; - -            local target = [ new file-target $(name) : MOC : $(project) : $(a) -                ] ; - -            local r = [ virtual-target.register $(target) ] ; - -            # Since this generator will return a H target, the linking generator -            # won't use it at all, and won't set any dependency on it. However, -            # we need the target to be seen by bjam, so that dependency from -            # sources to this generated header is detected -- if jam does not -            # know about this target, it won't do anything. -            DEPENDS all : [ $(r).actualize ] ; - -            return $(r) ; -        } -    } -} - - -class moc-inc-generator : generator -{ -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H -        { -            name = [ $(sources[0]).name ] ; -            name = $(name:B) ; - -            local a = [ new action $(sources[1]) : qt4.moc.inc : -                $(property-set) ] ; - -            local target = [ new file-target moc_$(name) : CPP : $(project) : -                $(a) ] ; - -            # Since this generator will return a H target, the linking generator -            # won't use it at all, and won't set any dependency on it. However, -            # we need the target to be seen by bjam, so that dependency from -            # sources to this generated header is detected -- if jam does not -            # know about this target, it won't do anything. -            DEPENDS all : [ $(target).actualize ] ; - -            return [ virtual-target.register $(target) ] ; -        } -    } -} - - -# Query the installation directory. This is needed in at least two scenarios. -# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt -# plugins to the Qt-Tree. -# -rule directory -{ -    return $(.PREFIX) ; -} - -# Add a shared Qt library. -rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * :  include ? ) -{ -     add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; -} - -# Add a static Qt library. -rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? 
) -{ -     add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; -} - -# Add a Qt library. -# Static libs are unversioned, whereas shared libs have the major number as suffix. -# Creates both release and debug versions on platforms where both are enabled by Qt configure. -# Flags: -# - lib-name Qt library Name -# - version  Qt major number used as shared library suffix (QtCore4.so) -# - depends-on other Qt libraries -# - usage-defines those are set by qmake, so set them when using this library -# - requirements addional requirements -# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name). -rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? ) -{ -    if $(.bjam-qt) -    { -        # Import Qt module -        # Eveything will be setup there -        alias $(lib-name) -           : $(.prefix)//$(lib-name) -           : -           : -           : <allow>qt4 ; -    } -    else -    { -        local real_include ; -        real_include ?= $(include) ; -        real_include ?= $(lib-name) ; - -        lib $(lib-name) -           : # sources -             $(depends-on) -           : # requirements -             <name>$(lib-name)$(version) -             $(requirements) -           : # default-build -           : # usage-requirements -             <define>$(usage-defines) -             <include>$(.incprefix)/$(real_include) -           ; - -        if $(.have_separate_debug) = TRUE -        { -            lib $(lib-name) -               : # sources -                 $(depends-on) -               : # requirements -                 <name>$(lib-name)$(.suffix_debug)$(version) -                 $(requirements) -                 <variant>debug -               : # default-build -               : # usage-requirements -                 <define>$(usage-defines) -                 <include>$(.incprefix)/$(real_include) -               ; -        } -    } - -    # Make library explicit so that a simple <use>qt4 will not bring in everything. -    # And some components like QtDBus/Phonon may not be available on all platforms. -    explicit $(lib-name) ; -} - -# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match. -# The exact match is the last one. - -# Get <include> and <defines> from current toolset. -flags qt4.moc INCLUDES <include> ; -flags qt4.moc DEFINES <define> ; - -# need a newline for expansion of DEFINES and INCLUDES in the response file. -.nl  = " -" ; - -# Processes headers to create Qt MetaObject information. Qt4-moc has its -# c++-parser, so pass INCLUDES and DEFINES. -# We use response file with one INCLUDE/DEFINE per line -# -actions moc -{ -    $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" -} - -# When moccing files for include only, we don't need -f, otherwise the generated -# code will include the .cpp and we'll get duplicated symbols. -# -actions moc.inc -{ -    $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" -} - - -# Generates source files from resource files. -# -actions rcc -{ -    $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<) -} - - -# Generates user-interface source from .ui files. -# -actions uic -{ -    $(.BINPREFIX[-1])/uic $(>) -o $(<) -} - - -# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore -# the "alias" attribute. 
See http://doc.trolltech.com/qt/resources.html for -# detailed documentation of the Qt Resource System. -# -class qrc-scanner : common-scanner -{ -    rule pattern ( ) -    { -        return "<file.*>(.*)</file>" ; -    } -} - - -# Wrapped files are "included". -scanner.register qrc-scanner : include ; diff --git a/jam-files/boost-build/tools/quickbook-config.jam b/jam-files/boost-build/tools/quickbook-config.jam deleted file mode 100644 index e983a78a..00000000 --- a/jam-files/boost-build/tools/quickbook-config.jam +++ /dev/null @@ -1,44 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for BoostBook tools. To use, just import this module. - -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ -    local boost-dir = ; -    for local R in snapshot cvs 1.33.0 -    { -        boost-dir += [ W32_GETREG -            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)" -            : "InstallRoot" ] ; -    } -    local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ; -    quickbook-path = $(quickbook-path[1]) ; -     -    if $(quickbook-path) -    { -        if --debug-configuration in [ modules.peek : ARGV ] -        { -            ECHO "notice:" using quickbook ":" $(quickbook-path) ; -        } -        using quickbook : $(quickbook-path) ; -    } -} -else -{ -    local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ; -    quickbook-path = $(quickbook-path[1]) ; -     -    if $(quickbook-path) -    { -        if --debug-configuration in [ modules.peek : ARGV ] -        { -            ECHO "notice:" using quickbook ":" $(quickbook-path) ; -        } -        using quickbook : $(quickbook-path) ; -    } -} diff --git a/jam-files/boost-build/tools/quickbook.jam b/jam-files/boost-build/tools/quickbook.jam deleted file mode 100644 index 6de2d42f..00000000 --- a/jam-files/boost-build/tools/quickbook.jam +++ /dev/null @@ -1,361 +0,0 @@ -# -#   Copyright (c) 2005 João Abecasis -#   Copyright (c) 2005 Vladimir Prus -#   Copyright (c) 2006 Rene Rivera -# -#   Distributed under the Boost Software License, Version 1.0. (See -#   accompanying file LICENSE_1_0.txt or copy at -#   http://www.boost.org/LICENSE_1_0.txt) -# - -# This toolset defines a generator to translate QuickBook to BoostBook. It can -# be used to generate nice (!) user documentation in different formats -# (pdf/html/...), from a single text file with simple markup. -# -# The toolset defines the QUICKBOOK type (file extension 'qbk') and -# a QUICKBOOK to XML (BOOSTBOOK) generator. -# -# -#   =========================================================================== -#   Q & A -#   =========================================================================== -# -#   If you don't know what this is all about, some Q & A will hopefully get you -#   up to speed with QuickBook and this toolset. -# -# -#   What is QuickBook ? -# -#       QuickBook is a WikiWiki style documentation tool geared towards C++ -#       documentation using simple rules and markup for simple formatting tasks. -#       QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook -#       documents are simple text files. A single QuickBook document can -#       generate a fully linked set of nice HTML and PostScript/PDF documents -#       complete with images and syntax-colorized source code. -# -# -#   Where can I get QuickBook ? 
-# -#       Quickbook can be found in Boost's repository, under the tools/quickbook -#       directory it was added there on Jan 2005, some time after the release of -#       Boost v1.32.0 and has been an integral part of the Boost distribution -#       since v1.33. -# -#       Here's a link to the SVN repository: -#           https://svn.boost.org/svn/boost/trunk/tools/quickbook -# -#       And to QuickBook's QuickBook-generated docs: -#           http://www.boost.org/doc/libs/release/tools/quickbook/index.html -# -# -#   How do I use QuickBook and this toolset in my projects ? -# -#       The minimal example is: -# -#           using boostbook ; -#           import quickbook ; -# -#           boostbook my_docs : my_docs_source.qbk ; -# -#       where my_docs is a target name and my_docs_source.qbk is a QuickBook -#       file. The documentation format to be generated is determined by the -#       boostbook toolset. By default html documentation should be generated, -#       but you should check BoostBook's docs to be sure. -# -# -#   What do I need ? -# -#       You should start by setting up the BoostBook toolset. Please refer to -#       boostbook.jam and the BoostBook documentation for information on how to -#       do this. -# -#       A QuickBook executable is also needed. The toolset will generate this -#       executable if it can find the QuickBook sources. The following -#       directories will be searched: -# -#           BOOST_ROOT/tools/quickbook/ -#           BOOST_BUILD_PATH/../../quickbook/ -# -#       (BOOST_ROOT and BOOST_BUILD_PATH are environment variables) -# -#       If QuickBook sources are not found the toolset will then try to use -#       the shell command 'quickbook'. -# -# -#   How do I provide a custom QuickBook executable ? -# -#       You may put the following in your user-config.jam or site-config.jam: -# -#           using quickbook : /path/to/quickbook ; -# -#       or, if 'quickbook' can be found in your PATH, -# -#           using quickbook : quickbook ; -# -# -#   For convenience three alternatives are tried to get a QuickBook executable: -# -#       1.  If the user points us to the a QuickBook executable, that is used. -# -#       2.  Otherwise, we search for the QuickBook sources and compile QuickBook -#           using the default toolset. -# -#       3.  As a last resort, we rely on the shell for finding 'quickbook'. -# - -import boostbook ; -import "class" : new ; -import feature ; -import generators ; -import toolset ; -import type ; -import scanner ; -import project ; -import targets ; -import build-system ; -import path ; -import common ; -import errors ; - -# The one and only QUICKBOOK type! -type.register QUICKBOOK : qbk ; - -# <quickbook-binary> shell command to run QuickBook -# <quickbook-binary-dependencies> targets to build QuickBook from sources. -feature.feature <quickbook-binary> : : free ; -feature.feature <quickbook-binary-dependencies> : : free dependency ; -feature.feature <quickbook-define> : : free ; -feature.feature <quickbook-indent> : : free ; -feature.feature <quickbook-line-width> : : free ; - - -# quickbook-binary-generator handles generation of the QuickBook executable, by -# marking it as a dependency for QuickBook docs. -# -# If the user supplied the QuickBook command that will be used. -# -# Otherwise we search some sensible places for the QuickBook sources and compile -# from scratch using the default toolset. -# -# As a last resort we rely on the shell to find 'quickbook'. 
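Whichever of the three lookup strategies above ends up providing the executable, project-side usage stays the same. A hedged sketch of driving the QUICKBOOK-to-XML generator through an explicit intermediate target (target names are hypothetical, assuming the usual BoostBook setup):

    # Hypothetical: produce the BoostBook XML explicitly, then format it.
    using boostbook ;
    import quickbook ;

    xml getting_started_xml : getting_started.qbk ;
    boostbook getting_started : getting_started_xml ;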
-# -class quickbook-binary-generator : generator -{ -    import modules path targets quickbook ; - -    rule run ( project name ? : property-set : sources * : multiple ? ) -    { -        quickbook.freeze-config ; -        # QuickBook invocation command and dependencies. -        local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ; -        local quickbook-binary-dependencies ; - -        if ! $(quickbook-binary) -        { -            # If the QuickBook source directory was found, mark its main target -            # as a dependency for the current project. Otherwise, try to find -            # 'quickbook' in user's PATH -            local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ; -            if $(quickbook-dir) -            { -                # Get the main-target in QuickBook directory. -                local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ; - -                # The first element are actual targets, the second are -                # properties found in target-id. We do not care about these -                # since we have passed the id ourselves. -                quickbook-main-target = -                    [ $(quickbook-main-target[1]).main-target quickbook ] ; - -                quickbook-binary-dependencies = -                    [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ; - -                # Ignore usage-requirements returned as first element. -                quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ; - -                # Some toolsets generate extra targets (e.g. RSP). We must mark -                # all targets as dependencies for the project, but we will only -                # use the EXE target for quickbook-to-boostbook translation. -                for local target in $(quickbook-binary-dependencies) -                { -                    if [ $(target).type ] = EXE -                    { -                        quickbook-binary =  -                            [ path.native  -                                [ path.join -                                    [ $(target).path ] -                                    [ $(target).name ] -                                ] -                            ] ; -                    } -                } -            } -        } - -        # Add $(quickbook-binary-dependencies) as a dependency of the current -        # project and set it as the <quickbook-binary> feature for the -        # quickbook-to-boostbook rule, below. -        property-set = [ $(property-set).add-raw -            <dependency>$(quickbook-binary-dependencies) -            <quickbook-binary>$(quickbook-binary) -            <quickbook-binary-dependencies>$(quickbook-binary-dependencies) -        ] ; - -        return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ; -    } -} - - -# Define a scanner for tracking QBK include dependencies. -# -class qbk-scanner : common-scanner -{ -    rule pattern ( ) -    { -        return "\\[[ ]*include[ ]+([^]]+)\\]"  -        "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]"  -        "\\[[ ]*import[ ]+([^]]+)\\]" ; -    } -} - - -scanner.register qbk-scanner : include ; - -type.set-scanner QUICKBOOK : qbk-scanner ; - - -# Initialization of toolset. -# -# Parameters: -#   command ?    -> path to QuickBook executable. -# -# When command is not supplied toolset will search for QuickBook directory and -# compile the executable from source. 
If that fails we still search the path for -# 'quickbook'. -# -rule init ( -        command ?   # path to the QuickBook executable. -    ) -{ -    if $(command) -    { -        if $(.config-frozen) -        { -            errors.user-error "quickbook: configuration cannot be changed after it has been used." ; -        } -        .command = $(command) ; -    } -} - -rule freeze-config ( ) -{ -    if ! $(.config-frozen) -    { -        .config-frozen = true ; - -        # QuickBook invocation command and dependencies. - -        .quickbook-binary = $(.command) ; - -        if $(.quickbook-binary) -        { -            # Use user-supplied command. -            .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ; -        } -        else -        { -            # Search for QuickBook sources in sensible places, like -            #   $(BOOST_ROOT)/tools/quickbook -            #   $(BOOST_BUILD_PATH)/../../quickbook - -            # And build quickbook executable from sources. - -            local boost-root = [ modules.peek : BOOST_ROOT ] ; -            local boost-build-path = [ build-system.location ] ; - -            if $(boost-root) -            { -                .quickbook-dir += [ path.join $(boost-root) tools ] ; -            } - -            if $(boost-build-path) -            { -                .quickbook-dir += $(boost-build-path)/../.. ; -            } - -            .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ; - -            # If the QuickBook source directory was found, mark its main target -            # as a dependency for the current project. Otherwise, try to find -            # 'quickbook' in user's PATH -            if $(.quickbook-dir) -            { -                .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ; -            } -            else -            { -                ECHO "QuickBook warning: The path to the quickbook executable was" ; -                ECHO "  not provided. Additionally, couldn't find QuickBook" ; -                ECHO "  sources searching in" ; -                ECHO "    * BOOST_ROOT/tools/quickbook" ; -                ECHO "    * BOOST_BUILD_PATH/../../quickbook" ; -                ECHO "  Will now try to find a precompiled executable by searching" ; -                ECHO "  the PATH for 'quickbook'." ; -                ECHO "  To disable this warning in the future, or to completely" ; -                ECHO "  avoid compilation of quickbook, you can explicitly set the" ; -                ECHO "  path to a quickbook executable command in user-config.jam" ; -                ECHO "  or site-config.jam with the call" ; -                ECHO "    using quickbook : /path/to/quickbook ;" ; - -                # As a last resort, search for 'quickbook' command in path. Note -                # that even if the 'quickbook' command is not found, -                # get-invocation-command will still return 'quickbook' and might -                # generate an error while generating the virtual-target. - -                .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ; -            } -        } -    } -} - - -generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ; - - -# <quickbook-binary> shell command to run QuickBook -# <quickbook-binary-dependencies> targets to build QuickBook from sources. 
-toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND      <quickbook-binary> ; -toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ; -toolset.flags quickbook.quickbook-to-boostbook INCLUDES        <include> ; -toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES      <quickbook-define> ; -toolset.flags quickbook.quickbook-to-boostbook QB-INDENT       <quickbook-indent> ; -toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH   <quickbook-line-width> ; - - -rule quickbook-to-boostbook ( target : source : properties * ) -{ -    # Signal dependency of quickbook sources on <quickbook-binary-dependencies> -    # upon invocation of quickbook-to-boostbook. -    DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ; -} - - -actions quickbook-to-boostbook -{ -    "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)" -} - - -# Declare a main target to convert a quickbook source into a boostbook XML file. -# -rule to-boostbook ( target-name : sources * : requirements * : default-build * ) -{  -  local project = [ project.current ] ; -     -  targets.main-target-alternative  -    [ new typed-target $(target-name) : $(project) : XML -        : [ targets.main-target-sources $(sources) : $(target-name) ]  -        : [ targets.main-target-requirements $(requirements) : $(project) ] -        : [ targets.main-target-default-build $(default-build) : $(project) ]  -    ] ; -} diff --git a/jam-files/boost-build/tools/rc.jam b/jam-files/boost-build/tools/rc.jam deleted file mode 100644 index 9964d339..00000000 --- a/jam-files/boost-build/tools/rc.jam +++ /dev/null @@ -1,156 +0,0 @@ -#  Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and -#  distribute this software is granted provided this copyright notice appears in -#  all copies. This software is provided "as is" without express or implied -#  warranty, and with no claim as to its suitability for any purpose. -#   -#  Copyright (c) 2006 Rene Rivera. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -import type ; -import generators ; -import feature ; -import errors ; -import scanner ; -import toolset : flags ; - -if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] -{ -    .debug-configuration = true ; -} - -type.register RC : rc ; - -rule init ( ) -{ -} - -# Configures a new resource compilation command specific to a condition, -# usually a toolset selection condition. The possible options are: -# -#     * <rc-type>(rc|windres) - Indicates the type of options the command -#       accepts. -# -# Even though the arguments are all optional, only when a command, condition, -# and at minimum the rc-type option are given will the command be configured. -# This is so that callers don't have to check auto-configuration values -# before calling this. And still get the functionality of build failures when -# the resource compiler can't be found. -# -rule configure ( command ? : condition ? 
: options * ) -{ -    local rc-type = [ feature.get-values <rc-type> : $(options) ] ; - -    if $(command) && $(condition) && $(rc-type) -    { -        flags rc.compile.resource .RC $(condition) : $(command) ; -        flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ; -        flags rc.compile.resource DEFINES <define> ; -        flags rc.compile.resource INCLUDES <include> ; -        if $(.debug-configuration) -        { -            ECHO notice: using rc compiler :: $(condition) :: $(command) ; -        } -    } -} - -rule compile.resource ( target : sources * : properties * ) -{ -    local rc-type = [ on $(target) return $(.RC_TYPE) ] ; -    rc-type ?= null ; -    compile.resource.$(rc-type) $(target) : $(sources[1]) ; -} - -actions compile.resource.rc -{ -    "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)" -} - -actions compile.resource.windres -{ -    "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)" -} - -actions quietly compile.resource.null -{ -    as /dev/null -o "$(<)" -} - -# Since it's a common practice to write -# exe hello : hello.cpp hello.rc -# we change the name of object created from RC file, to -# avoid conflict with hello.cpp. -# The reason we generate OBJ and not RES, is that gcc does not -# seem to like RES files, but works OK with OBJ. -# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/ -# -# Using 'register-c-compiler' adds the build directory to INCLUDES -generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ; - -# Register scanner for resources -class res-scanner : scanner  -{ -    import regex virtual-target path scanner ;     -     -    rule __init__ ( includes * ) -    { -        scanner.__init__ ; -     -        self.includes = $(includes) ; -    }     - -    rule pattern ( ) -    { -        return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ; -    } - -    rule process ( target : matches * : binding ) -    { -        local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ; -        local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ; -        local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ; - -        # Icons and other includes may referenced as  -        # -        # IDR_MAINFRAME ICON "res\\icon.ico" -        # -        # so we have to replace double backslashes to single ones. -        res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ; - -        # CONSIDER: the new scoping rule seem to defeat "on target" variables. -        local g = [ on $(target) return $(HDRGRIST) ] ;   -        local b = [ NORMALIZE_PATH $(binding:D) ] ; - -        # Attach binding of including file to included targets. -        # When target is directly created from virtual target -        # this extra information is unnecessary. But in other -        # cases, it allows to distinguish between two headers of the  -        # same name included from different places.       -        # We don't need this extra information for angle includes, -        # since they should not depend on including file (we can't -        # get literal "." in include path). 
-        local g2 = $(g)"#"$(b) ; -        -        angle = $(angle:G=$(g)) ; -        quoted = $(quoted:G=$(g2)) ; -        res = $(res:G=$(g2)) ; -         -        local all = $(angle) $(quoted) ; - -        INCLUDES $(target) : $(all) ; -        DEPENDS $(target) : $(res) ; -        NOCARE $(all) $(res) ; -        SEARCH on $(angle) = $(self.includes:G=) ; -        SEARCH on $(quoted) = $(b) $(self.includes:G=) ; -        SEARCH on $(res) = $(b) $(self.includes:G=) ; -         -        # Just propagate current scanner to includes, in a hope -        # that includes do not change scanners.  -        scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ; -    }         -} - -scanner.register res-scanner : include ; -type.set-scanner RC : res-scanner ; diff --git a/jam-files/boost-build/tools/rc.py b/jam-files/boost-build/tools/rc.py deleted file mode 100644 index 0b82d231..00000000 --- a/jam-files/boost-build/tools/rc.py +++ /dev/null @@ -1,189 +0,0 @@ -# Status: being ported by Steven Watanabe -# Base revision: 47077 -# -#  Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and -#  distribute this software is granted provided this copyright notice appears in -#  all copies. This software is provided "as is" without express or implied -#  warranty, and with no claim as to its suitability for any purpose. -#   -#  Copyright (c) 2006 Rene Rivera. -# -#  Copyright (c) 2008 Steven Watanabe -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -##import type ; -##import generators ; -##import feature ; -##import errors ; -##import scanner ; -##import toolset : flags ; - -from b2.build import type, toolset, generators, scanner, feature -from b2.tools import builtin -from b2.util import regex -from b2.build.toolset import flags -from b2.manager import get_manager - -__debug = None - -def debug(): -    global __debug -    if __debug is None: -        __debug = "--debug-configuration" in bjam.variable("ARGV")         -    return __debug - -type.register('RC', ['rc']) - -def init(): -    pass - -def configure (command = None, condition = None, options = None): -    """ -        Configures a new resource compilation command specific to a condition, -        usually a toolset selection condition. The possible options are: -         -            * <rc-type>(rc|windres) - Indicates the type of options the command -              accepts. -         -        Even though the arguments are all optional, only when a command, condition, -        and at minimum the rc-type option are given will the command be configured. -        This is so that callers don't have to check auto-configuration values -        before calling this. And still get the functionality of build failures when -        the resource compiler can't be found. 
-    """ -    rc_type = feature.get_values('<rc-type>', options) -    if rc_type: -        assert(len(rc_type) == 1) -        rc_type = rc_type[0] - -    if command and condition and rc_type: -        flags('rc.compile.resource', '.RC', condition, command) -        flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower()) -        flags('rc.compile.resource', 'DEFINES', [], ['<define>']) -        flags('rc.compile.resource', 'INCLUDES', [], ['<include>']) -        if debug(): -            print 'notice: using rc compiler ::', condition, '::', command - -engine = get_manager().engine() - -class RCAction: -    """Class representing bjam action defined from Python. -    The function must register the action to execute.""" -     -    def __init__(self, action_name, function): -        self.action_name = action_name -        self.function = function -             -    def __call__(self, targets, sources, property_set): -        if self.function: -            self.function(targets, sources, property_set) - -# FIXME: What is the proper way to dispatch actions? -def rc_register_action(action_name, function = None): -    global engine -    if engine.actions.has_key(action_name): -        raise "Bjam action %s is already defined" % action_name -    engine.actions[action_name] = RCAction(action_name, function) - -def rc_compile_resource(targets, sources, properties): -    rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE') -    global engine -    engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties) - -rc_register_action('rc.compile.resource', rc_compile_resource) - - -engine.register_action( -    'rc.compile.resource.rc', -    '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"') - -engine.register_action( -    'rc.compile.resource.windres', -    '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"') - -# FIXME: this was originally declared quietly -engine.register_action( -    'compile.resource.null', -    'as /dev/null -o "$(<)"') - -# Since it's a common practice to write -# exe hello : hello.cpp hello.rc -# we change the name of object created from RC file, to -# avoid conflict with hello.cpp. -# The reason we generate OBJ and not RES, is that gcc does not -# seem to like RES files, but works OK with OBJ. 
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/ -# -# Using 'register-c-compiler' adds the build directory to INCLUDES -# FIXME: switch to generators -builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], []) - -__angle_include_re = "#include[ ]*<([^<]+)>" - -# Register scanner for resources -class ResScanner(scanner.Scanner): -     -    def __init__(self, includes): -        scanner.__init__ ; -        self.includes = includes - -    def pattern(self): -        return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ -               "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ; - -    def process(self, target, matches, binding): - -        angle = regex.transform(matches, "#include[ ]*<([^<]+)>") -        quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"") -        res = regex.transform(matches, -                              "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ -                              "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4]) - -        # Icons and other includes may referenced as  -        # -        # IDR_MAINFRAME ICON "res\\icon.ico" -        # -        # so we have to replace double backslashes to single ones. -        res = [ re.sub(r'\\\\', '/', match) for match in res ] - -        # CONSIDER: the new scoping rule seem to defeat "on target" variables. -        g = bjam.call('get-target-variable', target, 'HDRGRIST') -        b = os.path.normalize_path(os.path.dirname(binding)) - -        # Attach binding of including file to included targets. -        # When target is directly created from virtual target -        # this extra information is unnecessary. But in other -        # cases, it allows to distinguish between two headers of the  -        # same name included from different places.       -        # We don't need this extra information for angle includes, -        # since they should not depend on including file (we can't -        # get literal "." in include path). -        g2 = g + "#" + b -        -        g = "<" + g + ">" -        g2 = "<" + g2 + ">" -        angle = [g + x for x in angle] -        quoted = [g2 + x for x in quoted] -        res = [g2 + x for x in res] -         -        all = angle + quoted - -        bjam.call('mark-included', target, all) - -        engine = get_manager().engine() - -        engine.add_dependency(target, res) -        bjam.call('NOCARE', all + res) -        engine.set_target_variable(angle, 'SEARCH', ungrist(self.includes)) -        engine.set_target_variable(quoted, 'SEARCH', b + ungrist(self.includes)) -        engine.set_target_variable(res, 'SEARCH', b + ungrist(self.includes)) ; -         -        # Just propagate current scanner to includes, in a hope -        # that includes do not change scanners. -        get_manager().scanners().propagate(self, angle + quoted) - -scanner.register(ResScanner, 'include') -type.set_scanner('RC', ResScanner) diff --git a/jam-files/boost-build/tools/stage.jam b/jam-files/boost-build/tools/stage.jam deleted file mode 100644 index 296e7558..00000000 --- a/jam-files/boost-build/tools/stage.jam +++ /dev/null @@ -1,524 +0,0 @@ -# Copyright 2003 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. 
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'install' rule, used to copy a set of targets to a -# single location. - -import targets ; -import "class" : new ; -import errors ; -import type ; -import generators ; -import feature ; -import project ; -import virtual-target ; -import path ; -import types/register ; - - -feature.feature <install-dependencies> : off on : incidental      ; -feature.feature <install-type>         :        : free incidental ; -feature.feature <install-source-root>  :        : free path       ; -feature.feature <so-version>           :        : free incidental ; - -# If 'on', version symlinks for shared libraries will not be created. Affects -# Unix builds only. -feature.feature <install-no-version-symlinks> : on : optional incidental ; - - -class install-target-class : basic-target -{ -    import feature ; -    import project ; -    import type ; -    import errors ; -    import generators ; -    import path ; -    import stage ; -    import "class" : new ; -    import property ; -    import property-set ; - -    rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * ) -    { -        basic-target.__init__ $(name-and-dir) : $(project) : $(sources) : -            $(requirements) : $(default-build) ; -    } - -    # If <location> is not set, sets it based on the project data. -    # -    rule update-location ( property-set ) -    { -        local loc = [ $(property-set).get <location> ] ; -        if ! $(loc) -        { -            loc = [ path.root $(self.name) [ $(self.project).get location ] ] ; -            property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ; -        } - -        return $(property-set) ; -    } - -    # Takes a target that is installed and a property set which is used when -    # installing. -    # -    rule adjust-properties ( target : build-property-set ) -    { -        local ps-raw ; -        local a = [ $(target).action ] ; -        if $(a) -        { -            local ps = [ $(a).properties ] ; -            ps-raw = [ $(ps).raw ] ; - -            # Unless <hardcode-dll-paths>true is in properties, which can happen -            # only if the user has explicitly requested it, nuke all <dll-path> -            # properties. -            if [ $(build-property-set).get <hardcode-dll-paths> ] != true -            { -                ps-raw = [ property.change $(ps-raw) : <dll-path> ] ; -            } - -            # If any <dll-path> properties were specified for installing, add -            # them. -            local l = [ $(build-property-set).get <dll-path> ] ; -            ps-raw += $(l:G=<dll-path>) ; - -            # Also copy <linkflags> feature from current build set, to be used -            # for relinking. -            local l = [ $(build-property-set).get <linkflags> ] ; -            ps-raw += $(l:G=<linkflags>) ; - -            # Remove the <tag> feature on original targets. -            ps-raw = [ property.change $(ps-raw) : <tag> ] ; - -            # And <location>. If stage target has another stage target in -            # sources, then we shall get virtual targets with the <location> -            # property set. 
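Taken together, the adjustment performed by this rule (continued just below) can be summarised by the following illustrative Python sketch; the (feature, value) tuple representation of properties is an assumption made purely for the example, not Boost.Build's API.

    # Illustrative only: drop build-time <dll-path>, <tag> and <location>,
    # then carry over the install-time properties that matter for staging.
    def adjust_properties(build_props, install_props, hardcode_dll_paths=False):
        drop = {"tag", "location"}
        if not hardcode_dll_paths:
            # nuke build-time rpaths unless the user asked to hardcode them
            drop.add("dll-path")
        kept = [(f, v) for f, v in build_props if f not in drop]
        carried = ("dll-path", "linkflags", "dependency", "location",
                   "install-no-version-symlinks", "install-source-root")
        kept += [(f, v) for f, v in install_props if f in carried]
        return kept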
-            ps-raw = [ property.change $(ps-raw) : <location> ] ; -        } - -        local d = [ $(build-property-set).get <dependency> ] ; -        ps-raw += $(d:G=<dependency>) ; - -        local d = [ $(build-property-set).get <location> ] ; -        ps-raw += $(d:G=<location>) ; - -        local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ; -        ps-raw += $(ns:G=<install-no-version-symlinks>) ; - -        local d = [ $(build-property-set).get <install-source-root> ] ; -        # Make the path absolute: we shall use it to compute relative paths and -        # making the path absolute will help. -        if $(d) -        { -            d = [ path.root $(d) [ path.pwd ] ] ; -            ps-raw += $(d:G=<install-source-root>) ; -        } - -        if $(ps-raw) -        { -            return [ property-set.create $(ps-raw) ]  ; -        } -        else -        { -            return [ property-set.empty ] ; -        } -    } - -    rule construct ( name : source-targets * : property-set ) -    { -        source-targets = [ targets-to-stage $(source-targets) : -            $(property-set) ] ; - -        property-set = [ update-location $(property-set) ] ; - -        local ename = [ $(property-set).get <name> ] ; - -        if $(ename) && $(source-targets[2]) -        { -            errors.error "When <name> property is used in 'install', only one" -                "source is allowed" ; -        } - -        local result ; -        for local i in $(source-targets) -        { -            local staged-targets ; - -            local new-properties = [ adjust-properties $(i) : -                $(property-set) ] ; - -            # See if something special should be done when staging this type. It -            # is indicated by the presence of a special "INSTALLED_" type. -            local t = [ $(i).type ] ; -            if $(t) && [ type.registered INSTALLED_$(t) ] -            { -                if $(ename) -                { -                    errors.error "In 'install': <name> property specified with target that requires relinking." ; -                } -                else -                { -                    local targets = [ generators.construct $(self.project) -                        $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ; -                    staged-targets += $(targets[2-]) ; -                } -            } -            else -            { -                staged-targets = [ stage.copy-file $(self.project) $(ename) : -                    $(i) : $(new-properties) ] ; -            } - -            if ! $(staged-targets) -            { -                errors.error "Unable to generate staged version of " [ $(source).str ] ; -            } - -            for t in $(staged-targets) -            { -                result += [ virtual-target.register $(t) ] ; -            } -        } - -        return [ property-set.empty ] $(result) ; -    } - -    # Given the list of source targets explicitly passed to 'stage', returns the -    # list of targets which must be staged. -    # -    rule targets-to-stage ( source-targets * : property-set ) -    { -        local result ; - -        # Traverse the dependencies, if needed. -        if [ $(property-set).get <install-dependencies> ] = "on" -        { -            source-targets = [ collect-targets $(source-targets) ] ; -        } - -        # Filter the target types, if needed. 
-        local included-types = [ $(property-set).get <install-type> ] ; -        for local r in $(source-targets) -        { -            local ty = [ $(r).type ] ; -            if $(ty) -            { -                # Do not stage searched libs. -                if $(ty) != SEARCHED_LIB -                { -                    if $(included-types) -                    { -                        if [ include-type $(ty) : $(included-types) ] -                        { -                            result += $(r) ; -                        } -                    } -                    else -                    { -                        result += $(r) ; -                    } -                } -            } -            else if ! $(included-types) -            { -                # Don't install typeless target if there is an explicit list of -                # allowed types. -                result += $(r) ; -            } -        } - -        return $(result) ; -    } - -    # CONSIDER: figure out why we can not use virtual-target.traverse here. -    # -    rule collect-targets ( targets * ) -    { -        # Find subvariants -        local s ; -        for local t in $(targets) -        { -            s += [ $(t).creating-subvariant ] ; -        } -        s = [ sequence.unique $(s) ] ; -         -        local result = [ new set ] ; -        $(result).add $(targets) ; -         -        for local i in $(s) -        { -            $(i).all-referenced-targets $(result) ; -        } -        local result2 ; -        for local r in [ $(result).list ] -        { -            if $(r:G) != <use> -            { -                result2 += $(r:G=) ; -            } -        } -        DELETE_MODULE $(result) ; -        result = [ sequence.unique $(result2) ] ; -    } - -    # Returns true iff 'type' is subtype of some element of 'types-to-include'. -    # -    local rule include-type ( type : types-to-include * ) -    { -        local found ; -        while $(types-to-include) && ! $(found) -        { -            if [ type.is-subtype $(type) $(types-to-include[1]) ] -            { -                found = true ; -            } -            types-to-include = $(types-to-include[2-]) ; -        } - -        return $(found) ; -    } -} - - -# Creates a copy of target 'source'. The 'properties' object should have a -# <location> property which specifies where the target must be placed. -# -rule copy-file ( project name ? : source : properties ) -{ -    name ?= [ $(source).name ] ; -    local relative ; - -    local new-a = [ new non-scanning-action $(source) : common.copy : -        $(properties) ] ; -    local source-root = [ $(properties).get <install-source-root> ] ; -    if $(source-root) -    { -        # Get the real path of the target. We probably need to strip relative -        # path from the target name at construction. -        local path = [ $(source).path ] ; -        path = [ path.root $(name:D) $(path) ] ; -        # Make the path absolute. Otherwise, it would be hard to compute the -        # relative path. The 'source-root' is already absolute, see the -        # 'adjust-properties' method above. -        path = [ path.root $(path) [ path.pwd ] ] ; - -        relative = [ path.relative-to $(source-root) $(path) ] ; -    } - -    # Note: Using $(name:D=$(relative)) might be faster here, but then we would -    # need to explicitly check that relative is not ".", otherwise we might get -    # paths like '<prefix>/boost/.', try to create it and mkdir would obviously -    # fail. 
-    name = [ path.join $(relative) $(name:D=) ] ; - -    return [ new file-target $(name) exact : [ $(source).type ] : $(project) : -        $(new-a) ] ; -} - - -rule symlink ( name : project : source : properties ) -{ -    local a = [ new action $(source) : symlink.ln : $(properties) ] ; -    return [ new file-target $(name) exact : [ $(source).type ] : $(project) : -        $(a) ] ; -} - - -rule relink-file ( project : source : property-set  ) -{ -    local action = [ $(source).action ] ; -    local cloned-action = [ virtual-target.clone-action $(action) : $(project) : -        "" : $(property-set) ] ; -    return [ $(cloned-action).targets ] ; -} - - -# Declare installed version of the EXE type. Generator for this type will cause -# relinking to the new location. -type.register INSTALLED_EXE : : EXE ; - - -class installed-exe-generator : generator -{ -    import type ; -    import property-set ; -    import modules ; -    import stage ; - -    rule __init__ ( ) -    { -        generator.__init__ install-exe : EXE : INSTALLED_EXE ; -    } - -    rule run ( project name ? : property-set : source : multiple ? ) -    { -        local need-relink ; -         -        if [ $(property-set).get <os> ] in NT CYGWIN || -            [ $(property-set).get <target-os> ] in windows cygwin -        { -        } -        else -        { -            # See if the dll-path properties are not changed during -            # install. If so, copy, don't relink. -            local a = [ $(source).action ] ; -            local p = [ $(a).properties ] ; -            local original = [ $(p).get <dll-path> ] ; -            local current = [ $(property-set).get <dll-path> ] ; -             -            if $(current) != $(original) -            { -                need-relink = true ; -            }             -        } -         -             -        if $(need-relink) -        { -            return [ stage.relink-file $(project) -              : $(source) : $(property-set) ] ; -        } -        else -        { -            return [ stage.copy-file $(project) -              : $(source) : $(property-set) ] ; -        } -    } -} - - -generators.register [ new installed-exe-generator ] ; - - -# Installing a shared link on Unix might cause a creation of versioned symbolic -# links. -type.register INSTALLED_SHARED_LIB : : SHARED_LIB ; - - -class installed-shared-lib-generator : generator -{ -    import type ; -    import property-set ; -    import modules ; -    import stage ; - -    rule __init__ ( ) -    { -        generator.__init__ install-shared-lib : SHARED_LIB -          : INSTALLED_SHARED_LIB ; -    } - -    rule run ( project name ? : property-set : source : multiple ? ) -    { -        if [ $(property-set).get <os> ] in NT CYGWIN || -            [ $(property-set).get <target-os> ] in windows cygwin -        { -            local copied = [ stage.copy-file $(project) : $(source) : -                $(property-set) ] ; -            return [ virtual-target.register $(copied) ] ; -        } -        else -        { -            local a = [ $(source).action ] ; -            local copied ; -            if ! $(a) -            { -                # Non-derived file, just copy. 
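The copy-versus-relink decision used by both installed-target generators in this file reduces to a single comparison; a minimal illustrative predicate (not Boost.Build code):

    # Windows has no rpath baked into binaries, so installing never relinks
    # there; elsewhere, relink only if the requested <dll-path> set changed.
    def needs_relink(target_os, built_dll_paths, install_dll_paths):
        if target_os in ("windows", "cygwin"):
            return False
        return list(built_dll_paths) != list(install_dll_paths)

    print(needs_relink("linux", ["/opt/lib"], ["/usr/local/lib"]))   # True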
-                copied = [ stage.copy-file $(project) : $(source) : -                    $(property-set) ] ; -            } -            else -            { -                local cp = [ $(a).properties ] ; -                local current-dll-path = [ $(cp).get <dll-path> ] ; -                local new-dll-path = [ $(property-set).get <dll-path> ] ; - -                if $(current-dll-path) != $(new-dll-path) -                { -                    # Rpath changed, need to relink. -                    copied = [ stage.relink-file $(project) : $(source) : -                        $(property-set) ] ; -                } -                else -                { -                    copied = [ stage.copy-file $(project) : $(source) : -                        $(property-set) ] ; -                } -            } - -            copied = [ virtual-target.register $(copied) ] ; - -            local result = $(copied) ; -            # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and -            # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY -            # symbolic links. -            local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$ -                : [ $(copied).name ] ] ; -            if $(m) -            { -                # Symlink without version at all is used to make -                # -lsome_library work. -                result += [ stage.symlink $(m[1]) : $(project) : $(copied) : -                    $(property-set) ] ; - -                # Symlinks of some libfoo.N and libfoo.N.M are used so that -                # library can found at runtime, if libfoo.N.M.X has soname of -                # libfoo.N. That happens when the library makes some binary -                # compatibility guarantees. If not, it is possible to skip those -                # symlinks. -                local suppress = -                    [ $(property-set).get <install-no-version-symlinks> ] ; - -                if $(suppress) != "on" -                { -                    result += [ stage.symlink $(m[1]).$(m[2]) : $(project) -                      : $(copied) : $(property-set) ] ; -                    result += [ stage.symlink $(m[1]).$(m[2]).$(m[3])  : $(project) -                      : $(copied) : $(property-set) ] ; -                } -            } - -            return $(result) ; -        } -    } -} - -generators.register [ new installed-shared-lib-generator ] ; - - -# Main target rule for 'install'. -# -rule install ( name : sources * : requirements * : default-build * ) -{ -    local project = [ project.current ] ; - -    # Unless the user has explicitly asked us to hardcode dll paths, add -    # <hardcode-dll-paths>false in requirements, to override default value. -    if ! 
<hardcode-dll-paths>true in $(requirements) -    { -        requirements += <hardcode-dll-paths>false ; -    } - -    if <tag> in $(requirements:G) -    { -        errors.user-error -            "The <tag> property is not allowed for the 'install' rule" ; -    } - -    targets.main-target-alternative -      [ new install-target-class $(name) : $(project) -        : [ targets.main-target-sources $(sources) : $(name) ] -        : [ targets.main-target-requirements $(requirements) : $(project) ] -        : [ targets.main-target-default-build $(default-build) : $(project) ] -      ] ; -} - - -IMPORT $(__name__) : install : : install ; -IMPORT $(__name__) : install : : stage ; diff --git a/jam-files/boost-build/tools/stage.py b/jam-files/boost-build/tools/stage.py deleted file mode 100644 index 25eccbe5..00000000 --- a/jam-files/boost-build/tools/stage.py +++ /dev/null @@ -1,350 +0,0 @@ -# Status: ported. -# Base revision 64444. -# -# Copyright 2003 Dave Abrahams -# Copyright 2005, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module defines the 'install' rule, used to copy a set of targets to a -# single location. - -import b2.build.feature as feature -import b2.build.targets as targets -import b2.build.property as property -import b2.build.property_set as property_set -import b2.build.generators as generators -import b2.build.virtual_target as virtual_target - -from b2.manager import get_manager -from b2.util.sequence import unique -from b2.util import bjam_signature - -import b2.build.type - -import os.path -import re -import types - -feature.feature('install-dependencies', ['off', 'on'], ['incidental']) -feature.feature('install-type', [], ['free', 'incidental']) -feature.feature('install-source-root', [], ['free', 'path']) -feature.feature('so-version', [], ['free', 'incidental']) - -# If 'on', version symlinks for shared libraries will not be created. Affects -# Unix builds only. -feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental']) - -class InstallTargetClass(targets.BasicTarget): - -    def update_location(self, ps): -        """If <location> is not set, sets it based on the project data.""" - -        loc = ps.get('location') -        if not loc: -            loc = os.path.join(self.project().get('location'), self.name()) -            ps = ps.add_raw(["<location>" + loc]) - -        return ps - -    def adjust_properties(self, target, build_ps): -        a = target.action() -        properties = [] -        if a: -            ps = a.properties() -            properties = ps.all() -             -            # Unless <hardcode-dll-paths>true is in properties, which can happen -            # only if the user has explicitly requested it, nuke all <dll-path> -            # properties. - -            if build_ps.get('hardcode-dll-paths') != ['true']: -                properties = [p for p in properties if p.feature().name() != 'dll-path'] - -            # If any <dll-path> properties were specified for installing, add -            # them. -            properties.extend(build_ps.get_properties('dll-path')) - -            # Also copy <linkflags> feature from current build set, to be used -            # for relinking. -            properties.extend(build_ps.get_properties('linkflags')) - -            # Remove the <tag> feature on original targets. -            # And <location>. 
If stage target has another stage target in -            # sources, then we shall get virtual targets with the <location> -            # property set. -            properties = [p for p in properties -                          if not p.feature().name() in ['tag', 'location']] - -        properties.extend(build_ps.get_properties('dependency')) - -        properties.extend(build_ps.get_properties('location')) -         - -        properties.extend(build_ps.get_properties('install-no-version-symlinks')) - -        d = build_ps.get_properties('install-source-root') - -        # Make the path absolute: we shall use it to compute relative paths and -        # making the path absolute will help. -        if d: -            p = d[0] -            properties.append(property.Property(p.feature(), os.path.abspath(p.value()))) - -        return property_set.create(properties) -     - -    def construct(self, name, source_targets, ps): - -        source_targets = self.targets_to_stage(source_targets, ps) -        ps = self.update_location(ps) - -        ename = ps.get('name') -        if ename: -            ename = ename[0] -        if ename and len(source_targets) > 1: -            get_manager().errors()("When <name> property is used in 'install', only one source is allowed") - -        result = [] - -        for i in source_targets: - -            staged_targets = [] -            new_ps = self.adjust_properties(i, ps) - -            # See if something special should be done when staging this type. It -            # is indicated by the presence of a special "INSTALLED_" type. -            t = i.type() -            if t and b2.build.type.registered("INSTALLED_" + t): - -                if ename: -                    get_manager().errors()("In 'install': <name> property specified with target that requires relinking.") -                else: -                    (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t, -                                                        new_ps, [i]) -                    assert isinstance(r, property_set.PropertySet) -                    staged_targets.extend(targets) -                     -            else: -                staged_targets.append(copy_file(self.project(), ename, i, new_ps)) - -            if not staged_targets: -                get_manager().errors()("Unable to generate staged version of " + i) - -            result.extend(get_manager().virtual_targets().register(t) for t in staged_targets) - -        return (property_set.empty(), result) - -    def targets_to_stage(self, source_targets, ps): -        """Given the list of source targets explicitly passed to 'stage', returns the -        list of targets which must be staged.""" - -        result = [] - -        # Traverse the dependencies, if needed. -        if ps.get('install-dependencies') == ['on']: -            source_targets = self.collect_targets(source_targets) - -        # Filter the target types, if needed. -        included_types = ps.get('install-type') -        for r in source_targets: -            ty = r.type() -            if ty: -                # Do not stage searched libs. 
-                if ty != "SEARCHED_LIB": -                    if included_types: -                        if self.include_type(ty, included_types): -                            result.append(r) -                    else: -                        result.append(r) -            elif not included_types: -                # Don't install typeless target if there is an explicit list of -                # allowed types. -                result.append(r) - -        return result - -    # CONSIDER: figure out why we can not use virtual-target.traverse here. -    # -    def collect_targets(self, targets): -         -        s = [t.creating_subvariant() for t in targets] -        s = unique(s) -         -        result = set(targets) -        for i in s: -            i.all_referenced_targets(result) -            -        result2 = [] -        for r in result: -            if isinstance(r, property.Property): -                 -                if r.feature().name() != 'use': -                    result2.append(r.value()) -            else: -                result2.append(r) -        result2 = unique(result2) -        return result2 - -    # Returns true iff 'type' is subtype of some element of 'types-to-include'. -    # -    def include_type(self, type, types_to_include): -        return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include) - -# Creates a copy of target 'source'. The 'properties' object should have a -# <location> property which specifies where the target must be placed. -# -def copy_file(project, name, source, ps): - -    if not name: -        name = source.name() - -    relative = "" - -    new_a = virtual_target.NonScanningAction([source], "common.copy", ps) -    source_root = ps.get('install-source-root') -    if source_root: -        source_root = source_root[0] -        # Get the real path of the target. We probably need to strip relative -        # path from the target name at construction. -        path = os.path.join(source.path(), os.path.dirname(name)) -        # Make the path absolute. Otherwise, it would be hard to compute the -        # relative path. The 'source-root' is already absolute, see the -        # 'adjust-properties' method above. -        path = os.path.abspath(path) - -        relative = os.path.relpath(path, source_root) - -    name = os.path.join(relative, os.path.basename(name)) -    return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True) - -def symlink(name, project, source, ps): -    a = virtual_target.Action([source], "symlink.ln", ps) -    return virtual_target.FileTarget(name, source.type(), project, a, exact=True) - -def relink_file(project, source, ps): -    action = source.action() -    cloned_action = virtual_target.clone_action(action, project, "", ps) -    targets = cloned_action.targets() -    # We relink only on Unix, where exe or shared lib is always a single file. -    assert len(targets) == 1 -    return targets[0] - - -# Declare installed version of the EXE type. Generator for this type will cause -# relinking to the new location. 
-b2.build.type.register('INSTALLED_EXE', [], 'EXE') - -class InstalledExeGenerator(generators.Generator): - -    def __init__(self): -        generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE']) - -    def run(self, project, name, ps, source): - -        need_relink = False; - -        if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']: -            # Never relink -            pass -        else: -            # See if the dll-path properties are not changed during -            # install. If so, copy, don't relink. -            need_relink = ps.get('dll-path') != source[0].action().properties().get('dll-path') - -        if need_relink: -            return [relink_file(project, source, ps)] -        else: -            return [copy_file(project, None, source[0], ps)] - -generators.register(InstalledExeGenerator()) - - -# Installing a shared link on Unix might cause a creation of versioned symbolic -# links. -b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB') - -class InstalledSharedLibGenerator(generators.Generator): - -    def __init__(self): -        generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB']) - -    def run(self, project, name, ps, source): - -        source = source[0] -        if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']: -            copied = copy_file(project, None, source, ps) -            return [get_manager().virtual_targets().register(copied)] -        else: -            a = source.action() -            if not a: -                # Non-derived file, just copy. -                copied = copy_file(project, source, ps) -            else: - -                need_relink = ps.get('dll-path') != source.action().properties().get('dll-path') -                 -                if need_relink: -                    # Rpath changed, need to relink. -                    copied = relink_file(project, source, ps) -                else: -                    copied = copy_file(project, None, source, ps) - -            result = [get_manager().virtual_targets().register(copied)] -            # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and -            # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY -            # symbolic links. -            m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$", -                         copied.name()); -            if m: -                # Symlink without version at all is used to make -                # -lsome_library work. -                result.append(symlink(m.group(1), project, copied, ps)) - -                # Symlinks of some libfoo.N and libfoo.N.M are used so that -                # library can found at runtime, if libfoo.N.M.X has soname of -                # libfoo.N. That happens when the library makes some binary -                # compatibility guarantees. If not, it is possible to skip those -                # symlinks. -                if ps.get('install-no-version-symlinks') != ['on']: -                 -                    result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps)) -                    result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3), -                                          project, copied, ps)) - -            return result -             -generators.register(InstalledSharedLibGenerator()) - - -# Main target rule for 'install'. 
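For the versioned-name case handled above, an illustrative Python sketch (not Boost.Build code) of which symlinks get created for an installed shared library:

    import re

    def version_symlinks(installed_name, suppress_version_symlinks=False):
        m = re.match(r"(.*)\.(\d+)\.(\d+)\.(\d+)$", installed_name)
        if not m:
            return []
        links = [m.group(1)]                      # makes -lsome_library work
        if not suppress_version_symlinks:
            links.append(m.group(1) + "." + m.group(2))
            links.append(m.group(1) + "." + m.group(2) + "." + m.group(3))
        return links

    print(version_symlinks("libfoo.so.1.2.3"))
    # ['libfoo.so', 'libfoo.so.1', 'libfoo.so.1.2']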
-# -@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"], -                 ["default_build", "*"], ["usage_requirements", "*"])) -def install(name, sources, requirements=[], default_build=[], usage_requirements=[]): - -    requirements = requirements[:] -    # Unless the user has explicitly asked us to hardcode dll paths, add -    # <hardcode-dll-paths>false in requirements, to override default value. -    if not '<hardcode-dll-paths>true' in requirements: -        requirements.append('<hardcode-dll-paths>false') - -    if any(r.startswith('<tag>') for r in requirements): -        get_manager().errors()("The <tag> property is not allowed for the 'install' rule") - -    from b2.manager import get_manager -    t = get_manager().targets() -     -    project = get_manager().projects().current() -         -    return t.main_target_alternative( -        InstallTargetClass(name, project, -                           t.main_target_sources(sources, name), -                           t.main_target_requirements(requirements, project), -                           t.main_target_default_build(default_build, project), -                           t.main_target_usage_requirements(usage_requirements, project))) - -get_manager().projects().add_rule("install", install) -get_manager().projects().add_rule("stage", install) - diff --git a/jam-files/boost-build/tools/stlport.jam b/jam-files/boost-build/tools/stlport.jam deleted file mode 100644 index 62eebda5..00000000 --- a/jam-files/boost-build/tools/stlport.jam +++ /dev/null @@ -1,303 +0,0 @@ -# Copyright Gennadiy Rozental -# Copyright 2006 Rene Rivera  -# Copyright 2003, 2004, 2006 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -# The STLPort is usable by means of 'stdlib' feature. When -# stdlib=stlport is specified, default version of STLPort will be used, -# while stdlib=stlport-4.5 will use specific version. -# The subfeature value 'hostios' means to use host compiler's iostreams. -# -# The specific version of stlport is selected by features: -# The <runtime-link> feature selects between static and shared library -# The <runtime-debugging>on selects STLPort with debug symbols -# and stl debugging. -# There's no way to use STLPort with debug symbols but without -# stl debugging. - -# TODO: must implement selection of different STLPort installations based -# on used toolset. -# Also, finish various flags: -# -# This is copied from V1 toolset, "+" means "implemented" -#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ; -#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ; -# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ; -# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ; -# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ; -# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ; -#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ; -#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ; - - -import feature : feature subfeature ; -import project ; -import "class" : new ; -import targets ; -import property-set ; -import common ; -import type ; - -# Make this module into a project. 
-project.initialize $(__name__) ; -project stlport ; - -# The problem: how to request to use host compiler's iostreams? -# -# Solution 1: Global 'stlport-iostream' feature. -#    That's ugly. Subfeature make more sense for stlport-specific thing. -# Solution 2: Use subfeature with two values, one of which ("use STLPort iostream") -#     is default. -#    The problem is that such subfeature will appear in target paths, and that's ugly -# Solution 3: Use optional subfeature with only one value. - -feature.extend stdlib : stlport ; -feature.compose <stdlib>stlport : <library>/stlport//stlport ; - -# STLport iostreams or native iostreams -subfeature stdlib stlport : iostream : hostios : optional propagated  ; - -# STLport extensions -subfeature stdlib stlport : extensions : noext : optional propagated ; - -# STLport anachronisms -- NOT YET SUPPORTED -# subfeature stdlib stlport : anachronisms : on off ; - -# STLport debug allocation -- NOT YET SUPPORTED -#subfeature stdlib stlport : debug-alloc : off on ; - -# Declare a special target class to handle the creation of search-lib-target -# instances for STLport. We need a special class, because otherwise we'll have -# - declare prebuilt targets for all possible toolsets. And by the time 'init' -#   is called we don't even know the list of toolsets that are registered -# - when host iostreams are used, we really should produce nothing. It would -#   be hard/impossible to achieve this using prebuilt targets. - -class stlport-target-class : basic-target -{ -    import feature project type errors generators ; -    import set : difference ; - -    rule __init__ ( project : headers ? : libraries * :  version ? ) -    { -        basic-target.__init__ stlport : $(project) ; -        self.headers = $(headers) ; -        self.libraries = $(libraries) ; -        self.version = $(version) ; -        self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ; -         -        local requirements ; -        requirements += <stdlib-stlport:version>$(self.version) ; -        self.requirements = [ property-set.create $(requirements) ] ; -    } - -    rule generate ( property-set ) -    { -        # Since this target is built with <stdlib>stlport, it will also -        # have <library>/stlport//stlport in requirements, which will -        # cause a loop in main target references. Remove that property -        # manually. - -        property-set = [ property-set.create -            [ difference -                [ $(property-set).raw ] : -                <library>/stlport//stlport -                <stdlib>stlport -                ] -            ] ; -        return [ basic-target.generate $(property-set) ] ; -    } - -    rule construct ( name : source-targets * : property-set ) -    { -        # Deduce the name of stlport library, based on toolset and -        # debug setting. -        local raw = [ $(property-set).raw ] ; -        local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ; -        local toolset = [ feature.get-values <toolset> : $(raw) ] ; - -        if $(self.version.5) -        { -            # Version 5.x -             -            # STLport host IO streams no longer supported. So we always  -            # need libraries. 
-             -            # name: stlport(stl)?[dg]?(_static)?.M.R -            local name = stlport ; -            if [ feature.get-values <runtime-debugging> : $(raw) ] = "on" -            { -                name += stl ; -                switch $(toolset) -                { -                    case gcc* : name += g ; -                    case darwin* : name += g ; -                    case * : name += d ; -                } -            } - -            if [ feature.get-values <runtime-link> : $(raw) ] = "static" -            { -                name += _static ; -            } - -            # Starting with version 5.2.0, the STLport static libraries no longer -            #  include a version number in their name -            local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ; -            if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static"  -            { -                name += .$(self.version.5) ; -            } -			 -            name = $(name:J=) ; -             -            if [ feature.get-values <install-dependencies> : $(raw) ] = "on" -            { -                #~ Allow explicitly asking to install the STLport lib by -                #~ refering to it directly: /stlport//stlport/<install-dependencies>on -                #~ This allows for install packaging of all libs one might need for -                #~ a standalone distribution. -                import path : make : path-make ; -                local runtime-link -                    = [ feature.get-values <runtime-link> : $(raw) ] ; -                local lib-file.props -                    = [ property-set.create $(raw) <link>$(runtime-link) ] ; -                local lib-file.prefix -                    = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ; -                local lib-file.suffix -                    = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ; -                lib-file.prefix -                    ?= "" "lib" ; -                lib-file.suffix -                    ?= "" ; -                local lib-file -                    = [ GLOB $(self.libraries) [ modules.peek : PATH ] : -                        $(lib-file.prefix)$(name).$(lib-file.suffix) ] ; -                lib-file -                    = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ; -                lib-file -                    = [ $(lib-file).generate "" ] ; -                local lib-file.requirements -                    = [ targets.main-target-requirements -                        [ $(lib-file.props).raw ] <file>$(lib-file[-1]) -                        : $(self.project) ] ; -                return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ; -            } -            else -            { -                #~ Otherwise, it's just a regular usage of the library. -                return [ generators.construct -                    $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ; -            } -        } -        else if ! $(hostios) && $(toolset) != msvc -        { -            # We don't need libraries if host istreams are used. For -            # msvc, automatic library selection will be used. -             -            # name: stlport_<toolset>(_stldebug)? 
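A rough, illustrative reconstruction (not Boost.Build code) of the STLport 5.x library name assembled above from the build properties:

    def stlport5_name(version, runtime_debugging, runtime_link, toolset):
        # e.g. version '5.1.0' -> '5.1' suffix, as in the MATCH above
        major_minor = ".".join(version.split(".")[:2])
        name = "stlport"
        if runtime_debugging == "on":
            # gcc and darwin builds use the 'g' debug suffix, other toolsets 'd'
            name += "stlg" if toolset.startswith(("gcc", "darwin")) else "stld"
        if runtime_link == "static":
            name += "_static"
        # from 5.2 on, static libraries drop the version suffix entirely
        if runtime_link != "static" or major_minor in ("5.0", "5.1"):
            name += "." + major_minor
        return name

    print(stlport5_name("5.1.0", "on", "shared", "gcc"))    # stlportstlg.5.1
    print(stlport5_name("5.2.1", "off", "static", "msvc"))  # stlport_static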
-            local name = stlport ; -            name = $(name)_$(toolset) ; -            if [ feature.get-values <runtime-debugging> : $(raw) ] = "on" -            { -                name = $(name)_stldebug ; -            } - -            return [ generators.construct -                $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ; -        } -        else -        { -            return [ property-set.empty ] ; -        } -    } - -    rule compute-usage-requirements ( subvariant ) -    { -        local usage-requirements = -            <include>$(self.headers) -            <dll-path>$(self.libraries) -            <library-path>$(self.libraries) -            ; - -        local rproperties = [ $(subvariant).build-properties ] ; -        # CONSIDER: should this "if" sequence be replaced with -        # some use of 'property-map' class? -        if [ $(rproperties).get <runtime-debugging> ] = "on" -        { -            usage-requirements += -                <define>_STLP_DEBUG=1 -                <define>_STLP_DEBUG_UNINITIALIZED=1 ; -        } -        if [ $(rproperties).get <runtime-link> ] = "shared" -        { -            usage-requirements += -                <define>_STLP_USE_DYNAMIC_LIB=1 ; -        } -        if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext -        { -            usage-requirements += -                <define>_STLP_NO_EXTENSIONS=1 ; -        } -        if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios -        { -            usage-requirements += -                <define>_STLP_NO_OWN_IOSTREAMS=1 -                <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ; -        } -        if $(self.version.5) -        { -            # Version 5.x -            if [ $(rproperties).get <threading> ] = "single" -            { -                # Since STLport5 doesn't normally support single-thread -                # we force STLport5 into the multi-thread mode. Hence -                # getting what other libs provide of single-thread code -                # linking against a multi-thread lib. -                usage-requirements += -                    <define>_STLP_THREADS=1 ; -            } -        } -         -        return [ property-set.create $(usage-requirements) ] ; -    } -} - -rule stlport-target ( headers ? : libraries * : version ? ) -{ -    local project = [ project.current ] ; - -    targets.main-target-alternative -      [ new stlport-target-class  $(project) : $(headers) : $(libraries) -        : $(version) -      ] ; -} - -local .version-subfeature-defined ; - -# Initialize stlport support. -rule init ( -    version ? : -    headers   :     # Location of header files -    libraries *     # Location of libraries, lib and bin subdirs of STLport. -    ) -{ -    # FIXME: need to use common.check-init-parameters here. -    # At the moment, that rule always tries to define subfeature -    # of the 'toolset' feature, while we need to define subfeature -    # of <stdlib>stlport, so tweaks to check-init-parameters are needed. -    if $(version) -    { -        if ! $(.version-subfeature-defined) -        { -            feature.subfeature stdlib stlport : version : : propagated ; -            .version-subfeature-defined = true ; -        } -        feature.extend-subfeature stdlib stlport : version : $(version) ; -    } - -    # Declare the main target for this STLPort version. 
-    stlport-target $(headers) : $(libraries) : $(version) ; -} - diff --git a/jam-files/boost-build/tools/sun.jam b/jam-files/boost-build/tools/sun.jam deleted file mode 100644 index 0ca927d3..00000000 --- a/jam-files/boost-build/tools/sun.jam +++ /dev/null @@ -1,142 +0,0 @@ -#  Copyright (C) Christopher Currie 2003. Permission to copy, use, -#  modify, sell and distribute this software is granted provided this -#  copyright notice appears in all copies. This software is provided -#  "as is" without express or implied warranty, and with no claim as -#  to its suitability for any purpose. - -import property ; -import generators ; -import os ; -import toolset : flags ; -import feature ; -import type ; -import common ; - -feature.extend toolset : sun ; -toolset.inherit  sun : unix ; -generators.override sun.prebuilt : builtin.lib-generator ; -generators.override sun.prebuilt : builtin.prebuilt ; -generators.override sun.searched-lib-generator : searched-lib-generator ; - -feature.extend stdlib : sun-stlport ; -feature.compose <stdlib>sun-stlport -    : <cxxflags>-library=stlport4 <linkflags>-library=stlport4 -    ; - -rule init ( version ? : command * : options * )  -{ -    local condition = [  -      common.check-init-parameters sun : version $(version) ] ; -     -    command = [ common.get-invocation-command sun : CC  -        : $(command) : "/opt/SUNWspro/bin" ] ; - -    # Even if the real compiler is not found, put CC to -    # command line so that user see command line that would have being executed. -    command ?= CC ; - -    common.handle-options sun : $(condition) : $(command) : $(options) ; -     -    command_c = $(command[1--2]) $(command[-1]:B=cc) ; - -    toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ; -} - -# Declare generators -generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ; -generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ; - -# Declare flags and actions for compilation -flags sun.compile OPTIONS <debug-symbols>on : -g ; -flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ; -flags sun.compile OPTIONS <optimization>speed : -xO4  ; -flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ; -flags sun.compile OPTIONS <threading>multi : -mt ; -flags sun.compile OPTIONS <warnings>off : -erroff ; -flags sun.compile OPTIONS <warnings>on : -erroff=%none ; -flags sun.compile OPTIONS <warnings>all  : -erroff=%none ; -flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ; - -flags sun.compile.c++ OPTIONS <inlining>off : +d ; - -# The -m32 and -m64 options are supported starting -# with Sun Studio 12.  On earlier compilers, the -# 'address-model' feature is not supported and should not -# be used. Instead, use -xarch=generic64 command line -# option. -# See http://svn.boost.org/trac/boost/ticket/1186 -# for details. -flags sun OPTIONS <address-model>32 : -m32 ; -flags sun OPTIONS <address-model>64 : -m64 ; -# On sparc, there's a difference between -Kpic -# and -KPIC. The first is slightly more efficient, -# but has the limits on the size of GOT table. -# For minimal fuss on user side, we use -KPIC here. -# See http://svn.boost.org/trac/boost/ticket/1186#comment:6 -# for detailed explanation. 
-flags sun OPTIONS <link>shared : -KPIC ; - -flags sun.compile OPTIONS <cflags> ; -flags sun.compile.c++ OPTIONS <cxxflags> ; -flags sun.compile DEFINES <define> ; -flags sun.compile INCLUDES <include> ; - -actions compile.c -{ -    "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -actions compile.c++ -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" -} - -# Declare flags and actions for linking -flags sun.link OPTIONS <debug-symbols>on : -g ; -# Strip the binary when no debugging is needed -flags sun.link OPTIONS <debug-symbols>off : -s ; -flags sun.link OPTIONS <profiling>on : -xprofile=tcov ; -flags sun.link OPTIONS <threading>multi : -mt ; -flags sun.link OPTIONS <linkflags> ; -flags sun.link LINKPATH <library-path> ; -flags sun.link FINDLIBS-ST <find-static-library> ; -flags sun.link FINDLIBS-SA <find-shared-library> ; -flags sun.link LIBRARIES <library-file> ; -flags sun.link LINK-RUNTIME <runtime-link>static : static ; -flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ; -flags sun.link RPATH <dll-path> ; -# On gcc, there are separate options for dll path at runtime and -# link time. On Solaris, there's only one: -R, so we have to use -# it, even though it's bad idea. -flags sun.link RPATH <xdll-path> ; - -# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.) -flags sun.link FINDLIBS-SA : rt ; - -rule link ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -actions link bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -# Slight mods for dlls -rule link.dll ( targets * : sources * : properties * ) -{ -    SPACE on $(targets) = " " ; -} - -actions link.dll bind LIBRARIES -{ -    "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -} - -# Declare action for creating static libraries -actions piecemeal archive -{ -    "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)" -} - diff --git a/jam-files/boost-build/tools/symlink.jam b/jam-files/boost-build/tools/symlink.jam deleted file mode 100644 index b33e8260..00000000 --- a/jam-files/boost-build/tools/symlink.jam +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2003 Dave Abrahams  -# Copyright 2002, 2003 Rene Rivera  -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -# Defines the "symlink" special target. 'symlink' targets make symbolic links -# to the sources. - -import targets modules path class os feature project property-set ; - -.count = 0 ; - -feature.feature symlink-location : project-relative build-relative : incidental ; - -# The class representing "symlink" targets. -# -class symlink-targets : basic-target -{ -    import numbers modules class property project path ; -     -    rule __init__ ( -      project -        : targets * -        : sources * -    ) -    {     -        # Generate a fake name for now. Need unnamed targets eventually. 
-        local c = [ modules.peek symlink : .count ] ; -        modules.poke symlink : .count : [ numbers.increment $(c) ] ; -        local fake-name = symlink#$(c) ; -     -          basic-target.__init__ $(fake-name) : $(project) : $(sources) ; -     -        # Remember the targets to map the sources onto. Pad or truncate -        # to fit the sources given. -        self.targets = ; -        for local source in $(sources) -        { -            if $(targets) -            { -                self.targets += $(targets[1]) ; -                targets = $(targets[2-]) ; -            } -            else -            { -                self.targets += $(source) ; -            } -        } -     -        # The virtual targets corresponding to the given targets. -        self.virtual-targets = ; -    }     -     -    rule construct ( name : source-targets * : property-set ) -    { -        local i = 1 ; -        for local t in $(source-targets) -        { -            local s = $(self.targets[$(i)]) ; -            local a = [ class.new action  $(t) : symlink.ln : $(property-set) ] ; -            local vt = [ class.new file-target $(s:D=)  -              : [ $(t).type ] : $(self.project) : $(a) ] ; -             -            # Place the symlink in the directory relative to the project -            # location, instead of placing it in the build directory. -            if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative -            { -                $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ; -            } -             -            self.virtual-targets += $(vt) ; -            i = [ numbers.increment $(i) ] ; -        } -        return [ property-set.empty ] $(self.virtual-targets) ; -    } -} - -# Creates a symbolic link from a set of targets to a set of sources. -# The targets and sources map one to one. The symlinks generated are -# limited to be the ones given as the sources. That is, the targets -# are either padded or trimmed to equate to the sources. The padding -# is done with the name of the corresponding source. For example:: -# -#     symlink : one two ; -# -# Is equal to:: -# -#     symlink one two : one two ; -# -# Names for symlink are relative to the project location. They cannot -# include ".." path components. -rule symlink ( -    targets * -    : sources * -    ) -{ -    local project = [ project.current ] ; -     -    return [ targets.main-target-alternative -        [ class.new symlink-targets $(project) : $(targets) :  -          # Note: inline targets are not supported for symlink, intentionally, -          # since it's used to linking existing non-local targets. -          $(sources) ] ] ; -} - -rule ln -{ -    local os ; -    if [ modules.peek : UNIX ] { os = UNIX ; } -    else { os ?= [ os.name ] ; } -    # Remember the path to make the link relative to where the symlink is located. -    local path-to-source = [ path.relative-to -        [ path.make [ on $(<) return $(LOCATE) ] ] -        [ path.make [ on $(>) return $(LOCATE) ] ] ] ; -    if $(path-to-source) = . 
-    { -        PATH_TO_SOURCE on $(<) = "" ; -    } -    else -    { -        PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ; -    } -    ln-$(os) $(<) : $(>) ; -} - -actions ln-UNIX -{ -    ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)' -} - -# there is a way to do this; we fall back to a copy for now -actions ln-NT -{ -    echo "NT symlinks not supported yet, making copy" -    del /f /q "$(<)" 2>nul >nul -    copy "$(>)" "$(<)" $(NULL_OUT) -} - -IMPORT $(__name__) : symlink : : symlink ; diff --git a/jam-files/boost-build/tools/symlink.py b/jam-files/boost-build/tools/symlink.py deleted file mode 100644 index 6345ded6..00000000 --- a/jam-files/boost-build/tools/symlink.py +++ /dev/null @@ -1,112 +0,0 @@ -# Status: ported. -# Base revision: 64488. - -# Copyright 2003 Dave Abrahams  -# Copyright 2002, 2003 Rene Rivera  -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus  -# Distributed under the Boost Software License, Version 1.0.  -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)  - -# Defines the "symlink" special target. 'symlink' targets make symbolic links -# to the sources. - -import b2.build.feature as feature -import b2.build.targets as targets -import b2.build.property_set as property_set -import b2.build.virtual_target as virtual_target -import b2.build.targets - -from b2.manager import get_manager - -import bjam - -import os - - -feature.feature("symlink-location", ["project-relative", "build-relative"], ["incidental"]) - -class SymlinkTarget(targets.BasicTarget): - -    _count = 0 - -    def __init__(self, project, targets, sources): -          -        # Generate a fake name for now. Need unnamed targets eventually. -        fake_name = "symlink#%s" % SymlinkTarget._count -        SymlinkTarget._count = SymlinkTarget._count + 1 - -        b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources) -     -        # Remember the targets to map the sources onto. Pad or truncate -        # to fit the sources given. -        assert len(targets) <= len(sources) -        self.targets = targets[:] + sources[len(targets):] -             -        # The virtual targets corresponding to the given targets. -        self.virtual_targets = [] - -    def construct(self, name, source_targets, ps): -        i = 0 -        for t in source_targets: -            s = self.targets[i] -            a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps) -            vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a) -                         -            # Place the symlink in the directory relative to the project -            # location, instead of placing it in the build directory. -            if not ps.get('symlink-location') == "project-relative": -                vt.set_path(os.path.join(self.project().get('location'), os.path.dirname(s))) - -            vt = get_manager().virtual_targets().register(vt) -            self.virtual_targets.append(vt) -            i = i + 1 - -        return (property_set.empty(), self.virtual_targets) - -# Creates a symbolic link from a set of targets to a set of sources. -# The targets and sources map one to one. The symlinks generated are -# limited to be the ones given as the sources. That is, the targets -# are either padded or trimmed to equate to the sources. The padding -# is done with the name of the corresponding source. 
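An illustrative sketch (not Boost.Build code) of the padding and trimming just described, as also echoed by the example below:

    def map_symlinks(targets, sources):
        # trim extra target names, then pad with the source names themselves
        targets = targets[:len(sources)]
        return targets + sources[len(targets):]

    print(map_symlinks([], ["one", "two"]))       # ['one', 'two']
    print(map_symlinks(["uno"], ["one", "two"]))  # ['uno', 'two']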
For example:: -# -#     symlink : one two ; -# -# Is equal to:: -# -#     symlink one two : one two ; -# -# Names for symlink are relative to the project location. They cannot -# include ".." path components. -def symlink(targets, sources): - -    from b2.manager import get_manager -    t = get_manager().targets()    -    p = get_manager().projects().current() - -    return t.main_target_alternative( -        SymlinkTarget(p, targets,  -                      # Note: inline targets are not supported for symlink, intentionally, -                      # since it's used to linking existing non-local targets. -                      sources)) - - -def setup_ln(targets, sources, ps): - -    source_path = bjam.call("get-target-variable", sources[0], "LOCATE")[0] -    target_path = bjam.call("get-target-variable", targets[0], "LOCATE")[0] -    rel = os.path.relpath(source_path, target_path) -    if rel == ".": -        bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", "") -    else: -        bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", rel) - -if os.name == 'nt': -    ln_action = """echo "NT symlinks not supported yet, making copy" -del /f /q "$(<)" 2>nul >nul -copy "$(>)" "$(<)" $(NULL_OUT)""" -else: -    ln_action = "ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'" - -get_manager().engine().register_action("symlink.ln", ln_action, function=setup_ln) - -get_manager().projects().add_rule("symlink", symlink) diff --git a/jam-files/boost-build/tools/testing-aux.jam b/jam-files/boost-build/tools/testing-aux.jam deleted file mode 100644 index 525dafd0..00000000 --- a/jam-files/boost-build/tools/testing-aux.jam +++ /dev/null @@ -1,210 +0,0 @@ -# This module is imported by testing.py. The definitions here are -# too tricky to do in Python - -# Causes the 'target' to exist after bjam invocation if and only if all the -# dependencies were successfully built. -# -rule expect-success ( target : dependency + : requirements * ) -{ -    **passed** $(target) : $(sources) ; -} -IMPORT testing : expect-success : : testing.expect-success ; - -# Causes the 'target' to exist after bjam invocation if and only if all some of -# the dependencies were not successfully built. -# -rule expect-failure ( target : dependency + : properties * ) -{ -    local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ; -    local marker = $(dependency:G=$(grist)*fail) ; -    (failed-as-expected) $(marker) ; -    FAIL_EXPECTED $(dependency) ; -    LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ; -    RMOLD $(marker) ; -    DEPENDS $(marker) : $(dependency) ; -    DEPENDS $(target) : $(marker) ; -    **passed** $(target) : $(marker) ; -} -IMPORT testing : expect-failure : : testing.expect-failure ; - -# The rule/action combination used to report successful passing of a test. -# -rule **passed** -{ -    # Force deletion of the target, in case any dependencies failed to build. -    RMOLD $(<) ; -} - - -# Used to create test files signifying passed tests. -# -actions **passed** -{ -    echo passed > "$(<)" -} - - -# Used to create replacement object files that do not get created during tests -# that are expected to fail. -# -actions (failed-as-expected) -{ -    echo failed as expected > "$(<)" -} - -# Runs executable 'sources' and stores stdout in file 'target'. Unless -# --preserve-test-targets command line option has been specified, removes the -# executable. 
The 'target-to-remove' parameter controls what should be removed: -#   - if 'none', does not remove anything, ever -#   - if empty, removes 'source' -#   - if non-empty and not 'none', contains a list of sources to remove. -# -rule capture-output ( target : source : properties * : targets-to-remove * ) -{ -    output-file on $(target) = $(target:S=.output) ; -    LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ; - -    # The INCLUDES kill a warning about independent target... -    INCLUDES $(target) : $(target:S=.output) ; -    # but it also puts .output into dependency graph, so we must tell jam it is -    # OK if it cannot find the target or updating rule. -    NOCARE $(target:S=.output) ; - -    # This has two-fold effect. First it adds input files to the dependendency -    # graph, preventing a warning. Second, it causes input files to be bound -    # before target is created. Therefore, they are bound using SEARCH setting -    # on them and not LOCATE setting of $(target), as in other case (due to jam -    # bug). -    DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ; - -    if $(targets-to-remove) = none -    { -        targets-to-remove = ; -    } -    else if ! $(targets-to-remove) -    { -        targets-to-remove = $(source) ; -    } - -    if [ on $(target) return $(REMOVE_TEST_TARGETS) ] -    { -        TEMPORARY $(targets-to-remove) ; -        # Set a second action on target that will be executed after capture -        # output action. The 'RmTemps' rule has the 'ignore' modifier so it is -        # always considered succeeded. This is needed for 'run-fail' test. For -        # that test the target will be marked with FAIL_EXPECTED, and without -        # 'ignore' successful execution will be negated and be reported as -        # failure. With 'ignore' we do not detect a case where removing files -        # fails, but it is not likely to happen. -        RmTemps $(target) : $(targets-to-remove) ; -    } -} - - -if [ os.name ] = NT -{ -    .STATUS        = %status% ; -    .SET_STATUS    = "set status=%ERRORLEVEL%" ; -    .RUN_OUTPUT_NL = "echo." ; -    .STATUS_0      = "%status% EQU 0 (" ; -    .STATUS_NOT_0  = "%status% NEQ 0 (" ; -    .VERBOSE       = "%verbose% EQU 1 (" ; -    .ENDIF         = ")" ; -    .SHELL_SET     = "set " ; -    .CATENATE      = type ; -    .CP            = copy ; -} -else -{ -    .STATUS        = "$status" ; -    .SET_STATUS    = "status=$?" 
; -    .RUN_OUTPUT_NL = "echo" ; -    .STATUS_0      = "test $status -eq 0 ; then" ; -    .STATUS_NOT_0  = "test $status -ne 0 ; then" ; -    .VERBOSE       = "test $verbose -eq 1 ; then" ; -    .ENDIF         = "fi" ; -    .SHELL_SET     = "" ; -    .CATENATE      = cat ; -    .CP            = cp ; -} - - -.VERBOSE_TEST = 0 ; -if --verbose-test in [ modules.peek : ARGV ] -{ -    .VERBOSE_TEST = 1 ; -} - - -.RM = [ common.rm-command ] ; - - -actions capture-output bind INPUT_FILES output-file -{ -    $(PATH_SETUP) -    $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 -    $(.SET_STATUS) -    $(.RUN_OUTPUT_NL) >> "$(output-file)" -    echo EXIT STATUS: $(.STATUS) >> "$(output-file)" -    if $(.STATUS_0) -        $(.CP) "$(output-file)" "$(<)" -    $(.ENDIF) -    $(.SHELL_SET)verbose=$(.VERBOSE_TEST) -    if $(.STATUS_NOT_0) -        $(.SHELL_SET)verbose=1 -    $(.ENDIF) -    if $(.VERBOSE) -        echo ====== BEGIN OUTPUT ====== -        $(.CATENATE) "$(output-file)" -        echo ====== END OUTPUT ====== -    $(.ENDIF) -    exit $(.STATUS) -} - -IMPORT testing : capture-output : : testing.capture-output ; - - -actions quietly updated ignore piecemeal together RmTemps -{ -    $(.RM) "$(>)" -} - - -.MAKE_FILE = [ common.file-creation-command ] ; - -actions unit-test -{ -    $(PATH_SETUP) -    $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<) -} - -rule record-time ( target : source : start end user system ) -{ -    local src-string = [$(source:G=:J=",")"] " ; -    USER_TIME on $(target) += $(src-string)$(user) ; -    SYSTEM_TIME on $(target) += $(src-string)$(system) ; -} - -# Calling this rule requests that Boost Build time how long it taks to build the -# 'source' target and display the results both on the standard output and in the -# 'target' file. -# -rule time ( target : source : properties *  ) -{ -    # Set up rule for recording timing information. -    __TIMING_RULE__ on $(source) = testing.record-time $(target) ; - -    # Make sure that the source is rebuilt any time we need to retrieve that -    # information. -    REBUILDS $(target) : $(source) ; -} - - -actions time -{ -    echo user: $(USER_TIME) -    echo system: $(SYSTEM_TIME) - -    echo user: $(USER_TIME)" seconds" > "$(<)" -    echo system: $(SYSTEM_TIME)" seconds" >> "$(<)" -} diff --git a/jam-files/boost-build/tools/testing.jam b/jam-files/boost-build/tools/testing.jam deleted file mode 100644 index c42075b7..00000000 --- a/jam-files/boost-build/tools/testing.jam +++ /dev/null @@ -1,581 +0,0 @@ -# Copyright 2005 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module implements regression testing framework. It declares a number of -# main target rules which perform some action and, if the results are OK, -# creates an output file. -# -# The exact list of rules is: -# 'compile'       -- creates .test file if compilation of sources was -#                    successful. -# 'compile-fail'  -- creates .test file if compilation of sources failed. -# 'run'           -- creates .test file is running of executable produced from -#                    sources was successful. Also leaves behind .output file -#                    with the output from program run. -# 'run-fail'      -- same as above, but .test file is created if running fails. -# -# In all cases, presence of .test file is an indication that the test passed. 
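The capture-output action above records the program's combined output plus its exit status in the .output file and only copies it to the target on success. A rough Python equivalent of the POSIX branch, with a hypothetical executable and file names, and without the launcher, input-file and verbosity handling:

import shutil
import subprocess

def capture_output(exe, output_file, target):
    # Run the test binary, capturing stdout and stderr together.
    result = subprocess.run([exe], stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, text=True)
    with open(output_file, "w") as out:
        out.write(result.stdout)
        out.write("\nEXIT STATUS: %d\n" % result.returncode)
    # The target (the .test-style success marker) is produced only on success.
    if result.returncode == 0:
        shutil.copyfile(output_file, target)
    return result.returncode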
-# For more convenient reporting, you might want to use C++ Boost regression -# testing utilities (see http://www.boost.org/more/regression.html). -# -# For historical reason, a 'unit-test' rule is available which has the same -# syntax as 'exe' and behaves just like 'run'. - -# Things to do: -#  - Teach compiler_status handle Jamfile.v2. -# Notes: -#  - <no-warn> is not implemented, since it is Como-specific, and it is not -#    clear how to implement it -#  - std::locale-support is not implemented (it is used in one test). - - -import alias ; -import "class" ; -import common ; -import errors ; -import feature ; -import generators ; -import os ; -import path ; -import project ; -import property ; -import property-set ; -import regex ; -import sequence ; -import targets ; -import toolset ; -import type ; -import virtual-target ; - - -rule init ( ) -{ -} - - -# Feature controling the command used to lanch test programs. -feature.feature testing.launcher   : : free optional ; - -feature.feature test-info          : : free incidental ; -feature.feature testing.arg        : : free incidental ; -feature.feature testing.input-file : : free dependency ; - -feature.feature preserve-test-targets : on off : incidental propagated ; - -# Register target types. -type.register TEST         : test          ; -type.register COMPILE      :        : TEST ; -type.register COMPILE_FAIL :        : TEST ; -type.register RUN_OUTPUT   : run           ; -type.register RUN          :        : TEST ; -type.register RUN_FAIL     :        : TEST ; -type.register LINK_FAIL    :        : TEST ; -type.register LINK         :        : TEST ; -type.register UNIT_TEST    : passed : TEST ; - - -# Declare the rules which create main targets. While the 'type' module already -# creates rules with the same names for us, we need extra convenience: default -# name of main target, so write our own versions. - -# Helper rule. Create a test target, using basename of first source if no target -# name is explicitly passed. Remembers the created target in a global variable. -# -rule make-test ( target-type : sources + : requirements * : target-name ? ) -{ -    target-name ?= $(sources[1]:D=:S=) ; - -    # Having periods (".") in the target name is problematic because the typed -    # generator will strip the suffix and use the bare name for the file -    # targets. Even though the location-prefix averts problems most times it -    # does not prevent ambiguity issues when referring to the test targets. For -    # example when using the XML log output. So we rename the target to remove -    # the periods, and provide an alias for users. -    local real-name = [ regex.replace $(target-name) "[.]" "~" ] ; - -    local project = [ project.current ] ; -    # The <location-prefix> forces the build system for generate paths in the -    # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow -    # post-processing tools to work. -    local t = [ targets.create-typed-target [ type.type-from-rule-name -        $(target-type) ] : $(project) : $(real-name) : $(sources) : -        $(requirements) <location-prefix>$(real-name).test ] ; - -    # The alias to the real target, per period replacement above. -    if $(real-name) != $(target-name) -    { -        alias $(target-name) : $(t) ; -    } - -    # Remember the test (for --dump-tests). A good way would be to collect all -    # given a project. This has some technical problems: e.g. 
we can not call -    # this dump from a Jamfile since projects referred by 'build-project' are -    # not available until the whole Jamfile has been loaded. -    .all-tests += $(t) ; -    return $(t) ; -} - - -# Note: passing more that one cpp file here is known to fail. Passing a cpp file -# and a library target works. -# -rule compile ( sources + : requirements * : target-name ? ) -{ -    return [ make-test compile : $(sources) : $(requirements) : $(target-name) ] -        ; -} - - -rule compile-fail ( sources + : requirements * : target-name ? ) -{ -    return [ make-test compile-fail : $(sources) : $(requirements) : -        $(target-name) ] ; -} - - -rule link ( sources + : requirements * : target-name ? ) -{ -    return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ; -} - - -rule link-fail ( sources + : requirements * : target-name ? ) -{ -    return [ make-test link-fail : $(sources) : $(requirements) : $(target-name) -        ] ; -} - - -rule handle-input-files ( input-files * ) -{ -    if $(input-files[2]) -    { -        # Check that sorting made when creating property-set instance will not -        # change the ordering. -        if [ sequence.insertion-sort $(input-files) ] != $(input-files) -        { -            errors.user-error "Names of input files must be sorted alphabetically" -                : "due to internal limitations" ; -        } -    } -    return <testing.input-file>$(input-files) ; -} - - -rule run ( sources + : args * : input-files * : requirements * : target-name ? : -    default-build * ) -{ -    requirements += <testing.arg>$(args:J=" ") ; -    requirements += [ handle-input-files $(input-files) ] ; -    return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ; -} - - -rule run-fail ( sources + : args * : input-files * : requirements * : -    target-name ? : default-build * ) -{ -    requirements += <testing.arg>$(args:J=" ") ; -    requirements += [ handle-input-files $(input-files) ] ; -    return [ make-test run-fail : $(sources) : $(requirements) : $(target-name) -        ] ; -} - - -# Use 'test-suite' as a synonym for 'alias', for backward compatibility. -IMPORT : alias : : test-suite ; - - -# For all main targets in 'project-module', which are typed targets with type -# derived from 'TEST', produce some interesting information. -# -rule dump-tests -{ -    for local t in $(.all-tests) -    { -        dump-test $(t) ; -    } -} - - -# Given a project location in normalized form (slashes are forward), compute the -# name of the Boost library. -# -local rule get-library-name ( path ) -{ -    # Path is in normalized form, so all slashes are forward. -    local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ; -    local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ; -    local match3 = [ MATCH (/status$) : $(path) ] ; - -    if $(match1) { return $(match1[2]) ; } -    else if $(match2) { return $(match2[2]) ; } -    else if $(match3) { return "" ; } -    else if --dump-tests in [ modules.peek : ARGV ] -    { -        # The 'run' rule and others might be used outside boost. In that case, -        # just return the path, since the 'library name' makes no sense. -        return $(path) ; -    } -} - - -# Was an XML dump requested? -.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ; - - -# Takes a target (instance of 'basic-target') and prints -#   - its type -#   - its name -#   - comments specified via the <test-info> property -#   - relative location of all source from the project root. 
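The get-library-name rule above maps a project location to a Boost library name by matching /tools/ or /libs/ path segments. A standalone illustration of the same mapping in Python (re.search keeps the patterns unanchored, as with Jam's MATCH; the paths are made up):

import re

_PATTERNS = (
    re.compile(r"/(tools|libs)/(.*)/(test|example)"),
    re.compile(r"/(tools|libs)/(.*)$"),
    re.compile(r"(/status$)"),
)

def library_name(path):
    # Returns the library name, "" for the status directory, or None.
    for index, pattern in enumerate(_PATTERNS):
        found = pattern.search(path)
        if found:
            return "" if index == 2 else found.group(2)
    return None

print(library_name("/home/user/boost/libs/regex/test"))  # regex
print(library_name("/home/user/boost/tools/build"))      # build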
-# -rule dump-test ( target ) -{ -    local type = [ $(target).type ] ; -    local name = [ $(target).name ] ; -    local project = [ $(target).project ] ; - -    local project-root = [ $(project).get project-root ] ; -    local library = [ get-library-name [ path.root [ $(project).get location ] -        [ path.pwd ] ] ] ; -    if $(library) -    { -        name = $(library)/$(name) ; -    } - -    local sources = [ $(target).sources ] ; -    local source-files ; -    for local s in $(sources) -    { -        if [ class.is-a $(s) : file-reference ] -        { -            local location = [ path.root [ path.root [ $(s).name ] -                [ $(s).location ] ] [ path.pwd ] ] ; - -            source-files += [ path.relative-to [ path.root $(project-root) -                [ path.pwd ] ] $(location) ] ; -        } -    } - -    local target-name = [ $(project).get location ] // [ $(target).name ] .test -        ; -    target-name = $(target-name:J=) ; - -    local r = [ $(target).requirements ] ; -    # Extract values of the <test-info> feature. -    local test-info = [ $(r).get <test-info> ] ; - -    # If the user requested XML output on the command-line, add the test info to -    # that XML file rather than dumping them to stdout. -    if $(.out-xml) -    { -        local nl = " -" ; -        .contents on $(.out-xml) += -            "$(nl)  <test type=\"$(type)\" name=\"$(name)\">" -            "$(nl)    <target><![CDATA[$(target-name)]]></target>" -            "$(nl)    <info><![CDATA[$(test-info)]]></info>" -            "$(nl)    <source><![CDATA[$(source-files)]]></source>" -            "$(nl)  </test>" -            ; -    } -    else -    { -        # Format them into a single string of quoted strings. -        test-info = \"$(test-info:J=\"\ \")\" ; - -        ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":" -            \"$(source-files)\" ; -    } -} - - -# Register generators. Depending on target type, either 'expect-success' or -# 'expect-failure' rule will be used. -generators.register-standard testing.expect-success : OBJ        : COMPILE      ; -generators.register-standard testing.expect-failure : OBJ        : COMPILE_FAIL ; -generators.register-standard testing.expect-success : RUN_OUTPUT : RUN          ; -generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL     ; -generators.register-standard testing.expect-failure : EXE        : LINK_FAIL    ; -generators.register-standard testing.expect-success : EXE        : LINK         ; - -# Generator which runs an EXE and captures output. -generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ; - -# Generator which creates a target if sources run successfully. Differs from RUN -# in that run output is not captured. The reason why it exists is that the 'run' -# rule is much better for automated testing, but is not user-friendly (see -# http://article.gmane.org/gmane.comp.lib.boost.build/6353). -generators.register-standard testing.unit-test : EXE : UNIT_TEST ; - - -# The action rules called by generators. - -# Causes the 'target' to exist after bjam invocation if and only if all the -# dependencies were successfully built. -# -rule expect-success ( target : dependency + : requirements * ) -{ -    **passed** $(target) : $(sources) ; -} - - -# Causes the 'target' to exist after bjam invocation if and only if all some of -# the dependencies were not successfully built. 
-# -rule expect-failure ( target : dependency + : properties * ) -{ -    local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ; -    local marker = $(dependency:G=$(grist)*fail) ; -    (failed-as-expected) $(marker) ; -    FAIL_EXPECTED $(dependency) ; -    LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ; -    RMOLD $(marker) ; -    DEPENDS $(marker) : $(dependency) ; -    DEPENDS $(target) : $(marker) ; -    **passed** $(target) : $(marker) ; -} - - -# The rule/action combination used to report successful passing of a test. -# -rule **passed** -{ -    # Dump all the tests, if needed. We do it here, since dump should happen -    # only after all Jamfiles have been read, and there is no such place -    # currently defined (but there should be). -    if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] ) -    { -        .dumped-tests = true ; -        dump-tests ; -    } - -    # Force deletion of the target, in case any dependencies failed to build. -    RMOLD $(<) ; -} - - -# Used to create test files signifying passed tests. -# -actions **passed** -{ -    echo passed > "$(<)" -} - - -# Used to create replacement object files that do not get created during tests -# that are expected to fail. -# -actions (failed-as-expected) -{ -    echo failed as expected > "$(<)" -} - - -rule run-path-setup ( target : source : properties * ) -{ -    # For testing, we need to make sure that all dynamic libraries needed by the -    # test are found. So, we collect all paths from dependency libraries (via -    # xdll-path property) and add whatever explicit dll-path user has specified. -    # The resulting paths are added to the environment on each test invocation. -    local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ; -    dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ; -    dll-paths += [ on $(source) return $(RUN_PATH) ] ; -    dll-paths = [ sequence.unique $(dll-paths) ] ; -    if $(dll-paths) -    { -        dll-paths = [ sequence.transform path.native : $(dll-paths) ] ; -        PATH_SETUP on $(target) = [ common.prepend-path-variable-command -            [ os.shared-library-path-variable ] : $(dll-paths) ] ; -    } -} - - -local argv = [ modules.peek : ARGV ] ; - -toolset.flags testing.capture-output ARGS <testing.arg> ; -toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ; -toolset.flags testing.capture-output LAUNCHER <testing.launcher> ; - - -# Runs executable 'sources' and stores stdout in file 'target'. Unless -# --preserve-test-targets command line option has been specified, removes the -# executable. The 'target-to-remove' parameter controls what should be removed: -#   - if 'none', does not remove anything, ever -#   - if empty, removes 'source' -#   - if non-empty and not 'none', contains a list of sources to remove. -# -rule capture-output ( target : source : properties * : targets-to-remove * ) -{ -    output-file on $(target) = $(target:S=.output) ; -    LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ; - -    # The INCLUDES kill a warning about independent target... -    INCLUDES $(target) : $(target:S=.output) ; -    # but it also puts .output into dependency graph, so we must tell jam it is -    # OK if it cannot find the target or updating rule. -    NOCARE $(target:S=.output) ; - -    # This has two-fold effect. First it adds input files to the dependendency -    # graph, preventing a warning. Second, it causes input files to be bound -    # before target is created. 
Therefore, they are bound using SEARCH setting -    # on them and not LOCATE setting of $(target), as in other case (due to jam -    # bug). -    DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ; - -    if $(targets-to-remove) = none -    { -        targets-to-remove = ; -    } -    else if ! $(targets-to-remove) -    { -        targets-to-remove = $(source) ; -    } - -    run-path-setup $(target) : $(source) : $(properties) ; - -    if [ feature.get-values preserve-test-targets : $(properties) ] = off -    { -        TEMPORARY $(targets-to-remove) ; -        # Set a second action on target that will be executed after capture -        # output action. The 'RmTemps' rule has the 'ignore' modifier so it is -        # always considered succeeded. This is needed for 'run-fail' test. For -        # that test the target will be marked with FAIL_EXPECTED, and without -        # 'ignore' successful execution will be negated and be reported as -        # failure. With 'ignore' we do not detect a case where removing files -        # fails, but it is not likely to happen. -        RmTemps $(target) : $(targets-to-remove) ; -    } -} - - -if [ os.name ] = NT -{ -    .STATUS        = %status% ; -    .SET_STATUS    = "set status=%ERRORLEVEL%" ; -    .RUN_OUTPUT_NL = "echo." ; -    .STATUS_0      = "%status% EQU 0 (" ; -    .STATUS_NOT_0  = "%status% NEQ 0 (" ; -    .VERBOSE       = "%verbose% EQU 1 (" ; -    .ENDIF         = ")" ; -    .SHELL_SET     = "set " ; -    .CATENATE      = type ; -    .CP            = copy ; -} -else -{ -    .STATUS        = "$status" ; -    .SET_STATUS    = "status=$?" ; -    .RUN_OUTPUT_NL = "echo" ; -    .STATUS_0      = "test $status -eq 0 ; then" ; -    .STATUS_NOT_0  = "test $status -ne 0 ; then" ; -    .VERBOSE       = "test $verbose -eq 1 ; then" ; -    .ENDIF         = "fi" ; -    .SHELL_SET     = "" ; -    .CATENATE      = cat ; -    .CP            = cp ; -} - - -.VERBOSE_TEST = 0 ; -if --verbose-test in [ modules.peek : ARGV ] -{ -    .VERBOSE_TEST = 1 ; -} - - -.RM = [ common.rm-command ] ; - - -actions capture-output bind INPUT_FILES output-file -{ -    $(PATH_SETUP) -    $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 -    $(.SET_STATUS) -    $(.RUN_OUTPUT_NL) >> "$(output-file)" -    echo EXIT STATUS: $(.STATUS) >> "$(output-file)" -    if $(.STATUS_0) -        $(.CP) "$(output-file)" "$(<)" -    $(.ENDIF) -    $(.SHELL_SET)verbose=$(.VERBOSE_TEST) -    if $(.STATUS_NOT_0) -        $(.SHELL_SET)verbose=1 -    $(.ENDIF) -    if $(.VERBOSE) -        echo ====== BEGIN OUTPUT ====== -        $(.CATENATE) "$(output-file)" -        echo ====== END OUTPUT ====== -    $(.ENDIF) -    exit $(.STATUS) -} - - -actions quietly updated ignore piecemeal together RmTemps -{ -    $(.RM) "$(>)" -} - - -.MAKE_FILE = [ common.file-creation-command ] ; - -toolset.flags testing.unit-test LAUNCHER <testing.launcher> ; -toolset.flags testing.unit-test ARGS <testing.arg> ; - - -rule unit-test ( target : source : properties * ) -{ -    run-path-setup $(target) : $(source) : $(properties) ; -} - - -actions unit-test -{ -    $(PATH_SETUP) -    $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<) -} - - -IMPORT $(__name__) : compile compile-fail run run-fail link link-fail -    : : compile compile-fail run run-fail link link-fail ; - - -type.register TIME : time ; -generators.register-standard testing.time : : TIME ; - - -rule record-time ( target : source : start end user system ) -{ -    local src-string = [$(source:G=:J=",")"] " ; -    USER_TIME on $(target) += 
$(src-string)$(user) ; -    SYSTEM_TIME on $(target) += $(src-string)$(system) ; -} - - -IMPORT testing : record-time : : testing.record-time ; - - -# Calling this rule requests that Boost Build time how long it taks to build the -# 'source' target and display the results both on the standard output and in the -# 'target' file. -# -rule time ( target : source : properties *  ) -{ -    # Set up rule for recording timing information. -    __TIMING_RULE__ on $(source) = testing.record-time $(target) ; - -    # Make sure that the source is rebuilt any time we need to retrieve that -    # information. -    REBUILDS $(target) : $(source) ; -} - - -actions time -{ -    echo user: $(USER_TIME) -    echo system: $(SYSTEM_TIME) - -    echo user: $(USER_TIME)" seconds" > "$(<)" -    echo system: $(SYSTEM_TIME)" seconds" >> "$(<)" -} diff --git a/jam-files/boost-build/tools/testing.py b/jam-files/boost-build/tools/testing.py deleted file mode 100644 index 3b53500c..00000000 --- a/jam-files/boost-build/tools/testing.py +++ /dev/null @@ -1,342 +0,0 @@ -# Status: ported, except for --out-xml -# Base revision: 64488 -# -# Copyright 2005 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# This module implements regression testing framework. It declares a number of -# main target rules which perform some action and, if the results are OK, -# creates an output file. -# -# The exact list of rules is: -# 'compile'       -- creates .test file if compilation of sources was -#                    successful. -# 'compile-fail'  -- creates .test file if compilation of sources failed. -# 'run'           -- creates .test file is running of executable produced from -#                    sources was successful. Also leaves behind .output file -#                    with the output from program run. -# 'run-fail'      -- same as above, but .test file is created if running fails. -# -# In all cases, presence of .test file is an indication that the test passed. -# For more convenient reporting, you might want to use C++ Boost regression -# testing utilities (see http://www.boost.org/more/regression.html). -# -# For historical reason, a 'unit-test' rule is available which has the same -# syntax as 'exe' and behaves just like 'run'. - -# Things to do: -#  - Teach compiler_status handle Jamfile.v2. -# Notes: -#  - <no-warn> is not implemented, since it is Como-specific, and it is not -#    clear how to implement it -#  - std::locale-support is not implemented (it is used in one test). - -import b2.build.feature as feature -import b2.build.type as type -import b2.build.targets as targets -import b2.build.generators as generators -import b2.build.toolset as toolset -import b2.tools.common as common -import b2.util.option as option -import b2.build_system as build_system - - - -from b2.manager import get_manager -from b2.util import stem, bjam_signature -from b2.util.sequence import unique - -import bjam - -import re -import os.path -import sys - -def init(): -    pass - -# Feature controling the command used to lanch test programs. 
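As the module header above repeats, a test counts as passed exactly when its .test file exists after the build. A small, purely illustrative helper (not part of Boost.Build) that summarizes a build tree by scanning for those markers and for the captured .output files:

import os

def summarize(build_dir):
    # build_dir is an assumed location such as "bin"; the layout underneath
    # is whatever <location-prefix> produced.
    passed, captured = [], []
    for root, _dirs, files in os.walk(build_dir):
        for name in files:
            path = os.path.join(root, name)
            if name.endswith(".test"):
                passed.append(path)
            elif name.endswith(".output"):
                captured.append(path)
    return passed, captured

passed, captured = summarize("bin")
print("%d passed, %d output captures" % (len(passed), len(captured)))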
-feature.feature("testing.launcher", [], ["free", "optional"]) - -feature.feature("test-info", [], ["free", "incidental"]) -feature.feature("testing.arg", [], ["free", "incidental"]) -feature.feature("testing.input-file", [], ["free", "dependency"]) - -feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"]) - -# Register target types. -type.register("TEST", ["test"]) -type.register("COMPILE", [], "TEST") -type.register("COMPILE_FAIL", [], "TEST") - -type.register("RUN_OUTPUT", ["run"]) -type.register("RUN", [], "TEST") -type.register("RUN_FAIL", [], "TEST") - -type.register("LINK", [], "TEST") -type.register("LINK_FAIL", [], "TEST") -type.register("UNIT_TEST", ["passed"], "TEST") - -__all_tests = [] - -# Declare the rules which create main targets. While the 'type' module already -# creates rules with the same names for us, we need extra convenience: default -# name of main target, so write our own versions. - -# Helper rule. Create a test target, using basename of first source if no target -# name is explicitly passed. Remembers the created target in a global variable. -def make_test(target_type, sources, requirements, target_name=None): - -    if not target_name: -        target_name = stem(os.path.basename(sources[0])) - -    # Having periods (".") in the target name is problematic because the typed -    # generator will strip the suffix and use the bare name for the file -    # targets. Even though the location-prefix averts problems most times it -    # does not prevent ambiguity issues when referring to the test targets. For -    # example when using the XML log output. So we rename the target to remove -    # the periods, and provide an alias for users. -    real_name = target_name.replace(".", "~") - -    project = get_manager().projects().current() -    # The <location-prefix> forces the build system for generate paths in the -    # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow -    # post-processing tools to work. -    t = get_manager().targets().create_typed_target( -        type.type_from_rule_name(target_type), project, real_name, sources, -        requirements + ["<location-prefix>" + real_name + ".test"], [], []) - -    # The alias to the real target, per period replacement above. -    if real_name != target_name: -        get_manager().projects().project_rules().all_names_["alias"]( -            target_name, [t]) - -    # Remember the test (for --dump-tests). A good way would be to collect all -    # given a project. This has some technical problems: e.g. we can not call -    # this dump from a Jamfile since projects referred by 'build-project' are -    # not available until the whole Jamfile has been loaded. -    __all_tests.append(t) -    return t - - -# Note: passing more that one cpp file here is known to fail. Passing a cpp file -# and a library target works. 
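make_test above derives the default target name from the first source, replaces periods with tildes so the typed generator does not treat them as a suffix, and adds a <location-prefix> so results land under '<name>.test/<toolset>/<variant>'. A standalone sketch of just the naming step (os.path.splitext approximates the stem() helper; the source name is hypothetical):

import os

def test_target_names(first_source):
    target_name = os.path.splitext(os.path.basename(first_source))[0]
    real_name = target_name.replace(".", "~")       # period-free file name
    requirement = "<location-prefix>" + real_name + ".test"
    return target_name, real_name, requirement

print(test_target_names("array1.test.cpp"))
# ('array1.test', 'array1~test', '<location-prefix>array1~test.test')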
-# -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def compile(sources, requirements, target_name=None): -    return make_test("compile", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def compile_fail(sources, requirements, target_name=None): -    return make_test("compile-fail", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def link(sources, requirements, target_name=None): -    return make_test("link", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) -def link_fail(sources, requirements, target_name=None): -    return make_test("link-fail", sources, requirements, target_name) - -def handle_input_files(input_files): -    if len(input_files) > 1: -        # Check that sorting made when creating property-set instance will not -        # change the ordering. -        if sorted(input_files) != input_files: -            get_manager().errors()("Names of input files must be sorted alphabetically\n" + -                                   "due to internal limitations") -    return ["<testing.input-file>" + f for f in input_files] - -@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], -                 ["requirements", "*"], ["target_name", "?"], -                 ["default_build", "*"]))                  -def run(sources, args, input_files, requirements, target_name=None, default_build=[]): -    if args: -        requirements.append("<testing.arg>" + " ".join(args)) -    requirements.extend(handle_input_files(input_files)) -    return make_test("run", sources, requirements, target_name) - -@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], -                 ["requirements", "*"], ["target_name", "?"], -                 ["default_build", "*"]))                  -def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]): -    if args: -        requirements.append("<testing.arg>" + " ".join(args)) -    requirements.extend(handle_input_files(input_files)) -    return make_test("run-fail", sources, requirements, target_name) - -# Register all the rules -for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]: -    get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_"))) - -# Use 'test-suite' as a synonym for 'alias', for backward compatibility. -from b2.build.alias import alias -get_manager().projects().add_rule("test-suite", alias) - -# For all main targets in 'project-module', which are typed targets with type -# derived from 'TEST', produce some interesting information. -# -def dump_tests(): -    for t in __all_tests: -        dump_test(t) - -# Given a project location in normalized form (slashes are forward), compute the -# name of the Boost library. -# -__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)") -__ln2 = re.compile("/(tools|libs)/(.*)$") -__ln3 = re.compile("(/status$)") -def get_library_name(path): -     -    path = path.replace("\\", "/") -    match1 = __ln1.match(path) -    match2 = __ln2.match(path) -    match3 = __ln3.match(path) - -    if match1: -        return match1.group(2) -    elif match2: -        return match2.group(2) -    elif match3: -        return "" -    elif option.get("dump-tests", False, True): -        # The 'run' rule and others might be used outside boost. 
In that case, -        # just return the path, since the 'library name' makes no sense. -        return path - -# Was an XML dump requested? -__out_xml = option.get("out-xml", False, True) - -# Takes a target (instance of 'basic-target') and prints -#   - its type -#   - its name -#   - comments specified via the <test-info> property -#   - relative location of all source from the project root. -# -def dump_test(target): -    type = target.type() -    name = target.name() -    project = target.project() - -    project_root = project.get('project-root') -    library = get_library_name(os.path.abspath(project.get('location'))) -    if library: -        name = library + "/" + name - -    sources = target.sources() -    source_files = [] -    for s in sources: -        if isinstance(s, targets.FileReference): -            location = os.path.abspath(os.path.join(s.location(), s.name())) -            source_files.append(os.path.relpath(location, os.path.abspath(project_root))) - -    target_name = project.get('location') + "//" + target.name() + ".test" - -    test_info = target.requirements().get('test-info') -    test_info = " ".join('"' + ti + '"' for ti in test_info) - -    # If the user requested XML output on the command-line, add the test info to -    # that XML file rather than dumping them to stdout. -    #if $(.out-xml) -    #{ -#        local nl = " -#" ; -#        .contents on $(.out-xml) += -#            "$(nl)  <test type=\"$(type)\" name=\"$(name)\">" -#            "$(nl)    <target><![CDATA[$(target-name)]]></target>" -#            "$(nl)    <info><![CDATA[$(test-info)]]></info>" -#            "$(nl)    <source><![CDATA[$(source-files)]]></source>" -#            "$(nl)  </test>" -#            ; -#    } -#    else - -    source_files = " ".join('"' + s + '"' for s in source_files) -    if test_info: -        print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files) -    else: -        print 'boost-test(%s) "%s" : %s' % (type, name, source_files) - -# Register generators. Depending on target type, either 'expect-success' or -# 'expect-failure' rule will be used. -generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"]) -generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"]) -generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"]) -generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"]) -generators.register_standard("testing.expect-success", ["EXE"], ["LINK"]) -generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"]) - -# Generator which runs an EXE and captures output. -generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"]) - -# Generator which creates a target if sources run successfully. Differs from RUN -# in that run output is not captured. The reason why it exists is that the 'run' -# rule is much better for automated testing, but is not user-friendly (see -# http://article.gmane.org/gmane.comp.lib.boost.build/6353). -generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"]) - -# FIXME: if those calls are after bjam.call, then bjam will crash -# when toolset.flags calls bjam.caller. 
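dump_test above emits one line per test of the form boost-test(TYPE) "name" [info] : "sources", with the bracketed part present only when <test-info> was set. A sketch of reading such lines back; the concrete line below is made up, not produced by a real build:

import re

_LINE = re.compile(r'^boost-test\((\w+)\) "([^"]+)"(?: \[(.*)\])? : (.*)$')

sample = 'boost-test(RUN) "regex/captures_test" : "libs/regex/test/captures_test.cpp"'
parsed = _LINE.match(sample)
if parsed:
    test_type, name, info, sources = parsed.groups()
    print(test_type)                               # RUN
    print(name)                                    # regex/captures_test
    print(re.findall(r'"([^"]+)"', sources))       # the quoted source files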
-toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"]) -toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"]) -toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"]) - -toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"]) -toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"]) - -type.register("TIME", ["time"]) -generators.register_standard("testing.time", [], ["TIME"]) - - -# The following code sets up actions for this module. It's pretty convoluted, -# but the basic points is that we most of actions are defined by Jam code -# contained in testing-aux.jam, which we load into Jam module named 'testing' - -def run_path_setup(target, sources, ps): - -    # For testing, we need to make sure that all dynamic libraries needed by the -    # test are found. So, we collect all paths from dependency libraries (via -    # xdll-path property) and add whatever explicit dll-path user has specified. -    # The resulting paths are added to the environment on each test invocation. -    dll_paths = ps.get('dll-path') -    dll_paths.extend(ps.get('xdll-path')) -    dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH")) -    dll_paths = unique(dll_paths) -    if dll_paths: -        bjam.call("set-target-variable", target, "PATH_SETUP", -                  common.prepend_path_variable_command( -                     common.shared_library_path_variable(), dll_paths)) - -def capture_output_setup(target, sources, ps): -    run_path_setup(target, sources, ps) - -    if ps.get('preserve-test-targets') == ['off']: -        bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1") - -get_manager().engine().register_bjam_action("testing.capture-output", -                                            capture_output_setup) - - -path = os.path.dirname(get_manager().projects().loaded_tool_module_path_[__name__]) -import b2.util.os_j -get_manager().projects().project_rules()._import_rule("testing", "os.name", -                                                      b2.util.os_j.name) -import b2.tools.common -get_manager().projects().project_rules()._import_rule("testing", "common.rm-command", -                                                      b2.tools.common.rm_command) -get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command", -                                                      b2.tools.common.file_creation_command) - -bjam.call("load", "testing", os.path.join(path, "testing-aux.jam")) - - -for name in ["expect-success", "expect-failure", "time"]: -    get_manager().engine().register_bjam_action("testing." + name) - -get_manager().engine().register_bjam_action("testing.unit-test", -                                            run_path_setup) - -if option.get("dump-tests", False, True): -    build_system.add_pre_build_hook(dump_tests) diff --git a/jam-files/boost-build/tools/types/__init__.py b/jam-files/boost-build/tools/types/__init__.py deleted file mode 100644 index f972b714..00000000 --- a/jam-files/boost-build/tools/types/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -__all__ = [ -    'asm', -    'cpp', -    'exe', -    'html', -    'lib', -    'obj', -    'rsp', -] - -def register_all (): -    for i in __all__: -        m = __import__ (__name__ + '.' + i) -        reg = i + '.register ()' -        #exec (reg) - -# TODO: (PF) I thought these would be imported automatically. Anyone knows why they aren't? 
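register_all above imports each type module with __import__ and leaves the per-module register() call commented out; the individual modules (asm.py, cpp.py, and so on) register themselves at import time anyway. A sketch of doing both steps explicitly with importlib, shown only as an alternative, not as the module's actual behavior:

import importlib

def register_all(package, names):
    # Import package.<name> and call its register() if it defines one.
    for name in names:
        module = importlib.import_module(package + "." + name)
        register = getattr(module, "register", None)
        if callable(register):
            register()

# Hypothetical usage mirroring the __all__ list above:
# register_all("b2.tools.types", ["asm", "cpp", "exe", "html", "lib", "obj", "rsp"])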
-register_all () diff --git a/jam-files/boost-build/tools/types/asm.jam b/jam-files/boost-build/tools/types/asm.jam deleted file mode 100644 index a340db36..00000000 --- a/jam-files/boost-build/tools/types/asm.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright Craig Rodrigues 2005. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -type ASM : s S asm ; diff --git a/jam-files/boost-build/tools/types/asm.py b/jam-files/boost-build/tools/types/asm.py deleted file mode 100644 index b4e1c30e..00000000 --- a/jam-files/boost-build/tools/types/asm.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright Craig Rodrigues 2005. -# Copyright (c) 2008 Steven Watanabe -# -# Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register(): -    type.register_type('ASM', ['s', 'S', 'asm']) - -register() diff --git a/jam-files/boost-build/tools/types/cpp.jam b/jam-files/boost-build/tools/types/cpp.jam deleted file mode 100644 index 3159cdd7..00000000 --- a/jam-files/boost-build/tools/types/cpp.jam +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright David Abrahams 2004. -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Copyright 2010 Rene Rivera -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -import type ; -import scanner ; - -class c-scanner : scanner -{ -    import path ; -    import regex ; -    import scanner ; -    import sequence ; -    import virtual-target ; - -    rule __init__ ( includes * ) -    { -        scanner.__init__ ; - -        for local i in $(includes) -        { -            self.includes += [ sequence.transform path.native -                                : [ regex.split $(i:G=) "&&" ] ] ; -        } -    } - -    rule pattern ( ) -    { -        return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ; -    } - -    rule process ( target : matches * : binding ) -    { -        local angle  = [ regex.transform $(matches) : "<(.*)>"   ] ; -        angle = [ sequence.transform path.native : $(angle) ] ; -        local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ; -        quoted = [ sequence.transform path.native : $(quoted) ] ; - -        # CONSIDER: the new scoping rule seem to defeat "on target" variables. -        local g = [ on $(target) return $(HDRGRIST) ] ; -        local b = [ NORMALIZE_PATH $(binding:D) ] ; - -        # Attach binding of including file to included targets. When a target is -        # directly created from virtual target this extra information is -        # unnecessary. But in other cases, it allows us to distinguish between -        # two headers of the same name included from different places. We do not -        # need this extra information for angle includes, since they should not -        # depend on including file (we can not get literal "." in include path). -        local g2 = $(g)"#"$(b) ; - -        angle = $(angle:G=$(g)) ; -        quoted = $(quoted:G=$(g2)) ; - -        local all = $(angle) $(quoted) ; - -        INCLUDES $(target) : $(all) ; -        NOCARE $(all) ; -        SEARCH on $(angle) = $(self.includes:G=) ; -        SEARCH on $(quoted) = $(b) $(self.includes:G=) ; - -        # Just propagate the current scanner to includes in hope that includes -        # do not change scanners. 
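The c-scanner above extracts both <...> and "..." includes with a single pattern and then treats them differently: angle includes are searched only on the <include> paths, while quoted includes are first searched next to the including file. A standalone Python sketch of that classification, applied to a made-up source snippet:

import re

INCLUDE = re.compile(r'#[ \t]*include[ ]*(<(.*)>|"(.*)")')   # same pattern as above

source = '#include <vector>\n#include "util/config.hpp"\n'   # made-up snippet

angle, quoted = [], []
for found in INCLUDE.finditer(source):
    if found.group(2) is not None:
        angle.append(found.group(2))     # search only the <include> directories
    else:
        quoted.append(found.group(3))    # search the including file's directory first
print(angle)    # ['vector']
print(quoted)   # ['util/config.hpp']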
-        scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ; - -        ISFILE $(angle) $(quoted) ; -    } -} - -scanner.register c-scanner : include ; - -type.register CPP : cpp cxx cc ; -type.register H   : h ; -type.register HPP : hpp : H ; -type.register C   : c ; - -# It most cases where a CPP file or a H file is a source of some action, we -# should rebuild the result if any of files included by CPP/H are changed. One -# case when this is not needed is installation, which is handled specifically. -type.set-scanner CPP : c-scanner ; -type.set-scanner C   : c-scanner ; -# One case where scanning of H/HPP files is necessary is PCH generation -- if -# any header included by HPP being precompiled changes, we need to recompile the -# header. -type.set-scanner H   : c-scanner ; -type.set-scanner HPP : c-scanner ; diff --git a/jam-files/boost-build/tools/types/cpp.py b/jam-files/boost-build/tools/types/cpp.py deleted file mode 100644 index 7b56111c..00000000 --- a/jam-files/boost-build/tools/types/cpp.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): -    type.register_type ('CPP', ['cpp', 'cxx', 'cc']) - -register () diff --git a/jam-files/boost-build/tools/types/exe.jam b/jam-files/boost-build/tools/types/exe.jam deleted file mode 100644 index 47109513..00000000 --- a/jam-files/boost-build/tools/types/exe.jam +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import type ; - -type.register EXE ; -type.set-generated-target-suffix EXE : <target-os>windows : "exe" ; -type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ; diff --git a/jam-files/boost-build/tools/types/exe.py b/jam-files/boost-build/tools/types/exe.py deleted file mode 100644 index a4935e24..00000000 --- a/jam-files/boost-build/tools/types/exe.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): -    type.register_type ('EXE', ['exe'], None, ['NT', 'CYGWIN']) -    type.register_type ('EXE', [], None, []) - -register () diff --git a/jam-files/boost-build/tools/types/html.jam b/jam-files/boost-build/tools/types/html.jam deleted file mode 100644 index 5cd337d0..00000000 --- a/jam-files/boost-build/tools/types/html.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -type HTML : html ; diff --git a/jam-files/boost-build/tools/types/html.py b/jam-files/boost-build/tools/types/html.py deleted file mode 100644 index 63af4d90..00000000 --- a/jam-files/boost-build/tools/types/html.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. 
(See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): -    type.register_type ('HTML', ['html']) - -register () diff --git a/jam-files/boost-build/tools/types/lib.jam b/jam-files/boost-build/tools/types/lib.jam deleted file mode 100644 index 854ab8fd..00000000 --- a/jam-files/boost-build/tools/types/lib.jam +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import type ; # for set-generated-target-suffix -import os ; - -# The following naming scheme is used for libraries. -# -# On *nix: -#     libxxx.a       static library -#     libxxx.so      shared library -# -# On windows (msvc) -#     libxxx.lib     static library -#     xxx.dll        DLL -#     xxx.lib        import library -# -# On windows (mingw): -#     libxxx.a       static library -#     libxxx.dll     DLL -#     libxxx.dll.a   import library -# -# On cygwin i.e. <target-os>cygwin -#     libxxx.a       static library -#     cygxxx.dll     DLL -#     libxxx.dll.a   import library -# - -type.register LIB ; - -# FIXME: should not register both extensions on both platforms. -type.register STATIC_LIB : a lib : LIB ; - -# The 'lib' prefix is used everywhere -type.set-generated-target-prefix STATIC_LIB : : lib ; - -# Use '.lib' suffix for windows -type.set-generated-target-suffix STATIC_LIB : <target-os>windows : lib ; - -# Except with gcc. -type.set-generated-target-suffix STATIC_LIB : <toolset>gcc <target-os>windows : a ; - -# Use xxx.lib for import libs -type IMPORT_LIB : : STATIC_LIB ; -type.set-generated-target-prefix IMPORT_LIB : : "" ; -type.set-generated-target-suffix IMPORT_LIB : : lib ; - -# Except with gcc (mingw or cygwin), where use libxxx.dll.a -type.set-generated-target-prefix IMPORT_LIB : <toolset>gcc : lib ; -type.set-generated-target-suffix IMPORT_LIB : <toolset>gcc : dll.a ; - -type.register SHARED_LIB : so dll dylib : LIB ; - -# Both mingw and cygwin use libxxx.dll naming scheme. -# On Linux, use "lib" prefix -type.set-generated-target-prefix SHARED_LIB : : lib ; -# But don't use it on windows -type.set-generated-target-prefix SHARED_LIB : <target-os>windows : "" ; -# But use it again on mingw -type.set-generated-target-prefix SHARED_LIB : <toolset>gcc <target-os>windows : lib ; -# And use 'cyg' on cygwin -type.set-generated-target-prefix SHARED_LIB : <target-os>cygwin : cyg ; - - -type.set-generated-target-suffix SHARED_LIB : <target-os>windows : dll ; -type.set-generated-target-suffix SHARED_LIB : <target-os>cygwin : dll ; -type.set-generated-target-suffix SHARED_LIB : <target-os>darwin : dylib ; - -type SEARCHED_LIB : : LIB ; -# This is needed so that when we create a target of SEARCHED_LIB -# type, there's no prefix or suffix automatically added. -type.set-generated-target-prefix SEARCHED_LIB : : "" ; -type.set-generated-target-suffix SEARCHED_LIB : : "" ; diff --git a/jam-files/boost-build/tools/types/lib.py b/jam-files/boost-build/tools/types/lib.py deleted file mode 100644 index d0ec1fb5..00000000 --- a/jam-files/boost-build/tools/types/lib.py +++ /dev/null @@ -1,77 +0,0 @@ -# Status: ported -# Base revision: 64456. -# Copyright David Abrahams 2004. -# Copyright Vladimir Prus 2010. -# Distributed under the Boost -# Software License, Version 1.0. 
(See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import b2.build.type as type - -# The following naming scheme is used for libraries. -# -# On *nix: -#     libxxx.a       static library -#     libxxx.so      shared library -# -# On windows (msvc) -#     libxxx.lib     static library -#     xxx.dll        DLL -#     xxx.lib        import library -# -# On windows (mingw): -#     libxxx.a       static library -#     libxxx.dll     DLL -#     libxxx.dll.a   import library -# -# On cygwin i.e. <target-os>cygwin -#     libxxx.a       static library -#     cygxxx.dll     DLL -#     libxxx.dll.a   import library -# - -type.register('LIB') - -# FIXME: should not register both extensions on both platforms. -type.register('STATIC_LIB', ['a', 'lib'], 'LIB') - -# The 'lib' prefix is used everywhere -type.set_generated_target_prefix('STATIC_LIB', [], 'lib') - -# Use '.lib' suffix for windows -type.set_generated_target_suffix('STATIC_LIB', ['<target-os>windows'], 'lib') - -# Except with gcc. -type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>windows'], 'a') - -# Use xxx.lib for import libs -type.register('IMPORT_LIB', [], 'STATIC_LIB') -type.set_generated_target_prefix('IMPORT_LIB', [], '') -type.set_generated_target_suffix('IMPORT_LIB', [], 'lib') - -# Except with gcc (mingw or cygwin), where use libxxx.dll.a -type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc'], 'lib') -type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc'], 'dll.a') - -type.register('SHARED_LIB', ['so', 'dll', 'dylib'], 'LIB') - -# Both mingw and cygwin use libxxx.dll naming scheme. -# On Linux, use "lib" prefix -type.set_generated_target_prefix('SHARED_LIB', [], 'lib') -# But don't use it on windows -type.set_generated_target_prefix('SHARED_LIB', ['<target-os>windows'], '') -# But use it again on mingw -type.set_generated_target_prefix('SHARED_LIB', ['<toolset>gcc', '<target-os>windows'], 'lib') -# And use 'cyg' on cygwin -type.set_generated_target_prefix('SHARED_LIB', ['<target-os>cygwin'], 'cyg') - - -type.set_generated_target_suffix('SHARED_LIB', ['<target-os>windows'], 'dll') -type.set_generated_target_suffix('SHARED_LIB', ['<target-os>cygwin'], 'dll') -type.set_generated_target_suffix('SHARED_LIB', ['<target-os>darwin'], 'dylib') - -type.register('SEARCHED_LIB', [], 'LIB') -# This is needed so that when we create a target of SEARCHED_LIB -# type, there's no prefix or suffix automatically added. -type.set_generated_target_prefix('SEARCHED_LIB', [], '') -type.set_generated_target_suffix('SEARCHED_LIB', [], '') diff --git a/jam-files/boost-build/tools/types/obj.jam b/jam-files/boost-build/tools/types/obj.jam deleted file mode 100644 index 6afbcaa6..00000000 --- a/jam-files/boost-build/tools/types/obj.jam +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -import type ; - -type.register OBJ : o obj ; -type.set-generated-target-suffix OBJ : <target-os>windows : obj ; -type.set-generated-target-suffix OBJ : <target-os>cygwin : obj ; diff --git a/jam-files/boost-build/tools/types/obj.py b/jam-files/boost-build/tools/types/obj.py deleted file mode 100644 index e61e99a8..00000000 --- a/jam-files/boost-build/tools/types/obj.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. 
(See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): -    type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN']) -    type.register_type ('OBJ', ['o']) - -register () diff --git a/jam-files/boost-build/tools/types/objc.jam b/jam-files/boost-build/tools/types/objc.jam deleted file mode 100644 index 709cbd0c..00000000 --- a/jam-files/boost-build/tools/types/objc.jam +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright Rene Rivera 2008, 2010. -# Distributed under the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -import type ; -import scanner ; -import types/cpp ; - -class objc-scanner : c-scanner -{ -    rule __init__ ( includes * ) -    { -        c-scanner.__init__ $(includes) ; -    } - -    rule pattern ( ) -    { -        return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ; -    } -} - -scanner.register objc-scanner : include ; - -type.register OBJECTIVE_C : m ; -type.register OBJECTIVE_CPP : mm ; -type.set-scanner OBJECTIVE_C : objc-scanner ; -type.set-scanner OBJECTIVE_CPP : objc-scanner ; diff --git a/jam-files/boost-build/tools/types/preprocessed.jam b/jam-files/boost-build/tools/types/preprocessed.jam deleted file mode 100644 index c9187ba6..00000000 --- a/jam-files/boost-build/tools/types/preprocessed.jam +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright Steven Watanabe 2011 -# Distributed under the Boost Software License Version 1.0. (See -# accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) - -import type ; - -type.register PREPROCESSED_C : i : C ; -type.register PREPROCESSED_CPP : ii : CPP ; diff --git a/jam-files/boost-build/tools/types/qt.jam b/jam-files/boost-build/tools/types/qt.jam deleted file mode 100644 index 6d1dfbd4..00000000 --- a/jam-files/boost-build/tools/types/qt.jam +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Vladimir Prus 2005. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -type UI : ui ; -type QRC : qrc ; -type MOCCABLE_CPP ; -type MOCCABLE_H ; -# Result of running moc. -type MOC : moc : H ; diff --git a/jam-files/boost-build/tools/types/register.jam b/jam-files/boost-build/tools/types/register.jam deleted file mode 100644 index 203992ca..00000000 --- a/jam-files/boost-build/tools/types/register.jam +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -# This module's job is to automatically import all the type -# registration modules in its directory. -import type os path modules ; - -# Register the given type on the specified OSes, or on remaining OSes -# if os is not specified.  This rule is injected into each of the type -# modules for the sake of convenience. -local rule type ( type : suffixes * : base-type ? : os * ) -{ -    if ! [ type.registered $(type) ] -    { -        if ( ! 
$(os) ) || [ os.name ] in $(os) -        { -            type.register $(type) : $(suffixes) : $(base-type) ; -        } -    } -} - -.this-module's-file = [ modules.binding $(__name__) ] ; -.this-module's-dir = [ path.parent $(.this-module's-file) ] ; -.sibling-jamfiles =  [ path.glob $(.this-module's-dir) : *.jam ] ; -.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ; - -# A loop over all modules in this directory -for m in $(.sibling-modules) -{ -    m = [ path.basename $(m) ] ; -    m = types/$(m) ; -     -    # Inject the type rule into the new module -    IMPORT $(__name__) : type : $(m) : type ; -    import $(m) ; -} - - diff --git a/jam-files/boost-build/tools/types/rsp.jam b/jam-files/boost-build/tools/types/rsp.jam deleted file mode 100644 index bdf8a7c9..00000000 --- a/jam-files/boost-build/tools/types/rsp.jam +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -type RSP : rsp ; diff --git a/jam-files/boost-build/tools/types/rsp.py b/jam-files/boost-build/tools/types/rsp.py deleted file mode 100644 index ccb379e9..00000000 --- a/jam-files/boost-build/tools/types/rsp.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright David Abrahams 2004. Distributed under the Boost -# Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) - -from b2.build import type - -def register (): -    type.register_type ('RSP', ['rsp']) - -register () diff --git a/jam-files/boost-build/tools/unix.jam b/jam-files/boost-build/tools/unix.jam deleted file mode 100644 index 75949851..00000000 --- a/jam-files/boost-build/tools/unix.jam +++ /dev/null @@ -1,224 +0,0 @@ -#  Copyright (c) 2004 Vladimir Prus. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -#  This file implements linking semantic common to all unixes. On unix, static -#  libraries must be specified in a fixed order on the linker command line. Generators -#  declared there store information about the order and use it property. - -import feature ; -import "class" : new ; -import generators ; -import type ; -import set ; -import order ; -import builtin ; - -class unix-linking-generator : linking-generator -{ -    import property-set ; -    import type ; -    import unix ; -     -    rule __init__ ( id  -        composing ? : # Specify if generator is composing. The generator will be -        # composing if non-empty string is passed, or parameter is -        # not given. To make generator non-composing, pass empty -        # string ("") -        source-types + : target-types + :  -        requirements * ) -    { -        composing ?= true ; -        generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) : -          $(requirements) ; -    } -     -    rule run ( project name ? : property-set : sources + ) -    {    -        local result = [ linking-generator.run $(project) $(name) : $(property-set) -          : $(sources) ] ; -         -        unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; -                                 -        return $(result) ; -    } -     -    rule generated-targets ( sources + : property-set : project name ? 
) -    { -        local sources2 ; -        local libraries ; -        for local l in $(sources) -        { -            if [ type.is-derived [ $(l).type ] LIB ] -            { -                libraries += $(l) ; -            } -            else -            { -                sources2 += $(l) ; -            }             -        } -         -        sources = $(sources2) [ unix.order-libraries $(libraries) ] ; -         -        return [ linking-generator.generated-targets $(sources) : $(property-set) -          : $(project) $(name) ] ; -    } -     -}  - -class unix-archive-generator : archive-generator -{ -    import unix ;  - -    rule __init__ ( id composing ? : source-types + : target-types + :  -        requirements * ) -    { -        composing ?= true ; -        archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) : -          $(requirements) ; -    } -         -    rule run ( project name ? : property-set : sources + ) -    {                                 -        local result = [ archive-generator.run $(project) $(name) : $(property-set) -          : $(sources) ] ; -         -        unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; -         -        return $(result) ; - -    }     -} - -class unix-searched-lib-generator : searched-lib-generator -{ -    import unix ; -    rule __init__ ( * : * ) -    {  -        generator.__init__  -          $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -     -    rule optional-properties ( ) -    { -        return $(self.requirements) ; -    } -               -    rule run ( project name ? : property-set : sources * ) -    { -        local result = [ searched-lib-generator.run $(project) $(name)  -          : $(property-set) : $(sources) ] ; -         -        unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; -         -        return $(result) ; -    }     -} - -class unix-prebuilt-lib-generator : generator -{ -    import unix ; -    rule __init__ ( * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } - -    rule run ( project name ? : property-set : sources * ) -    { -        local f = [ $(property-set).get <file> ] ; -        unix.set-library-order-aux $(f) : $(sources) ; -        return $(f) $(sources) ; -    }     -} - -generators.register  -  [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB  -      : <file> <toolset>unix ] ; - -generators.override unix.prebuilt : builtin.lib-generator ; - - -# Declare generators -generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE  -    : <toolset>unix ] ; - -generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB  -    : <toolset>unix ] ; - -generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB  -    : <toolset>unix ] ; - -generators.register [ new unix-searched-lib-generator  -   unix.searched-lib-generator : : SEARCHED_LIB : <toolset>unix ] ; - - -# The derived toolset must specify their own actions. 
-actions link { -} - -actions link.dll { -} - -actions archive {     -} - -actions searched-lib-generator {     -} - -actions prebuilt { -} - -     - - - -.order = [ new order ] ; - -rule set-library-order-aux ( from * : to * ) -{         -    for local f in $(from) -    { -        for local t in $(to) -        {             -            if $(f) != $(t) -            {                 -                $(.order).add-pair $(f) $(t) ; -            }             -        }         -    }     -} - -rule set-library-order ( sources * : property-set : result * ) -{ -    local used-libraries ; -    local deps = [ $(property-set).dependency ] ;         -    for local l in $(sources) $(deps:G=)         -    { -        if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ] -        { -            used-libraries += $(l) ; -        } -    } - -    local created-libraries ; -    for local l in $(result) -    { -        if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]  -        { -            created-libraries += $(l) ; -        }             -    } -     -    created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ; -    set-library-order-aux $(created-libraries) : $(used-libraries) ; -} - -rule order-libraries ( libraries * ) -{ -    local r = [ $(.order).order $(libraries) ] ; -    return $(r) ; -} -     
\ No newline at end of file diff --git a/jam-files/boost-build/tools/unix.py b/jam-files/boost-build/tools/unix.py deleted file mode 100644 index d409c2e4..00000000 --- a/jam-files/boost-build/tools/unix.py +++ /dev/null @@ -1,150 +0,0 @@ -#  Copyright (c) 2004 Vladimir Prus. -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -""" This file implements linking semantics common to all unixes. On unix, static -    libraries must be specified in a fixed order on the linker command line. Generators -    declared there store information about the order and use it properly. -""" - -import builtin -from b2.build import generators, type -from b2.util.utility import * -from b2.util import set, sequence - -class UnixLinkingGenerator (builtin.LinkingGenerator): -     -    def __init__ (self, id, composing, source_types, target_types, requirements): -        builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements) -     -    def run (self, project, name, prop_set, sources): -        result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources) -        if result: -            set_library_order (project.manager (), sources, prop_set, result [1]) -                                 -        return result -     -    def generated_targets (self, sources, prop_set, project, name): -        sources2 = [] -        libraries = [] -        for l in sources: -            if type.is_derived (l.type (), 'LIB'): -                libraries.append (l) - -            else: -                sources2.append (l) -         -        sources = sources2 + order_libraries (libraries) -         -        return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name) - - -class UnixArchiveGenerator (builtin.ArchiveGenerator): -    def __init__ (self, id, composing, source_types, target_types_and_names, requirements): -        builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) -         -    def run (self, project, name, prop_set, sources): -        result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources) -        set_library_order(project.manager(), sources, prop_set, result) -        return result - -class UnixSearchedLibGenerator (builtin.SearchedLibGenerator): -     -    def __init__ (self): -        builtin.SearchedLibGenerator.__init__ (self) -     -    def optional_properties (self): -        return self.requirements () -               -    def run (self, project, name, prop_set, sources, multiple): -        result = SearchedLibGenerator.run (project, name, prop_set, sources, multiple) -         -        set_library_order (sources, prop_set, result) -         -        return result - -class UnixPrebuiltLibGenerator (generators.Generator): -    def __init__ (self, id, composing, source_types, target_types_and_names, requirements): -        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - -    def run (self, project, name, prop_set, sources, multiple): -        f = prop_set.get ('<file>') -        set_library_order_aux (f, sources) -        return (f, sources) - -### # The derived toolset must specify their own rules and actions. -# FIXME: restore? 
-# action.register ('unix.prebuilt', None, None) - - -generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['<file>', '<toolset>unix'])) - - - - - -### # Declare generators -### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE  -###     : <toolset>unix ] ; -generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['<toolset>unix'])) - -### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB  -###     : <toolset>unix ] ; -###  -### generators.register [ new UnixSearchedLibGenerator  -###    unix.SearchedLibGenerator : : SEARCHED_LIB : <toolset>unix ] ; -###  -###  -### # The derived toolset must specify their own actions. -### actions link { -### } -###  -### actions link.dll { -### } - -def unix_archive (manager, targets, sources, properties): -    pass - -# FIXME: restore? -#action.register ('unix.archive', unix_archive, ['']) - -### actions searched-lib-generator {     -### } -###  -### actions prebuilt { -### } - - -from b2.util.order import Order -__order = Order () - -def set_library_order_aux (from_libs, to_libs): -    for f in from_libs: -        for t in to_libs: -            if f != t: -                __order.add_pair (f, t) - -def set_library_order (manager, sources, prop_set, result): -    used_libraries = [] -    deps = prop_set.dependency () - -    sources.extend(d.value() for d in deps) -    sources = sequence.unique(sources) - -    for l in sources: -        if l.type () and type.is_derived (l.type (), 'LIB'): -            used_libraries.append (l) - -    created_libraries = [] -    for l in result: -        if l.type () and type.is_derived (l.type (), 'LIB'): -            created_libraries.append (l) -     -    created_libraries = set.difference (created_libraries, used_libraries) -    set_library_order_aux (created_libraries, used_libraries) - -def order_libraries (libraries): -    return __order.order (libraries) -      diff --git a/jam-files/boost-build/tools/vacpp.jam b/jam-files/boost-build/tools/vacpp.jam deleted file mode 100644 index f4080fc0..00000000 --- a/jam-files/boost-build/tools/vacpp.jam +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright Vladimir Prus 2004. -# Copyright Toon Knapen 2004. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt -# or copy at http://www.boost.org/LICENSE_1_0.txt) - -# -# Boost.Build V2 toolset for the IBM XL C++ compiler -# - -import toolset : flags ; -import feature ; -import common ; -import generators ; -import os ; - -feature.extend toolset : vacpp ; -toolset.inherit vacpp : unix ; -generators.override vacpp.prebuilt : builtin.prebuilt ; -generators.override vacpp.searched-lib-generator : searched-lib-generator ; - -# Configure the vacpp toolset -rule init ( version ? 
: command * : options * ) -{ -    local condition = [  -        common.check-init-parameters vacpp : version $(version) ] ; -               -    command = [ common.get-invocation-command vacpp : xlC -        : $(command) : "/usr/vacpp/bin/xlC" ] ; -         -    common.handle-options vacpp : $(condition) : $(command) : $(options) ; -} - -# Declare generators -generators.register-c-compiler vacpp.compile.c : C : OBJ : <toolset>vacpp ; -generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : <toolset>vacpp ; - -# Allow C++ style comments in C files -flags vacpp CFLAGS : -qcpluscmt ; - -# Declare flags -flags vacpp CFLAGS <optimization>off : -qNOOPTimize ; -flags vacpp CFLAGS <optimization>speed : -O3 -qstrict ; -flags vacpp CFLAGS <optimization>space : -O2 -qcompact ; - -# Discretionary inlining (not recommended) -flags vacpp CFLAGS <inlining>off : -qnoinline ; -flags vacpp CFLAGS <inlining>on : -qinline ; -#flags vacpp CFLAGS <inlining>full : -qinline ; -flags vacpp CFLAGS <inlining>full : ; - -# Exception handling -flags vacpp C++FLAGS <exception-handling>off : -qnoeh ; -flags vacpp C++FLAGS <exception-handling>on : -qeh ; - -# Run-time Type Identification -flags vacpp C++FLAGS <rtti>off : -qnortti ; -flags vacpp C++FLAGS <rtti>on : -qrtti ; - -# Enable 64-bit memory addressing model -flags vacpp CFLAGS <address-model>64 : -q64 ; -flags vacpp LINKFLAGS <address-model>64 : -q64 ; -flags vacpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ; - -# Use absolute path when generating debug information -flags vacpp CFLAGS <debug-symbols>on : -g -qfullpath ; -flags vacpp LINKFLAGS <debug-symbols>on : -g -qfullpath ; -flags vacpp LINKFLAGS <debug-symbols>off : -s ; - -if [ os.name ] = AIX -{ -    flags vacpp.compile C++FLAGS : -qfuncsect ; - -    # The -bnoipath strips the prepending (relative) path of libraries from -    # the loader section in the target library or executable. Hence, during -    # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded -    # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without -    # this option, the prepending (relative) path + library name is -    # hard-coded in the loader section, causing *only* this path to be -    # searched during load-time. Note that the AIX linker does not have an -    # -soname equivalent, this is as close as it gets. -    # -    # The above options are definately for AIX 5.x, and most likely also for -    # AIX 4.x and AIX 6.x. For details about the AIX linker see: -    # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf -    # -    flags vacpp.link LINKFLAGS <link>shared : -bnoipath ; - -    # Run-time linking -    flags vacpp.link EXE-LINKFLAGS <link>shared : -brtl ; -} -else -{ -    # Linux PPC -    flags vacpp.compile CFLAGS <link>shared : -qpic=large ; -    flags vacpp FINDLIBS : rt ; -} - -# Profiling -flags vacpp CFLAGS <profiling>on : -pg ; -flags vacpp LINKFLAGS <profiling>on : -pg ; - -flags vacpp.compile OPTIONS <cflags> ; -flags vacpp.compile.c++ OPTIONS <cxxflags> ; -flags vacpp DEFINES <define> ; -flags vacpp UNDEFS <undef> ; -flags vacpp HDRS <include> ; -flags vacpp STDHDRS <sysinclude> ; -flags vacpp.link OPTIONS <linkflags> ; -flags vacpp ARFLAGS <arflags> ; - -flags vacpp LIBPATH <library-path> ; -flags vacpp NEEDLIBS <library-file> ; -flags vacpp FINDLIBS <find-shared-library> ; -flags vacpp FINDLIBS <find-static-library> ; - -# Select the compiler name according to the threading model. 
-flags vacpp VA_C_COMPILER  <threading>single : xlc   ; -flags vacpp VA_C_COMPILER  <threading>multi : xlc_r ; -flags vacpp VA_CXX_COMPILER <threading>single : xlC   ; -flags vacpp VA_CXX_COMPILER <threading>multi : xlC_r ; - -SPACE = " " ; - -flags vacpp.link.dll HAVE_SONAME <target-os>linux : "" ; - -actions vacpp.link bind NEEDLIBS -{ -    $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) -} - -actions vacpp.link.dll bind NEEDLIBS -{ -    xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)  -} - -actions vacpp.compile.c -{ -    $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" -} - -actions vacpp.compile.c++ -{ -    $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" -} - -actions updated together piecemeal vacpp.archive -{ -    ar $(ARFLAGS) ru "$(<)" "$(>)" -} diff --git a/jam-files/boost-build/tools/whale.jam b/jam-files/boost-build/tools/whale.jam deleted file mode 100644 index 9335ff0c..00000000 --- a/jam-files/boost-build/tools/whale.jam +++ /dev/null @@ -1,116 +0,0 @@ -#  Copyright (C) Vladimir Prus 2002-2005.  - -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -#  This module implements support for Whale/Dolphin/WD parser/lexer tools.  -#  See http://www.cs.queensu.ca/home/okhotin/whale/ for details. -# -#  There are three interesting target types: -#  - WHL (the parser sources), that are converted to CPP and H -#  - DLP (the lexer sources), that are converted to CPP and H -#  - WD (combined parser/lexer sources), that are converted to WHL + DLP - -import type ; -import generators ; -import path ; -import "class" : new ; -import errors ; - -rule init ( path # path the Whale/Dolphin/WD binaries  -          ) -{ -    if $(.configured) && $(.path) != $(path) -    { -        errors.user-error "Attempt to reconfigure Whale support" : -          "Previously configured with path \"$(.path:E=<empty>)\"" : -          "Now configuring with path \"$(path:E=<empty>)\"" ; - -    } -    .configured = true ; -    .path = $(path) ; - -    .whale = [ path.join $(path) whale ] ; -    .dolphin = [ path.join $(path) dolphin ] ; -    .wd = [ path.join $(path) wd ] ; -}      - - -# Declare the types. -type.register WHL : whl ; -type.register DLP : dlp ; -type.register WHL_LR0 : lr0 ; -type.register WD : wd ; - -# Declare standard generators. -generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ; -generators.register-standard whale.dolphin : DLP : CPP H ; -generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ; - -# The conversions defines above a ambiguious when we generated CPP from WD. -# We can either go via WHL type, or via DLP type. -# The following custom generator handles this by running both conversions. - -class wd-to-cpp : generator -{ -    rule __init__ ( * : * : * ) -    { -        generator.__init__ $(1) : $(2) : $(3) ; -    } -     -    rule run ( project name ? : property-set : source * ) -    { -        if ! 
$(source[2]) -        {                 -            local new-sources ; -            if ! [ $(source).type ] in WHL DLP -            {             -                local r1 = [ generators.construct $(project) $(name)  -                  : WHL : $(property-set) : $(source) ] ; -                local r2 = [ generators.construct $(project) $(name)  -                  : DLP : $(property-set) : $(source) ] ; -                 -                new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ; -            } -            else -            { -                new-sources = $(source) ; -            } -             -            local result ; -            for local i in $(new-sources)  -            { -                local t = [ generators.construct $(project) $(name) : CPP  -                  : $(property-set) : $(i) ] ;           -                result += $(t[2-]) ; -            } -            return $(result) ; -        }         -    } -     -} - - -generators.override whale.wd-to-cpp : whale.whale ; -generators.override whale.wd-to-cpp : whale.dolphin ; - - -generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ; - - -actions whale  -{ -    $(.whale) -d $(<[1]:D) $(>)       -} - -actions dolphin -{     -    $(.dolphin) -d $(<[1]:D) $(>) -} - -actions wd -{     -    $(.wd) -d $(<[1]:D) -g $(>) -} - diff --git a/jam-files/boost-build/tools/xlf.jam b/jam-files/boost-build/tools/xlf.jam deleted file mode 100644 index e7fcc608..00000000 --- a/jam-files/boost-build/tools/xlf.jam +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2004 Toon Knapen -# -#  Use, modification and distribution is subject to the Boost Software -#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or -#  http://www.boost.org/LICENSE_1_0.txt) - -# -# toolset configuration for the IBM Fortran compiler (xlf) -#  - -import toolset : flags ; -import feature ; -import fortran ; - -rule init ( version ? : command * : options * ) -{ -} - -# Declare flags and action for compilation -flags xlf OPTIONS <optimization>off : -O0 ; -flags xlf OPTIONS <optimization>speed : -O3 ; -flags xlf OPTIONS <optimization>space : -Os ; - -flags xlf OPTIONS <debug-symbols>on : -g ; -flags xlf OPTIONS <profiling>on : -pg ; - -flags xlf DEFINES <define> ; -flags xlf INCLUDES <include> ; - -rule compile-fortran -{ -} - -actions compile-fortran -{ -  xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)"  -} - -generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ;  diff --git a/jam-files/boost-build/tools/xsltproc-config.jam b/jam-files/boost-build/tools/xsltproc-config.jam deleted file mode 100644 index de54a2eb..00000000 --- a/jam-files/boost-build/tools/xsltproc-config.jam +++ /dev/null @@ -1,37 +0,0 @@ -#~ Copyright 2005 Rene Rivera. -#~ Distributed under the Boost Software License, Version 1.0. -#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - -# Automatic configuration for Python tools and librries. To use, just import this module. 
- -import os ; -import toolset : using ; - -if [ os.name ] = NT -{ -    local xsltproc-path = [ GLOB [ modules.peek : PATH ] "C:\\Boost\\bin" : xsltproc\.exe ] ; -    xsltproc-path = $(xsltproc-path[1]) ; -     -    if $(xsltproc-path) -    { -        if --debug-configuration in [ modules.peek : ARGV ] -        { -            ECHO "notice:" using xsltproc ":" $(xsltproc-path) ; -        } -        using xsltproc : $(xsltproc-path) ; -    } -} -else -{ -    local xsltproc-path = [ GLOB [ modules.peek : PATH ] : xsltproc ] ; -    xsltproc-path = $(xsltproc-path[1]) ; -     -    if $(xsltproc-path) -    { -        if --debug-configuration in [ modules.peek : ARGV ] -        { -            ECHO "notice:" using xsltproc ":" $(xsltproc-path) ; -        } -        using xsltproc : $(xsltproc-path) ; -    } -} diff --git a/jam-files/boost-build/tools/xsltproc.jam b/jam-files/boost-build/tools/xsltproc.jam deleted file mode 100644 index 96f5170b..00000000 --- a/jam-files/boost-build/tools/xsltproc.jam +++ /dev/null @@ -1,194 +0,0 @@ -# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and -# distribute this software is granted provided this copyright notice appears in -# all copies. This software is provided "as is" without express or implied -# warranty, and with no claim as to its suitability for any purpose. - -# This module defines rules to apply an XSLT stylesheet to an XML file using the -# xsltproc driver, part of libxslt. -# -# Note: except for 'init', this modules does not provide any rules for end -# users. - -import feature ; -import regex ; -import sequence ; -import common ; -import os ; -import modules ; -import path ; -import errors ; - -feature.feature xsl:param : : free ; -feature.feature xsl:path : : free ; -feature.feature catalog : : free ; - - -# Initialize xsltproc support. The parameters are: -#   xsltproc: The xsltproc executable -# -rule init ( xsltproc ? ) -{ -    if $(xsltproc) -    { -        modify-config ; -        .xsltproc = $(xsltproc) ; -        check-xsltproc ; -    } -} - -rule freeze-config ( ) -{ -    if ! $(.config-frozen) -    { -        .config-frozen = true ; -        .xsltproc ?= [ modules.peek : XSLTPROC ] ; -        .xsltproc ?= xsltproc ; -        check-xsltproc ; -        .is-cygwin = [ .is-cygwin $(.xsltproc) ] ; -    } -} - -rule modify-config -{ -    if $(.config-frozen) -    { -        errors.user-error "xsltproc: Cannot change xsltproc command after it has been used." ; -    } -} - -rule check-xsltproc ( ) -{ -    if $(.xsltproc) -    { -        local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] ; -        if $(status[2]) != "0" -        { -            errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ; -        } -    } -} - -# Returns a non-empty string if a cygwin xsltproc binary was specified. 
-rule is-cygwin ( ) -{ -    freeze-config ; -    return $(.is-cygwin) ; -} - -rule .is-cygwin ( xsltproc ) -{ -    if [ os.on-windows ] -    { -        local file = [ path.make [ modules.binding $(__name__) ] ] ; -        local dir = [ path.native -                      [ path.join [ path.parent $(file) ] xsltproc ] ] ; -        if [ os.name ] = CYGWIN -        { -            dir = $(dir:W) ; -        } -        local command = -            "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ; -        local status = [ SHELL $(command) : no-output : exit-status ] ; -        if $(status[2]) != "0" -        { -            return true ; -        } -    } -} - -rule compute-xslt-flags ( target : properties * ) -{ -    local flags ; - -    # Raw flags. -    flags += [ feature.get-values <flags> : $(properties) ] ; - -    # Translate <xsl:param> into command line flags. -    for local param in [ feature.get-values <xsl:param> : $(properties) ] -    { -        local namevalue = [ regex.split $(param) "=" ] ; -        flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ; -    } - -    # Translate <xsl:path>. -    for local path in [ feature.get-values <xsl:path> : $(properties) ] -    { -        flags += --path \"$(path:G=)\" ; -    } - -    # Take care of implicit dependencies. -    local other-deps ; -    for local dep in [ feature.get-values <implicit-dependency> : $(properties) ] -    { -        other-deps += [ $(dep:G=).creating-subvariant ] ; -    } - -    local implicit-target-directories ; -    for local dep in [ sequence.unique $(other-deps) ] -    { -        implicit-target-directories += [ $(dep).all-target-directories ] ; -    } - -    for local dir in $(implicit-target-directories) -    { -        flags += --path \"$(dir:T)\" ; -    } - -    return $(flags) ; -} - - -local rule .xsltproc ( target : source stylesheet : properties * : dirname ? : action ) -{ -    freeze-config ; -    STYLESHEET on $(target) = $(stylesheet) ; -    FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ; -    NAME on $(target) = $(.xsltproc) ; - -    for local catalog in [ feature.get-values <catalog> : $(properties) ] -    { -        CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : $(catalog:T) ] ; -    } - -    if [ os.on-windows ] && ! 
[ is-cygwin ] -    { -        action = $(action).windows ; -    } - -    $(action) $(target) : $(source) ; -} - - -rule xslt ( target : source stylesheet : properties * ) -{ -    return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : xslt-xsltproc ] ; -} - - -rule xslt-dir ( target : source stylesheet : properties * : dirname ) -{ -    return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : $(dirname) : xslt-xsltproc-dir ] ; -} - -actions xslt-xsltproc.windows -{ -    $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)" -} - - -actions xslt-xsltproc bind STYLESHEET -{ -    $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)" -} - - -actions xslt-xsltproc-dir.windows bind STYLESHEET -{ -    $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)" -} - - -actions xslt-xsltproc-dir bind STYLESHEET -{ -    $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)" -} diff --git a/jam-files/boost-build/tools/xsltproc/included.xsl b/jam-files/boost-build/tools/xsltproc/included.xsl deleted file mode 100644 index ef86394a..00000000 --- a/jam-files/boost-build/tools/xsltproc/included.xsl +++ /dev/null @@ -1,11 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- -   Copyright (c) 2010 Steven Watanabe -   -   Distributed under the Boost Software License, Version 1.0. -   (See accompanying file LICENSE_1_0.txt or copy at -   http://www.boost.org/LICENSE_1_0.txt) -  --> -<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" -                version="1.0"> -</xsl:stylesheet> diff --git a/jam-files/boost-build/tools/xsltproc/test.xml b/jam-files/boost-build/tools/xsltproc/test.xml deleted file mode 100644 index 57c8ba18..00000000 --- a/jam-files/boost-build/tools/xsltproc/test.xml +++ /dev/null @@ -1,2 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<root/> diff --git a/jam-files/boost-build/tools/xsltproc/test.xsl b/jam-files/boost-build/tools/xsltproc/test.xsl deleted file mode 100644 index a142c91d..00000000 --- a/jam-files/boost-build/tools/xsltproc/test.xsl +++ /dev/null @@ -1,12 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- -   Copyright (c) 2010 Steven Watanabe -   -   Distributed under the Boost Software License, Version 1.0. -   (See accompanying file LICENSE_1_0.txt or copy at -   http://www.boost.org/LICENSE_1_0.txt) -  --> -<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" -                version="1.0"> -  <xsl:include href="included.xsl"/> -</xsl:stylesheet> diff --git a/jam-files/boost-build/tools/zlib.jam b/jam-files/boost-build/tools/zlib.jam deleted file mode 100644 index f9138fd5..00000000 --- a/jam-files/boost-build/tools/zlib.jam +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2010 Vladimir Prus. -# -# Use, modification and distribution is subject to the Boost Software -# License Version 1.0. (See accompanying file LICENSE_1_0.txt or -# http://www.boost.org/LICENSE_1_0.txt) - -# Supports the zlib library -# -# After 'using zlib', the following targets are available: -# -# /zlib//zlib -- The zlib library - - -# In addition to direct purpose of supporting zlib, this module also -# serves as canonical example of how third-party condiguration works -# in Boost.Build. The operation is as follows -# -# - For each 'using zlib : condition ... : ...' we create a target alternative -#   for zlib, with the specified condition. 
-# - There's one target alternative for 'zlib' with no specific condition -#   properties.  -# -# Two invocations of 'using zlib' with the same condition but different -# properties are not permitted, e.g.: -# -#   using zlib : condition <target-os>windows : include foo ; -#   using zlib : condition <target-os>windows : include bar ; -# -# is in error. One exception is for empty condition, 'using' without any -# parameters is overridable. That is: -# -#   using zlib ; -#   using zlib : include foo ; -#  -# Is OK then the first 'using' is ignored. Likewise if the order of the statements -# is reversed. -# -# When 'zlib' target is built, a target alternative is selected as usual for -# Boost.Build. The selected alternative is a custom target class, which: -# -# - calls ac.find-include-path to find header path. If explicit path is provided -#   in 'using', only that path is checked, and if no header is found there, error -#   is emitted. Otherwise, we check a directory specified using ZLIB_INCLUDE -#   environment variable, and failing that, in standard directories. -#   [TODO: document sysroot handling] -# - calls ac.find-library to find the library, in an identical fashion. -# - -import project ; -import ac ; -import errors ; -import "class" : new ; -import targets ;  - -project.initialize $(__name__) ; -project = [ project.current ] ; -project zlib ; - -header = zlib.h ; -names = z zlib zll zdll ; - -.default-alternative = [ new ac-library zlib : $(project) ] ; -$(.default-alternative).set-header $(header) ; -$(.default-alternative).set-default-names $(names) ; -targets.main-target-alternative $(.default-alternative) ; - -rule init ( * : * ) -{ -    if ! $(condition) -    { -        # Special case the no-condition case so that 'using' without parameters -        # can mix with more specific 'using'. -        $(.default-alternative).reconfigure $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; -    } -    else                     -    { -        # FIXME: consider if we should allow overriding definitions for a given -        # condition -- e.g. project-config.jam might want to override whatever is -        # in user-config.jam.  -        local mt = [ new ac-library zlib : $(project) -          : $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; -        $(mt).set-header $(header) ; -        $(mt).set-default-names $(names) ; -        targets.main-target-alternative $(mt) ; -    }     -} - - - - - -  | 

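In the same spirit, the fixed static-library ordering that the unix.jam and unix.py headers above describe ("static libraries must be specified in a fixed order on the linker command line") can be pictured with a minimal, hypothetical Jamfile; the target and source names below are invented for illustration and do not appear in this diff.

# Hypothetical Jamfile: 'first' uses 'second', so a single-pass unix linker
# needs first's archive to appear before second's on the command line.
lib second : second.cpp ;
lib first  : first.cpp second ;
exe app    : app.cpp first ;
# When built with link=static, the unix linking generator records the
# first -> second pair (set-library-order) and orders the archives
# accordingly when linking 'app'.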