author     Chris Dyer <cdyer@cs.cmu.edu>    2012-10-11 14:06:32 -0400
committer  Chris Dyer <cdyer@cs.cmu.edu>    2012-10-11 14:06:32 -0400
commit     07ea7b64b6f85e5798a8068453ed9fd2b97396db (patch)
tree       644496a1690d84d82a396bbc1e39160788beb2cd /jam-files
parent     37b9e45e5cb29d708f7249dbe0b0fb27685282a0 (diff)
parent     a36fcc5d55c1de84ae68c1091ebff2b1c32dc3b7 (diff)
Merge branch 'master' of https://github.com/redpony/cdec
Diffstat (limited to 'jam-files')
-rw-r--r--  jam-files/LICENSE_1_0.txt | 23
-rw-r--r--  jam-files/boost-build/boost-build.jam | 8
-rw-r--r--  jam-files/boost-build/bootstrap.jam | 18
-rw-r--r--  jam-files/boost-build/build-system.jam | 1008
-rw-r--r--  jam-files/boost-build/build/__init__.py | 0
-rw-r--r--  jam-files/boost-build/build/ac.jam | 198
-rw-r--r--  jam-files/boost-build/build/alias.jam | 73
-rw-r--r--  jam-files/boost-build/build/alias.py | 63
-rw-r--r--  jam-files/boost-build/build/build-request.jam | 322
-rw-r--r--  jam-files/boost-build/build/build_request.py | 216
-rw-r--r--  jam-files/boost-build/build/configure.jam | 237
-rw-r--r--  jam-files/boost-build/build/configure.py | 164
-rw-r--r--  jam-files/boost-build/build/engine.py | 172
-rw-r--r--  jam-files/boost-build/build/errors.py | 127
-rw-r--r--  jam-files/boost-build/build/feature.jam | 1335
-rw-r--r--  jam-files/boost-build/build/feature.py | 905
-rw-r--r--  jam-files/boost-build/build/generators.jam | 1408
-rw-r--r--  jam-files/boost-build/build/generators.py | 1089
-rw-r--r--  jam-files/boost-build/build/modifiers.jam | 232
-rw-r--r--  jam-files/boost-build/build/project.ann.py | 996
-rw-r--r--  jam-files/boost-build/build/project.jam | 1110
-rw-r--r--  jam-files/boost-build/build/project.py | 1120
-rw-r--r--  jam-files/boost-build/build/property-set.jam | 481
-rw-r--r--  jam-files/boost-build/build/property.jam | 788
-rw-r--r--  jam-files/boost-build/build/property.py | 593
-rw-r--r--  jam-files/boost-build/build/property_set.py | 449
-rw-r--r--  jam-files/boost-build/build/readme.txt | 13
-rw-r--r--  jam-files/boost-build/build/scanner.jam | 153
-rw-r--r--  jam-files/boost-build/build/scanner.py | 158
-rw-r--r--  jam-files/boost-build/build/targets.jam | 1659
-rw-r--r--  jam-files/boost-build/build/targets.py | 1401
-rw-r--r--  jam-files/boost-build/build/toolset.jam | 502
-rw-r--r--  jam-files/boost-build/build/toolset.py | 398
-rw-r--r--  jam-files/boost-build/build/type.jam | 425
-rw-r--r--  jam-files/boost-build/build/type.py | 313
-rw-r--r--  jam-files/boost-build/build/version.jam | 161
-rw-r--r--  jam-files/boost-build/build/virtual-target.jam | 1317
-rw-r--r--  jam-files/boost-build/build/virtual_target.py | 1118
-rw-r--r--  jam-files/boost-build/kernel/boost-build.jam | 5
-rw-r--r--  jam-files/boost-build/kernel/bootstrap.jam | 263
-rw-r--r--  jam-files/boost-build/kernel/bootstrap.py | 25
-rw-r--r--  jam-files/boost-build/kernel/class.jam | 420
-rw-r--r--  jam-files/boost-build/kernel/errors.jam | 274
-rw-r--r--  jam-files/boost-build/kernel/modules.jam | 354
-rw-r--r--  jam-files/boost-build/options/help.jam | 212
-rw-r--r--  jam-files/boost-build/site-config.jam | 4
-rw-r--r--  jam-files/boost-build/tools/__init__.py | 0
-rw-r--r--  jam-files/boost-build/tools/acc.jam | 118
-rw-r--r--  jam-files/boost-build/tools/auto-index.jam | 212
-rw-r--r--  jam-files/boost-build/tools/bison.jam | 32
-rw-r--r--  jam-files/boost-build/tools/boostbook-config.jam | 13
-rw-r--r--  jam-files/boost-build/tools/boostbook.jam | 727
-rw-r--r--  jam-files/boost-build/tools/borland.jam | 220
-rw-r--r--  jam-files/boost-build/tools/builtin.jam | 960
-rw-r--r--  jam-files/boost-build/tools/builtin.py | 718
-rw-r--r--  jam-files/boost-build/tools/cast.jam | 91
-rw-r--r--  jam-files/boost-build/tools/cast.py | 69
-rw-r--r--  jam-files/boost-build/tools/clang-darwin.jam | 170
-rw-r--r--  jam-files/boost-build/tools/clang-linux.jam | 196
-rw-r--r--  jam-files/boost-build/tools/clang.jam | 27
-rw-r--r--  jam-files/boost-build/tools/common.jam | 994
-rw-r--r--  jam-files/boost-build/tools/common.py | 840
-rw-r--r--  jam-files/boost-build/tools/como-linux.jam | 103
-rw-r--r--  jam-files/boost-build/tools/como-win.jam | 117
-rw-r--r--  jam-files/boost-build/tools/como.jam | 29
-rw-r--r--  jam-files/boost-build/tools/convert.jam | 62
-rw-r--r--  jam-files/boost-build/tools/cw-config.jam | 34
-rw-r--r--  jam-files/boost-build/tools/cw.jam | 246
-rw-r--r--  jam-files/boost-build/tools/darwin.jam | 568
-rw-r--r--  jam-files/boost-build/tools/darwin.py | 57
-rw-r--r--  jam-files/boost-build/tools/dmc.jam | 134
-rw-r--r--  jam-files/boost-build/tools/docutils.jam | 84
-rw-r--r--  jam-files/boost-build/tools/doxproc.py | 859
-rw-r--r--  jam-files/boost-build/tools/doxygen-config.jam | 11
-rw-r--r--  jam-files/boost-build/tools/doxygen.jam | 776
-rw-r--r--  jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile | 3
-rw-r--r--  jam-files/boost-build/tools/doxygen/windows-paths-check.hpp | 0
-rw-r--r--  jam-files/boost-build/tools/fop.jam | 69
-rw-r--r--  jam-files/boost-build/tools/fortran.jam | 55
-rw-r--r--  jam-files/boost-build/tools/gcc.jam | 1185
-rw-r--r--  jam-files/boost-build/tools/gcc.py | 796
-rw-r--r--  jam-files/boost-build/tools/generate.jam | 108
-rw-r--r--  jam-files/boost-build/tools/gettext.jam | 230
-rw-r--r--  jam-files/boost-build/tools/gfortran.jam | 39
-rw-r--r--  jam-files/boost-build/tools/hp_cxx.jam | 181
-rw-r--r--  jam-files/boost-build/tools/hpfortran.jam | 35
-rw-r--r--  jam-files/boost-build/tools/ifort.jam | 44
-rw-r--r--  jam-files/boost-build/tools/intel-darwin.jam | 220
-rw-r--r--  jam-files/boost-build/tools/intel-linux.jam | 250
-rw-r--r--  jam-files/boost-build/tools/intel-win.jam | 184
-rw-r--r--  jam-files/boost-build/tools/intel.jam | 34
-rw-r--r--  jam-files/boost-build/tools/lex.jam | 33
-rw-r--r--  jam-files/boost-build/tools/make.jam | 72
-rw-r--r--  jam-files/boost-build/tools/make.py | 59
-rw-r--r--  jam-files/boost-build/tools/mc.jam | 44
-rw-r--r--  jam-files/boost-build/tools/message.jam | 55
-rw-r--r--  jam-files/boost-build/tools/message.py | 46
-rw-r--r--  jam-files/boost-build/tools/midl.jam | 142
-rw-r--r--  jam-files/boost-build/tools/mipspro.jam | 145
-rw-r--r--  jam-files/boost-build/tools/mpi.jam | 583
-rw-r--r--  jam-files/boost-build/tools/msvc-config.jam | 12
-rw-r--r--  jam-files/boost-build/tools/msvc.jam | 1392
-rw-r--r--  jam-files/boost-build/tools/notfile.jam | 74
-rw-r--r--  jam-files/boost-build/tools/notfile.py | 51
-rw-r--r--  jam-files/boost-build/tools/package.jam | 165
-rw-r--r--  jam-files/boost-build/tools/package.py | 168
-rw-r--r--  jam-files/boost-build/tools/pathscale.jam | 168
-rw-r--r--  jam-files/boost-build/tools/pch.jam | 95
-rw-r--r--  jam-files/boost-build/tools/pch.py | 83
-rw-r--r--  jam-files/boost-build/tools/pgi.jam | 147
-rw-r--r--  jam-files/boost-build/tools/python-config.jam | 27
-rw-r--r--  jam-files/boost-build/tools/python.jam | 1267
-rw-r--r--  jam-files/boost-build/tools/qcc.jam | 236
-rw-r--r--  jam-files/boost-build/tools/qt.jam | 17
-rw-r--r--  jam-files/boost-build/tools/qt3.jam | 209
-rw-r--r--  jam-files/boost-build/tools/qt4.jam | 724
-rw-r--r--  jam-files/boost-build/tools/quickbook-config.jam | 44
-rw-r--r--  jam-files/boost-build/tools/quickbook.jam | 361
-rw-r--r--  jam-files/boost-build/tools/rc.jam | 156
-rw-r--r--  jam-files/boost-build/tools/rc.py | 189
-rw-r--r--  jam-files/boost-build/tools/stage.jam | 524
-rw-r--r--  jam-files/boost-build/tools/stage.py | 350
-rw-r--r--  jam-files/boost-build/tools/stlport.jam | 303
-rw-r--r--  jam-files/boost-build/tools/sun.jam | 142
-rw-r--r--  jam-files/boost-build/tools/symlink.jam | 140
-rw-r--r--  jam-files/boost-build/tools/symlink.py | 112
-rw-r--r--  jam-files/boost-build/tools/testing-aux.jam | 210
-rw-r--r--  jam-files/boost-build/tools/testing.jam | 581
-rw-r--r--  jam-files/boost-build/tools/testing.py | 342
-rw-r--r--  jam-files/boost-build/tools/types/__init__.py | 18
-rw-r--r--  jam-files/boost-build/tools/types/asm.jam | 4
-rw-r--r--  jam-files/boost-build/tools/types/asm.py | 13
-rw-r--r--  jam-files/boost-build/tools/types/cpp.jam | 86
-rw-r--r--  jam-files/boost-build/tools/types/cpp.py | 10
-rw-r--r--  jam-files/boost-build/tools/types/exe.jam | 9
-rw-r--r--  jam-files/boost-build/tools/types/exe.py | 11
-rw-r--r--  jam-files/boost-build/tools/types/html.jam | 4
-rw-r--r--  jam-files/boost-build/tools/types/html.py | 10
-rw-r--r--  jam-files/boost-build/tools/types/lib.jam | 74
-rw-r--r--  jam-files/boost-build/tools/types/lib.py | 77
-rw-r--r--  jam-files/boost-build/tools/types/obj.jam | 9
-rw-r--r--  jam-files/boost-build/tools/types/obj.py | 11
-rw-r--r--  jam-files/boost-build/tools/types/objc.jam | 26
-rw-r--r--  jam-files/boost-build/tools/types/preprocessed.jam | 9
-rw-r--r--  jam-files/boost-build/tools/types/qt.jam | 10
-rw-r--r--  jam-files/boost-build/tools/types/register.jam | 39
-rw-r--r--  jam-files/boost-build/tools/types/rsp.jam | 4
-rw-r--r--  jam-files/boost-build/tools/types/rsp.py | 10
-rw-r--r--  jam-files/boost-build/tools/unix.jam | 224
-rw-r--r--  jam-files/boost-build/tools/unix.py | 150
-rw-r--r--  jam-files/boost-build/tools/vacpp.jam | 150
-rw-r--r--  jam-files/boost-build/tools/whale.jam | 116
-rw-r--r--  jam-files/boost-build/tools/xlf.jam | 39
-rw-r--r--  jam-files/boost-build/tools/xsltproc-config.jam | 37
-rw-r--r--  jam-files/boost-build/tools/xsltproc.jam | 194
-rw-r--r--  jam-files/boost-build/tools/xsltproc/included.xsl | 11
-rw-r--r--  jam-files/boost-build/tools/xsltproc/test.xml | 2
-rw-r--r--  jam-files/boost-build/tools/xsltproc/test.xsl | 12
-rw-r--r--  jam-files/boost-build/tools/zlib.jam | 92
-rw-r--r--  jam-files/boost-build/user-config.jam | 92
-rw-r--r--  jam-files/boost-build/util/__init__.py | 136
-rw-r--r--  jam-files/boost-build/util/assert.jam | 336
-rw-r--r--  jam-files/boost-build/util/container.jam | 339
-rw-r--r--  jam-files/boost-build/util/doc.jam | 997
-rw-r--r--  jam-files/boost-build/util/indirect.jam | 115
-rw-r--r--  jam-files/boost-build/util/indirect.py | 15
-rw-r--r--  jam-files/boost-build/util/logger.py | 46
-rw-r--r--  jam-files/boost-build/util/numbers.jam | 218
-rw-r--r--  jam-files/boost-build/util/option.jam | 109
-rw-r--r--  jam-files/boost-build/util/option.py | 35
-rw-r--r--  jam-files/boost-build/util/order.jam | 169
-rw-r--r--  jam-files/boost-build/util/order.py | 121
-rw-r--r--  jam-files/boost-build/util/os.jam | 171
-rw-r--r--  jam-files/boost-build/util/os_j.py | 19
-rw-r--r--  jam-files/boost-build/util/path.jam | 934
-rw-r--r--  jam-files/boost-build/util/path.py | 904
-rw-r--r--  jam-files/boost-build/util/print.jam | 488
-rw-r--r--  jam-files/boost-build/util/regex.jam | 193
-rw-r--r--  jam-files/boost-build/util/regex.py | 25
-rw-r--r--  jam-files/boost-build/util/sequence.jam | 335
-rw-r--r--  jam-files/boost-build/util/sequence.py | 50
-rw-r--r--  jam-files/boost-build/util/set.jam | 93
-rw-r--r--  jam-files/boost-build/util/set.py | 42
-rw-r--r--  jam-files/boost-build/util/string.jam | 189
-rw-r--r--  jam-files/boost-build/util/utility.jam | 235
-rw-r--r--  jam-files/boost-build/util/utility.py | 155
-rw-r--r--  jam-files/engine/Jambase | 2473
-rw-r--r--  jam-files/engine/boost-jam.spec | 64
-rw-r--r--  jam-files/engine/boost-no-inspect | 1
-rw-r--r--  jam-files/engine/build.bat | 532
-rw-r--r--  jam-files/engine/build.jam | 1070
-rwxr-xr-x  jam-files/engine/build.sh | 303
-rw-r--r--  jam-files/engine/build_vms.com | 105
-rw-r--r--  jam-files/engine/builtins.c | 2310
-rw-r--r--  jam-files/engine/builtins.h | 69
-rw-r--r--  jam-files/engine/bump_version.py | 80
-rw-r--r--  jam-files/engine/class.c | 141
-rw-r--r--  jam-files/engine/class.h | 13
-rw-r--r--  jam-files/engine/command.c | 100
-rw-r--r--  jam-files/engine/command.h | 61
-rw-r--r--  jam-files/engine/compile.c | 1424
-rw-r--r--  jam-files/engine/compile.h | 82
-rw-r--r--  jam-files/engine/debian/changelog | 72
-rw-r--r--  jam-files/engine/debian/control | 16
-rw-r--r--  jam-files/engine/debian/copyright | 25
-rw-r--r--  jam-files/engine/debian/jam.man.sgml | 236
-rwxr-xr-x  jam-files/engine/debian/rules | 73
-rw-r--r--  jam-files/engine/debug.c | 132
-rw-r--r--  jam-files/engine/debug.h | 54
-rw-r--r--  jam-files/engine/execcmd.h | 45
-rw-r--r--  jam-files/engine/execmac.c | 69
-rw-r--r--  jam-files/engine/execnt.c | 1296
-rw-r--r--  jam-files/engine/execunix.c | 569
-rw-r--r--  jam-files/engine/execvms.c | 161
-rw-r--r--  jam-files/engine/expand.c | 733
-rw-r--r--  jam-files/engine/expand.h | 14
-rw-r--r--  jam-files/engine/filemac.c | 175
-rw-r--r--  jam-files/engine/filent.c | 387
-rw-r--r--  jam-files/engine/fileos2.c | 138
-rw-r--r--  jam-files/engine/filesys.c | 83
-rw-r--r--  jam-files/engine/filesys.h | 60
-rw-r--r--  jam-files/engine/fileunix.c | 501
-rw-r--r--  jam-files/engine/filevms.c | 327
-rw-r--r--  jam-files/engine/frames.c | 22
-rw-r--r--  jam-files/engine/frames.h | 37
-rw-r--r--  jam-files/engine/glob.c | 152
-rw-r--r--  jam-files/engine/hash.c | 459
-rw-r--r--  jam-files/engine/hash.h | 25
-rw-r--r--  jam-files/engine/hcache.c | 434
-rw-r--r--  jam-files/engine/hcache.h | 18
-rw-r--r--  jam-files/engine/hdrmacro.c | 137
-rw-r--r--  jam-files/engine/hdrmacro.h | 14
-rw-r--r--  jam-files/engine/headers.c | 203
-rw-r--r--  jam-files/engine/headers.h | 16
-rw-r--r--  jam-files/engine/jam.c | 632
-rw-r--r--  jam-files/engine/jam.h | 579
-rw-r--r--  jam-files/engine/jambase.c | 1691
-rw-r--r--  jam-files/engine/jambase.h | 15
-rw-r--r--  jam-files/engine/jamgram.c | 1830
-rw-r--r--  jam-files/engine/jamgram.h | 140
-rw-r--r--  jam-files/engine/jamgram.y | 371
-rw-r--r--  jam-files/engine/jamgram.yy | 329
-rw-r--r--  jam-files/engine/jamgramtab.h | 44
-rw-r--r--  jam-files/engine/lists.c | 339
-rw-r--r--  jam-files/engine/lists.h | 108
-rw-r--r--  jam-files/engine/make.c | 814
-rw-r--r--  jam-files/engine/make.h | 41
-rw-r--r--  jam-files/engine/make1.c | 1145
-rw-r--r--  jam-files/engine/md5.c | 381
-rw-r--r--  jam-files/engine/md5.h | 91
-rw-r--r--  jam-files/engine/mem.c | 75
-rw-r--r--  jam-files/engine/mem.h | 134
-rw-r--r--  jam-files/engine/mkjambase.c | 123
-rw-r--r--  jam-files/engine/modules.c | 168
-rw-r--r--  jam-files/engine/modules.h | 37
-rw-r--r--  jam-files/engine/modules/order.c | 144
-rw-r--r--  jam-files/engine/modules/path.c | 32
-rw-r--r--  jam-files/engine/modules/property-set.c | 110
-rw-r--r--  jam-files/engine/modules/readme.txt | 3
-rw-r--r--  jam-files/engine/modules/regex.c | 96
-rw-r--r--  jam-files/engine/modules/sequence.c | 42
-rw-r--r--  jam-files/engine/modules/set.c | 41
-rw-r--r--  jam-files/engine/native.c | 36
-rw-r--r--  jam-files/engine/native.h | 34
-rw-r--r--  jam-files/engine/newstr.c | 174
-rw-r--r--  jam-files/engine/newstr.h | 14
-rw-r--r--  jam-files/engine/option.c | 94
-rw-r--r--  jam-files/engine/option.h | 23
-rw-r--r--  jam-files/engine/output.c | 125
-rw-r--r--  jam-files/engine/output.h | 29
-rw-r--r--  jam-files/engine/parse.c | 132
-rw-r--r--  jam-files/engine/parse.h | 59
-rw-r--r--  jam-files/engine/patchlevel.h | 17
-rw-r--r--  jam-files/engine/pathmac.c | 252
-rw-r--r--  jam-files/engine/pathsys.h | 91
-rw-r--r--  jam-files/engine/pathunix.c | 457
-rw-r--r--  jam-files/engine/pathvms.c | 406
-rw-r--r--  jam-files/engine/pwd.c | 66
-rw-r--r--  jam-files/engine/pwd.h | 10
-rw-r--r--  jam-files/engine/regexp.c | 1328
-rw-r--r--  jam-files/engine/regexp.h | 32
-rw-r--r--  jam-files/engine/rules.c | 810
-rw-r--r--  jam-files/engine/rules.h | 280
-rw-r--r--  jam-files/engine/scan.c | 418
-rw-r--r--  jam-files/engine/scan.h | 56
-rw-r--r--  jam-files/engine/search.c | 223
-rw-r--r--  jam-files/engine/search.h | 11
-rw-r--r--  jam-files/engine/strings.c | 201
-rw-r--r--  jam-files/engine/strings.h | 34
-rw-r--r--  jam-files/engine/subst.c | 94
-rw-r--r--  jam-files/engine/timestamp.c | 226
-rw-r--r--  jam-files/engine/timestamp.h | 12
-rw-r--r--  jam-files/engine/variable.c | 631
-rw-r--r--  jam-files/engine/variable.h | 35
-rw-r--r--  jam-files/engine/w32_getreg.c | 207
-rw-r--r--  jam-files/engine/yyacc.c | 268
-rw-r--r--  jam-files/sanity.jam | 277
297 files changed, 0 insertions, 83928 deletions
diff --git a/jam-files/LICENSE_1_0.txt b/jam-files/LICENSE_1_0.txt
deleted file mode 100644
index 36b7cd93..00000000
--- a/jam-files/LICENSE_1_0.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-Boost Software License - Version 1.0 - August 17th, 2003
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/jam-files/boost-build/boost-build.jam b/jam-files/boost-build/boost-build.jam
deleted file mode 100644
index 73db0497..00000000
--- a/jam-files/boost-build/boost-build.jam
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2002 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-boost-build kernel ;
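
As a rough illustration (assuming the standard Boost.Build bootstrap convention; the file below is hypothetical and not part of this commit), a project that bundles this tree would carry its own boost-build.jam at the repository root, pointing the bjam engine at the directory that holds bootstrap.jam:

    # hypothetical root-level boost-build.jam for a project vendoring this copy;
    # the argument is the directory containing Boost.Build's bootstrap.jam
    boost-build jam-files/boost-build ;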
diff --git a/jam-files/boost-build/bootstrap.jam b/jam-files/boost-build/bootstrap.jam
deleted file mode 100644
index af3e8bf5..00000000
--- a/jam-files/boost-build/bootstrap.jam
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2003 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This file handles the initial phase of Boost.Build loading.
-# Boost.Jam has already figured out where Boost.Build is
-# and loads this file, which is responsible for initialization
-# of basic facilities such as the module system and loading the
-# main Boost.Build module, build-system.jam.
-#
-# The exact operation of this module is not interesting; it makes
-# sense to look at build-system.jam right away.
-
-# Load the kernel/bootstrap.jam, which does all the work.
-.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
-include $(.bootstrap-file) ;
\ No newline at end of file
diff --git a/jam-files/boost-build/build-system.jam b/jam-files/boost-build/build-system.jam
deleted file mode 100644
index 9f9c884c..00000000
--- a/jam-files/boost-build/build-system.jam
+++ /dev/null
@@ -1,1008 +0,0 @@
-# Copyright 2003, 2005, 2007 Dave Abrahams
-# Copyright 2006, 2007 Rene Rivera
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This file is part of Boost Build version 2. You can think of it as forming the
-# main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
-
-import build-request ;
-import builtin ;
-import "class" : new ;
-import errors ;
-import feature ;
-import make ;
-import modules ;
-import os ;
-import path ;
-import project ;
-import property-set ;
-import regex ;
-import sequence ;
-import targets ;
-import toolset ;
-import utility ;
-import version ;
-import virtual-target ;
-import generators ;
-import configure ;
-
-################################################################################
-#
-# Module global data.
-#
-################################################################################
-
-# Shortcut used in this module for accessing used command-line parameters.
-.argv = [ modules.peek : ARGV ] ;
-
-# Flag indicating we should display additional debugging information related to
-# locating and loading Boost Build configuration files.
-.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
-
-# Legacy option doing too many things, some of which are not even documented.
-# Should be phased out.
-# * Disables loading site and user configuration files.
-# * Disables auto-configuration for toolsets specified explicitly on the
-# command-line.
-# * Causes --toolset command-line options to be ignored.
-# * Prevents the default toolset from being used even if no toolset has been
-# configured at all.
-.legacy-ignore-config = [ MATCH ^(--ignore-config)$ : $(.argv) ] ;
-
-# The cleaning is tricky. Say, if user says 'bjam --clean foo' where 'foo' is a
-# directory, then we want to clean targets which are in 'foo' as well as those
-# in any children Jamfiles under foo but not in any unrelated Jamfiles. To
-# achieve this we collect a list of projects under which cleaning is allowed.
-.project-targets = ;
-
-# Virtual targets obtained when building main target references on the command
-# line. When running 'bjam --clean main_target' we want to clean only files
-# belonging to that main target so we need to record which targets are produced
-# for it.
-.results-of-main-targets = ;
-
-# Was an XML dump requested?
-.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
-
-# Default toolset & version to be used in case no other toolset has been used
-# explicitly by either the loaded configuration files, the loaded project build
-# scripts or an explicit toolset request on the command line. If not specified,
-# an arbitrary default will be used based on the current host OS. This value,
-# while not strictly necessary, has been added to allow testing Boost-Build's
-# default toolset usage functionality.
-.default-toolset = ;
-.default-toolset-version = ;
-
-
-################################################################################
-#
-# Public rules.
-#
-################################################################################
-
-# Returns the property set with the free features from the currently processed
-# build request.
-#
-rule command-line-free-features ( )
-{
- return $(.command-line-free-features) ;
-}
-
-
-# Returns the location of the build system. The primary use case is building
-# Boost where it is sometimes needed to get the location of other components
-# (e.g. BoostBook files) and it is convenient to use locations relative to the
-# Boost Build path.
-#
-rule location ( )
-{
- local r = [ modules.binding build-system ] ;
- return $(r:P) ;
-}
-
-
-# Sets the default toolset & version to be used in case no other toolset has
-# been used explicitly by either the loaded configuration files, the loaded
-# project build scripts or an explicit toolset request on the command line. For
-# more detailed information see the comment related to used global variables.
-#
-rule set-default-toolset ( toolset : version ? )
-{
- .default-toolset = $(toolset) ;
- .default-toolset-version = $(version) ;
-}
-
-rule set-pre-build-hook ( function )
-{
- .pre-build-hook = $(function) ;
-}
-
-rule set-post-build-hook ( function )
-{
- .post-build-hook = $(function) ;
-}
-
-################################################################################
-#
-# Local rules.
-#
-################################################################################
-
-# Returns actual Jam targets to be used for executing a clean request.
-#
-local rule actual-clean-targets ( )
-{
- # Construct a list of projects explicitly detected as targets on this build
- # system run. These are the projects under which cleaning is allowed.
- for local t in $(targets)
- {
- if [ class.is-a $(t) : project-target ]
- {
- .project-targets += [ $(t).project-module ] ;
- }
- }
-
- # Construct a list of targets explicitly detected on this build system run
- # as a result of building main targets.
- local targets-to-clean ;
- for local t in $(.results-of-main-targets)
- {
- # Do not include roots or sources.
- targets-to-clean += [ virtual-target.traverse $(t) ] ;
- }
- targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
-
- local to-clean ;
- for local t in [ virtual-target.all-targets ]
- {
- local p = [ $(t).project ] ;
-
- # Remove only derived targets.
- if [ $(t).action ]
- {
- if $(t) in $(targets-to-clean) ||
- [ should-clean-project [ $(p).project-module ] ] = true
- {
- to-clean += $(t) ;
- }
- }
- }
-
- local to-clean-actual ;
- for local t in $(to-clean)
- {
- to-clean-actual += [ $(t).actualize ] ;
- }
- return $(to-clean-actual) ;
-}
-
-
-# Given a target id, try to find and return the corresponding target. This is
-# only invoked when there is no Jamfile in ".". This code somewhat duplicates
-# code in project-target.find but we can not reuse that code without a
-# project-targets instance.
-#
-local rule find-target ( target-id )
-{
- local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
-
- local pm ;
- if $(split)
- {
- pm = [ project.find $(split[1]) : "." ] ;
- }
- else
- {
- pm = [ project.find $(target-id) : "." ] ;
- }
-
- local result ;
- if $(pm)
- {
- result = [ project.target $(pm) ] ;
- }
-
- if $(split)
- {
- result = [ $(result).find $(split[2]) ] ;
- }
-
- return $(result) ;
-}
-
-
-# Initializes a new configuration module.
-#
-local rule initialize-config-module ( module-name : location ? )
-{
- project.initialize $(module-name) : $(location) ;
- if USER_MODULE in [ RULENAMES ]
- {
- USER_MODULE $(module-name) ;
- }
-}
-
-
-# Helper rule used to load configuration files. Loads the first configuration
-# file with the given 'filename' at 'path' into module with name 'module-name'.
-# Not finding the requested file may or may not be treated as an error depending
-# on the must-find parameter. Returns a normalized path to the loaded
-# configuration file or nothing if no file was loaded.
-#
-local rule load-config ( module-name : filename : path + : must-find ? )
-{
- if $(.debug-config)
- {
- ECHO "notice: Searching" "$(path)" "for" "$(module-name)"
- "configuration file" "$(filename)" "." ;
- }
- local where = [ GLOB $(path) : $(filename) ] ;
- if $(where)
- {
- where = [ NORMALIZE_PATH $(where[1]) ] ;
- if $(.debug-config)
- {
- ECHO "notice: Loading" "$(module-name)" "configuration file"
- "$(filename)" "from" $(where) "." ;
- }
-
- # Set source location so that path-constant in config files
- # with relative paths work. This is of most importance
- # for project-config.jam, but may be used in other
- # config files as well.
- local attributes = [ project.attributes $(module-name) ] ;
- $(attributes).set source-location : $(where:D) : exact ;
- modules.load $(module-name) : $(filename) : $(path) ;
- project.load-used-projects $(module-name) ;
- }
- else
- {
- if $(must-find)
- {
- errors.user-error "Configuration file" "$(filename)" "not found in"
- "$(path)" "." ;
- }
- if $(.debug-config)
- {
- ECHO "notice:" "Configuration file" "$(filename)" "not found in"
- "$(path)" "." ;
- }
- }
- return $(where) ;
-}
-
-
-# Loads all the configuration files used by Boost Build in the following order:
-#
-# -- test-config --
-# Loaded only if specified on the command-line using the --test-config
-# command-line parameter. It is ok for this file not to exist even if specified.
-# If this configuration file is loaded, regular site and user configuration
-# files will not be. If a relative path is specified, file is searched for in
-# the current folder.
-#
-# -- site-config --
-# Always named site-config.jam. Will only be found if located on the system
-# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
-# path, in that order. Not loaded in case the test-config configuration file is
-# loaded or either the --ignore-site-config or the --ignore-config command-line
-# option is specified.
-#
-# -- user-config --
-# Named user-config.jam by default or may be named explicitly using the
-# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
-# variable. If named explicitly the file is looked for from the current working
-# directory and if the default one is used then it is searched for in the
-# user's home directory and the Boost Build path, in that order. Not loaded in
-# case either the test-config configuration file is loaded, --ignore-config
-# command-line option is specified or an empty file name is explicitly
-# specified. If the file name has been given explicitly then the file must
-# exist.
-#
-# Test configurations have been added primarily for use by Boost Build's
-# internal unit testing system but may be used freely in other places as well.
-#
-local rule load-configuration-files
-{
- # Flag indicating that site configuration should not be loaded.
- local ignore-site-config =
- [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
-
- if $(.legacy-ignore-config) && $(.debug-config)
- {
- ECHO "notice: Regular site and user configuration files will be ignored" ;
- ECHO "notice: due to the --ignore-config command-line option." ;
- }
-
- initialize-config-module test-config ;
- local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
- local uq = [ MATCH \"(.*)\" : $(test-config) ] ;
- if $(uq)
- {
- test-config = $(uq) ;
- }
- if $(test-config)
- {
- local where =
- [ load-config test-config : $(test-config:BS) : $(test-config:D) ] ;
- if $(where)
- {
- if $(.debug-config) && ! $(.legacy-ignore-config)
- {
- ECHO "notice: Regular site and user configuration files will" ;
- ECHO "notice: be ignored due to the test configuration being"
- "loaded." ;
- }
- }
- else
- {
- test-config = ;
- }
- }
-
- local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
- local site-path = /etc $(user-path) ;
- if [ os.name ] in NT CYGWIN
- {
- site-path = [ modules.peek : SystemRoot ] $(user-path) ;
- }
-
- if $(ignore-site-config) && !$(.legacy-ignore-config)
- {
- ECHO "notice: Site configuration files will be ignored due to the" ;
- ECHO "notice: --ignore-site-config command-line option." ;
- }
-
- initialize-config-module site-config ;
- if ! $(test-config) && ! $(ignore-site-config) && ! $(.legacy-ignore-config)
- {
- load-config site-config : site-config.jam : $(site-path) ;
- }
-
- initialize-config-module user-config ;
- if ! $(test-config) && ! $(.legacy-ignore-config)
- {
- local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
- user-config = $(user-config[-1]) ;
- user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
- # Special handling for the case when the OS does not strip the quotes
- # around the file name, as is the case when using Cygwin bash.
- user-config = [ utility.unquote $(user-config) ] ;
- local explicitly-requested = $(user-config) ;
- user-config ?= user-config.jam ;
-
- if $(user-config)
- {
- if $(explicitly-requested)
- {
- # Treat explicitly entered user paths as native OS path
- # references and, if non-absolute, root them at the current
- # working directory.
- user-config = [ path.make $(user-config) ] ;
- user-config = [ path.root $(user-config) [ path.pwd ] ] ;
- user-config = [ path.native $(user-config) ] ;
-
- if $(.debug-config)
- {
- ECHO "notice: Loading explicitly specified user"
- "configuration file:" ;
- ECHO " $(user-config)" ;
- }
-
- load-config user-config : $(user-config:BS) : $(user-config:D)
- : must-exist ;
- }
- else
- {
- load-config user-config : $(user-config) : $(user-path) ;
- }
- }
- else if $(.debug-config)
- {
- ECHO "notice: User configuration file loading explicitly disabled." ;
- }
- }
-
- # We look for project-config.jam from "." upward.
- # I am not sure this is 100% the right decision; we might as well check for
- # it only alongside the Jamroot file. However:
- #
- # - We need to load project-root.jam before Jamroot
- # - We probably would need to load project-root.jam even if there's no
- # Jamroot - e.g. to implement automake-style out-of-tree builds.
- local file = [ path.glob "." : project-config.jam ] ;
- if ! $(file)
- {
- file = [ path.glob-in-parents "." : project-config.jam ] ;
- }
- if $(file)
- {
- initialize-config-module project-config : $(file:D) ;
- load-config project-config : project-config.jam : $(file:D) ;
- }
-}
-
-
-# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
-# toolset=xx,yy,...zz in the command line. May return additional properties to
-# be processed as if they had been specified by the user.
-#
-local rule process-explicit-toolset-requests
-{
- local extra-properties ;
-
- local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
- local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
-
- for local t in $(option-toolsets) $(feature-toolsets)
- {
- # Parse toolset-version/properties.
- local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
- local toolset-version = $((t-v,t,v)[1]) ;
- local toolset = $((t-v,t,v)[2]) ;
- local version = $((t-v,t,v)[3]) ;
-
- if $(.debug-config)
- {
- ECHO notice: [cmdline-cfg] Detected command-line request for
- $(toolset-version): "toolset=" $(toolset) "version="
- $(version) ;
- }
-
- # If the toolset is not known, configure it now.
- local known ;
- if $(toolset) in [ feature.values <toolset> ]
- {
- known = true ;
- }
- if $(known) && $(version) && ! [ feature.is-subvalue toolset
- : $(toolset) : version : $(version) ]
- {
- known = ;
- }
- # TODO: we should do 'using $(toolset)' in case no version has been
- # specified and there are no versions defined for the given toolset to
- # allow the toolset to configure its default version. For this we need
- # to know how to detect whether a given toolset has any versions
- # defined. An alternative would be to do this whenever version is not
- # specified but that would require that toolsets correctly handle the
- # case when their default version is configured multiple times which
- # should be checked for all existing toolsets first.
-
- if ! $(known)
- {
- if $(.debug-config)
- {
- ECHO "notice: [cmdline-cfg] toolset $(toolset-version) not"
- "previously configured; attempting to auto-configure now" ;
- }
- toolset.using $(toolset) : $(version) ;
- }
- else
- {
- if $(.debug-config)
- {
- ECHO notice: [cmdline-cfg] toolset $(toolset-version) already
- configured ;
- }
- }
-
- # Make sure we get an appropriate property into the build request in
- # case toolset has been specified using the "--toolset=..." command-line
- # option form.
- if ! $(t) in $(.argv) && ! $(t) in $(feature-toolsets)
- {
- if $(.debug-config)
- {
- ECHO notice: [cmdline-cfg] adding toolset=$(t) to the build
- request. ;
- }
- extra-properties += toolset=$(t) ;
- }
- }
-
- return $(extra-properties) ;
-}
-
-
-# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
-# child to any of the projects requested to be cleaned in this build system run.
-# Returns 'false' otherwise. Expects the .project-targets list to have already
-# been constructed.
-#
-local rule should-clean-project ( project )
-{
- if ! $(.should-clean-project.$(project))
- {
- local r = false ;
- if $(project) in $(.project-targets)
- {
- r = true ;
- }
- else
- {
- local parent = [ project.attribute $(project) parent-module ] ;
- if $(parent) && $(parent) != user-config
- {
- r = [ should-clean-project $(parent) ] ;
- }
- }
- .should-clean-project.$(project) = $(r) ;
- }
-
- return $(.should-clean-project.$(project)) ;
-}
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-{
- if --version in $(.argv)
- {
- version.print ;
- EXIT ;
- }
-
- version.verify-engine-version ;
-
- load-configuration-files ;
-
- local extra-properties ;
- # Note that this causes --toolset options to be ignored if --ignore-config
- # is specified.
- if ! $(.legacy-ignore-config)
- {
- extra-properties = [ process-explicit-toolset-requests ] ;
- }
-
-
- # We always load project in "." so that 'use-project' directives have any
- # chance of being seen. Otherwise, we would not be able to refer to
- # subprojects using target ids.
- local current-project ;
- if [ project.find "." : "." ]
- {
- current-project = [ project.target [ project.load "." ] ] ;
- }
-
-
- # In case there are no toolsets currently defined, make the build run using
- # the default toolset.
- if ! $(.legacy-ignore-config) && ! [ feature.values <toolset> ]
- {
- local default-toolset = $(.default-toolset) ;
- local default-toolset-version = ;
- if $(default-toolset)
- {
- default-toolset-version = $(.default-toolset-version) ;
- }
- else
- {
- default-toolset = gcc ;
- if [ os.name ] = NT
- {
- default-toolset = msvc ;
- }
- else if [ os.name ] = MACOSX
- {
- default-toolset = darwin ;
- }
- }
-
- ECHO "warning: No toolsets are configured." ;
- ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
- ECHO "warning: If the default is wrong, your build may not work correctly." ;
- ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ;
- ECHO "warning: For more configuration options, please consult" ;
- ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
-
- toolset.using $(default-toolset) : $(default-toolset-version) ;
- }
-
-
- # Parse command line for targets and properties. Note that this requires
- # that all project files already be loaded.
- local build-request = [ build-request.from-command-line $(.argv)
- $(extra-properties) ] ;
- local target-ids = [ $(build-request).get-at 1 ] ;
- local properties = [ $(build-request).get-at 2 ] ;
-
-
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if $(properties)
- {
- expanded = [ build-request.expand-no-defaults $(properties) ] ;
- local xexpanded ;
- for local e in $(expanded)
- {
- xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
- }
- expanded = $(xexpanded) ;
- }
- else
- {
- expanded = [ property-set.empty ] ;
- }
-
-
- # Check that we actually found something to build.
- if ! $(current-project) && ! $(target-ids)
- {
- errors.user-error "error: no Jamfile in current directory found, and no"
- "target references specified." ;
- EXIT ;
- }
-
-
- # Flags indicating that this build system run has been started in order to
- # clean existing instead of create new targets. Note that these are not the
- # final flag values as they may get changed later on due to some special
- # targets being specified on the command line.
- local clean ; if "--clean" in $(.argv) { clean = true ; }
- local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
-
-
- # List of explicitly requested files to build. Any target references read
- # from the command line parameter not recognized as one of the targets
- # defined in the loaded Jamfiles will be interpreted as an explicitly
- # requested file to build. If any such files are explicitly requested then
- # only those files and the targets they depend on will be built and they
- # will be searched for among targets that would have been built had there
- # been no explicitly requested files.
- local explicitly-requested-files ;
-
-
- # List of Boost Build meta-targets, virtual-targets and actual Jam targets
- # constructed in this build system run.
- local targets ;
- local virtual-targets ;
- local actual-targets ;
-
-
- # Process each target specified on the command-line and convert it into
- # internal Boost Build target objects. Detect special clean target. If no
- # main Boost Build targets were explicitly requested, use the current project
- # as the target.
- for local id in $(target-ids)
- {
- if $(id) = clean
- {
- clean = true ;
- }
- else
- {
- local t ;
- if $(current-project)
- {
- t = [ $(current-project).find $(id) : no-error ] ;
- }
- else
- {
- t = [ find-target $(id) ] ;
- }
-
- if ! $(t)
- {
- ECHO "notice: could not find main target" $(id) ;
- ECHO "notice: assuming it is a name of file to create." ;
- explicitly-requested-files += $(id) ;
- }
- else
- {
- targets += $(t) ;
- }
- }
- }
- if ! $(targets)
- {
- targets += [ project.target [ project.module-name "." ] ] ;
- }
-
- if [ option.get dump-generators : : true ]
- {
- generators.dump ;
- }
-
- # We wish to put config.log in the build directory corresponding
- # to Jamroot, so that the location does not differ depending on
- # directory where we do build. The amount of indirection necessary
- # here is scary.
- local first-project = [ $(targets[0]).project ] ;
- local first-project-root-location = [ $(first-project).get project-root ] ;
- local first-project-root-module = [ project.load $(first-project-root-location) ] ;
- local first-project-root = [ project.target $(first-project-root-module) ] ;
- local first-build-build-dir = [ $(first-project-root).build-dir ] ;
- configure.set-log-file $(first-build-build-dir)/config.log ;
-
- # Now that we have a set of targets to build and a set of property sets to
- # build the targets with, we can start the main build process by using each
- # property set to generate virtual targets from all of our listed targets
- # and any of their dependants.
- for local p in $(expanded)
- {
- .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
- for local t in $(targets)
- {
- local g = [ $(t).generate $(p) ] ;
- if ! [ class.is-a $(t) : project-target ]
- {
- .results-of-main-targets += $(g[2-]) ;
- }
- virtual-targets += $(g[2-]) ;
- }
- }
-
-
- # Convert collected virtual targets into actual raw Jam targets.
- for t in $(virtual-targets)
- {
- actual-targets += [ $(t).actualize ] ;
- }
-
-
- # If XML data output has been requested prepare additional rules and targets
- # so we can hook into Jam to collect build data while it is building and have
- # it trigger the final XML report generation after all the planned targets
- # have been built.
- if $(.out-xml)
- {
- # Get a qualified virtual target name.
- rule full-target-name ( target )
- {
- local name = [ $(target).name ] ;
- local project = [ $(target).project ] ;
- local project-path = [ $(project).get location ] ;
- return $(project-path)//$(name) ;
- }
-
- # Generate an XML file containing build statistics for each constituent.
- #
- rule out-xml ( xml-file : constituents * )
- {
- # Prepare valid XML header and footer with some basic info.
- local nl = "
-" ;
- local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
- local timestamp = [ modules.peek : JAMDATE ] ;
- local cwd = [ PWD ] ;
- local command = $(.argv) ;
- local bb-version = [ version.boost-build ] ;
- .header on $(xml-file) =
- "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
- "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
- "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
- "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
- "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
- "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
- ;
- .footer on $(xml-file) =
- "$(nl)</build>" ;
-
- # Generate the target dependency graph.
- .contents on $(xml-file) +=
- "$(nl) <targets>" ;
- for local t in [ virtual-target.all-targets ]
- {
- local action = [ $(t).action ] ;
- if $(action)
- # If a target has no action, it has no dependencies.
- {
- local name = [ full-target-name $(t) ] ;
- local sources = [ $(action).sources ] ;
- local dependencies ;
- for local s in $(sources)
- {
- dependencies += [ full-target-name $(s) ] ;
- }
-
- local path = [ $(t).path ] ;
- local jam-target = [ $(t).actual-name ] ;
-
- .contents on $(xml-file) +=
- "$(nl) <target>"
- "$(nl) <name><![CDATA[$(name)]]></name>"
- "$(nl) <dependencies>"
- "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
- "$(nl) </dependencies>"
- "$(nl) <path><![CDATA[$(path)]]></path>"
- "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
- "$(nl) </target>"
- ;
- }
- }
- .contents on $(xml-file) +=
- "$(nl) </targets>" ;
-
- # Build $(xml-file) after $(constituents). Do so even if a
- # constituent action fails and regenerate the xml on every bjam run.
- INCLUDES $(xml-file) : $(constituents) ;
- ALWAYS $(xml-file) ;
- __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
- out-xml.generate $(xml-file) ;
- }
-
- # The actual build actions are here; if we did this work in the actions
- # clause we would have to form a valid command line containing the
- # result of @(...) below (the name of the XML file).
- #
- rule out-xml.generate-action ( args * : xml-file
- : command status start end user system : output ? )
- {
- local contents =
- [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
- local f = @($(xml-file):E=$(contents)) ;
- }
-
- # Nothing to do here; the *real* actions happen in
- # out-xml.generate-action.
- actions quietly out-xml.generate { }
-
- # Define the out-xml file target, which depends on all the targets so
- # that it runs the collection after the targets have run.
- out-xml $(.out-xml) : $(actual-targets) ;
-
- # Set up a global __ACTION_RULE__ that records all the available
- # statistics about each actual target in a variable "on" the --out-xml
- # target.
- #
- rule out-xml.collect ( xml-file : target : command status start end user
- system : output ? )
- {
- local nl = "
-" ;
- # Open the action with some basic info.
- .contents on $(xml-file) +=
- "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
-
- # If we have an action object we can print out more detailed info.
- local action = [ on $(target) return $(.action) ] ;
- if $(action)
- {
- local action-name = [ $(action).action-name ] ;
- local action-sources = [ $(action).sources ] ;
- local action-props = [ $(action).properties ] ;
-
- # The qualified name of the action with which we created the target.
- .contents on $(xml-file) +=
- "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
-
- # The sources that made up the target.
- .contents on $(xml-file) +=
- "$(nl) <sources>" ;
- for local source in $(action-sources)
- {
- local source-actual = [ $(source).actual-name ] ;
- .contents on $(xml-file) +=
- "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
- }
- .contents on $(xml-file) +=
- "$(nl) </sources>" ;
-
- # The properties that define the conditions under which the
- # target was built.
- .contents on $(xml-file) +=
- "$(nl) <properties>" ;
- for local prop in [ $(action-props).raw ]
- {
- local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
- .contents on $(xml-file) +=
- "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
- }
- .contents on $(xml-file) +=
- "$(nl) </properties>" ;
- }
-
- local locate = [ on $(target) return $(LOCATE) ] ;
- locate ?= "" ;
- .contents on $(xml-file) +=
- "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
- "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
- "$(nl) <command><![CDATA[$(command)]]></command>"
- "$(nl) <output><![CDATA[$(output)]]></output>" ;
- .contents on $(xml-file) +=
- "$(nl) </action>" ;
- }
-
- # When no __ACTION_RULE__ is set "on" a target, the search falls back to
- # the global module.
- module
- {
- __ACTION_RULE__ = build-system.out-xml.collect
- [ modules.peek build-system : .out-xml ] ;
- }
-
- IMPORT
- build-system :
- out-xml.collect
- out-xml.generate-action
- : :
- build-system.out-xml.collect
- build-system.out-xml.generate-action
- ;
- }
-
- local j = [ option.get jobs ] ;
- if $(j)
- {
- modules.poke : PARALLELISM : $(j) ;
- }
-
- local k = [ option.get keep-going : true : true ] ;
- if $(k) in "on" "yes" "true"
- {
- modules.poke : KEEP_GOING : 1 ;
- }
- else if $(k) in "off" "no" "false"
- {
- modules.poke : KEEP_GOING : 0 ;
- }
- else
- {
- ECHO "error: Invalid value for the --keep-going option" ;
- EXIT ;
- }
-
- # The 'all' pseudo target is not strictly needed except in the case when we
- # use it below but people often assume they always have this target
- # available and do not declare it themselves before use which may cause
- # build failures with an error message about not being able to build the
- # 'all' target.
- NOTFILE all ;
-
- # And now that all the actual raw Jam targets and all the dependencies
- # between them have been prepared all that is left is to tell Jam to update
- # those targets.
- if $(explicitly-requested-files)
- {
- # Note that this case can not be joined with the regular one when only
- # exact Boost Build targets are requested as here we do not build those
- # requested targets but only use them to construct the dependency tree
- # needed to build the explicitly requested files.
- UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ;
- }
- else if $(cleanall)
- {
- UPDATE clean-all ;
- }
- else if $(clean)
- {
- common.Clean clean : [ actual-clean-targets ] ;
- UPDATE clean ;
- }
- else
- {
- configure.print-configure-checks-summary ;
-
- if $(.pre-build-hook)
- {
- $(.pre-build-hook) ;
- }
-
- DEPENDS all : $(actual-targets) ;
- if UPDATE_NOW in [ RULENAMES ]
- {
- local ok = [ UPDATE_NOW all $(.out-xml) ] ;
- if $(.post-build-hook)
- {
- $(.post-build-hook) $(ok) ;
- }
- # Prevent automatic update of the 'all' target, now that
- # we have explicitly updated what we wanted.
- UPDATE ;
- }
- else
- {
- UPDATE all $(.out-xml) ;
- }
- }
-}
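
As a minimal sketch of the configuration files this module searches for (illustrative content only, not taken from this repository; compilers, versions and paths are placeholders), a user-config.jam usually holds little more than toolset declarations:

    # illustrative user-config.jam picked up by load-configuration-files above
    using gcc : 4.7 : /usr/bin/g++-4.7 ;
    using python : 2.7 : /usr/bin/python2.7 ;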
diff --git a/jam-files/boost-build/build/__init__.py b/jam-files/boost-build/build/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/jam-files/boost-build/build/__init__.py
+++ /dev/null
diff --git a/jam-files/boost-build/build/ac.jam b/jam-files/boost-build/build/ac.jam
deleted file mode 100644
index 6768f358..00000000
--- a/jam-files/boost-build/build/ac.jam
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property-set ;
-import path ;
-import modules ;
-import "class" ;
-import errors ;
-import configure ;
-
-rule find-include-path ( variable : properties : header
- : provided-path ? )
-{
- # FIXME: document which properties affect this function by
- # default.
- local target-os = [ $(properties).get <target-os> ] ;
- properties = [ property-set.create <target-os>$(toolset) ] ;
- if $($(variable)-$(properties))
- {
- return $($(variable)-$(properties)) ;
- }
- else
- {
- provided-path ?= [ modules.peek : $(variable) ] ;
- includes = $(provided-path) ;
- includes += [ $(properties).get <include> ] ;
- if [ $(properties).get <target-os> ] != windows
- {
- # FIXME: use sysroot
- includes += /usr/include ;
- }
-
- local result ;
- while ! $(result) && $(includes)
- {
- local f = [ path.root $(header) $(includes[1]) ] ;
- ECHO "Checking " $(f) ;
- if [ path.exists $(f) ]
- {
- result = $(includes[1]) ;
- }
- else if $(provided-path)
- {
- errors.user-error "Could not find header" $(header)
- : "in the user-specified directory" $(provided-path) ;
- }
- includes = $(includes[2-]) ;
- }
- $(variable)-$(properties) = $(result) ;
- return $(result) ;
- }
-}
-
-rule find-library ( variable : properties : names + : provided-path ? )
-{
- local target-os = [ $(properties).get <target-os> ] ;
- properties = [ property-set.create <target-os>$(toolset) ] ;
- if $($(variable)-$(properties))
- {
- return $($(variable)-$(properties)) ;
- }
- else
- {
- provided-path ?= [ modules.peek : $(variable) ] ;
- paths = $(provided-path) ;
- paths += [ $(properties).get <library-path> ] ;
- if [ $(properties).get <target-os> ] != windows
- {
- paths += /usr/lib /usr/lib32 /usr/lib64 ;
- }
-
- local result ;
- while ! $(result) && $(paths)
- {
- while ! $(result) && $(names)
- {
- local f ;
- if $(target-os) = windows
- {
- f = $(paths[1])/$(names[1]).lib ;
- if [ path.exists $(f) ]
- {
- result = $(f) ;
- }
- }
- else
- {
- # FIXME: check for .a as well, depending on
- # the 'link' feature.
- f = $(paths[1])/lib$(names[1]).so ;
- ECHO "CHECKING $(f) " ;
- if [ path.exists $(f) ]
- {
- result = $(f) ;
- }
- }
- if ! $(result) && $(provided-path)
- {
- errors.user-error "Could not find either of: " $(names)
- : "in the user-specified directory" $(provided-path) ;
-
- }
- names = $(names[2-]) ;
- }
- paths = $(paths[2-]) ;
- }
- $(variable)-$(properties) = $(result) ;
- return $(result) ;
- }
-}
-
-class ac-library : basic-target
-{
- import errors ;
- import indirect ;
- import virtual-target ;
- import ac ;
- import configure ;
-
- rule __init__ ( name : project : * : * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources)
- : $(requirements) ;
-
- reconfigure $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule set-header ( header )
- {
- self.header = $(header) ;
- }
-
- rule set-default-names ( names + )
- {
- self.default-names = $(names) ;
- }
-
- rule reconfigure ( * : * )
- {
- ECHO "XXX" $(1) ;
- if ! $(1)
- {
- # This is 'using xxx ;'. Nothing to configure, really.
- }
- else
- {
- for i in 1 2 3 4 5 6 7 8 9
- {
- # FIXME: this naming is inconsistent with XXX_INCLUDE/XXX_LIBRARY
- if ! ( $($(i)[1]) in root include-path library-path library-name condition )
- {
- errors.user-error "Invalid named parameter" $($(i)[1]) ;
- }
- local name = $($(i)[1]) ;
- local value = $($(i)[2-]) ;
- if $($(name)) && $($(name)) != $(value)
- {
- errors.user-error "Attempt to change value of '$(name)'" ;
- }
- $(name) = $(value) ;
- }
-
- include-path ?= $(root)/include ;
- library-path ?= $(root)/lib ;
- }
- }
-
- rule construct ( name : sources * : property-set )
- {
- # FIXME: log results.
- local libnames = $(library-name) ;
- if ! $(libnames) && ! $(include-path) && ! $(library-path)
- {
- libnames = [ modules.peek : $(name:U)_NAME ] ;
- # Backward compatibility only.
- libnames ?= [ modules.peek : $(name:U)_BINARY ] ;
- }
- libnames ?= $(self.default-names) ;
-
- local includes = [
- ac.find-include-path $(name:U)_INCLUDE : $(property-set) : $(self.header) : $(include-path) ] ;
- local library = [ ac.find-library $(name:U)_LIBRARY : $(property-set) : $(libnames) : $(library-path) ] ;
- if $(includes) && $(library)
- {
- library = [ virtual-target.from-file $(library) : . : $(self.project) ] ;
- configure.log-library-search-result $(name) : "found" ;
- return [ property-set.create <include>$(includes) <source>$(library) ] ;
- }
- else
- {
- configure.log-library-search-result $(name) : "no found" ;
- }
- }
-}
-
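
A rough usage sketch of the rules above (hypothetical library name and directories; the variable names follow the $(name:U)_INCLUDE / $(name:U)_LIBRARY convention read via modules.peek in construct): poking the corresponding global variables steers find-include-path and find-library toward an explicit installation instead of the default search paths.

    # hypothetical override for a library named "zlib"; directories are placeholders
    import modules ;
    modules.poke : ZLIB_INCLUDE : /opt/zlib/include ;
    modules.poke : ZLIB_LIBRARY : /opt/zlib/lib ;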
diff --git a/jam-files/boost-build/build/alias.jam b/jam-files/boost-build/build/alias.jam
deleted file mode 100644
index 48019cb9..00000000
--- a/jam-files/boost-build/build/alias.jam
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'alias' rule and the associated target class.
-#
-# Alias is just a main target which returns its source targets without any
-# processing. For example:
-#
-# alias bin : hello test_hello ;
-# alias lib : helpers xml_parser ;
-#
-# Another important use of 'alias' is to conveniently group source files:
-#
-# alias platform-src : win.cpp : <os>NT ;
-# alias platform-src : linux.cpp : <os>LINUX ;
-# exe main : main.cpp platform-src ;
-#
-# Lastly, it is possible to create a local alias for some target, with different
-# properties:
-#
-# alias big_lib : : @/external_project/big_lib/<link>static ;
-#
-
-import "class" : new ;
-import project ;
-import property-set ;
-import targets ;
-
-
-class alias-target-class : basic-target
-{
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- return [ property-set.empty ] $(source-targets) ;
- }
-
- rule compute-usage-requirements ( subvariant )
- {
- local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
- return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
- }
-}
-
-
-# Declares the 'alias' target. It will process its sources virtual-targets by
-# returning them unaltered as its own constructed virtual-targets.
-#
-rule alias ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new alias-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project)
- ]
- : [ targets.main-target-usage-requirements $(usage-requirements) :
- $(project) ]
- ] ;
-}
-
-
-IMPORT $(__name__) : alias : : alias ;
diff --git a/jam-files/boost-build/build/alias.py b/jam-files/boost-build/build/alias.py
deleted file mode 100644
index 575e5360..00000000
--- a/jam-files/boost-build/build/alias.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Status: ported (danielw)
-# Base revision: 56043
-
-# This module defines the 'alias' rule and associated class.
-#
-# Alias is just a main target which returns its source targets without any
-# processing. For example::
-#
-# alias bin : hello test_hello ;
-# alias lib : helpers xml_parser ;
-#
-# Another important use of 'alias' is to conveniently group source files::
-#
-# alias platform-src : win.cpp : <os>NT ;
-# alias platform-src : linux.cpp : <os>LINUX ;
-# exe main : main.cpp platform-src ;
-#
-# Lastly, it is possible to create a local alias for some target, with different
-# properties::
-#
-# alias big_lib : : @/external_project/big_lib/<link>static ;
-#
-
-import targets
-import property_set
-from b2.manager import get_manager
-
-from b2.util import metatarget
-
-class AliasTarget(targets.BasicTarget):
-
- def __init__(self, *args):
- targets.BasicTarget.__init__(self, *args)
-
- def construct(self, name, source_targets, properties):
- return [property_set.empty(), source_targets]
-
- def compute_usage_requirements(self, subvariant):
- base = targets.BasicTarget.compute_usage_requirements(self, subvariant)
- # Add the sources' usage requirements. If we don't do this, "alias" does not
- # behave like a 100% alias.
- return base.add(subvariant.sources_usage_requirements())
-
-@metatarget
-def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
-
- project = get_manager().projects().current()
- targets = get_manager().targets()
-
- targets.main_target_alternative(AliasTarget(
- name, project,
- targets.main_target_sources(sources, name, no_renaming=True),
- targets.main_target_requirements(requirements or [], project),
- targets.main_target_default_build(default_build, project),
- targets.main_target_usage_requirements(usage_requirements or [], project)))
-
-# Declares the 'alias' target. It will build sources, and return them unaltered.
-get_manager().projects().add_rule("alias", alias)
-
diff --git a/jam-files/boost-build/build/build-request.jam b/jam-files/boost-build/build/build-request.jam
deleted file mode 100644
index 8a1f7b0e..00000000
--- a/jam-files/boost-build/build/build-request.jam
+++ /dev/null
@@ -1,322 +0,0 @@
-# Copyright 2002 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import sequence ;
-import set ;
-import regex ;
-import feature ;
-import property ;
-import container ;
-import string ;
-
-
-# Transform property-set by applying f to each component property.
-#
-local rule apply-to-property-set ( f property-set )
-{
- local properties = [ feature.split $(property-set) ] ;
- return [ string.join [ $(f) $(properties) ] : / ] ;
-}
-
-
-# Expand the given build request by combining all property-sets which do not
-# specify conflicting non-free features. Expects all the project files to
-# already be loaded.
-#
-rule expand-no-defaults ( property-sets * )
-{
- # First make all features and subfeatures explicit.
- local expanded-property-sets = [ sequence.transform apply-to-property-set
- feature.expand-subfeatures : $(property-sets) ] ;
-
- # Now combine all of the expanded property-sets
- local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
-
- return $(product) ;
-}
-
-
-# Implementation of x-product, below. Expects all the project files to already
-# be loaded.
-#
-local rule x-product-aux ( property-sets + )
-{
- local result ;
- local p = [ feature.split $(property-sets[1]) ] ;
- local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
- local seen ;
- # No conflict with things used at a higher level?
- if ! [ set.intersection $(f) : $(x-product-used) ]
- {
- local x-product-seen ;
- {
- # Do not mix in any conflicting features.
- local x-product-used = $(x-product-used) $(f) ;
-
- if $(property-sets[2])
- {
- local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
- result = $(property-sets[1])/$(rest) ;
- }
-
- result ?= $(property-sets[1]) ;
- }
-
- # If we did not encounter a conflicting feature lower down, do not
- # recurse again.
- if ! [ set.intersection $(f) : $(x-product-seen) ]
- {
- property-sets = ;
- }
-
- seen = $(x-product-seen) ;
- }
-
- if $(property-sets[2])
- {
- result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
- }
-
- # Note that we have seen these features so that higher levels will recurse
- # again without them set.
- x-product-seen += $(f) $(seen) ;
- return $(result) ;
-}
-
-
-# Return the cross-product of all elements of property-sets, less any that would
-# contain conflicting values for single-valued features. Expects all the project
-# files to already be loaded.
-#
-local rule x-product ( property-sets * )
-{
- if $(property-sets).non-empty
- {
- # Prepare some "scoped globals" that can be used by the implementation
- # function, x-product-aux.
- local x-product-seen x-product-used ;
- return [ x-product-aux $(property-sets) : $(feature-space) ] ;
- }
- # Otherwise return empty.
-}
-
-
-# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
-# an implicit value. Expects all the project files to already be loaded.
-#
-local rule looks-like-implicit-value ( v )
-{
- if [ feature.is-implicit-value $(v) ]
- {
- return true ;
- }
- else
- {
- local split = [ regex.split $(v) - ] ;
- if [ feature.is-implicit-value $(split[1]) ]
- {
- return true ;
- }
- }
-}
-
-
-# Takes the command line tokens (such as taken from the ARGV rule) and
-# constructs a build request from them. Returns a vector of two vectors (where
-# "vector" means container.jam's "vector"). First is the set of targets
-# specified in the command line, and second is the set of requested build
-# properties. Expects all the project files to already be loaded.
-#
-rule from-command-line ( command-line * )
-{
- local targets ;
- local properties ;
-
- command-line = $(command-line[2-]) ;
- local skip-next = ;
- for local e in $(command-line)
- {
- if $(skip-next)
- {
- skip-next = ;
- }
- else if ! [ MATCH "^(-).*" : $(e) ]
- {
- # Build request spec either has "=" in it or completely consists of
- # implicit feature values.
- local fs = feature-space ;
- if [ MATCH "(.*=.*)" : $(e) ]
- || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
- {
- properties += [ convert-command-line-element $(e) :
- $(feature-space) ] ;
- }
- else
- {
- targets += $(e) ;
- }
- }
- else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
- {
- skip-next = true ;
- }
- }
- return [ new vector
- [ new vector $(targets) ]
- [ new vector $(properties) ] ] ;
-}
-
-
-# Converts one element of command line build request specification into internal
-# form. Expects all the project files to already be loaded.
-#
-local rule convert-command-line-element ( e )
-{
- local result ;
- local parts = [ regex.split $(e) "/" ] ;
- while $(parts)
- {
- local p = $(parts[1]) ;
- local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
- local lresult ;
- local feature ;
- local values ;
- if $(m)
- {
- feature = $(m[1]) ;
- values = [ regex.split $(m[2]) "," ] ;
- lresult = <$(feature)>$(values) ;
- }
- else
- {
- lresult = [ regex.split $(p) "," ] ;
- }
-
- if $(feature) && free in [ feature.attributes $(feature) ]
- {
- # If we have a free feature, then the value is everything
- # up to the end of the command line token. Slashes in
- # the following string are not taken to mean separation
- # of properties. Commas are also not interpreted specially.
- values = $(values:J=,) ;
- values = $(values) $(parts[2-]) ;
- values = $(values:J=/) ;
- lresult = <$(feature)>$(values) ;
- parts = ;
- }
-
- if ! [ MATCH (.*-.*) : $(p) ]
- {
- # property.validate cannot handle subfeatures, so we avoid the check
- # here.
- for local p in $(lresult)
- {
- property.validate $(p) : $(feature-space) ;
- }
- }
-
- if ! $(result)
- {
- result = $(lresult) ;
- }
- else
- {
- result = $(result)/$(lresult) ;
- }
-
- parts = $(parts[2-]) ;
- }
-
- return $(result) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import feature ;
-
- feature.prepare-test build-request-test-temp ;
-
- import build-request ;
- import build-request : expand-no-defaults : build-request.expand-no-defaults ;
- import errors : try catch ;
- import feature : feature subfeature ;
-
- feature toolset : gcc msvc borland : implicit ;
- subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
- 3.0 3.0.1 3.0.2 : optional ;
-
- feature variant : debug release : implicit composite ;
- feature inlining : on off ;
- feature "include" : : free ;
-
- feature stdlib : native stlport : implicit ;
-
- feature runtime-link : dynamic static : symmetric ;
-
- # Empty build requests should expand to empty.
- assert.result
- : build-request.expand-no-defaults ;
-
- assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
- <toolset>msvc/<stdlib>stlport/<variant>debug
- <toolset>msvc/<variant>debug
- : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
-
- assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
- <toolset>msvc/<variant>debug
- <variant>debug/<toolset>msvc/<stdlib>stlport
- : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
-
- assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
- : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
-
- assert.result
- <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
- <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
- <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
- : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
-
- local r ;
-
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-
- try ;
- {
- build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
- }
- catch \"static\" is not a value of an implicit feature ;
-
- r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
- assert.equal [ $(r).get-at 1 ] : target ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
-
- r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
- gcc/<runtime-link>static ;
-
- r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
- borland/<runtime-link>static ;
-
- r = [ build-request.from-command-line bjam gcc-3.0 ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
-
- feature.finish-test build-request-test-temp ;
-}
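
The __test__ cases above show how a command-line token such as gcc,borland/runtime-link=static expands into one property set per implicit value. The following standalone Python sketch (not part of Boost.Build; it uses no b2 modules and skips free-feature handling and subfeature validation) reproduces just that string-splitting step of convert-command-line-element:

def convert_element(token):
    # Split a token such as "gcc,borland/runtime-link=static" into the
    # property strings the rule above would produce for simple, non-free
    # features (free features and subfeature validation are omitted).
    result = None
    for part in token.split("/"):
        if "=" in part:
            feature, values = part.split("=", 1)
            lresult = ["<%s>%s" % (feature, v) for v in values.split(",")]
        else:
            lresult = part.split(",")
        if result is None:
            result = lresult
        else:
            result = [a + "/" + b for a in result for b in lresult]
    return result

assert convert_element("runtime-link=dynamic,static") == \
    ["<runtime-link>dynamic", "<runtime-link>static"]
assert convert_element("gcc,borland/runtime-link=static") == \
    ["gcc/<runtime-link>static", "borland/<runtime-link>static"]

The asserted outputs match the expectations in the __test__ rule above.
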
diff --git a/jam-files/boost-build/build/build_request.py b/jam-files/boost-build/build/build_request.py
deleted file mode 100644
index cc9f2400..00000000
--- a/jam-files/boost-build/build/build_request.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# Status: being ported by Vladimir Prus
-# TODO: need to re-compare with mainline of .jam
-# Base revision: 40480
-#
-# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-import b2.build.feature
-feature = b2.build.feature
-
-from b2.util.utility import *
-import b2.build.property_set as property_set
-
-def expand_no_defaults (property_sets):
- """ Expand the given build request by combining all property_sets which don't
- specify conflicting non-free features.
- """
- # First make all features and subfeatures explicit
- expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
-
- # Now combine all of the expanded property_sets
- product = __x_product (expanded_property_sets)
-
- return [property_set.create(p) for p in product]
-
-
-def __x_product (property_sets):
- """ Return the cross-product of all elements of property_sets, less any
- that would contain conflicting values for single-valued features.
- """
- x_product_seen = set()
- return __x_product_aux (property_sets, x_product_seen)[0]
-
-def __x_product_aux (property_sets, seen_features):
- """Returns non-conflicting combinations of property sets.
-
- property_sets is a list of PropertySet instances. seen_features is a set of
- features (not properties) that have already been seen.
-
- Returns a tuple of:
- - a list of lists of Property instances, such that within each list no two
- properties have the same feature, and no property is for a feature in seen_features.
- - the set of features seen in property_sets
- """
- if not property_sets:
- return ([], set())
-
- properties = property_sets[0].all()
-
- these_features = set()
- for p in property_sets[0].non_free():
- these_features.add(p.feature())
-
- # Note: the algorithm as implemented here, as in the original Jam code, appears to
- # detect conflicts based on features, not properties. For example, if the command
- # line build request says:
- #
- # <a>1/<b>1 <c>1/<b>1
- #
- # it will decide that those two property sets conflict because they both specify
- # a value for 'b', and will not try building "<a>1 <b>1 <c>1", but rather two
- # different property sets. This is a topic for future fixing, maybe.
- if these_features & seen_features:
-
- (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
- return (inner_result, inner_seen | these_features)
-
- else:
-
- result = []
- (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
- if inner_result:
- for inner in inner_result:
- result.append(properties + inner)
- else:
- result.append(properties)
-
- if inner_seen & these_features:
- # Some elements of property_sets[1:] conflict with elements of property_sets[0].
- # Try again, this time omitting the elements of property_sets[0].
- (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
- result.extend(inner_result2)
-
- return (result, inner_seen | these_features)
-
-
-
-def looks_like_implicit_value(v):
- """Returns true if 'v' is either implicit value, or
- the part before the first '-' symbol is implicit value."""
- if feature.is_implicit_value(v):
- return 1
- else:
- split = v.split("-")
- if feature.is_implicit_value(split[0]):
- return 1
-
- return 0
-
-def from_command_line(command_line):
- """Takes the command line tokens (such as taken from ARGV rule)
- and constructs build request from it. Returns a list of two
- lists. First is the set of targets specified in the command line,
- and second is the set of requested build properties."""
-
- targets = []
- properties = []
-
- for e in command_line:
- if e[0] != "-":
- # Build request spec either has "=" in it, or completely
- # consists of implicit feature values.
- if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
- properties += convert_command_line_element(e)
- else:
- targets.append(e)
-
- return [targets, properties]
-
-# Converts one element of command line build request specification into
-# internal form.
-def convert_command_line_element(e):
-
- result = None
- parts = e.split("/")
- for p in parts:
- m = p.split("=")
- if len(m) > 1:
- feature = m[0]
- values = m[1].split(",")
- lresult = [("<%s>%s" % (feature, v)) for v in values]
- else:
- lresult = p.split(",")
-
- if p.find('-') == -1:
- # FIXME: first port property.validate
- # property.validate cannot handle subfeatures,
- # so we avoid the check here.
- #for p in lresult:
- # property.validate(p)
- pass
-
- if not result:
- result = lresult
- else:
- result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
-
- return [property_set.create(b2.build.feature.split(r)) for r in result]
-
-###
-### rule __test__ ( )
-### {
-### import assert feature ;
-###
-### feature.prepare-test build-request-test-temp ;
-###
-### import build-request ;
-### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
-### import errors : try catch ;
-### import feature : feature subfeature ;
-###
-### feature toolset : gcc msvc borland : implicit ;
-### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
-### 3.0 3.0.1 3.0.2 : optional ;
-###
-### feature variant : debug release : implicit composite ;
-### feature inlining : on off ;
-### feature "include" : : free ;
-###
-### feature stdlib : native stlport : implicit ;
-###
-### feature runtime-link : dynamic static : symmetric ;
-###
-###
-### local r ;
-###
-### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-###
-### try ;
-### {
-###
-### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
-### }
-### catch \"static\" is not a value of an implicit feature ;
-###
-###
-### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
-### assert.equal [ $(r).get-at 1 ] : target ;
-### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-###
-### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
-###
-### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
-### gcc/<runtime-link>static ;
-###
-### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
-### borland/<runtime-link>static ;
-###
-### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
-###
-### feature.finish-test build-request-test-temp ;
-### }
-###
-###
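
The core of expand_no_defaults above is the conflict-free cross-product computed by __x_product_aux. Below is a minimal standalone sketch of the same recursion, operating on plain (feature, value) tuples instead of b2 PropertySet objects; the toy data is illustrative only and is a simplified version (toolset versions omitted) of the gcc/stlport msvc/stlport msvc debug case from the jam tests:

def _x_product_aux(property_sets, seen_features):
    # Mirrors __x_product_aux above; returns (combinations, features seen).
    if not property_sets:
        return ([], set())
    head = property_sets[0]
    these = set(f for f, _ in head)
    if these & seen_features:
        inner, inner_seen = _x_product_aux(property_sets[1:], seen_features)
        return (inner, inner_seen | these)
    result = []
    inner, inner_seen = _x_product_aux(property_sets[1:], seen_features | these)
    if inner:
        for rest in inner:
            result.append(list(head) + rest)
    else:
        result.append(list(head))
    if inner_seen & these:
        # A later set conflicted with the head; retry without the head.
        result.extend(_x_product_aux(property_sets[1:], seen_features)[0])
    return (result, inner_seen | these)

sets = [
    [("toolset", "gcc"), ("stdlib", "stlport")],
    [("toolset", "msvc"), ("stdlib", "stlport")],
    [("toolset", "msvc")],
    [("variant", "debug")],
]
for combo in _x_product_aux(sets, set())[0]:
    print(combo)
# Yields gcc/stlport/debug, msvc/stlport/debug and msvc/debug, one list each.
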
diff --git a/jam-files/boost-build/build/configure.jam b/jam-files/boost-build/build/configure.jam
deleted file mode 100644
index 14c1328a..00000000
--- a/jam-files/boost-build/build/configure.jam
+++ /dev/null
@@ -1,237 +0,0 @@
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines functions to help with two main tasks:
-#
-# - Discovering build-time configuration for the purposes of adjusting the
-# build process.
-# - Reporting what is built, and how it is configured.
-
-import targets ;
-import errors ;
-import targets ;
-import sequence ;
-import property ;
-import property-set ;
-import "class" : new ;
-import common ;
-import path ;
-
-rule log-summary ( )
-{
-
-}
-
-.width = 30 ;
-
-rule set-width ( width )
-{
- .width = $(width) ;
-}
-
-# Declare that the components specified by the parameter exist.
-rule register-components ( components * )
-{
- .components += $(components) ;
-}
-
-# Declare that the components specified by the parameters will
-# be built.
-rule components-building ( components * )
-{
- .built-components += $(components) ;
-}
-
-# Report something about the component configuration that the
-# user should know about.
-rule log-component-configuration ( component : message )
-{
- # FIXME: implement per-property-set logs
- .component-logs.$(component) += $(message) ;
-}
-
-
-
-rule log-check-result ( result )
-{
- if ! $(.announced-checks)
- {
- ECHO "Performing configuration checks\n" ;
- .announced-checks = 1 ;
- }
-
- ECHO $(result) ;
- #.check-results += $(result) ;
-}
-
-rule log-library-search-result ( library : result )
-{
- local x = [ PAD " - $(library) : $(result)" : $(.width) ] ;
- log-check-result "$(x)" ;
-}
-
-rule print-component-configuration ( )
-{
- local c = [ sequence.unique $(.components) ] ;
-
- ECHO "\nComponent configuration:\n" ;
- for c in $(.components)
- {
- local s ;
- if $(c) in $(.built-components)
- {
- s = "building" ;
- }
- else
- {
- s = "not building" ;
- }
- ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
- for local m in $(.component-logs.$(c))
- {
- ECHO " -" $(m) ;
- }
- }
- ECHO ;
-}
-
-rule print-configure-checks-summary ( )
-{
- # FIXME: the problem with this approach is that
- # the user only sees the checks summary when all checks are
- # done, and has no progress reporting while the
- # checks are being executed.
- if $(.check-results)
- {
- ECHO "Configuration checks summary\n" ;
-
- for local r in $(.check-results)
- {
- ECHO $(r) ;
- }
- ECHO ;
- }
-}
-
-# Attempt to build a metatarget named by 'metatarget-reference'
-# in the context of 'project' with properties 'ps'.
-# Returns a non-empty value if the build succeeds.
-rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
-{
- local result ;
-
- if ! $(retry) && ! $(.$(what)-tested.$(ps))
- {
- .$(what)-tested.$(ps) = true ;
-
- local targets = [ targets.generate-from-reference
- $(metatarget-reference) : $(project) : $(ps) ] ;
-
- local jam-targets ;
- for local t in $(targets[2-])
- {
- jam-targets += [ $(t).actualize ] ;
- }
-
- if ! UPDATE_NOW in [ RULENAMES ]
- {
- # Cannot determine. Assume existence.
- }
- else
- {
- local x = [ PAD " - $(what)" : $(.width) ] ;
- if [ UPDATE_NOW $(jam-targets) :
- $(.log-fd) : ignore-minus-n : ignore-minus-q ]
- {
- .$(what)-supported.$(ps) = yes ;
- result = true ;
- log-check-result "$(x) : yes" ;
- }
- else
- {
- log-check-result "$(x) : no" ;
- }
- }
- return $(result) ;
- }
- else
- {
- return $(.$(what)-supported.$(ps)) ;
- }
-}
-
-rule builds ( metatarget-reference : properties * : what ? : retry ? )
-{
- what ?= "$(metatarget-reference) builds" ;
-
- # FIXME: this should not be hardcoded. Other checks might
- # want to consider a different set of features as relevant.
- local toolset = [ property.select <toolset> : $(properties) ] ;
- local toolset-version-property = "<toolset-$(toolset:G=):version>" ;
- local relevant = [ property.select <target-os> <toolset> $(toolset-version-property)
- <address-model> <architecture>
- : $(properties) ] ;
- local ps = [ property-set.create $(relevant) ] ;
- local t = [ targets.current ] ;
- local p = [ $(t).project ] ;
-
- return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : $(retry) ] ;
-}
-
-
-# Called by Boost.Build startup code to specify the name of a file
-# that will receive the results of configure checks. This
-# should never be called by users.
-rule set-log-file ( log-file )
-{
- path.makedirs [ path.parent $(log-file) ] ;
-
- .log-fd = [ FILE_OPEN $(log-file) : "w" ] ;
-}
-
-# Frontend rules
-
-class check-target-builds-worker
-{
- import configure ;
- import property-set ;
- import targets ;
- import property ;
-
- rule __init__ ( target message ? : true-properties * : false-properties * )
- {
- self.target = $(target) ;
- self.message = $(message) ;
- self.true-properties = $(true-properties) ;
- self.false-properties = $(false-properties) ;
- }
-
- rule check ( properties * )
- {
- local choosen ;
- if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
- {
- choosen = $(self.true-properties) ;
- }
- else
- {
- choosen = $(self.false-properties) ;
- }
- return [ property.evaluate-conditionals-in-context $(choosen) : $(properties) ] ;
- }
-}
-
-
-rule check-target-builds ( target message ? : true-properties * : false-properties * )
-{
- local instance = [ new check-target-builds-worker $(target) $(message) : $(true-properties)
- : $(false-properties) ] ;
- return <conditional>@$(instance).check ;
-}
-
-IMPORT $(__name__) : check-target-builds : : check-target-builds ;
-
-
diff --git a/jam-files/boost-build/build/configure.py b/jam-files/boost-build/build/configure.py
deleted file mode 100644
index 0426832c..00000000
--- a/jam-files/boost-build/build/configure.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Status: ported.
-# Base revision: 64488
-#
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines functions to help with two main tasks:
-#
-# - Discovering build-time configuration for the purposes of adjusting the
-# build process.
-# - Reporting what is built, and how it is configured.
-
-import b2.build.property as property
-import b2.build.property_set as property_set
-
-import b2.build.targets
-
-from b2.manager import get_manager
-from b2.util.sequence import unique
-from b2.util import bjam_signature, value_to_jam
-
-import bjam
-import os
-
-__width = 30
-
-def set_width(width):
- global __width
- __width = width
-
-__components = []
-__built_components = []
-__component_logs = {}
-__announced_checks = False
-
-__log_file = None
-__log_fd = -1
-
-def register_components(components):
- """Declare that the components specified by the parameter exist."""
- __components.extend(components)
-
-def components_building(components):
- """Declare that the components specified by the parameters will be build."""
- __built_components.extend(components)
-
-def log_component_configuration(component, message):
- """Report something about component configuration that the user should better know."""
- __component_logs.setdefault(component, []).append(message)
-
-def log_check_result(result):
- global __announced_checks
- if not __announced_checks:
- print "Performing configuration checks"
- __announced_checks = True
-
- print result
-
-def log_library_search_result(library, result):
- log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
-
-
-def print_component_configuration():
-
- print "\nComponent configuration:"
- for c in __components:
- if c in __built_components:
- s = "building"
- else:
- s = "not building"
- message = " - %s)" % c
- message = message.rjust(__width)
- message += " : " + s
- for m in __component_logs.get(c, []):
- print " -" + m
- print ""
-
-__builds_cache = {}
-
-def builds(metatarget_reference, project, ps, what):
- # Attempt to build a metatarget named by 'metatarget-reference'
- # in the context of 'project' with properties 'ps'.
- # Returns a non-empty value if the build succeeds.
-
- result = []
-
- existing = __builds_cache.get((what, ps), None)
- if existing is None:
-
- result = False
- __builds_cache[(what, ps)] = False
-
- targets = b2.build.targets.generate_from_reference(
- metatarget_reference, project, ps).targets()
- jam_targets = []
- for t in targets:
- jam_targets.append(t.actualize())
-
- x = (" - %s" % what).rjust(__width)
- if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
- __builds_cache[(what, ps)] = True
- result = True
- log_check_result("%s: yes" % x)
- else:
- log_check_result("%s: no" % x)
-
- return result
- else:
- return existing
-
-def set_log_file(log_file_name):
- # Called by Boost.Build startup code to specify the name of a file
- # that will receive the results of configure checks. This
- # should never be called by users.
- global __log_file, __log_fd
- dirname = os.path.dirname(log_file_name)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- # Make sure to keep the file around, so that it's not
- # garbage-collected and closed
- __log_file = open(log_file_name, "w")
- __log_fd = __log_file.fileno()
-
-# Frontend rules
-
-class CheckTargetBuildsWorker:
-
- def __init__(self, target, true_properties, false_properties):
- self.target = target
- self.true_properties = property.create_from_strings(true_properties, True)
- self.false_properties = property.create_from_strings(false_properties, True)
-
- def check(self, ps):
-
- # FIXME: this should not be hardcoded. Other checks might
- # want to consider a different set of features as relevant.
- toolset = ps.get('toolset')[0]
- toolset_version_property = "<toolset-" + toolset + ":version>"
- relevant = ps.get_properties('target-os') + \
- ps.get_properties("toolset") + \
- ps.get_properties(toolset_version_property) + \
- ps.get_properties("address-model") + \
- ps.get_properties("architecture")
- rps = property_set.create(relevant)
- t = get_manager().targets().current()
- p = t.project()
- if builds(self.target, p, rps, "%s builds" % self.target):
- choosen = self.true_properties
- else:
- choosen = self.false_properties
- return property.evaluate_conditionals_in_context(choosen, ps)
-
-@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"]))
-def check_target_builds(target, true_properties, false_properties):
- worker = CheckTargetBuildsWorker(target, true_properties, false_properties)
- value = value_to_jam(worker.check)
- return "<conditional>" + value
-
-get_manager().projects().add_rule("check-target-builds", check_target_builds)
-
-
diff --git a/jam-files/boost-build/build/engine.py b/jam-files/boost-build/build/engine.py
deleted file mode 100644
index be9736e0..00000000
--- a/jam-files/boost-build/build/engine.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright Pedro Ferreira 2005.
-# Copyright Vladimir Prus 2007.
-# Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-bjam_interface = __import__('bjam')
-
-import operator
-import re
-
-import b2.build.property_set as property_set
-import b2.util
-
-class BjamAction:
- """Class representing bjam action defined from Python."""
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
- if self.function:
- self.function(targets, sources, property_set)
-
- # Bjam actions defined from Python have only the command
- # to execute, and no associated jam procedural code. So
- # passing 'property_set' to it is not necessary.
- bjam_interface.call("set-update-action", self.action_name,
- targets, sources, [])
-
-class BjamNativeAction:
- """Class representing bjam action defined by Jam code.
-
- We still allow to associate a Python callable that will
- be called when this action is installed on any target.
- """
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
- if self.function:
- self.function(targets, sources, property_set)
-
- p = []
- if property_set:
- p = property_set.raw()
-
- b2.util.set_jam_action(self.action_name, targets, sources, p)
-
-action_modifiers = {"updated": 0x01,
- "together": 0x02,
- "ignore": 0x04,
- "quietly": 0x08,
- "piecemeal": 0x10,
- "existing": 0x20}
-
-class Engine:
- """ The abstract interface to a build engine.
-
- For now, the naming of targets, and special handling of some
- target variables like SEARCH and LOCATE make this class coupled
- to bjam engine.
- """
- def __init__ (self):
- self.actions = {}
-
- def add_dependency (self, targets, sources):
- """Adds a dependency from 'targets' to 'sources'
-
- Both 'targets' and 'sources' can be either a list
- of target names or a single target name.
- """
- if isinstance (targets, str):
- targets = [targets]
- if isinstance (sources, str):
- sources = [sources]
-
- for target in targets:
- for source in sources:
- self.do_add_dependency (target, source)
-
- def set_target_variable (self, targets, variable, value, append=0):
- """ Sets a target variable.
-
- The 'variable' will be available to bjam when it decides
- where to generate targets, and will also be available to the
- updating rule for that 'target'.
- """
- if isinstance (targets, str):
- targets = [targets]
-
- for target in targets:
- self.do_set_target_variable (target, variable, value, append)
-
- def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
- """ Binds a target to the corresponding update action.
- If target needs to be updated, the action registered
- with action_name will be used.
- The 'action_name' must be previously registered by
- either 'register_action' or 'register_bjam_action'
- method.
- """
- assert(isinstance(properties, property_set.PropertySet))
- if isinstance (targets, str):
- targets = [targets]
- self.do_set_update_action (action_name, targets, sources, properties)
-
- def register_action (self, action_name, command, bound_list = [], flags = [],
- function = None):
- """Creates a new build engine action.
-
- Creates, on the bjam side, an action named 'action_name', with
- 'command' as the command to be executed, 'bound_list'
- naming the list of variables bound when the command is executed,
- and the specified flags.
- If 'function' is not None, it should be a callable taking three
- parameters:
- - targets
- - sources
- - instance of the property_set class
- This function will be called by set_update_action, and can
- set additional target variables.
- """
- if self.actions.has_key(action_name):
- raise "Bjam action %s is already defined" % action_name
-
- assert(isinstance(flags, list))
-
- bjam_flags = reduce(operator.or_,
- (action_modifiers[flag] for flag in flags), 0)
-
- bjam_interface.define_action(action_name, command, bound_list, bjam_flags)
-
- self.actions[action_name] = BjamAction(action_name, function)
-
- def register_bjam_action (self, action_name, function=None):
- """Informs self that 'action_name' is declared in bjam.
-
- From this point, 'action_name' is a valid argument to the
- set_update_action method. The action_name should be callable
- in the global module of bjam.
- """
-
- # We allow duplicate calls to this rule for the same
- # action name. This way, jamfile rules that take action names
- # can just register them without specially checking if
- # action is already registered.
- if not self.actions.has_key(action_name):
- self.actions[action_name] = BjamNativeAction(action_name, function)
-
- # Overridables
-
-
- def do_set_update_action (self, action_name, targets, sources, property_set):
- action = self.actions.get(action_name)
- if not action:
- raise Exception("No action %s was registered" % action_name)
- action(targets, sources, property_set)
-
- def do_set_target_variable (self, target, variable, value, append):
- if append:
- bjam_interface.call("set-target-variable", target, variable, value, "true")
- else:
- bjam_interface.call("set-target-variable", target, variable, value)
-
- def do_add_dependency (self, target, source):
- bjam_interface.call("DEPENDS", target, source)
-
-
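
Engine.register_action above folds the requested flag names into a single bitmask before handing them to bjam. Here is a standalone illustration of that one step; the action_modifiers table is copied from the module, while the flag list is made up:

import operator
from functools import reduce  # functools.reduce is also available on Python 2.6+

action_modifiers = {"updated": 0x01,
                    "together": 0x02,
                    "ignore": 0x04,
                    "quietly": 0x08,
                    "piecemeal": 0x10,
                    "existing": 0x20}

flags = ["together", "piecemeal"]   # hypothetical flags for one action
bjam_flags = reduce(operator.or_,
                    (action_modifiers[flag] for flag in flags), 0)
assert bjam_flags == 0x12           # 0x02 | 0x10
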
diff --git a/jam-files/boost-build/build/errors.py b/jam-files/boost-build/build/errors.py
deleted file mode 100644
index d9dceefe..00000000
--- a/jam-files/boost-build/build/errors.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Status: being written afresh by Vladimir Prus
-
-# Copyright 2007 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This file is supposed to implement error reporting for Boost.Build.
-# Experience with jam version has shown that printing full backtrace
-# on each error is buffling. Further, for errors printed after parsing --
-# during target building, the stacktrace does not even mention what
-# target is being built.
-
-# This module implements explicit contexts -- where other code can
-# communicate which projects/targets are being built, and error
-# messages will show those contexts. For programming errors,
-# Python assertions are to be used.
-
-import bjam
-import traceback
-import sys
-
-def format(message, prefix=""):
- parts = str(message).split("\n")
- return "\n".join(prefix+p for p in parts)
-
-
-class Context:
-
- def __init__(self, message, nested=None):
- self.message_ = message
- self.nested_ = nested
-
- def report(self, indent=""):
- print indent + " -", self.message_
- if self.nested_:
- print indent + " declared at:"
- for n in self.nested_:
- n.report(indent + " ")
-
-class JamfileContext:
-
- def __init__(self):
- raw = bjam.backtrace()
- self.raw_ = raw
-
- def report(self, indent=""):
- for r in self.raw_:
- print indent + " - %s:%s" % (r[0], r[1])
-
-class ExceptionWithUserContext(Exception):
-
- def __init__(self, message, context,
- original_exception=None, original_tb=None, stack=None):
- Exception.__init__(self, message)
- self.context_ = context
- self.original_exception_ = original_exception
- self.original_tb_ = original_tb
- self.stack_ = stack
-
- def report(self):
- print "error:", self.args[0]
- if self.original_exception_:
- print format(str(self.original_exception_), " ")
- print
- print " error context (most recent first):"
- for c in self.context_[::-1]:
- c.report()
- print
- if "--stacktrace" in bjam.variable("ARGV"):
- if self.original_tb_:
- traceback.print_tb(self.original_tb_)
- elif self.stack_:
- for l in traceback.format_list(self.stack_):
- print l,
- else:
- print " use the '--stacktrace' option to get Python stacktrace"
- print
-
-def user_error_checkpoint(callable):
- def wrapper(self, *args):
- errors = self.manager().errors()
- try:
- return callable(self, *args)
- except ExceptionWithUserContext, e:
- raise
- except Exception, e:
- errors.handle_stray_exception(e)
- finally:
- errors.pop_user_context()
-
- return wrapper
-
-class Errors:
-
- def __init__(self):
- self.contexts_ = []
- self._count = 0
-
- def count(self):
- return self._count
-
- def push_user_context(self, message, nested=None):
- self.contexts_.append(Context(message, nested))
-
- def pop_user_context(self):
- del self.contexts_[-1]
-
- def push_jamfile_context(self):
- self.contexts_.append(JamfileContext())
-
- def pop_jamfile_context(self):
- del self.contexts_[-1]
-
- def capture_user_context(self):
- return self.contexts_[:]
-
- def handle_stray_exception(self, e):
- raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
- e, sys.exc_info()[2])
- def __call__(self, message):
- self._count = self._count + 1
- raise ExceptionWithUserContext(message, self.contexts_[:],
- stack=traceback.extract_stack())
-
-
-
-
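
A minimal standalone sketch of the explicit-context idea described in the module's header comment: callers push a description of what is currently being processed, and an error reports that stack instead of a raw Python backtrace. The class and message names here are illustrative only, not part of Boost.Build:

class BuildError(Exception):
    def __init__(self, message, contexts):
        Exception.__init__(self, message)
        self.contexts = contexts

class ErrorContexts(object):
    # A stack of "what is being processed" messages, like Errors above.
    def __init__(self):
        self._stack = []
    def push(self, message):
        self._stack.append(message)
    def pop(self):
        self._stack.pop()
    def error(self, message):
        # Report the most recent context first, as report() does above.
        raise BuildError(message, list(reversed(self._stack)))

ctx = ErrorContexts()
ctx.push("project /example")
ctx.push("main target 'app'")
try:
    ctx.error("unknown feature <frobnicate>")
except BuildError as e:
    print("error: %s" % e.args[0])
    for c in e.contexts:
        print(" - %s" % c)
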
diff --git a/jam-files/boost-build/build/feature.jam b/jam-files/boost-build/build/feature.jam
deleted file mode 100644
index 6f54adef..00000000
--- a/jam-files/boost-build/build/feature.jam
+++ /dev/null
@@ -1,1335 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import assert : * ;
-import "class" : * ;
-import errors : lol->list ;
-import indirect ;
-import modules ;
-import regex ;
-import sequence ;
-import set ;
-import utility ;
-
-
-local rule setup ( )
-{
- .all-attributes =
- implicit
- composite
- optional
- symmetric
- free
- incidental
- path
- dependency
- propagated
- link-incompatible
- subfeature
- order-sensitive
- ;
-
- .all-features = ;
- .all-subfeatures = ;
- .all-top-features = ; # non-subfeatures
- .all-implicit-values = ;
-}
-setup ;
-
-
-# Prepare a fresh space to test in by moving all global variable settings into
-# the given temporary module and erasing them here.
-#
-rule prepare-test ( temp-module )
-{
- DELETE_MODULE $(temp-module) ;
-
- # Transfer globals to temp-module.
- for local v in [ VARNAMES feature ]
- {
- if [ MATCH (\\.) : $(v) ]
- {
- modules.poke $(temp-module) : $(v) : $($(v)) ;
- $(v) = ;
- }
- }
- setup ;
-}
-
-
-# Clear out all global variables and recover all variables from the given
-# temporary module.
-#
-rule finish-test ( temp-module )
-{
- # Clear globals.
- for local v in [ VARNAMES feature ]
- {
- if [ MATCH (\\.) : $(v) ]
- {
- $(v) = ;
- }
- }
-
- for local v in [ VARNAMES $(temp-module) ]
- {
- $(v) = [ modules.peek $(temp-module) : $(v) ] ;
- }
- DELETE_MODULE $(temp-module) ;
-}
-
-
-# Transform features by bracketing any elements which are not already bracketed
-# by "<>".
-#
-local rule grist ( features * )
-{
- local empty = "" ;
- return $(empty:G=$(features)) ;
-}
-
-
-# Declare a new feature with the given name, values, and attributes.
-#
-rule feature (
- name # Feature name.
- : values * # Allowable values - may be extended later using feature.extend.
- : attributes * # Feature attributes (e.g. implicit, free, propagated...).
-)
-{
- name = [ grist $(name) ] ;
-
- local error ;
-
- # Check for any unknown attributes.
- if ! ( $(attributes) in $(.all-attributes) )
- {
- error = unknown attributes:
- [ set.difference $(attributes) : $(.all-attributes) ] ;
- }
- else if $(name) in $(.all-features)
- {
- error = feature already defined: ;
- }
- else if implicit in $(attributes) && free in $(attributes)
- {
- error = free features cannot also be implicit ;
- }
- else if free in $(attributes) && propagated in $(attributes)
- {
- error = free features cannot be propagated ;
- }
- else
- {
- local m = [ MATCH (.*=.*) : $(values) ] ;
- if $(m[1])
- {
- error = "feature value may not contain '='" ;
- }
- }
-
- if $(error)
- {
- errors.error $(error)
- : "in" feature declaration:
- : feature [ lol->list $(1) : $(2) : $(3) ] ;
- }
-
- $(name).values ?= ;
- $(name).attributes = $(attributes) ;
- $(name).subfeatures ?= ;
- $(attributes).features += $(name) ;
-
- .all-features += $(name) ;
- if subfeature in $(attributes)
- {
- .all-subfeatures += $(name) ;
- }
- else
- {
- .all-top-features += $(name) ;
- }
- extend $(name) : $(values) ;
-}
-
-
-# Sets the default value of the given feature, overriding any previous default.
-#
-rule set-default ( feature : value )
-{
- local f = [ grist $(feature) ] ;
- local a = $($(f).attributes) ;
- local bad-attribute = ;
- if free in $(a)
- {
- bad-attribute = free ;
- }
- else if optional in $(a)
- {
- bad-attribute = optional ;
- }
- if $(bad-attribute)
- {
- errors.error "$(bad-attribute) property $(f) cannot have a default." ;
- }
- if ! $(value) in $($(f).values)
- {
- errors.error "The specified default value, '$(value)' is invalid"
- : "allowed values are: " $($(f).values) ;
- }
- $(f).default = $(value) ;
-}
-
-
-# Returns the default property values for the given features.
-#
-rule defaults ( features * )
-{
- local result ;
- for local f in $(features)
- {
- local gf = $(:E=:G=$(f)) ;
- local a = $($(gf).attributes) ;
- if ( free in $(a) ) || ( optional in $(a) )
- {
- }
- else
- {
- result += $(gf)$($(gf).default) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns true iff all 'names' elements are valid features.
-#
-rule valid ( names + )
-{
- if $(names) in $(.all-features)
- {
- return true ;
- }
-}
-
-
-# Returns the attributes of the given feature.
-#
-rule attributes ( feature )
-{
- return $($(:E=:G=$(feature)).attributes) ;
-}
-
-
-# Returns the values of the given feature.
-#
-rule values ( feature )
-{
- return $($(:E=:G=$(feature)).values) ;
-}
-
-
-# Returns true iff 'value-string' is a value-string of an implicit feature.
-#
-rule is-implicit-value ( value-string )
-{
- local v = [ regex.split $(value-string) - ] ;
- local failed ;
- if ! $(v[1]) in $(.all-implicit-values)
- {
- failed = true ;
- }
- else
- {
- local feature = $($(v[1]).implicit-feature) ;
- for local subvalue in $(v[2-])
- {
- if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
- {
- failed = true ;
- }
- }
- }
-
- if ! $(failed)
- {
- return true ;
- }
-}
-
-
-# Returns the implicit feature associated with the given implicit value.
-#
-rule implied-feature ( implicit-value )
-{
- local components = [ regex.split $(implicit-value) "-" ] ;
-
- local feature = $($(components[1]).implicit-feature) ;
- if ! $(feature)
- {
- errors.error \"$(implicit-value)\" is not a value of an implicit feature ;
- feature = "" ; # Keep testing happy; it expects a result.
- }
- return $(feature) ;
-}
-
-
-local rule find-implied-subfeature ( feature subvalue : value-string ? )
-{
- # Feature should be of the form <feature-name>.
- if $(feature) != $(feature:G)
- {
- errors.error invalid feature $(feature) ;
- }
-
- return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
-}
-
-
-# Given a feature and a value of one of its subfeatures, find the name of the
-# subfeature. If value-string is supplied, looks for implied subfeatures that
-# are specific to that value of the feature.
-#
-rule implied-subfeature (
- feature # The main feature name.
- subvalue # The value of one of its subfeatures.
- : value-string ? # The value of the main feature.
-)
-{
- local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
- : $(value-string) ] ;
- if ! $(subfeature)
- {
- value-string ?= "" ;
- errors.error \"$(subvalue)\" is not a known subfeature value of
- $(feature)$(value-string) ;
- }
- return $(subfeature) ;
-}
-
-
-# Generate an error if the feature is unknown.
-#
-local rule validate-feature ( feature )
-{
- if ! $(feature) in $(.all-features)
- {
- errors.error unknown feature \"$(feature)\" ;
- }
-}
-
-
-# Given a feature and its value or just a value corresponding to an implicit
-# feature, returns a property set consisting of all component subfeatures and
-# their values. For example all the following calls:
-#
-# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
-# expand-subfeatures-aux gcc-2.95.2-linux-x86
-#
-# return:
-#
-# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
-#
-local rule expand-subfeatures-aux (
- feature ? # Feature name or empty if value corresponds to an
- # implicit property.
- : value # Feature value.
- : dont-validate ? # If set, no value string validation will be done.
-)
-{
- if $(feature)
- {
- feature = $(feature) ;
- }
-
- if ! $(feature)
- {
- feature = [ implied-feature $(value) ] ;
- }
- else
- {
- validate-feature $(feature) ;
- }
- if ! $(dont-validate)
- {
- validate-value-string $(feature) $(value) ;
- }
-
- local components = [ regex.split $(value) "-" ] ;
-
- # Get the top-level feature's value.
- local value = $(components[1]:G=) ;
-
- local result = $(components[1]:G=$(feature)) ;
-
- local subvalues = $(components[2-]) ;
- while $(subvalues)
- {
- local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
- subvalues = $(subvalues[2-]) ;
-
- local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
- $(value) ] ;
-
- # If no subfeature was found, reconstitute the value string and use that.
- if ! $(subfeature)
- {
- result = $(components:J=-) ;
- result = $(result:G=$(feature)) ;
- subvalues = ; # Stop looping.
- }
- else
- {
- local f = [ MATCH ^<(.*)>$ : $(feature) ] ;
- result += $(subvalue:G=$(f)-$(subfeature)) ;
- }
- }
-
- return $(result) ;
-}
-
-
-# Make all elements of properties corresponding to implicit features explicit,
-# and express all subfeature values as separate properties in their own right.
-# For example, all of the following properties
-#
-# gcc-2.95.2-linux-x86
-# <toolset>gcc-2.95.2-linux-x86
-#
-# might expand to
-#
-# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
-#
-rule expand-subfeatures (
- properties * # Property set with elements of the form
- # <feature>value-string or just value-string in the case
- # of implicit features.
- : dont-validate ?
-)
-{
- local result ;
- for local p in $(properties)
- {
- # Don't expand subfeatures in subfeatures
- if ! [ MATCH "(:)" : $(p:G) ]
- {
- result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
- }
- else
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
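
To make the example in the comments above concrete, here is a standalone Python sketch of the same decomposition. The subfeature table is a hypothetical stand-in for the registrations that feature.jam keeps internally, and bare values are assumed to imply <toolset> only for this illustration:

# Stand-in for the subfeature registrations that feature.jam maintains:
SUBFEATURES = {"toolset": ["version", "os", "cpu"]}

def expand_subfeatures(prop):
    if prop.startswith("<"):
        feature, value = prop[1:].split(">", 1)
    else:
        # A bare value of an implicit feature; assume <toolset> here.
        feature, value = "toolset", prop
    parts = value.split("-")
    result = ["<%s>%s" % (feature, parts[0])]
    for sub, v in zip(SUBFEATURES.get(feature, []), parts[1:]):
        result.append("<%s-%s>%s" % (feature, sub, v))
    return result

print(expand_subfeatures("gcc-2.95.2-linux-x86"))
# ['<toolset>gcc', '<toolset-version>2.95.2', '<toolset-os>linux', '<toolset-cpu>x86']
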
-
-
-# Helper for extend, below. Handles the feature case.
-#
-local rule extend-feature ( feature : values * )
-{
- feature = [ grist $(feature) ] ;
- validate-feature $(feature) ;
- if implicit in $($(feature).attributes)
- {
- for local v in $(values)
- {
- if $($(v).implicit-feature)
- {
- errors.error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ;
- }
- $(v).implicit-feature = $(feature) ;
- }
-
- .all-implicit-values += $(values) ;
- }
- if ! $($(feature).values)
- {
- # This is the first value specified for this feature, so make it the
- # default.
- $(feature).default = $(values[1]) ;
- }
- $(feature).values += $(values) ;
-}
-
-
-# Checks that value-string is a valid value-string for the given feature.
-#
-rule validate-value-string ( feature value-string )
-{
- if ! (
- free in $($(feature).attributes)
- || ( $(value-string) in $(feature).values )
- )
- {
- local values = $(value-string) ;
-
- if $($(feature).subfeatures)
- {
- if ! ( $(value-string) in $($(feature).values) )
- && ! ( $(value-string) in $($(feature).subfeatures) )
- {
- values = [ regex.split $(value-string) - ] ;
- }
- }
-
- if ! ( $(values[1]) in $($(feature).values) ) &&
-
- # An empty value is allowed for optional features.
- ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
- {
- errors.error \"$(values[1])\" is not a known value of feature $(feature)
- : legal values: \"$($(feature).values)\" ;
- }
-
- for local v in $(values[2-])
- {
- # This will validate any subfeature values in value-string.
- implied-subfeature $(feature) $(v) : $(values[1]) ;
- }
- }
-}
-
-
-# A helper that computes:
-# * the name(s) of the module-local variable(s) used to record the
-#   correspondence between subvalue(s) and a subfeature, and
-# * the value of that variable once such a subfeature/subvalue has been defined,
-# and returns a list consisting of the latter followed by the former.
-#
-local rule subvalue-var (
- feature # Main feature name.
- value-string ? # If supplied, specifies a specific value of the main
- # feature for which the subfeature values are valid.
- : subfeature # Subfeature name.
- : subvalues * # Subfeature values.
-)
-{
- feature = [ grist $(feature) ] ;
- validate-feature $(feature) ;
- if $(value-string)
- {
- validate-value-string $(feature) $(value-string) ;
- }
-
- local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
-
- return $(subfeature-name)
- $(feature)$(value-string:E="")<>$(subvalues).subfeature ;
-}
-
-
-# Extends the given subfeature with the subvalues. If the optional value-string
-# is provided, the subvalues are only valid for the given value of the feature.
-# Thus, you could say that <target-platform>mingw is specific to
-# <toolset>gcc-2.95.2 as follows:
-#
-# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
-#
-rule extend-subfeature (
- feature # The feature whose subfeature is being extended.
-
- value-string ? # If supplied, specifies a specific value of the main
- # feature for which the new subfeature values are valid.
-
- : subfeature # Subfeature name.
- : subvalues * # Additional subfeature values.
-)
-{
- local subfeature-vars = [ subvalue-var $(feature) $(value-string)
- : $(subfeature) : $(subvalues) ] ;
-
- local f = [ utility.ungrist [ grist $(feature) ] ] ;
- extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
-
- # Provide a way to get from the given feature or property and subfeature
- # value to the subfeature name.
- $(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
-}
-
-
-# Returns true iff the subvalues are valid for the feature. When the optional
-# value-string is provided, returns true iff the subvalues are valid for the
-# given value of the feature.
-#
-rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
-{
- local subfeature-vars = [ subvalue-var $(feature) $(value-string)
- : $(subfeature) : $(subvalue) ] ;
-
- if $($(subfeature-vars[2])) = $(subfeature-vars[1])
- {
- return true ;
- }
-}
-
-
-# Can be called three ways:
-#
-# 1. extend feature : values *
-# 2. extend <feature> subfeature : values *
-# 3. extend <feature>value-string subfeature : values *
-#
-# * Form 1 adds the given values to the given feature.
-# * Forms 2 and 3 add subfeature values to the given feature.
-# * Form 3 adds the subfeature values as specific to the given property
-# value-string.
-#
-rule extend ( feature-or-property subfeature ? : values * )
-{
- local feature ; # If a property was specified this is its feature.
- local value-string ; # E.g., the gcc-2.95.2 part of <toolset>gcc-2.95.2.
-
- # If a property was specified.
- if $(feature-or-property:G) && $(feature-or-property:G=)
- {
- # Extract the feature and value-string, if any.
- feature = $(feature-or-property:G) ;
- value-string = $(feature-or-property:G=) ;
- }
- else
- {
- feature = [ grist $(feature-or-property) ] ;
- }
-
- # Dispatch to the appropriate handler.
- if $(subfeature)
- {
- extend-subfeature $(feature) $(value-string) : $(subfeature)
- : $(values) ;
- }
- else
- {
- # If no subfeature was specified, we do not expect to see a
- # value-string.
- if $(value-string)
- {
- errors.error can only specify a property as the first argument when
- extending a subfeature
- : usage:
- : " extend" feature ":" values...
- : " | extend" <feature>value-string subfeature ":" values...
- ;
- }
-
- extend-feature $(feature) : $(values) ;
- }
-}
-
-
-local rule get-subfeature-name ( subfeature value-string ? )
-{
- local prefix = $(value-string): ;
- return $(prefix:E="")$(subfeature) ;
-}
-
-
-# Declares a subfeature.
-#
-rule subfeature (
- feature # Root feature that is not a subfeature.
- value-string ? # A value-string specifying which feature or subfeature
- # values this subfeature is specific to, if any.
- : subfeature # The name of the subfeature being declared.
- : subvalues * # The allowed values of this subfeature.
- : attributes * # The attributes of the subfeature.
-)
-{
- feature = [ grist $(feature) ] ;
- validate-feature $(feature) ;
-
- # Add grist to the subfeature name if a value-string was supplied.
- local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
-
- if $(subfeature-name) in $($(feature).subfeatures)
- {
- errors.error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\"
- "specific to "$(value-string) ;
- }
- $(feature).subfeatures += $(subfeature-name) ;
-
- # First declare the subfeature as a feature in its own right.
- local f = [ utility.ungrist $(feature) ] ;
- feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
-
- # Now make sure the subfeature values are known.
- extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
-}
-
-
-# Set components of the given composite property.
-#
-rule compose ( composite-property : component-properties * )
-{
- local feature = $(composite-property:G) ;
- if ! ( composite in [ attributes $(feature) ] )
- {
- errors.error "$(feature)" is not a composite feature ;
- }
-
- $(composite-property).components ?= ;
- if $($(composite-property).components)
- {
- errors.error components of "$(composite-property)" already set:
- $($(composite-property).components) ;
- }
-
- if $(composite-property) in $(component-properties)
- {
- errors.error composite property "$(composite-property)" cannot have itself as a component ;
- }
- $(composite-property).components = $(component-properties) ;
-}
-
-
-local rule expand-composite ( property )
-{
- return $(property)
- [ sequence.transform expand-composite : $($(property).components) ] ;
-}
-
-
-# Return all values of the given feature specified by the given property set.
-#
-rule get-values ( feature : properties * )
-{
- local result ;
-
- feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
- for local p in $(properties)
- {
- if $(p:G) = $(feature)
- {
- # Use MATCH instead of :G= to get the value, in order to preserve
- # the value intact instead of having bjam treat it as a decomposable
- # path.
- result += [ MATCH ">(.*)" : $(p) ] ;
- }
- }
- return $(result) ;
-}
-
-
-rule free-features ( )
-{
- return $(free.features) ;
-}
-
-
-# Expand all composite properties in the set so that all components are
-# explicitly expressed.
-#
-rule expand-composites ( properties * )
-{
- local explicit-features = $(properties:G) ;
- local result ;
-
- # Now expand composite features.
- for local p in $(properties)
- {
- local expanded = [ expand-composite $(p) ] ;
-
- for local x in $(expanded)
- {
- if ! $(x) in $(result)
- {
- local f = $(x:G) ;
-
- if $(f) in $(free.features)
- {
- result += $(x) ;
- }
- else if ! $(x) in $(properties) # x is the result of expansion
- {
- if ! $(f) in $(explicit-features) # not explicitly-specified
- {
- if $(f) in $(result:G)
- {
- errors.error expansions of composite features result
- in conflicting values for $(f)
- : values: [ get-values $(f) : $(result) ] $(x:G=)
- : one contributing composite property was $(p) ;
- }
- else
- {
- result += $(x) ;
- }
- }
- }
- else if $(f) in $(result:G)
- {
- errors.error explicitly-specified values of non-free feature
- $(f) conflict :
- "existing values:" [ get-values $(f) : $(properties) ] :
- "value from expanding " $(p) ":" $(x:G=) ;
- }
- else
- {
- result += $(x) ;
- }
- }
- }
- }
- return $(result) ;
-}
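
To make the precedence rules above concrete, here is a minimal standalone Python sketch (not Boost.Build code; the tuple-based property model and the COMPOSITES table are assumptions made for illustration). Free features always accumulate, values contributed only by composite expansion are dropped when the caller specified that feature explicitly, and genuinely conflicting values of a non-free feature raise an error.

    # Standalone model of expand-composites; names and data layout are illustrative.
    FREE = {"define"}                     # features whose values never conflict
    COMPOSITES = {
        ("variant", "debug"): [("define", "_DEBUG"), ("optimization", "off")],
    }

    def expand_composite(prop):
        # prop plus, recursively, all of its composite components
        out = [prop]
        for comp in COMPOSITES.get(prop, []):
            out.extend(expand_composite(comp))
        return out

    def expand_composites(props):
        explicit = {f for f, _ in props}          # features named by the caller
        result = []
        for p in props:
            for f, v in expand_composite(p):
                if (f, v) in result:
                    continue
                if f in FREE:
                    result.append((f, v))
                elif (f, v) not in props:         # value produced by expansion only
                    if f not in explicit:
                        if any(rf == f for rf, _ in result):
                            raise ValueError("conflicting expanded values for " + f)
                        result.append((f, v))
                    # else: the explicit value wins, drop the expanded one
                elif any(rf == f and rv != v for rf, rv in result):
                    raise ValueError("explicit values of non-free feature %s conflict" % f)
                else:
                    result.append((f, v))
        return result

    print(expand_composites([("variant", "debug"), ("optimization", "on")]))
    # -> the <optimization>off component of debug is dropped because
    #    <optimization>on was given explicitly.
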
-
-
-# Return true iff f is an ordinary subfeature of the parent-property's feature,
-# or if f is a subfeature of the parent-property's feature specific to the
-# parent-property's value.
-#
-local rule is-subfeature-of ( parent-property f )
-{
- if subfeature in $($(f).attributes)
- {
- local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
- if $(specific-subfeature)
- {
- # The feature has the form <topfeature-topvalue:subfeature>, e.g.
- # <toolset-msvc:version>.
- local feature-value = [ split-top-feature $(specific-subfeature[1])
- ] ;
- if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
- {
- return true ;
- }
- }
- else
- {
- # The feature has the form <topfeature-subfeature>, e.g.
- # <toolset-version>
- local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
- if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
- {
- return true ;
- }
- }
- }
-}
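
For reference, a tiny standalone Python sketch (illustrative only) of the two gristed name forms this rule distinguishes:

    import re

    def parse_subfeature_grist(gristed):
        # Value-specific form: <topfeature-topvalue:subfeature>, e.g. <toolset-gcc:version>
        m = re.match(r"<(.*):(.*)>$", gristed)
        if m:
            top_and_value, sub = m.groups()
            return ("specific", top_and_value, sub)
        # Ordinary form: <topfeature-subfeature>, e.g. <toolset-version>
        return ("ordinary", gristed.strip("<>"), None)

    print(parse_subfeature_grist("<toolset-gcc:version>"))  # ('specific', 'toolset-gcc', 'version')
    print(parse_subfeature_grist("<toolset-version>"))      # ('ordinary', 'toolset-version', None)
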
-
-
-# As for is-subfeature-of but for subproperties.
-#
-local rule is-subproperty-of ( parent-property p )
-{
- return [ is-subfeature-of $(parent-property) $(p:G) ] ;
-}
-
-
-# Given a property, return the subset of features consisting of all ordinary
-# subfeatures of the property's feature, and all specific subfeatures of the
-# property's feature which are conditional on the property's value.
-#
-local rule select-subfeatures ( parent-property : features * )
-{
- return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
-}
-
-
-# As for select-subfeatures but for subproperties.
-#
-local rule select-subproperties ( parent-property : properties * )
-{
- return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
-}
-
-
-# Given a property set which may consist of composite and implicit properties
-# and combined subfeature values, returns an expanded, normalized property set
-# with all implicit features expressed explicitly, all subfeature values
-# individually expressed, and all components of composite properties expanded.
-# Non-free features directly expressed in the input properties cause any values
-# of those features due to composite feature expansion to be dropped. If two
-# values of a given non-free feature are directly expressed in the input, an
-# error is issued.
-#
-rule expand ( properties * )
-{
- local expanded = [ expand-subfeatures $(properties) ] ;
- return [ expand-composites $(expanded) ] ;
-}
-
-
-# Helper rule for minimize. Returns true iff property's feature is present in
-# the contents of the variable named by feature-set-var.
-#
-local rule in-features ( feature-set-var property )
-{
- if $(property:G) in $($(feature-set-var))
- {
- return true ;
- }
-}
-
-
-# Helper rule for minimize. Returns the list with the same properties, but with
-# all subfeatures moved to the end of the list.
-#
-local rule move-subfeatures-to-the-end ( properties * )
-{
- local x1 ;
- local x2 ;
- for local p in $(properties)
- {
- if subfeature in $($(p:G).attributes)
- {
- x2 += $(p) ;
- }
- else
- {
- x1 += $(p) ;
- }
- }
- return $(x1) $(x2) ;
-}
-
-
-# Given an expanded property set, eliminate all redundancy: properties that are
-# elements of other (composite) properties in the set will be eliminated.
-# Non-symmetric properties equal to default values will be eliminated unless
-# they override a value from some composite property. Implicit properties will
-# be expressed without feature grist, and sub-property values will be expressed
-# as elements joined to the corresponding main property.
-#
-rule minimize ( properties * )
-{
- # Precondition checking
- local implicits = [ set.intersection $(properties:G=) : $(properties:G) ] ;
- if $(implicits)
- {
- errors.error minimize requires an expanded property set, but
- \"$(implicits[1])\" appears to be the value of an un-expanded
- implicit feature ;
- }
-
- # Remove properties implied by composite features.
- local components = $($(properties).components) ;
- local x = [ set.difference $(properties) : $(components) ] ;
-
- # Handle subfeatures and implicit features.
- x = [ move-subfeatures-to-the-end $(x) ] ;
- local result ;
- while $(x)
- {
- local p fullp = $(x[1]) ;
- local f = $(p:G) ;
- local v = $(p:G=) ;
-
- # Eliminate features in implicit properties.
- if implicit in [ attributes $(f) ]
- {
- p = $(v) ;
- }
-
- # Locate all subproperties of $(x[1]) in the property set.
- local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
- if $(subproperties)
- {
- # Reconstitute the joined property name.
- local sorted = [ sequence.insertion-sort $(subproperties) ] ;
- result += $(p)-$(sorted:G="":J=-) ;
-
- x = [ set.difference $(x[2-]) : $(subproperties) ] ;
- }
- else
- {
- # Eliminate properties whose value is equal to feature's default,
- # which are not symmetric and which do not contradict values implied
- # by composite properties.
-
- # Since all component properties of composites in the set have been
- # eliminated, any remaining property whose feature is the same as a
- # component of a composite in the set must have a non-redundant
- # value.
- if $(fullp) != [ defaults $(f) ]
- || symmetric in [ attributes $(f) ]
- || $(fullp:G) in $(components:G)
- {
- result += $(p) ;
- }
-
- x = $(x[2-]) ;
- }
- }
- return $(result) ;
-}
-
-
-# Combine all subproperties into their parent properties
-#
-# Requires: for every subproperty, there is a parent property. All features are
-# explicitly expressed.
-#
-# This rule probably should not be needed, but build-request.expand-no-defaults
-# is being abused for unintended purposes and it needs help.
-#
-rule compress-subproperties ( properties * )
-{
- local all-subs ;
- local matched-subs ;
- local result ;
-
- for local p in $(properties)
- {
- if ! $(p:G)
- {
- # Expecting fully-gristed properties.
- assert.variable-not-empty p:G ;
- }
-
- if ! subfeature in $($(p:G).attributes)
- {
- local subs = [ sequence.insertion-sort
- [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
-
- matched-subs += $(subs) ;
-
- local subvalues = -$(subs:G=:J=-) ;
- subvalues ?= "" ;
- result += $(p)$(subvalues) ;
- }
- else
- {
- all-subs += $(p) ;
- }
- }
- assert.result true : set.equal $(all-subs) : $(matched-subs) ;
- return $(result) ;
-}
-
-
-# Given an ungristed string, finds the longest prefix which is a top-level
-# feature name followed by a dash, and returns a pair consisting of the parts
-# before and after that dash. More interesting than a simple split because
-# feature names may contain dashes.
-#
-local rule split-top-feature ( feature-plus )
-{
- local e = [ regex.split $(feature-plus) - ] ;
- local f = $(e[1]) ;
- local v ;
- while $(e)
- {
- if <$(f)> in $(.all-top-features)
- {
- v = $(f) $(e[2-]:J=-) ;
- }
- e = $(e[2-]) ;
- f = $(f)-$(e[1]) ;
- }
- return $(v) ;
-}
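
A standalone Python sketch of that longest-prefix search (illustrative; the TOP_FEATURES set stands in for the module's .all-top-features state):

    TOP_FEATURES = {"toolset", "target-os"}      # feature names may themselves contain dashes

    def split_top_feature(feature_plus):
        parts = feature_plus.split("-")
        best = None
        for i in range(1, len(parts) + 1):
            candidate = "-".join(parts[:i])
            if candidate in TOP_FEATURES:        # keep overwriting: longest match wins
                rest = "-".join(parts[i:])
                best = (candidate, rest) if rest else (candidate,)
        return best

    print(split_top_feature("target-os-version"))  # ('target-os', 'version')
    print(split_top_feature("toolset-gcc"))        # ('toolset', 'gcc')
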
-
-
-# Given a set of properties, add default values for features not represented in
-# the set.
-#
-# Note: if there's an ordinary feature F1 and a composite feature F2 which
-# includes some value for F1 and both features have default values, then the
-# default value of F1 will be added (as opposed to the value in F2). This might
-# not be the right idea, e.g. consider:
-#
-# feature variant : debug ... ;
-# <variant>debug : .... <runtime-debugging>on
-# feature <runtime-debugging> : off on ;
-#
-# Here, when adding default for an empty property set, we'll get
-#
-# <variant>debug <runtime-debugging>off
-#
-# and that's kind of strange.
-#
-rule add-defaults ( properties * )
-{
- for local v in $(properties:G=)
- {
- if $(v) in $(properties)
- {
- errors.error add-defaults requires explicitly specified features,
- but \"$(v)\" appears to be the value of an un-expanded implicit
- feature ;
- }
- }
- # We don't add default for elements with ":" inside. This catches:
- # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
- # to be taken as a specified value for <variant>
- # 2. Free properties with ":" in values. We don't care, since free
- # properties don't have defaults.
- local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
- local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
- local more = [ defaults $(missing-top) ] ;
- properties += $(more) ;
- xproperties += $(more) ;
-
- # Add defaults for subfeatures of features which are present.
- for local p in $(xproperties)
- {
- local s = $($(p:G).subfeatures) ;
- local f = [ utility.ungrist $(p:G) ] ;
- local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
- properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
- }
-
- return $(properties) ;
-}
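
The note above can be illustrated with a minimal standalone sketch (plain tuples and a hypothetical DEFAULTS table, purely for illustration): defaults are chosen per feature, independently of what composite expansion would later imply.

    DEFAULTS = {"variant": "debug", "runtime-debugging": "off"}

    def add_defaults(props):
        present = {f for f, _ in props}
        return props + [(f, v) for f, v in DEFAULTS.items() if f not in present]

    print(add_defaults([]))
    # [('variant', 'debug'), ('runtime-debugging', 'off')] -- the default
    # <runtime-debugging>off is chosen even though expanding <variant>debug
    # would imply <runtime-debugging>on.
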
-
-
-# Given a property-set of the form
-# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
-#
-# Returns
-# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
-#
-# Note that vN...vM may contain slashes. This needs to be resilient to the
-# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
-# slash direction on NT.
-#
-rule split ( property-set )
-{
- local pieces = [ regex.split $(property-set) [\\/] ] ;
- local result ;
-
- for local x in $(pieces)
- {
- if ( ! $(x:G) ) && $(result[-1]:G)
- {
- result = $(result[1--2]) $(result[-1])/$(x) ;
- }
- else
- {
- result += $(x) ;
- }
- }
-
- return $(result) ;
-}
-
-
-# Tests of module feature.
-#
-rule __test__ ( )
-{
- # Use a fresh copy of the feature module.
- prepare-test feature-test-temp ;
-
- import assert ;
- import errors : try catch ;
-
- # These are local rules and so must be explicitly reimported into the
- # testing module.
- import feature : extend-feature validate-feature select-subfeatures ;
-
- feature toolset : gcc : implicit ;
- feature define : : free ;
- feature runtime-link : dynamic static : symmetric ;
- feature optimization : on off ;
- feature variant : debug release profile : implicit composite symmetric ;
- feature stdlib : native stlport ;
- feature magic : : free ;
-
- compose <variant>debug : <define>_DEBUG <optimization>off ;
- compose <variant>release : <define>NDEBUG <optimization>on ;
-
- assert.result dynamic static : values <runtime-link> ;
- assert.result dynamic static : values runtime-link ;
-
- try ;
- {
- compose <variant>profile : <variant>profile ;
- }
- catch composite property <variant>profile cannot have itself as a component ;
-
- extend-feature toolset : msvc metrowerks ;
- subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
-
- assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
- assert.false is-subvalue toolset : gcc : version : 1.1 ;
-
- assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
- assert.false is-subvalue toolset : : version : yabba ;
-
- feature yabba ;
- subfeature yabba : version : dabba ;
- assert.true is-subvalue yabba : : version : dabba ;
-
- subfeature toolset gcc : platform : linux cygwin : optional ;
-
- assert.result <toolset-gcc:version>
- : select-subfeatures <toolset>gcc
- : <toolset-gcc:version>
- <toolset-msvc:version>
- <toolset-version>
- <stdlib> ;
-
- subfeature stdlib : version : 3 4 : optional ;
-
- assert.result <stdlib-version>
- : select-subfeatures <stdlib>native
- : <toolset-gcc:version>
- <toolset-msvc:version>
- <toolset-version>
- <stdlib-version> ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand-subfeatures <toolset>gcc-3.0.1 ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
- : expand-subfeatures <toolset>gcc-3.0.1-linux ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
-
- assert.result <define>foo=x-y
- : expand-subfeatures <define>foo=x-y ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand-subfeatures gcc-3.0.1 ;
-
- assert.result a c e
- : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- <variant>debug <define>_DEBUG <optimization>on
- : expand gcc-3.0.1 debug <optimization>on ;
-
- assert.result <variant>debug <define>_DEBUG <optimization>on
- : expand debug <optimization>on ;
-
- assert.result <optimization>on <variant>debug <define>_DEBUG
- : expand <optimization>on debug ;
-
- assert.result <runtime-link>dynamic <optimization>on
- : defaults <runtime-link> <define> <optimization> ;
-
- # Make sure defaults is resilient to missing grist.
- assert.result <runtime-link>dynamic <optimization>on
- : defaults runtime-link define optimization ;
-
- feature dummy : dummy1 dummy2 ;
- subfeature dummy : subdummy : x y z : optional ;
-
- feature fu : fu1 fu2 : optional ;
- subfeature fu : subfu : x y z : optional ;
- subfeature fu : subfu2 : q r s ;
-
- assert.result optional : attributes <fu> ;
- assert.result optional : attributes fu ;
-
- assert.result <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
- <dummy>dummy1 <toolset-gcc:version>2.95.2
- : add-defaults <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO ;
-
- assert.result <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
- <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
- : add-defaults <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO <fu>fu1 ;
-
- set-default <runtime-link> : static ;
- assert.result <runtime-link>static : defaults <runtime-link> ;
-
- assert.result gcc-3.0.1 debug <optimization>on
- : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
-
- assert.result gcc-3.0.1 debug <runtime-link>dynamic
- : minimize
- [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
-
- assert.result gcc-3.0.1 debug
- : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
-
- assert.result debug <optimization>on
- : minimize [ expand debug <optimization>on ] ;
-
- assert.result gcc-3.0
- : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
-
- assert.result gcc-3.0
- : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
-
- assert.result <x>y/z <a>b/c <d>e/f
- : split <x>y/z/<a>b/c/<d>e/f ;
-
- assert.result <x>y/z <a>b/c <d>e/f
- : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
-
- assert.result a b c <d>e/f/g <h>i/j/k
- : split a/b/c/<d>e/f/g/<h>i/j/k ;
-
- assert.result a b c <d>e/f/g <h>i/j/k
- : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
-
- # Test error checking.
-
- try ;
- {
- expand release <optimization>off <optimization>on ;
- }
- catch explicitly-specified values of non-free feature <optimization> conflict ;
-
- try ;
- {
- validate-feature <foobar> ;
- }
- catch unknown feature ;
-
- validate-value-string <toolset> gcc ;
- validate-value-string <toolset> gcc-3.0.1 ;
-
- try ;
- {
- validate-value-string <toolset> digital_mars ;
- }
- catch \"digital_mars\" is not a known value of <toolset> ;
-
- try ;
- {
- feature foobar : : baz ;
- }
- catch unknown attributes: baz ;
-
- feature feature1 ;
- try ;
- {
- feature feature1 ;
- }
- catch feature already defined: ;
-
- try ;
- {
- feature feature2 : : free implicit ;
- }
- catch free features cannot also be implicit ;
-
- try ;
- {
- feature feature3 : : free propagated ;
- }
- catch free features cannot be propagated ;
-
- try ;
- {
- implied-feature lackluster ;
- }
- catch \"lackluster\" is not a value of an implicit feature ;
-
- try ;
- {
- implied-subfeature <toolset> 3.0.1 ;
- }
- catch \"3.0.1\" is not a known subfeature value of <toolset> ;
-
- try ;
- {
- implied-subfeature <toolset> not-a-version : gcc ;
- }
- catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
-
- # Leave a clean copy of the features module behind.
- finish-test feature-test-temp ;
-}
diff --git a/jam-files/boost-build/build/feature.py b/jam-files/boost-build/build/feature.py
deleted file mode 100644
index 315a18e9..00000000
--- a/jam-files/boost-build/build/feature.py
+++ /dev/null
@@ -1,905 +0,0 @@
-# Status: ported, except for unit tests.
-# Base revision: 64488
-#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import re
-
-from b2.util import utility, bjam_signature
-import b2.util.set
-from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
-from b2.exceptions import *
-
-__re_split_subfeatures = re.compile ('<(.*):(.*)>')
-__re_no_hyphen = re.compile ('^([^:]+)$')
-__re_slash_or_backslash = re.compile (r'[\\/]')
-
-class Feature(object):
-
- # Map from string attribute names to integers bit flags.
- # This will be initialized after declaration of the class.
- _attribute_name_to_integer = {}
-
- def __init__(self, name, values, attributes):
- self._name = name
- self._values = values
- self._default = None
- self._attributes = 0
- for a in attributes:
- self._attributes = self._attributes | Feature._attribute_name_to_integer[a]
- self._attributes_string_list = attributes
- self._subfeatures = []
- self._parent = None
-
- def name(self):
- return self._name
-
- def values(self):
- return self._values
-
- def add_values(self, values):
- self._values.extend(values)
-
- def attributes(self):
- return self._attributes
-
- def set_default(self, value):
- self._default = value
-
- def default(self):
- return self._default
-
- # FIXME: remove when we fully move to using classes for features/properties
- def attributes_string_list(self):
- return self._attributes_string_list
-
- def subfeatures(self):
- return self._subfeatures
-
- def add_subfeature(self, name):
- self._subfeatures.append(name)
-
- def parent(self):
- """For subfeatures, return pair of (parent_feature, value).
-
- Value may be None if this subfeature is not specific to any
- value of the parent feature.
- """
- return self._parent
-
- def set_parent(self, feature, value):
- self._parent = (feature, value)
-
- def __str__(self):
- return self._name
-
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __all_attributes, __all_features, __implicit_features, __composite_properties
- global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features
- global __all_subfeatures
-
- # The list with all attribute names.
- __all_attributes = [ 'implicit',
- 'composite',
- 'optional',
- 'symmetric',
- 'free',
- 'incidental',
- 'path',
- 'dependency',
- 'propagated',
- 'link-incompatible',
- 'subfeature',
- 'order-sensitive'
- ]
- i = 1
- for a in __all_attributes:
- setattr(Feature, a.upper(), i)
- Feature._attribute_name_to_integer[a] = i
- def probe(self, flag=i):
- return getattr(self, "_attributes") & flag
- setattr(Feature, a.replace("-", "_"), probe)
- i = i << 1
-
- # A map containing all features. The key is the feature name.
- # The value is an instance of Feature class.
- __all_features = {}
-
- # All non-subfeatures.
- __all_top_features = []
-
- # Maps values to the corresponding implicit feature
- __implicit_features = {}
-
- # A map containing all composite properties. The key is a Property instance,
- # and the value is a list of Property instances
- __composite_properties = {}
-
- __features_with_attributes = {}
- for attribute in __all_attributes:
- __features_with_attributes [attribute] = []
-
- # Maps a value to the corresponding subfeature name.
- __subfeature_from_value = {}
-
- # All free features
- __free_features = []
-
- __all_subfeatures = []
-
-reset ()
-
-def enumerate ():
- """ Returns an iterator to the features map.
- """
- return __all_features.iteritems ()
-
-def get(name):
- """Return the Feature instance for the specified name.
-
- Throws if no feature by such name exists
- """
- return __all_features[name]
-
-# FIXME: prepare-test/finish-test?
-
-@bjam_signature((["name"], ["values", "*"], ["attributes", "*"]))
-def feature (name, values, attributes = []):
- """ Declares a new feature with the given name, values, and attributes.
- name: the feature name
- values: a sequence of the allowable values - may be extended later with feature.extend
- attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
- """
- __validate_feature_attributes (name, attributes)
-
- feature = Feature(name, [], attributes)
- __all_features[name] = feature
- # Temporary measure while we have not fully moved from 'gristed strings'
- __all_features["<" + name + ">"] = feature
-
- for attribute in attributes:
- __features_with_attributes [attribute].append (name)
-
- name = add_grist(name)
-
- if 'subfeature' in attributes:
- __all_subfeatures.append(name)
- else:
- __all_top_features.append(feature)
-
- extend (name, values)
-
- # FIXME: why this is needed.
- if 'free' in attributes:
- __free_features.append (name)
-
- return feature
-
-@bjam_signature((["feature"], ["value"]))
-def set_default (feature, value):
- """ Sets the default value of the given feature, overriding any previous default.
- feature: the name of the feature
- value: the default value to assign
- """
- f = __all_features[feature]
- attributes = f.attributes()
- bad_attribute = None
-
- if attributes & Feature.FREE:
- bad_attribute = "free"
- elif attributes & Feature.OPTIONAL:
- bad_attribute = "optional"
-
- if bad_attribute:
- raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, feature.name()))
-
- if not value in f.values():
- raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % values)
-
- f.set_default(value)
-
-def defaults(features):
- """ Returns the default property values for the given features.
- """
- # FIXME: should merge feature and property modules.
- import property
-
- result = []
- for f in features:
- if not f.free() and not f.optional() and f.default():
- result.append(property.Property(f, f.default()))
-
- return result
-
-def valid (names):
- """ Returns true iff all elements of names are valid features.
- """
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return [ valid_one (name) for name in names ]
-
-def attributes (feature):
- """ Returns the attributes of the given feature.
- """
- return __all_features[feature].attributes_string_list()
-
-def values (feature):
- """ Return the values of the given feature.
- """
- validate_feature (feature)
- return __all_features[feature].values()
-
-def is_implicit_value (value_string):
- """ Returns true iff 'value_string' is a value_string
- of an implicit feature.
- """
-
- if __implicit_features.has_key(value_string):
- return __implicit_features[value_string]
-
- v = value_string.split('-')
-
- if not __implicit_features.has_key(v[0]):
- return False
-
- feature = __implicit_features[v[0]]
-
- for subvalue in (v[1:]):
- if not __find_implied_subfeature(feature, subvalue, v[0]):
- return False
-
- return True
-
-def implied_feature (implicit_value):
- """ Returns the implicit feature associated with the given implicit value.
- """
- components = implicit_value.split('-')
-
- if not __implicit_features.has_key(components[0]):
- raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
-
- return __implicit_features[components[0]]
-
-def __find_implied_subfeature (feature, subvalue, value_string):
-
- #if value_string == None: value_string = ''
-
- if not __subfeature_from_value.has_key(feature) \
- or not __subfeature_from_value[feature].has_key(value_string) \
- or not __subfeature_from_value[feature][value_string].has_key (subvalue):
- return None
-
- return __subfeature_from_value[feature][value_string][subvalue]
-
-# Given a feature and a value of one of its subfeatures, find the name
-# of the subfeature. If value-string is supplied, looks for implied
-# subfeatures that are specific to that value of feature
-# feature # The main feature name
-# subvalue # The value of one of its subfeatures
-# value-string # The value of the main feature
-
-def implied_subfeature (feature, subvalue, value_string):
- result = __find_implied_subfeature (feature, subvalue, value_string)
- if not result:
- raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
-
- return result
-
-def validate_feature (name):
- """ Checks if all name is a valid feature. Otherwise, raises an exception.
- """
- if not __all_features.has_key(name):
- raise InvalidFeature ("'%s' is not a valid feature name" % name)
- else:
- return __all_features[name]
-
-def valid (names):
- """ Returns true iff all elements of names are valid features.
- """
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return [ valid_one (name) for name in names ]
-
-# Uses Property
-def __expand_subfeatures_aux (property, dont_validate = False):
- """ Helper for expand_subfeatures.
- Given a feature and value, or just a value corresponding to an
- implicit feature, returns a property set consisting of all component
- subfeatures and their values. For example:
-
- expand_subfeatures <toolset>gcc-2.95.2-linux-x86
- -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
- equivalent to:
- expand_subfeatures gcc-2.95.2-linux-x86
-
- property: A Property instance giving the feature and the value to expand.
- dont_validate: If True, no validation of value string will be done.
- """
- f = property.feature()
- v = property.value()
- if not dont_validate:
- validate_value_string(f, v)
-
- components = v.split ("-")
-
- v = components[0]
-
- import property
-
- result = [property.Property(f, components[0])]
-
- subvalues = components[1:]
-
- while len(subvalues) > 0:
- subvalue = subvalues [0] # pop the head off of subvalues
- subvalues = subvalues [1:]
-
- subfeature = __find_implied_subfeature (f, subvalue, v)
-
- # If no subfeature was found, reconstitute the value string and use that
- if not subfeature:
- return [property.Property(f, '-'.join(components))]
-
- result.append(property.Property(subfeature, subvalue))
-
- return result
-
-def expand_subfeatures(properties, dont_validate = False):
- """
- Make all elements of properties corresponding to implicit features
- explicit, and express all subfeature values as separate properties
- in their own right. For example, the property
-
- gcc-2.95.2-linux-x86
-
- might expand to
-
- <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
-
- properties: A sequence with elements of the form
- <feature>value-string or just value-string in the
- case of implicit features.
- dont_validate: If True, no validation of value string will be done.
- """
- result = []
- for p in properties:
- # Don't expand subfeatures in subfeatures
- if p.feature().subfeature():
- result.append (p)
- else:
- result.extend(__expand_subfeatures_aux (p, dont_validate))
-
- return result
-
-
-
-# rule extend was defined as below:
- # Can be called three ways:
- #
- # 1. extend feature : values *
- # 2. extend <feature> subfeature : values *
- # 3. extend <feature>value-string subfeature : values *
- #
- # * Form 1 adds the given values to the given feature
- # * Forms 2 and 3 add subfeature values to the given feature
- # * Form 3 adds the subfeature values as specific to the given
- # property value-string.
- #
- #rule extend ( feature-or-property subfeature ? : values * )
-#
-# Now, the specific rule must be called, depending on the desired operation:
-# extend_feature
-# extend_subfeature
-
-def extend (name, values):
- """ Adds the given values to the given feature.
- """
- name = add_grist (name)
- __validate_feature (name)
- feature = __all_features [name]
-
- if feature.implicit():
- for v in values:
- if __implicit_features.has_key(v):
- raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v]))
-
- __implicit_features[v] = feature
-
- if len (feature.values()) == 0 and len (values) > 0:
- # This is the first value specified for this feature,
- # take it as default value
- feature.set_default(values[0])
-
- feature.add_values(values)
-
-def validate_value_string (f, value_string):
- """ Checks that value-string is a valid value-string for the given feature.
- """
- if f.free() or value_string in f.values():
- return
-
- values = [value_string]
-
- if f.subfeatures():
- if not value_string in f.values() and \
- not value_string in f.subfeatures():
- values = value_string.split('-')
-
- # An empty value is allowed for optional features
- if not values[0] in f.values() and \
- (values[0] or not f.optional()):
- raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], feature, f.values()))
-
- for v in values [1:]:
- # this will validate any subfeature values in value-string
- implied_subfeature(f, v, values[0])
-
-
-""" Extends the given subfeature with the subvalues. If the optional
- value-string is provided, the subvalues are only valid for the given
- value of the feature. Thus, you could say that
- <target-platform>mingw is specific to <toolset>gcc-2.95.2 as follows:
-
- extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
-
- feature: The feature whose subfeature is being extended.
-
- value-string: If supplied, specifies a specific value of the
- main feature for which the new subfeature values
- are valid.
-
- subfeature: The name of the subfeature.
-
- subvalues: The additional values of the subfeature being defined.
-"""
-def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
-
- feature = validate_feature(feature_name)
-
- if value_string:
- validate_value_string(feature, value_string)
-
- subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
-
- extend(subfeature_name, subvalues)
- subfeature = __all_features[subfeature_name]
-
- if value_string == None: value_string = ''
-
- if not __subfeature_from_value.has_key(feature):
- __subfeature_from_value [feature] = {}
-
- if not __subfeature_from_value[feature].has_key(value_string):
- __subfeature_from_value [feature][value_string] = {}
-
- for subvalue in subvalues:
- __subfeature_from_value [feature][value_string][subvalue] = subfeature
-
-@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"],
- ["subvalues", "*"], ["attributes", "*"]))
-def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
- """ Declares a subfeature.
- feature_name: Root feature that is not a subfeature.
- value_string: An optional value-string specifying which feature or
- subfeature values this subfeature is specific to,
- if any.
- subfeature: The name of the subfeature being declared.
- subvalues: The allowed values of this subfeature.
- attributes: The attributes of the subfeature.
- """
- parent_feature = validate_feature (feature_name)
-
- # Add grist to the subfeature name if a value-string was supplied
- subfeature_name = __get_subfeature_name (subfeature, value_string)
-
- if subfeature_name in __all_features[feature_name].subfeatures():
- message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
- message += " specific to '%s'" % value_string
- raise BaseException (message)
-
- # First declare the subfeature as a feature in its own right
- f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
- f.set_parent(parent_feature, value_string)
-
- parent_feature.add_subfeature(f)
-
- # Now make sure the subfeature values are known.
- extend_subfeature (feature_name, value_string, subfeature, subvalues)
-
-
-@bjam_signature((["composite_property_s"], ["component_properties_s", "*"]))
-def compose (composite_property_s, component_properties_s):
- """ Sets the components of the given composite property.
-
- All parameters are <feature>value strings.
- """
- import property
-
- component_properties_s = to_seq (component_properties_s)
- composite_property = property.create_from_string(composite_property_s)
- f = composite_property.feature()
-
- if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property):
- component_properties = component_properties_s
- else:
- component_properties = [property.create_from_string(p) for p in component_properties_s]
-
- if not f.composite():
- raise BaseException ("'%s' is not a composite feature" % f)
-
- if __composite_properties.has_key(composite_property):
- raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property])))
-
- if composite_property in component_properties:
- raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)
-
- __composite_properties[composite_property] = component_properties
-
-
-def expand_composite(property):
- result = [ property ]
- if __composite_properties.has_key(property):
- for p in __composite_properties[property]:
- result.extend(expand_composite(p))
- return result
-
-
-def get_values (feature, properties):
- """ Returns all values of the given feature specified by the given property set.
- """
- result = []
- for p in properties:
- if get_grist (p) == feature:
- result.append (replace_grist (p, ''))
-
- return result
-
-def free_features ():
- """ Returns all free features.
- """
- return __free_features
-
-def expand_composites (properties):
- """ Expand all composite properties in the set so that all components
- are explicitly expressed.
- """
- explicit_features = set(p.feature() for p in properties)
-
- result = []
-
- # now expand composite features
- for p in properties:
- expanded = expand_composite(p)
-
- for x in expanded:
- if not x in result:
- f = x.feature()
-
- if f.free():
- result.append (x)
- elif not x in properties: # x is the result of expansion
- if not f in explicit_features: # not explicitly-specified
- if any(r.feature() == f for r in result):
- raise FeatureConflict(
- "expansions of composite features result in "
- "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
- (f.name(), [r.value() for r in result if r.feature() == f] + [x.value()], p))
- else:
- result.append (x)
- elif any(r.feature() == f for r in result):
- raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
- "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
- [r.value() for r in result if r.feature() == f], p, x.value()))
- else:
- result.append (x)
-
- return result
-
-# Uses Property
-def is_subfeature_of (parent_property, f):
- """ Return true iff f is an ordinary subfeature of the parent_property's
- feature, or if f is a subfeature of the parent_property's feature
- specific to the parent_property's value.
- """
- if not f.subfeature():
- return False
-
- p = f.parent()
- if not p:
- return False
-
- parent_feature = p[0]
- parent_value = p[1]
-
- if parent_feature != parent_property.feature():
- return False
-
- if parent_value and parent_value != parent_property.value():
- return False
-
- return True
-
-def __is_subproperty_of (parent_property, p):
- """ As is_subfeature_of, for subproperties.
- """
- return is_subfeature_of (parent_property, p.feature())
-
-
-# Returns true iff the subvalue is valid for the feature. When the
-# optional value-string is provided, returns true iff the subvalues
-# are valid for the given value of the feature.
-def is_subvalue(feature, value_string, subfeature, subvalue):
-
- if not value_string:
- value_string = ''
-
- if not __subfeature_from_value.has_key(feature):
- return False
-
- if not __subfeature_from_value[feature].has_key(value_string):
- return False
-
- if not __subfeature_from_value[feature][value_string].has_key(subvalue):
- return False
-
- if __subfeature_from_value[feature][value_string][subvalue]\
- != subfeature:
- return False
-
- return True
-
-def implied_subfeature (feature, subvalue, value_string):
- result = __find_implied_subfeature (feature, subvalue, value_string)
- if not result:
- raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
-
- return result
-
-
-# Uses Property
-def expand (properties):
- """ Given a property set which may consist of composite and implicit
- properties and combined subfeature values, returns an expanded,
- normalized property set with all implicit features expressed
- explicitly, all subfeature values individually expressed, and all
- components of composite properties expanded. Non-free features
- directly expressed in the input properties cause any values of
- those features due to composite feature expansion to be dropped. If
- two values of a given non-free feature are directly expressed in the
- input, an error is issued.
- """
- expanded = expand_subfeatures(properties)
- return expand_composites (expanded)
-
-# Accepts list of Property objects
-def add_defaults (properties):
- """ Given a set of properties, add default values for features not
- represented in the set.
- Note: if there's an ordinary feature F1 and a composite feature
- F2, which includes some value for F1, and both features have default values,
- then the default value of F1 will be added, not the value in F2. This might
- not be the right idea: consider
-
- feature variant : debug ... ;
- <variant>debug : .... <runtime-debugging>on
- feature <runtime-debugging> : off on ;
-
- Here, when adding default for an empty property set, we'll get
-
- <variant>debug <runtime-debugging>off
-
- and that's kind of strange.
- """
- result = [x for x in properties]
-
- handled_features = set()
- for p in properties:
- # We don't add default for conditional properties. We don't want
- # <variant>debug:<define>DEBUG to be taken as a specified value for <variant>
- if not p.condition():
- handled_features.add(p.feature())
-
- missing_top = [f for f in __all_top_features if not f in handled_features]
- more = defaults(missing_top)
- result.extend(more)
- for p in more:
- handled_features.add(p.feature())
-
- # Add defaults for subfeatures of features which are present
- for p in result[:]:
- s = p.feature().subfeatures()
- more = defaults([s for s in p.feature().subfeatures() if not s in handled_features])
- for p in more:
- handled_features.add(p.feature())
- result.extend(more)
-
- return result
-
-def minimize (properties):
- """ Given an expanded property set, eliminate all redundancy: properties
- which are elements of other (composite) properties in the set will
- be eliminated. Non-symmetric properties equal to default values will be
- eliminated, unless they override a value from some composite property.
- Implicit properties will be expressed without feature
- grist, and sub-property values will be expressed as elements joined
- to the corresponding main property.
- """
-
- # remove properties implied by composite features
- components = []
- for property in properties:
- if __composite_properties.has_key (property):
- components.extend(__composite_properties[property])
- properties = b2.util.set.difference (properties, components)
-
- # handle subfeatures and implicit features
-
- # move subfeatures to the end of the list
- properties = [p for p in properties if not p.feature().subfeature()] +\
- [p for p in properties if p.feature().subfeature()]
-
- result = []
- while properties:
- p = properties[0]
- f = p.feature()
-
- # locate all subproperties of $(x[1]) in the property set
- subproperties = __select_subproperties (p, properties)
-
- if subproperties:
- # reconstitute the joined property name
- subproperties.sort ()
- joined = b2.build.property.Property(p.feature(), p.value() + '-' + '-'.join ([sp.value() for sp in subproperties]))
- result.append(joined)
-
- properties = b2.util.set.difference(properties[1:], subproperties)
-
- else:
- # eliminate properties whose value is equal to feature's
- # default and which are not symmetric and which do not
- # contradict values implied by composite properties.
-
- # since all component properties of composites in the set
- # have been eliminated, any remaining property whose
- # feature is the same as a component of a composite in the
- # set must have a non-redundant value.
- if p.value() != f.default() or f.symmetric():
- result.append (p)
- #\
- #or get_grist (fullp) in get_grist (components):
- # FIXME: restore above
-
-
- properties = properties[1:]
-
- return result
-
-
-def split (properties):
- """ Given a property-set of the form
- v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
-
- Returns
- v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
-
- Note that vN...vM may contain slashes. This is resilient to the
- substitution of backslashes for slashes, since Jam, unbidden,
- sometimes swaps slash direction on NT.
- """
-
- def split_one (properties):
- pieces = re.split (__re_slash_or_backslash, properties)
- result = []
-
- for x in pieces:
- if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
- result = result [0:-1] + [ result [-1] + '/' + x ]
- else:
- result.append (x)
-
- return result
-
- if isinstance (properties, str):
- return split_one (properties)
-
- result = []
- for p in properties:
- result += split_one (p)
- return result
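
Illustrative usage of that splitting behaviour, as a standalone re-implementation over a single gristed string (the helper name is hypothetical):

    import re

    def split_property_set(s):
        pieces = re.split(r"[\\/]", s)
        result = []
        for x in pieces:
            # A piece without grist that follows a gristed piece is a
            # continuation of that piece's value (the value contained a slash).
            if not x.startswith("<") and result and result[-1].startswith("<"):
                result[-1] += "/" + x
            else:
                result.append(x)
        return result

    print(split_property_set("a/b/<d>e/f/<h>i"))  # ['a', 'b', '<d>e/f', '<h>i']
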
-
-
-def compress_subproperties (properties):
- """ Combine all subproperties into their parent properties
-
- Requires: for every subproperty, there is a parent property. All
- features are explicitly expressed.
-
- This rule probably shouldn't be needed, but
- build-request.expand-no-defaults is being abused for unintended
- purposes and it needs help
- """
- result = []
- matched_subs = set()
- all_subs = set()
- for p in properties:
- f = p.feature()
-
- if not f.subfeature():
- subs = __select_subproperties (p, properties)
- if subs:
-
- matched_subs.update(subs)
-
- subvalues = '-'.join (sub.value() for sub in subs)
- result.append(b2.build.property.Property(
- p.feature(), p.value() + '-' + subvalues,
- p.condition()))
- else:
- result.append(p)
-
- else:
- all_subs.add(p)
-
- # TODO: these variables are used just for debugging. What's the overhead?
- assert all_subs == matched_subs
-
- return result
-
-######################################################################################
-# Private methods
-
-def __select_subproperties (parent_property, properties):
- return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
-
-def __get_subfeature_name (subfeature, value_string):
- if value_string == None:
- prefix = ''
- else:
- prefix = value_string + ':'
-
- return prefix + subfeature
-
-
-def __validate_feature_attributes (name, attributes):
- for attribute in attributes:
- if not attribute in __all_attributes:
- raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
-
- if name in __all_features:
- raise AlreadyDefined ("feature '%s' already defined" % name)
- elif 'implicit' in attributes and 'free' in attributes:
- raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name)
- elif 'free' in attributes and 'propagated' in attributes:
- raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
-
-
-def __validate_feature (feature):
- """ Generates an error if the feature is unknown.
- """
- if not __all_features.has_key (feature):
- raise BaseException ('unknown feature "%s"' % feature)
-
-
-def __select_subfeatures (parent_property, features):
- """ Given a property, return the subset of features consisting of all
- ordinary subfeatures of the property's feature, and all specific
- subfeatures of the property's feature which are conditional on the
- property's value.
- """
- return [f for f in features if is_subfeature_of (parent_property, f)]
-
-# FIXME: copy over tests.
diff --git a/jam-files/boost-build/build/generators.jam b/jam-files/boost-build/build/generators.jam
deleted file mode 100644
index 1515525f..00000000
--- a/jam-files/boost-build/build/generators.jam
+++ /dev/null
@@ -1,1408 +0,0 @@
-# Copyright Vladimir Prus 2002.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Manages 'generators' --- objects which can do transformations between different
-# target types and contain the algorithm for finding a transformation from
-# sources to targets.
-#
-# The main entry point to this module is generators.construct rule. It is given
-# a list of source targets, desired target type and a set of properties. It
-# starts by selecting 'viable generators', which have any chances of producing
-# the desired target type with the required properties. Generators are ranked
-# and a set of the most specific ones is selected.
-#
-# The most specific generators have their 'run' methods called, with the
-# properties and list of sources. Each one selects a target which can be
-# directly consumed, and tries to convert the remaining ones to the types it can
-# consume. This is done by recursively calling 'construct' with all consumable
-# types.
-#
-# If the generator has collected all the targets it needs, it creates targets
-# corresponding to result, and returns it. When all generators have been run,
-# results of one of them are selected and returned as a result.
-#
-# It is quite possible for 'construct' to return more targets than it was asked
-# for. For example, it might be asked to generate a target of type EXE, but the
-# only generator found produces both EXE and TDS (a file with debug information).
-# The extra target will be returned.
-#
-# Likewise, when a generator tries to convert sources to consumable types, it can
-# get more targets than it was asked for. The question is what to do with extra
-# targets. Boost.Build attempts to convert them to requested types, and attempts
-# that as early as possible. Specifically, this is done after invoking each
-# generator. TODO: An example is needed to document the rationale for trying
-# extra target conversion at that point.
-#
-# In order for the system to be able to use a specific generator instance 'when
-# needed', the instance needs to be registered with the system using
-# generators.register() or one of its related rules. Unregistered generators may
-# only be run explicitly and will not be considered by Boost.Build when
-# converting between given target types.
-
-import "class" : new ;
-import errors ;
-import property-set ;
-import sequence ;
-import set ;
-import type ;
-import utility ;
-import virtual-target ;
-
-
-if "--debug-generators" in [ modules.peek : ARGV ]
-{
- .debug = true ;
-}
-
-
-# Updates cached viable source target type information as needed after a new
-# target type gets defined. This is needed because if a target type is a viable
-# source target type for some generator then all of the target type's derived
-# target types should automatically be considered as viable source target types
-# for the same generator as well. Does nothing if a non-derived target type is
-# passed to it.
-#
-rule update-cached-information-with-a-new-type ( type )
-{
- local base-type = [ type.base $(type) ] ;
- if $(base-type)
- {
- for local g in $(.vstg-cached-generators)
- {
- if $(base-type) in $(.vstg.$(g))
- {
- .vstg.$(g) += $(type) ;
- }
- }
-
- for local t in $(.vst-cached-types)
- {
- if $(base-type) in $(.vst.$(t))
- {
- .vst.$(t) += $(type) ;
- }
- }
- }
-}
-
-
-# Clears cached viable source target type information except for target types
-# and generators with all source types listed as viable. Should be called when
-# something invalidates those cached values by possibly causing some new source
-# types to become viable.
-#
-local rule invalidate-extendable-viable-source-target-type-cache ( )
-{
- local generators-with-cached-source-types = $(.vstg-cached-generators) ;
- .vstg-cached-generators = ;
- for local g in $(generators-with-cached-source-types)
- {
- if $(.vstg.$(g)) = *
- {
- .vstg-cached-generators += $(g) ;
- }
- else
- {
- .vstg.$(g) = ;
- }
- }
-
- local types-with-cached-source-types = $(.vst-cached-types) ;
- .vst-cached-types = ;
- for local t in $(types-with-cached-source-types)
- {
- if $(.vst.$(t)) = *
- {
- .vst-cached-types += $(t) ;
- }
- else
- {
- .vst.$(t) = ;
- }
- }
-}
-
-
-# Outputs a debug message if generators debugging is on. Each element of
-# 'message' is checked to see if it is a class instance. If so, instead of the
-# value, the result of 'str' call is output.
-#
-local rule generators.dout ( message * )
-{
- if $(.debug)
- {
- ECHO [ sequence.transform utility.str : $(message) ] ;
- }
-}
-
-
-local rule indent ( )
-{
- return $(.indent:J="") ;
-}
-
-
-local rule increase-indent ( )
-{
- .indent += " " ;
-}
-
-
-local rule decrease-indent ( )
-{
- .indent = $(.indent[2-]) ;
-}
-
-
-# Models a generator.
-#
-class generator
-{
- import generators : indent increase-indent decrease-indent generators.dout ;
- import set ;
- import utility ;
- import feature ;
- import errors ;
- import sequence ;
- import type ;
- import virtual-target ;
- import "class" : new ;
- import property ;
- import path ;
-
- EXPORT class@generator : indent increase-indent decrease-indent
- generators.dout ;
-
- rule __init__ (
- id # Identifies the generator - should be name
- # of the rule which sets up the build
- # actions.
-
- composing ? # Whether generator processes each source
- # target in turn, converting it to required
- # types. Ordinary generators pass all
- # sources together to the recursive
- # generators.construct-types call.
-
- : source-types * # Types that this generator can handle. If
- # empty, the generator can consume anything.
-
- : target-types-and-names + # Types the generator will create and,
- # optionally, names for created targets.
- # Each element should have the form
- # type["(" name-pattern ")"], for example,
- # obj(%_x). Generated target name will be
- # found by replacing % with the name of
- # source, provided an explicit name was not
- # specified.
-
- : requirements *
- )
- {
- self.id = $(id) ;
- self.rule-name = $(id) ;
- self.composing = $(composing) ;
- self.source-types = $(source-types) ;
- self.target-types-and-names = $(target-types-and-names) ;
- self.requirements = $(requirements) ;
-
- for local e in $(target-types-and-names)
- {
- # Create three parallel lists: one with the list of target types,
- # and two others with prefixes and postfixes to be added to the target
- # name. We use parallel lists for prefix and postfix (as opposed to a
- # mapping), because a given target type might occur several times, for
- # example "H H(%_symbols)".
- local m = [ MATCH ([^\\(]*)(\\((.*)%(.*)\\))? : $(e) ] ;
- self.target-types += $(m[1]) ;
- self.name-prefix += $(m[3]:E="") ;
- self.name-postfix += $(m[4]:E="") ;
- }
-
- # Note that 'transform' here, is the same as 'for_each'.
- sequence.transform type.validate : $(self.source-types) ;
- sequence.transform type.validate : $(self.target-types) ;
- }
-
- ################# End of constructor #################
-
- rule id ( )
- {
- return $(self.id) ;
- }
-
- # Returns the list of target types the generator accepts.
- #
- rule source-types ( )
- {
- return $(self.source-types) ;
- }
-
- # Returns the list of target types that this generator produces. It is
- # assumed to be always the same -- i.e. it can not change depending on some
- # provided list of sources.
- #
- rule target-types ( )
- {
- return $(self.target-types) ;
- }
-
- # Returns the required properties for this generator. Properties in the
- # returned set must be present in the build properties if this generator is
- # to be used. If the result has a grist-only element, the build properties
- # must include some value of that feature.
- #
- # XXX: remove this method?
- #
- rule requirements ( )
- {
- return $(self.requirements) ;
- }
-
- rule set-rule-name ( rule-name )
- {
- self.rule-name = $(rule-name) ;
- }
-
- rule rule-name ( )
- {
- return $(self.rule-name) ;
- }
-
- # Returns a true value if the generator can be run with the specified
- # properties.
- #
- rule match-rank ( property-set-to-match )
- {
- # See if generator requirements are satisfied by 'properties'. Treat a
- # feature name in requirements (i.e. grist-only element), as matching
- # any value of the feature.
- local all-requirements = [ requirements ] ;
-
- local property-requirements feature-requirements ;
- for local r in $(all-requirements)
- {
- if $(r:G=)
- {
- property-requirements += $(r) ;
- }
- else
- {
- feature-requirements += $(r) ;
- }
- }
-
- local properties-to-match = [ $(property-set-to-match).raw ] ;
- if $(property-requirements) in $(properties-to-match) &&
- $(feature-requirements) in $(properties-to-match:G)
- {
- return true ;
- }
- else
- {
- return ;
- }
- }
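
As a standalone sketch of that matching rule (illustrative only; properties are modeled as "feature=value" strings rather than gristed Jam strings):

    def match_rank(requirements, build_properties):
        features_present = {p.split("=", 1)[0] for p in build_properties}
        for r in requirements:
            if "=" in r:                        # concrete property: must match exactly
                if r not in build_properties:
                    return False
            elif r not in features_present:     # feature-only requirement: any value is fine
                return False
        return True

    props = ["toolset=gcc", "link=shared", "debug-symbols=on"]
    print(match_rank(["toolset=gcc", "debug-symbols"], props))  # True
    print(match_rank(["link=static"], props))                   # False
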
-
- # Returns another generator which differs from $(self) in
- # - id
- # - value to <toolset> feature in properties
- #
- rule clone ( new-id : new-toolset-properties + )
- {
- local g = [ new $(__class__) $(new-id) $(self.composing) :
- $(self.source-types) : $(self.target-types-and-names) :
- # Note: this does not remove any subfeatures of <toolset> which
- # might cause problems.
- [ property.change $(self.requirements) : <toolset> ]
- $(new-toolset-properties) ] ;
- return $(g) ;
- }
-
- # Creates another generator that is the same as $(self), except that if
- # 'base' is in target types of $(self), 'type' will be in target types of the
- # new generator.
- #
- rule clone-and-change-target-type ( base : type )
- {
- local target-types ;
- for local t in $(self.target-types-and-names)
- {
- local m = [ MATCH ([^\\(]*)(\\(.*\\))? : $(t) ] ;
- if $(m) = $(base)
- {
- target-types += $(type)$(m[2]:E="") ;
- }
- else
- {
- target-types += $(t) ;
- }
- }
-
- local g = [ new $(__class__) $(self.id) $(self.composing) :
- $(self.source-types) : $(target-types) : $(self.requirements) ] ;
- if $(self.rule-name)
- {
- $(g).set-rule-name $(self.rule-name) ;
- }
- return $(g) ;
- }
-
- # Tries to invoke this generator on the given sources. Returns a list of
- # generated targets (instances of 'virtual-target') and optionally a set of
- # properties to be added to the usage-requirements for all the generated
- # targets. Returning nothing from run indicates that the generator was
- # unable to create the target.
- #
- rule run
- (
- project # Project for which the targets are generated.
- name ? # Used when determining the 'name' attribute for all
- # generated targets. See the 'generated-targets' method.
- : property-set # Desired properties for generated targets.
- : sources + # Source targets.
- )
- {
- generators.dout [ indent ] " ** generator" $(self.id) ;
- generators.dout [ indent ] " composing:" $(self.composing) ;
-
- if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
- {
- errors.error "Unsupported source/source-type combination" ;
- }
-
- # We do not run composing generators if no name is specified. The reason
- # is that a composing generator combines several targets, which can have
- # different names, and it cannot decide which name to give the produced
- # target. Therefore, the name must be passed.
- #
- # This, in effect, means that composing generators are runnable only at
- # the top-level of a transformation graph, or if their name is passed
- # explicitly. Thus, we disallow composing generators in the middle. For
- # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
- # will not be allowed as the OBJ -> STATIC_LIB generator is composing.
- if ! $(self.composing) || $(name)
- {
- run-really $(project) $(name) : $(property-set) : $(sources) ;
- }
- }
-
- rule run-really ( project name ? : property-set : sources + )
- {
- # Targets that this generator will consume directly.
- local consumed = ;
- # Targets that can not be consumed and will be returned as-is.
- local bypassed = ;
-
- if $(self.composing)
- {
- convert-multiple-sources-to-consumable-types $(project)
- : $(property-set) : $(sources) : consumed bypassed ;
- }
- else
- {
- convert-to-consumable-types $(project) $(name) : $(property-set)
- : $(sources) : : consumed bypassed ;
- }
-
- local result ;
- if $(consumed)
- {
- result = [ construct-result $(consumed) : $(project) $(name) :
- $(property-set) ] ;
- }
-
- if $(result)
- {
- generators.dout [ indent ] " SUCCESS: " $(result) ;
- }
- else
- {
- generators.dout [ indent ] " FAILURE" ;
- }
- generators.dout ;
- return $(result) ;
- }
-
- # Constructs the dependency graph to be returned by this generator.
- #
- rule construct-result
- (
- consumed + # Already prepared list of consumable targets.
- # Composing generators may receive multiple sources
- # all of which will have types matching those in
- # $(self.source-types). Non-composing generators with
- # multiple $(self.source-types) will receive exactly
- # len $(self.source-types) sources with types matching
- # those in $(self.source-types). And non-composing
- # generators with only a single source type may
- # receive multiple sources with all of them of the
- # type listed in $(self.source-types).
- : project name ?
- : property-set # Properties to be used for all actions created here.
- )
- {
- local result ;
- # If this is 1->1 transformation, apply it to all consumed targets in
- # order.
- if ! $(self.source-types[2]) && ! $(self.composing)
- {
- for local r in $(consumed)
- {
- result += [ generated-targets $(r) : $(property-set) :
- $(project) $(name) ] ;
- }
- }
- else if $(consumed)
- {
- result += [ generated-targets $(consumed) : $(property-set) :
- $(project) $(name) ] ;
- }
- return $(result) ;
- }
-
-    # Determines the target name from 'fullname' (which may include path
-    # components) and places the optional prefix and postfix around the basename.
- #
- rule determine-target-name ( fullname : prefix ? : postfix ? )
- {
- # See if we need to add directory to the target name.
- local dir = $(fullname:D) ;
- local name = $(fullname:B) ;
-
- name = $(prefix:E=)$(name) ;
- name = $(name)$(postfix:E=) ;
-
- if $(dir) &&
- # Never append '..' to target path.
- ! [ MATCH .*(\\.\\.).* : $(dir) ]
- &&
- ! [ path.is-rooted $(dir) ]
- {
- # Relative path is always relative to the source
- # directory. Retain it, so that users can have files
-            # with the same name in two different subdirectories.
- name = $(dir)/$(name) ;
- }
- return $(name) ;
- }
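
A minimal Python sketch of the naming rule above (determine_target_name here is a
hypothetical stand-in, not part of Boost.Build): the source's relative directory is
retained so that files with the same base name in different subdirectories do not
collide, while absolute paths and paths containing '..' are dropped.

    import os.path

    def determine_target_name(fullname, prefix="", postfix=""):
        directory, base = os.path.split(fullname)
        name = prefix + os.path.splitext(base)[0] + postfix
        if directory and ".." not in directory and not os.path.isabs(directory):
            name = directory + "/" + name    # keep the relative source directory
        return name

    assert determine_target_name("sub/foo.cpp") == "sub/foo"
    assert determine_target_name("/abs/foo.cpp") == "foo"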
-
- # Determine the name of the produced target from the names of the sources.
- #
- rule determine-output-name ( sources + )
- {
-        # The simple case is when the name of the source has a single dot. Then,
-        # we take the part before the dot. Several dots can be caused by:
-        # - using a source file like a.host.cpp, or
-        # - a type whose suffix has a dot. Say, we can have a type 'host_cpp'
-        #   with extension 'host.cpp'.
- # In the first case, we want to take the part up to the last dot. In the
- # second case -- not sure, but for now take the part up to the last dot
- # too.
-        local name = [ utility.basename [ $(sources[1]).name ] ] ;
-
- for local s in $(sources[2])
- {
- local n2 = [ utility.basename [ $(s).name ] ] ;
- if $(n2) != $(name)
- {
- errors.error "$(self.id): source targets have different names: cannot determine target name" ;
- }
- }
- name = [ determine-target-name [ $(sources[1]).name ] ] ;
- return $(name) ;
- }
-
- # Constructs targets that are created after consuming 'sources'. The result
- # will be the list of virtual-target, which has the same length as the
- # 'target-types' attribute and with corresponding types.
- #
- # When 'name' is empty, all source targets must have the same 'name'
- # attribute value, which will be used instead of the 'name' argument.
- #
- # The 'name' attribute value for each generated target will be equal to
- # the 'name' parameter if there is no name pattern for this type. Otherwise,
- # the '%' symbol in the name pattern will be replaced with the 'name'
- # parameter to obtain the 'name' attribute.
- #
- # For example, if targets types are T1 and T2 (with name pattern "%_x"),
- # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created
- # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually
- # determines the basename of a file.
- #
- # Note that this pattern mechanism has nothing to do with implicit patterns
- # in make. It is a way to produce a target whose name is different than the
- # name of its source.
- #
- rule generated-targets ( sources + : property-set : project name ? )
- {
- if ! $(name)
- {
- name = [ determine-output-name $(sources) ] ;
- }
-
- # Assign an action for each target.
- local action = [ action-class ] ;
- local a = [ class.new $(action) $(sources) : $(self.rule-name) :
- $(property-set) ] ;
-
- # Create generated target for each target type.
- local targets ;
- local pre = $(self.name-prefix) ;
- local post = $(self.name-postfix) ;
- for local t in $(self.target-types)
- {
- local generated-name = $(pre[1])$(name:BS)$(post[1]) ;
- generated-name = $(generated-name:R=$(name:D)) ;
- pre = $(pre[2-]) ;
- post = $(post[2-]) ;
-
- targets += [ class.new file-target $(generated-name) : $(t) :
- $(project) : $(a) ] ;
- }
-
- return [ sequence.transform virtual-target.register : $(targets) ] ;
- }
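
The name-pattern mechanism described above can be illustrated with a small Python
sketch (apply_name_pattern is a hypothetical helper, not part of Boost.Build): the
'%' in a type's pattern is replaced by the base name and the type's suffix is
appended.

    def apply_name_pattern(base_name, pattern=None, suffix=""):
        # pattern such as "%_x"; '%' stands for the base name derived from the source
        stem = pattern.replace("%", base_name) if pattern else base_name
        return stem + suffix

    # Target types T1 (no pattern, suffix ".t1") and T2 (pattern "%_x", suffix ".t2"),
    # source foo.z: the generated files are foo.t1 and foo_x.t2.
    assert apply_name_pattern("foo", None, ".t1") == "foo.t1"
    assert apply_name_pattern("foo", "%_x", ".t2") == "foo_x.t2"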
-
- # Attempts to convert 'sources' to targets of types that this generator can
- # handle. The intention is to produce the set of targets that can be used
- # when the generator is run.
- #
- rule convert-to-consumable-types
- (
- project name ?
- : property-set
- : sources +
- : only-one ? # Convert 'source' to only one of the source types. If
-                       # there is more than one possibility, report an error.
- : consumed-var # Name of the variable which receives all targets which
- # can be consumed.
- bypassed-var # Name of the variable which receives all targets which
- # can not be consumed.
- )
- {
- # We are likely to be passed 'consumed' and 'bypassed' var names. Use
- # '_' to avoid name conflicts.
- local _consumed ;
- local _bypassed ;
- local missing-types ;
-
- if $(sources[2])
- {
- # Do not know how to handle several sources yet. Just try to pass
- # the request to other generator.
- missing-types = $(self.source-types) ;
- }
- else
- {
- consume-directly $(sources) : _consumed : missing-types ;
- }
-
-        # No need to search for a transformation if some source type has
-        # consumed the source and no more source types are needed.
- if $(only-one) && $(_consumed)
- {
- missing-types = ;
- }
-
-        # TODO: we should check that only one source type gets consumed when
-        # 'only-one' is true.
- # TODO: consider if consumed/bypassed separation should be done by
- # 'construct-types'.
-
- if $(missing-types)
- {
- local transformed = [ generators.construct-types $(project) $(name)
- : $(missing-types) : $(property-set) : $(sources) ] ;
-
- # Add targets of right type to 'consumed'. Add others to 'bypassed'.
- # The 'generators.construct' rule has done its best to convert
- # everything to the required type. There is no need to rerun it on
- # targets of different types.
-
- # NOTE: ignoring usage requirements.
- for local t in $(transformed[2-])
- {
- if [ $(t).type ] in $(missing-types)
- {
- _consumed += $(t) ;
- }
- else
- {
- _bypassed += $(t) ;
- }
- }
- }
-
- _consumed = [ sequence.unique $(_consumed) ] ;
- _bypassed = [ sequence.unique $(_bypassed) ] ;
-
- # Remove elements of '_bypassed' that are in '_consumed'.
-
- # Suppose the target type of current generator, X is produced from X_1
- # and X_2, which are produced from Y by one generator. When creating X_1
- # from Y, X_2 will be added to 'bypassed'. Likewise, when creating X_2
- # from Y, X_1 will be added to 'bypassed', but they are also in
- # 'consumed'. We have to remove them from bypassed, so that generators
- # up the call stack do not try to convert them.
-
- # In this particular case, X_1 instance in 'consumed' and X_1 instance
- # in 'bypassed' will be the same: because they have the same source and
- # action name, and 'virtual-target.register' will not allow two
- # different instances. Therefore, it is OK to use 'set.difference'.
-
- _bypassed = [ set.difference $(_bypassed) : $(_consumed) ] ;
-
- $(consumed-var) += $(_consumed) ;
- $(bypassed-var) += $(_bypassed) ;
- }
-
- # Converts several files to consumable types. Called for composing
- # generators only.
- #
- rule convert-multiple-sources-to-consumable-types ( project : property-set :
- sources * : consumed-var bypassed-var )
- {
- # We process each source one-by-one, trying to convert it to a usable
- # type.
- for local source in $(sources)
- {
- local _c ;
- local _b ;
- # TODO: need to check for failure on each source.
- convert-to-consumable-types $(project) : $(property-set) : $(source)
- : true : _c _b ;
- if ! $(_c)
- {
- generators.dout [ indent ] " failed to convert " $(source) ;
- }
- $(consumed-var) += $(_c) ;
- $(bypassed-var) += $(_b) ;
- }
- }
-
- rule consume-directly ( source : consumed-var : missing-types-var )
- {
- local real-source-type = [ $(source).type ] ;
-
- # If there are no source types, we can consume anything.
- local source-types = $(self.source-types) ;
- source-types ?= $(real-source-type) ;
-
- for local st in $(source-types)
- {
-            # The 'source' is of the right type already.
- if $(real-source-type) = $(st) || [ type.is-derived
- $(real-source-type) $(st) ]
- {
- $(consumed-var) += $(source) ;
- }
- else
- {
- $(missing-types-var) += $(st) ;
- }
- }
- }
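
A hedged Python sketch of the direct-consumption test above (all names are
illustrative): a source is consumed when its type equals, or is derived from, one of
the generator's declared source types, and every non-matching declared type is
reported as missing.

    def consume_directly(source_type, declared_source_types, is_derived):
        declared = declared_source_types or [source_type]  # no declaration: accept anything
        consumed, missing = [], []
        for st in declared:
            if source_type == st or is_derived(source_type, st):
                consumed.append(source_type)
            else:
                missing.append(st)
        return consumed, missing

    assert consume_directly("CPP", ["CPP", "RC"], lambda a, b: False) == (["CPP"], ["RC"])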
-
-    # Returns the class to be used for actions. Default implementation returns
- # "action".
- #
- rule action-class ( )
- {
- return "action" ;
- }
-}
-
-
-# Registers a new generator instance 'g'.
-#
-rule register ( g )
-{
- .all-generators += $(g) ;
-
-    # A generator can produce several targets of the same type. We want a unique
- # occurrence of that generator in .generators.$(t) in that case, otherwise,
- # it will be tried twice and we will get a false ambiguity.
- for local t in [ sequence.unique [ $(g).target-types ] ]
- {
- .generators.$(t) += $(g) ;
- }
-
- # Update the set of generators for toolset.
-
-    # TODO: should we check that a generator with this id is not already
-    # registered? For example, the fop.jam module intentionally declares two
-    # generators with the same id, so such a check would break it.
- local id = [ $(g).id ] ;
-
- # Some generators have multiple periods in their name, so a simple $(id:S=)
- # will not generate the right toolset name. E.g. if id = gcc.compile.c++,
- # then .generators-for-toolset.$(id:S=) will append to
- # .generators-for-toolset.gcc.compile, which is a separate value from
- # .generators-for-toolset.gcc. Correcting this makes generator inheritance
- # work properly. See also inherit-generators in the toolset module.
- local base = $(id) ;
- while $(base:S)
- {
- base = $(base:B) ;
- }
- .generators-for-toolset.$(base) += $(g) ;
-
-
- # After adding a new generator that can construct new target types, we need
- # to clear the related cached viable source target type information for
- # constructing a specific target type or using a specific generator. Cached
- # viable source target type lists affected by this are those containing any
- # of the target types constructed by the new generator or any of their base
- # target types.
- #
- # A more advanced alternative to clearing that cached viable source target
- # type information would be to expand it with additional source types or
- # even better - mark it as needing to be expanded on next use.
- #
- # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077
- # mailing list thread for an even more advanced idea of how we could convert
- # Boost Build's Jamfile processing, target selection and generator selection
- # into separate steps which would prevent these caches from ever being
- # invalidated.
- #
- # For now we just clear all the cached viable source target type information
- # that does not simply state 'all types' and may implement a more detailed
- # algorithm later on if it becomes needed.
-
- invalidate-extendable-viable-source-target-type-cache ;
-}
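
A small Python sketch (hypothetical, not the Boost.Build API) of the base-toolset
derivation described above: suffixes are stripped repeatedly so that a generator id
like 'gcc.compile.c++' is registered under 'gcc' rather than 'gcc.compile'.

    def toolset_base(generator_id):
        """Strip dotted suffixes until only the toolset name remains."""
        base = generator_id
        while "." in base:
            base = base.rsplit(".", 1)[0]   # drop the last ".suffix"
        return base

    assert toolset_base("gcc.compile.c++") == "gcc"
    assert toolset_base("msvc.link") == "msvc"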
-
-
-# Creates a new non-composing 'generator' class instance and registers it.
-# Returns the created instance. Rationale: the instance is returned so that it
-# is possible to first register a generator and then call its 'run' method,
-# bypassing the whole generator selection process.
-#
-rule register-standard ( id : source-types * : target-types + : requirements * )
-{
- local g = [ new generator $(id) : $(source-types) : $(target-types) :
- $(requirements) ] ;
- register $(g) ;
- return $(g) ;
-}
-
-
-# Creates a new composing 'generator' class instance and registers it.
-#
-rule register-composing ( id : source-types * : target-types + : requirements *
- )
-{
- local g = [ new generator $(id) true : $(source-types) : $(target-types) :
- $(requirements) ] ;
- register $(g) ;
- return $(g) ;
-}
-
-
-# Returns all generators belonging to the given 'toolset', i.e. whose ids are
-# '$(toolset).<something>'.
-#
-rule generators-for-toolset ( toolset )
-{
- return $(.generators-for-toolset.$(toolset)) ;
-}
-
-
-# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when
-# searching for generators that could produce a target of a certain type, both
-# those generators are among viable generators, the overridden generator is
-# immediately discarded.
-#
-# The overridden generators are discarded immediately after computing the list
-# of viable generators but before running any of them.
-#
-rule override ( overrider-id : overridee-id )
-{
- .override.$(overrider-id) += $(overridee-id) ;
-}
-
-
-# Returns a list of source types which can possibly be converted to 'target-type'
-# by some chain of generator invocations.
-#
-# More formally, takes all generators for 'target-type' and returns the union of
-# source types for those generators and the result of calling itself recursively
-# on those source types.
-#
-# Returns '*' in case any type should be considered a viable source type for the
-# given type.
-#
-local rule viable-source-types-real ( target-type )
-{
- local result ;
-
- # 't0' is the initial list of target types we need to process to get a list
- # of their viable source target types. New target types will not be added to
- # this list.
- local t0 = [ type.all-bases $(target-type) ] ;
-
- # 't' is the list of target types which have not yet been processed to get a
- # list of their viable source target types. This list will get expanded as
- # we locate more target types to process.
- local t = $(t0) ;
-
- while $(t)
- {
- # Find all generators for the current type. Unlike
- # 'find-viable-generators' we do not care about the property-set.
- local generators = $(.generators.$(t[1])) ;
- t = $(t[2-]) ;
-
- while $(generators)
- {
- local g = $(generators[1]) ;
- generators = $(generators[2-]) ;
-
- if ! [ $(g).source-types ]
- {
- # Empty source types -- everything can be accepted.
- result = * ;
- # This will terminate this loop.
- generators = ;
- # This will terminate the outer loop.
- t = ;
- }
-
- for local source-type in [ $(g).source-types ]
- {
- if ! $(source-type) in $(result)
- {
- # If a generator accepts a 'source-type' it will also
- # happily accept any type derived from it.
- for local n in [ type.all-derived $(source-type) ]
- {
- if ! $(n) in $(result)
- {
- # Here there is no point in adding target types to
- # the list of types to process in case they are or
- # have already been on that list. We optimize this
- # check by realizing that we only need to avoid the
- # original target type's base types. Other target
- # types that are or have been on the list of target
- # types to process have been added to the 'result'
- # list as well and have thus already been eliminated
- # by the previous if.
- if ! $(n) in $(t0)
- {
- t += $(n) ;
- }
- result += $(n) ;
- }
- }
- }
- }
- }
- }
-
- return $(result) ;
-}
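
The rule above is essentially a breadth-first closure over "can be produced from"
edges. A condensed Python sketch of the same idea, with all_bases, all_derived and
generators_for standing in for the type and generator registries (illustrative names
only, not the Boost.Build API):

    def viable_source_types_closure(target_type, all_bases, all_derived, generators_for):
        """Return types convertible to target_type, or "*" if anything is viable."""
        t0 = list(all_bases(target_type))    # initial work list; never extended
        work = list(t0)
        result = []
        while work:
            current, work = work[0], work[1:]
            for g in generators_for(current):
                if not g.source_types:       # no declared source types: accepts anything
                    return "*"
                for s in g.source_types:
                    for n in all_derived(s): # a derived type is also acceptable
                        if n not in result:
                            if n not in t0:  # only members of t0 are already queued
                                work.append(n)
                            result.append(n)
        return result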
-
-
-# Helper rule, caches the result of 'viable-source-types-real'.
-#
-rule viable-source-types ( target-type )
-{
- local key = .vst.$(target-type) ;
- if ! $($(key))
- {
- .vst-cached-types += $(target-type) ;
- local v = [ viable-source-types-real $(target-type) ] ;
- if ! $(v)
- {
- v = none ;
- }
- $(key) = $(v) ;
- }
-
- if $($(key)) != none
- {
- return $($(key)) ;
- }
-}
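
The wrapper above stores the literal 'none' because an empty Jam list is
indistinguishable from "not computed yet". The same sentinel idea, as a hypothetical
Python sketch:

    _viable_source_types_cache = {}

    def cached_viable_source_types(target_type, compute):
        if target_type not in _viable_source_types_cache:
            value = compute(target_type)
            # Remember empty results too, via a sentinel, so they are not recomputed.
            _viable_source_types_cache[target_type] = value if value else "none"
        value = _viable_source_types_cache[target_type]
        return [] if value == "none" else value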
-
-
-# Returns the list of source types which, when passed to the 'run' method of
-# 'generator', have some chance of being eventually used (probably after
-# conversion by other generators).
-#
-# Returns '*' in case any type should be considered a viable source type for the
-# given generator.
-#
-rule viable-source-types-for-generator-real ( generator )
-{
- local source-types = [ $(generator).source-types ] ;
- if ! $(source-types)
- {
- # If generator does not specify any source types, it might be a special
- # generator like builtin.lib-generator which just relays to other
- # generators. Return '*' to indicate that any source type is possibly
- # OK, since we do not know for sure.
- return * ;
- }
- else
- {
- local result ;
- while $(source-types)
- {
- local s = $(source-types[1]) ;
- source-types = $(source-types[2-]) ;
- local viable-sources = [ generators.viable-source-types $(s) ] ;
- if $(viable-sources) = *
- {
- result = * ;
- source-types = ; # Terminate the loop.
- }
- else
- {
- result += [ type.all-derived $(s) ] $(viable-sources) ;
- }
- }
- return [ sequence.unique $(result) ] ;
- }
-}
-
-
-# Helper rule, caches the result of 'viable-source-types-for-generator-real'.
-#
-local rule viable-source-types-for-generator ( generator )
-{
- local key = .vstg.$(generator) ;
- if ! $($(key))
- {
- .vstg-cached-generators += $(generator) ;
- local v = [ viable-source-types-for-generator-real $(generator) ] ;
- if ! $(v)
- {
- v = none ;
- }
- $(key) = $(v) ;
- }
-
- if $($(key)) != none
- {
- return $($(key)) ;
- }
-}
-
-
-# Returns usage requirements + list of created targets.
-#
-local rule try-one-generator-really ( project name ? : generator : target-type
- : property-set : sources * )
-{
- local targets =
- [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
-
- local usage-requirements ;
- local success ;
-
- generators.dout [ indent ] returned $(targets) ;
-
- if $(targets)
- {
- success = true ;
-
- if [ class.is-a $(targets[1]) : property-set ]
- {
- usage-requirements = $(targets[1]) ;
- targets = $(targets[2-]) ;
- }
- else
- {
- usage-requirements = [ property-set.empty ] ;
- }
- }
-
- generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
- generators.dout [ indent ] " " $(targets) ;
- if $(usage-requirements)
- {
-        generators.dout [ indent ] "  with usage requirements:" $(usage-requirements) ;
- }
-
- if $(success)
- {
- return $(usage-requirements) $(targets) ;
- }
-}
-
-
-# Checks if generator invocation can be pruned, because it is guaranteed to
-# fail. If so, quickly returns an empty list. Otherwise, calls
-# try-one-generator-really.
-#
-local rule try-one-generator ( project name ? : generator : target-type
- : property-set : sources * )
-{
- local source-types ;
- for local s in $(sources)
- {
- source-types += [ $(s).type ] ;
- }
- local viable-source-types = [ viable-source-types-for-generator $(generator)
- ] ;
-
- if $(source-types) && $(viable-source-types) != * &&
- ! [ set.intersection $(source-types) : $(viable-source-types) ]
- {
- local id = [ $(generator).id ] ;
- generators.dout [ indent ] " ** generator '$(id)' pruned" ;
- #generators.dout [ indent ] "source-types" '$(source-types)' ;
- #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
- }
- else
- {
- return [ try-one-generator-really $(project) $(name) : $(generator) :
- $(target-type) : $(property-set) : $(sources) ] ;
- }
-}
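
A short Python sketch (illustrative only) of the pruning test above: when the sources
have types, the generator's viable source types are known and not '*', and the two
sets do not intersect, the invocation is skipped without running the generator.

    def can_possibly_succeed(source_types, viable_source_types):
        """True unless running the generator is provably futile."""
        if not source_types:
            return True
        if viable_source_types == "*":       # generator may accept anything, eventually
            return True
        return bool(set(source_types) & set(viable_source_types))

    assert can_possibly_succeed(["CPP"], ["CPP", "C"])
    assert not can_possibly_succeed(["RC"], ["CPP", "C"])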
-
-
-rule construct-types ( project name ? : target-types + : property-set
- : sources + )
-{
- local result ;
- local matched-types ;
- local usage-requirements = [ property-set.empty ] ;
- for local t in $(target-types)
- {
- local r = [ construct $(project) $(name) : $(t) : $(property-set) :
- $(sources) ] ;
- if $(r)
- {
- usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
- result += $(r[2-]) ;
- matched-types += $(t) ;
- }
- }
-    # TODO: have to introduce a parameter controlling whether several types can
-    # be matched and add appropriate checks.
-
-    # TODO: need to review the documentation for 'construct' to see if it should
-    # return $(source) even if nothing can be done with it. Current docs seem
-    # to imply that, contrary to the behaviour.
- if $(result)
- {
- return $(usage-requirements) $(result) ;
- }
- else
- {
- return $(usage-requirements) $(sources) ;
- }
-}
-
-
-# Ensures all 'targets' have their type. If this is not so, exits with an error.
-#
-local rule ensure-type ( targets * )
-{
- for local t in $(targets)
- {
- if ! [ $(t).type ]
- {
- errors.error "target" [ $(t).str ] "has no type" ;
- }
- }
-}
-
-
-# Returns generators which can be used to construct target of specified type
-# with specified properties. Uses the following algorithm:
-# - iterates over requested target-type and all its bases (in the order returned
-# by type.all-bases).
-# - for each type find all generators that generate that type and whose
-# requirements are satisfied by properties.
-# - if the set of generators is not empty, returns that set.
-#
-# Note: this algorithm explicitly ignores generators for base classes if there
-# is at least one generator for the requested target-type.
-#
-local rule find-viable-generators-aux ( target-type : property-set )
-{
- # Select generators that can create the required target type.
- local viable-generators = ;
- local generator-rank = ;
-
- import type ;
- local t = [ type.all-bases $(target-type) ] ;
-
- generators.dout [ indent ] find-viable-generators target-type= $(target-type)
- property-set= [ $(property-set).as-path ] ;
-
- # Get the list of generators for the requested type. If no generator is
- # registered, try base type, and so on.
- local generators ;
- while $(t[1])
- {
- generators.dout [ indent ] "trying type" $(t[1]) ;
- if $(.generators.$(t[1]))
- {
- generators.dout [ indent ] "there are generators for this type" ;
- generators = $(.generators.$(t[1])) ;
-
- if $(t[1]) != $(target-type)
- {
- # We are here because there were no generators found for
- # target-type but there are some generators for its base type.
- # We will try to use them, but they will produce targets of
- # base type, not of 'target-type'. So, we clone the generators
- # and modify the list of target types.
- local generators2 ;
- for local g in $(generators)
- {
- # generators.register adds a generator to the list of
- # generators for toolsets, which is a bit strange, but
- # should work. That list is only used when inheriting a
- # toolset, which should have been done before running
- # generators.
- generators2 += [ $(g).clone-and-change-target-type $(t[1]) :
- $(target-type) ] ;
- generators.register $(generators2[-1]) ;
- }
- generators = $(generators2) ;
- }
- t = ;
- }
- t = $(t[2-]) ;
- }
-
- for local g in $(generators)
- {
- generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
-
- local m = [ $(g).match-rank $(property-set) ] ;
- if $(m)
- {
- generators.dout [ indent ] " is viable" ;
- viable-generators += $(g) ;
- }
- }
-
- return $(viable-generators) ;
-}
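
A hedged Python sketch of the lookup order above (generators_for_type and its
parameters are hypothetical): walk the requested type and then its bases, stop at the
first type with registered generators, and clone base-type generators so that they
advertise the requested target type.

    def generators_for_type(target_type, all_bases, registry, clone):
        """registry: type -> generators; clone(g, base, wanted) returns an adjusted copy."""
        for t in all_bases(target_type):     # the requested type first, then its bases
            generators = registry.get(t, [])
            if generators:
                if t != target_type:
                    # Base-type generators would produce the base type, so clone them
                    # with the requested target type substituted in.
                    generators = [clone(g, t, target_type) for g in generators]
                return generators
        return []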
-
-
-rule find-viable-generators ( target-type : property-set )
-{
- local key = $(target-type).$(property-set) ;
- local l = $(.fv.$(key)) ;
- if ! $(l)
- {
- l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ;
- if ! $(l)
- {
- l = none ;
- }
- .fv.$(key) = $(l) ;
- }
-
- if $(l) = none
- {
- l = ;
- }
-
- local viable-generators ;
- for local g in $(l)
- {
- # Avoid trying the same generator twice on different levels.
- if ! $(g) in $(.active-generators)
- {
- viable-generators += $(g) ;
- }
- else
- {
-            generators.dout [ indent ] "      generator " [ $(g).id ] "is active, discarding" ;
- }
- }
-
- # Generators which override 'all'.
- local all-overrides ;
-    # Generators which are overridden.
- local overriden-ids ;
- for local g in $(viable-generators)
- {
- local id = [ $(g).id ] ;
- local this-overrides = $(.override.$(id)) ;
- overriden-ids += $(this-overrides) ;
- if all in $(this-overrides)
- {
- all-overrides += $(g) ;
- }
- }
- if $(all-overrides)
- {
- viable-generators = $(all-overrides) ;
- }
- local result ;
- for local g in $(viable-generators)
- {
- if ! [ $(g).id ] in $(overriden-ids)
- {
- result += $(g) ;
- }
- }
-
- return $(result) ;
-}
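
The override handling above boils down to the following Python sketch (a hypothetical
helper working on generator ids): generators that override 'all' win outright, and any
generator whose id is overridden by another viable generator is dropped.

    def apply_overrides(viable, overrides):
        """viable: list of generator ids; overrides: id -> list of ids it overrides."""
        overridden = {o for g in viable for o in overrides.get(g, [])}
        all_overrides = [g for g in viable if "all" in overrides.get(g, [])]
        if all_overrides:
            viable = all_overrides
        return [g for g in viable if g not in overridden]

    assert apply_overrides(["msvc.link", "generic.link"],
                           {"msvc.link": ["generic.link"]}) == ["msvc.link"]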
-
-
-.construct-stack = ;
-
-
-# Attempts to construct a target by finding viable generators, running them and
-# selecting the dependency graph.
-#
-local rule construct-really ( project name ? : target-type : property-set :
- sources * )
-{
-    local viable-generators = [ find-viable-generators $(target-type) :
- $(property-set) ] ;
-
- generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
- " viable generators" ;
-
- local result ;
- local generators-that-succeeded ;
- for local g in $(viable-generators)
- {
- # This variable will be restored on exit from this scope.
- local .active-generators = $(g) $(.active-generators) ;
-
- local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type)
- : $(property-set) : $(sources) ] ;
-
- if $(r)
- {
- generators-that-succeeded += $(g) ;
- if $(result)
- {
- ECHO "Error: ambiguity found when searching for best transformation" ;
- ECHO "Trying to produce type '$(target-type)' from: " ;
- for local s in $(sources)
- {
- ECHO " - " [ $(s).str ] ;
- }
- ECHO "Generators that succeeded:" ;
- for local g in $(generators-that-succeeded)
- {
- ECHO " - " [ $(g).id ] ;
- }
- ECHO "First generator produced: " ;
- for local t in $(result[2-])
- {
- ECHO " - " [ $(t).str ] ;
- }
- ECHO "Second generator produced: " ;
- for local t in $(r[2-])
- {
- ECHO " - " [ $(t).str ] ;
- }
- EXIT ;
- }
- else
- {
- result = $(r) ;
- }
- }
- }
-
- return $(result) ;
-}
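
In outline, the rule above tries each viable generator in turn and treats more than
one success as a hard error. A compact Python sketch of that control flow (names are
illustrative only):

    def select_one_result(viable_generators, try_one):
        """Run each viable generator; exactly one may succeed, otherwise it is ambiguous."""
        result = None
        succeeded = []
        for g in viable_generators:
            r = try_one(g)                   # returns the created targets or None
            if r:
                succeeded.append(g)
                if result is not None:       # a second success means ambiguity
                    raise RuntimeError("ambiguity found when searching for best "
                                       "transformation; generators that succeeded: "
                                       + ", ".join(str(s) for s in succeeded))
                result = r
        return result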
-
-
-# Attempts to create a target of 'target-type' with 'properties' from 'sources'.
-# The 'sources' are treated as a collection of *possible* ingredients, i.e.
-# there is no obligation to consume them all.
-#
-# Returns a list of targets. When this invocation is the first instance of
-# 'construct' in the stack, returns only targets of the requested 'target-type';
-# otherwise, also returns unused sources and additionally generated targets.
-#
-# If 'top-level' is set, does not suppress generators that are already
-# used in the stack. This may be useful in cases where a generator
-# has to build a metatarget -- for example a target corresponding to a
-# built tool.
-#
-rule construct ( project name ? : target-type : property-set * : sources * : top-level ? )
-{
-    local saved-active ;
- if $(top-level)
- {
- saved-active = $(.active-generators) ;
- .active-generators = ;
- }
-
-    if $(.construct-stack)
- {
- ensure-type $(sources) ;
- }
-
- .construct-stack += 1 ;
-
- increase-indent ;
-
- if $(.debug)
- {
- generators.dout [ indent ] "*** construct" $(target-type) ;
-
- for local s in $(sources)
- {
- generators.dout [ indent ] " from" $(s) ;
- }
- generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
- }
-
- local result = [ construct-really $(project) $(name) : $(target-type) :
- $(property-set) : $(sources) ] ;
-
- decrease-indent ;
-
- .construct-stack = $(.construct-stack[2-]) ;
-
- if $(top-level)
- {
- .active-generators = $(saved-active) ;
- }
-
- return $(result) ;
-}
-
-# Given 'result', obtained from some generator or generators.construct, adds
-# 'raw-properties' as usage requirements to it. If result already contains usage
-# requirements -- that is, the first element of result is an instance of the
-# property-set class, the existing usage requirements and 'raw-properties' are
-# combined.
-#
-rule add-usage-requirements ( result * : raw-properties * )
-{
- if $(result)
- {
- if [ class.is-a $(result[1]) : property-set ]
- {
- return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
- }
- else
- {
- return [ property-set.create $(raw-properties) ] $(result) ;
- }
- }
-}
-
-rule dump ( )
-{
- for local g in $(.all-generators)
- {
- ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
- }
-}
-
diff --git a/jam-files/boost-build/build/generators.py b/jam-files/boost-build/build/generators.py
deleted file mode 100644
index 2c59f7ca..00000000
--- a/jam-files/boost-build/build/generators.py
+++ /dev/null
@@ -1,1089 +0,0 @@
-# Status: being ported by Vladimir Prus
-# Base revision: 48649
-# TODO: replace the logging with dout
-
-# Copyright Vladimir Prus 2002.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Manages 'generators' --- objects which can do transformation between different
-# target types and contain an algorithm for finding a transformation from sources
-# to targets.
-#
-# The main entry point to this module is generators.construct rule. It is given
-# a list of source targets, desired target type and a set of properties.
-# It starts by selecting 'viable generators', which have any chance of producing
-# the desired target type with the required properties. Generators are ranked and
-# a set of the most specific ones is selected.
-#
-# The most specific generators have their 'run' methods called, with the properties
-# and list of sources. Each one selects targets which can be directly consumed, and
-# tries to convert the remaining ones to the types it can consume. This is done
-# by recursively calling 'construct' with all consumable types.
-#
-# If the generator has collected all the targets it needs, it creates targets
-# corresponding to the result and returns them. When all generators have been run,
-# the results of one of them are selected and returned as the result.
-#
-# It's quite possible that 'construct' returns more targets than it was asked for.
-# For example, it was asked for a target of type EXE, but the only generator found
-# produces both EXE and TDS (a file with debug information). The extra target will be returned.
-#
-# Likewise, when a generator tries to convert sources to consumable types, it can get
-# more targets than it was asked for. The question is what to do with extra targets.
-# Boost.Build attempts to convert them to the requested types, and does so as early as
-# possible. Specifically, this is done after invoking each generator. (Later I'll
-# document the rationale for trying extra target conversion at that point).
-#
-# That early conversion is not always desirable. Suppose a generator got a source of
-# type Y and must consume one target of type X_1 and one target of type X_2.
-# When converting Y to X_1, an extra target of type Y_2 is created. We should not try to
-# convert it to type X_1, because if we do so, the generator will get two targets
-# of type X_1, and will be at a loss as to which one to use. Because of that, the
-# 'construct' rule has a parameter telling whether multiple targets can be returned. If
-# the parameter is false, conversion of extra targets is not performed.
-
-
-import re
-import cStringIO
-import os.path
-
-from virtual_target import Subvariant
-import virtual_target, type, property_set, property
-from b2.util.logger import *
-from b2.util.utility import *
-from b2.util import set
-from b2.util.sequence import unique
-import b2.util.sequence as sequence
-from b2.manager import get_manager
-import b2.build.type
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
- global __overrides, __active_generators
- global __viable_generators_cache, __viable_source_types_cache
- global __vstg_cached_generators, __vst_cached_types
-
- __generators = {}
- __type_to_generators = {}
- __generators_for_toolset = {}
- __overrides = {}
-
- # TODO: can these be global?
- __construct_stack = []
- __viable_generators_cache = {}
- __viable_source_types_cache = {}
- __active_generators = []
-
- __vstg_cached_generators = []
- __vst_cached_types = []
-
-reset ()
-
-_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
-_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?')
-
-
-__debug = None
-__indent = ""
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-generators" in bjam.variable("ARGV")
- return __debug
-
-def increase_indent():
- global __indent
- __indent += " "
-
-def decrease_indent():
- global __indent
- __indent = __indent[0:-4]
-
-
-# Updates cached viable source target type information as needed after a new
-# derived target type gets added. This is needed because if a target type is a
-# viable source target type for some generator then all of the target type's
-# derived target types are automatically viable as source target types for the
-# same generator. Does nothing if a non-derived target type is passed to it.
-#
-def update_cached_information_with_a_new_type(type):
-
- base_type = b2.build.type.base(type)
-
- if base_type:
- for g in __vstg_cached_generators:
- if base_type in __viable_source_types_cache.get(g, []):
- __viable_source_types_cache[g].append(type)
-
- for t in __vst_cached_types:
- if base_type in __viable_source_types_cache.get(t, []):
- __viable_source_types_cache[t].append(type)
-
-# Clears cached viable source target type information except for target types
-# and generators with all source types listed as viable. Should be called when
-# something invalidates those cached values by possibly causing some new source
-# types to become viable.
-#
-def invalidate_extendable_viable_source_target_type_cache():
-
- global __vstg_cached_generators
- generators_with_cached_source_types = __vstg_cached_generators
- __vstg_cached_generators = []
-
- for g in generators_with_cached_source_types:
- if __viable_source_types_cache.has_key(g):
- if __viable_source_types_cache[g] == ["*"]:
- __vstg_cached_generators.append(g)
- else:
- del __viable_source_types_cache[g]
-
- global __vst_cached_types
- types_with_cached_sources_types = __vst_cached_types
- __vst_cached_types = []
- for t in types_with_cached_sources_types:
- if __viable_source_types_cache.has_key(t):
- if __viable_source_types_cache[t] == ["*"]:
- __vst_cached_types.append(t)
- else:
- del __viable_source_types_cache[t]
-
-def dout(message):
- if debug():
- print __indent + message
-
-class Generator:
- """ Creates a generator.
- manager: the build manager.
- id: identifies the generator
-
- rule: the rule which sets up build actions.
-
- composing: whether generator processes each source target in
- turn, converting it to required types.
- Ordinary generators pass all sources together to
-                    recursive generators.construct_types call.
-
- source_types (optional): types that this generator can handle
-
- target_types_and_names: types the generator will create and, optionally, names for
- created targets. Each element should have the form
- type["(" name-pattern ")"]
- for example, obj(%_x). Name of generated target will be found
- by replacing % with the name of source, provided explicit name
- was not specified.
-
- requirements (optional)
-
- NOTE: all subclasses must have a similar signature for clone to work!
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
- assert(not isinstance(source_types, str))
- assert(not isinstance(target_types_and_names, str))
- self.id_ = id
- self.composing_ = composing
- self.source_types_ = source_types
- self.target_types_and_names_ = target_types_and_names
- self.requirements_ = requirements
-
- self.target_types_ = []
- self.name_prefix_ = []
- self.name_postfix_ = []
-
- for e in target_types_and_names:
- # Create three parallel lists: one with the list of target types,
- # and two other with prefixes and postfixes to be added to target
- # name. We use parallel lists for prefix and postfix (as opposed
-            # to mapping), because a given target type might occur several times,
- # for example "H H(%_symbols)".
- m = _re_separate_types_prefix_and_postfix.match (e)
-
- if not m:
- raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))
-
- target_type = m.group (1)
- if not target_type: target_type = ''
- prefix = m.group (3)
- if not prefix: prefix = ''
- postfix = m.group (4)
- if not postfix: postfix = ''
-
- self.target_types_.append (target_type)
- self.name_prefix_.append (prefix)
- self.name_postfix_.append (postfix)
-
- for x in self.source_types_:
- type.validate (x)
-
- for x in self.target_types_:
- type.validate (x)
-
- def clone (self, new_id, new_toolset_properties):
-        """ Returns another generator which differs from $(self) in
-            - id
-            - value of the <toolset> feature in properties
- """
- return self.__class__ (new_id,
- self.composing_,
- self.source_types_,
- self.target_types_and_names_,
- # Note: this does not remove any subfeatures of <toolset>
- # which might cause problems
- property.change (self.requirements_, '<toolset>') + new_toolset_properties)
-
- def clone_and_change_target_type(self, base, type):
- """Creates another generator that is the same as $(self), except that
-        if 'base' is in the target types of $(self), 'type' will be in the target
-        types of the new generator."""
- target_types = []
- for t in self.target_types_and_names_:
- m = _re_match_type.match(t)
- assert m
-
- if m.group(1) == base:
- if m.group(2):
- target_types.append(type + m.group(2))
- else:
- target_types.append(type)
- else:
- target_types.append(t)
-
- return self.__class__(self.id_, self.composing_,
- self.source_types_,
- target_types,
- self.requirements_)
-
-
- def id(self):
- return self.id_
-
- def source_types (self):
-        """ Returns the list of source types the generator accepts.
- """
- return self.source_types_
-
- def target_types (self):
- """ Returns the list of target types that this generator produces.
-        It is assumed to be always the same -- i.e. it cannot change depending
-            on the list of sources.
- """
- return self.target_types_
-
- def requirements (self):
-        """ Returns the required properties for this generator. Properties
-            in the returned set must be present in the build properties if this
-            generator is to be used. If the result has a grist-only element,
-            the build properties must include some value of that feature.
- """
- return self.requirements_
-
- def match_rank (self, ps):
- """ Returns true if the generator can be run with the specified
- properties.
- """
- # See if generator's requirements are satisfied by
- # 'properties'. Treat a feature name in requirements
- # (i.e. grist-only element), as matching any value of the
- # feature.
- all_requirements = self.requirements ()
-
- property_requirements = []
- feature_requirements = []
-        # This uses strings because generator requirements allow
- # the '<feature>' syntax without value and regular validation
- # is not happy about that.
- for r in all_requirements:
- if get_value (r):
- property_requirements.append (r)
-
- else:
- feature_requirements.append (r)
-
- return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
- and all(ps.get(get_grist(s)) for s in feature_requirements)
-
- def run (self, project, name, prop_set, sources):
- """ Tries to invoke this generator on the given sources. Returns a
- list of generated targets (instances of 'virtual-target').
-
- project: Project for which the targets are generated.
-
- name: Determines the name of 'name' attribute for
- all generated targets. See 'generated_targets' method.
-
- prop_set: Desired properties for generated targets.
-
- sources: Source targets.
- """
-
- if project.manager ().logger ().on ():
- project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
- project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
-
- if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
- raise BaseException ("Unsupported source/source_type combination")
-
- # We don't run composing generators if no name is specified. The reason
- # is that composing generator combines several targets, which can have
- # different names, and it cannot decide which name to give for produced
- # target. Therefore, the name must be passed.
- #
-        # This, in effect, means that composing generators are runnable only
-        # at the top-level of the transformation graph, or if a name is passed explicitly.
-        # Thus, we disallow composing generators in the middle. For example, the
-        # transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
-        # (the OBJ -> STATIC_LIB generator is composing).
- if not self.composing_ or name:
- return self.run_really (project, name, prop_set, sources)
- else:
- return []
-
- def run_really (self, project, name, prop_set, sources):
-
- # consumed: Targets that this generator will consume directly.
- # bypassed: Targets that can't be consumed and will be returned as-is.
-
- if self.composing_:
- (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
- else:
- (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources)
-
- result = []
- if consumed:
- result = self.construct_result (consumed, project, name, prop_set)
- result.extend (bypassed)
-
- if result:
- if project.manager ().logger ().on ():
- project.manager ().logger ().log (__name__, " SUCCESS: ", result)
-
- else:
- project.manager ().logger ().log (__name__, " FAILURE")
-
- return result
-
- def construct_result (self, consumed, project, name, prop_set):
- """ Constructs the dependency graph that will be returned by this
- generator.
-            consumed: Already prepared list of consumable targets.
-                If the generator requires several source files, this will contain
-                exactly len(self.source_types_) targets with matching types.
-                Otherwise, it might contain several targets with the type of
-                self.source_types_[0].
-            project:
-            name:
-            prop_set: Properties to be used for all actions created here
- """
- result = []
- # If this is 1->1 transformation, apply it to all consumed targets in order.
- if len (self.source_types_) < 2 and not self.composing_:
-
- for r in consumed:
- result.extend (self.generated_targets ([r], prop_set, project, name))
-
- else:
-
- if consumed:
- result.extend (self.generated_targets (consumed, prop_set, project, name))
-
- return result
-
- def determine_target_name(self, fullname):
- # Determine target name from fullname (maybe including path components)
- # Place optional prefix and postfix around basename
-
- dir = os.path.dirname(fullname)
- name = os.path.basename(fullname)
-
- if dir and not ".." in dir and not os.path.isabs(dir):
- # Relative path is always relative to the source
- # directory. Retain it, so that users can have files
-            # with the same name in two different subdirectories.
- name = dir + "/" + name
-
- return name
-
- def determine_output_name(self, sources):
- """Determine the name of the produced target from the
- names of the sources."""
-
-        # The simple case is when the name
-        # of the source has a single dot. Then, we take the part before
-        # the dot. Several dots can be caused by:
-        # - Using a source file like a.host.cpp
-        # - A type whose suffix has a dot. Say, we can
-        #   have a type 'host_cpp' with extension 'host.cpp'.
-        # In the first case, we want to take the part up to the last
-        # dot. In the second case -- not sure, but for now take
-        # the part up to the last dot too.
- name = os.path.splitext(sources[0].name())[0]
-
- for s in sources[1:]:
-            n2 = os.path.splitext(s.name())[0]
- if n2 != name:
- get_manager().errors()(
- "%s: source targets have different names: cannot determine target name"
- % (self.id_))
-
- # Names of sources might include directory. We should strip it.
- return self.determine_target_name(sources[0].name())
-
-
- def generated_targets (self, sources, prop_set, project, name):
- """ Constructs targets that are created after consuming 'sources'.
- The result will be the list of virtual-target, which the same length
- as 'target_types' attribute and with corresponding types.
-
- When 'name' is empty, all source targets must have the same value of
- the 'name' attribute, which will be used instead of the 'name' argument.
-
- The value of 'name' attribute for each generated target will be equal to
- the 'name' parameter if there's no name pattern for this type. Otherwise,
- the '%' symbol in the name pattern will be replaced with the 'name' parameter
- to obtain the 'name' attribute.
-
-            For example, if target types are T1 and T2 (with name pattern "%_x"), suffixes
-            for T1 and T2 are .t1 and .t2, and the source is foo.z, then the created files
-            would be "foo.t1" and "foo_x.t2". The 'name' attribute actually determines the
-            basename of a file.
-
- Note that this pattern mechanism has nothing to do with implicit patterns
-            in make. It's a way to produce a target whose name is different from the
-            name of its source.
- """
- if not name:
- name = self.determine_output_name(sources)
-
- # Assign an action for each target
- action = self.action_class()
- a = action(project.manager(), sources, self.id_, prop_set)
-
- # Create generated target for each target type.
- targets = []
- pre = self.name_prefix_
- post = self.name_postfix_
- for t in self.target_types_:
- basename = os.path.basename(name)
- idx = basename.find(".")
- if idx != -1:
- basename = basename[:idx]
- generated_name = pre[0] + basename + post[0]
- generated_name = os.path.join(os.path.dirname(name), generated_name)
- pre = pre[1:]
- post = post[1:]
-
- targets.append(virtual_target.FileTarget(generated_name, t, project, a))
-
- return [ project.manager().virtual_targets().register(t) for t in targets ]
-
- def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
-        """ Attempts to convert 'source' to the types that this generator can
-            handle. The intention is to produce the set of targets that should be
-            used when the generator is run.
-            only_one:   convert 'source' to only one of the source types;
-                        if there's more than one possibility, report an
-                        error.
-
- Returns a pair:
- consumed: all targets that can be consumed.
- bypassed: all targets that cannot be consumed.
- """
- consumed = []
- bypassed = []
- missing_types = []
-
- if len (sources) > 1:
- # Don't know how to handle several sources yet. Just try
- # to pass the request to other generator
- missing_types = self.source_types_
-
- else:
- (c, m) = self.consume_directly (sources [0])
- consumed += c
- missing_types += m
-
-        # No need to search for a transformation if
-        # some source type has consumed the source and
-        # no more source types are needed.
- if only_one and consumed:
- missing_types = []
-
-        # TODO: we should check that only one source type gets consumed
-        # when 'only_one' is true.
-        # TODO: consider if consumed/bypassed separation should
-        # be done by 'construct_types'.
-
- if missing_types:
- transformed = construct_types (project, name, missing_types, prop_set, sources)
-
- # Add targets of right type to 'consumed'. Add others to
- # 'bypassed'. The 'generators.construct' rule has done
- # its best to convert everything to the required type.
- # There's no need to rerun it on targets of different types.
-
- # NOTE: ignoring usage requirements
- for t in transformed[1]:
- if t.type() in missing_types:
- consumed.append(t)
-
- else:
- bypassed.append(t)
-
- consumed = unique(consumed)
- bypassed = unique(bypassed)
-
- # remove elements of 'bypassed' that are in 'consumed'
-
- # Suppose the target type of current generator, X is produced from
- # X_1 and X_2, which are produced from Y by one generator.
- # When creating X_1 from Y, X_2 will be added to 'bypassed'
- # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
- # But they are also in 'consumed'. We have to remove them from
- # bypassed, so that generators up the call stack don't try to convert
- # them.
-
- # In this particular case, X_1 instance in 'consumed' and X_1 instance
- # in 'bypassed' will be the same: because they have the same source and
- # action name, and 'virtual-target.register' won't allow two different
- # instances. Therefore, it's OK to use 'set.difference'.
-
- bypassed = set.difference(bypassed, consumed)
-
- return (consumed, bypassed)
-
-
- def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
- """ Converts several files to consumable types.
- """
- consumed = []
- bypassed = []
-
- # We process each source one-by-one, trying to convert it to
- # a usable type.
- for s in sources:
- # TODO: need to check for failure on each source.
- (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], True)
- if not c:
- project.manager ().logger ().log (__name__, " failed to convert ", s)
-
- consumed.extend (c)
- bypassed.extend (b)
-
- return (consumed, bypassed)
-
- def consume_directly (self, source):
- real_source_type = source.type ()
-
- # If there are no source types, we can consume anything
- source_types = self.source_types()
- if not source_types:
- source_types = [real_source_type]
-
- consumed = []
- missing_types = []
- for st in source_types:
-            # The 'source' is of the right type already.
- if real_source_type == st or type.is_derived (real_source_type, st):
- consumed.append (source)
-
- else:
- missing_types.append (st)
-
- return (consumed, missing_types)
-
- def action_class (self):
-        """ Returns the class to be used for actions. Default implementation
- returns "action".
- """
- return virtual_target.Action
-
-
-def find (id):
- """ Finds the generator with id. Returns None if not found.
- """
- return __generators.get (id, None)
-
-def register (g):
- """ Registers new generator instance 'g'.
- """
- id = g.id()
-
- __generators [id] = g
-
- # A generator can produce several targets of the
-    # same type. We want a unique occurrence of that generator
- # in .generators.$(t) in that case, otherwise, it will
- # be tried twice and we'll get false ambiguity.
- for t in sequence.unique(g.target_types()):
- __type_to_generators.setdefault(t, []).append(g)
-
- # Update the set of generators for toolset
-
-    # TODO: should we check that a generator with this id
-    # is not already registered? For example, the fop.jam
-    # module intentionally declares two generators with the
-    # same id, so such a check would break it.
-
- # Some generators have multiple periods in their name, so the
- # normal $(id:S=) won't generate the right toolset name.
- # e.g. if id = gcc.compile.c++, then
- # .generators-for-toolset.$(id:S=) will append to
- # .generators-for-toolset.gcc.compile, which is a separate
- # value from .generators-for-toolset.gcc. Correcting this
- # makes generator inheritance work properly.
- # See also inherit-generators in module toolset
- base = id.split ('.', 100) [0]
-
- __generators_for_toolset.setdefault(base, []).append(g)
-
- # After adding a new generator that can construct new target types, we need
- # to clear the related cached viable source target type information for
- # constructing a specific target type or using a specific generator. Cached
- # viable source target type lists affected by this are those containing any
- # of the target types constructed by the new generator or any of their base
- # target types.
- #
- # A more advanced alternative to clearing that cached viable source target
- # type information would be to expand it with additional source types or
- # even better - mark it as needing to be expanded on next use.
- #
- # For now we just clear all the cached viable source target type information
- # that does not simply state 'all types' and may implement a more detailed
- # algorithm later on if it becomes needed.
-
- invalidate_extendable_viable_source_target_type_cache()
-
-
-def register_standard (id, source_types, target_types, requirements = []):
-    """ Creates a new instance of the 'generator' class and registers it.
-        Returns the created instance.
-        Rationale: the instance is returned so that it's possible to first register
-        a generator and then call the 'run' method on that generator, bypassing the
-        whole generator selection process.
- """
- g = Generator (id, False, source_types, target_types, requirements)
- register (g)
- return g
-
-def register_composing (id, source_types, target_types, requirements = []):
- g = Generator (id, True, source_types, target_types, requirements)
- register (g)
- return g
-
-def generators_for_toolset (toolset):
- """ Returns all generators which belong to 'toolset'.
- """
- return __generators_for_toolset.get(toolset, [])
-
-def override (overrider_id, overridee_id):
- """Make generator 'overrider-id' be preferred to
- 'overridee-id'. If, when searching for generators
- that could produce a target of certain type,
-    both those generators are among viable generators,
- the overridden generator is immediately discarded.
-
- The overridden generators are discarded immediately
- after computing the list of viable generators, before
- running any of them."""
-
-    __overrides.setdefault(overrider_id, []).append(overridee_id)
-
-def __viable_source_types_real (target_type):
-    """ Returns a list of source types which can possibly be converted
-        to 'target_type' by some chain of generator invocations.
-
-        More formally, takes all generators for 'target_type' and
-        returns the union of source types for those generators and the result
-        of calling itself recursively on those source types.
- """
- generators = []
-
- # 't0' is the initial list of target types we need to process to get a list
- # of their viable source target types. New target types will not be added to
- # this list.
- t0 = type.all_bases (target_type)
-
-
- # 't' is the list of target types which have not yet been processed to get a
- # list of their viable source target types. This list will get expanded as
- # we locate more target types to process.
- t = t0
-
- result = []
- while t:
- # Find all generators for current type.
- # Unlike 'find_viable_generators' we don't care about prop_set.
- generators = __type_to_generators.get (t [0], [])
- t = t[1:]
-
- for g in generators:
- if not g.source_types():
- # Empty source types -- everything can be accepted
- result = "*"
- # This will terminate outer loop.
- t = None
- break
-
- for source_type in g.source_types ():
- if not source_type in result:
- # If generator accepts 'source_type' it
- # will happily accept any type derived from it
- all = type.all_derived (source_type)
- for n in all:
- if not n in result:
-
- # Here there is no point in adding target types to
- # the list of types to process in case they are or
- # have already been on that list. We optimize this
- # check by realizing that we only need to avoid the
- # original target type's base types. Other target
- # types that are or have been on the list of target
- # types to process have been added to the 'result'
- # list as well and have thus already been eliminated
- # by the previous if.
- if not n in t0:
- t.append (n)
- result.append (n)
-
- return result
-
-
-def viable_source_types (target_type):
-    """ Helper function, caches the result of '__viable_source_types_real'.
- """
- if not __viable_source_types_cache.has_key(target_type):
- __vst_cached_types.append(target_type)
- __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
- return __viable_source_types_cache [target_type]
-
-def viable_source_types_for_generator_real (generator):
-    """ Returns the list of source types which, when passed to the 'run'
-        method of 'generator', have some chance of being eventually used
-        (probably after conversion by other generators).
- """
- source_types = generator.source_types ()
-
- if not source_types:
- # If generator does not specify any source types,
- # it might be special generator like builtin.lib-generator
- # which just relays to other generators. Return '*' to
- # indicate that any source type is possibly OK, since we don't
- # know for sure.
- return ['*']
-
- else:
- result = []
- for s in source_types:
- viable_sources = viable_source_types(s)
- if viable_sources == "*":
- result = ["*"]
- break
- else:
- result.extend(type.all_derived(s) + viable_sources)
- return unique(result)
-
-def viable_source_types_for_generator (generator):
-    """ Caches the result of 'viable_source_types_for_generator_real'.
- """
- if not __viable_source_types_cache.has_key(generator):
- __vstg_cached_generators.append(generator)
- __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
-
- return __viable_source_types_cache[generator]
-
-def try_one_generator_really (project, name, generator, target_type, properties, sources):
- """ Returns usage requirements + list of created targets.
- """
- targets = generator.run (project, name, properties, sources)
-
- usage_requirements = []
- success = False
-
- dout("returned " + str(targets))
-
- if targets:
-        success = True
-
- if isinstance (targets[0], property_set.PropertySet):
-            usage_requirements = targets [0]
-            targets = targets [1:]
-
- else:
- usage_requirements = property_set.empty ()
-
- dout( " generator" + generator.id() + " spawned ")
- # generators.dout [ indent ] " " $(targets) ;
-# if $(usage-requirements)
-# {
-# generators.dout [ indent ] " with usage requirements:" $(x) ;
-# }
-
- if success:
- return (usage_requirements, targets)
- else:
- return None
-
-def try_one_generator (project, name, generator, target_type, properties, sources):
- """ Checks if generator invocation can be pruned, because it's guaranteed
- to fail. If so, quickly returns empty list. Otherwise, calls
- try_one_generator_really.
- """
- source_types = []
-
- for s in sources:
- source_types.append (s.type ())
-
- viable_source_types = viable_source_types_for_generator (generator)
-
- if source_types and viable_source_types != ['*'] and\
- not set.intersection (source_types, viable_source_types):
- if project.manager ().logger ().on ():
- id = generator.id ()
- project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
- project.manager ().logger ().log (__name__, "source_types" '%s' % source_types)
- project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
-
- return []
-
- else:
- return try_one_generator_really (project, name, generator, target_type, properties, sources)
-
-
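
try_one_generator above prunes a generator invocation when the actual source types cannot possibly be consumed: the set of source types is non-empty, the generator is not a wildcard ('*') generator, and the two sets do not intersect. A minimal sketch of that guard, using plain Python sets instead of b2's set module (the function name should_prune is invented for illustration):

def should_prune(source_types, viable_source_types):
    # True when the generator call is guaranteed to fail: we know the source
    # types, the generator is not an anything-goes ('*') generator, and the
    # two sets have nothing in common.
    if not source_types:
        return False
    if viable_source_types == ["*"]:
        return False
    return not (set(source_types) & set(viable_source_types))

# A C++ compile generator is pruned for pure-Fortran sources, but not for C++
# sources and never for a wildcard generator.
assert should_prune(["FORTRAN"], ["CPP", "C"])
assert not should_prune(["CPP"], ["CPP", "C"])
assert not should_prune(["FORTRAN"], ["*"])
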
-def construct_types (project, name, target_types, prop_set, sources):
-
- result = []
- usage_requirements = property_set.empty()
-
- for t in target_types:
- r = construct (project, name, t, prop_set, sources)
-
- if r:
- (ur, targets) = r
- usage_requirements = usage_requirements.add(ur)
- result.extend(targets)
-
- # TODO: have to introduce parameter controlling if
- # several types can be matched and add appropriate
- # checks
-
- # TODO: need to review the documentation for
- # 'construct' to see if it should return $(source) even
- # if nothing can be done with it. Current docs seem to
- # imply that, contrary to the behaviour.
- if result:
- return (usage_requirements, result)
-
- else:
- return (usage_requirements, sources)
-
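
construct_types above folds the per-type results together: usage requirements are merged with add(), produced targets are concatenated, and when nothing could be built the original sources are returned unchanged. Below is a simplified, self-contained sketch of that folding; UsageRequirements and construct_one are hypothetical stand-ins for property_set and the real construct call.

class UsageRequirements(object):
    """Hypothetical stand-in for property_set: a mergeable bag of properties."""
    def __init__(self, props=()):
        self.props = frozenset(props)
    def add(self, other):
        return UsageRequirements(self.props | other.props)

def construct_types(target_types, construct_one, sources):
    """Fold the per-type results; fall back to the sources when nothing built."""
    merged = UsageRequirements()
    produced = []
    for t in target_types:
        r = construct_one(t)          # None, or (usage requirements, targets)
        if r:
            ur, targets = r
            merged = merged.add(ur)
            produced.extend(targets)
    return (merged, produced) if produced else (merged, list(sources))

ur, targets = construct_types(
    ["EXE"],
    lambda t: (UsageRequirements(["<define>FOO"]), [t + ": a.exe"]),
    ["a.cpp"])
print(targets)    # ['EXE: a.exe']
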
-def __ensure_type (targets):
- """ Ensures all 'targets' have types. If this is not so, exists with
- error.
- """
- for t in targets:
- if not t.type ():
- get_manager().errors()("target '%s' has no type" % str (t))
-
-def find_viable_generators_aux (target_type, prop_set):
- """ Returns generators which can be used to construct target of specified type
- with specified properties. Uses the following algorithm:
- - iterates over requested target_type and all it's bases (in the order returned bt
- type.all-bases.
- - for each type find all generators that generate that type and which requirements
- are satisfied by properties.
- - if the set of generators is not empty, returns that set.
-
- Note: this algorithm explicitly ignores generators for base classes if there's
- at least one generator for requested target_type.
- """
- # Select generators that can create the required target type.
- viable_generators = []
- initial_generators = []
-
- import type
-
- # Try all-type generators first. Assume they have
- # quite specific requirements.
- all_bases = type.all_bases(target_type)
-
- for t in all_bases:
-
- initial_generators = __type_to_generators.get(t, [])
-
- if initial_generators:
- dout("there are generators for this type")
- if t != target_type:
- # We get here when no generators for target_type are found,
- # but there are some generators for a base type.
- # We'll try to use them, but they will produce targets of
- # the base type, not of 'target_type'. So, we clone the generators
- # and modify the list of target types.
- generators2 = []
- for g in initial_generators[:]:
- # generators.register adds the generator to the list of generators
- # for toolsets, which is a bit strange, but should work.
- # That list is only used when inheriting a toolset, which
- # should have been done before generators are run.
- ng = g.clone_and_change_target_type(t, target_type)
- generators2.append(ng)
- register(ng)
-
- initial_generators = generators2
- break
-
- for g in initial_generators:
- dout("trying generator " + g.id()
- + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
-
- m = g.match_rank(prop_set)
- if m:
- dout(" is viable")
- viable_generators.append(g)
-
- return viable_generators
-
-def find_viable_generators (target_type, prop_set):
- key = target_type + '.' + str (prop_set)
-
- l = __viable_generators_cache.get (key, None)
- if not l:
- l = []
-
- if not l:
- l = find_viable_generators_aux (target_type, prop_set)
-
- __viable_generators_cache [key] = l
-
- viable_generators = []
- for g in l:
- # Avoid trying the same generator twice on different levels.
- # TODO: is this really used?
- if not g in __active_generators:
- viable_generators.append (g)
- else:
- dout(" generator %s is active, discarding" % g.id())
-
- # Generators which override 'all'.
- all_overrides = []
-
- # Generators which are overridden
- overriden_ids = []
-
- for g in viable_generators:
- id = g.id ()
-
- this_overrides = __overrides.get (id, [])
-
- if this_overrides:
- overriden_ids.extend (this_overrides)
- if 'all' in this_overrides:
- all_overrides.append (g)
-
- if all_overrides:
- viable_generators = all_overrides
-
- result = []
- for g in viable_generators:
- if not g.id () in overriden_ids:
- result.append (g)
-
-
- return result
-
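
The tail of find_viable_generators applies the override table: a generator that overrides 'all' wins outright, and any generator whose id is listed in another viable generator's overrides is dropped. A compact sketch of just that filtering step, with a plain dict standing in for the module-level __overrides registry (filter_overrides is an invented name):

def filter_overrides(viable, overrides):
    # 'viable'    -- generator ids already known to match the target type
    # 'overrides' -- dict: generator id -> ids it overrides (or 'all')
    all_overrides = [g for g in viable if "all" in overrides.get(g, [])]
    if all_overrides:
        viable = all_overrides
    overridden = set()
    for g in viable:
        overridden.update(o for o in overrides.get(g, []) if o != "all")
    return [g for g in viable if g not in overridden]

# A toolset-specific generator that overrides the generic one wins.
print(filter_overrides(["c.compile", "gcc.compile"],
                       {"gcc.compile": ["c.compile"]}))   # ['gcc.compile']
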
-def __construct_really (project, name, target_type, prop_set, sources):
- """ Attempts to construct target by finding viable generators, running them
- and selecting the dependency graph.
- """
- viable_generators = find_viable_generators (target_type, prop_set)
-
- result = []
-
- project.manager ().logger ().log (__name__, "*** %d viable generators" % len (viable_generators))
-
- generators_that_succeeded = []
-
- for g in viable_generators:
- __active_generators.append(g)
- r = try_one_generator (project, name, g, target_type, prop_set, sources)
- del __active_generators[-1]
-
- if r:
- generators_that_succeeded.append(g)
- if result:
- output = cStringIO.StringIO()
- print >>output, "ambiguity found when searching for best transformation"
- print >>output, "Trying to produce type '%s' from: " % (target_type)
- for s in sources:
- print >>output, " - " + s.str()
- print >>output, "Generators that succeeded:"
- for g in generators_that_succeeded:
- print >>output, " - " + g.id()
- print >>output, "First generator produced: "
- for t in result[1:]:
- print >>output, " - " + str(t)
- print >>output, "Second generator produced:"
- for t in r[1:]:
- print >>output, " - " + str(t)
- get_manager().errors()(output.getvalue())
- else:
- result = r
-
- return result
-
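
__construct_really above keeps the first successful transformation and reports any further success as an ambiguity error. The sketch below reduces that policy to a few lines; generators are plain callables and the error reporting is collapsed into a RuntimeError, so it is an illustration rather than the real implementation:

def construct_really(generators, run):
    # Run the viable generators in order; keep the first success and treat a
    # second success as an ambiguity (reduced here to a RuntimeError).
    result = None
    succeeded = []
    for g in generators:
        r = run(g)                    # list of targets, or None on failure
        if r:
            succeeded.append(g)
            if result:
                raise RuntimeError(
                    "ambiguity found when searching for best transformation; "
                    "generators that succeeded: " + ", ".join(map(str, succeeded)))
            result = r
    return result

print(construct_really(["g1", "g2"],
                       lambda g: ["obj"] if g == "g1" else None))   # ['obj']
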
-
-def construct (project, name, target_type, prop_set, sources, top_level=False):
- """ Attempts to create target of 'target-type' with 'properties'
- from 'sources'. The 'sources' are treated as a collection of
- *possible* ingridients -- i.e. it is not required to consume
- them all. If 'multiple' is true, the rule is allowed to return
- several targets of 'target-type'.
-
- Returns a list of target. When this invocation is first instance of
- 'construct' in stack, returns only targets of requested 'target-type',
- otherwise, returns also unused sources and additionally generated
- targets.
-
- If 'top-level' is set, does not suppress generators that are already
- used in the stack. This may be useful in cases where a generator
- has to build a metatarget -- for example a target corresponding to
- built tool.
- """
-
- global __active_generators
- if top_level:
- saved_active = __active_generators
- __active_generators = []
-
- global __construct_stack
- if not __construct_stack:
- __ensure_type (sources)
-
- __construct_stack.append (1)
-
- if project.manager().logger().on():
- increase_indent ()
-
- dout( "*** construct " + target_type)
-
- for s in sources:
- dout(" from " + str(s))
-
- project.manager().logger().log (__name__, " properties: ", prop_set.raw ())
-
- result = __construct_really(project, name, target_type, prop_set, sources)
-
- project.manager().logger().decrease_indent()
-
- __construct_stack = __construct_stack [1:]
-
- if top_level:
- __active_generators = saved_active
-
- return result
-
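
construct above is mostly bookkeeping around __construct_really: source types are validated only for the outermost call, a marker is pushed onto the construct stack, and with top_level set the list of active generators is swapped out and restored so a metatarget build is not suppressed by the enclosing search. A rough, simplified sketch of that save/restore discipline (using try/finally, which the original code does not):

_active_generators = []
_construct_stack = []

def construct(target_type, sources, construct_really, top_level=False):
    # Bookkeeping wrapper: outermost-call checks, construct-stack marker, and
    # (for top_level) a temporarily fresh list of active generators.
    global _active_generators
    saved_active = None
    if top_level:
        saved_active, _active_generators = _active_generators, []
    if not _construct_stack:
        pass   # outermost call: the real code verifies every source has a type
    _construct_stack.append(1)
    try:
        return construct_really(target_type, sources)
    finally:
        _construct_stack.pop()
        if top_level:
            _active_generators = saved_active

print(construct("EXE", ["a.cpp"],
                lambda t, s: ["%s built from %s" % (t, ", ".join(s))]))
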
diff --git a/jam-files/boost-build/build/modifiers.jam b/jam-files/boost-build/build/modifiers.jam
deleted file mode 100644
index 6b009343..00000000
--- a/jam-files/boost-build/build/modifiers.jam
+++ /dev/null
@@ -1,232 +0,0 @@
-# Copyright 2003 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Modifiers are generalized generators that mutate targets in specific ways.
-# This structure allows for grouping a variety of functionality in an
-# orthogonal way to the functionality in toolsets, and without specifying
-# more target variations. In turn the modifiers can be used as building
-# blocks to implement simple requests, like the <version> feature.
-
-import modules ;
-import feature ;
-import errors ;
-import type ;
-import "class" : new ;
-import generators ;
-import property ;
-import virtual-target ;
-import numbers ;
-import sequence ;
-import symlink ;
-import property-set ;
-
-# Base generator for creating targets that are modifications of existing
-# targets.
-#
-class modifier : generator
-{
- rule __init__ (
- id
- composing ?
- : source-types *
- : target-types-and-names +
- : requirements *
- )
- {
- generator.__init__ $(id) $(composing)
- : $(source-types)
- : $(target-types-and-names)
- : $(requirements) ;
-
- self.targets-in-progress = ;
- }
-
- # Wraps the generation of the target to call before and after rules to
- # affect the real target.
- #
- rule run ( project name ? : property-set : sources + )
- {
- local result ;
- local current-target = $(project)^$(name) ;
- if ! $(current-target) in $(self.targets-in-progress)
- {
- # Before modifications...
- local project_ =
- [ modify-project-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- local name_ =
- [ modify-name-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- local property-set_ =
- [ modify-properties-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- local sources_ =
- [ modify-sources-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- project = $(project_) ;
- name = $(name_) ;
- property-set = $(property-set_) ;
- sources = $(sources_) ;
-
- # Generate the real target...
- local target-type-p =
- [ property.select <main-target-type> : [ $(property-set).raw ] ] ;
- self.targets-in-progress += $(current-target) ;
- result =
- [ generators.construct $(project) $(name)
- : $(target-type-p:G=)
- : $(property-set)
- : $(sources) ] ;
- self.targets-in-progress = $(self.targets-in-progress[1--2]) ;
-
- # After modifications...
- result =
- [ modify-target-after $(result)
- : $(project) $(name)
- : $(property-set)
- : $(sources) ] ;
- }
- return $(result) ;
- }
-
- rule modify-project-before ( project name ? : property-set : sources + )
- {
- return $(project) ;
- }
-
- rule modify-name-before ( project name ? : property-set : sources + )
- {
- return $(name) ;
- }
-
- rule modify-properties-before ( project name ? : property-set : sources + )
- {
- return $(property-set) ;
- }
-
- rule modify-sources-before ( project name ? : property-set : sources + )
- {
- return $(sources) ;
- }
-
- rule modify-target-after ( target : project name ? : property-set : sources + )
- {
- return $(target) ;
- }
-
- # Utility, clones a file-target with optional changes to the name, type and
- # project of the target.
- # NOTE: This functionality should be moved, and generalized, to
- # virtual-targets.
- #
- rule clone-file-target ( target : new-name ? : new-type ? : new-project ? )
- {
- # Need a MUCH better way to clone a target...
- new-name ?= [ $(target).name ] ;
- new-type ?= [ $(target).type ] ;
- new-project ?= [ $(target).project ] ;
- local result = [ new file-target $(new-name) : $(new-type) : $(new-project) ] ;
-
- if [ $(target).dependencies ] { $(result).depends [ $(target).dependencies ] ; }
- $(result).root [ $(target).root ] ;
- $(result).set-usage-requirements [ $(target).usage-requirements ] ;
-
- local action = [ $(target).action ] ;
- local action-class = [ modules.peek $(action) : __class__ ] ;
-
- local ps = [ $(action).properties ] ;
- local cloned-action = [ new $(action-class) $(result) :
- [ $(action).sources ] : [ $(action).action-name ] : $(ps) ] ;
- $(result).action $(cloned-action) ;
-
- return $(result) ;
- }
-}
-
-
-# A modifier that changes the name of a target, after it's generated, given a
-# regular expression to split the name, and a set of tokens to insert between the
-# split tokens of the name. This also exposes the target for other uses with a
-# symlink to the original name (optionally).
-#
-class name-modifier : modifier
-{
- rule __init__ ( )
- {
- # Apply ourselves to EXE targets, for now.
- modifier.__init__ name.modifier : : EXE LIB : <name-modify>yes ;
- }
-
- # Modifies the name, by cloning the target with the new name.
- #
- rule modify-target-after ( target : project name ? : property-set : sources + )
- {
- local result = $(target) ;
-
- local name-mod-p = [ property.select <name-modifier> : [ $(property-set).raw ] ] ;
- if $(name-mod-p)
- {
- local new-name = [ modify-name [ $(target).name ] : $(name-mod-p:G=) ] ;
- if $(new-name) != [ $(target).name ]
- {
- result = [ clone-file-target $(target) : $(new-name) ] ;
- }
- local expose-original-as-symlink = [ MATCH "<symlink>(.*)" : $(name-mod-p) ] ;
- if $(expose-original-as-symlink)
- {
- local symlink-t = [ new symlink-targets $(project) : $(name) : [ $(result).name ] ] ;
- result = [ $(symlink-t).construct $(result)
- : [ property-set.create [ $(property-set).raw ] <symlink-location>build-relative ] ] ;
- }
- }
-
- return $(result) ;
- }
-
- # Do the transformation of the name.
- #
- rule modify-name ( name : modifier-spec + )
- {
- local match = [ MATCH "<match>(.*)" : $(modifier-spec) ] ;
- local name-parts = [ MATCH $(match) : $(name) ] ;
- local insertions = [ sequence.insertion-sort [ MATCH "(<[0123456789]+>.*)" : $(modifier-spec) ] ] ;
- local new-name-parts ;
- local insert-position = 1 ;
- while $(insertions)
- {
- local insertion = [ MATCH "<$(insert-position)>(.*)" : $(insertions[1]) ] ;
- if $(insertion)
- {
- new-name-parts += $(insertion) ;
- insertions = $(insertions[2-]) ;
- }
- new-name-parts += $(name-parts[1]) ;
- name-parts = $(name-parts[2-]) ;
- insert-position = [ numbers.increment $(insert-position) ] ;
- }
- new-name-parts += $(name-parts) ;
- return [ sequence.join $(new-name-parts) ] ;
- }
-
- rule optional-properties ( )
- {
- return <name-modify>yes ;
- }
-}
-feature.feature name-modifier : : free ;
-feature.feature name-modify : no yes : incidental optional ;
-generators.register [ new name-modifier ] ;
-
-# Translates <version> property to a set of modification properties
-# that are applied by the name-modifier, and symlink-modifier.
-#
-rule version-to-modifier ( property : properties * )
-{
- return
- <name-modify>yes
- <name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.$(property:G=)
- <name-modifier><symlink>yes
- ;
-}
-feature.action <version> : version-to-modifier ;
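
modifiers.jam's modify-name rule splits a target name with the <match> regular expression and interleaves the numbered insertion tokens between the captured parts; version-to-modifier uses it to splice a version string into a library name. Since the rest of this section's examples are Python, here is an approximate Python rendering of that splicing; modify_name and its argument encoding are illustrative only, not the Jam rule's actual interface:

import re

def modify_name(name, match, insertions):
    # Split 'name' with the <match> regex and splice the 1-based, position-
    # indexed 'insertions' in front of the corresponding captured part.
    parts = re.match(match, name).groups()
    out = []
    for position, part in enumerate(parts, start=1):
        if position in insertions:
            out.append(insertions[position])
        out.append(part)
    return "".join(out)

# version-to-modifier's encoding for <version>1.2: split at the first dot and
# insert ".1.2" before the second captured part.
print(modify_name("libfoo.so", r"^([^.]*)(.*)", {2: ".1.2"}))   # libfoo.1.2.so
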
diff --git a/jam-files/boost-build/build/project.ann.py b/jam-files/boost-build/build/project.ann.py
deleted file mode 100644
index 349f5495..00000000
--- a/jam-files/boost-build/build/project.ann.py
+++ /dev/null
@@ -1,996 +0,0 @@
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 1) # Status: being ported by Vladimir Prus
-ddc17f01 (vladimir_prus 2007-10-26 14:57:56 +0000 2) # Base revision: 40480
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 3)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 4) # Copyright 2002, 2003 Dave Abrahams
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 5) # Copyright 2002, 2005, 2006 Rene Rivera
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 6) # Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 7) # Distributed under the Boost Software License, Version 1.0.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 8) # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 9)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 10) # Implements project representation and loading.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 11) # Each project is represented by
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 12) # - a module where all the Jamfile content live.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 13) # - an instance of 'project-attributes' class.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 14) # (given module name, can be obtained by 'attributes' rule)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 15) # - an instance of 'project-target' class (from targets.jam)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 16) # (given a module name, can be obtained by 'target' rule)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 17) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 18) # Typically, projects are created as result of loading Jamfile, which is
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 19) # done by rules 'load' and 'initialize', below. First, module for Jamfile
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 20) # is loaded and new project-attributes instance is created. Some rules
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 21) # necessary for project are added to the module (see 'project-rules' module)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 22) # at the bottom of this file.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 23) # Default project attributes are set (inheriting attributes of parent project, if
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 24) # it exists). After that, Jamfile is read. It can declare its own attributes,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 25) # via 'project' rule, which will be combined with already set attributes.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 26) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 27) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 28) # The 'project' rule can also declare project id, which will be associated with
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 29) # the project module.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 30) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 31) # There can also be 'standalone' projects. They are created by calling 'initialize'
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 32) # on arbitrary module, and not specifying location. After the call, the module can
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 33) # call 'project' rule, declare main target and behave as regular projects. However,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 34) # since it's not associated with any location, it's better to declare only prebuilt
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 35) # targets.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 36) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 37) # The list of all loaded Jamfiles is stored in the variable .project-locations. It's possible
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 38) # to obtain module name for a location using 'module-name' rule. The standalone projects
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 39) # are not recorded, the only way to use them is by project id.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 40)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 41) import b2.util.path
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 42) from b2.build import property_set, property
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 43) from b2.build.errors import ExceptionWithUserContext
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 44) import b2.build.targets
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 45)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 46) import bjam
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 47)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 48) import re
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 49) import sys
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 50) import os
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 51) import string
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 52) import imp
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 53) import traceback
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 54)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 55) class ProjectRegistry:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 56)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 57) def __init__(self, manager, global_build_dir):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 58) self.manager = manager
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 59) self.global_build_dir = None
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 60) self.project_rules_ = ProjectRules(self)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 61)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 62) # The target corresponding to the project being loaded now
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 63) self.current_project = None
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 64)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 65) # The set of names of loaded project modules
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 66) self.jamfile_modules = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 67)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 68) # Mapping from location to module name
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 69) self.location2module = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 70)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 71) # Mapping from project id to project module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 72) self.id2module = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 73)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 74) # Map from Jamfile directory to parent Jamfile/Jamroot
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 75) # location.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 76) self.dir2parent_jamfile = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 77)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 78) # Map from directory to the name of Jamfile in
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 79) # that directory (or None).
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 80) self.dir2jamfile = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 81)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 82) # Map from project module to attributes object.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 83) self.module2attributes = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 84)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 85) # Map from project module to target for the project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 86) self.module2target = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 87)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 88) # Map from names to Python modules, for modules loaded
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 89) # via 'using' and 'import' rules in Jamfiles.
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 90) self.loaded_tool_modules_ = {}
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 91)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 92) # Map from project target to the list of
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 93) # (id,location) pairs corresponding to all 'use-project'
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 94) # invocations.
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 95) # TODO: should not have a global map, keep this
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 96) # in ProjectTarget.
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 97) self.used_projects = {}
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 98)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 99) self.saved_current_project = []
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 100)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 101) self.JAMROOT = self.manager.getenv("JAMROOT");
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 102)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 103) # Note the use of character groups, as opposed to listing
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 104) # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 105) # matches on windows and would have to eliminate duplicates.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 106) if not self.JAMROOT:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 107) self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 108)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 109) # Default patterns to search for the Jamfiles to use for build
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 110) # declarations.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 111) self.JAMFILE = self.manager.getenv("JAMFILE")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 112)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 113) if not self.JAMFILE:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 114) self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 115) "[Jj]amfile.jam"]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 116)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 117)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 118) def load (self, jamfile_location):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 119) """Loads the Jamfile at the given location. After loading, the project global
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 120) file and Jamfiles needed by the loaded one will be loaded recursively.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 121) If the jamfile at that location is loaded already, does nothing.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 122) Returns the project module for the Jamfile."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 123)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 124) absolute = os.path.join(os.getcwd(), jamfile_location)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 125) absolute = os.path.normpath(absolute)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 126) jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 127)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 128) if "--debug-loading" in self.manager.argv():
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 129) print "Loading Jamfile at '%s'" % jamfile_location
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 130)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 131)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 132) mname = self.module_name(jamfile_location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 133) # If Jamfile is already loaded, don't try again.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 134) if not mname in self.jamfile_modules:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 135)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 136) self.load_jamfile(jamfile_location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 137)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 138) # We want to make sure that child project are loaded only
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 139) # after parent projects. In particular, because parent projects
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 140) # define attributes which are inherited by children, and we don't
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 141) # want children to be loaded before parents have defined everything.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 142) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 143) # While "build-project" and "use-project" can potentially refer
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 144) # to child projects from parent projects, we don't immediately
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 145) # load child projects when seeing those attributes. Instead,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 146) # we record the minimal information that will be used only later.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 147)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 148) self.load_used_projects(mname)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 149)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 150) return mname
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 151)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 152) def load_used_projects(self, module_name):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 153) # local used = [ modules.peek $(module-name) : .used-projects ] ;
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 154) used = self.used_projects[module_name]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 155)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 156) location = self.attribute(module_name, "location")
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 157) for u in used:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 158) id = u[0]
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 159) where = u[1]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 160)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 161) self.use(id, os.path.join(location, where))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 162)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 163) def load_parent(self, location):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 164) """Loads parent of Jamfile at 'location'.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 165) Issues an error if nothing is found."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 166)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 167) found = b2.util.path.glob_in_parents(
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 168) location, self.JAMROOT + self.JAMFILE)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 169)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 170) if not found:
-1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 171) print "error: Could not find parent for project at '%s'" % location
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 172) print "error: Did not find Jamfile or project-root.jam in any parent directory."
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 173) sys.exit(1)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 174)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 175) return self.load(os.path.dirname(found[0]))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 176)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 177) def act_as_jamfile(self, module, location):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 178) """Makes the specified 'module' act as if it were a regularly loaded Jamfile
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 179) at 'location'. If Jamfile is already located for that location, it's an
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 180) error."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 181)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 182) if self.module_name(location) in self.jamfile_modules:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 183) self.manager.errors()(
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 184) "Jamfile was already loaded for '%s'" % location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 185)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 186) # Set up non-default mapping from location to module.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 187) self.location2module[location] = module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 188)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 189) # Add the location to the list of project locations
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 190) # so that we don't try to load Jamfile in future
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 191) self.jamfile_modules.append(location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 192)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 193) self.initialize(module, location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 194)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 195) def find(self, name, current_location):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 196) """Given 'name' which can be project-id or plain directory name,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 197) return project module corresponding to that id or directory.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 198) Returns nothing if the project is not found."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 199)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 200) project_module = None
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 201)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 202) # Try interpreting name as project id.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 203) if name[0] == '/':
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 204) project_module = self.id2module.get(name)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 205)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 206) if not project_module:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 207) location = os.path.join(current_location, name)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 208) # If no project is registered for the given location, try to
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 209) # load it. First see if we have Jamfile. If not we might have project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 210) # root, willing to act as Jamfile. In that case, project-root
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 211) # must be placed in the directory referred by id.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 212)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 213) project_module = self.module_name(location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 214) if not project_module in self.jamfile_modules and \
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 215) b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 216) project_module = self.load(location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 217)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 218) return project_module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 219)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 220) def module_name(self, jamfile_location):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 221) """Returns the name of module corresponding to 'jamfile-location'.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 222) If no module corresponds to location yet, associates default
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 223) module name with that location."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 224) module = self.location2module.get(jamfile_location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 225) if not module:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 226) # Root the path, so that locations are always unambiguous.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 227) # Without this, we can't decide if '../../exe/program1' and '.'
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 228) # are the same paths, or not.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 229) jamfile_location = os.path.realpath(
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 230) os.path.join(os.getcwd(), jamfile_location))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 231) module = "Jamfile<%s>" % jamfile_location
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 232) self.location2module[jamfile_location] = module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 233) return module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 234)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 235) def find_jamfile (self, dir, parent_root=0, no_errors=0):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 236) """Find the Jamfile at the given location. This returns the
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 237) exact names of all the Jamfiles in the given directory. The optional
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 238) parent-root argument causes this to search not the given directory
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 239) but the ones above it up to the directory given in it."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 240)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 241) # Glob for all the possible Jamfiles according to the match pattern.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 242) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 243) jamfile_glob = None
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 244) if parent_root:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 245) parent = self.dir2parent_jamfile.get(dir)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 246) if not parent:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 247) parent = b2.util.path.glob_in_parents(dir,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 248) self.JAMFILE)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 249) self.dir2parent_jamfile[dir] = parent
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 250) jamfile_glob = parent
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 251) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 252) jamfile = self.dir2jamfile.get(dir)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 253) if not jamfile:
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 254) jamfile = b2.util.path.glob([dir], self.JAMFILE)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 255) self.dir2jamfile[dir] = jamfile
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 256) jamfile_glob = jamfile
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 257)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 258) if len(jamfile_glob):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 259) # Multiple Jamfiles found in the same place. Warn about this.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 260) # And ensure we use only one of them.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 261) # As a temporary convenience measure, if there's a Jamfile.v2 among the
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 262) # found files, suppress the warning and use it.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 263) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 264) pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 265) v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 266) if len(v2_jamfiles) == 1:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 267) jamfile_glob = v2_jamfiles
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 268) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 269) print """warning: Found multiple Jamfiles at '%s'!
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 270) Loading the first one: '%s'.""" % (dir, jamfile_glob[0])
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 271)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 272) # Could not find it, error.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 273) if not no_errors and not jamfile_glob:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 274) self.manager.errors()(
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 275) """Unable to load Jamfile.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 276) Could not find a Jamfile in directory '%s'
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 277) Attempted to find it with pattern '%s'.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 278) Please consult the documentation at 'http://boost.org/b2.'."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 279) % (dir, string.join(self.JAMFILE)))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 280)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 281) return jamfile_glob[0]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 282)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 283) def load_jamfile(self, dir):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 284) """Load a Jamfile at the given directory. Returns nothing.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 285) Will attempt to load the file as indicated by the JAMFILE patterns.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 286) Effect of calling this rule twice with the same 'dir' is undefined."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 287)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 288) # See if the Jamfile is where it should be.
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 289) jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 290) if not jamfile_to_load:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 291) jamfile_to_load = self.find_jamfile(dir)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 292) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 293) jamfile_to_load = jamfile_to_load[0]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 294)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 295) # The module of the jamfile.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 296) dir = os.path.realpath(os.path.dirname(jamfile_to_load))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 297)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 298) jamfile_module = self.module_name (dir)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 299)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 300) # Initialize the jamfile module before loading.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 301) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 302) self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 303)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 304) saved_project = self.current_project
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 305)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 306) self.used_projects[jamfile_module] = []
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 307)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 308) # Now load the Jamfile in its own context.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 309) # Initialization might have loaded parent Jamfiles, which might have
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 310) # loaded the current Jamfile with use-project. Do a final check to make
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 311) # sure it's not loaded already.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 312) if not jamfile_module in self.jamfile_modules:
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 313) self.jamfile_modules[jamfile_module] = True
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 314)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 315) # FIXME:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 316) # mark-as-user $(jamfile-module) ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 317)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 318) bjam.call("load", jamfile_module, jamfile_to_load)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 319) basename = os.path.basename(jamfile_to_load)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 320)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 321) # Now do some checks
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 322) if self.current_project != saved_project:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 323) self.manager.errors()(
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 324) """The value of the .current-project variable
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 325) has magically changed after loading a Jamfile.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 326) This means some of the targets might be defined in the wrong project.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 327) after loading %s
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 328) expected value %s
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 329) actual value %s""" % (jamfile_module, saved_project, self.current_project))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 330)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 331) if self.global_build_dir:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 332) id = self.attribute(jamfile_module, "id")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 333) project_root = self.attribute(jamfile_module, "project-root")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 334) location = self.attribute(jamfile_module, "location")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 335)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 336) if location and project_root == dir:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 337) # This is Jamroot
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 338) if not id:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 339) # FIXME: go via errors module, so that contexts are
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 340) # shown?
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 341) print "warning: the --build-dir option was specified"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 342) print "warning: but Jamroot at '%s'" % dir
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 343) print "warning: specified no project id"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 344) print "warning: the --build-dir option will be ignored"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 345)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 346)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 347) def load_standalone(self, jamfile_module, file):
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 348) """Loads 'file' as a standalone project that has no location
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 349) associated with it. This is mostly useful for user-config.jam,
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 350) which should be able to define targets, but although it has
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 351) some location in filesystem, we don't want any build to
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 352) happen in user's HOME, for example.
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 353)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 354) The caller is required to never call this method twice on
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 355) the same file.
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 356) """
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 357)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 358) self.initialize(jamfile_module)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 359) self.used_projects[jamfile_module] = []
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 360) bjam.call("load", jamfile_module, file)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 361) self.load_used_projects(jamfile_module)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 362)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 363) def is_jamroot(self, basename):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 364) match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 365) if match:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 366) return 1
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 367) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 368) return 0
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 369)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 370) def initialize(self, module_name, location=None, basename=None):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 371) """Initialize the module for a project.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 372)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 373) module-name is the name of the project module.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 374) location is the location (directory) of the project to initialize.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 375) If not specified, a standalone project will be initialized
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 376) """
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 377)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 378) if "--debug-loading" in self.manager.argv():
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 379) print "Initializing project '%s'" % module_name
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 380)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 381) # TODO: need to consider if standalone projects can do anything but defining
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 382) # prebuilt targets. If so, we need to give more sensible "location", so that
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 383) # source paths are correct.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 384) if not location:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 385) location = ""
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 386) else:
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 387) location = b2.util.path.relpath(os.getcwd(), location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 388)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 389) attributes = ProjectAttributes(self.manager, location, module_name)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 390) self.module2attributes[module_name] = attributes
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 391)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 392) if location:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 393) attributes.set("source-location", location, exact=1)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 394) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 395) attributes.set("source-location", "", exact=1)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 396)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 397) attributes.set("requirements", property_set.empty(), exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 398) attributes.set("usage-requirements", property_set.empty(), exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 399) attributes.set("default-build", [], exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 400) attributes.set("projects-to-build", [], exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 401) attributes.set("project-root", None, exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 402) attributes.set("build-dir", None, exact=True)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 403)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 404) self.project_rules_.init_project(module_name)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 405)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 406) jamroot = False
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 407)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 408) parent_module = None;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 409) if module_name == "site-config":
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 410) # No parent
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 411) pass
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 412) elif module_name == "user-config":
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 413) parent_module = "site-config"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 414) elif location and not self.is_jamroot(basename):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 415) # We search for parent/project-root only if jamfile was specified
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 416) # --- i.e
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 417) # if the project is not standalone.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 418) parent_module = self.load_parent(location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 419) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 420) # It's either jamroot, or standalone project.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 421) # If it's jamroot, inherit from user-config.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 422) if location:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 423) parent_module = "user-config" ;
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 424) jamroot = True ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 425)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 426) if parent_module:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 427) self.inherit_attributes(module_name, parent_module)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 428) attributes.set("parent-module", parent_module, exact=1)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 429)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 430) if jamroot:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 431) attributes.set("project-root", location, exact=1)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 432)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 433) parent = None
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 434) if parent_module:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 435) parent = self.target(parent_module)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 436)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 437) if not self.module2target.has_key(module_name):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 438) target = b2.build.targets.ProjectTarget(self.manager,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 439) module_name, module_name, parent,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 440) self.attribute(module_name,"requirements"),
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 441) # FIXME: why we need to pass this? It's not
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 442) # passed in jam code.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 443) self.attribute(module_name, "default-build"))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 444) self.module2target[module_name] = target
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 445)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 446) self.current_project = self.target(module_name)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 447)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 448) def inherit_attributes(self, project_module, parent_module):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 449) """Make 'project-module' inherit attributes of project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 450) root and parent module."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 451)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 452) attributes = self.module2attributes[project_module]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 453) pattributes = self.module2attributes[parent_module]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 454)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 455) # Parent module might be locationless user-config.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 456) # FIXME:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 457) #if [ modules.binding $(parent-module) ]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 458) #{
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 459) # $(attributes).set parent : [ path.parent
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 460) # [ path.make [ modules.binding $(parent-module) ] ] ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 461) # }
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 462)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 463) attributes.set("project-root", pattributes.get("project-root"), exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 464) attributes.set("default-build", pattributes.get("default-build"), exact=True)
-49c03622 (jhunold 2008-07-23 09:57:41 +0000 465) attributes.set("requirements", pattributes.get("requirements"), exact=True)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 466) attributes.set("usage-requirements",
-cde6f09a (vladimir_prus 2007-10-19 23:12:33 +0000 467) pattributes.get("usage-requirements"), exact=1)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 468)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 469) parent_build_dir = pattributes.get("build-dir")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 470)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 471) if parent_build_dir:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 472) # Have to compute relative path from parent dir to our dir
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 473) # Convert both paths to absolute, since we cannot
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 474) # find relative path from ".." to "."
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 475)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 476) location = attributes.get("location")
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 477) parent_location = pattributes.get("location")
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 478)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 479) our_dir = os.path.join(os.getcwd(), location)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 480) parent_dir = os.path.join(os.getcwd(), parent_location)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 481)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 482) build_dir = os.path.join(parent_build_dir,
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 483) b2.util.path.relpath(parent_dir,
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 484) our_dir))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 485)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 486) def register_id(self, id, module):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 487) """Associate the given id with the given project module."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 488) self.id2module[id] = module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 489)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 490) def current(self):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 491) """Returns the project which is currently being loaded."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 492) return self.current_project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 493)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 494) def push_current(self, project):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 495) """Temporarily changes the current project to 'project'. Should
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 496) be followed by 'pop-current'."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 497) self.saved_current_project.append(self.current_project)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 498) self.current_project = project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 499)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 500) def pop_current(self):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 501) self.current_project = self.saved_current_project[-1]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 502) del self.saved_current_project[-1]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 503)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 504) def attributes(self, project):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 505) """Returns the project-attribute instance for the
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 506) specified jamfile module."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 507) return self.module2attributes[project]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 508)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 509) def attribute(self, project, attribute):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 510) """Returns the value of the specified attribute in the
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 511) specified jamfile module."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 512) return self.module2attributes[project].get(attribute)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 513)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 514) def target(self, project_module):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 515) """Returns the project target corresponding to the 'project-module'."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 516) if not self.module2target[project_module]:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 517) self.module2target[project_module] = \
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 518) ProjectTarget(project_module, project_module,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 519) self.attribute(project_module, "requirements"))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 520)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 521) return self.module2target[project_module]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 522)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 523) def use(self, id, location):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 524) # Use/load a project.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 525) saved_project = self.current_project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 526) project_module = self.load(location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 527) declared_id = self.attribute(project_module, "id")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 528)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 529) if not declared_id or declared_id != id:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 530) # The project at 'location' either has no id or
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 531) # that id is not equal to the 'id' parameter.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 532) if self.id2module[id] and self.id2module[id] != project_module:
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 533) self.manager.errors()(
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 534) """Attempt to redeclare already existing project id '%s'""" % id)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 535) self.id2module[id] = project_module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 536)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 537) self.current_module = saved_project
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 538)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 539) def add_rule(self, name, callable):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 540) """Makes rule 'name' available to all subsequently loaded Jamfiles.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 541)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 542) Calling that rule will relay to 'callable'."""
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 543) self.project_rules_.add_rule(name, callable)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 544)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 545) def project_rules(self):
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 546) return self.project_rules_
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 547)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 548) def glob_internal(self, project, wildcards, excludes, rule_name):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 549) location = project.get("source-location")
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 550)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 551) result = []
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 552) callable = b2.util.path.__dict__[rule_name]
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 553)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 554) paths = callable(location, wildcards, excludes)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 555) has_dir = 0
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 556) for w in wildcards:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 557) if os.path.dirname(w):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 558) has_dir = 1
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 559) break
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 560)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 561) if has_dir or rule_name != "glob":
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 562) # The paths we've found are relative to current directory,
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 563) # but the names specified in sources list are assumed to
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 564) # be relative to source directory of the corresponding
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 565) # project. So, just make the names absolute.
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 566) result = [os.path.join(os.getcwd(), p) for p in paths]
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 567) else:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 568) # There was no directory in the wildcards, so the files are all
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 569) # in the source directory of the project. Just drop the
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 570) # directory, instead of making paths absolute.
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 571) result = [os.path.basename(p) for p in paths]
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 572)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 573) return result
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 574)
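For illustration, a rough sketch of how this distinction shows up in a Jamfile (file names are made up): a bare pattern yields plain file names relative to the project's source directory, while a pattern containing a directory part yields absolute paths.

    # Bare pattern: results are plain names such as "a.cpp".
    exe app : [ glob *.cpp ] ;
    # Pattern with a directory part (the second argument lists exclusions):
    lib util : [ glob src/util/*.cpp : src/util/legacy.cpp ] ;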
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 575) def load_module(self, name, extra_path=None):
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 576) """Classic Boost.Build 'modules' are in fact global variables.
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 577) Therefore, try to find an already loaded Python module called 'name' in sys.modules.
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 578) If the module is not loaded, find it in the Boost.Build search
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 579) path and load it. The new module is not entered in sys.modules.
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 580) The motivation here is to have disjoint namespace of modules
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 581) loaded via 'import/using' in Jamfile, and ordinary Python
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 582) modules. We don't want 'using foo' in Jamfile to load ordinary
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 583) Python module 'foo' which is going to not work. And we
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 584) Python module 'foo', which is not going to work. And we
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 585) accidentally grab module named foo that is internal to
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 586) Boost.Build and intended to provide interface to Jamfiles."""
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 587)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 588) existing = self.loaded_tool_modules_.get(name)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 589) if existing:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 590) return existing
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 591)
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 592) modules = sys.modules
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 593) for class_name in modules:
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 594) if name in class_name:
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 595) module = modules[class_name]
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 596) self.loaded_tool_modules_[name] = module
-53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 597) return module
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 598)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 599) path = extra_path
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 600) if not path:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 601) path = []
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 602) path.extend(self.manager.b2.path())
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 603) location = None
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 604) for p in path:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 605) l = os.path.join(p, name + ".py")
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 606) if os.path.exists(l):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 607) location = l
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 608) break
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 609)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 610) if not location:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 611) self.manager.errors()("Cannot find module '%s'" % name)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 612)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 613) mname = "__build_build_temporary__"
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 614) file = open(location)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 615) try:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 616) # TODO: this means we'll never make use of .pyc module,
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 617) # which might be a problem, or not.
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 618) module = imp.load_module(mname, file, os.path.basename(location),
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 619) (".py", "r", imp.PY_SOURCE))
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 620) del sys.modules[mname]
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 621) self.loaded_tool_modules_[name] = module
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 622) return module
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 623) finally:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 624) file.close()
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 625)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 626)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 627)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 628) # FIXME:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 629) # Defines a Boost.Build extension project. Such extensions usually
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 630) # contain library targets and features that can be used by many people.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 631) # Even though extensions are really projects, they can be initialized as
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 632) # a module would be with the "using" (project.project-rules.using)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 633) # mechanism.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 634) #rule extension ( id : options * : * )
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 635) #{
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 636) # # The caller is a standalone module for the extension.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 637) # local mod = [ CALLER_MODULE ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 638) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 639) # # We need to do the rest within the extension module.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 640) # module $(mod)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 641) # {
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 642) # import path ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 643) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 644) # # Find the root project.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 645) # local root-project = [ project.current ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 646) # root-project = [ $(root-project).project-module ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 647) # while
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 648) # [ project.attribute $(root-project) parent-module ] &&
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 649) # [ project.attribute $(root-project) parent-module ] != user-config
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 650) # {
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 651) # root-project = [ project.attribute $(root-project) parent-module ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 652) # }
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 653) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 654) # # Create the project data, and bring in the project rules
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 655) # # into the module.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 656) # project.initialize $(__name__) :
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 657) # [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 658) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 659) # # Create the project itself, i.e. the attributes.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 660) # # All extensions are created in the "/ext" project space.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 661) # project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 662) # local attributes = [ project.attributes $(__name__) ] ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 663) #
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 664) # # Inherit from the root project of whomever is defining us.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 665) # project.inherit-attributes $(__name__) : $(root-project) ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 666) # $(attributes).set parent-module : $(root-project) : exact ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 667) # }
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 668) #}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 669)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 670)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 671) class ProjectAttributes:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 672) """Class keeping all the attributes of a project.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 673)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 674) The standard attributes are "id", "location", "project-root", "parent",
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 675) "requirements", "default-build", "source-location" and "projects-to-build".
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 676) """
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 677)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 678) def __init__(self, manager, location, project_module):
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 679) self.manager = manager
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 680) self.location = location
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 681) self.project_module = project_module
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 682) self.attributes = {}
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 683) self.usage_requirements = None
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 684)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 685) def set(self, attribute, specification, exact):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 686) """Set the named attribute from the specification given by the user.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 687) The value actually set may be different."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 688)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 689) if exact:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 690) self.__dict__[attribute] = specification
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 691)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 692) elif attribute == "requirements":
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 693) self.requirements = property_set.refine_from_user_input(
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 694) self.requirements, specification,
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 695) self.project_module, self.location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 696)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 697) elif attribute == "usage-requirements":
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 698) unconditional = []
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 699) for p in specification:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 700) split = property.split_conditional(p)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 701) if split:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 702) unconditional.append(split[1])
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 703) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 704) unconditional.append(p)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 705)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 706) non_free = property.remove("free", unconditional)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 707) if non_free:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 708) pass
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 709) # FIXME:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 710) #errors.error "usage-requirements" $(specification) "have non-free properties" $(non-free) ;
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 711)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 712) t = property.translate_paths(specification, self.location)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 713)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 714) existing = self.__dict__.get("usage-requirements")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 715) if existing:
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 716) new = property_set.create(existing.raw() + t)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 717) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 718) new = property_set.create(t)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 719) self.__dict__["usage-requirements"] = new
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 720)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 721)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 722) elif attribute == "default-build":
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 723) self.__dict__["default-build"] = property_set.create(specification)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 724)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 725) elif attribute == "source-location":
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 726) source_location = []
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 727) for path in specification:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 728) source_location.append(os.path.join(self.location, path))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 729) self.__dict__["source-location"] = source_location
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 730)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 731) elif attribute == "build-dir":
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 732) self.__dict__["build-dir"] = os.path.join(self.location, specification)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 733)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 734) elif not attribute in ["id", "default-build", "location",
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 735) "source-location", "parent",
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 736) "projects-to-build", "project-root"]:
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 737) self.manager.errors()(
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 738) """Invalid project attribute '%s' specified
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 739) for project at '%s'""" % (attribute, self.location))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 740) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 741) self.__dict__[attribute] = specification
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 742)
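As a rough illustration (the project name and paths are made up), the branches above correspond to the named arguments of the 'project' declaration: 'source-location' and 'build-dir' are rebased against the project location, 'usage-requirements' become a property set, and unknown attribute names are reported as errors.

    project example
        : requirements <threading>multi
        : usage-requirements <include>include
        : source-location src
        : build-dir bin
        ;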
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 743) def get(self, attribute):
-cde6f09a (vladimir_prus 2007-10-19 23:12:33 +0000 744) return self.__dict__[attribute]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 745)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 746) def dump(self):
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 747) """Prints the project attributes."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 748) id = self.get("id")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 749) if not id:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 750) id = "(none)"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 751) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 752) id = id[0]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 753)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 754) parent = self.get("parent")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 755) if not parent:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 756) parent = "(none)"
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 757) else:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 758) parent = parent[0]
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 759)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 760) print "'%s'" % id
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 761) print "Parent project: %s" % parent
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 762) print "Requirements: %s" % self.get("requirements")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 763) print "Default build: %s" % string.join(self.get("default-build"))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 764) print "Source location: %s" % string.join(self.get("source-location"))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 765) print "Projects to build: %s" % string.join(sorted(self.get("projects-to-build")))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 766)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 767) class ProjectRules:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 768) """Class keeping all rules that are made available to Jamfile."""
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 769)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 770) def __init__(self, registry):
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 771) self.registry = registry
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 772) self.manager_ = registry.manager
-38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 773) self.rules = {}
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 774) self.local_names = [x for x in self.__class__.__dict__
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 775) if x not in ["__init__", "init_project", "add_rule",
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 776) "error_reporting_wrapper", "add_rule_for_type"]]
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 777) self.all_names_ = [x for x in self.local_names]
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 778)
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 779) def add_rule_for_type(self, type):
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 780) rule_name = type.lower();
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 781)
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 782) def xpto (name, sources, requirements = [], default_build = None, usage_requirements = []):
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 783) return self.manager_.targets().create_typed_target(
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 784) type, self.registry.current(), name[0], sources,
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 785) requirements, default_build, usage_requirements)
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 786)
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 787) self.add_rule(type.lower(), xpto)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 788)
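This registration is what makes lower-cased type names such as 'exe' and 'lib' callable from Jamfiles; a minimal sketch with made-up targets:

    lib greet : greet.cpp : <link>static ;
    exe hello : hello.cpp greet ;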
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 789) def add_rule(self, name, callable):
-38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 790) self.rules[name] = callable
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 791) self.all_names_.append(name)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 792)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 793) def all_names(self):
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 794) return self.all_names_
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 795)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 796) def call_and_report_errors(self, callable, *args):
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 797) result = None
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 798) try:
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 799) self.manager_.errors().push_jamfile_context()
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 800) result = callable(*args)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 801) except ExceptionWithUserContext, e:
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 802) e.report()
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 803) except Exception, e:
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 804) try:
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 805) self.manager_.errors().handle_stray_exception (e)
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 806) except ExceptionWithUserContext, e:
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 807) e.report()
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 808) finally:
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 809) self.manager_.errors().pop_jamfile_context()
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 810)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 811) return result
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 812)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 813) def make_wrapper(self, callable):
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 814) """Given a free-standing function 'callable', return a new
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 815) callable that will call 'callable' and report all exceptions,
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 816) using 'call_and_report_errors'."""
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 817) def wrapper(*args):
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 818) self.call_and_report_errors(callable, *args)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 819) return wrapper
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 820)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 821) def init_project(self, project_module):
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 822)
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 823) for n in self.local_names:
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 824) # Using 'getattr' here gives us a bound method,
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 825) # while using self.__dict__[r] would give unbound one.
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 826) v = getattr(self, n)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 827) if callable(v):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 828) if n == "import_":
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 829) n = "import"
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 830) else:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 831) n = string.replace(n, "_", "-")
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 832)
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 833) bjam.import_rule(project_module, n,
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 834) self.make_wrapper(v))
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 835)
-38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 836) for n in self.rules:
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 837) bjam.import_rule(project_module, n,
-0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 838) self.make_wrapper(self.rules[n]))
-38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 839)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 840) def project(self, *args):
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 841)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 842) jamfile_module = self.registry.current().project_module()
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 843) attributes = self.registry.attributes(jamfile_module)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 844)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 845) id = None
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 846) if args and args[0]:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 847) id = args[0][0]
-092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 848) args = args[1:]
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 849)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 850) if id:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 851) if id[0] != '/':
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 852) id = '/' + id
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 853) self.registry.register_id (id, jamfile_module)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 854)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 855) explicit_build_dir = None
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 856) for a in args:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 857) if a:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 858) attributes.set(a[0], a[1:], exact=0)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 859) if a[0] == "build-dir":
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 860) explicit_build_dir = a[1]
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 861)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 862) # If '--build-dir' is specified, change the build dir for the project.
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 863) if self.registry.global_build_dir:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 864)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 865) location = attributes.get("location")
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 866) # A project with an empty location is a 'standalone' project, like
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 867) # user-config or qt. It has no build dir.
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 868) # If we tried to set a build dir for user-config, it would then
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 869) # be inherited, with weird or wrong consequences.
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 870) if location and location == attributes.get("project-root"):
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 871) # This is Jamroot.
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 872) if id:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 873) if explicit_build_dir and os.path.isabs(explicit_build_dir):
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 874) self.registry.manager.errors()(
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 875) """Absolute directory specified via 'build-dir' project attribute
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 876) Don't know how to combine that with the --build-dir option.""")
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 877)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 878) rid = id
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 879) if rid[0] == '/':
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 880) rid = rid[1:]
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 881)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 882) p = os.path.join(self.registry.global_build_dir,
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 883) rid, explicit_build_dir)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 884) attributes.set("build-dir", p, exact=1)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 885) elif explicit_build_dir:
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 886) self.registry.manager.errors()(
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 887) """When --build-dir is specified, the 'build-dir'
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 888) attribute is allowed only for top-level 'project' invocations""")
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 889)
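A sketch of a Jamroot that exercises the branch above (the id is made up): with --build-dir=/tmp/out on the command line, the effective build directory becomes /tmp/out/myproj/bin.

    project /myproj
        : build-dir bin
        ;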
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 890) def constant(self, name, value):
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 891) """Declare and set a project global constant.
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 892) Project global constants are normal variables but should
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 893) not be changed. They are applied to every child Jamfile."""
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 894) m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>"
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 895) self.registry.current().add_constant(name[0], value)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 896)
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 897) def path_constant(self, name, value):
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 898) """Declare and set a project global constant, whose value is a path. The
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 899) path is adjusted to be relative to the invocation directory. The given
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 900) value path is taken to be either absolute, or relative to this project
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 901) root."""
-0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 902) self.registry.current().add_constant(name[0], value, path=1)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 903)
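A short sketch of both rules with made-up values; 'path-constant' stores the value rebased against the project root, 'constant' stores it verbatim, and both are visible in child Jamfiles.

    constant APP_VERSION : 1.2.3 ;
    # DATA_DIR below becomes a path rooted at this project.
    path-constant DATA_DIR : data ;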
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 904) def use_project(self, id, where):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 905) # See the comment in 'load' for an explanation of why we record the
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 906) # parameters instead of loading the project now.
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 907) m = self.registry.current().project_module();
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 908) self.registry.used_projects[m].append((id, where))
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 909)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 910) def build_project(self, dir):
-1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 911) assert(isinstance(dir, list))
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 912) jamfile_module = self.registry.current().project_module()
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 913) attributes = self.registry.attributes(jamfile_module)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 914) now = attributes.get("projects-to-build")
-1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 915) attributes.set("projects-to-build", now + dir, exact=True)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 916)
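A sketch of how these declarations typically appear in a parent Jamfile (directory names are made up): 'use-project' merely records the id/location pair for later loading, while 'build-project' appends the directory to 'projects-to-build'.

    use-project /mylib : libs/mylib ;
    build-project app ;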
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 917) def explicit(self, target_names):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 918) t = self.registry.current()
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 919) for n in target_names:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 920) t.mark_target_as_explicit(n)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 921)
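For illustration (the target name is made up), a target marked explicit is built only when requested by name or pulled in as a dependency of another target:

    exe benchmarks : benchmarks.cpp ;
    explicit benchmarks ;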
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 922) def glob(self, wildcards, excludes=None):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 923) return self.registry.glob_internal(self.registry.current(),
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 924) wildcards, excludes, "glob")
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 925)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 926) def glob_tree(self, wildcards, excludes=None):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 927) bad = 0
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 928) for p in wildcards:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 929) if os.path.dirname(p):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 930) bad = 1
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 931)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 932) if excludes:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 933) for p in excludes:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 934) if os.path.dirname(p):
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 935) bad = 1
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 936)
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 937) if bad:
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 938) self.registry.manager().errors()(
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 939) "The patterns to 'glob-tree' may not include directory")
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 940) return self.registry.glob_internal(self.registry.current(),
-2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 941) wildcards, excludes, "glob_tree")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 942)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 943)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 944) def using(self, toolset, *args):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 945) # The module referred to by 'using' can be placed in
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 946) # the same directory as the Jamfile, and the user
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 947) # will expect the module to be found even though
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 948) # that directory is not in BOOST_BUILD_PATH.
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 949) # So temporarily change the search path.
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 950) jamfile_module = self.registry.current().project_module()
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 951) attributes = self.registry.attributes(jamfile_module)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 952) location = attributes.get("location")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 953)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 954) m = self.registry.load_module(toolset[0], [location])
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 955) if not m.__dict__.has_key("init"):
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 956) self.registry.manager.errors()(
-7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 957) "Tool module '%s' does not define the 'init' method" % toolset[0])
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 958) m.init(*args)
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 959)
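A sketch of the Jamfile side, assuming a hypothetical tool module 'mytool.py' placed next to the Jamfile (or found on the Boost.Build search path); its init() rule receives the remaining arguments:

    using mytool : /opt/mytool ;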
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 960)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 961) def import_(self, name, names_to_import=None, local_names=None):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 962)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 963) name = name[0]
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 964) jamfile_module = self.registry.current().project_module()
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 965) attributes = self.registry.attributes(jamfile_module)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 966) location = attributes.get("location")
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 967)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 968) m = self.registry.load_module(name, [location])
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 969)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 970) for f in m.__dict__:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 971) v = m.__dict__[f]
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 972) if callable(v):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 973) bjam.import_rule(jamfile_module, name + "." + f, v)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 974)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 975) if names_to_import:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 976) if not local_names:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 977) local_names = names_to_import
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 978)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 979) if len(names_to_import) != len(local_names):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 980) self.registry.manager.errors()(
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 981) """The number of names to import and local names do not match.""")
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 982)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 983) for n, l in zip(names_to_import, local_names):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 984) bjam.import_rule(jamfile_module, l, m.__dict__[n])
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 985)
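A sketch of the corresponding Jamfile usage, assuming a hypothetical helper module 'utils.py' that defines a rule 'report': its rules become callable under qualified names, and optionally under local aliases.

    import utils ;                       # exposes utils.report
    import utils : report : summarize ;  # also exposes it as 'summarize'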
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 986) def conditional(self, condition, requirements):
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 987) """Calculates conditional requirements for multiple requirements
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 988) at once. This is a shorthand to reduce duplication and to
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 989) keep an inline declarative syntax. For example:
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 990)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 991) lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 992) <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 993) """
-f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 994)
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 995) c = string.join(condition, ",")
-f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 996) return [c + ":" + r for r in requirements]
diff --git a/jam-files/boost-build/build/project.jam b/jam-files/boost-build/build/project.jam
deleted file mode 100644
index c9967613..00000000
--- a/jam-files/boost-build/build/project.jam
+++ /dev/null
@@ -1,1110 +0,0 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements project representation and loading. Each project is represented by:
-# - a module where all the Jamfile content live.
-# - an instance of 'project-attributes' class.
-# (given a module name, can be obtained using the 'attributes' rule)
-# - an instance of 'project-target' class (from targets.jam)
-# (given a module name, can be obtained using the 'target' rule)
-#
-# Typically, projects are created as a result of loading a Jamfile, which is done
-# by rules 'load' and 'initialize', below. First, the module for the Jamfile is
-# loaded and a new project-attributes instance is created. Some rules necessary
-# for the project are added to the module (see 'project-rules' module) at the bottom of
-# this file. Default project attributes are set (inheriting attributes of parent
-# project, if it exists). After that the Jamfile is read. It can declare its own
-# attributes using the 'project' rule which will be combined with any already
-# set attributes.
-#
-# The 'project' rule can also declare a project id which will be associated with
-# the project module.
-#
-# There can also be 'standalone' projects. They are created by calling
-# 'initialize' on an arbitrary module and not specifying their location. After
-# the call, the module can call the 'project' rule, declare main targets and
-# behave as a regular project except that, since it is not associated with any
-# location, it should not declare targets that are not prebuilt.
-#
-# The list of all loaded Jamfiles is stored in the .project-locations variable.
-# It is possible to obtain a module name for a location using the 'module-name'
-# rule. Standalone projects are not recorded and can only be referenced using
-# their project id.
-
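As a rough illustration of the layout this file manages (directory and target names are made up): a Jamroot defines the project root and optionally an id, and each subdirectory Jamfile is loaded into its own module and inherits the root's attributes.

    # Jamroot at the top of the source tree
    project /example : requirements <warnings>all ;
    build-project src ;

    # src/Jamfile -- loaded into its own module, inherits from the Jamroot
    exe app : main.cpp ;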
-import "class" : new ;
-import errors ;
-import modules ;
-import path ;
-import print ;
-import property-set ;
-import sequence ;
-
-
-# Loads the Jamfile at the given location. After loading, project global file
-# and Jamfiles needed by the requested one will be loaded recursively. If the
-# Jamfile at that location is loaded already, does nothing. Returns the project
-# module for the Jamfile.
-#
-rule load ( jamfile-location )
-{
- if --debug-loading in [ modules.peek : ARGV ]
- {
- ECHO "Loading Jamfile at" '$(jamfile-location)' ;
- }
-
- local module-name = [ module-name $(jamfile-location) ] ;
- # If Jamfile is already loaded, don't try again.
- if ! $(module-name) in $(.jamfile-modules)
- {
- load-jamfile $(jamfile-location) : $(module-name) ;
-
- # We want to make sure that child projects are loaded only after their parent
- # projects. In particular, because parent projects define attributes
- # which are inherited by children, and we don't want children to be
- # loaded before the parent has defined everything.
- #
- # While "build-project" and "use-project" can potentially refer to child
- # projects from parent projects, we don't immediately load child
- # projects when seeing those attributes. Instead, we record the minimal
- # information to be used only later.
- load-used-projects $(module-name) ;
- }
- return $(module-name) ;
-}
-
-
-rule load-used-projects ( module-name )
-{
- local used = [ modules.peek $(module-name) : .used-projects ] ;
- local location = [ attribute $(module-name) location ] ;
- import project ;
- while $(used)
- {
- local id = $(used[1]) ;
- local where = $(used[2]) ;
-
- project.use $(id) : [ path.root [ path.make $(where) ] $(location) ] ;
- used = $(used[3-]) ;
- }
-}
-
-
-# Note the use of character groups, as opposed to listing 'Jamroot' and
-# 'jamroot'. With the latter, we would get duplicate matches on Windows and
-# would have to eliminate duplicates.
-JAMROOT ?= [ modules.peek : JAMROOT ] ;
-JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
-
-
-# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
-#
-rule load-parent ( location )
-{
- local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ;
-
- if ! $(found)
- {
- ECHO error: Could not find parent for project at '$(location)' ;
- EXIT error: Did not find Jamfile.jam or Jamroot.jam in any parent
- directory. ;
- }
-
- return [ load $(found[1]:D) ] ;
-}
-
-
-# Makes the specified 'module' act as if it were a regularly loaded Jamfile at
-# 'location'. Reports an error if a Jamfile has already been loaded for that
-# location.
-#
-rule act-as-jamfile ( module : location )
-{
- if [ module-name $(location) ] in $(.jamfile-modules)
- {
- errors.error "Jamfile was already loaded for '$(location)'" ;
- }
- # Set up non-default mapping from location to module.
- .module.$(location) = $(module) ;
-
- # Add the location to the list of project locations so that we don't try to
- # reload the same Jamfile in the future.
- .jamfile-modules += [ module-name $(location) ] ;
-
- initialize $(module) : $(location) ;
-}
-
-
-# Returns the project module corresponding to the given project-id or plain
-# directory name. Returns nothing if such a project can not be found.
-#
-rule find ( name : current-location )
-{
- local project-module ;
-
- # Try interpreting name as project id.
- if [ path.is-rooted $(name) ]
- {
- project-module = $($(name).jamfile-module) ;
- }
-
- if ! $(project-module)
- {
- local location = [ path.root [ path.make $(name) ] $(current-location) ]
- ;
-
- # If no project is registered for the given location, try to load it.
- # First see if we have a Jamfile. If not, then see if we might have a
- # project root willing to act as a Jamfile. In that case, the project root
- # must be placed in the directory referred to by the id.
-
- project-module = [ module-name $(location) ] ;
- if ! $(project-module) in $(.jamfile-modules)
- {
- if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
- {
- project-module = [ load $(location) ] ;
- }
- else
- {
- project-module = ;
- }
- }
- }
-
- return $(project-module) ;
-}
-
-
-# Returns the name of the module corresponding to 'jamfile-location'. If no
-# module corresponds to that location yet, associates the default module name
-# with that location.
-#
-rule module-name ( jamfile-location )
-{
- if ! $(.module.$(jamfile-location))
- {
- # Root the path, so that locations are always unambiguous. Without this,
- # we can't decide if '../../exe/program1' and '.' are the same paths.
- jamfile-location = [ path.root $(jamfile-location) [ path.pwd ] ] ;
- .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ;
- }
- return $(.module.$(jamfile-location)) ;
-}
-
-
-# Default patterns to search for the Jamfiles to use for build declarations.
-#
-JAMFILE = [ modules.peek : JAMFILE ] ;
-JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
-
-
-# Find the Jamfile at the given location. This returns the exact names of all
-# the Jamfiles in the given directory. The optional parent-root argument causes
-# this to search not the given directory but the ones above it up to the
-# directory given in it.
-#
-rule find-jamfile (
- dir # The directory(s) to look for a Jamfile.
- parent-root ? # Optional flag indicating to search for the parent Jamfile.
- : no-errors ?
- )
-{
- # Glob for all the possible Jamfiles according to the match pattern.
- #
- local jamfile-glob = ;
- if $(parent-root)
- {
- if ! $(.parent-jamfile.$(dir))
- {
- .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE)
- ] ;
- }
- jamfile-glob = $(.parent-jamfile.$(dir)) ;
- }
- else
- {
- if ! $(.jamfile.$(dir))
- {
- .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
- }
- jamfile-glob = $(.jamfile.$(dir)) ;
-
- }
-
- local jamfile-to-load = $(jamfile-glob) ;
- # Multiple Jamfiles found in the same place. Warn about this and ensure we
- # use only one of them. As a temporary convenience measure, if there is a
- # Jamfile.v2 among the found files, suppress the warning and use it.
- #
- if $(jamfile-to-load[2-])
- {
- local v2-jamfiles = [ MATCH (.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam) : $(jamfile-to-load) ] ;
-
- if $(v2-jamfiles) && ! $(v2-jamfiles[2])
- {
- jamfile-to-load = $(v2-jamfiles) ;
- }
- else
- {
- local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
- ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
- "Loading the first one: '$(jamfile)'." ;
- }
-
- jamfile-to-load = $(jamfile-to-load[1]) ;
- }
-
- # Could not find it, error.
- #
- if ! $(no-errors) && ! $(jamfile-to-load)
- {
- errors.error Unable to load Jamfile.
- : Could not find a Jamfile in directory '$(dir)'.
- : Attempted to find it with pattern '"$(JAMFILE:J=" ")"'.
- : Please consult the documentation at 'http://www.boost.org'. ;
- }
-
- return $(jamfile-to-load) ;
-}
-
-
-# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
-# the file as indicated by the JAMFILE patterns. Effect of calling this rule
-# twice with the same 'dir' is undefined.
-#
-local rule load-jamfile (
- dir # The directory of the project Jamfile.
- : jamfile-module
- )
-{
- # See if the Jamfile is where it should be.
- #
- local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ;
- if ! $(jamfile-to-load)
- {
- jamfile-to-load = [ find-jamfile $(dir) ] ;
- }
-
- if $(jamfile-to-load[2])
- {
- errors.error "Multiple Jamfiles found at '$(dir)'"
- : "Filenames are: " $(jamfile-to-load:D=) ;
- }
-
- # Now load the Jamfile in its own context.
- # The call to 'initialize' may load the parent Jamfile, which might have
- # a 'use-project' statement that causes a second attempt to load the
- # same project we're loading now. Checking inside .jamfile-modules
- # prevents that second attempt from messing up.
- if ! $(jamfile-module) in $(.jamfile-modules)
- {
- .jamfile-modules += $(jamfile-module) ;
-
- # Initialize the Jamfile module before loading.
- #
- initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
- : $(jamfile-to-load:BS) ;
-
- local saved-project = $(.current-project) ;
-
- mark-as-user $(jamfile-module) ;
- modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
- if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ]
- {
- jamfile = [ find-jamfile $(dir) : no-errors ] ;
- if $(jamfile)
- {
- load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
- }
- }
-
- # Now do some checks.
- if $(.current-project) != $(saved-project)
- {
- errors.error "The value of the .current-project variable has magically"
- : "changed after loading a Jamfile. This means some of the targets"
- : "might be defined in the wrong project."
- : "after loading" $(jamfile-module)
- : "expected value" $(saved-project)
- : "actual value" $(.current-project) ;
- }
-
- if $(.global-build-dir)
- {
- local id = [ attribute $(jamfile-module) id ] ;
- local project-root = [ attribute $(jamfile-module) project-root ] ;
- local location = [ attribute $(jamfile-module) location ] ;
-
- if $(location) && $(project-root) = $(dir)
- {
- # This is Jamroot.
- if ! $(id)
- {
- ECHO "warning: the --build-dir option was specified" ;
- ECHO "warning: but Jamroot at '$(dir)'" ;
- ECHO "warning: specified no project id" ;
- ECHO "warning: the --build-dir option will be ignored" ;
- }
- }
- }
- }
-}
-
-
-rule mark-as-user ( module-name )
-{
- if USER_MODULE in [ RULENAMES ]
- {
- USER_MODULE $(module-name) ;
- }
-}
-
-
-rule load-aux ( module-name : file )
-{
- mark-as-user $(module-name) ;
-
- module $(module-name)
- {
- include $(2) ;
- local rules = [ RULENAMES $(1) ] ;
- IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
- }
-}
-
-
-.global-build-dir = [ MATCH --build-dir=(.*) : [ modules.peek : ARGV ] ] ;
-if $(.global-build-dir)
-{
- # If the option is specified several times, take the last value.
- .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
-}
-
-
-# Initialize the module for a project.
-#
-rule initialize (
- module-name # The name of the project module.
- : location ? # The location (directory) of the project to initialize. If
- # not specified, a standalone project will be initialized.
- : basename ?
- )
-{
- if --debug-loading in [ modules.peek : ARGV ]
- {
- ECHO "Initializing project '$(module-name)'" ;
- }
-
- # TODO: need to consider if standalone projects can do anything but define
- # prebuilt targets. If so, we need to give it a more sensible "location", so
- # that source paths are correct.
- location ?= "" ;
- # Create the module for the Jamfile first.
- module $(module-name)
- {
- }
- $(module-name).attributes = [ new project-attributes $(location)
- $(module-name) ] ;
- local attributes = $($(module-name).attributes) ;
-
- if $(location)
- {
- $(attributes).set source-location : [ path.make $(location) ] : exact ;
- }
- else if ! $(module-name) in test-config site-config user-config project-config
- {
- # This is a standalone project with known location. Set source location
- # so that it can declare targets. This is intended so that you can put
- # a .jam file in your sources and use it via 'using'. Standard modules
- # (in 'tools' subdir) may not assume source dir is set.
- local s = [ modules.binding $(module-name) ] ;
- if ! $(s)
- {
- errors.error "Could not determine project location $(module-name)" ;
- }
- $(attributes).set source-location : $(s:D) : exact ;
- }
-
- $(attributes).set requirements : [ property-set.empty ] : exact ;
- $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
-
- # Import rules common to all project modules from project-rules module,
- # defined at the end of this file.
- local rules = [ RULENAMES project-rules ] ;
- IMPORT project-rules : $(rules) : $(module-name) : $(rules) ;
-
- local jamroot ;
-
- local parent-module ;
- if $(module-name) = test-config
- {
- # No parent.
- }
- else if $(module-name) = site-config
- {
- parent-module = test-config ;
- }
- else if $(module-name) = user-config
- {
- parent-module = site-config ;
- }
- else if $(module-name) = project-config
- {
- parent-module = user-config ;
- }
- else
- {
- # We search for parent/project-root only if Jamfile was specified, i.e.
- # if the project is not standalone.
- if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ]
- {
- parent-module = [ load-parent $(location) ] ;
- }
- else
- {
- # It's either a jamroot or a standalone project. If it's a jamroot,
- # inherit from user-config.
- if $(location)
- {
- # If the project-config module exists, inherit from it.
- if $(project-config.attributes)
- {
- parent-module = project-config ;
- }
- else
- {
- parent-module = user-config ;
- }
- jamroot = true ;
- }
- }
- }
-
- if $(parent-module)
- {
- inherit-attributes $(module-name) : $(parent-module) ;
- $(attributes).set parent-module : $(parent-module) : exact ;
- }
-
- if $(jamroot)
- {
- $(attributes).set project-root : $(location) : exact ;
- }
-
- local parent ;
- if $(parent-module)
- {
- parent = [ target $(parent-module) ] ;
- }
-
- if ! $(.target.$(module-name))
- {
- .target.$(module-name) = [ new project-target $(module-name)
- : $(module-name) $(parent)
- : [ attribute $(module-name) requirements ] ] ;
-
- if --debug-loading in [ modules.peek : ARGV ]
- {
- ECHO "Assigned project target" $(.target.$(module-name))
- "to '$(module-name)'" ;
- }
- }
-
- .current-project = [ target $(module-name) ] ;
-}
-
-
-# Make 'project-module' inherit attributes of project root and parent module.
-#
-rule inherit-attributes ( project-module : parent-module )
-{
- local attributes = $($(project-module).attributes) ;
- local pattributes = [ attributes $(parent-module) ] ;
- # The parent module might be a locationless configuration module.
- if [ modules.binding $(parent-module) ]
- {
- $(attributes).set parent : [ path.parent
- [ path.make [ modules.binding $(parent-module) ] ] ] ;
- }
- local v = [ $(pattributes).get project-root ] ;
- $(attributes).set project-root : $(v) : exact ;
- $(attributes).set default-build
- : [ $(pattributes).get default-build ] ;
- $(attributes).set requirements
- : [ $(pattributes).get requirements ] : exact ;
- $(attributes).set usage-requirements
- : [ $(pattributes).get usage-requirements ] : exact ;
-
- local parent-build-dir = [ $(pattributes).get build-dir ] ;
- if $(parent-build-dir)
- {
- # Have to compute relative path from parent dir to our dir. Convert both
- # paths to absolute, since we cannot find relative path from ".." to
- # ".".
-
- local location = [ attribute $(project-module) location ] ;
- local parent-location = [ attribute $(parent-module) location ] ;
-
- local pwd = [ path.pwd ] ;
- local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
- local our-dir = [ path.root $(location) $(pwd) ] ;
- $(attributes).set build-dir : [ path.join $(parent-build-dir)
- [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
- }
-}
-
-
-# Associate the given id with the given project module.
-#
-rule register-id ( id : module )
-{
- $(id).jamfile-module = $(module) ;
-}
-
-
-# Class keeping all the attributes of a project.
-#
-# The standard attributes are "id", "location", "project-root", "parent",
-# "requirements", "default-build", "source-location" and "projects-to-build".
-#
-class project-attributes
-{
- import property ;
- import property-set ;
- import errors ;
- import path ;
- import print ;
- import sequence ;
- import project ;
-
- rule __init__ ( location project-module )
- {
- self.location = $(location) ;
- self.project-module = $(project-module) ;
- }
-
- # Set the named attribute from the specification given by the user. The
- # value actually set may be different.
- #
- rule set ( attribute : specification *
- : exact ? # Sets value from 'specification' without any processing.
- )
- {
- if $(exact)
- {
- self.$(attribute) = $(specification) ;
- }
- else if $(attribute) = "requirements"
- {
- local result = [ property-set.refine-from-user-input
- $(self.requirements) : $(specification)
- : $(self.project-module) : $(self.location) ] ;
-
- if $(result[1]) = "@error"
- {
- errors.error Requirements for project at '$(self.location)'
- conflict with parent's. : Explanation: $(result[2-]) ;
- }
- else
- {
- self.requirements = $(result) ;
- }
- }
- else if $(attribute) = "usage-requirements"
- {
- local unconditional ;
- for local p in $(specification)
- {
- local split = [ property.split-conditional $(p) ] ;
- split ?= nothing $(p) ;
- unconditional += $(split[2]) ;
- }
-
- local non-free = [ property.remove free : $(unconditional) ] ;
- if $(non-free)
- {
- errors.error usage-requirements $(specification) have non-free
- properties $(non-free) ;
- }
- local t = [ property.translate-paths $(specification)
- : $(self.location) ] ;
- if $(self.usage-requirements)
- {
- self.usage-requirements = [ property-set.create
- [ $(self.usage-requirements).raw ] $(t) ] ;
- }
- else
- {
- self.usage-requirements = [ property-set.create $(t) ] ;
- }
- }
- else if $(attribute) = "default-build"
- {
- self.default-build = [ property.make $(specification) ] ;
- }
- else if $(attribute) = "source-location"
- {
- self.source-location = ;
- for local src-path in $(specification)
- {
- self.source-location += [ path.root [ path.make $(src-path) ]
- $(self.location) ] ;
- }
- }
- else if $(attribute) = "build-dir"
- {
- self.build-dir = [ path.root
- [ path.make $(specification) ] $(self.location) ] ;
- }
- else if $(attribute) = "id"
- {
- id = [ path.root $(specification) / ] ;
- project.register-id $(id) : $(self.project-module) ;
- self.id = $(id) ;
- }
- else if ! $(attribute) in "default-build" "location" "parent"
- "projects-to-build" "project-root" "source-location"
- {
- errors.error Invalid project attribute '$(attribute)' specified for
- project at '$(self.location)' ;
- }
- else
- {
- self.$(attribute) = $(specification) ;
- }
- }
-
- # Returns the value of the given attribute.
- #
- rule get ( attribute )
- {
- return $(self.$(attribute)) ;
- }
-
- # Prints the project attributes.
- #
- rule print ( )
- {
- local id = $(self.id) ; id ?= (none) ;
- local parent = $(self.parent) ; parent ?= (none) ;
- print.section "'"$(id)"'" ;
- print.list-start ;
- print.list-item "Parent project:" $(parent) ;
- print.list-item "Requirements:" [ $(self.requirements).raw ] ;
- print.list-item "Default build:" $(self.default-build) ;
- print.list-item "Source location:" $(self.source-location) ;
- print.list-item "Projects to build:"
- [ sequence.insertion-sort $(self.projects-to-build) ] ;
- print.list-end ;
- }
-}
-
-
-# Returns the project which is currently being loaded.
-#
-rule current ( )
-{
- return $(.current-project) ;
-}
-
-
-# Temporarily changes the current project to 'project'. Should be followed by
-# 'pop-current'.
-#
-rule push-current ( project )
-{
- .saved-current-project += $(.current-project) ;
- .current-project = $(project) ;
-}
-
-
-rule pop-current ( )
-{
- .current-project = $(.saved-current-project[-1]) ;
- .saved-current-project = $(.saved-current-project[1--2]) ;
-}
-
-
-# Returns the project-attribute instance for the specified Jamfile module.
-#
-rule attributes ( project )
-{
- return $($(project).attributes) ;
-}
-
-
-# Returns the value of the specified attribute in the specified Jamfile module.
-#
-rule attribute ( project attribute )
-{
- return [ $($(project).attributes).get $(attribute) ] ;
-}
-
-
-# Returns the project target corresponding to the 'project-module'.
-#
-rule target ( project-module )
-{
- if ! $(.target.$(project-module))
- {
- .target.$(project-module) = [ new project-target $(project-module)
- : $(project-module)
- : [ attribute $(project-module) requirements ] ] ;
- }
- return $(.target.$(project-module)) ;
-}
-
-
-# Use/load a project.
-#
-rule use ( id : location )
-{
- local saved-project = $(.current-project) ;
- local project-module = [ project.load $(location) ] ;
- local declared-id = [ project.attribute $(project-module) id ] ;
-
- if ! $(declared-id) || $(declared-id) != $(id)
- {
- # The project at 'location' either has no id or that id is not equal to
- # the 'id' parameter.
- if $($(id).jamfile-module) && ( $($(id).jamfile-module) !=
- $(project-module) )
- {
- errors.user-error Attempt to redeclare already existing project id
- '$(id)'
- location '$(location)' ;
- }
- $(id).jamfile-module = $(project-module) ;
- }
- .current-project = $(saved-project) ;
-}
-
-
-# Defines a Boost.Build extension project. Such extensions usually contain
-# library targets and features that can be used by many people. Even though
-# extensions are really projects, they can be initialized as a module would be
-# with the "using" (project.project-rules.using) mechanism.
-#
-rule extension ( id : options * : * )
-{
- # The caller is a standalone module for the extension.
- local mod = [ CALLER_MODULE ] ;
-
- # We need to do the rest within the extension module.
- module $(mod)
- {
- import path ;
-
- # Find the root project.
- local root-project = [ project.current ] ;
- root-project = [ $(root-project).project-module ] ;
- while
- [ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config
- {
- root-project = [ project.attribute $(root-project) parent-module ] ;
- }
-
- # Create the project data, and bring in the project rules into the
- # module.
- project.initialize $(__name__) : [ path.join [ project.attribute
- $(root-project) location ] ext $(1:L) ] ;
-
- # Create the project itself, i.e. the attributes. All extensions are
- # created in the "/ext" project space.
- project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
- $(9) ;
- local attributes = [ project.attributes $(__name__) ] ;
-
- # Inherit from the root project of whomever is defining us.
- project.inherit-attributes $(__name__) : $(root-project) ;
- $(attributes).set parent-module : $(root-project) : exact ;
- }
-}
-
-
-rule glob-internal ( project : wildcards + : excludes * : rule-name )
-{
- local location = [ $(project).get source-location ] ;
-
- local result ;
- local paths = [ path.$(rule-name) $(location) :
- [ sequence.transform path.make : $(wildcards) ] :
- [ sequence.transform path.make : $(excludes) ] ] ;
- if $(wildcards:D) || $(rule-name) != glob
- {
- # The paths we have found are relative to the current directory, but the
- # names specified in the sources list are assumed to be relative to the
- # source directory of the corresponding project. So, just make the names
- # absolute.
- for local p in $(paths)
- {
- # If the path is below source location, use relative path.
- # Otherwise, use full path just to avoid any ambiguities.
- local rel = [ path.relative $(p) $(location) : no-error ] ;
- if $(rel) = not-a-child
- {
- result += [ path.root $(p) [ path.pwd ] ] ;
- }
- else
- {
- result += $(rel) ;
- }
- }
- }
- else
- {
- # There were no wildcards in the directory path, so the files are all in
- # the source directory of the project. Just drop the directory, instead
- # of making paths absolute.
- result = $(paths:D="") ;
- }
-
- return $(result) ;
-}
-
-
-# This module defines rules common to all projects.
-#
-module project-rules
-{
- rule using ( toolset-module : * )
- {
- import toolset ;
- import modules ;
- import project ;
-
- # Temporarily change the search path so the module referred to by
- # 'using' can be placed in the same directory as Jamfile. User will
- # expect the module to be found even though the directory is not in
- # BOOST_BUILD_PATH.
- local x = [ modules.peek : BOOST_BUILD_PATH ] ;
- local caller = [ CALLER_MODULE ] ;
- local caller-location = [ modules.binding $(caller) ] ;
- modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ;
- toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- modules.poke : BOOST_BUILD_PATH : $(x) ;
-
- # The above might have clobbered .current-project. Restore the correct
- # value.
- modules.poke project : .current-project
- : [ project.target $(caller) ] ;
- }
-
- import modules ;
-
- rule import ( * : * : * )
- {
- modules.import project ;
-
- local caller = [ CALLER_MODULE ] ;
- local saved = [ modules.peek project : .current-project ] ;
- module $(caller)
- {
- modules.import $(1) : $(2) : $(3) ;
- }
- modules.poke project : .current-project : $(saved) ;
- }
-
- rule project ( id ? : options * : * )
- {
- import errors ;
- import path ;
- import project ;
-
- local caller = [ CALLER_MODULE ] ;
- local attributes = [ project.attributes $(caller) ] ;
- if $(id)
- {
- $(attributes).set id : $(id) ;
- }
-
- local explicit-build-dir ;
-
- for n in 2 3 4 5 6 7 8 9
- {
- local option = $($(n)) ;
- if $(option)
- {
- $(attributes).set $(option[1]) : $(option[2-]) ;
- }
- if $(option[1]) = "build-dir"
- {
- explicit-build-dir = [ path.make $(option[2-]) ] ;
- }
- }
-
- # If '--build-dir' is specified, change the build dir for the project.
- local global-build-dir =
- [ modules.peek project : .global-build-dir ] ;
-
- if $(global-build-dir)
- {
- local location = [ $(attributes).get location ] ;
- # Project with an empty location is a 'standalone' project such as
- # user-config or qt. It has no build dir. If we try to set build dir
- # for user-config, we shall then try to inherit it, with either
- # weird or wrong consequences.
- if $(location) && $(location) = [ $(attributes).get project-root ]
- {
- # Re-read the project id, since it might have been changed in
- # the project's attributes.
- id = [ $(attributes).get id ] ;
- # This is Jamroot.
- if $(id)
- {
- if $(explicit-build-dir) &&
- [ path.is-rooted $(explicit-build-dir) ]
- {
- errors.user-error Absolute directory specified via
- 'build-dir' project attribute : Do not know how to
- combine that with the --build-dir option. ;
- }
- # Strip the leading slash from id.
- local rid = [ MATCH /(.*) : $(id) ] ;
- local p = [ path.join
- $(global-build-dir) $(rid) $(explicit-build-dir) ] ;
-
- $(attributes).set build-dir : $(p) : exact ;
- }
- }
- else
- {
- # Not Jamroot.
- if $(explicit-build-dir)
- {
- errors.user-error When --build-dir is specified, the
- 'build-dir' project : attribute is allowed only for
- top-level 'project' invocations ;
- }
- }
- }
- }
-
- # Declare and set a project global constant. Project global constants are
- # normal variables but should not be changed. They are applied to every
- # child Jamfile.
- #
- rule constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- )
- {
- import project ;
- local caller = [ CALLER_MODULE ] ;
- local p = [ project.target $(caller) ] ;
- $(p).add-constant $(name) : $(value) ;
- }
-
- # Declare and set a project global constant, whose value is a path. The path
- # is adjusted to be relative to the invocation directory. The given value
- # path is taken to be either absolute, or relative to this project root.
- #
- rule path-constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- )
- {
- import project ;
- local caller = [ CALLER_MODULE ] ;
- local p = [ project.target $(caller) ] ;
- $(p).add-constant $(name) : $(value) : path ;
- }
-
- rule use-project ( id : where )
- {
- import modules ;
- # See comment in 'load' for explanation.
- local caller = [ CALLER_MODULE ] ;
- modules.poke $(caller) : .used-projects :
- [ modules.peek $(caller) : .used-projects ]
- $(id) $(where) ;
- }
-
- rule build-project ( dir )
- {
- import project ;
- local caller = [ CALLER_MODULE ] ;
- local attributes = [ project.attributes $(caller) ] ;
-
- local now = [ $(attributes).get projects-to-build ] ;
- $(attributes).set projects-to-build : $(now) $(dir) ;
- }
-
- rule explicit ( target-names * )
- {
- import project ;
- # If 'explicit' is used in a helper rule defined in Jamroot and
- # inherited by children, then most of the time we want 'explicit' to
- # operate on the Jamfile where the helper rule is invoked.
- local t = [ project.current ] ;
- for local n in $(target-names)
- {
- $(t).mark-target-as-explicit $(n) ;
- }
- }
-
- rule always ( target-names * )
- {
- import project ;
- local t = [ project.current ] ;
- for local n in $(target-names)
- {
- $(t).mark-target-as-always $(n) ;
- }
- }
-
- rule glob ( wildcards + : excludes * )
- {
- import project ;
- return [ project.glob-internal [ project.current ] : $(wildcards) :
- $(excludes) : glob ] ;
- }
-
- rule glob-tree ( wildcards + : excludes * )
- {
- import project ;
-
- if $(wildcards:D) || $(excludes:D)
- {
- errors.user-error The patterns to 'glob-tree' may not include
- directories ;
- }
- return [ project.glob-internal [ project.current ] : $(wildcards) :
- $(excludes) : glob-tree ] ;
- }
-
- # Calculates conditional requirements for multiple requirements at once.
- # This is a shorthand to reduce duplication and to keep an inline
- # declarative syntax. For example:
- #
- # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
- # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
- #
- rule conditional ( condition + : requirements * )
- {
- local condition = $(condition:J=,) ;
- if [ MATCH (:) : $(condition) ]
- {
- return $(condition)$(requirements) ;
- }
- else
- {
- return $(condition):$(requirements) ;
- }
- }
-
- rule option ( name : value )
- {
- if $(__name__) != site-config && $(__name__) != user-config && $(__name__) != project-config
- {
- import errors ;
- errors.error "The 'option' rule may be used only in site-config or user-config" ;
- }
- import option ;
- option.set $(name) : $(value) ;
- }
-}
diff --git a/jam-files/boost-build/build/project.py b/jam-files/boost-build/build/project.py
deleted file mode 100644
index 1e1e16fa..00000000
--- a/jam-files/boost-build/build/project.py
+++ /dev/null
@@ -1,1120 +0,0 @@
-# Status: ported.
-# Base revision: 64488
-
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements project representation and loading.
-# Each project is represented by
-# - a module where all the Jamfile content lives.
-# - an instance of 'project-attributes' class.
-# (given module name, can be obtained by 'attributes' rule)
-# - an instance of 'project-target' class (from targets.jam)
-# (given a module name, can be obtained by 'target' rule)
-#
-# Typically, projects are created as a result of loading a Jamfile, which is
-# done by the rules 'load' and 'initialize' below. First, a module for the
-# Jamfile is loaded and a new project-attributes instance is created. Some
-# rules necessary for the project are added to the module (see the
-# 'project-rules' module) at the bottom of this file.
-# Default project attributes are set (inheriting attributes of the parent
-# project, if it exists). After that, the Jamfile is read. It can declare its
-# own attributes, via the 'project' rule, which will be combined with the
-# already set attributes.
-#
-# The 'project' rule can also declare a project id, which will be associated
-# with the project module.
-#
-# There can also be 'standalone' projects. They are created by calling
-# 'initialize' on an arbitrary module without specifying a location. After the
-# call, the module can call the 'project' rule, declare main targets and behave
-# like a regular project. However, since it is not associated with any
-# location, it is better to declare only prebuilt targets.
-#
-# The list of all loaded Jamfiles is stored in the variable .project-locations.
-# It is possible to obtain the module name for a location using the
-# 'module-name' rule. Standalone projects are not recorded; the only way to use
-# them is by project id.
-
-import b2.util.path
-from b2.build import property_set, property
-from b2.build.errors import ExceptionWithUserContext
-import b2.build.targets
-
-import bjam
-
-import re
-import sys
-import os
-import string
-import imp
-import traceback
-import b2.util.option as option
-
-from b2.util import record_jam_to_value_mapping, qualify_jam_action
-from b2.manager import get_manager
-
-class ProjectRegistry:
-
- def __init__(self, manager, global_build_dir):
- self.manager = manager
- self.global_build_dir = global_build_dir
- self.project_rules_ = ProjectRules(self)
-
- # The target corresponding to the project being loaded now
- self.current_project = None
-
- # The set of names of loaded project modules
- self.jamfile_modules = {}
-
- # Mapping from location to module name
- self.location2module = {}
-
- # Mapping from project id to project module
- self.id2module = {}
-
- # Map from Jamfile directory to parent Jamfile/Jamroot
- # location.
- self.dir2parent_jamfile = {}
-
- # Map from directory to the name of Jamfile in
- # that directory (or None).
- self.dir2jamfile = {}
-
- # Map from project module to attributes object.
- self.module2attributes = {}
-
- # Map from project module to target for the project
- self.module2target = {}
-
- # Map from names to Python modules, for modules loaded
- # via 'using' and 'import' rules in Jamfiles.
- self.loaded_tool_modules_ = {}
-
- self.loaded_tool_module_path_ = {}
-
- # Map from project target to the list of
- # (id,location) pairs corresponding to all 'use-project'
- # invocations.
- # TODO: should not have a global map, keep this
- # in ProjectTarget.
- self.used_projects = {}
-
- self.saved_current_project = []
-
- self.JAMROOT = self.manager.getenv("JAMROOT");
-
- # Note the use of character groups, as opposed to listing
- # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
- # matches on windows and would have to eliminate duplicates.
- if not self.JAMROOT:
- self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]
-
- # Default patterns to search for the Jamfiles to use for build
- # declarations.
- self.JAMFILE = self.manager.getenv("JAMFILE")
-
- if not self.JAMFILE:
- self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
- "[Jj]amfile.jam"]
-
-
- def load (self, jamfile_location):
- """Loads the Jamfile at the given location. After loading, the project
- global file and any Jamfiles needed by the loaded one will be loaded
- recursively. If the Jamfile at that location is already loaded, does
- nothing. Returns the project module for the Jamfile."""
-
- absolute = os.path.join(os.getcwd(), jamfile_location)
- absolute = os.path.normpath(absolute)
- jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)
-
- if "--debug-loading" in self.manager.argv():
- print "Loading Jamfile at '%s'" % jamfile_location
-
-
- mname = self.module_name(jamfile_location)
- # If Jamfile is already loaded, don't try again.
- if not mname in self.jamfile_modules:
-
- self.load_jamfile(jamfile_location, mname)
-
- # We want to make sure that child projects are loaded only
- # after their parent projects. In particular, because parent projects
- # define attributes which are inherited by children, we don't
- # want children to be loaded before their parents have defined everything.
- #
- # While "build-project" and "use-project" can potentially refer
- # to child projects from parent projects, we don't immediately
- # load child projects when seeing those attributes. Instead,
- # we record the minimal information that will be used only later.
-
- self.load_used_projects(mname)
-
- return mname
-
- def load_used_projects(self, module_name):
- # local used = [ modules.peek $(module-name) : .used-projects ] ;
- used = self.used_projects[module_name]
-
- location = self.attribute(module_name, "location")
- for u in used:
- id = u[0]
- where = u[1]
-
- self.use(id, os.path.join(location, where))
-
- def load_parent(self, location):
- """Loads parent of Jamfile at 'location'.
- Issues an error if nothing is found."""
-
- found = b2.util.path.glob_in_parents(
- location, self.JAMROOT + self.JAMFILE)
-
- if not found:
- print "error: Could not find parent for project at '%s'" % location
- print "error: Did not find Jamfile or project-root.jam in any parent directory."
- sys.exit(1)
-
- return self.load(os.path.dirname(found[0]))
-
- def act_as_jamfile(self, module, location):
- """Makes the specified 'module' act as if it were a regularly loaded Jamfile
- at 'location'. If a Jamfile has already been loaded for that location,
- it is an error."""
-
- if self.module_name(location) in self.jamfile_modules:
- self.manager.errors()(
- "Jamfile was already loaded for '%s'" % location)
-
- # Set up non-default mapping from location to module.
- self.location2module[location] = module
-
- # Remember that a Jamfile module now exists for this location, so that
- # we don't try to load one in the future (jamfile_modules is a dict
- # keyed by module name, not a list).
- self.jamfile_modules[self.module_name(location)] = True
-
- self.initialize(module, location)
-
- def find(self, name, current_location):
- """Given 'name' which can be project-id or plain directory name,
- return project module corresponding to that id or directory.
- Returns None if the project is not found."""
-
- project_module = None
-
- # Try interpreting name as project id.
- if name[0] == '/':
- project_module = self.id2module.get(name)
-
- if not project_module:
- location = os.path.join(current_location, name)
- # If no project is registered for the given location, try to
- # load it. First see if we have a Jamfile. If not, we might have a
- # project root willing to act as a Jamfile. In that case, the project
- # root must be placed in the directory referred to by the id.
-
- project_module = self.module_name(location)
- if not project_module in self.jamfile_modules:
- if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
- project_module = self.load(location)
- else:
- project_module = None
-
- return project_module
-
- def module_name(self, jamfile_location):
- """Returns the name of module corresponding to 'jamfile-location'.
- If no module corresponds to location yet, associates default
- module name with that location."""
- module = self.location2module.get(jamfile_location)
- if not module:
- # Root the path, so that locations are always unambiguous.
- # Without this, we can't decide if '../../exe/program1' and '.'
- # are the same paths, or not.
- jamfile_location = os.path.realpath(
- os.path.join(os.getcwd(), jamfile_location))
- module = "Jamfile<%s>" % jamfile_location
- self.location2module[jamfile_location] = module
- return module
-
- def find_jamfile (self, dir, parent_root=0, no_errors=0):
- """Find the Jamfile at the given location. This returns the
- exact names of all the Jamfiles in the given directory. The optional
- parent_root argument causes this to search not the given directory
- but the ones above it."""
-
- # Glob for all the possible Jamfiles according to the match pattern.
- #
- jamfile_glob = None
- if parent_root:
- parent = self.dir2parent_jamfile.get(dir)
- if not parent:
- parent = b2.util.path.glob_in_parents(dir,
- self.JAMFILE)
- self.dir2parent_jamfile[dir] = parent
- jamfile_glob = parent
- else:
- jamfile = self.dir2jamfile.get(dir)
- if not jamfile:
- jamfile = b2.util.path.glob([dir], self.JAMFILE)
- self.dir2jamfile[dir] = jamfile
- jamfile_glob = jamfile
-
- if len(jamfile_glob) > 1:
- # Multiple Jamfiles found in the same place. Warn about this.
- # And ensure we use only one of them.
- # As a temporary convenience measure, if there is a Jamfile.v2 among the
- # found files, suppress the warning and use it.
- #
- pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
- v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
- if len(v2_jamfiles) == 1:
- jamfile_glob = v2_jamfiles
- else:
- print """warning: Found multiple Jamfiles at '%s'!""" % (dir)
- for j in jamfile_glob:
- print " -", j
- print "Loading the first one"
-
- # Could not find it, error.
- if not no_errors and not jamfile_glob:
- self.manager.errors()(
- """Unable to load Jamfile.
-Could not find a Jamfile in directory '%s'
-Attempted to find it with pattern '%s'.
-Please consult the documentation at 'http://boost.org/boost-build2'."""
- % (dir, string.join(self.JAMFILE)))
-
- if jamfile_glob:
- return jamfile_glob[0]
-
- def load_jamfile(self, dir, jamfile_module):
- """Load a Jamfile at the given directory. Returns nothing.
- Will attempt to load the file as indicated by the JAMFILE patterns.
- Effect of calling this rule twice with the same 'dir' is undefined."""
-
- # See if the Jamfile is where it should be.
- is_jamroot = False
- jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
- if not jamfile_to_load:
- jamfile_to_load = self.find_jamfile(dir)
- else:
- if len(jamfile_to_load) > 1:
- get_manager().errors()("Multiple Jamfiles found at '%s'\n"
- "Filenames are: %s"
- % (dir, [os.path.basename(j) for j in jamfile_to_load]))
-
- is_jamroot = True
- jamfile_to_load = jamfile_to_load[0]
-
- dir = os.path.dirname(jamfile_to_load)
- if not dir:
- dir = "."
-
- self.used_projects[jamfile_module] = []
-
- # Now load the Jamfile in its own context.
- # The call to 'initialize' may load the parent Jamfile, which might have a
- # 'use-project' statement that causes a second attempt to load the
- # same project we're loading now. Checking inside .jamfile-modules
- # prevents that second attempt from messing things up.
- if not jamfile_module in self.jamfile_modules:
- self.jamfile_modules[jamfile_module] = True
-
- # Initialize the jamfile module before loading.
- #
- self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
-
- saved_project = self.current_project
-
- bjam.call("load", jamfile_module, jamfile_to_load)
- basename = os.path.basename(jamfile_to_load)
-
- if is_jamroot:
- jamfile = self.find_jamfile(dir, no_errors=True)
- if jamfile:
- bjam.call("load", jamfile_module, jamfile)
-
- # Now do some checks
- if self.current_project != saved_project:
- self.manager.errors()(
-"""The value of the .current-project variable
-has magically changed after loading a Jamfile.
-This means some of the targets might be defined in the wrong project.
-after loading %s
-expected value %s
-actual value %s""" % (jamfile_module, saved_project, self.current_project))
-
- if self.global_build_dir:
- id = self.attributeDefault(jamfile_module, "id", None)
- project_root = self.attribute(jamfile_module, "project-root")
- location = self.attribute(jamfile_module, "location")
-
- if location and project_root == dir:
- # This is Jamroot
- if not id:
- # FIXME: go via errors module, so that contexts are
- # shown?
- print "warning: the --build-dir option was specified"
- print "warning: but Jamroot at '%s'" % dir
- print "warning: specified no project id"
- print "warning: the --build-dir option will be ignored"
-
-
- def load_standalone(self, jamfile_module, file):
- """Loads 'file' as a standalone project that has no location
- associated with it. This is mostly useful for user-config.jam:
- it should be able to define targets, but although it has
- a location in the filesystem, we don't want any build to
- happen in the user's HOME, for example.
-
- The caller is required to never call this method twice on
- the same file.
- """
-
- self.used_projects[jamfile_module] = []
- bjam.call("load", jamfile_module, file)
- self.load_used_projects(jamfile_module)
-
- def is_jamroot(self, basename):
- match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
- if match:
- return 1
- else:
- return 0
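For illustration, the pattern matching used by is_jamroot can be exercised on its own; the file names below are made up and the patterns are the defaults set in __init__ above:

    import re

    JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]

    def is_jamroot(basename):
        # Same test as above: any of the default patterns matching the basename.
        return bool([pat for pat in JAMROOT if re.match(pat, basename)])

    print is_jamroot("Jamroot.jam")   # True
    print is_jamroot("Jamfile.v2")    # False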
-
- def initialize(self, module_name, location=None, basename=None):
- """Initialize the module for a project.
-
- module-name is the name of the project module.
- location is the location (directory) of the project to initialize.
- If not specified, a standalone project will be initialized
- """
-
- if "--debug-loading" in self.manager.argv():
- print "Initializing project '%s'" % module_name
-
- # TODO: need to consider if standalone projects can do anything but define
- # prebuilt targets. If so, we need to give them a more sensible "location", so
- # that source paths are correct.
- if not location:
- location = ""
-
- attributes = ProjectAttributes(self.manager, location, module_name)
- self.module2attributes[module_name] = attributes
-
- python_standalone = False
- if location:
- attributes.set("source-location", [location], exact=1)
- elif not module_name in ["test-config", "site-config", "user-config", "project-config"]:
- # This is a standalone project with known location. Set source location
- # so that it can declare targets. This is intended so that you can put
- # a .jam file in your sources and use it via 'using'. Standard modules
- # (in 'tools' subdir) may not assume source dir is set.
- module = sys.modules[module_name]
- attributes.set("source-location", self.loaded_tool_module_path_[module_name], exact=1)
- python_standalone = True
-
- attributes.set("requirements", property_set.empty(), exact=True)
- attributes.set("usage-requirements", property_set.empty(), exact=True)
- attributes.set("default-build", property_set.empty(), exact=True)
- attributes.set("projects-to-build", [], exact=True)
- attributes.set("project-root", None, exact=True)
- attributes.set("build-dir", None, exact=True)
-
- self.project_rules_.init_project(module_name, python_standalone)
-
- jamroot = False
-
- parent_module = None;
- if module_name == "test-config":
- # No parent
- pass
- elif module_name == "site-config":
- parent_module = "test-config"
- elif module_name == "user-config":
- parent_module = "site-config"
- elif module_name == "project-config":
- parent_module = "user-config"
- elif location and not self.is_jamroot(basename):
- # We search for parent/project-root only if a Jamfile was specified,
- # i.e. if the project is not standalone.
- parent_module = self.load_parent(location)
- else:
- # It's either a jamroot or a standalone project.
- # If it's a jamroot, inherit from user-config.
- if location:
- # If the project-config module exists, inherit from it.
- if self.module2attributes.has_key("project-config"):
- parent_module = "project-config"
- else:
- parent_module = "user-config"
-
- jamroot = True
-
- if parent_module:
- self.inherit_attributes(module_name, parent_module)
- attributes.set("parent-module", parent_module, exact=1)
-
- if jamroot:
- attributes.set("project-root", location, exact=1)
-
- parent = None
- if parent_module:
- parent = self.target(parent_module)
-
- if not self.module2target.has_key(module_name):
- target = b2.build.targets.ProjectTarget(self.manager,
- module_name, module_name, parent,
- self.attribute(module_name,"requirements"),
- # FIXME: why do we need to pass this? It's not
- # passed in the jam code.
- self.attribute(module_name, "default-build"))
- self.module2target[module_name] = target
-
- self.current_project = self.target(module_name)
-
- def inherit_attributes(self, project_module, parent_module):
- """Make 'project-module' inherit attributes of project
- root and parent module."""
-
- attributes = self.module2attributes[project_module]
- pattributes = self.module2attributes[parent_module]
-
- # The parent module might be a locationless user-config.
- # FIXME:
- #if [ modules.binding $(parent-module) ]
- #{
- # $(attributes).set parent : [ path.parent
- # [ path.make [ modules.binding $(parent-module) ] ] ] ;
- # }
-
- attributes.set("project-root", pattributes.get("project-root"), exact=True)
- attributes.set("default-build", pattributes.get("default-build"), exact=True)
- attributes.set("requirements", pattributes.get("requirements"), exact=True)
- attributes.set("usage-requirements",
- pattributes.get("usage-requirements"), exact=1)
-
- parent_build_dir = pattributes.get("build-dir")
-
- if parent_build_dir:
- # Have to compute relative path from parent dir to our dir
- # Convert both paths to absolute, since we cannot
- # find relative path from ".." to "."
-
- location = attributes.get("location")
- parent_location = pattributes.get("location")
-
- our_dir = os.path.join(os.getcwd(), location)
- parent_dir = os.path.join(os.getcwd(), parent_location)
-
- build_dir = os.path.join(parent_build_dir,
- os.path.relpath(our_dir, parent_dir))
- attributes.set("build-dir", build_dir, exact=True)
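As a rough, self-contained illustration of the build-dir computation above (the directory names here are made up and need not exist on disk; output shown for a POSIX system):

    import os

    parent_build_dir = "bin"                              # parent's build-dir attribute
    parent_dir = os.path.join(os.getcwd(), "proj")        # absolute parent location
    our_dir = os.path.join(os.getcwd(), "proj/libs/net")  # absolute child location

    # Same composition as inherit_attributes: the parent's build dir joined with
    # the child's path relative to the parent.
    print os.path.join(parent_build_dir, os.path.relpath(our_dir, parent_dir))
    # Expected output: bin/libs/net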
-
- def register_id(self, id, module):
- """Associate the given id with the given project module."""
- self.id2module[id] = module
-
- def current(self):
- """Returns the project which is currently being loaded."""
- return self.current_project
-
- def set_current(self, c):
- self.current_project = c
-
- def push_current(self, project):
- """Temporarily changes the current project to 'project'. Should
- be followed by 'pop-current'."""
- self.saved_current_project.append(self.current_project)
- self.current_project = project
-
- def pop_current(self):
- self.current_project = self.saved_current_project[-1]
- del self.saved_current_project[-1]
-
- def attributes(self, project):
- """Returns the project-attribute instance for the
- specified jamfile module."""
- return self.module2attributes[project]
-
- def attribute(self, project, attribute):
- """Returns the value of the specified attribute in the
- specified jamfile module."""
- try:
- return self.module2attributes[project].get(attribute)
- except KeyError:
- raise BaseException("No attribute '%s' for project %s"
- % (attribute, project))
-
- def attributeDefault(self, project, attribute, default):
- """Returns the value of the specified attribute in the
- specified jamfile module, or 'default' if the attribute is not set."""
- return self.module2attributes[project].getDefault(attribute, default)
-
- def target(self, project_module):
- """Returns the project target corresponding to the 'project-module'."""
- if not self.module2target.has_key(project_module):
- self.module2target[project_module] = \
- b2.build.targets.ProjectTarget(project_module, project_module,
- self.attribute(project_module, "requirements"))
-
- return self.module2target[project_module]
-
- def use(self, id, location):
- # Use/load a project.
- saved_project = self.current_project
- project_module = self.load(location)
- declared_id = self.attributeDefault(project_module, "id", "")
-
- if not declared_id or declared_id != id:
- # The project at 'location' either has no id or
- # that id is not equal to the 'id' parameter.
- if self.id2module.has_key(id) and self.id2module[id] != project_module:
- self.manager.errors()(
-"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location))
- self.id2module[id] = project_module
-
- self.current_project = saved_project
-
- def add_rule(self, name, callable):
- """Makes rule 'name' available to all subsequently loaded Jamfiles.
-
- Calling that rule will relay to 'callable'."""
- self.project_rules_.add_rule(name, callable)
-
- def project_rules(self):
- return self.project_rules_
-
- def glob_internal(self, project, wildcards, excludes, rule_name):
- location = project.get("source-location")[0]
-
- result = []
- callable = b2.util.path.__dict__[rule_name]
-
- paths = callable([location], wildcards, excludes)
- has_dir = 0
- for w in wildcards:
- if os.path.dirname(w):
- has_dir = 1
- break
-
- if has_dir or rule_name != "glob":
- result = []
- # The paths we've found are relative to the current directory,
- # but the names specified in the sources list are assumed to
- # be relative to the source directory of the corresponding
- # project. Either translate them or make them absolute.
-
- for p in paths:
- rel = os.path.relpath(p, location)
- # If the path is below source location, use relative path.
- if not ".." in rel:
- result.append(rel)
- else:
- # Otherwise, use full path just to avoid any ambiguities.
- result.append(os.path.abspath(p))
-
- else:
- # There were no directories in the wildcards, so the files are all
- # in the source directory of the project. Just drop the
- # directory instead of making the paths absolute.
- result = [os.path.basename(p) for p in paths]
-
- return result
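A small sketch of the relative-versus-absolute decision made above, using made-up paths that need not exist on disk:

    import os

    location = "libs/mylib/src"          # the project's source location
    hits = ["libs/mylib/src/a.cpp",      # below the source location
            "libs/other/b.cpp"]          # outside of it

    for p in hits:
        rel = os.path.relpath(p, location)
        if not ".." in rel:
            print rel                    # kept relative: a.cpp
        else:
            print os.path.abspath(p)     # falls back to an absolute path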
-
- def load_module(self, name, extra_path=None):
- """Load a Python module that should be usable from Jamfiles.
-
- There are generally two types of modules Jamfiles might want to
- use:
- - Core Boost.Build. Those are imported using plain names, e.g.
- 'toolset', so this function checks if we have module named
- b2.package.module already.
- - Python modules in the same directory as Jamfile. We don't
- want to even temporarily add the Jamfile's directory to sys.path,
- since then we might get naming conflicts between standard
- Python modules and those.
- """
-
- # See if we loaded module of this name already
- existing = self.loaded_tool_modules_.get(name)
- if existing:
- return existing
-
- # See if we have a module b2.whatever.<name>, where <name>
- # is what is passed to this function
- modules = sys.modules
- for class_name in modules:
- parts = class_name.split('.')
- if name is class_name or parts[0] == "b2" \
- and parts[-1] == name.replace("-", "_"):
- module = modules[class_name]
- self.loaded_tool_modules_[name] = module
- return module
-
- # Lookup a module in BOOST_BUILD_PATH
- path = extra_path
- if not path:
- path = []
- path.extend(self.manager.boost_build_path())
- location = None
- for p in path:
- l = os.path.join(p, name + ".py")
- if os.path.exists(l):
- location = l
- break
-
- if not location:
- self.manager.errors()("Cannot find module '%s'" % name)
-
- mname = name + "__for_jamfile"
- file = open(location)
- try:
- # TODO: this means we'll never make use of .pyc module,
- # which might be a problem, or not.
- self.loaded_tool_module_path_[mname] = location
- module = imp.load_module(mname, file, os.path.basename(location),
- (".py", "r", imp.PY_SOURCE))
- self.loaded_tool_modules_[name] = module
- return module
- finally:
- file.close()
-
-
-
-# FIXME:
-# Defines a Boost.Build extension project. Such extensions usually
-# contain library targets and features that can be used by many people.
-# Even though extensions are really projects, they can be initialized as
-# a module would be with the "using" (project.project-rules.using)
-# mechanism.
-#rule extension ( id : options * : * )
-#{
-# # The caller is a standalone module for the extension.
-# local mod = [ CALLER_MODULE ] ;
-#
-# # We need to do the rest within the extension module.
-# module $(mod)
-# {
-# import path ;
-#
-# # Find the root project.
-# local root-project = [ project.current ] ;
-# root-project = [ $(root-project).project-module ] ;
-# while
-# [ project.attribute $(root-project) parent-module ] &&
-# [ project.attribute $(root-project) parent-module ] != user-config
-# {
-# root-project = [ project.attribute $(root-project) parent-module ] ;
-# }
-#
-# # Create the project data, and bring in the project rules
-# # into the module.
-# project.initialize $(__name__) :
-# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
-#
-# # Create the project itself, i.e. the attributes.
-# # All extensions are created in the "/ext" project space.
-# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-# local attributes = [ project.attributes $(__name__) ] ;
-#
-# # Inherit from the root project of whomever is defining us.
-# project.inherit-attributes $(__name__) : $(root-project) ;
-# $(attributes).set parent-module : $(root-project) : exact ;
-# }
-#}
-
-
-class ProjectAttributes:
- """Class keeping all the attributes of a project.
-
- The standard attributes are "id", "location", "project-root", "parent",
- "requirements", "default-build", "source-location" and "projects-to-build".
- """
-
- def __init__(self, manager, location, project_module):
- self.manager = manager
- self.location = location
- self.project_module = project_module
- self.attributes = {}
- self.usage_requirements = None
-
- def set(self, attribute, specification, exact=False):
- """Set the named attribute from the specification given by the user.
- The value actually set may be different."""
-
- if exact:
- self.__dict__[attribute] = specification
-
- elif attribute == "requirements":
- self.requirements = property_set.refine_from_user_input(
- self.requirements, specification,
- self.project_module, self.location)
-
- elif attribute == "usage-requirements":
- unconditional = []
- for p in specification:
- split = property.split_conditional(p)
- if split:
- unconditional.append(split[1])
- else:
- unconditional.append(p)
-
- non_free = property.remove("free", unconditional)
- if non_free:
- get_manager().errors()("usage-requirements %s have non-free properties %s" \
- % (specification, non_free))
-
- t = property.translate_paths(
- property.create_from_strings(specification, allow_condition=True),
- self.location)
-
- existing = self.__dict__.get("usage-requirements")
- if existing:
- new = property_set.create(existing.all() + t)
- else:
- new = property_set.create(t)
- self.__dict__["usage-requirements"] = new
-
-
- elif attribute == "default-build":
- self.__dict__["default-build"] = property_set.create(specification)
-
- elif attribute == "source-location":
- source_location = []
- for path in specification:
- source_location.append(os.path.join(self.location, path))
- self.__dict__["source-location"] = source_location
-
- elif attribute == "build-dir":
- self.__dict__["build-dir"] = os.path.join(self.location, specification[0])
-
- elif attribute == "id":
- id = specification[0]
- if id[0] != '/':
- id = "/" + id
- self.manager.projects().register_id(id, self.project_module)
- self.__dict__["id"] = id
-
- elif not attribute in ["default-build", "location",
- "source-location", "parent",
- "projects-to-build", "project-root"]:
- self.manager.errors()(
-"""Invalid project attribute '%s' specified
-for project at '%s'""" % (attribute, self.location))
- else:
- self.__dict__[attribute] = specification
-
- def get(self, attribute):
- return self.__dict__[attribute]
-
- def getDefault(self, attribute, default):
- return self.__dict__.get(attribute, default)
-
- def dump(self):
- """Prints the project attributes."""
- id = self.get("id")
- if not id:
- id = "(none)"
- else:
- id = id[0]
-
- parent = self.get("parent")
- if not parent:
- parent = "(none)"
- else:
- parent = parent[0]
-
- print "'%s'" % id
- print "Parent project: %s" % parent
- print "Requirements: %s" % self.get("requirements")
- print "Default build: %s" % self.get("default-build")
- print "Source location: %s" % string.join(self.get("source-location"))
- print "Projects to build: %s" % string.join(sorted(self.get("projects-to-build")))
-
-class ProjectRules:
- """Class keeping all rules that are made available to Jamfile."""
-
- def __init__(self, registry):
- self.registry = registry
- self.manager_ = registry.manager
- self.rules = {}
- self.local_names = [x for x in self.__class__.__dict__
- if x not in ["__init__", "init_project", "add_rule",
- "error_reporting_wrapper", "add_rule_for_type", "reverse"]]
- self.all_names_ = [x for x in self.local_names]
-
- def _import_rule(self, bjam_module, name, callable):
- if hasattr(callable, "bjam_signature"):
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature)
- else:
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable))
-
-
- def add_rule_for_type(self, type):
- rule_name = type.lower().replace("_", "-")
-
- def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []):
- return self.manager_.targets().create_typed_target(
- type, self.registry.current(), name[0], sources,
- requirements, default_build, usage_requirements)
-
- self.add_rule(rule_name, xpto)
-
- def add_rule(self, name, callable):
- self.rules[name] = callable
- self.all_names_.append(name)
-
- # Add the new rule at global bjam scope. This might not be ideal;
- # it is added because if a jamroot does 'import foo' where foo calls
- # add_rule, we need to import the new rule into the jamroot scope, and
- # I'm too lazy to do this properly now.
- self._import_rule("", name, callable)
-
- def all_names(self):
- return self.all_names_
-
- def call_and_report_errors(self, callable, *args, **kw):
- result = None
- try:
- self.manager_.errors().push_jamfile_context()
- result = callable(*args, **kw)
- except ExceptionWithUserContext, e:
- e.report()
- except Exception, e:
- try:
- self.manager_.errors().handle_stray_exception (e)
- except ExceptionWithUserContext, e:
- e.report()
- finally:
- self.manager_.errors().pop_jamfile_context()
-
- return result
-
- def make_wrapper(self, callable):
- """Given a free-standing function 'callable', return a new
- callable that will call 'callable' and report all exceptions,
- using 'call_and_report_errors'."""
- def wrapper(*args, **kw):
- return self.call_and_report_errors(callable, *args, **kw)
- return wrapper
-
- def init_project(self, project_module, python_standalone=False):
-
- if python_standalone:
- m = sys.modules[project_module]
-
- for n in self.local_names:
- if n != "import_":
- setattr(m, n, getattr(self, n))
-
- for n in self.rules:
- setattr(m, n, self.rules[n])
-
- return
-
- for n in self.local_names:
- # Using 'getattr' here gives us a bound method,
- # while using self.__dict__[r] would give unbound one.
- v = getattr(self, n)
- if callable(v):
- if n == "import_":
- n = "import"
- else:
- n = string.replace(n, "_", "-")
-
- self._import_rule(project_module, n, v)
-
- for n in self.rules:
- self._import_rule(project_module, n, self.rules[n])
-
- def project(self, *args):
-
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
-
- id = None
- if args and args[0]:
- id = args[0][0]
- args = args[1:]
-
- if id:
- attributes.set('id', [id])
-
- explicit_build_dir = None
- for a in args:
- if a:
- attributes.set(a[0], a[1:], exact=0)
- if a[0] == "build-dir":
- explicit_build_dir = a[1]
-
- # If '--build-dir' is specified, change the build dir for the project.
- if self.registry.global_build_dir:
-
- location = attributes.get("location")
- # A project with an empty location is a 'standalone' project, like
- # user-config or qt. It has no build dir.
- # If we tried to set a build dir for user-config, we would then
- # try to inherit it, with either weird or wrong consequences.
- if location and location == attributes.get("project-root"):
- # Re-read the project id, since it might have been changed in
- # the project's attributes.
- id = attributes.get('id')
-
- # This is Jamroot.
- if id:
- if explicit_build_dir and os.path.isabs(explicit_build_dir):
- self.registry.manager.errors()(
-"""Absolute directory specified via 'build-dir' project attribute
-Don't know how to combine that with the --build-dir option.""")
-
- rid = id
- if rid[0] == '/':
- rid = rid[1:]
-
- p = os.path.join(self.registry.global_build_dir, rid)
- if explicit_build_dir:
- p = os.path.join(p, explicit_build_dir)
- attributes.set("build-dir", p, exact=1)
- elif explicit_build_dir:
- self.registry.manager.errors()(
-"""When --build-dir is specified, the 'build-dir'
-attribute is allowed only for top-level 'project' invocations""")
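For illustration, the combination of --build-dir, the project id and an explicit 'build-dir' attribute performed above boils down to the following; all values here are made up:

    import os

    global_build_dir = "/tmp/build"   # from --build-dir
    id = "/mylib"                     # project id declared in the Jamroot
    explicit_build_dir = "out"        # relative 'build-dir' attribute, if any

    rid = id[1:] if id.startswith("/") else id   # strip the leading slash
    p = os.path.join(global_build_dir, rid)
    if explicit_build_dir:
        p = os.path.join(p, explicit_build_dir)
    print p
    # Expected output: /tmp/build/mylib/out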
-
- def constant(self, name, value):
- """Declare and set a project global constant.
- Project global constants are normal variables but should
- not be changed. They are applied to every child Jamfile."""
- self.registry.current().add_constant(name[0], value)
-
- def path_constant(self, name, value):
- """Declare and set a project global constant, whose value is a path. The
- path is adjusted to be relative to the invocation directory. The given
- value path is taken to be either absolute, or relative to this project
- root."""
- if len(value) > 1:
- self.registry.manager.errors()("path constant should have one element")
- self.registry.current().add_constant(name[0], value[0], path=1)
-
- def use_project(self, id, where):
- # See the comment in 'load' for an explanation of why we record the
- # parameters instead of loading the project now.
- m = self.registry.current().project_module();
- self.registry.used_projects[m].append((id[0], where[0]))
-
- def build_project(self, dir):
- assert(isinstance(dir, list))
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
- now = attributes.get("projects-to-build")
- attributes.set("projects-to-build", now + dir, exact=True)
-
- def explicit(self, target_names):
- self.registry.current().mark_targets_as_explicit(target_names)
-
- def always(self, target_names):
- self.registry.current().mark_targets_as_alays(target_names)
-
- def glob(self, wildcards, excludes=None):
- return self.registry.glob_internal(self.registry.current(),
- wildcards, excludes, "glob")
-
- def glob_tree(self, wildcards, excludes=None):
- bad = 0
- for p in wildcards:
- if os.path.dirname(p):
- bad = 1
-
- if excludes:
- for p in excludes:
- if os.path.dirname(p):
- bad = 1
-
- if bad:
- self.registry.manager.errors()(
-"The patterns to 'glob-tree' may not include directories")
- return self.registry.glob_internal(self.registry.current(),
- wildcards, excludes, "glob_tree")
-
-
- def using(self, toolset, *args):
- # The module referred to by 'using' can be placed in
- # the same directory as the Jamfile, and the user
- # will expect the module to be found even though
- # the directory is not in BOOST_BUILD_PATH.
- # So temporarily change the search path.
- current = self.registry.current()
- location = current.get('location')
-
- m = self.registry.load_module(toolset[0], [location])
- if not m.__dict__.has_key("init"):
- self.registry.manager.errors()(
- "Tool module '%s' does not define the 'init' method" % toolset[0])
- m.init(*args)
-
- # The above might have clobbered .current-project. Restore the correct
- # value.
- self.registry.set_current(current)
-
- def import_(self, name, names_to_import=None, local_names=None):
-
- name = name[0]
- py_name = name
- if py_name == "os":
- py_name = "os_j"
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
- location = attributes.get("location")
-
- saved = self.registry.current()
-
- m = self.registry.load_module(py_name, [location])
-
- for f in m.__dict__:
- v = m.__dict__[f]
- f = f.replace("_", "-")
- if callable(v):
- qn = name + "." + f
- self._import_rule(jamfile_module, qn, v)
- record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v)
-
-
- if names_to_import:
- if not local_names:
- local_names = names_to_import
-
- if len(names_to_import) != len(local_names):
- self.registry.manager.errors()(
-"""The number of names to import and local names do not match.""")
-
- for n, l in zip(names_to_import, local_names):
- self._import_rule(jamfile_module, l, m.__dict__[n])
-
- self.registry.set_current(saved)
-
- def conditional(self, condition, requirements):
- """Calculates conditional requirements for multiple requirements
- at once. This is a shorthand to reduce duplication and to
- keep an inline declarative syntax. For example:
-
- lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
- <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
- """
-
- c = string.join(condition, ",")
- if c.find(":") != -1:
- return [c + r for r in requirements]
- else:
- return [c + ":" + r for r in requirements]
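To make the joining behaviour concrete, here is a standalone check mirroring the method above; the condition and requirement values are hypothetical:

    import string

    condition = ["<toolset>gcc", "<variant>debug"]
    requirements = ["<define>DEBUG_EXCEPTION", "<define>DEBUG_TRACE"]

    c = string.join(condition, ",")
    if c.find(":") != -1:
        print [c + r for r in requirements]
    else:
        print [c + ":" + r for r in requirements]
    # ['<toolset>gcc,<variant>debug:<define>DEBUG_EXCEPTION',
    #  '<toolset>gcc,<variant>debug:<define>DEBUG_TRACE']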
-
- def option(self, name, value):
- name = name[0]
- if not name in ["site-config", "user-config", "project-config"]:
- get_manager().errors()("The 'option' rule may be used only in site-config or user-config")
-
- option.set(name, value[0])
diff --git a/jam-files/boost-build/build/property-set.jam b/jam-files/boost-build/build/property-set.jam
deleted file mode 100644
index 70fd90cd..00000000
--- a/jam-files/boost-build/build/property-set.jam
+++ /dev/null
@@ -1,481 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import feature ;
-import path ;
-import project ;
-import property ;
-import sequence ;
-import set ;
-import option ;
-
-# Class for storing a set of properties.
-#
-# There is 1<->1 correspondence between identity and value. No two instances
-# of the class are equal. To maintain this property, the 'property-set.create'
-# rule should be used to create new instances. Instances are immutable.
-#
-# Each property is classified with regard to its effect on build results.
-# Incidental properties have no effect on build results, from Boost.Build's
-# point of view. Others are either free, or non-free and we refer to non-free
-# ones as 'base'. Each property belongs to exactly one of those categories.
-#
-# It is possible to get a list of properties belonging to each category as
-# well as a list of properties with a specific attribute.
-#
-# Several operations, like refine and as-path, are provided. They all use
-# caching whenever possible.
-#
-class property-set
-{
- import errors ;
- import feature ;
- import path ;
- import property ;
- import property-set ;
- import set ;
-
- rule __init__ ( raw-properties * )
- {
- self.raw = $(raw-properties) ;
-
- for local p in $(raw-properties)
- {
- if ! $(p:G)
- {
- errors.error "Invalid property: '$(p)'" ;
- }
-
- local att = [ feature.attributes $(p:G) ] ;
- # A feature can be both incidental and free, in which case we add it
- # to incidental.
- if incidental in $(att)
- {
- self.incidental += $(p) ;
- }
- else if free in $(att)
- {
- self.free += $(p) ;
- }
- else
- {
- self.base += $(p) ;
- }
-
- if dependency in $(att)
- {
- self.dependency += $(p) ;
- }
- else
- {
- self.non-dependency += $(p) ;
- }
-
- if [ MATCH (:) : $(p:G=) ]
- {
- self.conditional += $(p) ;
- }
- else
- {
- self.non-conditional += $(p) ;
- }
-
- if propagated in $(att)
- {
- self.propagated += $(p) ;
- }
- if link-incompatible in $(att)
- {
- self.link-incompatible += $(p) ;
- }
- }
- }
-
- # Returns Jam list of stored properties.
- #
- rule raw ( )
- {
- return $(self.raw) ;
- }
-
- rule str ( )
- {
- return "[" $(self.raw) "]" ;
- }
-
- # Returns properties that are neither incidental nor free.
- #
- rule base ( )
- {
- return $(self.base) ;
- }
-
- # Returns free properties which are not incidental.
- #
- rule free ( )
- {
- return $(self.free) ;
- }
-
- # Returns dependency properties.
- #
- rule dependency ( )
- {
- return $(self.dependency) ;
- }
-
- rule non-dependency ( )
- {
- return $(self.non-dependency) ;
- }
-
- rule conditional ( )
- {
- return $(self.conditional) ;
- }
-
- rule non-conditional ( )
- {
- return $(self.non-conditional) ;
- }
-
- # Returns incidental properties.
- #
- rule incidental ( )
- {
- return $(self.incidental) ;
- }
-
- rule refine ( ps )
- {
- if ! $(self.refined.$(ps))
- {
- local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
- if $(r[1]) != "@error"
- {
- self.refined.$(ps) = [ property-set.create $(r) ] ;
- }
- else
- {
- self.refined.$(ps) = $(r) ;
- }
- }
- return $(self.refined.$(ps)) ;
- }
-
- rule expand ( )
- {
- if ! $(self.expanded)
- {
- self.expanded = [ property-set.create [ feature.expand $(self.raw) ] ] ;
- }
- return $(self.expanded) ;
- }
-
- rule expand-composites ( )
- {
- if ! $(self.composites)
- {
- self.composites = [ property-set.create
- [ feature.expand-composites $(self.raw) ] ] ;
- }
- return $(self.composites) ;
- }
-
- rule evaluate-conditionals ( context ? )
- {
- context ?= $(__name__) ;
- if ! $(self.evaluated.$(context))
- {
- self.evaluated.$(context) = [ property-set.create
- [ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ;
- }
- return $(self.evaluated.$(context)) ;
- }
-
- rule propagated ( )
- {
- if ! $(self.propagated-ps)
- {
- self.propagated-ps = [ property-set.create $(self.propagated) ] ;
- }
- return $(self.propagated-ps) ;
- }
-
- rule link-incompatible ( )
- {
- if ! $(self.link-incompatible-ps)
- {
- self.link-incompatible-ps =
- [ property-set.create $(self.link-incompatible) ] ;
- }
- return $(self.link-incompatible-ps) ;
- }
-
- rule run-actions ( )
- {
- if ! $(self.run)
- {
- self.run = [ property-set.create [ feature.run-actions $(self.raw) ] ] ;
- }
- return $(self.run) ;
- }
-
- rule add-defaults ( )
- {
- if ! $(self.defaults)
- {
- self.defaults = [ property-set.create
- [ feature.add-defaults $(self.raw) ] ] ;
- }
- return $(self.defaults) ;
- }
-
- rule as-path ( )
- {
- if ! $(self.as-path)
- {
- self.as-path = [ property.as-path $(self.base) ] ;
- }
- return $(self.as-path) ;
- }
-
- # Computes the path to be used for a target with the given properties.
- # Returns a list of
- # - the computed path
- # - if the path is relative to the build directory, a value of 'true'.
- #
- rule target-path ( )
- {
- if ! $(self.target-path)
- {
- # The <location> feature can be used to explicitly change the
- # location of generated targets.
- local l = [ get <location> ] ;
- if $(l)
- {
- self.target-path = $(l) ;
- }
- else
- {
- local p = [ as-path ] ;
- p = [ property-set.hash-maybe $(p) ] ;
-
-            # A really ugly hack. The Boost regression test system requires
-            # specific target paths, and it seems that changing it to handle
-            # another directory layout is really hard. For that reason, we
-            # teach V2 to do the things the regression system requires. The
- # value of '<location-prefix>' is prepended to the path.
- local prefix = [ get <location-prefix> ] ;
- if $(prefix)
- {
- self.target-path = [ path.join $(prefix) $(p) ] ;
- }
- else
- {
- self.target-path = $(p) ;
- }
- if ! $(self.target-path)
- {
- self.target-path = . ;
- }
- # The path is relative to build dir.
- self.target-path += true ;
- }
- }
- return $(self.target-path) ;
- }
-
- rule add ( ps )
- {
- if ! $(self.added.$(ps))
- {
- self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ;
- }
- return $(self.added.$(ps)) ;
- }
-
- rule add-raw ( properties * )
- {
- return [ add [ property-set.create $(properties) ] ] ;
- }
-
- rule link-incompatible-with ( ps )
- {
- if ! $(.li.$(ps))
- {
- local li1 = [ $(__name__).link-incompatible ] ;
- local li2 = [ $(ps).link-incompatible ] ;
- if [ set.equal $(li1) : $(li2) ]
- {
- .li.$(ps) = false ;
- }
- else
- {
- .li.$(ps) = true ;
- }
- }
- if $(.li.$(ps)) = true
- {
- return true ;
- }
- else
- {
- return ;
- }
- }
-
- # Returns all values of 'feature'.
- #
- rule get ( feature )
- {
- if ! $(self.map-built)
- {
- # For each feature, create a member var and assign all values to it.
- # Since all regular member vars start with 'self', there will be no
- # conflicts between names.
- self.map-built = true ;
- for local v in $(self.raw)
- {
- $(v:G) += $(v:G=) ;
- }
- }
- return $($(feature)) ;
- }
-}
-
-
-# Creates a new 'property-set' instance for the given raw properties or returns
-# an already existing one.
-#
-rule create ( raw-properties * )
-{
- raw-properties = [ sequence.unique
- [ sequence.insertion-sort $(raw-properties) ] ] ;
-
- local key = $(raw-properties:J=-:E=) ;
-
- if ! $(.ps.$(key))
- {
- .ps.$(key) = [ new property-set $(raw-properties) ] ;
- }
- return $(.ps.$(key)) ;
-}
-NATIVE_RULE property-set : create ;
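The 'create' rule above interns property sets: the raw properties are sorted, de-duplicated, joined into a key, and the resulting instance is cached, so identical inputs always return the identical object (identity equals value). A minimal Python sketch of that interning idea, using hypothetical names rather than the Boost.Build API:

    # Illustrative flyweight cache: equal property lists share one instance.
    _cache = {}

    class PropertySetSketch(object):
        def __init__(self, raw):
            self.raw = raw

    def create(raw_properties=()):
        # Sort and de-duplicate so equal inputs map to the same cache key.
        key = tuple(sorted(set(raw_properties)))
        if key not in _cache:
            _cache[key] = PropertySetSketch(key)
        return _cache[key]

    assert create(["<rtti>off", "<toolset>gcc"]) is create(["<toolset>gcc", "<rtti>off"])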
-
-
-# Creates a new 'property-set' instance after checking that all properties are
-# valid and converting incidental properties into gristed form.
-#
-rule create-with-validation ( raw-properties * )
-{
- property.validate $(raw-properties) ;
- return [ create [ property.make $(raw-properties) ] ] ;
-}
-
-
-# Creates a property-set from the input given by the user, in the context of
-# 'jamfile-module' at 'location'.
-#
-rule create-from-user-input ( raw-properties * : jamfile-module location )
-{
- local specification = [ property.translate-paths $(raw-properties)
- : $(location) ] ;
- specification = [ property.translate-indirect $(specification)
- : $(jamfile-module) ] ;
- local project-id = [ project.attribute $(jamfile-module) id ] ;
- project-id ?= [ path.root $(location) [ path.pwd ] ] ;
- specification = [ property.translate-dependencies
- $(specification) : $(project-id) : $(location) ] ;
- specification =
- [ property.expand-subfeatures-in-conditions $(specification) ] ;
- specification = [ property.make $(specification) ] ;
- return [ property-set.create $(specification) ] ;
-}
-
-
-# Refines requirements with requirements provided by the user. Specially handles
-# "-<property>value" syntax in specification to remove given requirements.
-# - parent-requirements -- property-set object with requirements to refine.
-# - specification -- string list of requirements provided by the user.
-# - project-module -- module to which context indirect features will be
-# bound.
-# - location -- path to which path features are relative.
-#
-rule refine-from-user-input ( parent-requirements : specification * :
- project-module : location )
-{
- if ! $(specification)
- {
- return $(parent-requirements) ;
- }
- else
- {
- local add-requirements ;
- local remove-requirements ;
-
- for local r in $(specification)
- {
- local m = [ MATCH "^-(.*)" : $(r) ] ;
- if $(m)
- {
- remove-requirements += $(m) ;
- }
- else
- {
- add-requirements += $(r) ;
- }
- }
-
- if $(remove-requirements)
- {
- # Need to create a property set, so that path features and indirect
- # features are translated just like they are in project
- # requirements.
- local ps = [ property-set.create-from-user-input
- $(remove-requirements) : $(project-module) $(location) ] ;
-
- parent-requirements = [ property-set.create
- [ set.difference [ $(parent-requirements).raw ]
- : [ $(ps).raw ] ] ] ;
- specification = $(add-requirements) ;
- }
-
- local requirements = [ property-set.create-from-user-input
- $(specification) : $(project-module) $(location) ] ;
-
- return [ $(parent-requirements).refine $(requirements) ] ;
- }
-}
-
-
-# Returns a property-set with an empty set of properties.
-#
-rule empty ( )
-{
- if ! $(.empty)
- {
- .empty = [ create ] ;
- }
- return $(.empty) ;
-}
-
-if [ option.get hash : : yes ] = yes
-{
- rule hash-maybe ( path ? )
- {
- path ?= "" ;
- return [ MD5 $(path) ] ;
- }
-}
-else
-{
- rule hash-maybe ( path ? )
- {
- return $(path) ;
- }
-}
-
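When bjam is invoked with --hash, the 'hash-maybe' rule above collapses the (possibly very long) property path into an MD5 digest; otherwise the path passes through unchanged. A rough Python equivalent, for illustration only (the flag handling is an assumption of the sketch):

    import hashlib

    def hash_maybe(path="", use_hash=True):
        # With hashing enabled, replace the property path with its MD5 digest;
        # otherwise return the path unchanged.
        if use_hash:
            return hashlib.md5(path.encode("utf-8")).hexdigest()
        return path

    print(hash_maybe("gcc-4.8/debug/link-static"))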
diff --git a/jam-files/boost-build/build/property.jam b/jam-files/boost-build/build/property.jam
deleted file mode 100644
index a2ad5226..00000000
--- a/jam-files/boost-build/build/property.jam
+++ /dev/null
@@ -1,788 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-import feature ;
-import indirect ;
-import path ;
-import regex ;
-import string ;
-import sequence ;
-import set ;
-import utility ;
-
-
-# Refines 'properties' by overriding any non-free and non-conditional properties
-# for which a different value is specified in 'requirements'. Returns the
-# resulting list of properties.
-#
-rule refine ( properties * : requirements * )
-{
- local result ;
- local error ;
-
- # All the 'requirements' elements should be present in the result. Record
- # them so that we can handle 'properties'.
- for local r in $(requirements)
- {
- # Do not consider conditional requirements.
- if ! [ MATCH (:) : $(r:G=) ]
- {
- # Note: cannot use a local variable here, so use an ugly name.
- __require__$(r:G) = $(r:G=) ;
- }
- }
-
- for local p in $(properties)
- {
- if [ MATCH (:) : $(p:G=) ]
- {
- # Do not modify conditional properties.
- result += $(p) ;
- }
- else if free in [ feature.attributes $(p:G) ]
- {
- # Do not modify free properties.
- result += $(p) ;
- }
- else
- {
- local required-value = $(__require__$(p:G)) ;
- if $(required-value)
- {
- if $(p:G=) != $(required-value)
- {
- result += $(p:G)$(required-value) ;
- }
- else
- {
- result += $(p) ;
- }
- }
- else
- {
- result += $(p) ;
- }
- }
- }
-
- # Unset our ugly map.
- for local r in $(requirements)
- {
- __require__$(r:G) = ;
- }
-
- if $(error)
- {
- return $(error) ;
- }
- else
- {
- return [ sequence.unique $(result) $(requirements) ] ;
- }
-}
-
-
-# Removes all conditional properties whose conditions are not met. For those
-# with met conditions, removes the condition. Properties in conditions are
-# looked up in 'context'.
-#
-rule evaluate-conditionals-in-context ( properties * : context * )
-{
- local base ;
- local conditionals ;
- for local p in $(properties)
- {
- if [ MATCH (:<) : $(p) ]
- {
- conditionals += $(p) ;
- }
- else
- {
- base += $(p) ;
- }
- }
-
- local result = $(base) ;
- for local p in $(conditionals)
- {
- # Separate condition and property.
- local s = [ MATCH (.*):(<.*) : $(p) ] ;
- # Split condition into individual properties.
- local condition = [ regex.split $(s[1]) "," ] ;
- # Evaluate condition.
- if ! [ MATCH (!).* : $(condition:G=) ]
- {
- # Only positive checks
- if $(condition) in $(context)
- {
- result += $(s[2]) ;
- }
- }
- else
- {
- # Have negative checks
- local fail ;
- while $(condition)
- {
- local c = $(condition[1]) ;
- local m = [ MATCH !(.*) : $(c) ] ;
- if $(m)
- {
- local p = $(m:G=$(c:G)) ;
- if $(p) in $(context)
- {
- fail = true ;
- c = ;
- }
- }
- else
- {
- if ! $(c) in $(context)
- {
- fail = true ;
- c = ;
- }
- }
- condition = $(condition[2-]) ;
- }
- if ! $(fail)
- {
- result += $(s[2]) ;
- }
- }
- }
- return $(result) ;
-}
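For a concrete picture of what 'evaluate-conditionals-in-context' does, here is a string-level Python sketch covering only the positive case (the negated '!' checks handled above are omitted); the helper name is hypothetical:

    def evaluate_conditionals(properties, context):
        result = []
        for p in properties:
            if ":<" not in p:
                result.append(p)               # unconditional, keep as is
                continue
            condition, prop = p.rsplit(":<", 1)
            if all(c in context for c in condition.split(",")):
                result.append("<" + prop)      # condition met: keep property, drop condition
        return result

    ctx = ["<toolset>gcc", "<variant>release", "<rtti>off"]
    print(evaluate_conditionals(["<variant>release,<rtti>off:<define>MY_RELEASE"], ctx))
    # ['<define>MY_RELEASE']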
-
-
-rule expand-subfeatures-in-conditions ( properties * )
-{
- local result ;
- for local p in $(properties)
- {
- local s = [ MATCH (.*):(<.*) : $(p) ] ;
- if ! $(s)
- {
- result += $(p) ;
- }
- else
- {
- local condition = $(s[1]) ;
- local value = $(s[2]) ;
- # Condition might include several elements.
- condition = [ regex.split $(condition) "," ] ;
- local e ;
- for local c in $(condition)
- {
- # It is common for a condition to include a toolset or
- # subfeatures that have not been defined. In that case we want
- # the condition to simply 'never be satisfied' and validation
- # would only produce a spurious error so we prevent it by
- # passing 'true' as the second parameter.
- e += [ feature.expand-subfeatures $(c) : true ] ;
- }
- if $(e) = $(condition)
- {
- # (todo)
- # This is just an optimization and possibly a premature one at
- # that.
- # (todo) (12.07.2008.) (Jurko)
- result += $(p) ;
- }
- else
- {
- result += $(e:J=,):$(value) ;
- }
- }
- }
- return $(result) ;
-}
-
-
-# Helper for as-path, below. Orders properties with the implicit ones first, and
-# within the two sections in alphabetical order of feature name.
-#
-local rule path-order ( x y )
-{
- if $(y:G) && ! $(x:G)
- {
- return true ;
- }
- else if $(x:G) && ! $(y:G)
- {
- return ;
- }
- else
- {
- if ! $(x:G)
- {
- x = [ feature.expand-subfeatures $(x) ] ;
- y = [ feature.expand-subfeatures $(y) ] ;
- }
-
- if $(x[1]) < $(y[1])
- {
- return true ;
- }
- }
-}
-
-
-local rule abbreviate-dashed ( string )
-{
- local r ;
- for local part in [ regex.split $(string) - ]
- {
- r += [ string.abbreviate $(part) ] ;
- }
- return $(r:J=-) ;
-}
-
-
-local rule identity ( string )
-{
- return $(string) ;
-}
-
-
-if --abbreviate-paths in [ modules.peek : ARGV ]
-{
- .abbrev = abbreviate-dashed ;
-}
-else
-{
- .abbrev = identity ;
-}
-
-
-# Returns a path representing the given expanded property set.
-#
-rule as-path ( properties * )
-{
- local entry = .result.$(properties:J=-) ;
-
- if ! $($(entry))
- {
- # Trim redundancy.
- properties = [ feature.minimize $(properties) ] ;
-
- # Sort according to path-order.
- properties = [ sequence.insertion-sort $(properties) : path-order ] ;
-
- local components ;
- for local p in $(properties)
- {
- if $(p:G)
- {
- local f = [ utility.ungrist $(p:G) ] ;
- p = $(f)-$(p:G=) ;
- }
- components += [ $(.abbrev) $(p) ] ;
- }
-
- $(entry) = $(components:J=/) ;
- }
-
- return $($(entry)) ;
-}
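The 'as-path' rule above turns a property set into a directory fragment: implicit properties (such as toolset and variant) come first, the remaining features follow in alphabetical order, and non-implicit properties are rendered as feature-value. A small, hypothetical Python sketch of that ordering and joining, skipping minimization and abbreviation:

    def as_path(properties):
        # properties: list of (feature, value, is_implicit) tuples, assumed minimized.
        implicit = sorted(p for p in properties if p[2])
        explicit = sorted(p for p in properties if not p[2])
        parts = [v for _, v, _ in implicit] + ["%s-%s" % (f, v) for f, v, _ in explicit]
        return "/".join(parts)

    print(as_path([("toolset", "gcc", True), ("variant", "debug", True), ("rtti", "off", False)]))
    # gcc/debug/rtti-off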
-
-
-# Exit with error if property is not valid.
-#
-local rule validate1 ( property )
-{
- local msg ;
- if $(property:G)
- {
- local feature = $(property:G) ;
- local value = $(property:G=) ;
-
- if ! [ feature.valid $(feature) ]
- {
- # Ungrist for better error messages.
- feature = [ utility.ungrist $(property:G) ] ;
- msg = "unknown feature '$(feature)'" ;
- }
- else if $(value) && ! free in [ feature.attributes $(feature) ]
- {
- feature.validate-value-string $(feature) $(value) ;
- }
- else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
- {
- # Ungrist for better error messages.
- feature = [ utility.ungrist $(property:G) ] ;
- msg = "No value specified for feature '$(feature)'" ;
- }
- }
- else
- {
- local feature = [ feature.implied-feature $(property) ] ;
- feature.validate-value-string $(feature) $(property) ;
- }
- if $(msg)
- {
- errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
- }
-}
-
-
-rule validate ( properties * )
-{
- for local p in $(properties)
- {
- validate1 $(p) ;
- }
-}
-
-
-rule validate-property-sets ( property-sets * )
-{
- for local s in $(property-sets)
- {
- validate [ feature.split $(s) ] ;
- }
-}
-
-
-# Expands any implicit property values in the given property 'specification' so
-# they explicitly state their feature.
-#
-rule make ( specification * )
-{
- local result ;
- for local e in $(specification)
- {
- if $(e:G)
- {
- result += $(e) ;
- }
- else if [ feature.is-implicit-value $(e) ]
- {
- local feature = [ feature.implied-feature $(e) ] ;
- result += $(feature)$(e) ;
- }
- else
- {
- errors.error "'$(e)' is not a valid property specification" ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns a property set containing all the elements in 'properties' that do not
-# have their attributes listed in 'attributes'.
-#
-rule remove ( attributes + : properties * )
-{
- local result ;
- for local e in $(properties)
- {
- if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
- {
- result += $(e) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns a property set containing all the elements in 'properties' that have
-# their attributes listed in 'attributes'.
-#
-rule take ( attributes + : properties * )
-{
- local result ;
- for local e in $(properties)
- {
- if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
- {
- result += $(e) ;
- }
- }
- return $(result) ;
-}
-
-
-# Selects properties corresponding to any of the given features.
-#
-rule select ( features * : properties * )
-{
- local result ;
-
- # Add any missing angle brackets.
- local empty = "" ;
- features = $(empty:G=$(features)) ;
-
- for local p in $(properties)
- {
- if $(p:G) in $(features)
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns a modified version of properties with all values of the given feature
-# replaced by the given value. If 'value' is empty the feature will be removed.
-#
-rule change ( properties * : feature value ? )
-{
- local result ;
- for local p in $(properties)
- {
- if $(p:G) = $(feature)
- {
- result += $(value:G=$(feature)) ;
- }
- else
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# If 'property' is a conditional property, returns the condition and the
-# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
-# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
-# string.
-#
-rule split-conditional ( property )
-{
- local m = [ MATCH "(.+):<(.+)" : $(property) ] ;
- if $(m)
- {
- return $(m[1]) <$(m[2]) ;
- }
-}
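The same split can be expressed with the regular expression the Python port below also uses; this snippet is purely illustrative:

    import re

    _split_conditional = re.compile(r"(.+):<(.+)")

    def split_conditional(prop):
        # Return (condition, property) for a conditional property, else None.
        m = _split_conditional.match(prop)
        return (m.group(1), "<" + m.group(2)) if m else None

    print(split_conditional("<variant>debug,<toolset>gcc:<inlining>full"))
    # ('<variant>debug,<toolset>gcc', '<inlining>full')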
-
-
-# Interpret all path properties in 'properties' as relative to 'path'. The
-# property values are assumed to be in system-specific form, and will be
-# translated into normalized form.
-#
-rule translate-paths ( properties * : path )
-{
- local result ;
- for local p in $(properties)
- {
- local split = [ split-conditional $(p) ] ;
- local condition = "" ;
- if $(split)
- {
- condition = $(split[1]): ;
- p = $(split[2]) ;
- }
-
- if path in [ feature.attributes $(p:G) ]
- {
- local values = [ regex.split $(p:TG=) "&&" ] ;
- local t ;
- for local v in $(values)
- {
- t += [ path.root [ path.make $(v) ] $(path) ] ;
- }
- t = $(t:J="&&") ;
- result += $(condition)$(t:TG=$(p:G)) ;
- }
- else
- {
- result += $(condition)$(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Assumes that all feature values that start with '@' are names of rules, used
-# in 'context-module'. Such rules can be either local to the module or global.
-# Converts such values into 'indirect-rule' format (see indirect.jam), so they
-# can be called from other modules. Does nothing for such values that are
-# already in the 'indirect-rule' format.
-#
-rule translate-indirect ( specification * : context-module )
-{
- local result ;
- for local p in $(specification)
- {
- local m = [ MATCH ^@(.+) : $(p:G=) ] ;
- if $(m)
- {
- local v ;
- if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
- {
- # Rule is already in the 'indirect-rule' format.
- v = $(m) ;
- }
- else
- {
- if ! [ MATCH ".*([.]).*" : $(m) ]
- {
- # This is an unqualified rule name. The user might want to
- # set flags on this rule name and toolset.flag
- # auto-qualifies it. Need to do the same here so flag
- # setting works. We can arrange for toolset.flag to *not*
- # auto-qualify the argument but then two rules defined in
- # two Jamfiles would conflict.
- m = $(context-module).$(m) ;
- }
- v = [ indirect.make $(m) : $(context-module) ] ;
- }
-
- v = @$(v) ;
- result += $(v:G=$(p:G)) ;
- }
- else
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Binds all dependency properties in a list relative to the given project.
-# Targets with absolute paths will be left unchanged and targets which have a
-# project specified will have the path to the project interpreted relative to
-# the specified location.
-#
-rule translate-dependencies ( specification * : project-id : location )
-{
- local result ;
- for local p in $(specification)
- {
- local split = [ split-conditional $(p) ] ;
- local condition = "" ;
- if $(split)
- {
- condition = $(split[1]): ;
- p = $(split[2]) ;
- }
- if dependency in [ feature.attributes $(p:G) ]
- {
- local split-target = [ regex.match (.*)//(.*) : $(p:G=) ] ;
- if $(split-target)
- {
- local rooted = [ path.root [ path.make $(split-target[1]) ]
- [ path.root $(location) [ path.pwd ] ] ] ;
- result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
- }
- else if [ path.is-rooted $(p:G=) ]
- {
- result += $(condition)$(p) ;
- }
- else
- {
- result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
- }
- }
- else
- {
- result += $(condition)$(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Class maintaining a property set -> string mapping.
-#
-class property-map
-{
- import errors ;
- import numbers ;
- import sequence ;
-
- rule __init__ ( )
- {
- self.next-flag = 1 ;
- }
-
- # Associate 'value' with 'properties'.
- #
- rule insert ( properties + : value )
- {
- self.all-flags += $(self.next-flag) ;
- self.properties.$(self.next-flag) = $(properties) ;
- self.value.$(self.next-flag) = $(value) ;
-
- self.next-flag = [ numbers.increment $(self.next-flag) ] ;
- }
-
- # Returns the value associated with 'properties' or any subset of it. If
- # more than one subset has a value assigned to it, returns the value for the
- # longest subset, if it is unique.
- #
- rule find ( properties + )
- {
- return [ find-replace $(properties) ] ;
- }
-
- # Returns the value associated with 'properties'. If 'value' parameter is
- # given, replaces the found value.
- #
- rule find-replace ( properties + : value ? )
- {
- # First find all matches.
- local matches ;
- local match-ranks ;
- for local i in $(self.all-flags)
- {
- if $(self.properties.$(i)) in $(properties)
- {
- matches += $(i) ;
- match-ranks += [ sequence.length $(self.properties.$(i)) ] ;
- }
- }
- local best = [ sequence.select-highest-ranked $(matches)
- : $(match-ranks) ] ;
- if $(best[2])
- {
- errors.error "Ambiguous key $(properties:J= :E=)" ;
- }
- local original = $(self.value.$(best)) ;
- if $(value)
- {
- self.value.$(best) = $(value) ;
- }
- return $(original) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
- import errors : try catch ;
- import feature ;
-
- # Local rules must be explicitly re-imported.
- import property : path-order abbreviate-dashed ;
-
- feature.prepare-test property-test-temp ;
-
- feature.feature toolset : gcc : implicit symmetric ;
- feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
- 3.0.2 : optional ;
- feature.feature define : : free ;
- feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
- feature.feature optimization : on off ;
- feature.feature variant : debug release : implicit composite symmetric ;
- feature.feature rtti : on off : link-incompatible ;
-
- feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
- feature.compose <variant>release : <define>NDEBUG <optimization>on ;
-
- validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
-
- assert.true path-order $(test-space) debug <define>foo ;
- assert.false path-order $(test-space) <define>foo debug ;
- assert.true path-order $(test-space) gcc debug ;
- assert.false path-order $(test-space) debug gcc ;
- assert.true path-order $(test-space) <optimization>on <rtti>on ;
- assert.false path-order $(test-space) <rtti>on <optimization>on ;
-
- assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
- : refine <toolset>gcc <rtti>off
- : <define>FOO
- : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc <optimization>on
- : refine <toolset>gcc <optimization>off
- : <optimization>on
- : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc <rtti>off
- : refine <toolset>gcc : <rtti>off : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
- : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
- : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
- : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
- : $(test-space) ;
-
- assert.result <define>MY_RELEASE
- : evaluate-conditionals-in-context
- <variant>release,<rtti>off:<define>MY_RELEASE
- : <toolset>gcc <variant>release <rtti>off ;
-
- assert.result debug
- : as-path <optimization>off <variant>debug
- : $(test-space) ;
-
- assert.result gcc/debug/rtti-off
- : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
- : $(test-space) ;
-
- assert.result optmz-off : abbreviate-dashed optimization-off ;
- assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
-
- try ;
- validate <feature>value : $(test-space) ;
- catch "Invalid property '<feature>value': unknown feature 'feature'." ;
-
- try ;
- validate <rtti>default : $(test-space) ;
- catch \"default\" is not a known value of feature <rtti> ;
-
- validate <define>WHATEVER : $(test-space) ;
-
- try ;
- validate <rtti> : $(test-space) ;
- catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
-
- try ;
- validate value : $(test-space) ;
- catch "value" is not a value of an implicit feature ;
-
- assert.result-set-equal <rtti>on
- : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
-
- assert.result-set-equal <include>a
- : select include : <include>a <toolset>gcc ;
-
- assert.result-set-equal <include>a
- : select include bar : <include>a <toolset>gcc ;
-
- assert.result-set-equal <include>a <toolset>gcc
- : select include <bar> <toolset> : <include>a <toolset>gcc ;
-
- assert.result-set-equal <toolset>kylix <include>a
- : change <toolset>gcc <include>a : <toolset> kylix ;
-
- pm = [ new property-map ] ;
- $(pm).insert <toolset>gcc : o ;
- $(pm).insert <toolset>gcc <os>NT : obj ;
- $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
-
- assert.equal o : [ $(pm).find <toolset>gcc ] ;
-
- assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ;
-
- try ;
- $(pm).find <toolset>gcc <os>NT <os>CYGWIN ;
- catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
-
- # Test ordinary properties.
- assert.result : split-conditional <toolset>gcc ;
-
- # Test properties with ":".
- assert.result : split-conditional <define>FOO=A::B ;
-
- # Test conditional feature.
- assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
- : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
-
- feature.finish-test property-test-temp ;
-}
diff --git a/jam-files/boost-build/build/property.py b/jam-files/boost-build/build/property.py
deleted file mode 100644
index c4b13dbc..00000000
--- a/jam-files/boost-build/build/property.py
+++ /dev/null
@@ -1,593 +0,0 @@
-# Status: ported, except for tests and --abbreviate-paths.
-# Base revision: 64070
-#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import re
-from b2.util.utility import *
-from b2.build import feature
-from b2.util import sequence, qualify_jam_action
-import b2.util.set
-from b2.manager import get_manager
-
-__re_two_ampersands = re.compile ('&&')
-__re_comma = re.compile (',')
-__re_split_condition = re.compile ('(.*):(<.*)')
-__re_split_conditional = re.compile (r'(.+):<(.+)')
-__re_colon = re.compile (':')
-__re_has_condition = re.compile (r':<')
-__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
-
-class Property(object):
-
- __slots__ = ('_feature', '_value', '_condition')
-
- def __init__(self, f, value, condition = []):
- if type(f) == type(""):
- f = feature.get(f)
- # At present, single property has a single value.
- assert type(value) != type([])
- assert(f.free() or value.find(':') == -1)
- self._feature = f
- self._value = value
- self._condition = condition
-
- def feature(self):
- return self._feature
-
- def value(self):
- return self._value
-
- def condition(self):
- return self._condition
-
- def to_raw(self):
- result = "<" + self._feature.name() + ">" + str(self._value)
- if self._condition:
- result = ",".join(str(p) for p in self._condition) + ':' + result
- return result
-
- def __str__(self):
- return self.to_raw()
-
- def __hash__(self):
-        # FIXME: consider if this class should be a value-is-identity one
- return hash((self._feature, self._value, tuple(self._condition)))
-
- def __cmp__(self, other):
- return cmp((self._feature, self._value, self._condition),
- (other._feature, other._value, other._condition))
-
-
-def create_from_string(s, allow_condition=False):
-
- condition = []
- import types
- if not isinstance(s, types.StringType):
- print type(s)
- if __re_has_condition.search(s):
-
- if not allow_condition:
- raise BaseException("Conditional property is not allowed in this context")
-
- m = __re_separate_condition_and_property.match(s)
- condition = m.group(1)
- s = m.group(2)
-
- # FIXME: break dependency cycle
- from b2.manager import get_manager
-
- feature_name = get_grist(s)
- if not feature_name:
- if feature.is_implicit_value(s):
- f = feature.implied_feature(s)
- value = s
- else:
- raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
- else:
- f = feature.get(feature_name)
-
- value = get_value(s)
- if not value:
- get_manager().errors()("Invalid property '%s' -- no value specified" % s)
-
-
- if condition:
- condition = [create_from_string(x) for x in condition.split(',')]
-
- return Property(f, value, condition)
-
-def create_from_strings(string_list, allow_condition=False):
-
- return [create_from_string(s, allow_condition) for s in string_list]
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __results
-
- # A cache of results from as_path
- __results = {}
-
-reset ()
-
-
-def path_order (x, y):
- """ Helper for as_path, below. Orders properties with the implicit ones
- first, and within the two sections in alphabetical order of feature
- name.
- """
- if x == y:
- return 0
-
- xg = get_grist (x)
- yg = get_grist (y)
-
- if yg and not xg:
- return -1
-
- elif xg and not yg:
- return 1
-
- else:
- if not xg:
- x = feature.expand_subfeatures([x])
- y = feature.expand_subfeatures([y])
-
- if x < y:
- return -1
- elif x > y:
- return 1
- else:
- return 0
-
-def identify(string):
- return string
-
-# Uses Property
-def refine (properties, requirements):
- """ Refines 'properties' by overriding any non-free properties
- for which a different value is specified in 'requirements'.
- Conditional requirements are just added without modification.
- Returns the resulting list of properties.
- """
- # The result has no duplicates, so we store it in a set
- result = set()
-
- # Records all requirements.
- required = {}
-
- # All the elements of requirements should be present in the result
- # Record them so that we can handle 'properties'.
- for r in requirements:
- # Don't consider conditional requirements.
- if not r.condition():
- required[r.feature()] = r
-
- for p in properties:
- # Skip conditional properties
- if p.condition():
- result.add(p)
- # No processing for free properties
- elif p.feature().free():
- result.add(p)
- else:
- if required.has_key(p.feature()):
- result.add(required[p.feature()])
- else:
- result.add(p)
-
- return sequence.unique(list(result) + requirements)
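A grist-level illustration of the refinement rule, with free and conditional handling omitted and hypothetical helper names:

    def refine_sketch(properties, requirements):
        # Override a property whenever a requirement names the same feature
        # (the grist before '>') with a different value; keep everything else.
        required = {r.split(">")[0]: r for r in requirements}
        result = [required.get(p.split(">")[0], p) for p in properties]
        return result + [r for r in requirements if r not in result]

    print(refine_sketch(["<toolset>gcc", "<optimization>off"], ["<optimization>on"]))
    # ['<toolset>gcc', '<optimization>on']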
-
-def translate_paths (properties, path):
-    """ Interpret all path properties in 'properties' as relative to 'path'.
- The property values are assumed to be in system-specific form, and
- will be translated into normalized form.
- """
- result = []
-
- for p in properties:
-
- if p.feature().path():
- values = __re_two_ampersands.split(p.value())
-
- new_value = "&&".join(os.path.join(path, v) for v in values)
-
- if new_value != p.value():
- result.append(Property(p.feature(), new_value, p.condition()))
- else:
- result.append(p)
-
- else:
- result.append (p)
-
- return result
-
-def translate_indirect(properties, context_module):
- """Assumes that all feature values that start with '@' are
- names of rules, used in 'context-module'. Such rules can be
-    either local to the module or global. Qualifies local rules
-    with the name of the module."""
- result = []
- for p in properties:
- if p.value()[0] == '@':
- q = qualify_jam_action(p.value()[1:], context_module)
- get_manager().engine().register_bjam_action(q)
- result.append(Property(p.feature(), '@' + q, p.condition()))
- else:
- result.append(p)
-
- return result
-
-def validate (properties):
- """ Exit with error if any of the properties is not valid.
- properties may be a single property or a sequence of properties.
- """
-
- if isinstance (properties, str):
- __validate1 (properties)
- else:
- for p in properties:
- __validate1 (p)
-
-def expand_subfeatures_in_conditions (properties):
-
- result = []
- for p in properties:
-
- if not p.condition():
- result.append(p)
- else:
- expanded = []
- for c in p.condition():
-
- if c.feature().name().startswith("toolset") or c.feature().name() == "os":
-                    # It is common for a condition to include a toolset which
-                    # was never defined, or to mention subfeatures which
-                    # were never defined. In that case, validation would
-                    # only produce a spurious error, so don't validate.
- expanded.extend(feature.expand_subfeatures ([c], True))
- else:
- expanded.extend(feature.expand_subfeatures([c]))
-
- result.append(Property(p.feature(), p.value(), expanded))
-
- return result
-
-# FIXME: this should go
-def split_conditional (property):
-    """ If 'property' is a conditional property, returns the
-    condition and the property, e.g.
-    <variant>debug,<toolset>gcc:<inlining>full will become
-    <variant>debug,<toolset>gcc <inlining>full.
-    Otherwise, returns None.
- """
- m = __re_split_conditional.match (property)
-
- if m:
- return (m.group (1), '<' + m.group (2))
-
- return None
-
-
-def select (features, properties):
- """ Selects properties which correspond to any of the given features.
- """
- result = []
-
- # add any missing angle brackets
- features = add_grist (features)
-
- return [p for p in properties if get_grist(p) in features]
-
-def validate_property_sets (sets):
- for s in sets:
- validate(s.all())
-
-def evaluate_conditionals_in_context (properties, context):
-    """ Removes all conditional properties whose conditions are not met.
-    For those with met conditions, removes the condition. Properties
-    in conditions are looked up in 'context'.
- """
- base = []
- conditional = []
-
- for p in properties:
- if p.condition():
- conditional.append (p)
- else:
- base.append (p)
-
- result = base[:]
- for p in conditional:
-
- # Evaluate condition
- # FIXME: probably inefficient
- if all(x in context for x in p.condition()):
- result.append(Property(p.feature(), p.value()))
-
- return result
-
-
-def change (properties, feature, value = None):
- """ Returns a modified version of properties with all values of the
- given feature replaced by the given value.
- If 'value' is None the feature will be removed.
- """
- result = []
-
- feature = add_grist (feature)
-
- for p in properties:
- if get_grist (p) == feature:
- if value:
- result.append (replace_grist (value, feature))
-
- else:
- result.append (p)
-
- return result
-
-
-################################################################
-# Private functions
-
-def __validate1 (property):
- """ Exit with error if property is not valid.
- """
- msg = None
-
- if not property.feature().free():
- feature.validate_value_string (property.feature(), property.value())
-
-
-###################################################################
-# Still to port.
-# Original lines are prefixed with "# "
-#
-#
-# import utility : ungrist ;
-# import sequence : unique ;
-# import errors : error ;
-# import feature ;
-# import regex ;
-# import sequence ;
-# import set ;
-# import path ;
-# import assert ;
-#
-#
-
-
-# rule validate-property-sets ( property-sets * )
-# {
-# for local s in $(property-sets)
-# {
-# validate [ feature.split $(s) ] ;
-# }
-# }
-#
-
-def remove(attributes, properties):
-    """Returns a property set which includes all the elements
-    in 'properties' that do not have attributes listed in 'attributes'."""
-
- result = []
- for e in properties:
- attributes_new = feature.attributes(get_grist(e))
- has_common_features = 0
- for a in attributes_new:
- if a in attributes:
- has_common_features = 1
- break
-
- if not has_common_features:
-            result.append(e)
-
- return result
-
-
-def take(attributes, properties):
-    """Returns a property set which includes all
-    properties in 'properties' that have any of 'attributes'."""
- result = []
- for e in properties:
- if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
- result.append(e)
- return result
-
-def translate_dependencies(properties, project_id, location):
-
- result = []
- for p in properties:
-
- if not p.feature().dependency():
- result.append(p)
- else:
- v = p.value()
- m = re.match("(.*)//(.*)", v)
- if m:
- rooted = m.group(1)
- if rooted[0] == '/':
- # Either project id or absolute Linux path, do nothing.
- pass
- else:
- rooted = os.path.join(os.getcwd(), location, rooted)
-
- result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition()))
-
- elif os.path.isabs(v):
- result.append(p)
- else:
- result.append(Property(p.feature(), project_id + "//" + v, p.condition()))
-
- return result
-
-
-class PropertyMap:
- """ Class which maintains a property set -> string mapping.
- """
- def __init__ (self):
- self.__properties = []
- self.__values = []
-
- def insert (self, properties, value):
- """ Associate value with properties.
- """
- self.__properties.append(properties)
- self.__values.append(value)
-
- def find (self, properties):
- """ Return the value associated with properties
- or any subset of it. If more than one
-        subset has a value assigned to it, return the
-        value for the longest subset, if it is unique.
- """
- return self.find_replace (properties)
-
- def find_replace(self, properties, value=None):
- matches = []
- match_ranks = []
-
- for i in range(0, len(self.__properties)):
- p = self.__properties[i]
-
- if b2.util.set.contains (p, properties):
- matches.append (i)
- match_ranks.append(len(p))
-
- best = sequence.select_highest_ranked (matches, match_ranks)
-
- if not best:
- return None
-
- if len (best) > 1:
- raise NoBestMatchingAlternative ()
-
- best = best [0]
-
- original = self.__values[best]
-
- if value:
- self.__values[best] = value
-
- return original
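The lookup policy of PropertyMap in isolation: the value of the longest subset of the query wins, and two equally long best matches are an error. A self-contained sketch (not the b2 API):

    def find_best(entries, properties):
        # entries: list of (subset, value) pairs.
        matches = [(len(s), v) for s, v in entries if set(s) <= set(properties)]
        if not matches:
            return None
        matches.sort(reverse=True)
        if len(matches) > 1 and matches[0][0] == matches[1][0]:
            raise ValueError("Ambiguous key " + " ".join(properties))
        return matches[0][1]

    entries = [(["<toolset>gcc"], "o"), (["<toolset>gcc", "<os>NT"], "obj")]
    print(find_best(entries, ["<toolset>gcc", "<os>NT"]))   # obj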
-
-# local rule __test__ ( )
-# {
-# import errors : try catch ;
-# import feature ;
-# import feature : feature subfeature compose ;
-#
-# # local rules must be explicitly re-imported
-# import property : path-order ;
-#
-# feature.prepare-test property-test-temp ;
-#
-# feature toolset : gcc : implicit symmetric ;
-# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
-# 3.0 3.0.1 3.0.2 : optional ;
-# feature define : : free ;
-# feature runtime-link : dynamic static : symmetric link-incompatible ;
-# feature optimization : on off ;
-# feature variant : debug release : implicit composite symmetric ;
-# feature rtti : on off : link-incompatible ;
-#
-# compose <variant>debug : <define>_DEBUG <optimization>off ;
-# compose <variant>release : <define>NDEBUG <optimization>on ;
-#
-# import assert ;
-# import "class" : new ;
-#
-# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
-#
-# assert.result <toolset>gcc <rtti>off <define>FOO
-# : refine <toolset>gcc <rtti>off
-# : <define>FOO
-# : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc <optimization>on
-# : refine <toolset>gcc <optimization>off
-# : <optimization>on
-# : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc <rtti>off
-# : refine <toolset>gcc : <rtti>off : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
-# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
-# : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
-# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
-# : $(test-space)
-# ;
-#
-# assert.result <define>MY_RELEASE
-# : evaluate-conditionals-in-context
-# <variant>release,<rtti>off:<define>MY_RELEASE
-# : <toolset>gcc <variant>release <rtti>off
-#
-# ;
-#
-# try ;
-# validate <feature>value : $(test-space) ;
-# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
-#
-# try ;
-# validate <rtti>default : $(test-space) ;
-# catch \"default\" is not a known value of feature <rtti> ;
-#
-# validate <define>WHATEVER : $(test-space) ;
-#
-# try ;
-# validate <rtti> : $(test-space) ;
-# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
-#
-# try ;
-# validate value : $(test-space) ;
-# catch "value" is not a value of an implicit feature ;
-#
-#
-# assert.result <rtti>on
-# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
-#
-# assert.result <include>a
-# : select include : <include>a <toolset>gcc ;
-#
-# assert.result <include>a
-# : select include bar : <include>a <toolset>gcc ;
-#
-# assert.result <include>a <toolset>gcc
-# : select include <bar> <toolset> : <include>a <toolset>gcc ;
-#
-# assert.result <toolset>kylix <include>a
-# : change <toolset>gcc <include>a : <toolset> kylix ;
-#
-# # Test ordinary properties
-# assert.result
-# : split-conditional <toolset>gcc
-# ;
-#
-# # Test properties with ":"
-# assert.result
-# : split-conditional <define>FOO=A::B
-# ;
-#
-# # Test conditional feature
-# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
-# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
-# ;
-#
-# feature.finish-test property-test-temp ;
-# }
-#
-
diff --git a/jam-files/boost-build/build/property_set.py b/jam-files/boost-build/build/property_set.py
deleted file mode 100644
index f12eb90c..00000000
--- a/jam-files/boost-build/build/property_set.py
+++ /dev/null
@@ -1,449 +0,0 @@
-# Status: ported.
-# Base revision: 40480
-
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-from b2.util.utility import *
-import property, feature, string
-import b2.build.feature
-from b2.exceptions import *
-from b2.util.sequence import unique
-from b2.util.set import difference
-from b2.util import cached
-
-from b2.manager import get_manager
-
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __cache
-
- # A cache of property sets
- # TODO: use a map of weak refs?
- __cache = {}
-
-reset ()
-
-
-def create (raw_properties = []):
- """ Creates a new 'PropertySet' instance for the given raw properties,
- or returns an already existing one.
- """
- # FIXME: propagate to callers.
- if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
- x = raw_properties
- else:
- x = [property.create_from_string(ps) for ps in raw_properties]
- x.sort()
- x = unique (x)
-
- # FIXME: can we do better, e.g. by directly computing
-    # hash value of the list?
- key = tuple(x)
-
- if not __cache.has_key (key):
- __cache [key] = PropertySet(x)
-
- return __cache [key]
-
-def create_with_validation (raw_properties):
-    """ Creates a new 'PropertySet' instance after checking
- that all properties are valid and converting incidental
- properties into gristed form.
- """
- properties = [property.create_from_string(s) for s in raw_properties]
- property.validate(properties)
-
- return create(properties)
-
-def empty ():
-    """ Returns a PropertySet with an empty set of properties.
- """
- return create ()
-
-def create_from_user_input(raw_properties, jamfile_module, location):
- """Creates a property-set from the input given by the user, in the
- context of 'jamfile-module' at 'location'"""
-
- properties = property.create_from_strings(raw_properties, True)
- properties = property.translate_paths(properties, location)
- properties = property.translate_indirect(properties, jamfile_module)
-
- project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None)
- if not project_id:
- project_id = os.path.abspath(location)
- properties = property.translate_dependencies(properties, project_id, location)
- properties = property.expand_subfeatures_in_conditions(properties)
- return create(properties)
-
-
-def refine_from_user_input(parent_requirements, specification, jamfile_module,
- location):
- """Refines requirements with requirements provided by the user.
- Specially handles "-<property>value" syntax in specification
- to remove given requirements.
- - parent-requirements -- property-set object with requirements
- to refine
-    - specification -- string list of requirements provided by the user
- - project-module -- the module to which context indirect features
- will be bound.
- - location -- the path to which path features are relative."""
-
-
- if not specification:
- return parent_requirements
-
-
- add_requirements = []
- remove_requirements = []
-
- for r in specification:
- if r[0] == '-':
- remove_requirements.append(r[1:])
- else:
- add_requirements.append(r)
-
- if remove_requirements:
- # Need to create property set, so that path features
- # and indirect features are translated just like they
- # are in project requirements.
- ps = create_from_user_input(remove_requirements,
- jamfile_module, location)
-
- parent_requirements = create(difference(parent_requirements.all(),
- ps.all()))
- specification = add_requirements
-
- requirements = create_from_user_input(specification,
- jamfile_module, location)
-
- return parent_requirements.refine(requirements)
-
-class PropertySet:
- """ Class for storing a set of properties.
- - there's 1<->1 correspondence between identity and value. No
- two instances of the class are equal. To maintain this property,
- the 'PropertySet.create' rule should be used to create new instances.
- Instances are immutable.
-
-    - each property is classified with regard to its effect on build
-    results. Incidental properties have no effect on build results, from
-    Boost.Build's point of view. Others are either free, or non-free, which we
-    call 'base'. Each property belongs to exactly one of those categories and
-    it is possible to get the list of properties in each category.
-
-    In addition, it is possible to get the list of properties with a specific
-    attribute.
-
-    - several operations, like refine and as_path, are provided. They all use
-    caching whenever possible.
- """
- def __init__ (self, properties = []):
-
-
- raw_properties = []
- for p in properties:
- raw_properties.append(p.to_raw())
-
- self.all_ = properties
- self.all_raw_ = raw_properties
- self.all_set_ = set(properties)
-
- self.incidental_ = []
- self.free_ = []
- self.base_ = []
- self.dependency_ = []
- self.non_dependency_ = []
- self.conditional_ = []
- self.non_conditional_ = []
- self.propagated_ = []
- self.link_incompatible = []
-
- # A cache of refined properties.
- self.refined_ = {}
-
- # A cache of property sets created by adding properties to this one.
- self.added_ = {}
-
- # Cache for the default properties.
- self.defaults_ = None
-
- # Cache for the expanded properties.
- self.expanded_ = None
-
- # Cache for the expanded composite properties
- self.composites_ = None
-
- # Cache for property set with expanded subfeatures
- self.subfeatures_ = None
-
- # Cache for the property set containing propagated properties.
- self.propagated_ps_ = None
-
- # A map of features to its values.
- self.feature_map_ = None
-
- # A tuple (target path, is relative to build directory)
- self.target_path_ = None
-
- self.as_path_ = None
-
- # A cache for already evaluated sets.
- self.evaluated_ = {}
-
- for p in raw_properties:
- if not get_grist (p):
- raise BaseException ("Invalid property: '%s'" % p)
-
- att = feature.attributes (get_grist (p))
-
- if 'propagated' in att:
- self.propagated_.append (p)
-
- if 'link_incompatible' in att:
- self.link_incompatible.append (p)
-
- for p in properties:
-
- # A feature can be both incidental and free,
- # in which case we add it to incidental.
- if p.feature().incidental():
- self.incidental_.append(p)
- elif p.feature().free():
- self.free_.append(p)
- else:
- self.base_.append(p)
-
- if p.condition():
- self.conditional_.append(p)
- else:
- self.non_conditional_.append(p)
-
- if p.feature().dependency():
- self.dependency_.append (p)
- else:
- self.non_dependency_.append (p)
-
-
- def all(self):
- return self.all_
-
- def raw (self):
- """ Returns the list of stored properties.
- """
- return self.all_raw_
-
- def __str__(self):
- return ' '.join(str(p) for p in self.all_)
-
- def base (self):
- """ Returns properties that are neither incidental nor free.
- """
- return self.base_
-
- def free (self):
-        """ Returns free properties which are not incidental.
- """
- return self.free_
-
- def non_free(self):
- return self.base_ + self.incidental_
-
- def dependency (self):
- """ Returns dependency properties.
- """
- return self.dependency_
-
- def non_dependency (self):
- """ Returns properties that are not dependencies.
- """
- return self.non_dependency_
-
- def conditional (self):
- """ Returns conditional properties.
- """
- return self.conditional_
-
- def non_conditional (self):
- """ Returns properties that are not conditional.
- """
- return self.non_conditional_
-
- def incidental (self):
- """ Returns incidental properties.
- """
- return self.incidental_
-
- def refine (self, requirements):
- """ Refines this set's properties using the requirements passed as an argument.
- """
- assert isinstance(requirements, PropertySet)
- if not self.refined_.has_key (requirements):
- r = property.refine(self.all_, requirements.all_)
-
- self.refined_[requirements] = create(r)
-
- return self.refined_[requirements]
-
- def expand (self):
- if not self.expanded_:
- expanded = feature.expand(self.all_)
- self.expanded_ = create(expanded)
- return self.expanded_
-
- def expand_subfeatures(self):
- if not self.subfeatures_:
- self.subfeatures_ = create(feature.expand_subfeatures(self.all_))
- return self.subfeatures_
-
- def evaluate_conditionals(self, context=None):
- if not context:
- context = self
-
- if not self.evaluated_.has_key(context):
- # FIXME: figure why the call messes up first parameter
- self.evaluated_[context] = create(
- property.evaluate_conditionals_in_context(self.all(), context))
-
- return self.evaluated_[context]
-
- def propagated (self):
- if not self.propagated_ps_:
- self.propagated_ps_ = create (self.propagated_)
- return self.propagated_ps_
-
- def add_defaults (self):
- # FIXME: this caching is invalidated when new features
-        # are declared inside non-root Jamfiles.
- if not self.defaults_:
- expanded = feature.add_defaults(self.all_)
- self.defaults_ = create(expanded)
- return self.defaults_
-
- def as_path (self):
- if not self.as_path_:
-
- def path_order (p1, p2):
-
- i1 = p1.feature().implicit()
- i2 = p2.feature().implicit()
-
- if i1 != i2:
- return i2 - i1
- else:
- return cmp(p1.feature().name(), p2.feature().name())
-
- # trim redundancy
- properties = feature.minimize(self.base_)
-
- # sort according to path_order
- properties.sort (path_order)
-
- components = []
- for p in properties:
- if p.feature().implicit():
- components.append(p.value())
- else:
- components.append(p.feature().name() + "-" + p.value())
-
- self.as_path_ = '/'.join (components)
-
- return self.as_path_
-
- def target_path (self):
- """ Computes the target path that should be used for
-            a target with these properties.
- Returns a tuple of
- - the computed path
- - if the path is relative to build directory, a value of
- 'true'.
- """
- if not self.target_path_:
- # The <location> feature can be used to explicitly
- # change the location of generated targets
- l = self.get ('<location>')
- if l:
- computed = l[0]
- is_relative = False
-
- else:
- p = self.as_path ()
-
-                # Really, an ugly hack. The Boost regression test system requires
-                # specific target paths, and it seems that changing it to handle
-                # another directory layout is really hard. For that reason,
-                # we teach V2 to do the things the regression system requires.
-                # The value of '<location-prefix>' is prepended to the path.
- prefix = self.get ('<location-prefix>')
-
- if prefix:
- if len (prefix) > 1:
- raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)
-
- computed = os.path.join(prefix[0], p)
-
- else:
- computed = p
-
- if not computed:
- computed = "."
-
- is_relative = True
-
- self.target_path_ = (computed, is_relative)
-
- return self.target_path_
-
- def add (self, ps):
- """ Creates a new property set containing the properties in this one,
- plus the ones of the property set passed as argument.
- """
- if not self.added_.has_key(ps):
- self.added_[ps] = create(self.all_ + ps.all())
- return self.added_[ps]
-
- def add_raw (self, properties):
- """ Creates a new property set containing the properties in this one,
- plus the ones passed as argument.
- """
- return self.add (create (properties))
-
-
- def get (self, feature):
- """ Returns all values of 'feature'.
- """
- if type(feature) == type([]):
- feature = feature[0]
- if not isinstance(feature, b2.build.feature.Feature):
- feature = b2.build.feature.get(feature)
-
- if not self.feature_map_:
- self.feature_map_ = {}
-
- for v in self.all_:
- if not self.feature_map_.has_key(v.feature()):
- self.feature_map_[v.feature()] = []
- self.feature_map_[v.feature()].append(v.value())
-
- return self.feature_map_.get(feature, [])
-
- @cached
- def get_properties(self, feature):
- """Returns all contained properties associated with 'feature'"""
-
- if not isinstance(feature, b2.build.feature.Feature):
- feature = b2.build.feature.get(feature)
-
- result = []
- for p in self.all_:
- if p.feature() == feature:
- result.append(p)
- return result
-
- def __contains__(self, item):
- return item in self.all_set_
-
diff --git a/jam-files/boost-build/build/readme.txt b/jam-files/boost-build/build/readme.txt
deleted file mode 100644
index c3dddd8d..00000000
--- a/jam-files/boost-build/build/readme.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2001, 2002 Dave Abrahams
-Copyright 2002 Vladimir Prus
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-Development code for new build system. To run unit tests for jam code, execute:
-
- bjam --debug --build-system=test
-
-Comprehensive tests require Python. See ../test/readme.txt
-
-
-
diff --git a/jam-files/boost-build/build/scanner.jam b/jam-files/boost-build/build/scanner.jam
deleted file mode 100644
index d6042ea2..00000000
--- a/jam-files/boost-build/build/scanner.jam
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements scanners: objects that compute implicit dependencies for
-# files, such as includes in C++.
-#
-# Scanner has a regular expression used to find dependencies, some
-# data needed to interpret those dependencies (for example, include
-# paths), and code which actually establishes the needed relationships
-# between actual jam targets.
-#
-# Scanner objects are created by actions, when they try to actualize
-# virtual targets, passed to 'virtual-target.actualize' method and are
-# then associated with actual targets. It is possible to use
-# several scanners for a virtual-target. For example, a single source
-# might be used by two compile actions, with different include paths.
-# In this case, two different actual targets will be created, each
-# having a scanner of its own.
-#
-# Typically, scanners are created from target type and action's
-# properties, using the rule 'get' in this module. Directly creating
-# scanners is not recommended, because it might create many equivalent
-# but different instances, and lead to unneeded duplication of
-# actual targets. However, actions can also create scanners in a special
-# way, instead of relying on just target type.
-
-import "class" : new ;
-import property virtual-target property-set ;
-import errors : error ;
-
-# Base scanner class.
-class scanner
-{
- rule __init__ ( )
- {
- }
-
- # Returns a pattern to use for scanning
- rule pattern ( )
- {
-        error "method must be overridden" ;
-    }
-
-    # Establishes the necessary relationships between targets,
-    # given the actual target being scanned, and a list of
-    # pattern matches in that file.
-    rule process ( target : matches * )
-    {
-        error "method must be overridden" ;
- }
-}
-
-# Registers a new scanner class, specifying a set of
-# properties relevant to this scanner. The constructor for that class
-# should take one parameter: a list of properties.
-rule register ( scanner-class : relevant-properties * )
-{
- .registered += $(scanner-class) ;
- .relevant-properties.$(scanner-class) = $(relevant-properties) ;
-}
-
-# Common scanner class, which can be used when there's only one
-# kind of includes (unlike C, where "" and <> includes have different
-# search paths).
-class common-scanner : scanner
-{
- import scanner ;
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
- self.includes = $(includes) ;
- }
-
- rule process ( target : matches * : binding )
- {
- local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
-
- NOCARE $(matches) ;
- INCLUDES $(target) : $(matches) ;
- SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
- ISFILE $(matches) ;
-
- scanner.propagate $(__name__) : $(matches) : $(target) ;
- }
-}
-
-
-# Returns an instance of previously registered scanner,
-# with the specified properties.
-rule get ( scanner-class : property-set )
-{
- if ! $(scanner-class) in $(.registered)
- {
-        error "attempt to get unregistered scanner" ;
- }
-
- local r = $(.rv-cache.$(property-set)) ;
- if ! $(r)
- {
- r = [ property-set.create
- [ property.select $(.relevant-properties.$(scanner-class)) :
- [ $(property-set).raw ] ] ] ;
- .rv-cache.$(property-set) = $(r) ;
- }
-
- if ! $(scanner.$(scanner-class).$(r:J=-))
- {
- scanner.$(scanner-class).$(r:J=-) = [ new $(scanner-class) [ $(r).raw ] ] ;
- }
- return $(scanner.$(scanner-class).$(r:J=-)) ;
-}
-
-
-# Installs the specified scanner on actual target 'target'.
-rule install ( scanner : target
- vtarget # virtual target from which 'target' was actualized
-)
-{
- HDRSCAN on $(target) = [ $(scanner).pattern ] ;
- SCANNER on $(target) = $(scanner) ;
- HDRRULE on $(target) = scanner.hdrrule ;
-
-    # The scanner reflects differences in properties affecting
-    # the binding of 'target'. Those differences will be known when
-    # processing includes for it and will give information on how to
-    # interpret quoted includes.
- HDRGRIST on $(target) = $(scanner) ;
-}
-
-# Propagate scanner setting from 'including-target' to 'targets'.
-rule propagate ( scanner : targets * : including-target )
-{
- HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
- SCANNER on $(targets) = $(scanner) ;
- HDRRULE on $(targets) = scanner.hdrrule ;
- HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
-}
-
-
-rule hdrrule ( target : matches * : binding )
-{
- local scanner = [ on $(target) return $(SCANNER) ] ;
- $(scanner).process $(target) : $(matches) : $(binding) ;
-}
-# hdrrule must be available at global scope so that it can be invoked
-# by header scanning
-IMPORT scanner : hdrrule : : scanner.hdrrule ;
-
-
-
-
diff --git a/jam-files/boost-build/build/scanner.py b/jam-files/boost-build/build/scanner.py
deleted file mode 100644
index 19f1431d..00000000
--- a/jam-files/boost-build/build/scanner.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Status: ported.
-# Base revision: 45462
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements scanners: objects that compute implicit dependencies for
-# files, such as includes in C++.
-#
-# Scanner has a regular expression used to find dependencies, some
-# data needed to interpret those dependencies (for example, include
-# paths), and code which actually establishes the needed relationships
-# between actual jam targets.
-#
-# Scanner objects are created by actions, when they try to actualize
-# virtual targets, passed to 'virtual-target.actualize' method and are
-# then associated with actual targets. It is possible to use
-# several scanners for a virtual-target. For example, a single source
-# might be used by two compile actions, with different include paths.
-# In this case, two different actual targets will be created, each
-# having a scanner of its own.
-#
-# Typically, scanners are created from target type and action's
-# properties, using the rule 'get' in this module. Directly creating
-# scanners is not recommended, because it might create many equivalent
-# but different instances, and lead to unneeded duplication of
-# actual targets. However, actions can also create scanners in a special
-# way, instead of relying on just target type.
-
-import property
-import bjam
-import os
-from b2.exceptions import *
-from b2.manager import get_manager
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __scanners, __rv_cache, __scanner_cache
-
- # Maps registered scanner classes to relevant properties
- __scanners = {}
-
- # A cache of scanners.
- # The key is: class_name.properties_tag, where properties_tag is the concatenation
- # of all relevant properties, separated by '-'
- __scanner_cache = {}
-
-reset ()
-
-
-def register(scanner_class, relevant_properties):
-    """ Registers a new scanner class, specifying a set of
-        properties relevant to this scanner. The constructor for that class
-        should take one parameter: a list of properties.
- """
- __scanners[str(scanner_class)] = relevant_properties
-
-def registered(scanner_class):
- """ Returns true iff a scanner of that class is registered
- """
- return __scanners.has_key(str(scanner_class))
-
-def get(scanner_class, properties):
- """ Returns an instance of previously registered scanner
- with the specified properties.
- """
- scanner_name = str(scanner_class)
-
- if not registered(scanner_name):
-        raise BaseException ("attempt to get unregistered scanner: %s" % scanner_name)
-
- relevant_properties = __scanners[scanner_name]
- r = property.select(relevant_properties, properties)
-
- scanner_id = scanner_name + '.' + '-'.join(r)
-
-    if not __scanner_cache.has_key(scanner_id):
-        __scanner_cache[scanner_id] = scanner_class(r)
-
-    return __scanner_cache[scanner_id]
-
-class Scanner:
- """ Base scanner class.
- """
- def __init__ (self):
- pass
-
- def pattern (self):
- """ Returns a pattern to use for scanning.
- """
-        raise BaseException ("method must be overridden")
-
-    def process (self, target, matches):
-        """ Establishes the necessary relationships between targets,
-            given the actual target being scanned, and a list of
-            pattern matches in that file.
-        """
-        raise BaseException ("method must be overridden")
-
-
-# Common scanner class, which can be used when there's only one
-# kind of includes (unlike C, where "" and <> includes have different
-# search paths).
-class CommonScanner(Scanner):
-
- def __init__ (self, includes):
- Scanner.__init__(self)
- self.includes = includes
-
- def process(self, target, matches, binding):
-
- target_path = os.path.normpath(os.path.dirname(binding[0]))
- bjam.call("mark-included", target, matches)
-
- get_manager().engine().set_target_variable(matches, "SEARCH",
- [target_path] + self.includes)
- get_manager().scanners().propagate(self, matches)
-
-class ScannerRegistry:
-
- def __init__ (self, manager):
- self.manager_ = manager
- self.count_ = 0
- self.exported_scanners_ = {}
-
- def install (self, scanner, target, vtarget):
- """ Installs the specified scanner on actual target 'target'.
- vtarget: virtual target from which 'target' was actualized.
- """
- engine = self.manager_.engine()
- engine.set_target_variable(target, "HDRSCAN", scanner.pattern())
- if not self.exported_scanners_.has_key(scanner):
- exported_name = "scanner_" + str(self.count_)
- self.count_ = self.count_ + 1
- self.exported_scanners_[scanner] = exported_name
- bjam.import_rule("", exported_name, scanner.process)
- else:
- exported_name = self.exported_scanners_[scanner]
-
- engine.set_target_variable(target, "HDRRULE", exported_name)
-
-        # The scanner reflects differences in properties affecting
-        # the binding of 'target'. Those differences will be known when
-        # processing includes for it and will give information on how to
-        # interpret quoted includes.
- engine.set_target_variable(target, "HDRGRIST", str(id(scanner)))
- pass
-
- def propagate(self, scanner, targets):
- engine = self.manager_.engine()
- engine.set_target_variable(targets, "HDRSCAN", scanner.pattern())
- engine.set_target_variable(targets, "HDRRULE",
- self.exported_scanners_[scanner])
- engine.set_target_variable(targets, "HDRGRIST", str(id(scanner)))
-
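
register() and get() above cache scanner instances under a key built from the
class name plus only the properties declared relevant, so equivalent scanners
are shared rather than duplicated per target. A small self-contained sketch of
that caching idea with a toy include scanner (the names below are illustrative
and do not reference the real b2 modules):

    import re

    _scanners = {}        # scanner class name -> relevant feature names
    _scanner_cache = {}   # "ClassName.prop1-prop2" -> scanner instance

    def register(scanner_class, relevant_properties):
        _scanners[scanner_class.__name__] = relevant_properties

    def get(scanner_class, properties):
        # Only relevant properties take part in the cache key, so two targets
        # differing only in irrelevant properties share one scanner instance.
        name = scanner_class.__name__
        relevant = [p for p in properties
                    if p.split(">")[0] + ">" in _scanners[name]]
        key = name + "." + "-".join(relevant)
        if key not in _scanner_cache:
            _scanner_cache[key] = scanner_class(relevant)
        return _scanner_cache[key]

    class IncludeScanner:
        """Toy analogue of a common scanner: finds #include "..." lines."""
        pattern = re.compile(r'#\s*include\s+"([^"]+)"')
        def __init__(self, properties):
            self.properties = properties
        def process(self, text):
            return self.pattern.findall(text)

    register(IncludeScanner, ["<include>"])
    s1 = get(IncludeScanner, ['<include>/usr/local', '<define>NDEBUG'])
    s2 = get(IncludeScanner, ['<include>/usr/local'])
    assert s1 is s2   # same relevant properties -> same cached instance
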
diff --git a/jam-files/boost-build/build/targets.jam b/jam-files/boost-build/build/targets.jam
deleted file mode 100644
index a70532ce..00000000
--- a/jam-files/boost-build/build/targets.jam
+++ /dev/null
@@ -1,1659 +0,0 @@
-# Copyright Vladimir Prus 2002.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports 'abstract' targets, which are targets explicitly defined in a
-# Jamfile.
-#
-# Abstract targets are represented by classes derived from 'abstract-target'
-# class. The first abstract target is 'project-target', which is created for
-# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module
-# (see project.jam).
-#
-# Project targets keep a list of 'main-target' instances. A main target is what
-# the user explicitly defines in a Jamfile. It is possible to have several
-# definitions for a main target, for example to have different lists of sources
-# for different platforms. So, main targets keep a list of alternatives.
-#
-# Each alternative is an instance of 'abstract-target'. When a main target
-# subvariant is defined by some rule, that rule will decide what class to use,
-# create an instance of that class and add it to the list of alternatives for
-# the main target.
-#
-# Rules supplied by the build system will use only targets derived from
-# 'basic-target' class, which will provide some default behaviour. There will be
-# different classes derived from it such as 'make-target', created by the 'make'
-# rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
-
-#
-# +------------------------+
-# |abstract-target |
-# +========================+
-# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project-target | | main-target | | basic-target |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
-# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
-# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
-# | | typed-target | | make-target | | stage-target |
-# . +==============+ +=============+ +==============+
-# . | construct | | construct | | construct |
-# +--------------+ +-------------+ +--------------+
-
-import assert ;
-import "class" : new ;
-import errors ;
-import feature ;
-import indirect ;
-import path ;
-import property ;
-import property-set ;
-import sequence ;
-import set ;
-import toolset ;
-import build-request ;
-
-
-# Base class for all abstract targets.
-#
-class abstract-target
-{
- import project ;
- import assert ;
- import "class" ;
- import errors ;
-
- rule __init__ ( name # Name of the target in Jamfile.
- : project-target # The project target to which this one belongs.
- )
- {
- # Note: it might seem that we don't need either name or project at all.
- # However, there are places where we really need it. One example is
- # error messages which should name problematic targets. Another is
- # setting correct paths for sources and generated files.
-
- self.name = $(name) ;
- self.project = $(project-target) ;
- self.location = [ errors.nearest-user-location ] ;
- }
-
- # Returns the name of this target.
- rule name ( )
- {
- return $(self.name) ;
- }
-
- # Returns the project for this target.
- rule project ( )
- {
- return $(self.project) ;
- }
-
- # Return the location where the target was declared.
- rule location ( )
- {
- return $(self.location) ;
- }
-
- # Returns a user-readable name for this target.
- rule full-name ( )
- {
- local location = [ $(self.project).get location ] ;
- return $(location)/$(self.name) ;
- }
-
- # Generates virtual targets for this abstract target using the specified
- # properties, unless a different value of some feature is required by the
- # target.
- # On success, returns:
- # - a property-set with the usage requirements to be applied to dependants
- # - a list of produced virtual targets, which may be empty.
- # If 'property-set' is empty, performs the default build of this target, in
- # a way specific to the derived class.
- #
- rule generate ( property-set )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- rule rename ( new-name )
- {
- self.name = $(new-name) ;
- }
-}
-
-
-if --debug-building in [ modules.peek : ARGV ]
-{
- modules.poke : .debug-building : true ;
-}
-
-
-rule indent ( )
-{
- return $(.indent:J="") ;
-}
-
-
-rule increase-indent ( )
-{
- .indent += " " ;
-}
-
-
-rule decrease-indent ( )
-{
- .indent = $(.indent[2-]) ;
-}
-
-
-# Project target class (derived from 'abstract-target').
-#
-# This class has the following responsibilities:
-# - Maintaining a list of main targets in this project and building them.
-#
-# Main targets are constructed in two stages:
-#   - When a Jamfile is read, a number of calls to 'add-alternative' are made. At
-# that time, alternatives can also be renamed to account for inline targets.
-# - The first time 'main-target' or 'has-main-target' rule is called, all
-# alternatives are enumerated and main targets are created.
-#
-class project-target : abstract-target
-{
- import project ;
- import targets ;
- import path ;
- import print ;
- import property-set ;
- import set ;
- import sequence ;
- import "class" : new ;
- import errors ;
-
- rule __init__ ( name : project-module parent-project ?
- : requirements * : default-build * )
- {
- abstract-target.__init__ $(name) : $(__name__) ;
-
- self.project-module = $(project-module) ;
- self.location = [ project.attribute $(project-module) location ] ;
- self.requirements = $(requirements) ;
- self.default-build = $(default-build) ;
-
- if $(parent-project)
- {
- inherit $(parent-project) ;
- }
- }
-
- # This is needed only by the 'make' rule. Need to find the way to make
- # 'make' work without this method.
- #
- rule project-module ( )
- {
- return $(self.project-module) ;
- }
-
- rule get ( attribute )
- {
- return [ project.attribute $(self.project-module) $(attribute) ] ;
- }
-
- rule build-dir ( )
- {
- if ! $(self.build-dir)
- {
- self.build-dir = [ get build-dir ] ;
- if ! $(self.build-dir)
- {
- self.build-dir = [ path.join [ $(self.project).get location ]
- bin ] ;
- }
- }
- return $(self.build-dir) ;
- }
-
- # Generates all possible targets contained in this project.
- #
- rule generate ( property-set * )
- {
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "building project" [ name ] " ('$(__name__)') with" [ $(property-set).raw ] ;
- targets.increase-indent ;
- }
-
- local usage-requirements = [ property-set.empty ] ;
- local targets ;
-
- for local t in [ targets-to-build ]
- {
- local g = [ $(t).generate $(property-set) ] ;
- usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
- targets += $(g[2-]) ;
- }
- targets.decrease-indent ;
- return $(usage-requirements) [ sequence.unique $(targets) ] ;
- }
-
- # Computes and returns a list of abstract-target instances which must be
- # built when this project is built.
- #
- rule targets-to-build ( )
- {
- local result ;
-
- if ! $(self.built-main-targets)
- {
- build-main-targets ;
- }
-
- # Collect all main targets here, except for "explicit" ones.
- for local t in $(self.main-targets)
- {
- if ! [ $(t).name ] in $(self.explicit-targets)
- {
- result += $(t) ;
- }
- }
-
- # Collect all projects referenced via "projects-to-build" attribute.
- local self-location = [ get location ] ;
- for local pn in [ get projects-to-build ]
- {
- result += [ find $(pn)/ ] ;
- }
-
- return $(result) ;
- }
-
-    # Add 'target' to the list of targets in this project that should be built
-    # only by explicit request.
- #
- rule mark-target-as-explicit ( target-name * )
- {
- # Record the name of the target, not instance, since this rule is called
- # before main target instances are created.
- self.explicit-targets += $(target-name) ;
- }
-
- rule mark-target-as-always ( target-name * )
- {
- # Record the name of the target, not instance, since this rule is called
- # before main target instances are created.
- self.always-targets += $(target-name) ;
- }
-
- # Add new target alternative
- #
- rule add-alternative ( target-instance )
- {
- if $(self.built-main-targets)
- {
- errors.error add-alternative called when main targets are already
- created. : in project [ full-name ] ;
- }
- self.alternatives += $(target-instance) ;
- }
-
- # Returns a 'main-target' class instance corresponding to 'name'.
- #
- rule main-target ( name )
- {
- if ! $(self.built-main-targets)
- {
- build-main-targets ;
- }
- return $(self.main-target.$(name)) ;
- }
-
- # Returns whether a main target with the specified name exists.
- #
- rule has-main-target ( name )
- {
- if ! $(self.built-main-targets)
- {
- build-main-targets ;
- }
-
- if $(self.main-target.$(name))
- {
- return true ;
- }
- }
-
- # Worker function for the find rule not implementing any caching and simply
- # returning nothing in case the target can not be found.
- #
- rule find-really ( id )
- {
- local result ;
- local current-location = [ get location ] ;
-
- local split = [ MATCH (.*)//(.*) : $(id) ] ;
- local project-part = $(split[1]) ;
- local target-part = $(split[2]) ;
-
- local extra-error-message ;
- if $(project-part)
- {
- # There is an explicitly specified project part in id. Looks up the
- # project and passes the request to it.
- local pm = [ project.find $(project-part) : $(current-location) ] ;
- if $(pm)
- {
- project-target = [ project.target $(pm) ] ;
- result = [ $(project-target).find $(target-part) : no-error ] ;
- }
- else
- {
- # TODO: This extra error message will not get displayed most
- # likely due to some buggy refactoring. Refactor the code so the
-                # message gets displayed again.
- extra-error-message = error: could not find project
- '$(project-part)' ;
- }
- }
- else
- {
- # Interpret target-name as name of main target. Need to do this
- # before checking for file. Consider the following scenario with a
- # toolset not modifying its executable's names, e.g. gcc on
- # Unix-like platforms:
- #
- # exe test : test.cpp ;
- # install s : test : <location>. ;
- #
- # After the first build we would have a target named 'test' in the
- # Jamfile and a file named 'test' on the disk. We need the target to
- # override the file.
- result = [ main-target $(id) ] ;
-
- # Interpret id as an existing file reference.
- if ! $(result)
- {
- result = [ new file-reference [ path.make $(id) ] :
- $(self.project) ] ;
- if ! [ $(result).exists ]
- {
- result = ;
- }
- }
-
- # Interpret id as project-id.
- if ! $(result)
- {
- local project-module = [ project.find $(id) :
- $(current-location) ] ;
- if $(project-module)
- {
- result = [ project.target $(project-module) ] ;
- }
- }
- }
-
- return $(result) ;
- }
-
- # Find and return the target with the specified id, treated relative to
- # self. Id may specify either a target or a file name with the target taking
- # priority. May report an error or return nothing if the target is not found
- # depending on the 'no-error' parameter.
- #
- rule find ( id : no-error ? )
- {
- local v = $(.id.$(id)) ;
- if ! $(v)
- {
- v = [ find-really $(id) ] ;
- if ! $(v)
- {
- v = none ;
- }
- .id.$(id) = $(v) ;
- }
-
- if $(v) != none
- {
- return $(v) ;
- }
- else
- {
- if ! $(no-error)
- {
- local current-location = [ get location ] ;
- ECHO "error: Unable to find file or target named" ;
- ECHO "error: '$(id)'" ;
- ECHO "error: referred from project at" ;
- ECHO "error: '$(current-location)'" ;
- ECHO $(extra-error-message) ;
- EXIT ;
- }
- }
- }
-
- rule build-main-targets ( )
- {
- self.built-main-targets = true ;
- for local a in $(self.alternatives)
- {
- local name = [ $(a).name ] ;
- local target = $(self.main-target.$(name)) ;
- if ! $(target)
- {
- local t = [ new main-target $(name) : $(self.project) ] ;
- self.main-target.$(name) = $(t) ;
- self.main-targets += $(t) ;
- target = $(self.main-target.$(name)) ;
- }
-
- if $(name) in $(self.always-targets)
- {
- $(a).always ;
- }
-
- $(target).add-alternative $(a) ;
- }
- }
-
- # Accessor, add a constant.
- #
- rule add-constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- : type ? # Optional type of value.
- )
- {
- switch $(type)
- {
- case path :
- local r ;
- for local v in $(value)
- {
- local l = $(self.location) ;
- if ! $(l)
- {
-                        # Projects corresponding to config files do not have a
-                        # 'location' attribute, but do have a source location.
- # It might be more reasonable to make every project have
- # a location and use some other approach to prevent buildable
- # targets in config files, but that's for later.
- l = [ get source-location ] ;
- }
- v = [ path.root [ path.make $(v) ] $(l) ] ;
- # Now make the value absolute path.
- v = [ path.root $(v) [ path.pwd ] ] ;
- # Constants should be in platform-native form.
- v = [ path.native $(v) ] ;
- r += $(v) ;
- }
- value = $(r) ;
- }
- if ! $(name) in $(self.constants)
- {
- self.constants += $(name) ;
- }
- self.constant.$(name) = $(value) ;
- # Inject the constant in the scope of the Jamroot module.
- modules.poke $(self.project-module) : $(name) : $(value) ;
- }
-
- rule inherit ( parent )
- {
- for local c in [ modules.peek $(parent) : self.constants ]
- {
- # No need to pass the type. Path constants were converted to
- # absolute paths already by parent.
- add-constant $(c)
- : [ modules.peek $(parent) : self.constant.$(c) ] ;
- }
-
- # Import rules from parent.
- local this-module = [ project-module ] ;
- local parent-module = [ $(parent).project-module ] ;
- # Do not import rules coming from 'project-rules' as they must be
- # imported localized.
- local user-rules = [ set.difference
- [ RULENAMES $(parent-module) ] :
- [ RULENAMES project-rules ] ] ;
- IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
- EXPORT $(this-module) : $(user-rules) ;
- }
-}
-
-
-# Helper rules to detect cycles in main target references.
-#
-local rule start-building ( main-target-instance )
-{
- if $(main-target-instance) in $(.targets-being-built)
- {
- local names ;
- for local t in $(.targets-being-built) $(main-target-instance)
- {
- names += [ $(t).full-name ] ;
- }
-
- errors.error "Recursion in main target references"
-            : "the following targets are currently being built:"
- : $(names) ;
- }
- .targets-being-built += $(main-target-instance) ;
-}
-
-
-local rule end-building ( main-target-instance )
-{
- .targets-being-built = $(.targets-being-built[1--2]) ;
-}
-
-
-# A named top-level target in Jamfile.
-#
-class main-target : abstract-target
-{
- import assert ;
- import errors ;
- import feature ;
- import print ;
- import property-set ;
- import sequence ;
- import targets : start-building end-building ;
-
- rule __init__ ( name : project )
- {
- abstract-target.__init__ $(name) : $(project) ;
- }
-
- # Add a new alternative for this target
- rule add-alternative ( target )
- {
- local d = [ $(target).default-build ] ;
- if $(self.alternatives) && ( $(self.default-build) != $(d) )
- {
- errors.error "default build must be identical in all alternatives"
- : "main target is" [ full-name ]
- : "with" [ $(d).raw ]
- : "differing from previous default build" [ $(self.default-build).raw ] ;
- }
- else
- {
- self.default-build = $(d) ;
- }
- self.alternatives += $(target) ;
- }
-
- # Returns the best viable alternative for this property-set. See the
- # documentation for selection rules.
- #
- local rule select-alternatives ( property-set debug ? )
- {
- # When selecting alternatives we have to consider defaults, for example:
- # lib l : l.cpp : <variant>debug ;
- # lib l : l_opt.cpp : <variant>release ;
-        # won't work unless we add the default value <variant>debug.
-        property-set = [ $(property-set).add-defaults ] ;
-
-        # The algorithm: we keep the current best viable alternative. When we
-        # find a new viable alternative, we compare it with the current best one.
-
- local best ;
- local best-properties ;
-
- if $(self.alternatives[2-])
- {
- local bad ;
- local worklist = $(self.alternatives) ;
- while $(worklist) && ! $(bad)
- {
- local v = $(worklist[1]) ;
- local properties = [ $(v).match $(property-set) $(debug) ] ;
-
- if $(properties) != no-match
- {
- if ! $(best)
- {
- best = $(v) ;
- best-properties = $(properties) ;
- }
- else
- {
- if $(properties) = $(best-properties)
- {
- bad = true ;
- }
- else if $(properties) in $(best-properties)
- {
- # Do nothing, this alternative is worse
- }
- else if $(best-properties) in $(properties)
- {
- best = $(v) ;
- best-properties = $(properties) ;
- }
- else
- {
- bad = true ;
- }
- }
- }
- worklist = $(worklist[2-]) ;
- }
- if ! $(bad)
- {
- return $(best) ;
- }
- }
- else
- {
- return $(self.alternatives) ;
- }
- }
-
- rule apply-default-build ( property-set )
- {
- return [ targets.apply-default-build $(property-set)
- : $(self.default-build) ] ;
- }
-
- # Select an alternative for this main target, by finding all alternatives
- # which requirements are satisfied by 'properties' and picking the one with
- # the longest requirements set. Returns the result of calling 'generate' on
- # that alternative.
- #
- rule generate ( property-set )
- {
- start-building $(__name__) ;
-
-        # We want composite properties in the build request to act as if all
-        # the properties they expand to were explicitly specified.
- property-set = [ $(property-set).expand ] ;
-
- local all-property-sets = [ apply-default-build $(property-set) ] ;
- local usage-requirements = [ property-set.empty ] ;
- local result ;
- for local p in $(all-property-sets)
- {
- local r = [ generate-really $(p) ] ;
- if $(r)
- {
- usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
- result += $(r[2-]) ;
- }
- }
- end-building $(__name__) ;
- return $(usage-requirements) [ sequence.unique $(result) ] ;
- }
-
-    # Generates the main target with the given property set and returns a list
-    # whose first element is a property-set object containing the usage
-    # requirements of the generated target, with the generated virtual targets
-    # in the remaining elements. It is possible that no targets are generated.
- #
- local rule generate-really ( property-set )
- {
- local best-alternatives = [ select-alternatives $(property-set) ] ;
- if ! $(best-alternatives)
- {
- ECHO "error: No best alternative for" [ full-name ] ;
- select-alternatives $(property-set) debug ;
- return [ property-set.empty ] ;
- }
- else
- {
- # Now return virtual targets for the only alternative.
- return [ $(best-alternatives).generate $(property-set) ] ;
- }
- }
-
- rule rename ( new-name )
- {
- abstract-target.rename $(new-name) ;
- for local a in $(self.alternatives)
- {
- $(a).rename $(new-name) ;
- }
- }
-}
-
-
-# Abstract target referring to a source file. This is an artificial entity
-# allowing the sources of a target to be represented using a list of abstract
-# target instances.
-#
-class file-reference : abstract-target
-{
- import virtual-target ;
- import property-set ;
- import path ;
-
- rule __init__ ( file : project )
- {
- abstract-target.__init__ $(file) : $(project) ;
- }
-
- rule generate ( properties )
- {
- return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
- [ location ] : $(self.project) ] ;
- }
-
- # Returns true if the referred file really exists.
- rule exists ( )
- {
- location ;
- return $(self.file-path) ;
- }
-
- # Returns the location of target. Needed by 'testing.jam'.
- rule location ( )
- {
- if ! $(self.file-location)
- {
- local source-location = [ $(self.project).get source-location ] ;
- for local src-dir in $(source-location)
- {
- if ! $(self.file-location)
- {
- local location = [ path.root $(self.name) $(src-dir) ] ;
- if [ CHECK_IF_FILE [ path.native $(location) ] ]
- {
- self.file-location = $(src-dir) ;
- self.file-path = $(location) ;
- }
- }
- }
- }
- return $(self.file-location) ;
- }
-}
-
-
-# Given a target-reference, made in context of 'project', returns the
-# abstract-target instance that is referred to, as well as properties explicitly
-# specified for this reference.
-#
-rule resolve-reference ( target-reference : project )
-{
- # Separate target name from properties override.
- local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ;
- local id = $(split[1]) ;
- local sproperties = ;
- if $(split[3])
- {
- sproperties = [ property.make [ feature.split $(split[3]) ] ] ;
- sproperties = [ feature.expand-composites $(sproperties) ] ;
- }
-
- # Find the target.
- local target = [ $(project).find $(id) ] ;
-
- return $(target) [ property-set.create $(sproperties) ] ;
-}
-
-
-# Attempts to generate the target given by target reference, which can refer
-# both to a main target or to a file. Returns a list consisting of
-# - usage requirements
-# - generated virtual targets, if any
-#
-rule generate-from-reference (
- target-reference # Target reference.
- : project # Project where the reference is made.
- : property-set # Properties of the main target that makes the reference.
-)
-{
- local r = [ resolve-reference $(target-reference) : $(project) ] ;
- local target = $(r[1]) ;
- local sproperties = $(r[2]) ;
-
- # Take properties which should be propagated and refine them with
- # source-specific requirements.
- local propagated = [ $(property-set).propagated ] ;
- local rproperties = [ $(propagated).refine $(sproperties) ] ;
- if $(rproperties[1]) = "@error"
- {
- errors.error
- "When building" [ full-name ] " with properties " $(properties) :
- "Invalid properties specified for " $(source) ":"
- $(rproperties[2-]) ;
- }
- return [ $(target).generate $(rproperties) ] ;
-}
-
-rule apply-default-build ( property-set : default-build )
-{
- # 1. First, see what properties from default-build are already present
- # in property-set.
-
- local raw = [ $(property-set).raw ] ;
- local specified-features = $(raw:G) ;
-
- local defaults-to-apply ;
- for local d in [ $(default-build).raw ]
- {
- if ! $(d:G) in $(specified-features)
- {
- defaults-to-apply += $(d) ;
- }
- }
-
- # 2. If there are any defaults to be applied, form a new build request.
- # Pass it through to 'expand-no-defaults' since default-build might
- # contain "release debug" resulting in two property-sets.
- local result ;
- if $(defaults-to-apply)
- {
- properties = [
- build-request.expand-no-defaults
-
- # We have to compress subproperties here to prevent property
- # lists like:
- #
- # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
- #
- # from being expanded into:
- #
- # <toolset-msvc:version>7.1/<threading>multi
- # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
- #
- # due to a cross-product property combination. That may be an
- # indication that build-request.expand-no-defaults is the wrong
- # rule to use here.
- [ feature.compress-subproperties $(raw) ]
- $(defaults-to-apply)
- ] ;
-
- if $(properties)
- {
- for local p in $(properties)
- {
- result += [ property-set.create
- [ feature.expand [ feature.split $(p) ] ] ] ;
- }
- }
- else
- {
- result = [ property-set.empty ] ;
- }
- }
- else
- {
- result = $(property-set) ;
- }
- return $(result) ;
-}
-
-
-# Given a build request and requirements, return properties common to dependency
-# build request and target requirements.
-#
-# TODO: Document exactly what 'common properties' are, whether they should
-# include default property values, whether they should contain any conditional
-# properties or should those be already processed, etc. See whether there are
-# any differences between use cases with empty and non-empty build-request as
-# well as with requirements containing and those not containing any non-free
-# features.
-#
-rule common-properties ( build-request requirements )
-{
- # For optimization, we add free requirements directly, without using a
- # complex algorithm. This gives the complex algorithm a better chance of
- # caching results.
- local free = [ $(requirements).free ] ;
- local non-free = [ property-set.create [ $(requirements).base ]
- [ $(requirements).incidental ] ] ;
-
- local key = .rp.$(build-request)-$(non-free) ;
- if ! $($(key))
- {
- $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
- }
- result = [ $($(key)).add-raw $(free) ] ;
-}
-
-
-# Given a 'context' -- a set of already present properties, and 'requirements',
-# decide which extra properties should be applied to 'context'. For conditional
-# requirements, this means evaluating the condition. For indirect conditional
-# requirements, this means calling a rule. Ordinary requirements are always
-# applied.
-#
-# Handles the situation where evaluating one conditional requirement affects
-# conditions of another conditional requirements, such as:
-# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
-#
-# If 'what' is 'refined', returns the context refined with the new requirements.
-# If 'what' is 'added', returns just the requirements to be applied.
-#
-rule evaluate-requirements ( requirements : context : what )
-{
- # Apply non-conditional requirements. It is possible that further
-    # conditional requirements change a value set by non-conditional
- # requirements. For example:
- #
- # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
- #
- # I am not sure if this should be an error, or not, especially given that
- #
- # <threading>single
- #
- # might come from project's requirements.
-
- local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] ;
-
- local raw = [ $(context).raw ] ;
- raw = [ property.refine $(raw) : $(unconditional) ] ;
-
- # We have collected properties that surely must be present in common
- # properties. We now try to figure out what other properties should be added
- # in order to satisfy rules (4)-(6) from the docs.
-
- local conditionals = [ $(requirements).conditional ] ;
- # The 'count' variable has one element for each conditional feature and for
- # each occurrence of '<indirect-conditional>' feature. It is used as a loop
-    # counter: on each iteration of the loop we remove one element, and the
-    # property set should stabilize before we run out of them. It is assumed that
- # #conditionals iterations should be enough for properties to propagate
- # along conditions in any direction.
- local count = $(conditionals)
- [ $(requirements).get <conditional> ]
- and-once-more ;
-
- local added-requirements ;
-
- local current = $(raw) ;
-
- # It is assumed that ordinary conditional requirements can not add
- # <conditional> properties (a.k.a. indirect conditional properties), and
- # that rules referred to by <conditional> properties can not add new
- # <conditional> properties. So the list of indirect conditionals does not
- # change.
- local indirect = [ $(requirements).get <conditional> ] ;
- indirect = [ MATCH ^@(.*) : $(indirect) ] ;
-
- local ok ;
- while $(count)
- {
- # Evaluate conditionals in context of current properties.
- local e = [ property.evaluate-conditionals-in-context $(conditionals)
- : $(current) ] ;
-
- # Evaluate indirect conditionals.
- for local i in $(indirect)
- {
- e += [ indirect.call $(i) $(current) ] ;
- }
-
- if $(e) = $(added-requirements)
- {
- # If we got the same result, we have found the final properties.
- count = ;
- ok = true ;
- }
- else
- {
- # Oops, conditional evaluation results have changed. Also 'current'
- # contains leftovers from a previous evaluation. Recompute 'current'
- # using initial properties and conditional requirements.
- added-requirements = $(e) ;
- current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
- }
- count = $(count[2-]) ;
- }
- if ! $(ok)
- {
- errors.error "Can not evaluate conditional properties " $(conditionals) ;
- }
-
- if $(what) = added
- {
- return [ property-set.create $(unconditional) $(added-requirements) ] ;
- }
- else if $(what) = refined
- {
- return [ property-set.create $(current) ] ;
- }
- else
- {
- errors.error "Invalid value of the 'what' parameter." ;
- }
-}
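
The loop in evaluate-requirements above is a small fixed-point iteration:
conditional properties are re-evaluated against the current property set until
the set of added properties stops changing, with a counter guarding against
conditions that never converge. A rough standalone sketch of that idea, using
(condition, added-property) pairs instead of real b2 property objects
(illustrative names only):

    def evaluate_conditionals(base, conditionals):
        # base: set of unconditional properties.
        # conditionals: list of (required_property, added_property) pairs,
        # e.g. ("<toolset>gcc", "<variant>release") stands for the
        # requirement <toolset>gcc:<variant>release.
        added = set()
        # One iteration per conditional, plus one more to confirm stability.
        for _ in range(len(conditionals) + 1):
            current = base | added
            new_added = {add for cond, add in conditionals if cond in current}
            if new_added == added:
                return base | added          # reached a fixed point
            added = new_added
        raise RuntimeError("conditional properties did not converge")

    props = evaluate_conditionals(
        {"<toolset>gcc"},
        [("<toolset>gcc", "<variant>release"),
         ("<variant>release", "<define>RELEASE")])
    print(sorted(props))
    # ['<define>RELEASE', '<toolset>gcc', '<variant>release']
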
-
-
-rule common-properties2 ( build-request requirements )
-{
- # This guarantees that default properties are present in the result, unless
-    # they are overridden by some requirement. FIXME: There is a possibility that
-    # we have added <foo>bar, which is composite and expands to <foo2>bar2, but
-    # the default value of <foo2> is not bar2, in which case it is not clear what
-    # to do.
- #
- build-request = [ $(build-request).add-defaults ] ;
- # Features added by 'add-default' can be composite and expand to features
-    # without default values -- so they are not added yet. It could be
-    # clearer/faster to expand only newly added properties but that is not critical.
- build-request = [ $(build-request).expand ] ;
-
- return [ evaluate-requirements $(requirements) : $(build-request) :
- refined ] ;
-}
-
-rule push-target ( target )
-{
- .targets = $(target) $(.targets) ;
-}
-
-rule pop-target ( )
-{
- .targets = $(.targets[2-]) ;
-}
-
-# Return the metatarget that is currently being generated.
-rule current ( )
-{
- return $(.targets[1]) ;
-}
-
-
-# Implements the most standard way of constructing a main target alternative
-# from sources. Allows sources to be either files or other main targets and
-# handles generation of those dependency targets.
-#
-class basic-target : abstract-target
-{
- import build-request ;
- import build-system ;
- import "class" : new ;
- import errors ;
- import feature ;
- import property ;
- import property-set ;
- import sequence ;
- import set ;
- import targets ;
- import virtual-target ;
-
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- abstract-target.__init__ $(name) : $(project) ;
-
- self.sources = $(sources) ;
- if ! $(requirements) {
- requirements = [ property-set.empty ] ;
- }
- self.requirements = $(requirements) ;
- if ! $(default-build)
- {
- default-build = [ property-set.empty ] ;
- }
- self.default-build = $(default-build) ;
- if ! $(usage-requirements)
- {
- usage-requirements = [ property-set.empty ] ;
- }
- self.usage-requirements = $(usage-requirements) ;
-
- if $(sources:G)
- {
- errors.user-error properties found in the 'sources' parameter for
- [ full-name ] ;
- }
- }
-
- rule always ( )
- {
- self.always = 1 ;
- }
-
- # Returns the list of abstract-targets which are used as sources. The extra
- # properties specified for sources are not represented. The only user for
- # this rule at the moment is the "--dump-tests" feature of the test system.
- #
- rule sources ( )
- {
- if ! $(self.source-targets)
- {
- for local s in $(self.sources)
- {
- self.source-targets +=
- [ targets.resolve-reference $(s) : $(self.project) ] ;
- }
- }
- return $(self.source-targets) ;
- }
-
- rule requirements ( )
- {
- return $(self.requirements) ;
- }
-
- rule default-build ( )
- {
- return $(self.default-build) ;
- }
-
- # Returns the alternative condition for this alternative, if the condition
- # is satisfied by 'property-set'.
- #
- rule match ( property-set debug ? )
- {
- # The condition is composed of all base non-conditional properties. It
- # is not clear if we should expand 'self.requirements' or not. For one
- # thing, it would be nice to be able to put
- # <toolset>msvc-6.0
- # in requirements. On the other hand, if we have <variant>release as a
- # condition it does not make sense to require <optimization>full to be
- # in the build request just to select this variant.
- local bcondition = [ $(self.requirements).base ] ;
- local ccondition = [ $(self.requirements).conditional ] ;
- local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
- if $(debug)
- {
- ECHO " next alternative: required properties:" $(condition:E=(empty)) ;
- }
-
- if $(condition) in [ $(property-set).raw ]
- {
- if $(debug)
- {
- ECHO " matched" ;
- }
- return $(condition) ;
- }
- else
- {
- if $(debug)
- {
- ECHO " not matched" ;
- }
- return no-match ;
- }
- }
-
-    # Takes a target reference, which might be either a target id or a dependency
-    # property, and generates that target using 'property-set' as the build request.
- #
- # The results are added to the variable called 'result-var'. Usage
- # requirements are added to the variable called 'usage-requirements-var'.
- #
- rule generate-dependencies ( dependencies * : property-set
- : result-var usage-requirements-var )
- {
- for local dependency in $(dependencies)
- {
- local grist = $(dependency:G) ;
- local id = $(dependency:G=) ;
-
- local result = [ targets.generate-from-reference $(id) :
- $(self.project) : $(property-set) ] ;
-
- $(result-var) += $(result[2-]:G=$(grist)) ;
- $(usage-requirements-var) += [ $(result[1]).raw ] ;
- }
- }
-
- # Determines final build properties, generates sources, and calls
- # 'construct'. This method should not be overridden.
- #
- rule generate ( property-set )
- {
- if [ modules.peek : .debug-building ]
- {
- ECHO ;
- local fn = [ full-name ] ;
- ECHO [ targets.indent ] "Building target '$(fn)'" ;
- targets.increase-indent ;
- ECHO [ targets.indent ] "Build request: " $(property-set) [ $(property-set).raw ] ;
- local cf = [ build-system.command-line-free-features ] ;
- ECHO [ targets.indent ] "Command line free features: " [ $(cf).raw ] ;
- ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ;
- }
- targets.push-target $(__name__) ;
-
- if ! $(self.generated.$(property-set))
- {
-            # Apply free features from the command line. If the user said
- # define=FOO
-            # they most likely want this define to be set for all compiles.
- property-set = [ $(property-set).refine
- [ build-system.command-line-free-features ] ] ;
- local rproperties = [ targets.common-properties $(property-set)
- $(self.requirements) ] ;
-
- if [ modules.peek : .debug-building ]
- {
- ECHO ;
- ECHO [ targets.indent ] "Common properties: " [ $(rproperties).raw ] ;
- }
-
- if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get
- <build> ] != no )
- {
- local source-targets ;
- local properties = [ $(rproperties).non-dependency ] ;
- local usage-requirements ;
-
- generate-dependencies [ $(rproperties).dependency ] :
- $(rproperties) : properties usage-requirements ;
-
- generate-dependencies $(self.sources) : $(rproperties) :
- source-targets usage-requirements ;
-
- if [ modules.peek : .debug-building ]
- {
- ECHO ;
- ECHO [ targets.indent ] "Usage requirements for"
- $(self.name)": " $(usage-requirements) ;
- }
-
- rproperties = [ property-set.create $(properties)
- $(usage-requirements) ] ;
- usage-requirements = [ property-set.create $(usage-requirements) ] ;
-
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "Build properties: "
- [ $(rproperties).raw ] ;
- }
-
- local extra = [ $(rproperties).get <source> ] ;
- source-targets += $(extra:G=) ;
- # We might get duplicate sources, for example if we link to two
- # libraries having the same <library> usage requirement.
- # Use stable sort, since for some targets the order is
-                    # important. E.g. the RUN_PY target needs the Python source
-                    # to come first.
- source-targets = [ sequence.unique $(source-targets) : stable ] ;
-
- local result = [ construct $(self.name) : $(source-targets) :
- $(rproperties) ] ;
-
- if $(result)
- {
- local gur = $(result[1]) ;
- result = $(result[2-]) ;
-
- if $(self.always)
- {
- for local t in $(result)
- {
- $(t).always ;
- }
- }
-
- local s = [ create-subvariant $(result)
- : [ virtual-target.recent-targets ]
- : $(property-set) : $(source-targets)
- : $(rproperties) : $(usage-requirements) ] ;
- virtual-target.clear-recent-targets ;
-
- local ur = [ compute-usage-requirements $(s) ] ;
- ur = [ $(ur).add $(gur) ] ;
- $(s).set-usage-requirements $(ur) ;
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "Usage requirements from"
- $(self.name)": " [ $(ur).raw ] ;
- }
-
- self.generated.$(property-set) = $(ur) $(result) ;
- }
- }
- else
- {
- if $(rproperties[1]) = "@error"
- {
- ECHO [ targets.indent ] "Skipping build of:" [ full-name ]
- "cannot compute common properties" ;
- }
- else if [ $(rproperties).get <build> ] = no
- {
- # If we just see <build>no, we cannot produce any reasonable
- # diagnostics. The code that adds this property is expected
- # to explain why a target is not built, for example using
- # the configure.log-component-configuration function.
- }
- else
- {
- ECHO [ targets.indent ] "Skipping build of: " [ full-name ]
- " unknown reason" ;
- }
-
- # We are here either because there has been an error computing
- # properties or there is <build>no in properties. In the latter
- # case we do not want any diagnostic. In the former case, we
- # need diagnostics. FIXME
-
- # If this target fails to build, add <build>no to properties to
- # cause any parent target to fail to build. Except that it
- # - does not work now, since we check for <build>no only in
- # common properties, but not in properties that came from
- # dependencies
- # - it is not clear if that is a good idea anyway. The alias
- # target, for example, should not fail to build if a
- # dependency fails.
- self.generated.$(property-set) = [ property-set.create <build>no ] ;
- }
- }
- else
- {
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "Already built" ;
- local ur = $(self.generated.$(property-set)) ;
- ur = $(ur[0]) ;
- targets.increase-indent ;
- ECHO [ targets.indent ] "Usage requirements from"
- $(self.name)": " [ $(ur).raw ] ;
- targets.decrease-indent ;
- }
- }
-
- targets.pop-target ;
- targets.decrease-indent ;
- return $(self.generated.$(property-set)) ;
- }
-
- # Given the set of generated targets, and refined build properties,
- # determines and sets appropriate usage requirements on those targets.
- #
- rule compute-usage-requirements ( subvariant )
- {
- local rproperties = [ $(subvariant).build-properties ] ;
- xusage-requirements = [ targets.evaluate-requirements
- $(self.usage-requirements) : $(rproperties) : added ] ;
-
- # We generate all dependency properties and add them, as well as their
- # usage requirements, to the result.
- local extra ;
- generate-dependencies [ $(xusage-requirements).dependency ] :
- $(rproperties) : extra extra ;
-
- local result = [ property-set.create
- [ $(xusage-requirements).non-dependency ] $(extra) ] ;
-
- # Propagate usage requirements we got from sources, except for the
- # <pch-header> and <pch-file> features.
- #
- # That feature specifies which pch file to use, and should apply only to
- # direct dependents. Consider:
- #
- # pch pch1 : ...
- # lib lib1 : ..... pch1 ;
- # pch pch2 :
- # lib lib2 : pch2 lib1 ;
- #
- # Here, lib2 should not get <pch-header> property from pch1.
- #
- # Essentially, when those two features are in usage requirements, they
- # are propagated only to direct dependents. We might need a more general
- # mechanism, but for now, only those two features are special.
- #
- # TODO - Actually there are more possible candidates like for instance
- # when listing static library X as a source for another static library.
- # Then static library X will be added as a <source> property to the
- # second library's usage requirements but those requirements should last
- # only up to the first executable or shared library that actually links
- # to it.
- local raw = [ $(subvariant).sources-usage-requirements ] ;
- raw = [ $(raw).raw ] ;
- raw = [ property.change $(raw) : <pch-header> ] ;
- raw = [ property.change $(raw) : <pch-file> ] ;
- return [ $(result).add [ property-set.create $(raw) ] ] ;
- }
-
- # Creates new subvariant instances for 'targets'.
- # 'root-targets' - virtual targets to be returned to dependants
- # 'all-targets' - virtual targets created while building this main target
- # 'build-request' - property-set instance with requested build properties
- #
- local rule create-subvariant ( root-targets * : all-targets * :
- build-request : sources * : rproperties : usage-requirements )
- {
- for local e in $(root-targets)
- {
- $(e).root true ;
- }
-
- # Process all virtual targets that will be created if this main target
- # is created.
- local s = [ new subvariant $(__name__) : $(build-request) : $(sources) :
- $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
- for local v in $(all-targets)
- {
- if ! [ $(v).creating-subvariant ]
- {
- $(v).creating-subvariant $(s) ;
- }
- }
- return $(s) ;
- }
-
- # Constructs virtual targets for this abstract target and the dependency
- # graph. Returns a usage-requirements property-set and a list of virtual
-    # targets. Should be overridden in derived classes.
- #
- rule construct ( name : source-targets * : properties * )
- {
- errors.error "method should be defined in derived classes" ;
- }
-}
-
-
-class typed-target : basic-target
-{
- import generators ;
-
- rule __init__ ( name : project : type : sources * : requirements * :
- default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
-
- self.type = $(type) ;
- }
-
- rule type ( )
- {
- return $(self.type) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local r = [ generators.construct $(self.project) $(name:S=) : $(self.type)
- : [ property-set.create [ $(property-set).raw ]
- <main-target-type>$(self.type) ]
- : $(source-targets) : true ] ;
- if ! $(r)
- {
- ECHO "warn: Unable to construct" [ full-name ] ;
-
- # Are there any top-level generators for this type/property set.
- if ! [ generators.find-viable-generators $(self.type)
- : $(property-set) ]
- {
- ECHO "error: no generators were found for type '$(self.type)'" ;
- ECHO "error: and the requested properties" ;
- ECHO "error: make sure you've configured the needed tools" ;
- ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
- ECHO "To debug this problem, try the --debug-generators option." ;
- EXIT ;
- }
- }
- return $(r) ;
- }
-}
-
-
-# Return the list of sources to use, if the main target rule is invoked with
-# 'sources'. If there are any objects in 'sources', they are treated as main
-# target instances, and the names of such targets are adjusted to be
-# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if
-# a non-empty value is passed as the 'no-renaming' parameter.
-#
-rule main-target-sources ( sources * : main-target-name : no-renaming ? )
-{
- local result ;
- for local t in $(sources)
- {
- if [ class.is-instance $(t) ]
- {
- local name = [ $(t).name ] ;
- if ! $(no-renaming)
- {
- name = $(main-target-name)__$(name) ;
- $(t).rename $(name) ;
- }
- # Inline targets are not built by default.
- local p = [ $(t).project ] ;
- $(p).mark-target-as-explicit $(name) ;
- result += $(name) ;
- }
- else
- {
- result += $(t) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns the requirements to use when declaring a main target, obtained by
-# translating all specified property paths and refining project requirements
-# with the ones specified for the target.
-#
-rule main-target-requirements (
- specification * # Properties explicitly specified for the main target.
- : project # Project where the main target is to be declared.
-)
-{
- specification += [ toolset.requirements ] ;
-
- local requirements = [ property-set.refine-from-user-input
- [ $(project).get requirements ] : $(specification) :
- [ $(project).project-module ] : [ $(project).get location ] ] ;
- if $(requirements[1]) = "@error"
- {
- errors.error "Conflicting requirements for target:" $(requirements) ;
- }
- return $(requirements) ;
-}
-
-
-# Returns the usage requirements to use when declaring a main target, which are
-# obtained by translating all specified property paths and adding project's
-# usage requirements.
-#
-rule main-target-usage-requirements (
- specification * # Use-properties explicitly specified for a main target.
- : project # Project where the main target is to be declared.
-)
-{
- local project-usage-requirements = [ $(project).get usage-requirements ] ;
-
- # We do not use 'refine-from-user-input' because:
- # - I am not sure if removing parent's usage requirements makes sense
- # - refining usage requirements is not needed, since usage requirements are
- # always free.
- local usage-requirements = [ property-set.create-from-user-input
- $(specification)
- : [ $(project).project-module ] [ $(project).get location ] ] ;
-
- return [ $(project-usage-requirements).add $(usage-requirements) ] ;
-}
-
-
-# Return the default build value to use when declaring a main target, which is
-# obtained by using the specified value if not empty and parent's default build
-# attribute otherwise.
-#
-rule main-target-default-build (
- specification * # Default build explicitly specified for a main target.
- : project # Project where the main target is to be declared.
-)
-{
- local result ;
- if $(specification)
- {
- result = $(specification) ;
- }
- else
- {
- result = [ $(project).get default-build ] ;
- }
- return [ property-set.create-with-validation $(result) ] ;
-}
-
-
-# Registers the specified target as a main target alternative and returns it.
-#
-rule main-target-alternative ( target )
-{
- local ptarget = [ $(target).project ] ;
- $(ptarget).add-alternative $(target) ;
- return $(target) ;
-}
-
-# Creates a new metatarget with the specified properties, using 'klass' as
-# the class. The 'name', 'sources',
-# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
-# the form specified by the user in Jamfile corresponding to 'project'.
-#
-rule create-metatarget ( klass : project : name : sources * : requirements * :
- default-build * : usage-requirements * )
-{
- return [
- targets.main-target-alternative
- [ new $(klass) $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-}
-
-# Creates a typed-target with the specified properties. The 'name', 'sources',
-# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
-# the form specified by the user in Jamfile corresponding to 'project'.
-#
-rule create-typed-target ( type : project : name : sources * : requirements * :
- default-build * : usage-requirements * )
-{
- return [
- targets.main-target-alternative
- [ new typed-target $(name) : $(project) : $(type)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-}
diff --git a/jam-files/boost-build/build/targets.py b/jam-files/boost-build/build/targets.py
deleted file mode 100644
index a35612ce..00000000
--- a/jam-files/boost-build/build/targets.py
+++ /dev/null
@@ -1,1401 +0,0 @@
-# Status: ported.
-# Base revision: 64488
-
-# Copyright Vladimir Prus 2002-2007.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
-#
-# Abstract targets are represented by classes derived from 'AbstractTarget' class.
-# The first abstract target is 'project_target', which is created for each
-# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
-# (see project.jam).
-#
-# Project targets keep a list of 'MainTarget' instances.
-# A main target is what the user explicitly defines in a Jamfile. It is
-# possible to have several definitions for a main target, for example to have
-# different lists of sources for different platforms. So, main targets
-# keep a list of alternatives.
-#
-# Each alternative is an instance of 'AbstractTarget'. When a main target
-# subvariant is defined by some rule, that rule will decide what class to
-# use, create an instance of that class and add it to the list of alternatives
-# for the main target.
-#
-# Rules supplied by the build system will use only targets derived
-# from 'BasicTarget' class, which will provide some default behaviour.
-# There will be two classes derived from it, 'make-target', created by the
-# 'make' rule, and 'TypedTarget', created by rules such as 'exe' and 'dll'.
-
-#
-# +------------------------+
-# |AbstractTarget |
-# +========================+
-# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project_target | | MainTarget | | BasicTarget |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
-# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
-# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
-# | | TypedTarget | | make-target | | stage-target |
-# . +==============+ +=============+ +==============+
-# . | construct | | construct | | construct |
-# +--------------+ +-------------+ +--------------+
-
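-# A minimal standalone sketch (illustration only, not the real classes) of the
-# hierarchy pictured above: an abstract base with 'generate', a main target
-# that delegates to one of its registered alternatives, and a project target
-# that fans out to its main targets.
-class _SketchAbstractTarget(object):
-    def generate(self, properties):
-        raise NotImplementedError
-
-class _SketchMainTarget(_SketchAbstractTarget):
-    def __init__(self, alternatives):
-        self.alternatives = alternatives
-    def generate(self, properties):
-        # selection logic elided; build the single chosen alternative
-        return self.alternatives[0].generate(properties)
-
-class _SketchProjectTarget(_SketchAbstractTarget):
-    def __init__(self, main_targets):
-        self.main_targets = main_targets
-    def generate(self, properties):
-        # a project builds all of its (non-explicit) main targets
-        return [t.generate(properties) for t in self.main_targets]
-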
-import re
-import os.path
-import sys
-
-from b2.manager import get_manager
-
-from b2.util.utility import *
-import property, project, virtual_target, property_set, feature, generators, toolset
-from virtual_target import Subvariant
-from b2.exceptions import *
-from b2.util.sequence import unique
-from b2.util import path, bjam_signature
-from b2.build.errors import user_error_checkpoint
-
-import b2.build.build_request as build_request
-
-import b2.util.set
-_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
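-
-# A small standalone demonstration (hypothetical reference string) of how the
-# regex above separates a target id from the source-specific properties that
-# may be appended after a slash.
-def _example_split_reference():
-    m = _re_separate_target_from_properties.match("util//bar/<link>static")
-    assert m.group(1) == "util//bar"      # the target id part
-    assert m.group(3) == "<link>static"   # the raw properties part, or None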
-
-class TargetRegistry:
-
- def __init__ (self):
- # All targets that are currently being built.
- # Only the key is id (target), the value is the actual object.
- self.targets_being_built_ = {}
-
- # Current indent for debugging messages
- self.indent_ = ""
-
- self.debug_building_ = "--debug-building" in bjam.variable("ARGV")
-
- self.targets_ = []
-
- def main_target_alternative (self, target):
- """ Registers the specified target as a main target alternative.
- Returns 'target'.
- """
- target.project ().add_alternative (target)
- return target
-
- def main_target_sources (self, sources, main_target_name, no_renaming=0):
- """Return the list of sources to use when the main target rule is invoked
- with 'sources'. If there are any objects in 'sources', they are treated
- as main target instances, and the names of such targets are adjusted to
- be '<name_of_this_target>__<name_of_source_target>'. Such renaming
- is disabled if a non-empty value is passed for the 'no_renaming' parameter."""
- result = []
-
- for t in sources:
-
- t = b2.util.jam_to_value_maybe(t)
-
- if isinstance (t, AbstractTarget):
- name = t.name ()
-
- if not no_renaming:
- name = main_target_name + '__' + name
- t.rename (name)
-
- # Inline targets are not built by default.
- p = t.project()
- p.mark_targets_as_explicit([name])
- result.append(name)
-
- else:
- result.append (t)
-
- return result
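-
-# A tiny illustration (hypothetical names) of the renaming scheme described in
-# the docstring above: an inline source target is prefixed with the enclosing
-# main target's name so it cannot clash with other inline targets in the
-# project.
-def _example_inline_source_name(main_target_name, source_name):
-    return main_target_name + '__' + source_name
-
-# _example_inline_source_name('hello', 'obj1') -> 'hello__obj1'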
-
-
- def main_target_requirements(self, specification, project):
- """Returns the requirements to use when declaring a main target,
- which are obtained by
- - translating all specified property paths, and
- - refining project requirements with the ones specified for the target.
-
- 'specification' holds the properties explicitly specified for the
- main target.
- 'project' is the project where the main target is to be declared."""
-
- specification.extend(toolset.requirements())
-
- requirements = property_set.refine_from_user_input(
- project.get("requirements"), specification,
- project.project_module(), project.get("location"))
-
- return requirements
-
- def main_target_usage_requirements (self, specification, project):
- """ Returns the usage requirements to use when declaring a main target,
- which are obtained by
- - translating all specified property paths, and
- - adding project's usage requirements
- specification: Use-properties explicitly specified for a main target
- project: Project where the main target is to be declared
- """
- project_usage_requirements = project.get ('usage-requirements')
-
- # We do not use 'refine_from_user_input' because:
- # - it is not clear whether removing the parent's usage requirements makes sense;
- # - refining usage requirements is not needed, since usage requirements
- # are always free.
- usage_requirements = property_set.create_from_user_input(
- specification, project.project_module(), project.get("location"))
-
- return project_usage_requirements.add (usage_requirements)
-
- def main_target_default_build (self, specification, project):
- """ Return the default build value to use when declaring a main target,
- which is obtained by using the specified value if not empty and the parent's
- default build attribute otherwise.
- specification: Default build explicitly specified for a main target
- project: Project where the main target is to be declared
- """
- if specification:
- return property_set.create_with_validation(specification)
- else:
- return project.get ('default-build')
-
- def start_building (self, main_target_instance):
- """ Helper method to detect cycles in main target references.
- """
- if self.targets_being_built_.has_key(id(main_target_instance)):
- names = []
- for t in self.targets_being_built_.values() + [main_target_instance]:
- names.append (t.full_name())
-
- get_manager().errors()("Recursion in main target references\n" + "\n".join(names))
-
- self.targets_being_built_[id(main_target_instance)] = main_target_instance
-
- def end_building (self, main_target_instance):
- assert (self.targets_being_built_.has_key (id (main_target_instance)))
- del self.targets_being_built_ [id (main_target_instance)]
-
- def create_typed_target (self, type, project, name, sources, requirements, default_build, usage_requirements):
- """ Creates a TypedTarget with the specified properties.
- The 'name', 'sources', 'requirements', 'default_build' and
- 'usage_requirements' are assumed to be in the form specified
- by the user in Jamfile corresponding to 'project'.
- """
- return self.main_target_alternative (TypedTarget (name, project, type,
- self.main_target_sources (sources, name),
- self.main_target_requirements (requirements, project),
- self.main_target_default_build (default_build, project),
- self.main_target_usage_requirements (usage_requirements, project)))
-
- def increase_indent(self):
- self.indent_ += " "
-
- def decrease_indent(self):
- self.indent_ = self.indent_[0:-4]
-
- def logging(self):
- return self.debug_building_
-
- def log(self, message):
- if self.debug_building_:
- print self.indent_ + message
-
- def push_target(self, target):
- self.targets_.append(target)
-
- def pop_target(self):
- self.targets_ = self.targets_[:-1]
-
- def current(self):
- return self.targets_[0]
-
-
-class GenerateResult:
-
- def __init__ (self, ur=None, targets=None):
- if not targets:
- targets = []
-
- self.__usage_requirements = ur
- self.__targets = targets
- assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets)
-
- if not self.__usage_requirements:
- self.__usage_requirements = property_set.empty ()
-
- def usage_requirements (self):
- return self.__usage_requirements
-
- def targets (self):
- return self.__targets
-
- def extend (self, other):
- assert (isinstance (other, GenerateResult))
-
- self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ())
- self.__targets.extend (other.targets ())
-
-class AbstractTarget:
- """ Base class for all abstract targets.
- """
- def __init__ (self, name, project, manager = None):
- """ manager: the Manager object
- name: name of the target
- project: the project target to which this one belongs
- manager:the manager object. If none, uses project.manager ()
- """
- assert (isinstance (project, ProjectTarget))
- # Note: it might seem that we don't need either name or project at all.
- # However, there are places where we really need it. One example is error
- # messages which should name problematic targets. Another is setting correct
- # paths for sources and generated files.
-
- # Why allow manager to be specified? Because otherwise project target could not derive
- # from this class.
- if manager:
- self.manager_ = manager
- else:
- self.manager_ = project.manager ()
-
- self.name_ = name
- self.project_ = project
-
- def manager (self):
- return self.manager_
-
- def name (self):
- """ Returns the name of this target.
- """
- return self.name_
-
- def project (self):
- """ Returns the project for this target.
- """
- return self.project_
-
- def location (self):
- """ Return the location where the target was declared.
- """
- return self.location_
-
- def full_name (self):
- """ Returns a user-readable name for this target.
- """
- location = self.project ().get ('location')
- return location + '/' + self.name_
-
- def generate (self, property_set):
- """ Takes a property set. Generates virtual targets for this abstract
- target, using the specified properties, unless a different value of some
- feature is required by the target.
- On success, returns a GenerateResult instance with:
- - a property_set with the usage requirements to be
- applied to dependents
- - a list of produced virtual targets, which may be
- empty.
- If 'property_set' is empty, performs default build of this
- target, in a way specific to derived class.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def rename (self, new_name):
- self.name_ = new_name
-
-class ProjectTarget (AbstractTarget):
- """ Project target class (derived from 'AbstractTarget')
-
- This class has the following responsibilities:
- - maintaining a list of main targets in this project and
- building them
-
- Main targets are constructed in two stages:
- - When Jamfile is read, a number of calls to 'add_alternative' is made.
- At that time, alternatives can also be renamed to account for inline
- targets.
- - The first time the 'main_target' or 'has_main_target' method is called,
- all alternatives are enumerated and main targets are created.
- """
- def __init__ (self, manager, name, project_module, parent_project, requirements, default_build):
- AbstractTarget.__init__ (self, name, self, manager)
-
- self.project_module_ = project_module
- self.location_ = manager.projects().attribute (project_module, 'location')
- self.requirements_ = requirements
- self.default_build_ = default_build
-
- self.build_dir_ = None
-
- # A cache of IDs
- self.ids_cache_ = {}
-
- # True if main targets have already been built.
- self.built_main_targets_ = False
-
- # A list of the registered alternatives for this project.
- self.alternatives_ = []
-
- # A map from main target name to the target corresponding
- # to it.
- self.main_target_ = {}
-
- # Targets marked as explicit.
- self.explicit_targets_ = set()
-
- # Targets marked as always
- self.always_targets_ = set()
-
- # The constants defined for this project.
- self.constants_ = {}
-
- # Whether targets for all main targets have already been created.
- self.built_main_targets_ = 0
-
- if parent_project:
- self.inherit (parent_project)
-
-
- # TODO: This is needed only by the 'make' rule. Need to find the
- # way to make 'make' work without this method.
- def project_module (self):
- return self.project_module_
-
- def get (self, attribute):
- return self.manager().projects().attribute(
- self.project_module_, attribute)
-
- def build_dir (self):
- if not self.build_dir_:
- self.build_dir_ = self.get ('build-dir')
- if not self.build_dir_:
- self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin')
-
- return self.build_dir_
-
- def generate (self, ps):
- """ Generates all possible targets contained in this project.
- """
- self.manager_.targets().log(
- "Building project '%s' with '%s'" % (self.name (), str(ps)))
- self.manager_.targets().increase_indent ()
-
- result = GenerateResult ()
-
- for t in self.targets_to_build ():
- g = t.generate (ps)
- result.extend (g)
-
- self.manager_.targets().decrease_indent ()
- return result
-
- def targets_to_build (self):
- """ Computes and returns a list of AbstractTarget instances which
- must be built when this project is built.
- """
- result = []
-
- if not self.built_main_targets_:
- self.build_main_targets ()
-
- # Collect all main targets here, except for "explicit" ones.
- for n, t in self.main_target_.iteritems ():
- if not t.name () in self.explicit_targets_:
- result.append (t)
-
- # Collect all projects referenced via "projects-to-build" attribute.
- self_location = self.get ('location')
- for pn in self.get ('projects-to-build'):
- result.append (self.find(pn + "/"))
-
- return result
-
- def mark_targets_as_explicit (self, target_names):
- """Add 'target_names' to the list of targets in this project
- that should be built only by explicit request."""
-
- # Record the names of the targets, not instances, since this
- # method is called before main target instances are created.
- self.explicit_targets_.update(target_names)
-
- def mark_targets_as_always(self, target_names):
- self.always_targets_.update(target_names)
-
- def add_alternative (self, target_instance):
- """ Add new target alternative.
- """
- if self.built_main_targets_:
- raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())
-
- self.alternatives_.append (target_instance)
-
- def main_target (self, name):
- if not self.built_main_targets_:
- self.build_main_targets()
-
- return self.main_target_[name]
-
- def has_main_target (self, name):
- """Tells if a main target with the specified name exists."""
- if not self.built_main_targets_:
- self.build_main_targets()
-
- return self.main_target_.has_key(name)
-
- def create_main_target (self, name):
- """ Returns a 'MainTarget' class instance corresponding to the 'name'.
- """
- if not self.built_main_targets_:
- self.build_main_targets ()
-
- return self.main_target_.get (name, None)
-
-
- def find_really(self, id):
- """ Find and return the target with the specified id, treated
- relative to self.
- """
- result = None
- current_location = self.get ('location')
-
- __re_split_project_target = re.compile (r'(.*)//(.*)')
- split = __re_split_project_target.match (id)
-
- project_part = None
- target_part = None
-
- if split:
- project_part = split.group (1)
- target_part = split.group (2)
-
- project_registry = self.project_.manager ().projects ()
-
- extra_error_message = ''
- if project_part:
- # There is an explicit project part in the id. Look up the
- # project and pass the request to it.
- pm = project_registry.find (project_part, current_location)
-
- if pm:
- project_target = project_registry.target (pm)
- result = project_target.find (target_part, no_error=1)
-
- else:
- extra_error_message = "error: could not find project '%s'" % project_part
-
- else:
- # Interpret target-name as name of main target
- # Need to do this before checking for file. Consider this:
- #
- # exe test : test.cpp ;
- # install s : test : <location>. ;
- #
- # After first build we'll have target 'test' in Jamfile and file
- # 'test' on the disk. We need target to override the file.
-
- result = None
- if self.has_main_target(id):
- result = self.main_target(id)
-
- if not result:
- result = FileReference (self.manager_, id, self.project_)
- if not result.exists ():
- # File actually does not exist.
- # Reset 'target' so that an error is issued.
- result = None
-
-
- if not result:
- # Interpret id as project-id
- project_module = project_registry.find (id, current_location)
- if project_module:
- result = project_registry.target (project_module)
-
- return result
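-
-# A condensed standalone sketch (a plain dict stands in for the project and
-# its registries) of the lookup order implemented in find_really above: an
-# explicit project part first, then a main target in this project, then a
-# plain file, and finally a project id.
-def _example_lookup_order(reference, project):
-    if '//' in reference:
-        project_part, target_part = reference.split('//', 1)
-        return ('project-target', project_part, target_part)
-    if reference in project.get('main-targets', []):
-        return ('main-target', reference)
-    if reference in project.get('files', []):
-        return ('file', reference)
-    return ('project-id', reference)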
-
- def find (self, id, no_error = False):
- v = self.ids_cache_.get (id, None)
-
- if not v:
- v = self.find_really (id)
- self.ids_cache_ [id] = v
-
- if v or no_error:
- return v
-
- raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location')))
-
-
- def build_main_targets (self):
- self.built_main_targets_ = True
-
- for a in self.alternatives_:
- name = a.name ()
- if not self.main_target_.has_key (name):
- t = MainTarget (name, self.project_)
- self.main_target_ [name] = t
-
- if name in self.always_targets_:
- a.always()
-
- self.main_target_ [name].add_alternative (a)
-
- def add_constant(self, name, value, path=0):
- """Adds a new constant for this project.
-
- The constant will be available for use in the Jamfile
- module for this project. If 'path' is true,
- the constant will be interpreted relative
- to the location of the project.
- """
-
- if path:
- l = self.location_
- if not l:
- # Projects corresponding to config files do not have a
- # 'location' attribute, but do have a source location.
- # It might be more reasonable to make every project have
- # a location and use some other approach to prevent buildable
- # targets in config files, but that's for later.
- l = self.get('source-location')
-
- value = os.path.join(l, value)
- # Now make the value absolute path
- value = os.path.join(os.getcwd(), value)
-
- self.constants_[name] = value
- bjam.call("set-variable", self.project_module(), name, value)
-
- def inherit(self, parent_project):
- for c in parent_project.constants_:
- # No need to pass the type. Path constants were converted to
- # absolute paths already by parent.
- self.add_constant(c, parent_project.constants_[c])
-
- # Import rules from parent
- this_module = self.project_module()
- parent_module = parent_project.project_module()
-
- rules = bjam.call("RULENAMES", parent_module)
- if not rules:
- rules = []
- user_rules = [x for x in rules
- if x not in self.manager().projects().project_rules().all_names()]
- if user_rules:
- bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
-
-class MainTarget (AbstractTarget):
- """ A named top-level target in Jamfile.
- """
- def __init__ (self, name, project):
- AbstractTarget.__init__ (self, name, project)
- self.alternatives_ = []
- self.default_build_ = property_set.empty ()
-
- def add_alternative (self, target):
- """ Add a new alternative for this target.
- """
- d = target.default_build ()
-
- if self.alternatives_ and self.default_build_ != d:
- get_manager().errors()("default build must be identical in all alternatives\n"
- "main target is '%s'\n"
- "with '%s'\n"
- "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ()))
-
- else:
- self.default_build_ = d
-
- self.alternatives_.append (target)
-
- def __select_alternatives (self, property_set, debug):
- """ Returns the best viable alternative for this property_set
- See the documentation for selection rules.
- # TODO: shouldn't this be 'alternative' (singular)?
- """
- # When selecting alternatives we have to consider defaults,
- # for example:
- # lib l : l.cpp : <variant>debug ;
- # lib l : l_opt.cpp : <variant>release ;
- # won't work unless we add default value <variant>debug.
- property_set = property_set.add_defaults ()
-
- # The algorithm: we keep the current best viable alternative.
- # When we've got new best viable alternative, we compare it
- # with the current one.
- best = None
- best_properties = None
-
- if len (self.alternatives_) == 0:
- return None
-
- if len (self.alternatives_) == 1:
- return self.alternatives_ [0]
-
- if debug:
- print "Property set for selection:", property_set
-
- for v in self.alternatives_:
- properties = v.match (property_set, debug)
-
- if properties is not None:
- if not best:
- best = v
- best_properties = properties
-
- else:
- if b2.util.set.equal (properties, best_properties):
- return None
-
- elif b2.util.set.contains (properties, best_properties):
- # Do nothing, this alternative is worse
- pass
-
- elif b2.util.set.contains (best_properties, properties):
- best = v
- best_properties = properties
-
- else:
- return None
-
- return best
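-
-# A standalone sketch (simplified; properties are plain strings and sets here)
-# of the selection rule above: an alternative is viable when its required
-# properties are a subset of the build request; among viable alternatives the
-# largest requirement set wins, and equal or incomparable sets make the
-# choice ambiguous.
-def _example_select_alternative(build_request, alternatives):
-    best = None                      # (name, required-properties) seen so far
-    for name, required in alternatives:
-        if not required <= build_request:
-            continue                 # not viable for this build request
-        if best is None or best[1] < required:
-            best = (name, required)  # strictly more specific: new best
-        elif not required < best[1]:
-            return None              # equal or incomparable: ambiguous
-    return best and best[0]
-
-# _example_select_alternative({'<variant>debug', '<link>static'},
-#     [('generic', set()), ('debug-only', {'<variant>debug'})]) -> 'debug-only'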
-
- def apply_default_build (self, property_set):
- return apply_default_build(property_set, self.default_build_)
-
- def generate (self, ps):
- """ Select an alternative for this main target, by finding all alternatives
- whose requirements are satisfied by 'ps' and picking the one with the
- longest requirements set.
- Returns the result of calling 'generate' on that alternative.
- """
- self.manager_.targets ().start_building (self)
-
- # We want composite properties in the build request to act as if
- # all the properties they expand to were explicitly specified.
- ps = ps.expand ()
-
- all_property_sets = self.apply_default_build (ps)
-
- result = GenerateResult ()
-
- for p in all_property_sets:
- result.extend (self.__generate_really (p))
-
- self.manager_.targets ().end_building (self)
-
- return result
-
- def __generate_really (self, prop_set):
- """ Generates the main target with the given property set
- and returns a GenerateResult holding the usage requirements
- of the generated target and the generated virtual targets.
- It is possible that no targets are generated.
- """
- best_alternative = self.__select_alternatives (prop_set, debug=0)
-
- if not best_alternative:
- # FIXME: revive.
- # self.__select_alternatives(prop_set, debug=1)
- self.manager_.errors()(
- "No best alternative for '%s'.\n"
- % (self.full_name(),))
-
- result = best_alternative.generate (prop_set)
-
- # Now return virtual targets for the only alternative
- return result
-
- def rename(self, new_name):
- AbstractTarget.rename(self, new_name)
- for a in self.alternatives_:
- a.rename(new_name)
-
-class FileReference (AbstractTarget):
- """ Abstract target which refers to a source file.
- This is an artificial entity; it is useful so that the sources of
- a target can be represented as a list of abstract target instances.
- """
- def __init__ (self, manager, file, project):
- AbstractTarget.__init__ (self, file, project)
- self.file_location_ = None
-
- def generate (self, properties):
- return GenerateResult (None, [
- self.manager_.virtual_targets ().from_file (
- self.name_, self.location(), self.project_) ])
-
- def exists (self):
- """ Returns true if the referred file really exists.
- """
- if self.location ():
- return True
- else:
- return False
-
- def location (self):
- # Returns the location of target. Needed by 'testing.jam'
- if not self.file_location_:
- source_location = self.project_.get('source-location')
-
- for src_dir in source_location:
- location = os.path.join(src_dir, self.name())
- if os.path.isfile(location):
- self.file_location_ = src_dir
- self.file_path = location
- break
-
- return self.file_location_
-
-def resolve_reference(target_reference, project):
- """ Given a target_reference, made in context of 'project',
- returns the AbstractTarget instance that is referred to, as well
- as properties explicitly specified for this reference.
- """
- # Separate target name from properties override
- split = _re_separate_target_from_properties.match (target_reference)
- if not split:
- raise BaseException ("Invalid reference: '%s'" % target_reference)
-
- id = split.group (1)
-
- sproperties = []
-
- if split.group (3):
- sproperties = property.create_from_strings(feature.split(split.group(3)))
- sproperties = feature.expand_composites(sproperties)
-
- # Find the target
- target = project.find (id)
-
- return (target, property_set.create(sproperties))
-
-def generate_from_reference(target_reference, project, property_set):
- """ Attempts to generate the target given by target reference, which
- can refer both to a main target or to a file.
- Returns a list consisting of
- - usage requirements
- - generated virtual targets, if any
- target_reference: Target reference
- project: Project where the reference is made
- property_set: Properties of the main target that makes the reference
- """
- target, sproperties = resolve_reference(target_reference, project)
-
- # Take properties which should be propagated and refine them
- # with source-specific requirements.
- propagated = property_set.propagated()
- rproperties = propagated.refine(sproperties)
-
- return target.generate(rproperties)
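-
-# A standalone sketch (string properties assumed; the real 'refine' is richer)
-# of the refinement step above: only propagated properties travel from the
-# dependent to the referenced source, and reference-local properties of the
-# same feature override them.
-def _example_refine_for_source(propagated, source_specific):
-    merged = {}
-    for p in propagated + source_specific:
-        feature_name = p.split('>')[0] + '>'
-        merged[feature_name] = p      # later properties of a feature win
-    return sorted(merged.values())
-
-# _example_refine_for_source(['<variant>debug'], ['<variant>release'])
-# -> ['<variant>release']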
-
-
-
-class BasicTarget (AbstractTarget):
- """ Implements the most standard way of constructing main target
- alternative from sources. Allows sources to be either file or
- other main target and handles generation of those dependency
- targets.
- """
- def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None):
- AbstractTarget.__init__ (self, name, project)
-
- for s in sources:
- if get_grist (s):
- raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
-
- self.sources_ = sources
-
- if not requirements: requirements = property_set.empty ()
- self.requirements_ = requirements
-
- if not default_build: default_build = property_set.empty ()
- self.default_build_ = default_build
-
- if not usage_requirements: usage_requirements = property_set.empty ()
- self.usage_requirements_ = usage_requirements
-
- # A cache for resolved references
- self.source_targets_ = None
-
- # A cache for generated targets
- self.generated_ = {}
-
- # A cache for build requests
- self.request_cache = {}
-
- # The result of 'capture_user_context' has everything. For example, if this
- # target is declared as a result of loading a Jamfile which was loaded when
- # building target B, which was requested from A, then we'll have A, B and
- # the Jamroot location in the context. We only care about the Jamroot
- # location most of the time.
- self.user_context_ = self.manager_.errors().capture_user_context()[-1:]
-
- self.always_ = False
-
- def always(self):
- self.always_ = True
-
- def sources (self):
- """ Returns the list of AbstractTargets which are used as sources.
- The extra properties specified for sources are not represented.
- The only use of this method at the moment is the '--dump-tests'
- feature of the test system.
- """
- if self.source_targets_ is None:
- self.source_targets_ = []
- for s in self.sources_:
- self.source_targets_.append(resolve_reference(s, self.project_)[0])
-
- return self.source_targets_
-
- def requirements (self):
- return self.requirements_
-
- def default_build (self):
- return self.default_build_
-
- def common_properties (self, build_request, requirements):
- """ Given build request and requirements, return properties
- common to dependency build request and target build
- properties.
- """
- # For optimization, we add free unconditional requirements directly,
- # without using the complex algorithm.
- # This gives the complex algorithm a better chance of caching results.
- # The exact effect of this "optimization" is no longer clear.
- free_unconditional = []
- other = []
- for p in requirements.all():
- if p.feature().free() and not p.condition() and p.feature().name() != 'conditional':
- free_unconditional.append(p)
- else:
- other.append(p)
- other = property_set.create(other)
-
- key = (build_request, other)
- if not self.request_cache.has_key(key):
- self.request_cache[key] = self.__common_properties2 (build_request, other)
-
- return self.request_cache[key].add_raw(free_unconditional)
-
- # Given 'context' -- a set of already present properties, and 'requirements',
- # decide which extra properties should be applied to 'context'.
- # For conditional requirements, this means evaluating condition. For
- # indirect conditional requirements, this means calling a rule. Ordinary
- # requirements are always applied.
- #
- # Handles situation where evaluating one conditional requirements affects
- # condition of another conditional requirements, for example:
- #
- # <toolset>gcc:<variant>release <variant>release:<define>RELEASE
- #
- # If 'what' is 'refined' returns context refined with new requirements.
- # If 'what' is 'added' returns just the requirements that must be applied.
- def evaluate_requirements(self, requirements, context, what):
- # Apply non-conditional requirements.
- # It is possible that further conditional requirements change
- # a value set by non-conditional requirements. For example:
- #
- # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
- #
- # I'm not sure if this should be an error, or not, especially given that
- #
- # <threading>single
- #
- # might come from project's requirements.
- unconditional = feature.expand(requirements.non_conditional())
-
- context = context.refine(property_set.create(unconditional))
-
- # We've collected properties that surely must be present in common
- # properties. We now try to figure out what other properties
- # should be added in order to satisfy rules (4)-(6) from the docs.
-
- conditionals = property_set.create(requirements.conditional())
-
- # It's supposed that #conditionals iterations
- # should be enough for properties to propagate along conditions in any
- # direction.
- max_iterations = len(conditionals.all()) +\
- len(requirements.get("<conditional>")) + 1
-
- added_requirements = []
- current = context
-
- # It's assumed that ordinary conditional requirements can't add
- # <indirect-conditional> properties, and that rules referred to
- # by <indirect-conditional> properties can't add new
- # <indirect-conditional> properties. So the list of indirect conditionals
- # does not change.
- indirect = requirements.get("<conditional>")
-
- ok = 0
- for i in range(0, max_iterations):
-
- e = conditionals.evaluate_conditionals(current).all()[:]
-
- # Evaluate indirect conditionals.
- for i in indirect:
- i = b2.util.jam_to_value_maybe(i)
- if callable(i):
- # This is Python callable, yeah.
- e.extend(i(current))
- else:
- # Name of bjam function. Because bjam is unable to handle
- # list of Property, pass list of strings.
- br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
- if br:
- e.extend(property.create_from_strings(br))
-
- if e == added_requirements:
- # If we got the same result, we've found final properties.
- ok = 1
- break
- else:
- # Oops, the results of evaluating the conditionals have changed.
- # Also, 'current' contains leftovers from the previous evaluation.
- # Recompute 'current' using initial properties and conditional
- # requirements.
- added_requirements = e
- current = context.refine(property_set.create(feature.expand(e)))
-
- if not ok:
- self.manager().errors()("Can't evaluate conditional properties "
- + str(conditionals))
-
-
- if what == "added":
- return property_set.create(unconditional + added_requirements)
- elif what == "refined":
- return current
- else:
- self.manager().errors()("Invalid value of the 'what' parameter")
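-
-# A simplified standalone sketch (properties are plain strings and conditions
-# plain sets here) of the fixed point iteration above: conditional
-# requirements are re-evaluated against the growing property set until the
-# set of added properties stops changing.
-def _example_evaluate_conditionals(context, conditionals):
-    # 'conditionals' is a list of (condition, consequence) pairs of sets.
-    added = set()
-    for _ in range(len(conditionals) + 1):
-        current = context | added
-        new = set()
-        for condition, consequence in conditionals:
-            if condition <= current:
-                new |= consequence
-        if new == added:
-            return current            # reached a fixed point
-        added = new
-    raise ValueError("conditional properties do not converge")
-
-# _example_evaluate_conditionals(
-#     {'<toolset>gcc'},
-#     [({'<toolset>gcc'}, {'<variant>release'}),
-#      ({'<variant>release'}, {'<define>NDEBUG'})])
-# -> {'<toolset>gcc', '<variant>release', '<define>NDEBUG'}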
-
- def __common_properties2(self, build_request, requirements):
- # This guarantees that default properties are present
- # in the result, unless they are overridden by some requirement.
- # TODO: There is a possibility that we've added <foo>bar, which is composite
- # and expands to <foo2>bar2, but the default value of <foo2> is not bar2,
- # in which case it's not clear what to do.
- #
- build_request = build_request.add_defaults()
- # Features added by 'add_defaults' can be composite and expand
- # to features without default values -- so they are not added yet.
- # It could be clearer/faster to expand only newly added properties
- # but that's not critical.
- build_request = build_request.expand()
-
- return self.evaluate_requirements(requirements, build_request,
- "refined")
-
- def match (self, property_set, debug):
- """ Returns the alternative condition for this alternative, if
- the condition is satisfied by 'property_set'.
- """
- # The condition is composed of all base non-conditional properties.
- # It's not clear if we should expand 'self.requirements_' or not.
- # For one thing, it would be nice to be able to put
- # <toolset>msvc-6.0
- # in requirements.
- # On the other hand, if we have <variant>release in condition it
- # does not make sense to require <optimization>full to be in
- # build request just to select this variant.
- bcondition = self.requirements_.base ()
- ccondition = self.requirements_.conditional ()
- condition = b2.util.set.difference (bcondition, ccondition)
-
- if debug:
- print " next alternative: required properties:", [str(p) for p in condition]
-
- if b2.util.set.contains (condition, property_set.all()):
-
- if debug:
- print " matched"
-
- return condition
-
- else:
- return None
-
-
- def generate_dependency_targets (self, target_ids, property_set):
- targets = []
- usage_requirements = []
- for id in target_ids:
-
- result = generate_from_reference(id, self.project_, property_set)
- targets += result.targets()
- usage_requirements += result.usage_requirements().all()
-
- return (targets, usage_requirements)
-
- def generate_dependency_properties(self, properties, ps):
- """ Takes a list of dependency properties, each referring to a target,
- and generates those targets using 'ps' as the build request.
-
- Returns a tuple (result_properties, usage_requirements).
- """
- result_properties = []
- usage_requirements = []
- for p in properties:
-
- result = generate_from_reference(p.value(), self.project_, ps)
-
- for t in result.targets():
- result_properties.append(property.Property(p.feature(), t))
-
- usage_requirements += result.usage_requirements().all()
-
- return (result_properties, usage_requirements)
-
-
-
-
- @user_error_checkpoint
- def generate (self, ps):
- """ Determines final build properties, generates sources,
- and calls 'construct'. This method should not be
- overridden.
- """
- self.manager_.errors().push_user_context(
- "Generating target " + self.full_name(), self.user_context_)
-
- if self.manager().targets().logging():
- self.manager().targets().log(
- "Building target '%s'" % self.name_)
- self.manager().targets().increase_indent ()
- self.manager().targets().log(
- "Build request: '%s'" % str (ps.raw ()))
- cf = self.manager().command_line_free_features()
- self.manager().targets().log(
- "Command line free features: '%s'" % str (cf.raw ()))
- self.manager().targets().log(
- "Target requirements: '%s'" % str (self.requirements().raw ()))
-
- self.manager().targets().push_target(self)
-
- if not self.generated_.has_key(ps):
-
- # Apply free features from the command line. If the user
- # said
- # define=FOO
- # they most likely want this define to be set for all compiles.
- ps = ps.refine(self.manager().command_line_free_features())
- rproperties = self.common_properties (ps, self.requirements_)
-
- self.manager().targets().log(
- "Common properties are '%s'" % str (rproperties))
-
- if rproperties.get("<build>") != ["no"]:
-
- result = GenerateResult ()
-
- properties = rproperties.non_dependency ()
-
- (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties)
- properties += p
- assert all(isinstance(p, property.Property) for p in properties)
- usage_requirements = u
-
- (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties)
- usage_requirements += u
-
- self.manager_.targets().log(
- "Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements))
-
- # FIXME:
-
- rproperties = property_set.create(properties + usage_requirements)
- usage_requirements = property_set.create (usage_requirements)
-
- self.manager_.targets().log(
- "Build properties: '%s'" % str(rproperties))
-
- source_targets += rproperties.get('<source>')
-
- # We might get duplicate sources, for example if
- # we link to two libraries which have the same <library> in
- # their usage requirements.
- # Use a stable sort, since for some targets the order is
- # important. E.g. the RUN_PY target needs the python source to come
- # first.
- source_targets = unique(source_targets, stable=True)
-
- # FIXME: figure why this call messes up source_targets in-place
- result = self.construct (self.name_, source_targets[:], rproperties)
-
- if result:
- assert len(result) == 2
- gur = result [0]
- result = result [1]
-
- if self.always_:
- for t in result:
- t.always()
-
- s = self.create_subvariant (
- result,
- self.manager().virtual_targets().recent_targets(), ps,
- source_targets, rproperties, usage_requirements)
- self.manager().virtual_targets().clear_recent_targets()
-
- ur = self.compute_usage_requirements (s)
- ur = ur.add (gur)
- s.set_usage_requirements (ur)
-
- self.manager_.targets().log (
- "Usage requirements from '%s' are '%s'" %
- (self.name(), str(rproperties)))
-
- self.generated_[ps] = GenerateResult (ur, result)
- else:
- self.generated_[ps] = GenerateResult (property_set.empty(), [])
- else:
- # If we just see <build>no, we cannot produce any reasonable
- # diagnostics. The code that adds this property is expected
- # to explain why a target is not built, for example using
- # the configure.log-component-configuration function.
-
- # If this target fails to build, add <build>no to properties
- # to cause any parent target to fail to build. Except that it
- # - does not work now, since we check for <build>no only in
- # common properties, but not in properties that came from
- # dependencies
- # - it's not clear if that's a good idea anyway. The alias
- # target, for example, should not fail to build if a dependency
- # fails.
- self.generated_[ps] = GenerateResult(
- property_set.create(["<build>no"]), [])
- else:
- self.manager().targets().log ("Already built")
-
- self.manager().targets().pop_target()
- self.manager().targets().decrease_indent()
-
- return self.generated_[ps]
-
- def compute_usage_requirements (self, subvariant):
- """ Given the set of generated targets and refined build
- properties, determines and sets appropriate usage requirements
- on those targets.
- """
- rproperties = subvariant.build_properties ()
- xusage_requirements =self.evaluate_requirements(
- self.usage_requirements_, rproperties, "added")
-
- # We generate all dependency properties and add them,
- # as well as their usage requirements, to result.
- (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
- extra = r1 + r2
-
- result = property_set.create (xusage_requirements.non_dependency () + extra)
-
- # Propagate usage requirements we've got from sources, except
- # for the <pch-header> and <pch-file> features.
- #
- # That feature specifies which pch file to use, and should apply
- # only to direct dependents. Consider:
- #
- # pch pch1 : ...
- # lib lib1 : ..... pch1 ;
- # pch pch2 :
- # lib lib2 : pch2 lib1 ;
- #
- # Here, lib2 should not get <pch-header> property from pch1.
- #
- # Essentially, when those two features are in usage requirements,
- # they are propagated only to direct dependents. We might need
- # a more general mechanism, but for now, only those two
- # features are special.
- raw = subvariant.sources_usage_requirements().raw()
- raw = property.change(raw, "<pch-header>", None)
- raw = property.change(raw, "<pch-file>", None)
- result = result.add(property_set.create(raw))
-
- return result
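-
-# A minimal sketch (illustrative only; real properties are Property objects,
-# not strings) of the special-casing above: usage requirements collected from
-# sources are propagated onward, except for the pch features, which must only
-# reach direct dependents.
-def _example_propagate_source_usage(raw_usage_requirements):
-    blocked = ('<pch-header>', '<pch-file>')
-    return [p for p in raw_usage_requirements if not p.startswith(blocked)]
-
-# _example_propagate_source_usage(['<define>USE_FOO', '<pch-header>pch.hpp'])
-# -> ['<define>USE_FOO']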
-
- def create_subvariant (self, root_targets, all_targets,
- build_request, sources,
- rproperties, usage_requirements):
- """Creates a new Subvariant instance for the given targets.
- - 'root_targets' are the virtual targets that will be returned to dependents
- - 'all_targets' are all the virtual
- targets created while building this main target
- - 'build_request' is a property_set instance with the
- requested build properties"""
-
- for e in root_targets:
- e.root (True)
-
- s = Subvariant (self, build_request, sources,
- rproperties, usage_requirements, all_targets)
-
- for v in all_targets:
- if not v.creating_subvariant():
- v.creating_subvariant(s)
-
- return s
-
- def construct (self, name, source_targets, properties):
- """ Constructs the virtual targets for this abstract target and
- the dependency graph. Returns a tuple consisting of the properties and the list of virtual targets.
- Should be overridden in derived classes.
- """
- raise BaseException ("method should be defined in derived classes")
-
-
-class TypedTarget (BasicTarget):
- import generators
-
- def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
- BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
- self.type_ = type
-
- def __jam_repr__(self):
- return b2.util.value_to_jam(self)
-
- def type (self):
- return self.type_
-
- def construct (self, name, source_targets, prop_set):
-
- r = generators.construct (self.project_, name, self.type_,
- prop_set.add_raw(['<main-target-type>' + self.type_]),
- source_targets, True)
-
- if not r:
- print "warning: Unable to construct '%s'" % self.full_name ()
-
- # Are there any top-level generators for this type/property set.
- if not generators.find_viable_generators (self.type_, prop_set):
- print "error: no generators were found for type '" + self.type_ + "'"
- print "error: and the requested properties"
- print "error: make sure you've configured the needed tools"
- print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
-
- print "To debug this problem, try the --debug-generators option."
- sys.exit(1)
-
- return r
-
-def apply_default_build(property_set, default_build):
- # 1. First, see what properties from default_build
- # are already present in property_set.
-
- specified_features = set(p.feature() for p in property_set.all())
-
- defaults_to_apply = []
- for d in default_build.all():
- if not d.feature() in specified_features:
- defaults_to_apply.append(d)
-
- # 2. If there are any defaults to be applied, form the new
- # build request. Pass it through 'expand_no_defaults', since
- # default_build might contain "release debug", which will
- # result in two property_sets.
- result = []
- if defaults_to_apply:
-
- # We have to compress subproperties here to prevent
- # property lists like:
- #
- # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
- #
- # from being expanded into:
- #
- # <toolset-msvc:version>7.1/<threading>multi
- # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
- #
- # due to cross-product property combination. That may
- # be an indication that
- # build_request.expand-no-defaults is the wrong rule
- # to use here.
- compressed = feature.compress_subproperties(property_set.all())
-
- result = build_request.expand_no_defaults(
- b2.build.property_set.create([p]) for p in (compressed + defaults_to_apply))
-
- else:
- result.append (property_set)
-
- return result
-
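-# A tiny standalone illustration (string properties assumed) of step 1 of
-# apply_default_build above: a default is applied only for features that the
-# build request does not mention at all.
-def _example_defaults_to_apply(request, default_build):
-    def feature_of(p):
-        return p.split('>')[0] + '>'
-    specified = set(feature_of(p) for p in request)
-    return [d for d in default_build if feature_of(d) not in specified]
-
-# _example_defaults_to_apply(['<variant>release'],
-#                            ['<variant>debug', '<link>shared'])
-# -> ['<link>shared']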
-
-def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- TypedTarget(name, project, type,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-
-def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- klass(name, project,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-def metatarget_function_for_class(class_):
-
- @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"], ["usage_requirements", "*"]))
- def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []):
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- class_(name, project,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
- return create_metatarget
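-
-# A minimal self-contained sketch (illustration only; the real factory above
-# wires the class into the project and target registries) of the same
-# pattern: a per-class declaration function is manufactured once and can then
-# be called like any other main-target rule.
-def _example_metatarget_function_for_class(class_, registry):
-    def declare(name, sources, requirements=[]):
-        target = class_(name, sources, requirements)
-        registry.append(target)
-        return target
-    return declare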
diff --git a/jam-files/boost-build/build/toolset.jam b/jam-files/boost-build/build/toolset.jam
deleted file mode 100644
index f2036d99..00000000
--- a/jam-files/boost-build/build/toolset.jam
+++ /dev/null
@@ -1,502 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for toolset definition.
-
-import errors ;
-import feature ;
-import generators ;
-import numbers ;
-import path ;
-import property ;
-import regex ;
-import sequence ;
-import set ;
-
-
-.flag-no = 1 ;
-
-.ignore-requirements = ;
-
-# This is used only for testing, to make sure we do not get random extra
-# elements in paths.
-if --ignore-toolset-requirements in [ modules.peek : ARGV ]
-{
- .ignore-requirements = 1 ;
-}
-
-
-# Initializes an additional toolset-like module. First loads the 'toolset-module'
-# and then calls its 'init' rule with the trailing arguments.
-#
-rule using ( toolset-module : * )
-{
- import $(toolset-module) ;
- $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-}
-
-
-# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
-# converted to '<toolset>gcc/<toolset-version>3.2'.
-#
-local rule normalize-condition ( property-sets * )
-{
- local result ;
- for local p in $(property-sets)
- {
- local split = [ feature.split $(p) ] ;
- local expanded = [ feature.expand-subfeatures $(split) ] ;
- result += $(expanded:J=/) ;
- }
- return $(result) ;
-}
-
-
-# Specifies if the 'flags' rule should check that the invoking module is the
-# same as the module we are setting the flag for. 'v' can be either 'checked' or
-# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore
-# the setting that was in effect before calling this rule.
-#
-rule push-checking-for-flags-module ( v )
-{
- .flags-module-checking = $(v) $(.flags-module-checking) ;
-}
-
-rule pop-checking-for-flags-module ( )
-{
- .flags-module-checking = $(.flags-module-checking[2-]) ;
-}
-
-
-# Specifies the flags (variables) that must be set on targets under certain
-# conditions, described by arguments.
-#
-rule flags (
- rule-or-module # If contains a dot, should be a rule name. The flags will
- # be applied when that rule is used to set up build
- # actions.
- #
- # If does not contain dot, should be a module name. The
- # flag will be applied for all rules in that module. If
- # module for rule is different from the calling module, an
- # error is issued.
-
- variable-name # Variable that should be set on target.
- condition * : # A condition when this flag should be applied. Should be a
- # set of property sets. If one of those property sets is
- # contained in the build properties, the flag will be used.
- # Implied values are not allowed: "<toolset>gcc" should be
- # used, not just "gcc". Subfeatures, like in
- # "<toolset>gcc-3.2" are allowed. If left empty, the flag
- # will be used unconditionally.
- #
- # Property sets may use value-less properties ('<a>' vs.
- # '<a>value') to match absent properties. This allows separately
- # matching:
- #
- # <architecture>/<address-model>64
- # <architecture>ia64/<address-model>
- #
- # Where both features are optional. Without this syntax
- # we would be forced to define "default" values.
-
- values * : # The value to add to variable. If <feature> is specified,
- # then the value of 'feature' will be added.
- unchecked ? # If value 'unchecked' is passed, will not test that flags
- # are set for the calling module.
- : hack-hack ? # For
- # flags rule OPTIONS <cxx-abi> : -model ansi
- # Treat <cxx-abi> as condition
- # FIXME: ugly hack.
-)
-{
- local caller = [ CALLER_MODULE ] ;
- if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
- && [ MATCH "(Jamfile<.*)" : $(caller) ]
- {
- # Unqualified rule name, used inside Jamfile. Most likely used with
- # 'make' or 'notfile' rules. This prevents setting flags on the entire
- # Jamfile module (this will be considered as rule), but who cares?
- # Probably, 'flags' rule should be split into 'flags' and
- # 'flags-on-module'.
- rule-or-module = $(caller).$(rule-or-module) ;
- }
- else
- {
- local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
- if $(unchecked) != unchecked
- && $(.flags-module-checking[1]) != unchecked
- && $(module_) != $(caller)
- {
- errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
- }
- }
-
- if $(condition) && ! $(condition:G=) && ! $(hack-hack)
- {
- # We have condition in the form '<feature>', that is, without value.
- # That is an older syntax:
- # flags gcc.link RPATH <dll-path> ;
- # for compatibility, convert it to
- # flags gcc.link RPATH : <dll-path> ;
- values = $(condition) ;
- condition = ;
- }
-
- if $(condition)
- {
- property.validate-property-sets $(condition) ;
- condition = [ normalize-condition $(condition) ] ;
- }
-
- add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
-}
-
-
-# Adds a new flag setting with the specified values. Does no checking.
-#
-local rule add-flag ( rule-or-module : variable-name : condition * : values * )
-{
- .$(rule-or-module).flags += $(.flag-no) ;
-
- # Store all flags for a module.
- local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
- .module-flags.$(module_) += $(.flag-no) ;
- # Store flag-no -> rule-or-module mapping.
- .rule-or-module.$(.flag-no) = $(rule-or-module) ;
-
- .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
- .$(rule-or-module).values.$(.flag-no) += $(values) ;
- .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
-
- .flag-no = [ numbers.increment $(.flag-no) ] ;
-}
-
-
-# Returns the first element of 'property-sets' which is a subset of
-# 'properties' or an empty list if no such element exists.
-#
-rule find-property-subset ( property-sets * : properties * )
-{
- # Cut property values off.
- local prop-keys = $(properties:G) ;
-
- local result ;
- for local s in $(property-sets)
- {
- if ! $(result)
- {
- # Handle value-less properties like '<architecture>' (compare with
- # '<architecture>x86').
-
- local set = [ feature.split $(s) ] ;
-
- # Find the set of features that
- # - have no property specified in required property set
- # - are omitted in the build property set.
- local default-props ;
- for local i in $(set)
- {
- # If $(i) is a value-less property it should match default value
- # of an optional property. See the first line in the example
- # below:
- #
- # property set properties result
- # <a> <b>foo <b>foo match
- # <a> <b>foo <a>foo <b>foo no match
- # <a>foo <b>foo <b>foo no match
- # <a>foo <b>foo <a>foo <b>foo match
- if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
- {
- default-props += $(i) ;
- }
- }
-
- if $(set) in $(properties) $(default-props)
- {
- result = $(s) ;
- }
- }
- }
- return $(result) ;
-}
-
-
-# Returns a value to be added to some flag for some target based on the flag's
-# value definition and the given target's property set.
-#
-rule handle-flag-value ( value * : properties * )
-{
- local result ;
- if $(value:G)
- {
- local matches = [ property.select $(value) : $(properties) ] ;
- for local p in $(matches)
- {
- local att = [ feature.attributes $(p:G) ] ;
- if dependency in $(att)
- {
- # The value of a dependency feature is a target and needs to be
- # actualized.
- result += [ $(p:G=).actualize ] ;
- }
- else if path in $(att) || free in $(att)
- {
- local values ;
- # Treat features with && in the value specially -- each
- # &&-separated element is considered a separate value. This is
- # needed to handle searched libraries or include paths, which
- # may need to be in a specific order.
- if ! [ MATCH (&&) : $(p:G=) ]
- {
- values = $(p:G=) ;
- }
- else
- {
- values = [ regex.split $(p:G=) "&&" ] ;
- }
- if path in $(att)
- {
- result += [ sequence.transform path.native : $(values) ] ;
- }
- else
- {
- result += $(values) ;
- }
- }
- else
- {
- result += $(p:G=) ;
- }
- }
- }
- else
- {
- result += $(value) ;
- }
- return $(result) ;
-}
-
-
-# Given a rule name and a property set, returns a list of interleaved variables
-# names and values which must be set on targets for that rule/property-set
-# combination.
-#
-rule set-target-variables-aux ( rule-or-module : property-set )
-{
- local result ;
- properties = [ $(property-set).raw ] ;
- for local f in $(.$(rule-or-module).flags)
- {
- local variable = $(.$(rule-or-module).variable.$(f)) ;
- local condition = $(.$(rule-or-module).condition.$(f)) ;
- local values = $(.$(rule-or-module).values.$(f)) ;
-
- if ! $(condition) ||
- [ find-property-subset $(condition) : $(properties) ]
- {
- local processed ;
- for local v in $(values)
- {
- # The value might be <feature-name> so needs special treatment.
- processed += [ handle-flag-value $(v) : $(properties) ] ;
- }
- for local r in $(processed)
- {
- result += $(variable) $(r) ;
- }
- }
- }
-
- # Strip away last dot separated part and recurse.
- local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
- if $(next)
- {
- result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
- }
- return $(result) ;
-}
-
-
-rule set-target-variables ( rule-or-module targets + : property-set )
-{
- properties = [ $(property-set).raw ] ;
- local key = $(rule-or-module).$(property-set) ;
- local settings = $(.stv.$(key)) ;
- if ! $(settings)
- {
- settings = [ set-target-variables-aux $(rule-or-module) :
- $(property-set) ] ;
-
- if ! $(settings)
- {
- settings = none ;
- }
- .stv.$(key) = $(settings) ;
- }
-
- if $(settings) != none
- {
- local var-name = ;
- for local name-or-value in $(settings)
- {
- if $(var-name)
- {
- $(var-name) on $(targets) += $(name-or-value) ;
- var-name = ;
- }
- else
- {
- var-name = $(name-or-value) ;
- }
- }
- }
-}
-
-
-# Make toolset 'toolset', defined in a module of the same name, inherit from
-# 'base'.
-# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
-# Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
-# property in requires is adjusted too.
-# 3. All flags are inherited.
-# 4. All rules are imported.
-#
-rule inherit ( toolset : base )
-{
- import $(base) ;
- inherit-generators $(toolset) : $(base) ;
- inherit-flags $(toolset) : $(base) ;
- inherit-rules $(toolset) : $(base) ;
-}
-
-
-rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
-{
- properties ?= <toolset>$(toolset) ;
- local base-generators = [ generators.generators-for-toolset $(base) ] ;
- for local g in $(base-generators)
- {
- local id = [ $(g).id ] ;
-
- if ! $(id) in $(generators-to-ignore)
- {
- # Some generator names have multiple periods in their name, so
- # $(id:B=$(toolset)) does not generate the right new-id name. E.g.
- # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
- # not what we want. Manually parse the base and suffix. If there is
- # a better way to do this, I would love to see it. See also the
- # register() rule in the generators module.
- local base = $(id) ;
- local suffix = "" ;
- while $(base:S)
- {
- suffix = $(base:S)$(suffix) ;
- base = $(base:B) ;
- }
- local new-id = $(toolset)$(suffix) ;
-
- generators.register [ $(g).clone $(new-id) : $(properties) ] ;
- }
- }
-}
-
-
-# Brings all flag definitions from the 'base' toolset into the 'toolset'
-# toolset. Flag definitions whose conditions make use of properties in
-# 'prohibited-properties' are ignored. Note that these are properties, not
-# features: <debug-symbols>on and <debug-symbols>off are distinct properties,
-# so blocking one of them does not block the other.
-#
-# The flag conditions are not altered at all, so if a condition includes the
-# name or version of a base toolset, it will never match the inheriting
-# toolset. When such flag settings must be inherited, define a rule in the base
-# toolset module and call it as needed.
-#
-rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
-{
- for local f in $(.module-flags.$(base))
- {
- local rule-or-module = $(.rule-or-module.$(f)) ;
- if ( [ set.difference
- $(.$(rule-or-module).condition.$(f)) :
- $(prohibited-properties) ]
- || ! $(.$(rule-or-module).condition.$(f))
- ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
- {
- local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
- local new-rule-or-module ;
- if $(rule_)
- {
- new-rule-or-module = $(toolset).$(rule_) ;
- }
- else
- {
- new-rule-or-module = $(toolset) ;
- }
-
- add-flag
- $(new-rule-or-module)
- : $(.$(rule-or-module).variable.$(f))
- : $(.$(rule-or-module).condition.$(f))
- : $(.$(rule-or-module).values.$(f)) ;
- }
- }
-}
-
-
-rule inherit-rules ( toolset : base : localize ? )
-{
- # It appears that "action" creates a local rule.
- local base-generators = [ generators.generators-for-toolset $(base) ] ;
- local rules ;
- for local g in $(base-generators)
- {
- rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
- }
- rules = [ sequence.unique $(rules) ] ;
- IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
- IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
-}
-
-
-# Return the list of global 'toolset requirements'. Those requirements will be
-# automatically added to the requirements of any main target.
-#
-rule requirements ( )
-{
- return $(.requirements) ;
-}
-
-
-# Adds elements to the list of global 'toolset requirements'. The requirements
-# will be automatically added to the requirements for all main targets, as if
-# they were specified literally. For best results, all requirements added should
-# be conditional or indirect conditional.
-#
-rule add-requirements ( requirements * )
-{
- if ! $(.ignore-requirements)
- {
- .requirements += $(requirements) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
- assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
- assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
-
- local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
- assert.result <a>/<b> : find-property-subset $(p-set) : ;
- assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
- assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
- assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
-}
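For illustration, the condition matching exercised by the __test__ cases above can be sketched in a few lines of standalone Python (not part of Boost.Build; the value-less-property handling of the real rule is omitted): a condition is a '/'-joined property set, and it matches when every one of its components is present in the build properties.

def find_property_subset(property_sets, properties):
    # Return the first '/'-joined candidate whose components are all present
    # in 'properties', or None if no candidate matches.
    props = set(properties)
    for candidate in property_sets:
        parts = [p for p in candidate.split('/') if p]
        if all(p in props for p in parts):
            return candidate
    return None

# Mirrors the first __test__ assertion above.
assert find_property_subset(
    ["<c>1/<d>2/<e>3", "<a>0/<b>0/<c>1", "<d>2/<e>5", "<a>9"],
    ["<b>0", "<c>1", "<d>2", "<e>3", "<f>4"]) == "<c>1/<d>2/<e>3"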
diff --git a/jam-files/boost-build/build/toolset.py b/jam-files/boost-build/build/toolset.py
deleted file mode 100644
index b4267987..00000000
--- a/jam-files/boost-build/build/toolset.py
+++ /dev/null
@@ -1,398 +0,0 @@
-# Status: being ported by Vladimir Prus
-# Base revision: 40958
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-""" Support for toolset definition.
-"""
-
-import feature, property, generators, property_set
-import b2.util.set
-from b2.util import cached, qualify_jam_action
-from b2.util.utility import *
-from b2.util import bjam_signature
-from b2.manager import get_manager
-
-__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
-__re_two_ampersands = re.compile ('(&&)')
-__re_first_segment = re.compile ('([^.]*).*')
-__re_first_group = re.compile (r'[^.]*\.(.*)')
-
-# A single toolset flag. Specifies that when certain properties are present in
-# the build property set, certain values should be appended to some variable.
-#
-# A flag applies to a specific action in a specific module. The list of all
-# flags for a module is stored, and each flag further records the name of the
-# rule it applies to.
-class Flag:
-
- def __init__(self, variable_name, values, condition, rule = None):
- self.variable_name = variable_name
- self.values = values
- self.condition = condition
- self.rule = rule
-
- def __str__(self):
- return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\
- ", " + str(self.condition) + ", " + str(self.rule) + ")")
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __module_flags, __flags, __stv
-
- # Mapping from module name to a list of all flags that apply
- # to either that module directly, or to any rule in that module.
- # Each element of the list is Flag instance.
- # So, for module named xxx this might contain flags for 'xxx',
- # for 'xxx.compile', for 'xxx.compile.c++', etc.
- __module_flags = {}
-
- # Mapping from specific rule or module name to a list of Flag instances
- # that apply to that name.
- # Say, it might contain flags for 'xxx.compile.c++'. If there are
- # entries for module name 'xxx', they are flags for 'xxx' itself,
- # not including any rules in that module.
- __flags = {}
-
- # A cache for variable settings. The key is generated from the rule name and the properties.
- __stv = {}
-
-reset ()
-
-# FIXME: --ignore-toolset-requirements
-# FIXME: using
-
-# FIXME push-checking-for-flags-module ....
-# FIXME: investigate existing uses of 'hack-hack' parameter
-# in jam code.
-
-@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
- ["values", "*"]))
-def flags(rule_or_module, variable_name, condition, values = []):
- """ Specifies the flags (variables) that must be set on targets under certain
- conditions, described by arguments.
- rule_or_module: If it contains a dot, it should be a rule name.
- The flags will be applied when that rule is
- used to set up build actions.
-
- If it does not contain a dot, it should be a module name.
- The flags will be applied for all rules in that
- module. If the module for the rule is different
- from the calling module, an error is issued.
-
- variable_name: Variable that should be set on target
-
- condition: A condition under which this flag should be applied.
- Should be a set of property sets. If one of
- those property sets is contained in the build
- properties, the flag will be used.
- Implied values are not allowed:
- "<toolset>gcc" should be used, not just
- "gcc". Subfeatures, like in "<toolset>gcc-3.2",
- are allowed. If left empty, the flag will
- always be used.
-
- Property sets may use value-less properties
- ('<a>' vs. '<a>value') to match absent
- properties. This allows separately matching
-
- <architecture>/<address-model>64
- <architecture>ia64/<address-model>
-
- where both features are optional. Without this
- syntax we would be forced to define a "default" value.
-
- values: The value to add to variable. If <feature>
- is specified, then the value of 'feature'
- will be added.
- """
- caller = bjam.caller()[:-1]
- if not '.' in rule_or_module and caller.startswith("Jamfile"):
- # Unqualified rule name, used inside Jamfile. Most likely used with
- # 'make' or 'notfile' rules. This prevents setting flags on the entire
- # Jamfile module (this will be considered as rule), but who cares?
- # Probably, 'flags' rule should be split into 'flags' and
- # 'flags-on-module'.
- rule_or_module = qualify_jam_action(rule_or_module, caller)
- else:
- # FIXME: revive checking that we don't set flags for a different
- # module unintentionally
- pass
-
- if condition and not replace_grist (condition, ''):
- # We have condition in the form '<feature>', that is, without
- # value. That's a previous syntax:
- #
- # flags gcc.link RPATH <dll-path> ;
- # for compatibility, convert it to
- # flags gcc.link RPATH : <dll-path> ;
- values = [ condition ]
- condition = None
-
- if condition:
- transformed = []
- for c in condition:
- # FIXME: 'split' might be a too raw tool here.
- pl = [property.create_from_string(s) for s in c.split('/')]
- pl = feature.expand_subfeatures(pl);
- transformed.append(property_set.create(pl))
- condition = transformed
-
- property.validate_property_sets(condition)
-
- __add_flag (rule_or_module, variable_name, condition, values)
-
-def set_target_variables (manager, rule_or_module, targets, ps):
- """
- """
- settings = __set_target_variables_aux(manager, rule_or_module, ps)
-
- if settings:
- for s in settings:
- for target in targets:
- manager.engine ().set_target_variable (target, s [0], s[1], True)
-
-def find_satisfied_condition(conditions, ps):
- """Returns the first element of 'property-sets' which is a subset of
- 'properties', or an empty list if no such element exists."""
-
- features = set(p.feature() for p in ps.all())
-
- for condition in conditions:
-
- found_all = True
- for i in condition.all():
-
- found = False
- if i.value():
- found = i.value() in ps.get(i.feature())
- else:
- # Handle value-less properties like '<architecture>' (compare with
- # '<architecture>x86').
- # If $(i) is a value-less property it should match default
- # value of an optional property. See the first line in the
- # example below:
- #
- # property set properties result
- # <a> <b>foo <b>foo match
- # <a> <b>foo <a>foo <b>foo no match
- # <a>foo <b>foo <b>foo no match
- # <a>foo <b>foo <a>foo <b>foo match
- found = not i.feature() in features
-
- found_all = found_all and found
-
- if found_all:
- return condition
-
- return None
-
-
-def register (toolset):
- """ Registers a new toolset.
- """
- feature.extend('toolset', [toolset])
-
-def inherit_generators (toolset, properties, base, generators_to_ignore = []):
- if not properties:
- properties = [replace_grist (toolset, '<toolset>')]
-
- base_generators = generators.generators_for_toolset(base)
-
- for g in base_generators:
- id = g.id()
-
- if not id in generators_to_ignore:
- # Some generator names have multiple periods in their name, so
- # $(id:B=$(toolset)) doesn't generate the right new_id name.
- # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
- # which is not what we want. Manually parse the base and suffix
- # (if there's a better way to do this, I'd love to see it.)
- # See also register in module generators.
- (base, suffix) = split_action_id(id)
-
- new_id = toolset + '.' + suffix
-
- generators.register(g.clone(new_id, properties))
-
-def inherit_flags(toolset, base, prohibited_properties = []):
- """Brings all flag definitions from the 'base' toolset into the 'toolset'
- toolset. Flag definitions whose conditions make use of properties in
- 'prohibited-properties' are ignored. Note that these are properties, not
- features: <debug-symbols>on and <debug-symbols>off are distinct properties,
- so blocking one of them does not block the other.
-
- The flag conditions are not altered at all, so if a condition includes the
- name or version of a base toolset, it will never match the inheriting
- toolset. When such flag settings must be inherited, define a rule in the base
- toolset module and call it as needed."""
- for f in __module_flags.get(base, []):
-
- if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
- match = __re_first_group.match(f.rule)
- rule_ = None
- if match:
- rule_ = match.group(1)
-
- new_rule_or_module = ''
-
- if rule_:
- new_rule_or_module = toolset + '.' + rule_
- else:
- new_rule_or_module = toolset
-
- __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
-
-def inherit_rules (toolset, base):
- pass
- # FIXME: do something about this.
-# base_generators = generators.generators_for_toolset (base)
-
-# import action
-
-# ids = []
-# for g in base_generators:
-# (old_toolset, id) = split_action_id (g.id ())
-# ids.append (id) ;
-
-# new_actions = []
-
-# engine = get_manager().engine()
- # FIXME: do this!
-# for action in engine.action.values():
-# pass
-# (old_toolset, id) = split_action_id(action.action_name)
-#
-# if old_toolset == base:
-# new_actions.append ((id, value [0], value [1]))
-#
-# for a in new_actions:
-# action.register (toolset + '.' + a [0], a [1], a [2])
-
- # TODO: how to deal with this?
-# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ;
-# # Import the rules to the global scope
-# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
-# }
-#
-
-######################################################################################
-# Private functions
-
-@cached
-def __set_target_variables_aux (manager, rule_or_module, ps):
- """ Given a rule name and a property set, returns a list of tuples of
- variables names and values, which must be set on targets for that
- rule/properties combination.
- """
- result = []
-
- for f in __flags.get(rule_or_module, []):
-
- if not f.condition or find_satisfied_condition (f.condition, ps):
- processed = []
- for v in f.values:
- # The value might be <feature-name> so needs special
- # treatment.
- processed += __handle_flag_value (manager, v, ps)
-
- for r in processed:
- result.append ((f.variable_name, r))
-
- # strip away last dot separated part and recurse.
- next = __re_split_last_segment.match(rule_or_module)
-
- if next:
- result.extend(__set_target_variables_aux(
- manager, next.group(1), ps))
-
- return result
-
-def __handle_flag_value (manager, value, ps):
- result = []
-
- if get_grist (value):
- f = feature.get(value)
- values = ps.get(f)
-
- for value in values:
-
- if f.dependency():
- # the value of a dependency feature is a target
- # and must be actualized
- result.append(value.actualize())
-
- elif f.path() or f.free():
-
- # Treat features with && in the value
- # specially -- each &&-separated element is considered
- # separate value. This is needed to handle searched
- # libraries, which must be in specific order.
- if not __re_two_ampersands.search(value):
- result.append(value)
-
- else:
- result.extend(value.split ('&&'))
- else:
- # Ordinary feature: append the value as-is.
- result.append (value)
- else:
- result.append (value)
-
- return result
-
-def __add_flag (rule_or_module, variable_name, condition, values):
- """ Adds a new flag setting with the specified values.
- Does no checking.
- """
- f = Flag(variable_name, values, condition, rule_or_module)
-
- # Grab the name of the module
- m = __re_first_segment.match (rule_or_module)
- assert m
- module = m.group(1)
-
- __module_flags.setdefault(module, []).append(f)
- __flags.setdefault(rule_or_module, []).append(f)
-
-__requirements = []
-
-def requirements():
- """Return the list of global 'toolset requirements'.
- Those requirements will be automatically added to the requirements of any main target."""
- return __requirements
-
-def add_requirements(requirements):
- """Adds elements to the list of global 'toolset requirements'. The requirements
- will be automatically added to the requirements for all main targets, as if
- they were specified literally. For best results, all requirements added should
- be conditional or indirect conditional."""
-
- #if ! $(.ignore-requirements)
- #{
- print "XXXX", requirements
- __requirements.extend(requirements)
- #}
-
-# Make toolset 'toolset', defined in a module of the same name,
-# inherit from 'base'
-# 1. The 'init' rule from 'base' is imported into 'toolset' with full
-# name. Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and
-# <toolset> property in requires is adjusted too
-# 3. All flags are inherited
-# 4. All rules are imported.
-def inherit(toolset, base):
- get_manager().projects().load_module(base, []);
-
- inherit_generators(toolset, [], base)
- inherit_flags(toolset, base)
- inherit_rules(toolset, base)
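The renaming performed by inherit_generators above hinges on splitting an action id at its first dot (split_action_id itself is presumably pulled in through the wildcard utility import). A minimal standalone sketch of that idea, for illustration only:

def split_action_id(id):
    # "gcc.compile.c++" -> ("gcc", "compile.c++"); an id without a dot has an
    # empty suffix.
    if '.' in id:
        return tuple(id.split('.', 1))
    return (id, '')

def rename_for_toolset(id, toolset):
    # Rebuild the id under the inheriting toolset, keeping the action suffix.
    base, suffix = split_action_id(id)
    return toolset + '.' + suffix if suffix else toolset

assert rename_for_toolset("gcc.compile.c++", "darwin") == "darwin.compile.c++"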
diff --git a/jam-files/boost-build/build/type.jam b/jam-files/boost-build/build/type.jam
deleted file mode 100644
index 1a7a5782..00000000
--- a/jam-files/boost-build/build/type.jam
+++ /dev/null
@@ -1,425 +0,0 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Deals with target type declaration and defines target class which supports
-# typed targets.
-
-import "class" : new ;
-import errors ;
-import feature ;
-import generators : * ;
-import project ;
-import property ;
-import scanner ;
-import os ;
-
-# The following import would create a circular dependency:
-# project -> project-root -> builtin -> type -> targets -> project
-# import targets ;
-
-# The feature is optional so it would never get added implicitly. It is used
-# only for internal purposes and in all cases we want to use it explicitly.
-feature.feature target-type : : composite optional ;
-
-feature.feature main-target-type : : optional incidental ;
-feature.feature base-target-type : : composite optional free ;
-
-
-# Registers a target type, possibly derived from a 'base-type'. Providing a list
-# of 'suffixes' here is a shortcut for separately calling the register-suffixes
-# rule with the given suffixes and the set-generated-target-suffix rule with the
-# first given suffix.
-#
-rule register ( type : suffixes * : base-type ? )
-{
- # Type names cannot contain hyphens, because when used as feature-values
- # they would be interpreted as composite features which need to be
- # decomposed.
- switch $(type)
- {
- case *-* : errors.error "type name \"$(type)\" contains a hyphen" ;
- }
-
- if $(type) in $(.types)
- {
- errors.error "Type $(type) is already registered." ;
- }
- else
- {
- .types += $(type) ;
- .base.$(type) = $(base-type) ;
- .derived.$(base-type) += $(type) ;
-
- if $(suffixes)-is-not-empty
- {
- # Specify mapping from suffixes to type.
- register-suffixes $(suffixes) : $(type) ;
- # By default, generated targets of 'type' will use the first of
- # 'suffixes'. This may be overridden.
- set-generated-target-suffix $(type) : : $(suffixes[1]) ;
- }
-
- feature.extend target-type : $(type) ;
- feature.extend main-target-type : $(type) ;
- feature.extend base-target-type : $(type) ;
-
- feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
- feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
-
- # We used to declare the main target rule only when a 'main' parameter
- # has been specified. However, it is hard to decide that a type will
- # *never* need a main target rule and so from time to time we needed to
- # make yet another type 'main'. So now a main target rule is defined for
- # each type.
- main-rule-name = [ type-to-rule-name $(type) ] ;
- .main-target-type.$(main-rule-name) = $(type) ;
- IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
-
- # Adding a new derived type affects generator selection so we need to
- # make the generator selection module update any of its cached
- # information related to a new derived type being defined.
- generators.update-cached-information-with-a-new-type $(type) ;
- }
-}
-
-
-# Given a type, returns the name of the main target rule which creates targets
-# of that type.
-#
-rule type-to-rule-name ( type )
-{
- # Lowercase everything. Convert underscores to dashes.
- import regex ;
- local n = [ regex.split $(type:L) "_" ] ;
- return $(n:J=-) ;
-}
-
-
-# Given a main target rule name, returns the type for which it creates targets.
-#
-rule type-from-rule-name ( rule-name )
-{
- return $(.main-target-type.$(rule-name)) ;
-}
-
-
-# Specifies that files with suffix from 'suffixes' be recognized as targets of
-# type 'type'. Issues an error if a different type is already specified for any
-# of the suffixes.
-#
-rule register-suffixes ( suffixes + : type )
-{
- for local s in $(suffixes)
- {
- if ! $(.type.$(s))
- {
- .type.$(s) = $(type) ;
- }
- else if $(.type.$(s)) != $(type)
- {
- errors.error Attempting to specify multiple types for suffix
- \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
- }
- }
-}
-
-
-# Returns true iff type has been registered.
-#
-rule registered ( type )
-{
- if $(type) in $(.types)
- {
- return true ;
- }
-}
-
-
-# Issues an error if 'type' is unknown.
-#
-rule validate ( type )
-{
- if ! [ registered $(type) ]
- {
- errors.error "Unknown target type $(type)" ;
- }
-}
-
-
-# Sets a scanner class that will be used for this 'type'.
-#
-rule set-scanner ( type : scanner )
-{
- validate $(type) ;
- .scanner.$(type) = $(scanner) ;
-}
-
-
-# Returns a scanner instance appropriate to 'type' and 'properties'.
-#
-rule get-scanner ( type : property-set )
-{
- if $(.scanner.$(type))
- {
- return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
- }
-}
-
-
-# Returns a base type for the given type or nothing in case the given type is
-# not derived.
-#
-rule base ( type )
-{
- return $(.base.$(type)) ;
-}
-
-
-# Returns the given type and all of its base types in order of their distance
-# from type.
-#
-rule all-bases ( type )
-{
- local result = $(type) ;
- while $(type)
- {
- type = [ base $(type) ] ;
- result += $(type) ;
- }
- return $(result) ;
-}
-
-
-# Returns the given type and all of its derived types in order of their distance
-# from type.
-#
-rule all-derived ( type )
-{
- local result = $(type) ;
- for local d in $(.derived.$(type))
- {
- result += [ all-derived $(d) ] ;
- }
- return $(result) ;
-}
-
-
-# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
-# indirect base.
-#
-rule is-derived ( type base )
-{
- if $(base) in [ all-bases $(type) ]
- {
- return true ;
- }
-}
-
-# Returns true if 'type' is either derived from or is equal to 'base'.
-#
-# TODO: It might be that is-derived and is-subtype were meant to be different
-# rules - one returning true for type = base and one not, but as currently
-# implemented they are actually the same. Clean this up.
-#
-rule is-subtype ( type base )
-{
- return [ is-derived $(type) $(base) ] ;
-}
-
-
-# Store suffixes for generated targets.
-.suffixes = [ new property-map ] ;
-
-# Store prefixes for generated targets (e.g. "lib" for library).
-.prefixes = [ new property-map ] ;
-
-
-# Sets a file suffix to be used when generating a target of 'type' with the
-# specified properties. Can be called with no properties if no suffix has
-# already been specified for the 'type'. The 'suffix' parameter can be an empty
-# string ("") to indicate that no suffix should be used.
-#
-# Note that this does not cause files with 'suffix' to be automatically
-# recognized as being of 'type'. Two different types can use the same suffix for
-# their generated files but only one type can be auto-detected for a file with
-# that suffix. User should explicitly specify which one using the
-# register-suffixes rule.
-#
-rule set-generated-target-suffix ( type : properties * : suffix )
-{
- set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
-}
-
-
-# Change the suffix previously registered for this type/properties combination.
-# If suffix is not yet specified, sets it.
-#
-rule change-generated-target-suffix ( type : properties * : suffix )
-{
- change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
-}
-
-
-# Returns the suffix used when generating a file of 'type' with the given
-# properties.
-#
-rule generated-target-suffix ( type : property-set )
-{
- return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
-}
-
-
-# Sets a target prefix that should be used when generating targets of 'type'
-# with the specified properties. Can be called with empty properties if no
-# prefix for 'type' has been specified yet.
-#
-# The 'prefix' parameter can be empty string ("") to indicate that no prefix
-# should be used.
-#
-# Usage example: library names use the "lib" prefix on unix.
-#
-rule set-generated-target-prefix ( type : properties * : prefix )
-{
- set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
-}
-
-
-# Change the prefix previously registered for this type/properties combination.
-# If prefix is not yet specified, sets it.
-#
-rule change-generated-target-prefix ( type : properties * : prefix )
-{
- change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
-}
-
-
-rule generated-target-prefix ( type : property-set )
-{
- return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
-}
-
-
-# Common rules for prefix/suffix provisioning follow.
-
-local rule set-generated-target-ps ( ps : type : properties * : psval )
-{
- properties = <target-type>$(type) $(properties) ;
- $(.$(ps)es).insert $(properties) : $(psval) ;
-}
-
-
-local rule change-generated-target-ps ( ps : type : properties * : psval )
-{
- properties = <target-type>$(type) $(properties) ;
- local prev = [ $(.$(ps)es).find-replace $(properties) : $(psval) ] ;
- if ! $(prev)
- {
- set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
- }
-}
-
-
-# Returns either prefix or suffix (as indicated by 'ps') that should be used
-# when generating a target of 'type' with the specified properties. Parameter
-# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
-# 'type', returns prefix/suffix for base type, if any.
-#
-local rule generated-target-ps-real ( ps : type : properties * )
-{
- local result ;
- local found ;
- while $(type) && ! $(found)
- {
- result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ;
- # If the prefix/suffix is explicitly set to an empty string, we consider
- # prefix/suffix to be found. If we were not to compare with "", there
- # would be no way to specify an empty prefix/suffix.
- if $(result)-is-not-empty
- {
- found = true ;
- }
- type = $(.base.$(type)) ;
- }
- if $(result) = ""
- {
- result = ;
- }
- return $(result) ;
-}
-
-
-local rule generated-target-ps ( ps : type : property-set )
-{
- local key = .$(ps).$(type).$(property-set) ;
- local v = $($(key)) ;
- if ! $(v)
- {
- v = [ generated-target-ps-real $(ps) : $(type) : [ $(property-set).raw ]
- ] ;
- if ! $(v)
- {
- v = none ;
- }
- $(key) = $(v) ;
- }
-
- if $(v) != none
- {
- return $(v) ;
- }
-}
-
-
-# Returns file type given its name. If there are several dots in filename, tries
-# each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so" will
-# be tried.
-#
-rule type ( filename )
-{
- if [ os.name ] in NT CYGWIN
- {
- filename = $(filename:L) ;
- }
- local type ;
- while ! $(type) && $(filename:S)
- {
- local suffix = $(filename:S) ;
- type = $(.type$(suffix)) ;
- filename = $(filename:S=) ;
- }
- return $(type) ;
-}
-
-
-# Rule used to construct all main targets. Note that this rule gets imported
-# into the global namespace under different alias names and the exact target
-# type to construct is selected based on the alias used to actually invoke this
-# rule.
-#
-rule main-target-rule ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- # First discover the required target type based on the exact alias used to
- # invoke this rule.
- local bt = [ BACKTRACE 1 ] ;
- local rulename = $(bt[4]) ;
- local target-type = [ type-from-rule-name $(rulename) ] ;
-
- # This is a circular module dependency and so must be imported here.
- import targets ;
-
- return [ targets.create-typed-target $(target-type) : [ project.current ] :
- $(name) : $(sources) : $(requirements) : $(default-build) :
- $(usage-requirements) ] ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- # TODO: Add tests for all the is-derived, is-base & related type relation
- # checking rules.
-}
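The suffix-driven detection implemented by the 'type' rule above (for "file.so.1.2", try "2", then "1", then "so") can be sketched in standalone Python; the registry below is a hypothetical example, not the real suffix map.

import os.path

suffix_to_type = {"so": "SHARED_LIB", "a": "STATIC_LIB", "cpp": "CPP"}

def detect_type(filename):
    # Peel suffixes from the right until a registered one is found.
    while True:
        filename, suffix = os.path.splitext(filename)
        if not suffix:
            return None
        if suffix[1:] in suffix_to_type:
            return suffix_to_type[suffix[1:]]

assert detect_type("libfoo.so.1.2") == "SHARED_LIB"
assert detect_type("README") is None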
diff --git a/jam-files/boost-build/build/type.py b/jam-files/boost-build/build/type.py
deleted file mode 100644
index ddb7ba09..00000000
--- a/jam-files/boost-build/build/type.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# Status: ported.
-# Base revision: 45462.
-
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-
-
-import re
-import os
-import os.path
-from b2.util.utility import replace_grist, os_name
-from b2.exceptions import *
-from b2.build import feature, property, scanner
-from b2.util import bjam_signature
-
-
-__re_hyphen = re.compile ('-')
-
-def __register_features ():
- """ Register features need by this module.
- """
- # The feature is optional so that it is never implicitly added.
- # It's used only for internal purposes, and in all cases we
- # want to explicitly use it.
- feature.feature ('target-type', [], ['composite', 'optional'])
- feature.feature ('main-target-type', [], ['optional', 'incidental'])
- feature.feature ('base-target-type', [], ['composite', 'optional', 'free'])
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- Note that this must be called _after_ resetting the module 'feature'.
- """
- global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache
-
- __register_features ()
-
- # Stores suffixes for generated targets.
- __prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()]
-
- # Maps suffixes to types
- __suffixes_to_types = {}
-
- # A map with all the registered types, indexed by the type name
- # Each entry is a dictionary with following values:
- # 'base': the name of base type or None if type has no base
- # 'derived': a list of names of type which derive from this one
- # 'scanner': the scanner class registered for this type, if any
- __types = {}
-
- # Caches suffixes for targets with certain properties.
- __target_suffixes_cache = {}
-
-reset ()
-
-@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"]))
-def register (type, suffixes = [], base_type = None):
- """ Registers a target type, possibly derived from a 'base-type'.
- If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'.
- Also, the first element gives the suffix to be used when constructing an object of
- 'type'.
- type: a string
- suffixes: None or a sequence of strings
- base_type: None or a string
- """
- # Type names cannot contain hyphens, because when used as
- # feature-values they will be interpreted as composite features
- # which need to be decomposed.
- if __re_hyphen.search (type):
- raise BaseException ('type name "%s" contains a hyphen' % type)
-
- if __types.has_key (type):
- raise BaseException ('Type "%s" is already registered.' % type)
-
- entry = {}
- entry ['base'] = base_type
- entry ['derived'] = []
- entry ['scanner'] = None
- __types [type] = entry
-
- if base_type:
- __types [base_type]['derived'].append (type)
-
- if len (suffixes) > 0:
- # Generated targets of 'type' will use the first of 'suffixes'
- # (this may be overridden)
- set_generated_target_suffix (type, [], suffixes [0])
-
- # Specify mapping from suffixes to type
- register_suffixes (suffixes, type)
-
- feature.extend('target-type', [type])
- feature.extend('main-target-type', [type])
- feature.extend('base-target-type', [type])
-
- if base_type:
- feature.compose ('<target-type>' + type, replace_grist (base_type, '<base-target-type>'))
- feature.compose ('<base-target-type>' + type, '<base-target-type>' + base_type)
-
- import b2.build.generators as generators
- # Adding a new derived type affects generator selection so we need to
- # make the generator selection module update any of its cached
- # information related to a new derived type being defined.
- generators.update_cached_information_with_a_new_type(type)
-
- # FIXME: resolving recursive dependency.
- from b2.manager import get_manager
- get_manager().projects().project_rules().add_rule_for_type(type)
-
-# FIXME: quick hack.
-def type_from_rule_name(rule_name):
- return rule_name.upper().replace("-", "_")
-
-
-def register_suffixes (suffixes, type):
- """ Specifies that targets with suffix from 'suffixes' have the type 'type'.
- If a different type is already specified for any of the suffixes, issues an error.
- """
- for s in suffixes:
- if __suffixes_to_types.has_key (s):
- old_type = __suffixes_to_types [s]
- if old_type != type:
- raise BaseException ('Attempting to specify type for suffix "%s"\nOld type: "%s", New type "%s"' % (s, old_type, type))
- else:
- __suffixes_to_types [s] = type
-
-def registered (type):
- """ Returns true iff type has been registered.
- """
- return __types.has_key (type)
-
-def validate (type):
- """ Issues an error if 'type' is unknown.
- """
- if not registered (type):
- raise BaseException ("Unknown target type '%s'" % type)
-
-def set_scanner (type, scanner):
- """ Sets a scanner class that will be used for this 'type'.
- """
- validate (type)
- __types [type]['scanner'] = scanner
-
-def get_scanner (type, prop_set):
- """ Returns a scanner instance appropriate to 'type' and 'property_set'.
- """
- if registered (type):
- scanner_type = __types [type]['scanner']
- if scanner_type:
- return scanner.get (scanner_type, prop_set.raw ())
- pass
-
- return None
-
-def base(type):
- """Returns a base type for the given type or nothing in case the given type is
- not derived."""
-
- return __types[type]['base']
-
-def all_bases (type):
- """ Returns type and all of its bases, in the order of their distance from type.
- """
- result = []
- while type:
- result.append (type)
- type = __types [type]['base']
-
- return result
-
-def all_derived (type):
- """ Returns type and all classes that derive from it, in the order of their distance from type.
- """
- result = [type]
- for d in __types [type]['derived']:
- result.extend (all_derived (d))
-
- return result
-
-def is_derived (type, base):
- """ Returns true if 'type' is 'base' or has 'base' as its direct or indirect base.
- """
- # TODO: this isn't very efficient, especially for bases close to type
- if base in all_bases (type):
- return True
- else:
- return False
-
-def is_subtype (type, base):
- """ Same as is_derived. Should be removed.
- """
- # TODO: remove this method
- return is_derived (type, base)
-
-@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
-def set_generated_target_suffix (type, properties, suffix):
- """ Sets a target suffix that should be used when generating target
- of 'type' with the specified properties. Can be called with
- empty properties if no suffix for 'type' was specified yet.
- This does not automatically specify that files with suffix 'suffix'
- have type 'type' --- two different types can use the same suffix for
- their generated files, but only one type should be auto-detected for
- a file with that suffix. The user should explicitly specify which
- one.
-
- The 'suffix' parameter can be empty string ("") to indicate that
- no suffix should be used.
- """
- set_generated_target_ps(1, type, properties, suffix)
-
-
-
-def change_generated_target_suffix (type, properties, suffix):
- """ Change the suffix previously registered for this type/properties
- combination. If suffix is not yet specified, sets it.
- """
- change_generated_target_ps(1, type, properties, suffix)
-
-def generated_target_suffix(type, properties):
- return generated_target_ps(1, type, properties)
-
-# Sets a target prefix that should be used when generating targets of 'type'
-# with the specified properties. Can be called with empty properties if no
-# prefix for 'type' has been specified yet.
-#
-# The 'prefix' parameter can be empty string ("") to indicate that no prefix
-# should be used.
-#
-# Usage example: library names use the "lib" prefix on unix.
-@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
-def set_generated_target_prefix(type, properties, prefix):
- set_generated_target_ps(0, type, properties, prefix)
-
-# Change the prefix previously registered for this type/properties combination.
-# If prefix is not yet specified, sets it.
-def change_generated_target_prefix(type, properties, prefix):
- change_generated_target_ps(0, type, properties, prefix)
-
-def generated_target_prefix(type, properties):
- return generated_target_ps(0, type, properties)
-
-def set_generated_target_ps(is_suffix, type, properties, val):
- properties.append ('<target-type>' + type)
- __prefixes_suffixes[is_suffix].insert (properties, val)
-
-def change_generated_target_ps(is_suffix, type, properties, val):
- properties.append ('<target-type>' + type)
- prev = __prefixes_suffixes[is_suffix].find_replace(properties, val)
- if not prev:
- set_generated_target_ps(is_suffix, type, properties, val)
-
-# Returns either prefix or suffix (as indicated by 'is_suffix') that should be used
-# when generating a target of 'type' with the specified properties.
-# If no prefix/suffix is specified for 'type', returns prefix/suffix for
-# base type, if any.
-def generated_target_ps_real(is_suffix, type, properties):
-
- result = ''
- found = False
- while type and not found:
- result = __prefixes_suffixes[is_suffix].find (['<target-type>' + type] + properties)
-
- # Note that if the string is empty (""), but not null, we consider
- # suffix found. Setting prefix or suffix to empty string is fine.
- if result is not None:
- found = True
-
- type = __types [type]['base']
-
- if not result:
- result = ''
- return result
-
-def generated_target_ps(is_suffix, type, prop_set):
- """ Returns suffix that should be used when generating target of 'type',
- with the specified properties. If not suffix were specified for
- 'type', returns suffix for base type, if any.
- """
- key = (is_suffix, type, prop_set)
- v = __target_suffixes_cache.get(key, None)
-
- if not v:
- v = generated_target_ps_real(is_suffix, type, prop_set.raw())
- __target_suffixes_cache [key] = v
-
- return v
-
-def type(filename):
- """ Returns file type given it's name. If there are several dots in filename,
- tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
- "so" will be tried.
- """
- while 1:
- filename, suffix = os.path.splitext (filename)
- if not suffix: return None
- suffix = suffix[1:]
-
- if __suffixes_to_types.has_key(suffix):
- return __suffixes_to_types[suffix]
-
-# NOTE: moved from tools/types/register
-def register_type (type, suffixes, base_type = None, os = []):
- """ Register the given type on the specified OSes, or on remaining OSes
- if os is not specified. This rule is injected into each of the type
- modules for the sake of convenience.
- """
- if registered (type):
- return
-
- if not os or os_name () in os:
- register (type, suffixes, base_type)
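The base-type fallback performed by generated_target_ps_real above can be illustrated with a small standalone sketch. The registries here are hypothetical examples; as in the real code, an explicitly registered empty string ("") counts as found.

types = {"EXE": {"base": None}, "PYTHON_EXE": {"base": "EXE"}}
suffixes = {"EXE": "exe"}

def generated_suffix(type):
    # Walk up the base-type chain until a suffix (possibly "") is registered.
    while type:
        result = suffixes.get(type)
        if result is not None:
            return result
        type = types[type]["base"]
    return ''

assert generated_suffix("PYTHON_EXE") == "exe"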
diff --git a/jam-files/boost-build/build/version.jam b/jam-files/boost-build/build/version.jam
deleted file mode 100644
index 7626ddda..00000000
--- a/jam-files/boost-build/build/version.jam
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-import numbers ;
-
-major = "2011" ;
-minor = "04" ;
-
-rule boost-build ( )
-{
- return "$(major).$(minor)-svn" ;
-}
-
-rule print ( )
-{
- if [ verify-engine-version ]
- {
- ECHO "Boost.Build" [ boost-build ] ;
- }
-}
-
-rule verify-engine-version ( )
-{
- local v = [ modules.peek : JAM_VERSION ] ;
-
- if $(v[1]) != $(major) || $(v[2]) != $(minor)
- {
- local argv = [ modules.peek : ARGV ] ;
- local e = $(argv[1]) ;
- local l = [ modules.binding version ] ;
- l = $(l:D) ;
- l = $(l:D) ;
- ECHO "warning: mismatched versions of Boost.Build engine and core" ;
- ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ;
- ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ;
- }
- else
- {
- return true ;
- }
-}
-
-
-
-# Utility rule for testing whether all elements in a sequence are equal to 0.
-#
-local rule is-all-zeroes ( sequence * )
-{
- local result = "true" ;
- for local e in $(sequence)
- {
- if $(e) != "0"
- {
- result = "" ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns "true" if the first version is less than the second one.
-#
-rule version-less ( lhs + : rhs + )
-{
- numbers.check $(lhs) ;
- numbers.check $(rhs) ;
-
- local done ;
- local result ;
-
- while ! $(done) && $(lhs) && $(rhs)
- {
- if [ numbers.less $(lhs[1]) $(rhs[1]) ]
- {
- done = "true" ;
- result = "true" ;
- }
- else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
- {
- done = "true" ;
- }
- else
- {
- lhs = $(lhs[2-]) ;
- rhs = $(rhs[2-]) ;
- }
- }
- if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
- {
- result = "true" ;
- }
-
- return $(result) ;
-}
-
-
-# Returns "true" if the current JAM version version is at least the given
-# version.
-#
-rule check-jam-version ( version + )
-{
- local version-tag = $(version:J=.) ;
- if ! $(version-tag)
- {
- errors.error Invalid version specifier: : $(version:E="(undefined)") ;
- }
-
- if ! $(.jam-version-check.$(version-tag))-is-not-empty
- {
- local jam-version = [ modules.peek : JAM_VERSION ] ;
- if ! $(jam-version)
- {
- errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
- "installation is most likely terribly outdated." ;
- }
- .jam-version-check.$(version-tag) = "true" ;
- if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
- {
- .jam-version-check.$(version-tag) = "" ;
- }
- }
- return $(.jam-version-check.$(version-tag)) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- local jam-version = [ modules.peek : JAM_VERSION ] ;
- local future-version = $(jam-version) ;
- future-version += "1" ;
-
- assert.true check-jam-version $(jam-version) ;
- assert.false check-jam-version $(future-version) ;
-
- assert.true version-less 0 : 1 ;
- assert.false version-less 0 : 0 ;
- assert.true version-less 1 : 2 ;
- assert.false version-less 1 : 1 ;
- assert.false version-less 2 : 1 ;
- assert.true version-less 3 1 20 : 3 4 10 ;
- assert.false version-less 3 1 10 : 3 1 10 ;
- assert.false version-less 3 4 10 : 3 1 20 ;
- assert.true version-less 3 1 20 5 1 : 3 4 10 ;
- assert.false version-less 3 1 10 5 1 : 3 1 10 ;
- assert.false version-less 3 4 10 5 1 : 3 1 20 ;
- assert.true version-less 3 1 20 : 3 4 10 5 1 ;
- assert.true version-less 3 1 10 : 3 1 10 5 1 ;
- assert.false version-less 3 4 10 : 3 1 20 5 1 ;
- assert.false version-less 3 1 10 : 3 1 10 0 0 ;
- assert.false version-less 3 1 10 0 0 : 3 1 10 ;
- assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
- assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
- assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
-
- # TODO: Add tests for invalid input data being sent to version-less.
-}
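The comparison implemented by version-less above reads naturally as a small standalone Python function over integer component lists (the jam rule works on string tokens and validates them with numbers.check; that validation is omitted here).

def version_less(lhs, rhs):
    # Compare components from the left.
    for l, r in zip(lhs, rhs):
        if l < r:
            return True
        if r < l:
            return False
    # Equal common prefix: lhs is smaller only if rhs has a non-zero tail.
    return len(lhs) < len(rhs) and any(c != 0 for c in rhs[len(lhs):])

# Mirrors a few of the __test__ assertions above.
assert version_less([3, 1, 20], [3, 4, 10])
assert not version_less([3, 1, 10], [3, 1, 10, 0, 0])
assert version_less([3, 1, 10], [3, 1, 10, 5, 1])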
diff --git a/jam-files/boost-build/build/virtual-target.jam b/jam-files/boost-build/build/virtual-target.jam
deleted file mode 100644
index 2e8446bc..00000000
--- a/jam-files/boost-build/build/virtual-target.jam
+++ /dev/null
@@ -1,1317 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements virtual targets, which correspond to actual files created during a
-# build, but are not yet targets in the Jam sense. They are needed, for example,
-# when searching for possible transformation sequences, when it is not yet known
-# whether a particular target should be created at all.
-
-import "class" : new ;
-import errors ;
-import path ;
-import sequence ;
-import set ;
-import type ;
-import utility ;
-
-
-# +--------------------------+
-# | virtual-target |
-# +==========================+
-# | actualize |
-# +--------------------------+
-# | actualize-action() = 0 |
-# | actualize-location() = 0 |
-# +----------------+---------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# +---------------------+ +-------+--------------+
-# | action | | abstract-file-target |
-# +=====================| * +======================+
-# | action-name | +--+ action |
-# | properties | | +----------------------+
-# +---------------------+--+ | actualize-action() |
-# | actualize() |0..1 +-----------+----------+
-# | path() | |
-# | adjust-properties() | sources |
-# | actualize-sources() | targets |
-# +------+--------------+ ^
-# | / \
-# ^ +-+-+
-# / \ |
-# +-+-+ +-------------+-------------+
-# | | |
-# | +------+---------------+ +--------+-------------+
-# | | file-target | | searched-lib-target |
-# | +======================+ +======================+
-# | | actualize-location() | | actualize-location() |
-# | +----------------------+ +----------------------+
-# |
-# +-+------------------------------+
-# | |
-# +----+----------------+ +---------+-----------+
-# | compile-action | | link-action |
-# +=====================+ +=====================+
-# | adjust-properties() | | adjust-properties() |
-# +---------------------+ | actualize-sources() |
-# +---------------------+
-#
-# The 'compile-action' and 'link-action' classes are not defined here but in
-# builtin.jam modules. They are shown in the diagram to give the big picture.
-
-
-# Models a potential target. It can be converted into a Jam target and used in
-# building, if needed. However, it can also be dropped, which allows us to
-# search for different transformations and select only one.
-#
-class virtual-target
-{
- import scanner ;
- import sequence ;
- import utility ;
- import virtual-target ;
-
- rule __init__ (
- name # Target/project name.
- : project # Project to which this target belongs.
- )
- {
- self.name = $(name) ;
- self.project = $(project) ;
- self.dependencies = ;
- }
-
- # Name of this target.
- #
- rule name ( )
- {
- return $(self.name) ;
- }
-
- # Project of this target.
- #
- rule project ( )
- {
- return $(self.project) ;
- }
-
- # Adds additional 'virtual-target' instances this one depends on.
- #
- rule depends ( d + )
- {
- self.dependencies = [ sequence.merge $(self.dependencies) :
- [ sequence.insertion-sort $(d) ] ] ;
- }
-
- rule dependencies ( )
- {
- return $(self.dependencies) ;
- }
-
- rule always ( )
- {
- .always = 1 ;
- }
-
- # Generates all the actual targets and sets up build actions for this
- # target.
- #
- # If 'scanner' is specified, creates an additional target with the same
- # location as the actual target, which will depend on the actual target and
- # be associated with a 'scanner'. That additional target is returned. See
- # the docs (#dependency_scanning) for rationale. Target must correspond to a
- # file if 'scanner' is specified.
- #
- # If scanner is not specified then the actual target is returned.
- #
- rule actualize ( scanner ? )
- {
- local actual-name = [ actualize-no-scanner ] ;
-
- if $(.always)
- {
- ALWAYS $(actual-name) ;
- }
-
- if ! $(scanner)
- {
- return $(actual-name) ;
- }
- else
- {
- # Add the scanner instance to the grist for name.
- local g = [ sequence.join
- [ utility.ungrist $(actual-name:G) ] $(scanner) : - ] ;
- local name = $(actual-name:G=$(g)) ;
-
- if ! $(self.made.$(name))
- {
- self.made.$(name) = true ;
-
- DEPENDS $(name) : $(actual-name) ;
-
- actualize-location $(name) ;
-
- scanner.install $(scanner) : $(name) $(__name__) ;
- }
- return $(name) ;
- }
- }
-
-# private: (overridables)
-
- # Sets up build actions for 'target'. Should call appropriate rules and set
- # target variables.
- #
- rule actualize-action ( target )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- # Sets up variables on 'target' which specify its location.
- #
- rule actualize-location ( target )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- # If the target is a generated one, returns the path where it will be
- # generated. Otherwise, returns an empty list.
- #
- rule path ( )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- # Returns the actual target name to be used in case when no scanner is
- # involved.
- #
- rule actual-name ( )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
-# implementation
- rule actualize-no-scanner ( )
- {
- # In fact, we just need to merge virtual-target with
- # abstract-file-target as the latter is the only class derived from the
- # former. But that has been left for later.
-
- errors.error "method should be defined in derived classes" ;
- }
-}
-
-
-# Target corresponding to a file. The exact mapping for file is not yet
-# specified in this class. (TODO: Actually, the class name could be better...)
-#
-# May be a source file (when no action is specified) or a derived file
-# (otherwise).
-#
-# The target's grist is a concatenation of its project's location, action
-# properties (for derived targets) and, optionally, value identifying the main
-# target.
-#
-class abstract-file-target : virtual-target
-{
- import project ;
- import regex ;
- import sequence ;
- import path ;
- import type ;
- import property-set ;
- import indirect ;
-
- rule __init__ (
- name # Target's name.
- exact ? # If non-empty, the name is exactly the name the created file
- # should have. Otherwise, the '__init__' method will add a
- # suffix obtained from 'type' by calling
- # 'type.generated-target-suffix'.
- : type ? # Target's type.
- : project
- : action ?
- )
- {
- virtual-target.__init__ $(name) : $(project) ;
-
- self.type = $(type) ;
- self.action = $(action) ;
- if $(action)
- {
- $(action).add-targets $(__name__) ;
-
- if $(self.type) && ! $(exact)
- {
- _adjust-name $(name) ;
- }
- }
- }
-
- rule type ( )
- {
- return $(self.type) ;
- }
-
- # Sets the path. When generating target name, it will override any path
- # computation from properties.
- #
- rule set-path ( path )
- {
- self.path = [ path.native $(path) ] ;
- }
-
- # Returns the currently set action.
- #
- rule action ( )
- {
- return $(self.action) ;
- }
-
- # Sets/gets the 'root' flag. Target is root if it directly corresponds to
- # some variant of a main target.
- #
- rule root ( set ? )
- {
- if $(set)
- {
- self.root = true ;
- }
- return $(self.root) ;
- }
-
- # Gets or sets the subvariant which created this target. Subvariant is set
- # when the target is brought into existence and is never changed after that.
- # In particular, if a target is shared between multiple subvariants, only the
- # first is stored.
- #
- rule creating-subvariant ( s ? # If specified, specifies the value to set,
- # which should be a 'subvariant' class
- # instance.
- )
- {
- if $(s) && ! $(self.creating-subvariant)
- {
- self.creating-subvariant = $(s) ;
- }
- return $(self.creating-subvariant) ;
- }
-
- rule actualize-action ( target )
- {
- if $(self.action)
- {
- $(self.action).actualize ;
- }
- }
-
- # Return a human-readable representation of this target. If this target has
- # an action, that is:
- #
- # { <action-name>-<self.name>.<self.type> <action-sources>... }
- #
- # otherwise, it is:
- #
- # { <self.name>.<self.type> }
- #
- rule str ( )
- {
- local action = [ action ] ;
- local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
-
- if $(action)
- {
- local sources = [ $(action).sources ] ;
- local action-name = [ $(action).action-name ] ;
-
- local ss ;
- for local s in $(sources)
- {
- ss += [ $(s).str ] ;
- }
-
- return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
- }
- else
- {
- return "{" $(name-dot-type) "}" ;
- }
- }
-
- rule less ( a )
- {
- if [ str ] < [ $(a).str ]
- {
- return true ;
- }
- }
-
- rule equal ( a )
- {
- if [ str ] = [ $(a).str ]
- {
- return true ;
- }
- }
-
-# private:
- rule actual-name ( )
- {
- if ! $(self.actual-name)
- {
- local grist = [ grist ] ;
- local basename = [ path.native $(self.name) ] ;
- self.actual-name = <$(grist)>$(basename) ;
- }
- return $(self.actual-name) ;
- }
-
- # Helper to 'actual-name', above. Computes a unique prefix used to
- # distinguish this target from other targets with the same name creating
- # different files.
- #
- rule grist ( )
- {
- # Depending on target, there may be different approaches to generating
- # unique prefixes. We generate prefixes in the form:
- # <one letter approach code> <the actual prefix>
- local path = [ path ] ;
- if $(path)
- {
- # The target will be generated to a known path. Just use the path
- # for identification, since path is as unique as it can get.
- return p$(path) ;
- }
- else
- {
- # File is either source, which will be searched for, or is not a
- # file at all. Use the location of project for distinguishing.
- local project-location = [ $(self.project).get location ] ;
- local location-grist = [ sequence.join [ regex.split
- $(project-location) "/" ] : "!" ] ;
-
- if $(self.action)
- {
- local ps = [ $(self.action).properties ] ;
- local property-grist = [ $(ps).as-path ] ;
- # 'property-grist' can be empty when 'ps' is an empty property
- # set.
- if $(property-grist)
- {
- location-grist = $(location-grist)/$(property-grist) ;
- }
- }
-
- return l$(location-grist) ;
- }
- }
-
- # Given the target name specified in constructor, returns the name which
- # should be really used, by looking at the <tag> properties. Tag properties
- # need to be specified as <tag>@rule-name. This makes Boost Build call the
- # specified rule with the target name, type and properties to get the new
- # name. If no <tag> property is specified or the rule specified by <tag>
- # returns nothing, returns the result of calling
- # virtual-target.add-prefix-and-suffix.
- #
- rule _adjust-name ( specified-name )
- {
- local ps ;
- if $(self.action)
- {
- ps = [ $(self.action).properties ] ;
- }
- else
- {
- ps = [ property-set.empty ] ;
- }
-
- # We add ourselves to the properties so that any tag rule can get more
- # direct information about the target than just that available through
- # the properties. This is useful in implementing name changes based on
- # the sources of the target. For example to make unique names of object
- # files based on the source file. --grafik
- ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
-
- local tag = [ $(ps).get <tag> ] ;
-
- if $(tag)
- {
- local rule-name = [ MATCH ^@(.*) : $(tag) ] ;
- if $(rule-name)
- {
- if $(tag[2])
- {
- errors.error "<tag>@rulename is present but is not the only"
- "<tag> feature" ;
- }
-
- self.name = [ indirect.call $(rule-name) $(specified-name)
- : $(self.type) : $(ps) ] ;
- }
- else
- {
- errors.error
- "The value of the <tag> feature must be '@rule-name'" ;
- }
- }
-
- # If there is no tag or the tag rule returned nothing.
- if ! $(tag) || ! $(self.name)
- {
- self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
- : $(self.type) : $(ps) ] ;
- }
- }
-
- rule actualize-no-scanner ( )
- {
- local name = [ actual-name ] ;
-
- # Do anything only on the first invocation.
- if ! $(self.made.$(name))
- {
- self.made.$(name) = true ;
-
- if $(self.action)
- {
- # For non-derived target, we do not care if there are several
- # virtual targets that refer to the same name. One case when
- # this is unavoidable is when the file name is main.cpp and two
- # targets have types CPP (for compiling) and MOCCABLE_CPP (for
- # conversion to H via Qt tools).
- virtual-target.register-actual-name $(name) : $(__name__) ;
- }
-
- for local i in $(self.dependencies)
- {
- DEPENDS $(name) : [ $(i).actualize ] ;
- }
-
- actualize-location $(name) ;
- actualize-action $(name) ;
- }
- return $(name) ;
- }
-}
-
-
-# Appends the suffix appropriate to 'type/property-set' combination to the
-# specified name and returns the result.
-#
-rule add-prefix-and-suffix ( specified-name : type ? : property-set )
-{
- local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
-
- # Handle suffixes for which no leading dot is desired. Those are specified
- # by enclosing them in <...>. Needed by python so it can create "_d.so"
- # extensions, for example.
- if $(suffix:G)
- {
- suffix = [ utility.ungrist $(suffix) ] ;
- }
- else
- {
- suffix = .$(suffix) ;
- }
-
- local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
-
- if [ MATCH ^($(prefix)) : $(specified-name) ]
- {
- prefix = ;
- }
- return $(prefix:E="")$(specified-name)$(suffix:E="") ;
-}
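
To illustrate the naming rule above, here is a minimal Python sketch of the same prefix/suffix composition; it is not part of the deleted module, and the affix table is a made-up stand-in for the type module's generated-target-prefix/suffix queries.

    # Hypothetical affix table standing in for type.generated-target-prefix/suffix.
    AFFIXES = {"SHARED_LIB": ("lib", "so"), "PYTHON_EXTENSION": ("", "<_d.so>")}

    def compose_name(name, target_type):
        prefix, suffix = AFFIXES.get(target_type, ("", ""))
        # A suffix enclosed in <...> (gristed) is used verbatim, without a leading dot.
        if suffix.startswith("<") and suffix.endswith(">"):
            suffix = suffix[1:-1]
        elif suffix:
            suffix = "." + suffix
        # Do not double an already-present prefix.
        if prefix and name.startswith(prefix):
            prefix = ""
        return prefix + name + suffix

    # compose_name("hello", "SHARED_LIB")       -> "libhello.so"
    # compose_name("hello", "PYTHON_EXTENSION") -> "hello_d.so"
    # compose_name("libhello", "SHARED_LIB")    -> "libhello.so"
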
-
-
-# File targets with explicitly known location.
-#
-# The file path is determined as
-# * Value passed to the 'set-path' method, if any.
-# * For derived files, project's build dir, joined with components that
-# describe action properties. If free properties are not equal to the
-# project's reference properties an element with the name of the main
-# target is added.
-# * For source files, project's source dir.
-#
-# The file suffix is determined as:
-# * The value passed to the 'suffix' method, if any.
-# * The suffix corresponding to the target's type.
-#
-class file-target : abstract-file-target
-{
- import "class" : new ;
- import common ;
- import errors ;
-
- rule __init__ (
- name exact ?
- : type ? # Optional type for this target.
- : project
- : action ?
- : path ?
- )
- {
- abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) :
- $(action) ;
-
- self.path = $(path) ;
- }
-
- rule clone-with-different-type ( new-type )
- {
- return [ new file-target $(self.name) exact : $(new-type) :
- $(self.project) : $(self.action) : $(self.path) ] ;
- }
-
- rule actualize-location ( target )
- {
- if $(self.action)
- {
- # This is a derived file.
- local path = [ path ] ;
- LOCATE on $(target) = $(path) ;
-
- # Make sure the path exists.
- DEPENDS $(target) : $(path) ;
- common.MkDir $(path) ;
-
- # It is possible that the target name includes a directory too, for
- # example when installing headers. Create that directory.
- if $(target:D)
- {
- local d = $(target:D) ;
- d = $(d:R=$(path)) ;
- DEPENDS $(target) : $(d) ;
- common.MkDir $(d) ;
- }
-
- # For a real file target, we create a fake target depending on the
- # real target. This allows us to run
- #
- # bjam hello.o
- #
- # without trying to guess the name of the real target. Note that the
- # target has no directory name and uses a special <e> grist.
- #
-            # First, that means that "bjam hello.o" will build all known hello.o
-            # targets. Second, the <e> grist makes sure this target will not be
-            # confused with other targets. For example, if we have a subdir 'test'
-            # with a target 'test' in it that includes a 'test.o' file, then the
-            # target for the directory will be just 'test', the target for test.o
-            # will be <ptest/bin/gcc/debug>test.o, and the target we create below
-            # will be <e>test.o.
- DEPENDS $(target:G=e) : $(target) ;
- # Allow bjam <path-to-file>/<file> to work. This will not catch all
- # possible ways to refer to the path (relative/absolute, extra ".",
-            # various ".."), but it should help in obvious cases.
- DEPENDS $(target:G=e:R=$(path)) : $(target) ;
- }
- else
- {
- SEARCH on $(target) = [ path.native $(self.path) ] ;
- }
- }
-
- # Returns the directory for this target.
- #
- rule path ( )
- {
- if ! $(self.path)
- {
- if $(self.action)
- {
- local p = [ $(self.action).properties ] ;
- local path,relative-to-build-dir = [ $(p).target-path ] ;
- local path = $(path,relative-to-build-dir[1]) ;
- local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
-
- if $(relative-to-build-dir)
- {
- path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
- }
-
- self.path = [ path.native $(path) ] ;
- }
- }
- return $(self.path) ;
- }
-}
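
The path lookup in 'file-target' above can be summarised with a small Python sketch (names are illustrative, and the build and source dirs are assumed to come from the owning project): an explicitly set path wins, derived files land under the build dir joined with the action's property path, and plain sources resolve to the source dir.

    import os

    def target_directory(explicit_path, is_derived, property_path,
                         relative_to_build_dir, build_dir, source_dir):
        if explicit_path:                 # value passed to set-path
            return explicit_path
        if is_derived:                    # target has a creating action
            if relative_to_build_dir:
                return os.path.join(build_dir, property_path)
            return property_path          # already a full path
        return source_dir                 # plain source file

    # target_directory(None, True, "gcc/debug", True, "bin", "src") -> "bin/gcc/debug"
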
-
-
-class notfile-target : abstract-file-target
-{
- rule __init__ ( name : project : action ? )
- {
- abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
- }
-
- # Returns nothing to indicate that the target's path is not known.
- #
- rule path ( )
- {
- return ;
- }
-
- rule actualize-location ( target )
- {
- NOTFILE $(target) ;
- ALWAYS $(target) ;
- # TEMPORARY $(target) ;
- NOUPDATE $(target) ;
- }
-}
-
-
-# Class representing an action. Both 'targets' and 'sources' should list
-# instances of 'virtual-target'. Action name should name a rule with this
-# prototype:
-# rule action-name ( targets + : sources * : properties * )
-# Targets and sources are passed as actual Jam targets. The rule may not
-# establish additional dependency relationships.
-#
-class action
-{
- import "class" ;
- import errors ;
- import type ;
- import toolset ;
- import property-set ;
- import indirect ;
- import path ;
- import set : difference ;
-
- rule __init__ ( sources * : action-name + : property-set ? )
- {
- self.sources = $(sources) ;
-
- self.action-name = [ indirect.make-qualified $(action-name) ] ;
-
- if ! $(property-set)
- {
- property-set = [ property-set.empty ] ;
- }
-
- if ! [ class.is-instance $(property-set) ]
- {
- errors.error "Property set instance required" ;
- }
-
- self.properties = $(property-set) ;
- }
-
- rule add-targets ( targets * )
- {
- self.targets += $(targets) ;
- }
-
- rule replace-targets ( old-targets * : new-targets * )
- {
- self.targets = [ set.difference $(self.targets) : $(old-targets) ] ;
- self.targets += $(new-targets) ;
- }
-
- rule targets ( )
- {
- return $(self.targets) ;
- }
-
- rule sources ( )
- {
- return $(self.sources) ;
- }
-
- rule action-name ( )
- {
- return $(self.action-name) ;
- }
-
- rule properties ( )
- {
- return $(self.properties) ;
- }
-
- # Generates actual build instructions.
- #
- rule actualize ( )
- {
- if ! $(self.actualized)
- {
- self.actualized = true ;
-
- local ps = [ properties ] ;
- local properties = [ adjust-properties $(ps) ] ;
-
- local actual-targets ;
- for local i in [ targets ]
- {
- actual-targets += [ $(i).actualize ] ;
- }
-
- actualize-sources [ sources ] : $(properties) ;
-
- DEPENDS $(actual-targets) : $(self.actual-sources)
- $(self.dependency-only-sources) ;
-
-            # This works around a bug with -j and actions that
-            # produce multiple targets, where:
-            # - the dependency on the first output is found, and
-            #   the action is started;
-            # - the dependency on the second output is found, and
-            #   bjam notices that the command is already running;
-            # - instead of waiting for the command, dependents
-            #   of the second target are immediately updated.
- if $(actual-targets[2])
- {
- INCLUDES $(actual-targets) : $(actual-targets) ;
- }
-
-            # The action name can include an additional argument to the rule,
-            # which should not be passed to 'set-target-variables'.
- toolset.set-target-variables
- [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
- : $(properties) ;
-
- # Reflect ourselves in a variable for the target. This allows
- # looking up additional info for the action given the raw target.
- # For example to debug or output action information from action
- # rules.
- .action on $(actual-targets) = $(__name__) ;
-
- indirect.call $(self.action-name) $(actual-targets)
- : $(self.actual-sources) : [ $(properties).raw ] ;
-
- # Since we set up the creating action here, we set up the action for
- # cleaning up as well.
- common.Clean clean-all : $(actual-targets) ;
- }
- }
-
- # Helper for 'actualize-sources'. For each passed source, actualizes it with
- # the appropriate scanner. Returns the actualized virtual targets.
- #
- rule actualize-source-type ( sources * : property-set )
- {
- local result = ;
- for local i in $(sources)
- {
- local scanner ;
- if [ $(i).type ]
- {
- scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
- }
- result += [ $(i).actualize $(scanner) ] ;
- }
- return $(result) ;
- }
-
- # Creates actual Jam targets for sources. Initializes the following member
- # variables:
- # 'self.actual-sources' -- sources passed to the updating action.
- # 'self.dependency-only-sources' -- sources marked as dependencies, but
- # are not used otherwise.
- #
- # New values will be *appended* to the variables. They may be non-empty if
- # caller wants it.
- #
- rule actualize-sources ( sources * : property-set )
- {
- local dependencies = [ $(self.properties).get <dependency> ] ;
-
- self.dependency-only-sources +=
- [ actualize-source-type $(dependencies) : $(property-set) ] ;
- self.actual-sources +=
- [ actualize-source-type $(sources) : $(property-set) ] ;
-
- # This is used to help bjam find dependencies in generated headers and
- # other main targets, e.g. in:
- #
- # make a.h : ....... ;
- # exe hello : hello.cpp : <implicit-dependency>a.h ;
- #
- # For bjam to find the dependency the generated target must be
- # actualized (i.e. have its Jam target constructed). In the above case,
- # if we are building just hello ("bjam hello"), 'a.h' will not be
- # actualized unless we do it here.
- local implicit = [ $(self.properties).get <implicit-dependency> ] ;
- for local i in $(implicit)
- {
- $(i:G=).actualize ;
- }
- }
-
- # Determines real properties when trying to build with 'properties'. This is
- # the last chance to fix properties, for example to adjust includes to get
- # generated headers correctly. Default implementation simply returns its
- # argument.
- #
- rule adjust-properties ( property-set )
- {
- return $(property-set) ;
- }
-}
-
-
-# Action class which does nothing --- it produces the targets with specific
-# properties out of nowhere. It is needed to distinguish virtual targets with
-# different properties that are known to exist and have no actions which create
-# them.
-#
-class null-action : action
-{
- rule __init__ ( property-set ? )
- {
- action.__init__ : .no-action : $(property-set) ;
- }
-
- rule actualize ( )
- {
- if ! $(self.actualized)
- {
- self.actualized = true ;
- for local i in [ targets ]
- {
- $(i).actualize ;
- }
- }
- }
-}
-
-
-# Class which acts exactly like 'action', except that its sources are not
-# scanned for dependencies.
-#
-class non-scanning-action : action
-{
- rule __init__ ( sources * : action-name + : property-set ? )
- {
- action.__init__ $(sources) : $(action-name) : $(property-set) ;
- }
-
- rule actualize-source-type ( sources * : property-set )
- {
- local result ;
- for local i in $(sources)
- {
- result += [ $(i).actualize ] ;
- }
- return $(result) ;
- }
-}
-
-
-# Creates a virtual target with an appropriate name and type from 'file'. If a
-# target with that name in that project already exists, returns that already
-# created target.
-#
-# FIXME: a more correct way would be to compute the path to the file, based on
-# name and source location for the project, and use that path to determine if
-# the target has already been created. This logic should be shared with how we
-# usually find targets identified by a specific target id. It should also be
-# updated to work correctly when the file is specified using both relative and
-# absolute paths.
-#
-# TODO: passing a project with all virtual targets is starting to be annoying.
-#
-rule from-file ( file : file-loc : project )
-{
- import type ; # Had to do this here to break a circular dependency.
-
- # Check whether we already created a target corresponding to this file.
- local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
-
- if $(.files.$(path))
- {
- return $(.files.$(path)) ;
- }
- else
- {
- local name = [ path.make $(file) ] ;
- local type = [ type.type $(file) ] ;
- local result ;
-
- result = [ new file-target $(file) : $(type) : $(project) : :
- $(file-loc) ] ;
-
- .files.$(path) = $(result) ;
- return $(result) ;
- }
-}
-
-
-# Registers a new virtual target. Checks if there is already a registered target
-# with the same name, type, project and subvariant properties as well as the
-# same sources and an equal action. If such a target is found, it is returned
-# and a new 'target' is not registered. Otherwise, 'target' is registered and
-# returned.
-#
-rule register ( target )
-{
- local signature = [ sequence.join
- [ $(target).path ] [ $(target).name ] : - ] ;
-
- local result ;
- for local t in $(.cache.$(signature))
- {
- local a1 = [ $(t).action ] ;
- local a2 = [ $(target).action ] ;
-
- if ! $(result)
- {
- if ! $(a1) && ! $(a2)
- {
- result = $(t) ;
- }
- else
- {
- if $(a1) && $(a2) &&
- ( [ $(a1).action-name ] = [ $(a2).action-name ] ) &&
- ( [ $(a1).sources ] = [ $(a2).sources ] )
- {
- local ps1 = [ $(a1).properties ] ;
- local ps2 = [ $(a2).properties ] ;
- local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference
- [ $(ps1).dependency ] : [ $(ps1).incidental ] ] ;
- local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference
- [ $(ps2).dependency ] : [ $(ps2).incidental ] ] ;
- if $(p1) = $(p2)
- {
- result = $(t) ;
- }
- }
- }
- }
- }
-
- if ! $(result)
- {
- .cache.$(signature) += $(target) ;
- result = $(target) ;
- }
-
- .recent-targets += $(result) ;
- .all-targets += $(result) ;
-
- return $(result) ;
-}
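
A rough, self-contained Python sketch of the registration cache above (the deleted Python port further down in this diff has the real implementation): targets are bucketed by a "path-name" signature, and within a bucket two targets collapse when both lack an action or when their actions agree; the property comparison here is simplified relative to the base/free/dependency-minus-incidental split used above.

    _cache = {}

    def register(target):
        signature = "%s-%s" % (target.path() or "", target.name())
        for existing in _cache.get(signature, []):
            a1, a2 = existing.action(), target.action()
            if a1 is None and a2 is None:
                return existing           # two source targets for the same file
            if (a1 and a2 and a1.action_name() == a2.action_name()
                    and a1.sources() == a2.sources()
                    and a1.properties() == a2.properties()):
                return existing           # an equivalent target is already registered
        _cache.setdefault(signature, []).append(target)
        return target
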
-
-
-# Each target returned by 'register' is added to the .recent-targets list,
-# returned by this function. This allows us to find all virtual targets created
-# when building a specific main target, even those constructed only as
-# intermediate targets.
-#
-rule recent-targets ( )
-{
- return $(.recent-targets) ;
-}
-
-
-rule clear-recent-targets ( )
-{
- .recent-targets = ;
-}
-
-
-# Returns all virtual targets ever created.
-#
-rule all-targets ( )
-{
- return $(.all-targets) ;
-}
-
-
-# Returns all targets from 'targets' with types equal to 'type' or derived from
-# it.
-#
-rule select-by-type ( type : targets * )
-{
- local result ;
- for local t in $(targets)
- {
- if [ type.is-subtype [ $(t).type ] $(type) ]
- {
- result += $(t) ;
- }
- }
- return $(result) ;
-}
-
-
-rule register-actual-name ( actual-name : virtual-target )
-{
- if $(.actual.$(actual-name))
- {
- local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ;
- local cs2 = [ $(virtual-target).creating-subvariant ] ;
- local cmt1 = [ $(cs1).main-target ] ;
- local cmt2 = [ $(cs2).main-target ] ;
-
- local action1 = [ $(.actual.$(actual-name)).action ] ;
- local action2 = [ $(virtual-target).action ] ;
- local properties-added ;
- local properties-removed ;
- if $(action1) && $(action2)
- {
- local p1 = [ $(action1).properties ] ;
- p1 = [ $(p1).raw ] ;
- local p2 = [ $(action2).properties ] ;
- p2 = [ $(p2).raw ] ;
- properties-removed = [ set.difference $(p1) : $(p2) ] ;
- properties-removed ?= "none" ;
- properties-added = [ set.difference $(p2) : $(p1) ] ;
- properties-added ?= "none" ;
- }
- errors.error "Duplicate name of actual target:" $(actual-name)
- : "previous virtual target" [ $(.actual.$(actual-name)).str ]
- : "created from" [ $(cmt1).full-name ]
- : "another virtual target" [ $(virtual-target).str ]
- : "created from" [ $(cmt2).full-name ]
- : "added properties:" $(properties-added)
- : "removed properties:" $(properties-removed) ;
- }
- else
- {
- .actual.$(actual-name) = $(virtual-target) ;
- }
-}
-
-
-# Traverses the dependency graph of 'target' and returns all targets that will be
-# created before this one is created. If the root of some dependency graph is
-# found during traversal, it is either included or not, depending on the
-# 'include-roots' value. In either case traversal stops at root targets, i.e.
-# root target sources are not traversed.
-#
-rule traverse ( target : include-roots ? : include-sources ? )
-{
- local result ;
- if [ $(target).action ]
- {
- local action = [ $(target).action ] ;
- # This includes the 'target' as well.
- result += [ $(action).targets ] ;
-
- for local t in [ $(action).sources ]
- {
- if ! [ $(t).root ]
- {
- result += [ traverse $(t) : $(include-roots) : $(include-sources) ] ;
- }
- else if $(include-roots)
- {
- result += $(t) ;
- }
- }
- }
- else if $(include-sources)
- {
- result = $(target) ;
- }
- return $(result) ;
-}
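
The traversal reads more directly in Python; this condensed sketch mirrors the ported 'traverse' function that appears later in this diff: walk back through creating actions and stop at root targets.

    def traverse(target, include_roots=False, include_sources=False):
        result = []
        action = target.action()
        if action:
            result += action.targets()    # includes 'target' itself
            for source in action.sources():
                if not source.root():
                    result += traverse(source, include_roots, include_sources)
                elif include_roots:
                    result.append(source)
        elif include_sources:
            result.append(target)
        return result
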
-
-
-# Takes an 'action' instance and creates a new instance of it and all targets
-# produced by the action. The action name and properties are set to
-# 'new-action-name' and 'new-properties', if those are specified. Returns the
-# cloned action.
-#
-rule clone-action ( action : new-project : new-action-name ? : new-properties ? )
-{
- if ! $(new-action-name)
- {
- new-action-name = [ $(action).action-name ] ;
- }
- if ! $(new-properties)
- {
- new-properties = [ $(action).properties ] ;
- }
-
- local action-class = [ modules.peek $(action) : __class__ ] ;
- local cloned-action = [ class.new $(action-class)
- [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
-
- local cloned-targets ;
- for local target in [ $(action).targets ]
- {
- local n = [ $(target).name ] ;
- # Do not modify produced target names.
- local cloned-target = [ class.new file-target $(n) exact :
- [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
- local d = [ $(target).dependencies ] ;
- if $(d)
- {
- $(cloned-target).depends $(d) ;
- }
- $(cloned-target).root [ $(target).root ] ;
- $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
-
- cloned-targets += $(cloned-target) ;
- }
-
- return $(cloned-action) ;
-}
-
-
-class subvariant
-{
- import sequence ;
- import type ;
-
- rule __init__ ( main-target # The instance of main-target class.
- : property-set # Properties requested for this target.
- : sources *
- : build-properties # Actually used properties.
- : sources-usage-requirements # Properties propagated from sources.
- : created-targets * ) # Top-level created targets.
- {
- self.main-target = $(main-target) ;
- self.properties = $(property-set) ;
- self.sources = $(sources) ;
- self.build-properties = $(build-properties) ;
- self.sources-usage-requirements = $(sources-usage-requirements) ;
- self.created-targets = $(created-targets) ;
-
- # Pre-compose a list of other dependency graphs this one depends on.
- local deps = [ $(build-properties).get <implicit-dependency> ] ;
- for local d in $(deps)
- {
- self.other-dg += [ $(d:G=).creating-subvariant ] ;
- }
-
- self.other-dg = [ sequence.unique $(self.other-dg) ] ;
- }
-
- rule main-target ( )
- {
- return $(self.main-target) ;
- }
-
- rule created-targets ( )
- {
- return $(self.created-targets) ;
- }
-
- rule requested-properties ( )
- {
- return $(self.properties) ;
- }
-
- rule build-properties ( )
- {
- return $(self.build-properties) ;
- }
-
- rule sources-usage-requirements ( )
- {
- return $(self.sources-usage-requirements) ;
- }
-
- rule set-usage-requirements ( usage-requirements )
- {
- self.usage-requirements = $(usage-requirements) ;
- }
-
- rule usage-requirements ( )
- {
- return $(self.usage-requirements) ;
- }
-
- # Returns all targets referenced by this subvariant, either directly or
- # indirectly, and either as sources, or as dependency properties. Targets
- # referred to using the dependency property are returned as properties, not
- # targets.
- #
- rule all-referenced-targets ( theset )
- {
- # Find directly referenced targets.
- local deps = [ $(self.build-properties).dependency ] ;
- local all-targets = $(self.sources) $(deps) ;
-
- # Find other subvariants.
- local r ;
- for local t in $(all-targets)
- {
- if ! [ $(theset).contains $(t) ]
- {
- $(theset).add $(t) ;
- r += [ $(t:G=).creating-subvariant ] ;
- }
- }
- r = [ sequence.unique $(r) ] ;
- for local s in $(r)
- {
- if $(s) != $(__name__)
- {
- $(s).all-referenced-targets $(theset) ;
- }
- }
- }
-
- # Returns the properties specifying implicit include paths to generated
- # headers. This traverses all targets in this subvariant and subvariants
-    # referred to by <implicit-dependency> properties. For all targets of type
- # 'target-type' (or for all targets, if 'target-type' is not specified), the
- # result will contain <$(feature)>path-to-that-target.
- #
- rule implicit-includes ( feature : target-type ? )
- {
- local key = ii$(feature)-$(target-type:E="") ;
- if ! $($(key))-is-not-empty
- {
- local target-paths = [ all-target-directories $(target-type) ] ;
- target-paths = [ sequence.unique $(target-paths) ] ;
- local result = $(target-paths:G=$(feature)) ;
- if ! $(result)
- {
- result = "" ;
- }
- $(key) = $(result) ;
- }
- if $($(key)) = ""
- {
- return ;
- }
- else
- {
- return $($(key)) ;
- }
- }
-
- rule all-target-directories ( target-type ? )
- {
- if ! $(self.target-directories)
- {
- compute-target-directories $(target-type) ;
- }
- return $(self.target-directories) ;
- }
-
- rule compute-target-directories ( target-type ? )
- {
- local result ;
- for local t in $(self.created-targets)
- {
- # Skip targets of the wrong type.
- if ! $(target-type) ||
- [ type.is-derived [ $(t).type ] $(target-type) ]
- {
- result = [ sequence.merge $(result) : [ $(t).path ] ] ;
- }
- }
- for local d in $(self.other-dg)
- {
- result += [ $(d).all-target-directories $(target-type) ] ;
- }
- self.target-directories = $(result) ;
- }
-}
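
The effect of 'implicit-includes' above is small enough to show as a short sketch; this Python fragment (illustrative names only) wraps each generated-target directory in the requested feature grist, matching the list comprehension in the Python port below.

    def implicit_includes(feature, target_directories):
        # implicit_includes("include", ["bin/gcc/debug"]) -> ['<include>bin/gcc/debug']
        return ["<%s>%s" % (feature, d) for d in sorted(set(target_directories))]
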
diff --git a/jam-files/boost-build/build/virtual_target.py b/jam-files/boost-build/build/virtual_target.py
deleted file mode 100644
index 51dff037..00000000
--- a/jam-files/boost-build/build/virtual_target.py
+++ /dev/null
@@ -1,1118 +0,0 @@
-# Status: ported.
-# Base revision: 64488.
-#
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# Implements virtual targets, which correspond to actual files created during
-# the build, but are not yet targets in the Jam sense. They are needed, for
-# example, when searching for possible transformation sequences, when it is not
-# known whether a particular target should be created at all.
-#
-#
-# +--------------------------+
-# | VirtualTarget |
-# +==========================+
-# | actualize |
-# +--------------------------+
-# | actualize_action() = 0 |
-# | actualize_location() = 0 |
-# +----------------+---------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# +---------------------+ +-------+--------------+
-# | Action | | AbstractFileTarget |
-# +=====================| * +======================+
-# | action_name | +--+ action |
-# | properties | | +----------------------+
-# +---------------------+--+ | actualize_action() |
-# | actualize() |0..1 +-----------+----------+
-# | path() | |
-# | adjust_properties() | sources |
-# | actualize_sources() | targets |
-# +------+--------------+ ^
-# | / \
-# ^ +-+-+
-# / \ |
-# +-+-+ +-------------+-------------+
-# | | |
-# | +------+---------------+ +--------+-------------+
-# | | FileTarget | | SearchedLibTarget |
-# | +======================+ +======================+
-# | | actualize-location() | | actualize-location() |
-# | +----------------------+ +----------------------+
-# |
-# +-+------------------------------+
-# | |
-# +----+----------------+ +---------+-----------+
-# | CompileAction | | LinkAction |
-# +=====================+ +=====================+
-# | adjust_properties() | | adjust_properties() |
-# +---------------------+ | actualize_sources() |
-# +---------------------+
-#
-# The 'CompileAction' and 'LinkAction' classes are not defined here but in the
-# builtin modules. They are shown in the diagram to give the big picture.
-
-import bjam
-
-import re
-import os.path
-import string
-import types
-
-from b2.util import path, utility, set
-from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
-from b2.util.sequence import unique
-from b2.tools import common
-from b2.exceptions import *
-import b2.build.type
-import b2.build.property_set as property_set
-
-import b2.build.property as property
-
-from b2.manager import get_manager
-from b2.util import bjam_signature
-
-__re_starts_with_at = re.compile ('^@(.*)')
-
-class VirtualTargetRegistry:
- def __init__ (self, manager):
- self.manager_ = manager
-
- # A cache for FileTargets
- self.files_ = {}
-
- # A cache for targets.
- self.cache_ = {}
-
- # A map of actual names to virtual targets.
- # Used to make sure we don't associate same
- # actual target to two virtual targets.
- self.actual_ = {}
-
- self.recent_targets_ = []
-
-        # All targets ever registered
- self.all_targets_ = []
-
- self.next_id_ = 0
-
- def register (self, target):
-        """ Registers a new virtual target. Checks if there is already a registered
-            target with the same name, type, project and subvariant properties, as
-            well as the same sources and an equal action. If such a target is found
-            it is returned and 'target' is not registered. Otherwise, 'target' is
-            registered and returned.
- """
- if target.path():
- signature = target.path() + "-" + target.name()
- else:
- signature = "-" + target.name()
-
- result = None
- if not self.cache_.has_key (signature):
- self.cache_ [signature] = []
-
- for t in self.cache_ [signature]:
- a1 = t.action ()
- a2 = target.action ()
-
- # TODO: why are we checking for not result?
- if not result:
- if not a1 and not a2:
- result = t
- else:
- if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources ():
- ps1 = a1.properties ()
- ps2 = a2.properties ()
- p1 = ps1.base () + ps1.free () +\
- b2.util.set.difference(ps1.dependency(), ps1.incidental())
- p2 = ps2.base () + ps2.free () +\
- b2.util.set.difference(ps2.dependency(), ps2.incidental())
- if p1 == p2:
- result = t
-
- if not result:
- self.cache_ [signature].append (target)
- result = target
-
- # TODO: Don't append if we found pre-existing target?
- self.recent_targets_.append(result)
- self.all_targets_.append(result)
-
- return result
-
- def from_file (self, file, file_location, project):
-        """ Creates a virtual target with an appropriate name and type from 'file'.
-            If a target with that name in that project was already created, returns
-            that already created target.
-            TODO: a more correct way would be to compute the path to the file, based
-            on the name and source location for the project, and use that path to
-            determine if the target was already created.
-            TODO: passing a project with all virtual targets is starting to be annoying.
- """
- # Check if we've created a target corresponding to this file.
- path = os.path.join(os.getcwd(), file_location, file)
- path = os.path.normpath(path)
-
- if self.files_.has_key (path):
- return self.files_ [path]
-
- file_type = b2.build.type.type (file)
-
- result = FileTarget (file, file_type, project,
- None, file_location)
- self.files_ [path] = result
-
- return result
-
- def recent_targets(self):
-        """Each target returned by 'register' is added to the list of
-        recent targets returned by this function. This allows us to find
-        all targets created when building a given main target, even those
-        constructed only as intermediate targets."""
-
- return self.recent_targets_
-
- def clear_recent_targets(self):
- self.recent_targets_ = []
-
- def all_targets(self):
- # Returns all virtual targets ever created
- return self.all_targets_
-
- # Returns all targets from 'targets' with types
- # equal to 'type' or derived from it.
- def select_by_type(self, type, targets):
-        return [t for t in targets if b2.build.type.is_subtype(t.type(), type)]
-
- def register_actual_name (self, actual_name, virtual_target):
- if self.actual_.has_key (actual_name):
- cs1 = self.actual_ [actual_name].creating_subvariant ()
- cs2 = virtual_target.creating_subvariant ()
- cmt1 = cs1.main_target ()
- cmt2 = cs2.main_target ()
-
- action1 = self.actual_ [actual_name].action ()
- action2 = virtual_target.action ()
-
- properties_added = []
- properties_removed = []
- if action1 and action2:
- p1 = action1.properties ()
- p1 = p1.raw ()
- p2 = action2.properties ()
- p2 = p2.raw ()
-
- properties_removed = set.difference (p1, p2)
- if not properties_removed: properties_removed = "none"
-
- properties_added = set.difference (p2, p1)
- if not properties_added: properties_added = "none"
-
- # FIXME: Revive printing of real location.
- get_manager().errors()(
- "Duplicate name of actual target: '%s'\n"
- "previous virtual target '%s'\n"
- "created from '%s'\n"
- "another virtual target '%s'\n"
- "created from '%s'\n"
- "added properties: '%s'\n"
- "removed properties: '%s'\n"
- % (actual_name,
- self.actual_ [actual_name], "loc", #cmt1.location (),
- virtual_target,
- "loc", #cmt2.location (),
- properties_added, properties_removed))
-
- else:
- self.actual_ [actual_name] = virtual_target
-
-
- def add_suffix (self, specified_name, file_type, prop_set):
- """ Appends the suffix appropriate to 'type/property_set' combination
- to the specified name and returns the result.
- """
- suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
-
- if suffix:
- return specified_name + '.' + suffix
-
- else:
- return specified_name
-
-class VirtualTarget:
-    """ Potential target. It can be converted into a Jam target and used in
-        building, if needed. However, it can also be dropped, which allows
-        searching for a different transformation and selecting only one.
- name: name of this target.
- project: project to which this target belongs.
- """
- def __init__ (self, name, project):
- self.name_ = name
- self.project_ = project
- self.dependencies_ = []
- self.always_ = False
-
-        # Caches whether dependencies for scanners have already been set.
- self.made_ = {}
-
- def manager(self):
- return self.project_.manager()
-
- def virtual_targets(self):
- return self.manager().virtual_targets()
-
- def name (self):
- """ Name of this target.
- """
- return self.name_
-
- def project (self):
- """ Project of this target.
- """
- return self.project_
-
- def depends (self, d):
- """ Adds additional instances of 'VirtualTarget' that this
- one depends on.
- """
-        self.dependencies_ = sorted(unique(self.dependencies_ + d))
-
- def dependencies (self):
- return self.dependencies_
-
- def always(self):
- self.always_ = True
-
- def actualize (self, scanner = None):
- """ Generates all the actual targets and sets up build actions for
- this target.
-
- If 'scanner' is specified, creates an additional target
- with the same location as actual target, which will depend on the
- actual target and be associated with 'scanner'. That additional
- target is returned. See the docs (#dependency_scanning) for rationale.
- Target must correspond to a file if 'scanner' is specified.
-
- If scanner is not specified, then actual target is returned.
- """
- actual_name = self.actualize_no_scanner ()
-
- if self.always_:
- bjam.call("ALWAYS", actual_name)
-
- if not scanner:
- return actual_name
-
- else:
- # Add the scanner instance to the grist for name.
- g = '-'.join ([ungrist(get_grist(actual_name)), str(id(scanner))])
-
- name = replace_grist (actual_name, '<' + g + '>')
-
- if not self.made_.has_key (name):
- self.made_ [name] = True
-
- self.project_.manager ().engine ().add_dependency (name, actual_name)
-
- self.actualize_location (name)
-
- self.project_.manager ().scanners ().install (scanner, name, str (self))
-
- return name
-
-# private: (overridables)
-
- def actualize_action (self, target):
- """ Sets up build actions for 'target'. Should call appropriate rules
- and set target variables.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def actualize_location (self, target):
- """ Sets up variables on 'target' which specify its location.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def path (self):
- """ If the target is generated one, returns the path where it will be
- generated. Otherwise, returns empty list.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def actual_name (self):
- """ Return that actual target name that should be used
- (for the case where no scanner is involved)
- """
- raise BaseException ("method should be defined in derived classes")
-
-
-class AbstractFileTarget (VirtualTarget):
-    """ Target which corresponds to a file. The exact mapping to a file
-        is not yet specified in this class. (TODO: Actually, the class name
-        could be better...)
-
-        May be a source file (when no action is specified) or a
-        derived file (otherwise).
-
-        The target's grist is a concatenation of the project's location,
-        the action's properties (for derived files) and, optionally, a
-        value identifying the main target.
-
-        exact: If non-empty, the name is exactly the name the
-        created file should have. Otherwise, the '__init__'
-        method will add a suffix obtained from 'type' by
-        calling 'type.generated-target-suffix'.
-
- type: optional type of this target.
- """
- def __init__ (self, name, type, project, action = None, exact=False):
- VirtualTarget.__init__ (self, name, project)
-
- self.type_ = type
-
- self.action_ = action
- self.exact_ = exact
-
- if action:
- action.add_targets ([self])
-
-        if self.type_ and not exact:
- self.__adjust_name (name)
-
-
- self.actual_name_ = None
- self.path_ = None
- self.intermediate_ = False
- self.creating_subvariant_ = None
-
- # True if this is a root target.
- self.root_ = False
-
- def type (self):
- return self.type_
-
- def set_path (self, path):
- """ Sets the path. When generating target name, it will override any path
- computation from properties.
- """
- self.path_ = path
-
- def action (self):
- """ Returns the action.
- """
- return self.action_
-
- def root (self, set = None):
-        """ Sets/gets the 'root' flag. A target is root if it directly
-            corresponds to some variant of a main target.
- """
- if set:
- self.root_ = True
- return self.root_
-
- def creating_subvariant (self, s = None):
-        """ Gets or sets the subvariant which created this target. The subvariant
-        is set when the target is brought into existence and is never changed
-        after that. In particular, if a target is shared between subvariants, only
-        the first is stored.
-        s: If specified, the value to set,
-            which should be an instance of the 'Subvariant' class.
- """
- if s and not self.creating_subvariant ():
- if self.creating_subvariant ():
- raise BaseException ("Attempt to change 'dg'")
-
- else:
- self.creating_subvariant_ = s
-
- return self.creating_subvariant_
-
- def actualize_action (self, target):
- if self.action_:
- self.action_.actualize ()
-
- # Return a human-readable representation of this target
- #
- # If this target has an action, that's:
- #
- # { <action-name>-<self.name>.<self.type> <action-sources>... }
- #
- # otherwise, it's:
- #
- # { <self.name>.<self.type> }
- #
- def str(self):
- a = self.action()
-
- name_dot_type = self.name_ + "." + self.type_
-
- if a:
- action_name = a.action_name()
- ss = [ s.str() for s in a.sources()]
-
- return "{ %s-%s %s}" % (action_name, name_dot_type, str(ss))
- else:
- return "{ " + name_dot_type + " }"
-
-# private:
-
- def actual_name (self):
- if not self.actual_name_:
- self.actual_name_ = '<' + self.grist() + '>' + self.name_
-
- return self.actual_name_
-
- def grist (self):
-        """Helper to 'actual_name', above. Computes a unique prefix used to
-        distinguish this target from other targets with the same name which
-        create a different file.
- """
- # Depending on target, there may be different approaches to generating
- # unique prefixes. We'll generate prefixes in the form
- # <one letter approach code> <the actual prefix>
- path = self.path ()
-
- if path:
- # The target will be generated to a known path. Just use the path
- # for identification, since path is as unique as it can get.
- return 'p' + path
-
- else:
- # File is either source, which will be searched for, or is not a file at
- # all. Use the location of project for distinguishing.
- project_location = self.project_.get ('location')
- path_components = b2.util.path.split(project_location)
- location_grist = '!'.join (path_components)
-
- if self.action_:
- ps = self.action_.properties ()
- property_grist = ps.as_path ()
- # 'property_grist' can be empty when 'ps' is an empty
- # property set.
- if property_grist:
- location_grist = location_grist + '/' + property_grist
-
- return 'l' + location_grist
-
- def __adjust_name(self, specified_name):
-        """Given the target name specified in the constructor, returns the
-        name which should really be used, by looking at the <tag> properties.
-        The tag properties come in two flavours:
-          - <tag>value,
-          - <tag>@rule-name
-        In the first case, the value is just added to the name.
-        In the second case, the specified rule is called with the specified name,
-        target type and properties, and should return the new name.
-        If no <tag> property is specified, or the rule specified by
-        <tag> returns nothing, returns the result of calling
-        add_prefix_and_suffix."""
-
- if self.action_:
- ps = self.action_.properties()
- else:
- ps = property_set.empty()
-
- # FIXME: I'm not sure how this is used, need to check with
- # Rene to figure out how to implement
- #~ We add ourselves to the properties so that any tag rule can get
- #~ more direct information about the target than just that available
- #~ through the properties. This is useful in implementing
- #~ name changes based on the sources of the target. For example to
- #~ make unique names of object files based on the source file.
- #~ --grafik
- #ps = property_set.create(ps.raw() + ["<target>%s" % "XXXX"])
- #ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
-
- tag = ps.get("<tag>")
-
- if tag:
-
- if len(tag) > 1:
- get_manager().errors()(
- """<tag>@rulename is present but is not the only <tag> feature""")
-
- tag = tag[0]
- if callable(tag):
- self.name_ = tag(specified_name, self.type_, ps)
- else:
-                if not tag[0] == '@':
-                    get_manager().errors()("""The value of the <tag> feature must be '@rule-name'""")
-
- exported_ps = b2.util.value_to_jam(ps, methods=True)
- self.name_ = b2.util.call_jam_function(
- tag[1:], specified_name, self.type_, exported_ps)
- if self.name_:
- self.name_ = self.name_[0]
-
- # If there's no tag or the tag rule returned nothing.
- if not tag or not self.name_:
- self.name_ = add_prefix_and_suffix(specified_name, self.type_, ps)
-
- def actualize_no_scanner(self):
- name = self.actual_name()
-
- # Do anything only on the first invocation
- if not self.made_:
- self.made_[name] = True
-
- if self.action_:
-                # For a non-derived target, we don't care if there
-                # are several virtual targets that refer to the same name.
-                # One case when this is unavoidable is when the file name is
-                # main.cpp and two targets have types CPP (for compiling)
-                # and MOCCABLE_CPP (for conversion to H via Qt tools).
- self.virtual_targets().register_actual_name(name, self)
-
- for i in self.dependencies_:
-                self.manager().engine().add_dependency(name, i.actualize())
-
- self.actualize_location(name)
- self.actualize_action(name)
-
- return name
-
-@bjam_signature((["specified_name"], ["type"], ["property_set"]))
-def add_prefix_and_suffix(specified_name, type, property_set):
-    """Appends the prefix and suffix appropriate to the 'type/property-set'
-    combination to the specified name and returns the result."""
-
- property_set = b2.util.jam_to_value_maybe(property_set)
-
- suffix = ""
- if type:
- suffix = b2.build.type.generated_target_suffix(type, property_set)
-
- # Handle suffixes for which no leading dot is desired. Those are
- # specified by enclosing them in <...>. Needed by python so it
- # can create "_d.so" extensions, for example.
- if get_grist(suffix):
- suffix = ungrist(suffix)
- elif suffix:
- suffix = "." + suffix
-
- prefix = ""
- if type:
- prefix = b2.build.type.generated_target_prefix(type, property_set)
-
- if specified_name.startswith(prefix):
- prefix = ""
-
- if not prefix:
- prefix = ""
- if not suffix:
- suffix = ""
- return prefix + specified_name + suffix
-
-
-class FileTarget (AbstractFileTarget):
- """ File target with explicitly known location.
-
- The file path is determined as
- - value passed to the 'set_path' method, if any
-        - for derived files, the project's build dir, joined with components
-          that describe the action's properties. If the free properties
-          are not equal to the project's reference properties,
-          an element with the name of the main target is added.
- - for source files, project's source dir
-
- The file suffix is
- - the value passed to the 'suffix' method, if any, or
-        - the suffix which corresponds to the target's type.
- """
- def __init__ (self, name, type, project, action = None, path=None, exact=False):
- AbstractFileTarget.__init__ (self, name, type, project, action, exact)
-
- self.path_ = path
-
- def __str__(self):
- if self.type_:
- return self.name_ + "." + self.type_
- else:
- return self.name_
-
- def clone_with_different_type(self, new_type):
- return FileTarget(self.name_, new_type, self.project_,
- self.action_, self.path_, exact=True)
-
- def actualize_location (self, target):
- engine = self.project_.manager_.engine ()
-
- if self.action_:
- # This is a derived file.
- path = self.path ()
- engine.set_target_variable (target, 'LOCATE', path)
-
- # Make sure the path exists.
- engine.add_dependency (target, path)
- common.mkdir(engine, path)
-
- # It's possible that the target name includes a directory
- # too, for example when installing headers. Create that
- # directory.
- d = os.path.dirname(get_value(target))
- if d:
- d = os.path.join(path, d)
- engine.add_dependency(target, d)
- common.mkdir(engine, d)
-
-            # For a real file target, we create a fake target that
-            # depends on the real target. This allows us to run
-            #
-            # bjam hello.o
-            #
-            # without trying to guess the name of the real target.
-            # Note that the target has no directory name, and a special
-            # grist <e>.
-            #
-            # First, that means that "bjam hello.o" will build all
-            # known hello.o targets.
-            # Second, the <e> grist makes sure this target won't be confused
-            # with other targets. For example, if we have a subdir 'test'
-            # with a target 'test' in it that includes a 'test.o' file,
-            # then the target for the directory will be just 'test', the target
-            # for test.o will be <ptest/bin/gcc/debug>test.o, and the target
-            # we create below will be <e>test.o.
- engine.add_dependency("<e>%s" % get_value(target), target)
-
- # Allow bjam <path-to-file>/<file> to work. This won't catch all
- # possible ways to refer to the path (relative/absolute, extra ".",
-            # various ".."), but it should help in obvious cases.
- engine.add_dependency("<e>%s" % (os.path.join(path, get_value(target))), target)
-
- else:
- # This is a source file.
- engine.set_target_variable (target, 'SEARCH', self.project_.get ('source-location'))
-
-
- def path (self):
- """ Returns the directory for this target.
- """
- if not self.path_:
- if self.action_:
- p = self.action_.properties ()
- (target_path, relative_to_build_dir) = p.target_path ()
-
- if relative_to_build_dir:
- # Indicates that the path is relative to
- # build dir.
- target_path = os.path.join (self.project_.build_dir (), target_path)
-
- # Store the computed path, so that it's not recomputed
- # any more
- self.path_ = target_path
-
- return self.path_
-
-
-class NotFileTarget(AbstractFileTarget):
-
- def __init__(self, name, project, action):
- AbstractFileTarget.__init__(self, name, None, project, action)
-
- def path(self):
- """Returns nothing, to indicate that target path is not known."""
- return None
-
- def actualize_location(self, target):
- bjam.call("NOTFILE", target)
- bjam.call("ALWAYS", target)
- bjam.call("NOUPDATE", target)
-
-
-class Action:
- """ Class which represents an action.
- Both 'targets' and 'sources' should list instances of 'VirtualTarget'.
-        The action name should name a rule with this prototype:
-            rule action_name ( targets + : sources * : properties * )
-        Targets and sources are passed as actual Jam targets. The rule may not
-        establish additional dependency relationships, but should do everything else.
- """
- def __init__ (self, manager, sources, action_name, prop_set):
- assert(isinstance(prop_set, property_set.PropertySet))
- assert type(sources) == types.ListType
- self.sources_ = sources
- self.action_name_ = action_name
- if not prop_set:
- prop_set = property_set.empty()
- self.properties_ = prop_set
-        # All implicit dependencies must already be virtual targets.
-        assert all(isinstance(v, VirtualTarget)
-                   for v in prop_set.get('implicit-dependency'))
-
- self.manager_ = manager
- self.engine_ = self.manager_.engine ()
- self.targets_ = []
-
- # Indicates whether this has been actualized or not.
- self.actualized_ = False
-
- self.dependency_only_sources_ = []
- self.actual_sources_ = []
-
-
- def add_targets (self, targets):
- self.targets_ += targets
-
-
-    def replace_targets (self, old_targets, new_targets):
-        self.targets_ = [t for t in self.targets_ if t not in old_targets] + new_targets
-
- def targets (self):
- return self.targets_
-
- def sources (self):
- return self.sources_
-
- def action_name (self):
- return self.action_name_
-
- def properties (self):
- return self.properties_
-
- def actualize (self):
- """ Generates actual build instructions.
- """
- if self.actualized_:
- return
-
- self.actualized_ = True
-
- ps = self.properties ()
- properties = self.adjust_properties (ps)
-
-
- actual_targets = []
-
- for i in self.targets ():
- actual_targets.append (i.actualize ())
-
- self.actualize_sources (self.sources (), properties)
-
- self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_)
-
-        # This works around a bug with -j and actions that
-        # produce multiple targets, where:
-        # - the dependency on the first output is found, and
-        #   the action is started;
-        # - the dependency on the second output is found, and
-        #   bjam notices that the command is already running;
-        # - instead of waiting for the command, dependents
-        #   of the second target are immediately updated.
- if len(actual_targets) > 1:
- bjam.call("INCLUDES", actual_targets, actual_targets)
-
- # FIXME: check the comment below. Was self.action_name_ [1]
-        # The action name can include an additional argument to the rule, which
-        # should not be passed to 'set-target-variables'.
- # FIXME: breaking circular dependency
- import toolset
- toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties)
-
- engine = self.manager_.engine ()
-
- # FIXME: this is supposed to help --out-xml option, but we don't
- # implement that now, and anyway, we should handle it in Python,
-        # not by putting variables on bjam-level targets.
- bjam.call("set-target-variable", actual_targets, ".action", repr(self))
-
- self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_,
- properties)
-
- # Since we set up creating action here, we also set up
- # action for cleaning up
- self.manager_.engine ().set_update_action ('common.Clean', 'clean-all',
- actual_targets)
-
- return actual_targets
-
- def actualize_source_type (self, sources, prop_set):
- """ Helper for 'actualize_sources'.
- For each passed source, actualizes it with the appropriate scanner.
- Returns the actualized virtual targets.
- """
- result = []
- for i in sources:
- scanner = None
-
-# FIXME: what's this?
-# if isinstance (i, str):
-# i = self.manager_.get_object (i)
-
- if i.type ():
- scanner = b2.build.type.get_scanner (i.type (), prop_set)
-
- r = i.actualize (scanner)
- result.append (r)
-
- return result
-
- def actualize_sources (self, sources, prop_set):
- """ Creates actual jam targets for sources. Initializes two member
- variables:
- 'self.actual_sources_' -- sources which are passed to updating action
- 'self.dependency_only_sources_' -- sources which are made dependencies, but
- are not used otherwise.
-
- New values will be *appended* to the variables. They may be non-empty,
- if caller wants it.
- """
- dependencies = self.properties_.get ('<dependency>')
-
- self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
- self.actual_sources_ += self.actualize_source_type (sources, prop_set)
-
- # This is used to help bjam find dependencies in generated headers
- # in other main targets.
- # Say:
- #
- # make a.h : ....... ;
- # exe hello : hello.cpp : <implicit-dependency>a.h ;
- #
- # However, for bjam to find the dependency the generated target must
- # be actualized (i.e. have the jam target). In the above case,
- # if we're building just hello ("bjam hello"), 'a.h' won't be
- # actualized unless we do it here.
- implicit = self.properties_.get("<implicit-dependency>")
-
- for i in implicit:
- i.actualize()
-
- def adjust_properties (self, prop_set):
-        """ Determines real properties when trying to build with 'properties'.
-            This is the last chance to fix properties, for example to adjust includes
-            to get generated headers correctly. The default implementation simply
-            returns its argument.
- """
- return prop_set
-
-
-class NullAction (Action):
- """ Action class which does nothing --- it produces the targets with
- specific properties out of nowhere. It's needed to distinguish virtual
- targets with different properties that are known to exist, and have no
- actions which create them.
- """
- def __init__ (self, manager, prop_set):
- Action.__init__ (self, manager, [], None, prop_set)
-
- def actualize (self):
- if not self.actualized_:
- self.actualized_ = True
-
- for i in self.targets ():
- i.actualize ()
-
-class NonScanningAction(Action):
- """Class which acts exactly like 'action', except that the sources
- are not scanned for dependencies."""
-
- def __init__(self, sources, action_name, property_set):
- #FIXME: should the manager parameter of Action.__init__
- #be removed? -- Steven Watanabe
- Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
-
- def actualize_source_type(self, sources, property_set):
-
- result = []
- for s in sources:
- result.append(s.actualize())
- return result
-
-def traverse (target, include_roots = False, include_sources = False):
-    """ Traverses the dependency graph of 'target' and returns all targets that will
-        be created before this one is created. If the root of some dependency graph
-        is found during traversal, it is either included or not, depending on the
-        value of 'include_roots'. In either case, the sources of the root are not traversed.
- """
- result = []
-
- if target.action ():
- action = target.action ()
-
- # This includes 'target' as well
- result += action.targets ()
-
- for t in action.sources ():
-
- # FIXME:
- # TODO: see comment in Manager.register_object ()
- #if not isinstance (t, VirtualTarget):
- # t = target.project_.manager_.get_object (t)
-
- if not t.root ():
- result += traverse (t, include_roots, include_sources)
-
- elif include_roots:
- result.append (t)
-
- elif include_sources:
- result.append (target)
-
- return result
-
-def clone_action (action, new_project, new_action_name, new_properties):
-    """Takes an 'action' instance and creates a new instance of it
-    and of all produced targets. The action name and properties are set
-    to 'new_action_name' and 'new_properties', if those are specified.
- Returns the cloned action."""
-
- if not new_action_name:
- new_action_name = action.action_name()
-
- if not new_properties:
- new_properties = action.properties()
-
- cloned_action = action.__class__(action.manager_, action.sources(), new_action_name,
- new_properties)
-
- cloned_targets = []
- for target in action.targets():
-
- n = target.name()
-        # Don't modify the name of the produced targets.
- cloned_target = FileTarget(n, target.type(), new_project,
- cloned_action, exact=True)
-
- d = target.dependencies()
- if d:
- cloned_target.depends(d)
- cloned_target.root(target.root())
- cloned_target.creating_subvariant(target.creating_subvariant())
-
- cloned_targets.append(cloned_target)
-
- return cloned_action
-
-class Subvariant:
-
- def __init__ (self, main_target, prop_set, sources, build_properties, sources_usage_requirements, created_targets):
- """
- main_target: The instance of MainTarget class
- prop_set: Properties requested for this target
- sources:
- build_properties: Actually used properties
- sources_usage_requirements: Properties propagated from sources
- created_targets: Top-level created targets
- """
- self.main_target_ = main_target
- self.properties_ = prop_set
- self.sources_ = sources
- self.build_properties_ = build_properties
- self.sources_usage_requirements_ = sources_usage_requirements
- self.created_targets_ = created_targets
-
- self.usage_requirements_ = None
-
- # Pre-compose the list of other dependency graphs, on which this one
- # depends
- deps = build_properties.get('<implicit-dependency>')
-
- self.other_dg_ = []
- for d in deps:
- self.other_dg_.append(d.creating_subvariant ())
-
- self.other_dg_ = unique (self.other_dg_)
-
- self.implicit_includes_cache_ = {}
- self.target_directories_ = None
-
- def main_target (self):
- return self.main_target_
-
- def created_targets (self):
- return self.created_targets_
-
- def requested_properties (self):
- return self.properties_
-
- def build_properties (self):
- return self.build_properties_
-
- def sources_usage_requirements (self):
- return self.sources_usage_requirements_
-
- def set_usage_requirements (self, usage_requirements):
- self.usage_requirements_ = usage_requirements
-
- def usage_requirements (self):
- return self.usage_requirements_
-
- def all_referenced_targets(self, result):
- """Returns all targets referenced by this subvariant,
- either directly or indirectly, and either as sources,
-        or as dependency properties. Targets referred to via a
-        dependency property are returned as properties, not targets."""
-
- # Find directly referenced targets.
- deps = self.build_properties().dependency()
- all_targets = self.sources_ + deps
-
- # Find other subvariants.
- r = []
- for e in all_targets:
- if not e in result:
- result.add(e)
- if isinstance(e, property.Property):
- t = e.value()
- else:
- t = e
-
- # FIXME: how can this be?
- cs = t.creating_subvariant()
- if cs:
- r.append(cs)
- r = unique(r)
- for s in r:
- if s != self:
- s.all_referenced_targets(result)
-
-
- def implicit_includes (self, feature, target_type):
- """ Returns the properties which specify implicit include paths to
- generated headers. This traverses all targets in this subvariant,
-        and subvariants referred to by <implicit-dependency> properties.
-        For all targets which are of type 'target_type' (or for all targets,
- if 'target_type' is not specified), the result will contain
- <$(feature)>path-to-that-target.
- """
-
- if not target_type:
- key = feature
- else:
- key = feature + "-" + target_type
-
-
- result = self.implicit_includes_cache_.get(key)
- if not result:
- target_paths = self.all_target_directories(target_type)
- target_paths = unique(target_paths)
- result = ["<%s>%s" % (feature, p) for p in target_paths]
- self.implicit_includes_cache_[key] = result
-
- return result
-
- def all_target_directories(self, target_type = None):
- # TODO: does not appear to use target_type in deciding
- # if we've computed this already.
- if not self.target_directories_:
- self.target_directories_ = self.compute_target_directories(target_type)
- return self.target_directories_
-
- def compute_target_directories(self, target_type=None):
- result = []
- for t in self.created_targets():
- if not target_type or b2.build.type.is_derived(t.type(), target_type):
- result.append(t.path())
-
- for d in self.other_dg_:
- result.extend(d.all_target_directories(target_type))
-
- result = unique(result)
- return result
diff --git a/jam-files/boost-build/kernel/boost-build.jam b/jam-files/boost-build/kernel/boost-build.jam
deleted file mode 100644
index 377f6ec0..00000000
--- a/jam-files/boost-build/kernel/boost-build.jam
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-boost-build . ;
diff --git a/jam-files/boost-build/kernel/bootstrap.jam b/jam-files/boost-build/kernel/bootstrap.jam
deleted file mode 100644
index 89048af9..00000000
--- a/jam-files/boost-build/kernel/bootstrap.jam
+++ /dev/null
@@ -1,263 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2005, 2006 Rene Rivera
-# Copyright 2003, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# First of all, check the jam version
-
-if $(JAM_VERSION:J="") < 030112
-{
- ECHO "error: Boost.Jam version 3.1.12 or later required" ;
- EXIT ;
-}
-
-local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
-
-for local r in $(required-rules)
-{
- if ! $(r) in [ RULENAMES ]
- {
- ECHO "error: builtin rule '$(r)' is not present" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
-}
-
-local native =
- regex transform 2
- ;
-while $(native)
-{
- if ! [ HAS_NATIVE_RULE $(native[1]) :
- $(native[2]) :
- $(native[3]) ]
- {
- ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
- ECHO "error: or interface version of that rule is too low" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
- native = $(native[4-]) ;
-}
-
-# Check that the builtin .ENVIRON module is present. We don't have a
-# builtin to check that a module is present, so we assume that the PATH
-# environment variable is always set and verify that the .ENVIRON module
-# has a non-empty value for that variable.
-module .ENVIRON
-{
- local p = $(PATH) $(Path) $(path) ;
- if ! $(p)
- {
- ECHO "error: no builtin module .ENVIRON is found" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
-}
-
-# Check that @() functionality is present. Similarly to modules,
-# we don't have a way to test that directly. Instead we check that
-# $(TMPNAME) functionality is present which was added at roughly
-# the same time (more precisely it was added just before).
-{
- if ! $(TMPNAME)
- {
- ECHO "error: no @() functionality found" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
-}
-
-# Make sure that the \n escape is available.
-if "\n" = "n"
-{
- if $(OS) = CYGWIN
- {
- ECHO "warning: escape sequences are not supported" ;
- ECHO "warning: this will cause major misbehaviour on cygwin" ;
- ECHO "warning: your version of bjam is likely out of date" ;
- ECHO "warning: please get a fresh version from SVN." ;
- }
-}
-
-# Bootstrap the module system. Then bring the import rule into the global module.
-#
-SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
-module modules { include <module@>modules.jam ; }
-IMPORT modules : import : : import ;
-
-{
- # Add module subdirectories to the BOOST_BUILD_PATH, which allows
- # us to make an incremental refactoring step by moving modules to
- # the appropriate subdirectories, thereby achieving some physical
- # separation of different layers without changing all of our code
- # to specify subdirectories in import statements or use an extra
- # level of qualification on imported names.
-
- local subdirs =
- kernel # only the most-intrinsic modules: modules, errors
- util # low-level substrate: string/number handling, etc.
- build # essential elements of the build system architecture
- tools # toolsets for handling specific build jobs and targets.
- contrib # user contributed (unreviewed) modules
- . # build-system.jam lives here
- ;
- local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
- BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
-
- modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
-
- modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
-}
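The block above extends BOOST_BUILD_PATH with one entry per module subdirectory of the Boost.Build root. A minimal Python sketch of that computation (an illustrative restatement, not part of Boost.Build; the function name is hypothetical):

import os

def boost_build_search_path(boost_build_root):
    # Append each module subdirectory of the Boost.Build root, plus "."
    # (where build-system.jam lives), to the module search path.
    subdirs = ["kernel", "util", "build", "tools", "contrib", "."]
    return [os.path.join(boost_build_root, d) for d in subdirs]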
-
-# Reload the modules, to clean up things. The modules module can tolerate
-# being included twice.
-#
-import modules ;
-
-# Process option plugins first to allow them to prevent loading
-# the rest of the build system.
-#
-import option ;
-local dont-build = [ option.process ] ;
-
-# Should we skip building, i.e. loading the build system, according
-# to the options processed?
-#
-if ! $(dont-build)
-{
- if ! --python in $(ARGV)
- {
- # Allow users to override the build system file from the
- # command-line (mostly for testing)
- local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
- build-system ?= build-system ;
-
- # Use last element in case of multiple command-line options
- import $(build-system[-1]) ;
- }
- else
- {
- ECHO "Boost.Build V2 Python port (experimental)" ;
-
- # Define additional interface that is exposed to Python code. Python code will
- # also have access to select bjam builtins in the 'bjam' module, but some
- # things are easier to define outside C.
- module python_interface
- {
- rule load ( module-name : location )
- {
- USER_MODULE $(module-name) ;
- # Make all rules in the loaded module available in
- # the global namespace, so that we don't have
- # to bother specifying the "right" module when calling
- # from Python.
- module $(module-name)
- {
- __name__ = $(1) ;
- include $(2) ;
- local rules = [ RULENAMES $(1) ] ;
- IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
- }
- }
-
- rule peek ( module-name ? : variables + )
- {
- module $(<)
- {
- return $($(>)) ;
- }
- }
-
- rule set-variable ( module-name : name : value * )
- {
- module $(<)
- {
- $(>) = $(3) ;
- }
- }
-
- rule set-top-level-targets ( targets * )
- {
- DEPENDS all : $(targets) ;
- }
-
- rule call-in-module ( m : rulename : * )
- {
- module $(m)
- {
- return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- }
- }
-
-
- rule set-update-action ( action : targets * : sources * : properties * )
- {
- $(action) $(targets) : $(sources) : $(properties) ;
- }
-
- rule set-update-action-in-module ( m : action : targets * : sources * : properties * )
- {
- module $(m)
- {
- $(2) $(3) : $(4) : $(5) ;
- }
- }
-
- rule set-target-variable ( targets + : variable : value * : append ? )
- {
- if $(append)
- {
- $(variable) on $(targets) += $(value) ;
- }
- else
- {
- $(variable) on $(targets) = $(value) ;
- }
- }
-
- rule get-target-variable ( targets + : variable )
- {
- return [ on $(targets) return $($(variable)) ] ;
- }
-
- rule import-rules-from-parent ( parent-module : this-module : user-rules * )
- {
- IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
- EXPORT $(this-module) : $(user-rules) ;
- }
-
- rule mark-included ( targets * : includes * ) {
- NOCARE $(includes) ;
- INCLUDES $(targets) : $(includes) ;
- ISFILE $(includes) ;
- }
- }
-
- PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
- modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
-
- module PyBB
- {
- local ok = [ bootstrap $(root) ] ;
- if ! $(ok)
- {
- EXIT ;
- }
- }
-
-
- #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
-
- #module PyBB
- #{
- # main ;
- #}
-
- }
-}
diff --git a/jam-files/boost-build/kernel/bootstrap.py b/jam-files/boost-build/kernel/bootstrap.py
deleted file mode 100644
index 2e8dd37b..00000000
--- a/jam-files/boost-build/kernel/bootstrap.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2009 Vladimir Prus
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import imp
-import sys
-
-def bootstrap(root_path):
- """Performs python-side bootstrapping of Boost.Build/Python.
-
- This function arranges for 'b2.whatever' package names to work, while also
- allowing python files to be placed alongside the corresponding jam modules.
- """
-
- m = imp.new_module("b2")
- # Note that:
- # 1. If __path__ is not a list of strings, nothing will work
- # 2. root_path is already a list of strings.
- m.__path__ = root_path
- sys.modules["b2"] = m
-
- import b2.build_system
- return b2.build_system.main()
-
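The deleted bootstrap() relies on the long-deprecated imp module. A rough modern equivalent of the same package-path trick, assuming Python 3 and substituting types.ModuleType for imp.new_module (a sketch, not the project's code):

import sys
import types

def bootstrap_sketch(root_paths):
    # Create an empty package named "b2" whose __path__ points at the
    # Boost.Build tree, so "import b2.build_system" resolves python files
    # that live alongside the corresponding jam modules.
    pkg = types.ModuleType("b2")
    pkg.__path__ = list(root_paths)   # must be a list of strings
    sys.modules["b2"] = pkg
    import b2.build_system            # found via pkg.__path__
    return b2.build_system.main()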
diff --git a/jam-files/boost-build/kernel/class.jam b/jam-files/boost-build/kernel/class.jam
deleted file mode 100644
index b8e55af3..00000000
--- a/jam-files/boost-build/kernel/class.jam
+++ /dev/null
@@ -1,420 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005 Rene Rivera
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Polymorphic class system built on top of core Jam facilities.
-#
-# Classes are defined by 'class' keywords::
-#
-# class myclass
-# {
-# rule __init__ ( arg1 ) # constructor
-# {
-# self.attribute = $(arg1) ;
-# }
-#
-# rule method1 ( ) # method
-# {
-# return [ method2 ] ;
-# }
-#
-# rule method2 ( ) # method
-# {
-# return $(self.attribute) ;
-# }
-# }
-#
-# The __init__ rule is the constructor, and sets member variables.
-#
-# New instances are created by invoking [ new <class> <args...> ]:
-#
-# local x = [ new myclass foo ] ; # x is a new myclass object
-# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
-#
-# Derived classes are created by mentioning base classes in the declaration::
-#
-# class derived : myclass
-# {
-# rule __init__ ( arg )
-# {
-# myclass.__init__ $(arg) ; # call base __init__
-#
-# }
-#
-# rule method2 ( ) # method override
-# {
-# return $(self.attribute)XXX ;
-# }
-# }
-#
-# All methods operate virtually, replacing behavior in the base classes. For
-# example::
-#
-# local y = [ new derived foo ] ; # y is a new derived object
-# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "fooXXX"
-#
-# Each class instance is its own core Jam module. All instance attributes and
-# methods are accessible without additional qualification from within the class
-# instance. All rules imported in the class declaration, or visible in base classes,
-# are also visible. Base methods are available in qualified form:
-# base-name.method-name. By convention, attribute names are prefixed with
-# "self.".
-
-import modules ;
-import numbers ;
-
-
-rule xinit ( instance : class )
-{
- module $(instance)
- {
- __class__ = $(2) ;
- __name__ = $(1) ;
- }
-}
-
-
-rule new ( class args * : * )
-{
- .next-instance ?= 1 ;
- local id = object($(class))@$(.next-instance) ;
-
- xinit $(id) : $(class) ;
-
- INSTANCE $(id) : class@$(class) ;
- IMPORT_MODULE $(id) ;
- $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-
- # Bump the next unique object name.
- .next-instance = [ numbers.increment $(.next-instance) ] ;
-
- # Return the name of the new instance.
- return $(id) ;
-}
-
-
-rule bases ( class )
-{
- module class@$(class)
- {
- return $(__bases__) ;
- }
-}
-
-
-rule is-derived ( class : bases + )
-{
- local stack = $(class) ;
- local visited found ;
- while ! $(found) && $(stack)
- {
- local top = $(stack[1]) ;
- stack = $(stack[2-]) ;
- if ! ( $(top) in $(visited) )
- {
- visited += $(top) ;
- stack += [ bases $(top) ] ;
-
- if $(bases) in $(visited)
- {
- found = true ;
- }
- }
- }
- return $(found) ;
-}
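The is-derived rule above is a breadth-first walk over the __bases__ graph that succeeds once every requested base has been visited. A minimal Python sketch of the same traversal, where get_bases is a hypothetical stand-in for the class@<name> module lookup:

def is_derived(klass, bases, get_bases):
    # Walk the inheritance graph from 'klass'; report success as soon as
    # every name in 'bases' has been visited (the start class counts too).
    stack, visited = [klass], set()
    while stack:
        top = stack.pop(0)
        if top not in visited:
            visited.add(top)
            stack.extend(get_bases(top))
            if all(b in visited for b in bases):
                return True
    return False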
-
-
-# Returns true if the 'value' is a class instance.
-#
-rule is-instance ( value )
-{
- return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
-}
-
-
-# Check if the given value is of the given type.
-#
-rule is-a (
- instance # The value to check.
- : type # The type to test for.
-)
-{
- if [ is-instance $(instance) ]
- {
- return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
- }
-}
-
-
-local rule typecheck ( x )
-{
- local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
- if ! [ is-a $(x) : $(class-name) ]
- {
- return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
-
- # This will be the construction function for a class called 'myclass'.
- #
- class myclass
- {
- import assert ;
-
- rule __init__ ( x_ * : y_ * )
- {
- # Set some instance variables.
- x = $(x_) ;
- y = $(y_) ;
- foo += 10 ;
- }
-
- rule set-x ( newx * )
- {
- x = $(newx) ;
- }
-
- rule get-x ( )
- {
- return $(x) ;
- }
-
- rule set-y ( newy * )
- {
- y = $(newy) ;
- }
-
- rule get-y ( )
- {
- return $(y) ;
- }
-
- rule f ( )
- {
- return [ g $(x) ] ;
- }
-
- rule g ( args * )
- {
- if $(x) in $(y)
- {
- return $(x) ;
- }
- else if $(y) in $(x)
- {
- return $(y) ;
- }
- else
- {
- return ;
- }
- }
-
- rule get-class ( )
- {
- return $(__class__) ;
- }
-
- rule get-instance ( )
- {
- return $(__name__) ;
- }
-
- rule invariant ( )
- {
- assert.equal 1 : 1 ;
- }
-
- rule get-foo ( )
- {
- return $(foo) ;
- }
- }
-# class myclass ;
-
- class derived1 : myclass
- {
- rule __init__ ( z_ )
- {
- myclass.__init__ $(z_) : X ;
- z = $(z_) ;
- }
-
- # Override g.
- #
- rule g ( args * )
- {
- return derived1.g ;
- }
-
- rule h ( )
- {
- return derived1.h ;
- }
-
- rule get-z ( )
- {
- return $(z) ;
- }
-
- # Check that 'assert.equal' visible in base class is visible here.
- #
- rule invariant2 ( )
- {
- assert.equal 2 : 2 ;
- }
-
- # Check that 'assert.variable-not-empty' visible in base class is
- # visible here.
- #
- rule invariant3 ( )
- {
- local v = 10 ;
- assert.variable-not-empty v ;
- }
- }
-# class derived1 : myclass ;
-
- class derived2 : myclass
- {
- rule __init__ ( )
- {
- myclass.__init__ 1 : 2 ;
- }
-
- # Override g.
- #
- rule g ( args * )
- {
- return derived2.g ;
- }
-
- # Test the ability to call base class functions with qualification.
- #
- rule get-x ( )
- {
- return [ myclass.get-x ] ;
- }
- }
-# class derived2 : myclass ;
-
- class derived2a : derived2
- {
- rule __init__
- {
- derived2.__init__ ;
- }
- }
-# class derived2a : derived2 ;
-
- local rule expect_derived2 ( [derived2] x ) { }
-
- local a = [ new myclass 3 4 5 : 4 5 ] ;
- local b = [ new derived1 4 ] ;
- local b2 = [ new derived1 4 ] ;
- local c = [ new derived2 ] ;
- local d = [ new derived2 ] ;
- local e = [ new derived2a ] ;
-
- expect_derived2 $(d) ;
- expect_derived2 $(e) ;
-
- # Argument checking is set up to call exit(1) directly on failure, and we
- # cannot hijack that with try, so we had better not do this test by
- # default. We could fix this by having errors look up and invoke the EXIT
- # rule instead; EXIT can be hijacked (;-)
- if --fail-typecheck in [ modules.peek : ARGV ]
- {
- try ;
- {
- expect_derived2 $(a) ;
- }
- catch
- "Expected an instance of derived2 but got" instead
- ;
- }
-
- #try ;
- #{
- # new bad_subclass ;
- #}
- #catch
- # bad_subclass.bad_subclass failed to call base class constructor myclass.__init__
- # ;
-
- #try ;
- #{
- # class bad_subclass ;
- #}
- #catch bad_subclass has already been declared ;
-
- assert.result 3 4 5 : $(a).get-x ;
- assert.result 4 5 : $(a).get-y ;
- assert.result 4 : $(b).get-x ;
- assert.result X : $(b).get-y ;
- assert.result 4 : $(b).get-z ;
- assert.result 1 : $(c).get-x ;
- assert.result 2 : $(c).get-y ;
- assert.result 4 5 : $(a).f ;
- assert.result derived1.g : $(b).f ;
- assert.result derived2.g : $(c).f ;
- assert.result derived2.g : $(d).f ;
-
- assert.result 10 : $(b).get-foo ;
-
- $(a).invariant ;
- $(b).invariant2 ;
- $(b).invariant3 ;
-
- # Check that the __class__ attribute is getting properly set.
- assert.result myclass : $(a).get-class ;
- assert.result derived1 : $(b).get-class ;
- assert.result $(a) : $(a).get-instance ;
-
- $(a).set-x a.x ;
- $(b).set-x b.x ;
- $(c).set-x c.x ;
- $(d).set-x d.x ;
- assert.result a.x : $(a).get-x ;
- assert.result b.x : $(b).get-x ;
- assert.result c.x : $(c).get-x ;
- assert.result d.x : $(d).get-x ;
-
- class derived3 : derived1 derived2
- {
- rule __init__ ( )
- {
- }
- }
-
- assert.result : bases myclass ;
- assert.result myclass : bases derived1 ;
- assert.result myclass : bases derived2 ;
- assert.result derived1 derived2 : bases derived3 ;
-
- assert.true is-derived derived1 : myclass ;
- assert.true is-derived derived2 : myclass ;
- assert.true is-derived derived3 : derived1 ;
- assert.true is-derived derived3 : derived2 ;
- assert.true is-derived derived3 : derived1 derived2 myclass ;
- assert.true is-derived derived3 : myclass ;
-
- assert.false is-derived myclass : derived1 ;
-
- assert.true is-instance $(a) ;
- assert.false is-instance bar ;
-
- assert.true is-a $(a) : myclass ;
- assert.true is-a $(c) : derived2 ;
- assert.true is-a $(d) : myclass ;
- assert.false is-a literal : myclass ;
-}
diff --git a/jam-files/boost-build/kernel/errors.jam b/jam-files/boost-build/kernel/errors.jam
deleted file mode 100644
index 63b11e86..00000000
--- a/jam-files/boost-build/kernel/errors.jam
+++ /dev/null
@@ -1,274 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Print a stack backtrace leading to this rule's caller. Each argument
-# represents a line of output to be printed after the first line of the
-# backtrace.
-#
-rule backtrace ( skip-frames prefix messages * : * )
-{
- local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
- local drop-elements = $(frame-skips[$(skip-frames)]) ;
- if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
- {
- ECHO "warning: backtrace doesn't support skipping $(skip-frames) frames;"
- "using 1 instead." ;
- drop-elements = 5 ;
- }
-
- local args = $(.args) ;
- if $(.user-modules-only)
- {
- local bt = [ nearest-user-location ] ;
- ECHO "$(prefix) at $(bt) " ;
- for local n in $(args)
- {
- if $($(n))-is-not-empty
- {
- ECHO $(prefix) $($(n)) ;
- }
- }
- }
- else
- {
- # Get the whole backtrace, then drop the initial quadruples
- # corresponding to the frames that must be skipped.
- local bt = [ BACKTRACE ] ;
- bt = $(bt[$(drop-elements)-]) ;
-
- while $(bt)
- {
- local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
- ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
-
- # The first time through, print each argument on a separate line.
- for local n in $(args)
- {
- if $($(n))-is-not-empty
- {
- ECHO $(prefix) $($(n)) ;
- }
- }
- args = ; # Kill args so that this never happens again.
-
- # Move on to the next quadruple.
- bt = $(bt[5-]) ;
- }
- }
-}
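BACKTRACE returns a flat list of (file, line, module, rule) quadruples, which is why the frame-skips table above runs 5 9 13 ...: skipping N frames means dropping 4*N elements. A small Python sketch of the non-user-modules branch (a hypothetical helper, assuming well-formed quadruples):

def print_backtrace(frames, skip_frames):
    # 'frames' is a flat list: file, line, module, rule, file, line, ...
    flat = list(frames)[4 * skip_frames:]
    while flat:
        file_, line, module, rule = flat[:4]
        # The jam code strips the trailing "." from the module name.
        module = module[:-1] if module.endswith(".") else module
        print("%s:%s: in %s from module %s" % (file_, line, rule, module))
        flat = flat[4:]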
-
-.args ?= messages 2 3 4 5 6 7 8 9 ;
-.disabled ?= ;
-.last-error-$(.args) ?= ;
-
-
-# try-catch --
-#
-# This is not really an exception-handling mechanism, but it does allow us to
-# perform some error-checking on our error-checking. Errors are suppressed after
-# a try, and the first one is recorded. Use catch to check that the error
-# message matched expectations.
-
-# Begin looking for error messages.
-#
-rule try ( )
-{
- .disabled += true ;
- .last-error-$(.args) = ;
-}
-
-
-# Stop looking for error messages; generate an error if an argument of messages
-# is not found in the corresponding argument in the error call.
-#
-rule catch ( messages * : * )
-{
- .disabled = $(.disabled[2-]) ; # Pop the stack.
-
- import sequence ;
-
- if ! $(.last-error-$(.args))-is-not-empty
- {
- error-skip-frames 3 expected an error, but none occurred ;
- }
- else
- {
- for local n in $(.args)
- {
- if ! $($(n)) in $(.last-error-$(n))
- {
- local v = [ sequence.join $($(n)) : " " ] ;
- v ?= "" ;
- local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
-
- .last-error-$(.args) = ;
- error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
- : got \"$(joined)\" instead ;
- }
- }
- }
-}
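The try/catch pair above is a testing aid rather than real exception handling: try suppresses errors and records the first one, and catch compares the recorded text against the expected words. A compact Python sketch of that suppress-and-record pattern (the ErrorChecker class is hypothetical, not a Boost.Build API):

class ErrorChecker:
    def __init__(self):
        self.disabled = 0        # nesting depth of try blocks
        self.last_error = None   # first error seen while suppressed

    def error(self, *messages):
        if not self.disabled:
            raise SystemExit("error: " + " : ".join(messages))
        if self.last_error is None:      # remember only the first error
            self.last_error = messages

    def try_(self):
        self.disabled += 1
        self.last_error = None

    def catch(self, *expected):
        self.disabled -= 1
        if self.last_error is None:
            raise AssertionError("expected an error, but none occurred")
        # Compare each expected word against the corresponding recorded argument.
        for want, got in zip(expected, self.last_error):
            if want not in got:
                raise AssertionError("expected %r in %r" % (want, got))

Usage mirrors the jam __test__ rule further down: c = ErrorChecker(); c.try_(); c.error("an error occurred", "somewhere"); c.catch("an error occurred", "somewhere").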
-
-
-rule error-skip-frames ( skip-frames messages * : * )
-{
- if ! $(.disabled)
- {
- backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- EXIT ;
- }
- else if ! $(.last-error-$(.args))
- {
- for local n in $(.args)
- {
- # Add an extra empty string so that we always have
- # something in the event of an error
- .last-error-$(n) = $($(n)) "" ;
- }
- }
-}
-
-if --no-error-backtrace in [ modules.peek : ARGV ]
-{
- .no-error-backtrace = true ;
-}
-
-
-# Print an error message with a stack backtrace and exit.
-#
-rule error ( messages * : * )
-{
- if $(.no-error-backtrace)
- {
- # Print each argument on a separate line.
- for local n in $(.args)
- {
- if $($(n))-is-not-empty
- {
- if ! $(first-printed)
- {
- ECHO error: $($(n)) ;
- first-printed = true ;
- }
- else
- {
- ECHO $($(n)) ;
- }
- }
- }
- EXIT ;
- }
- else
- {
- error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-}
-
-
-# Same as 'error', but the generated backtrace will include only user files.
-#
-rule user-error ( messages * : * )
-{
- .user-modules-only = 1 ;
- error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-}
-
-
-# Print a warning message with a stack backtrace (does not exit).
-#
-rule warning
-{
- backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-}
-
-
-# Convert an arbitrary argument list into a list with ":" separators and quoted
-# elements representing the same information. This is mostly useful for
-# formatting descriptions of arguments with which a rule was called when
-# reporting an error.
-#
-rule lol->list ( * )
-{
- local result ;
- local remaining = 1 2 3 4 5 6 7 8 9 ;
- while $($(remaining))
- {
- local n = $(remaining[1]) ;
- remaining = $(remaining[2-]) ;
-
- if $(n) != 1
- {
- result += ":" ;
- }
- result += \"$($(n))\" ;
- }
- return $(result) ;
-}
-
-
-# Return the file:line for the nearest entry in the backtrace which corresponds
-# to a user module.
-#
-rule nearest-user-location ( )
-{
- local bt = [ BACKTRACE ] ;
-
- local result ;
- while $(bt) && ! $(result)
- {
- local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
- local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam) ;
-
- if [ MATCH $(user-modules) : $(bt[1]:D=) ]
- {
- result = $(bt[1]):$(bt[2]) ;
- }
- bt = $(bt[5-]) ;
- }
- return $(result) ;
-}
-
-
-# If optimized rule is available in Jam, use it.
-if NEAREST_USER_LOCATION in [ RULENAMES ]
-{
- rule nearest-user-location ( )
- {
- local r = [ NEAREST_USER_LOCATION ] ;
- return $(r[1]):$(r[2]) ;
- }
-}
-
-
-rule __test__ ( )
-{
- # Show that we can correctly catch an expected error.
- try ;
- {
- error an error occurred : somewhere ;
- }
- catch an error occurred : somewhere ;
-
- # Show that unexpected errors generate real errors.
- try ;
- {
- try ;
- {
- error an error occurred : somewhere ;
- }
- catch an error occurred : nowhere ;
- }
- catch expected \"nowhere\" in argument 2 ;
-
- # Show that not catching an error where one was expected is an error.
- try ;
- {
- try ;
- {
- }
- catch ;
- }
- catch expected an error, but none occurred ;
-}
diff --git a/jam-files/boost-build/kernel/modules.jam b/jam-files/boost-build/kernel/modules.jam
deleted file mode 100644
index 1f75354f..00000000
--- a/jam-files/boost-build/kernel/modules.jam
+++ /dev/null
@@ -1,354 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Essentially an include guard; ensures that no module is loaded multiple times.
-.loaded ?= ;
-
-# A list of modules currently being loaded for error reporting of circular
-# dependencies.
-.loading ?= ;
-
-# A list of modules needing to be tested using their __test__ rule.
-.untested ?= ;
-
-# A list of modules which have been tested using their __test__ rule.
-.tested ?= ;
-
-
-# Runs internal Boost Build unit tests for the specified module. The module's
-# __test__ rule is executed in its own module to eliminate any inadvertent
-# effects of testing module dependencies (such as assert) on the module itself.
-#
-local rule run-module-test ( m )
-{
- local tested-modules = [ modules.peek modules : .tested ] ;
-
- if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
- && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
- {
- modules.poke modules : .tested : $(tested-modules) $(m) ;
-
- if ! ( __test__ in [ RULENAMES $(m) ] )
- {
- local argv = [ peek : ARGV ] ;
- if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
- {
- ECHO warning: no __test__ rule defined in module $(m) ;
- }
- }
- else
- {
- if ! ( --quiet in $(argv) )
- {
- ECHO testing module $(m)... ;
- }
-
- local test-module = __test-$(m)__ ;
- IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) ] ;
- IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
- module $(test-module)
- {
- __test__ ;
- }
- }
- }
-}
-
-
-# Return the binding of the given module.
-#
-rule binding ( module )
-{
- return $($(module).__binding__) ;
-}
-
-
-# Sets the module-local value of a variable. This is the most reliable way to
-# set a module-local variable in a different module; it eliminates issues of
-# name shadowing due to dynamic scoping.
-#
-rule poke ( module-name ? : variables + : value * )
-{
- module $(<)
- {
- $(>) = $(3) ;
- }
-}
-
-
-# Returns the module-local value of a variable. This is the most reliable way to
-# examine a module-local variable in a different module; it eliminates issues of
-# name shadowing due to dynamic scoping.
-#
-rule peek ( module-name ? : variables + )
-{
- module $(<)
- {
- return $($(>)) ;
- }
-}
-
-
-# Call the given rule locally in the given module. Use this for rules accepting
-# rule names as arguments, so that the passed rule may be invoked in the context
-# of the rule's caller (for example, if the rule accesses module globals or is a
-# local rule). Note that rules called this way may accept at most 8 parameters.
-#
-rule call-in ( module-name ? : rule-name args * : * )
-{
- module $(module-name)
- {
- return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- }
-}
-
-
-# Given a possibly qualified rule name and arguments, remove any initial module
-# qualification from the rule and invoke it in that module. If there is no
-# module qualification, the rule is invoked in the global module. Note that
-# rules called this way may accept at most 8 parameters.
-#
-rule call-locally ( qualified-rule-name args * : * )
-{
- local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
- local rule-name = $(module-rule[2]) ;
- rule-name ?= $(qualified-rule-name) ;
- # We pass only 8 parameters here since Boost Jam allows at most 9 rule
- # parameter positions and the call-in rule already uses up the initial
- # position for the module name.
- return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
- $(4) : $(5) : $(6) : $(7) : $(8) ] ;
-}
-
-
-# Load the indicated module if it is not already loaded.
-#
-rule load (
- module-name # Name of module to load. Rules will be defined in this
- # module.
- : filename ? # (partial) path to file; Defaults to $(module-name).jam.
- : search * # Directories in which to search for filename. Defaults to
- # $(BOOST_BUILD_PATH).
-)
-{
- # Avoid loading modules twice.
- if ! ( $(module-name) in $(.loaded) )
- {
- filename ?= $(module-name).jam ;
-
- # Mark the module loaded so we do not try to load it recursively.
- .loaded += $(module-name) ;
-
- # Suppress tests if any module loads are already in progress.
- local suppress-test = $(.loading[1]) ;
-
- # Push this module on the loading stack.
- .loading += $(module-name) ;
-
- # Remember that it is untested.
- .untested += $(module-name) ;
-
- # Insert the new module's __name__ and __file__ globals.
- poke $(module-name) : __name__ : $(module-name) ;
- poke $(module-name) : __file__ : $(filename) ;
-
- module $(module-name)
- {
- # Add some grist so that the module will have a unique target name.
- local module-target = $(__file__:G=module@) ;
-
- local search = $(3) ;
- search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
- SEARCH on $(module-target) = $(search) ;
- BINDRULE on $(module-target) = modules.record-binding ;
-
- include $(module-target) ;
-
- # Allow the module to see its own names with full qualification.
- local rules = [ RULENAMES $(__name__) ] ;
- IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
- }
-
- if $(module-name) != modules && ! [ binding $(module-name) ]
- {
- import errors ;
- errors.error "Could not find module" $(module-name) in $(search) ;
- }
-
- # Pop the loading stack. Must happen before testing or we will run into
- # a circular loading dependency.
- .loading = $(.loading[1--2]) ;
-
- # Run any pending tests if this is an outer load.
- if ! $(suppress-test)
- {
- local argv = [ peek : ARGV ] ;
- for local m in $(.untested)
- {
- run-module-test $(m) ;
- }
- .untested = ;
- }
- }
- else if $(module-name) in $(.loading)
- {
- import errors ;
- errors.error loading \"$(module-name)\"
- : circular module loading dependency:
- : $(.loading)" ->" $(module-name) ;
- }
-}
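The load rule above combines an include guard (.loaded), set before the include so recursive loads become no-ops, with a loading stack (.loading) used purely to report circular dependencies. A minimal Python sketch of that discipline, where 'loader' is a hypothetical stand-in for including <module>.jam:

_loaded = set()
_loading = []

def load_module(name, loader):
    if name in _loaded:
        if name in _loading:
            raise RuntimeError("circular module loading dependency: "
                               + " -> ".join(_loading + [name]))
        return                      # already loaded: nothing to do
    _loaded.add(name)               # guard first, so re-entry is detected
    _loading.append(name)
    try:
        loader(name)                # may call load_module() recursively
    finally:
        _loading.pop()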
-
-
-# This helper is used by load (above) to record the binding (path) of each
-# loaded module.
-#
-rule record-binding ( module-target : binding )
-{
- $(.loading[-1]).__binding__ = $(binding) ;
-}
-
-
-# Transform each path in the list, with all backslashes converted to forward
-# slashes and all detectable redundancy removed. Something like this is probably
-# needed in path.jam, but I am not sure of that, I do not understand it, and I
-# am not ready to move all of path.jam into the kernel.
-#
-local rule normalize-raw-paths ( paths * )
-{
- local result ;
- for p in $(paths:T)
- {
- result += [ NORMALIZE_PATH $(p) ] ;
- }
- return $(result) ;
-}
-
-
-.cwd = [ PWD ] ;
-
-
-# Load the indicated module and import rule names into the current module. Any
-# members of rules-opt will be available without qualification in the caller's
-# module. Any members of rename-opt will be taken as the names of the rules in
-# the caller's module, in place of the names they have in the imported module.
-# If rules-opt = '*', all rules from the indicated module are imported into the
-# caller's module. If rename-opt is supplied, it must have the same number of
-# elements as rules-opt.
-#
-rule import ( module-names + : rules-opt * : rename-opt * )
-{
- if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
- {
- import errors ;
- errors.error "Rule aliasing is only available for explicit imports." ;
- }
-
- if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
- {
- import errors ;
- errors.error "When loading multiple modules, no specific rules or"
- "renaming is allowed" ;
- }
-
- local caller = [ CALLER_MODULE ] ;
-
- # Import each specified module
- for local m in $(module-names)
- {
- if ! $(m) in $(.loaded)
- {
- # If the importing module isn't already in the BOOST_BUILD_PATH,
- # prepend it to the path. We don't want to invert the search order
- # of modules that are already there.
-
- local caller-location ;
- if $(caller)
- {
- caller-location = [ binding $(caller) ] ;
- caller-location = $(caller-location:D) ;
- caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ;
- }
-
- local search = [ peek : BOOST_BUILD_PATH ] ;
- search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
-
- if $(caller-location) && ! $(caller-location) in $(search)
- {
- search = $(caller-location) $(search) ;
- }
-
- load $(m) : : $(search) ;
- }
-
- IMPORT_MODULE $(m) : $(caller) ;
-
- if $(rules-opt)
- {
- local source-names ;
- if $(rules-opt) = *
- {
- local all-rules = [ RULENAMES $(m) ] ;
- source-names = $(all-rules) ;
- }
- else
- {
- source-names = $(rules-opt) ;
- }
- local target-names = $(rename-opt) ;
- target-names ?= $(source-names) ;
- IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ;
- }
- }
-}
-
-
-# Define exported copies in $(target-module) of all rules exported from
-# $(source-module). Also make them available in the global module with
-# qualification, so that it is just as though the rules were defined originally
-# in $(target-module).
-#
-rule clone-rules ( source-module target-module )
-{
- local rules = [ RULENAMES $(source-module) ] ;
-
- IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ;
- EXPORT $(target-module) : $(rules) ;
- IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ;
-}
-
-
-# These rules need to be available in all modules to implement module loading
-# itself and other fundamental operations.
-local globalize = peek poke record-binding ;
-IMPORT modules : $(globalize) : : modules.$(globalize) ;
-
-
-rule __test__ ( )
-{
- import assert ;
- import modules : normalize-raw-paths ;
-
- module modules.__test__
- {
- foo = bar ;
- }
-
- assert.result bar : peek modules.__test__ : foo ;
-
- poke modules.__test__ : foo : bar baz ;
- assert.result bar baz : peek modules.__test__ : foo ;
-
- assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
- assert.result . : normalize-raw-paths . ;
- assert.result .. : normalize-raw-paths .. ;
- assert.result ../.. : normalize-raw-paths ../.. ;
- assert.result .. : normalize-raw-paths ./.. ;
- assert.result / / : normalize-raw-paths / \\ ;
- assert.result a : normalize-raw-paths a ;
- assert.result a : normalize-raw-paths a/ ;
- assert.result /a : normalize-raw-paths /a/ ;
- assert.result / : normalize-raw-paths /a/.. ;
-}
diff --git a/jam-files/boost-build/options/help.jam b/jam-files/boost-build/options/help.jam
deleted file mode 100644
index b507e1ed..00000000
--- a/jam-files/boost-build/options/help.jam
+++ /dev/null
@@ -1,212 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2006 Rene Rivera
-# Copyright 2003, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module is the plug-in handler for the --help and --help-.*
-# command-line options
-import modules ;
-import assert ;
-import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
-import sequence ;
-import set ;
-import project ;
-import print ;
-import os ;
-import version ;
-import path ;
-
-# Names that look like modules but really are not.
-#
-.not-modules =
- boost-build bootstrap site-config test user-config
- -tools allyourbase boost-base features python stlport testing unit-tests ;
-
-# The help system options are parsed here and handed off to the doc
-# module to translate into documentation requests and actions. The
-# understood options are:
-#
-# --help-disable-<option>
-# --help-doc-options
-# --help-enable-<option>
-# --help-internal
-# --help-options
-# --help-usage
-# --help-output <type>
-# --help-output-file <file>
-# --help [<module-or-class>]
-#
-rule process (
- command # The option.
- : values * # The values, starting after the "=".
- )
-{
- assert.result --help : MATCH ^(--help).* : $(command) ;
- local did-help = ;
- switch $(command)
- {
- case --help-internal :
- local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
- path-to-modules ?= . ;
- local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
- local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
- local modules-to-list =
- [ sequence.insertion-sort
- [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
- local modules-to-scan ;
- for local m in $(modules-to-list)
- {
- local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
- modules-to-scan += $(module-files[1]) ;
- }
- do-scan $(modules-to-scan) : print-help-all ;
- did-help = true ;
-
- case --help-enable-* :
- local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
- set-option $(option) : enabled ;
- did-help = true ;
-
- case --help-disable-* :
- local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
- set-option $(option) ;
- did-help = true ;
-
- case --help-output :
- set-output $(values[1]) ;
- did-help = true ;
-
- case --help-output-file :
- set-output-file $(values[1]) ;
- did-help = true ;
-
- case --help-doc-options :
- local doc-module-spec = [ split-symbol doc ] ;
- do-scan $(doc-module-spec[1]) : print-help-options ;
- did-help = true ;
-
- case --help-options :
- print-help-usage ;
- did-help = true ;
-
- case --help :
- local spec = $(values[1]) ;
- if $(spec)
- {
- local spec-parts = [ split-symbol $(spec) ] ;
- if $(spec-parts)
- {
- if $(spec-parts[2])
- {
- do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
- do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
- do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
- }
- else
- {
- do-scan $(spec-parts[1]) : print-help-module ;
- }
- }
- else
- {
- EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
- }
- }
- else
- {
- version.print ;
- ECHO ;
- # First print documentation from the current Jamfile, if any.
- # FIXME: Generally, this duplication of project.jam logic is bad.
- local names = [ modules.peek project : JAMROOT ]
- [ modules.peek project : JAMFILE ] ;
- local project-file = [ path.glob . : $(names) ] ;
- if ! $(project-file)
- {
- project-file = [ path.glob-in-parents . : $(names) ] ;
- }
-
- for local p in $(project-file)
- {
- do-scan $(p) : print-help-project $(p) ;
- }
-
- # Next any user-config help.
- local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
- local user-config = [ GLOB $(user-path) : user-config.jam ] ;
- if $(user-config)
- {
- do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
- }
-
- # Next any site-config help.
- local site-config = [ GLOB $(user-path) : site-config.jam ] ;
- if $(site-config)
- {
- do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
- }
-
- # Then the overall help.
- print-help-top ;
- }
- did-help = true ;
- }
- if $(did-help)
- {
- UPDATE all ;
- NOCARE all ;
- }
- return $(did-help) ;
-}
-
-# Split a reference to a symbol into module and symbol parts.
-#
-local rule split-symbol (
- symbol # The symbol to split.
- )
-{
- local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
- path-to-modules ?= . ;
- local module-name = $(symbol) ;
- local symbol-name = ;
- local result = ;
- while ! $(result)
- {
- local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
- if $(module-path)
- {
- # The 'module-name' in fact refers to a module. Return the full
- # module path and a symbol within it. If the 'symbol' passed to this
- # rule is already a module, 'symbol-name' will be empty. Otherwise,
- # it's initialized on the previous loop iteration.
- # In case there are several modules by this name,
- # use the first one.
- result = $(module-path[1]) $(symbol-name) ;
- }
- else
- {
- if ! $(module-name:S)
- {
- result = - ;
- }
- else
- {
- local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
- if $(symbol-name)
- {
- symbol-name = $(next-symbol-part).$(symbol-name) ;
- }
- else
- {
- symbol-name = $(next-symbol-part) ;
- }
- module-name = $(module-name:B) ;
- }
- }
- }
- if $(result) != -
- {
- return $(result) ;
- }
-}
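split-symbol resolves a possibly dotted name such as doc.print-help by repeatedly peeling the last '.part' component off until the remaining prefix names an existing module file. A short Python sketch of the same loop, with module_exists standing in for the GLOB over BOOST_BUILD_PATH (hypothetical names):

def split_symbol(symbol, module_exists):
    module_name, symbol_name = symbol, None
    while True:
        if module_exists(module_name):
            return module_name, symbol_name   # symbol_name is None for a bare module
        if "." not in module_name:
            return None                       # no module component found
        module_name, tail = module_name.rsplit(".", 1)
        # Re-attach the part just stripped in front of anything stripped earlier.
        symbol_name = tail if symbol_name is None else tail + "." + symbol_name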
diff --git a/jam-files/boost-build/site-config.jam b/jam-files/boost-build/site-config.jam
deleted file mode 100644
index ad22d674..00000000
--- a/jam-files/boost-build/site-config.jam
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
diff --git a/jam-files/boost-build/tools/__init__.py b/jam-files/boost-build/tools/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/jam-files/boost-build/tools/__init__.py
+++ /dev/null
diff --git a/jam-files/boost-build/tools/acc.jam b/jam-files/boost-build/tools/acc.jam
deleted file mode 100644
index f04c9dc8..00000000
--- a/jam-files/boost-build/tools/acc.jam
+++ /dev/null
@@ -1,118 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Copyright Toon Knapen 2004.
-# Copyright Boris Gubenko 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# Boost.Build V2 toolset for the HP aC++ compiler.
-#
-
-import toolset : flags ;
-import feature ;
-import generators ;
-import common ;
-
-feature.extend toolset : acc ;
-toolset.inherit acc : unix ;
-generators.override builtin.lib-generator : acc.prebuilt ;
-generators.override acc.searched-lib-generator : searched-lib-generator ;
-
-# Configures the acc toolset.
-rule init ( version ? : user-provided-command * : options * )
-{
- local condition = [ common.check-init-parameters acc
- : version $(version) ] ;
-
- local command = [ common.get-invocation-command acc : aCC
- : $(user-provided-command) ] ;
-
- common.handle-options acc : $(condition) : $(command) : $(options) ;
-}
-
-
-# Declare generators
-generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ;
-generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ;
-
-# Declare flags.
-flags acc CFLAGS <optimization>off : ;
-flags acc CFLAGS <optimization>speed : -O3 ;
-flags acc CFLAGS <optimization>space : -O2 ;
-
-flags acc CFLAGS <inlining>off : +d ;
-flags acc CFLAGS <inlining>on : ;
-flags acc CFLAGS <inlining>full : ;
-
-flags acc C++FLAGS <exception-handling>off : ;
-flags acc C++FLAGS <exception-handling>on : ;
-
-flags acc C++FLAGS <rtti>off : ;
-flags acc C++FLAGS <rtti>on : ;
-
-# We want the full path to the sources in the debug symbols because otherwise
-# the debugger won't find the sources when we use boost.build.
-flags acc CFLAGS <debug-symbols>on : -g ;
-flags acc LINKFLAGS <debug-symbols>on : -g ;
-flags acc LINKFLAGS <debug-symbols>off : -s ;
-
-# V2 does not have <shared-linkable>, not sure what this meant in V1.
-# flags acc CFLAGS <shared-linkable>true : +Z ;
-
-flags acc CFLAGS <profiling>on : -pg ;
-flags acc LINKFLAGS <profiling>on : -pg ;
-
-flags acc CFLAGS <address-model>64 : +DD64 ;
-flags acc LINKFLAGS <address-model>64 : +DD64 ;
-
-# It is unknown if there's a separate option for rpath used only
-# at link time, similar to -rpath-link in GNU. We'll use -L.
-flags acc RPATH_LINK : <xdll-path> ;
-
-flags acc CFLAGS <cflags> ;
-flags acc C++FLAGS <cxxflags> ;
-flags acc DEFINES <define> ;
-flags acc UNDEFS <undef> ;
-flags acc HDRS <include> ;
-flags acc STDHDRS <sysinclude> ;
-flags acc LINKFLAGS <linkflags> ;
-flags acc ARFLAGS <arflags> ;
-
-flags acc LIBPATH <library-path> ;
-flags acc NEEDLIBS <library-file> ;
-flags acc FINDLIBS <find-shared-library> ;
-flags acc FINDLIBS <find-static-library> ;
-
-# Select the compiler name according to the threading model.
-flags acc CFLAGS <threading>multi : -mt ;
-flags acc LINKFLAGS <threading>multi : -mt ;
-
-flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
-
-
-actions acc.link bind NEEDLIBS
-{
- $(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
-}
-
-SPACE = " " ;
-actions acc.link.dll bind NEEDLIBS
-{
- $(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
-}
-
-actions acc.compile.c
-{
- cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
-}
-
-actions acc.compile.c++
-{
- $(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
-}
-
-actions updated together piecemeal acc.archive
-{
- ar ru$(ARFLAGS:E="") "$(<)" "$(>)"
-}
diff --git a/jam-files/boost-build/tools/auto-index.jam b/jam-files/boost-build/tools/auto-index.jam
deleted file mode 100644
index ebbf344e..00000000
--- a/jam-files/boost-build/tools/auto-index.jam
+++ /dev/null
@@ -1,212 +0,0 @@
-
-import feature ;
-import generators ;
-import "class" ;
-import toolset ;
-import targets ;
-import "class" : new ;
-import project ;
-
-feature.feature auto-index : off "on" ;
-feature.feature auto-index-internal : off "on" ;
-feature.feature auto-index-verbose : off "on" ;
-feature.feature auto-index-no-duplicates : off "on" ;
-feature.feature auto-index-script : : free ;
-feature.feature auto-index-prefix : : free ;
-feature.feature auto-index-type : : free ;
-feature.feature auto-index-section-names : "on" off ;
-
-toolset.flags auto-index.auto-index FLAGS <auto-index-internal>on : --internal-index ;
-toolset.flags auto-index.auto-index SCRIPT <auto-index-script> ;
-toolset.flags auto-index.auto-index PREFIX <auto-index-prefix> ;
-toolset.flags auto-index.auto-index INDEX_TYPE <auto-index-type> ;
-toolset.flags auto-index.auto-index FLAGS <auto-index-verbose>on : --verbose ;
-toolset.flags auto-index.auto-index FLAGS <auto-index-no-duplicates>on : --no-duplicates ;
-toolset.flags auto-index.auto-index FLAGS <auto-index-section-names>off : --no-section-names ;
-
-# <auto-index-binary> shell command to run AutoIndex
-# <auto-index-binary-dependencies> targets to build AutoIndex from sources.
-feature.feature <auto-index-binary> : : free ;
-feature.feature <auto-index-binary-dependencies> : : free dependency ;
-
-class auto-index-generator : generator
-{
- import common modules path targets build-system ;
- rule run ( project name ? : property-set : sources * )
- {
- # AutoIndex invocation command and dependencies.
- local auto-index-binary = [ modules.peek auto-index : .command ] ;
- local auto-index-binary-dependencies ;
-
- if $(auto-index-binary)
- {
- # Use user-supplied command.
- auto-index-binary = [ common.get-invocation-command auto-index : auto-index : $(auto-index-binary) ] ;
- }
- else
- {
- # Search for AutoIndex sources in sensible places, like
- # $(BOOST_ROOT)/tools/auto_index
- # $(BOOST_BUILD_PATH)/../../auto_index
-
- # And build auto-index executable from sources.
-
- local boost-root = [ modules.peek : BOOST_ROOT ] ;
- local boost-build-path = [ build-system.location ] ;
- local boost-build-path2 = [ modules.peek : BOOST_BUILD_PATH ] ;
-
- local auto-index-dir ;
-
- if $(boost-root)
- {
- auto-index-dir += [ path.join $(boost-root) tools ] ;
- }
-
- if $(boost-build-path)
- {
- auto-index-dir += $(boost-build-path)/../.. ;
- }
- if $(boost-build-path2)
- {
- auto-index-dir += $(boost-build-path2)/.. ;
- }
-
- #ECHO $(auto-index-dir) ;
- auto-index-dir = [ path.glob $(auto-index-dir) : auto_index ] ;
- #ECHO $(auto-index-dir) ;
-
- # If the AutoIndex source directory was found, mark its main target
- # as a dependency for the current project. Otherwise, try to find
- # 'auto-index' in the user's PATH.
- if $(auto-index-dir)
- {
- auto-index-dir = [ path.make $(auto-index-dir[1]) ] ;
- auto-index-dir = $(auto-index-dir)/build ;
-
- #ECHO $(auto-index-dir) ;
-
- # Get the main-target in AutoIndex directory.
- local auto-index-main-target = [ targets.resolve-reference $(auto-index-dir) : $(project) ] ;
-
- #ECHO $(auto-index-main-target) ;
-
- # The first element are actual targets, the second are
- # properties found in target-id. We do not care about these
- # since we have passed the id ourselves.
- auto-index-main-target =
- [ $(auto-index-main-target[1]).main-target auto_index ] ;
-
- #ECHO $(auto-index-main-target) ;
-
- auto-index-binary-dependencies =
- [ $(auto-index-main-target).generate [ $(property-set).propagated ] ] ;
-
- # Ignore usage-requirements returned as first element.
- auto-index-binary-dependencies = $(auto-index-binary-dependencies[2-]) ;
-
- # Some toolsets generate extra targets (e.g. RSP). We must mark
- # all targets as dependencies for the project, but we will only
- # use the EXE target for auto-index-to-boostbook translation.
- for local target in $(auto-index-binary-dependencies)
- {
- if [ $(target).type ] = EXE
- {
- auto-index-binary =
- [ path.native
- [ path.join
- [ $(target).path ]
- [ $(target).name ]
- ]
- ] ;
- }
- }
- }
- else
- {
- ECHO "AutoIndex warning: The path to the auto-index executable was" ;
- ECHO " not provided. Additionally, couldn't find AutoIndex" ;
- ECHO " sources searching in" ;
- ECHO " * BOOST_ROOT/tools/auto-index" ;
- ECHO " * BOOST_BUILD_PATH/../../auto-index" ;
- ECHO " Will now try to find a precompiled executable by searching" ;
- ECHO " the PATH for 'auto-index'." ;
- ECHO " To disable this warning in the future, or to completely" ;
- ECHO " avoid compilation of auto-index, you can explicitly set the" ;
- ECHO " path to a auto-index executable command in user-config.jam" ;
- ECHO " or site-config.jam with the call" ;
- ECHO " using auto-index : /path/to/auto-index ;" ;
-
- # As a last resort, search for 'auto-index' command in path. Note
- # that even if the 'auto-index' command is not found,
- # get-invocation-command will still return 'auto-index' and might
- # generate an error while generating the virtual-target.
-
- auto-index-binary = [ common.get-invocation-command auto-index : auto-index ] ;
- }
- }
-
- # Add $(auto-index-binary-dependencies) as a dependency of the current
- # project and set it as the <auto-index-binary> feature for the
- # auto-index-to-boostbook rule, below.
- property-set = [ $(property-set).add-raw
- <dependency>$(auto-index-binary-dependencies)
- <auto-index-binary>$(auto-index-binary)
- <auto-index-binary-dependencies>$(auto-index-binary-dependencies)
- ] ;
-
- #ECHO "binary = " $(auto-index-binary) ;
- #ECHO "dependencies = " $(auto-index-binary-dependencies) ;
-
- if [ $(property-set).get <auto-index> ] = "on"
- {
- return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ;
- }
- else
- {
- return [ generators.construct $(project) $(name) : DOCBOOK : $(property-set)
- : $(sources) ] ;
- }
- }
-}
-
-# Initialization of toolset.
-#
-# Parameters:
-# command ? -> path to AutoIndex executable.
-#
-# When the command is not supplied, the toolset will search for the AutoIndex
-# directory and compile the executable from source. If that fails, we still
-# search the path for 'auto_index'.
-#
-rule init (
- command ? # path to the AutoIndex executable.
- )
-{
- if ! $(.initialized)
- {
- .initialized = true ;
- .command = $(command) ;
- }
-}
-
-toolset.flags auto-index.auto-index AI-COMMAND <auto-index-binary> ;
-toolset.flags auto-index.auto-index AI-DEPENDENCIES <auto-index-binary-dependencies> ;
-
-generators.register [ class.new auto-index-generator auto-index.auto-index : DOCBOOK : DOCBOOK(%.auto_index) ] ;
-generators.override auto-index.auto-index : boostbook.boostbook-to-docbook ;
-
-rule auto-index ( target : source : properties * )
-{
- # Signal dependency of auto-index sources on <auto-index-binary-dependencies>
- # upon invocation of auto-index-to-boostbook.
- #ECHO "AI-COMMAND= " $(AI-COMMAND) ;
- DEPENDS $(target) : [ on $(target) return $(AI-DEPENDENCIES) ] ;
- #DEPENDS $(target) : [ on $(target) return $(SCRIPT) ] ;
-}
-
-actions auto-index
-{
- $(AI-COMMAND) $(FLAGS) "--prefix="$(PREFIX) "--script="$(SCRIPT) "--index-type="$(INDEX_TYPE) "--in="$(>) "--out="$(<)
-}
-
-
diff --git a/jam-files/boost-build/tools/bison.jam b/jam-files/boost-build/tools/bison.jam
deleted file mode 100644
index 0689d4bd..00000000
--- a/jam-files/boost-build/tools/bison.jam
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import generators ;
-import feature ;
-import type ;
-import property ;
-
-feature.feature bison.prefix : : free ;
-type.register Y : y ;
-type.register YY : yy ;
-generators.register-standard bison.bison : Y : C H ;
-generators.register-standard bison.bison : YY : CPP HPP ;
-
-rule init ( )
-{
-}
-
-rule bison ( dst dst_header : src : properties * )
-{
- local r = [ property.select bison.prefix : $(properties) ] ;
- if $(r)
- {
- PREFIX_OPT on $(<) = -p $(r:G=) ;
- }
-}
-
-actions bison
-{
- bison $(PREFIX_OPT) -d -o $(<[1]) $(>)
-}
diff --git a/jam-files/boost-build/tools/boostbook-config.jam b/jam-files/boost-build/tools/boostbook-config.jam
deleted file mode 100644
index 6e3f3ddc..00000000
--- a/jam-files/boost-build/tools/boostbook-config.jam
+++ /dev/null
@@ -1,13 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for BoostBook tools. To use, just import this module.
-#
-# This module is deprecated.
-# using boostbook ;
-# with no arguments now suffices.
-
-import toolset : using ;
-
-using boostbook ;
diff --git a/jam-files/boost-build/tools/boostbook.jam b/jam-files/boost-build/tools/boostbook.jam
deleted file mode 100644
index 3a5964c6..00000000
--- a/jam-files/boost-build/tools/boostbook.jam
+++ /dev/null
@@ -1,727 +0,0 @@
-# Copyright 2003, 2004, 2005 Dave Abrahams
-# Copyright 2003, 2004, 2005 Douglas Gregor
-# Copyright 2005, 2006, 2007 Rene Rivera
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines rules to handle generation of documentation
-# from BoostBook sources.
-#
-# The type of output is controlled by the <format> feature which can
-# have the following values::
-#
-# * html: Generates html documentation. This is the default.
-# * xhtml: Generates xhtml documentation
-# * htmlhelp: Generates html help output.
-# * onehtml: Generates a single html page.
-# * man: Generates man pages.
-# * pdf: Generates pdf documentation.
-# * ps: Generates postscript output.
-# * docbook: Generates docbook XML.
-# * fo: Generates XSL formatting objects.
-# * tests: Extracts test cases from the boostbook XML.
-#
-# format is an implicit feature, so typing pdf on the command
-# line (for example) is a short-cut for format=pdf.
-
-import "class" : new ;
-import common ;
-import errors ;
-import targets ;
-import feature ;
-import generators ;
-import print ;
-import property ;
-import project ;
-import property-set ;
-import regex ;
-import scanner ;
-import sequence ;
-import make ;
-import os ;
-import type ;
-import modules path project ;
-import build-system ;
-
-import xsltproc : xslt xslt-dir ;
-
-# Make this module into a project.
-project.initialize $(__name__) ;
-project boostbook ;
-
-
-feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests
- : incidental implicit composite propagated ;
-
-type.register DTDXML : dtdxml ;
-type.register XML : xml ;
-type.register BOOSTBOOK : boostbook : XML ;
-type.register DOCBOOK : docbook : XML ;
-type.register FO : fo : XML ;
-type.register PDF : pdf ;
-type.register PS : ps ;
-type.register XSLT : xsl : XML ;
-type.register HTMLDIR ;
-type.register XHTMLDIR ;
-type.register HTMLHELP ;
-type.register MANPAGES ;
-type.register TESTS : tests ;
-# Artificial target type, used to require invocation of top-level
-# BoostBook generator.
-type.register BOOSTBOOK_MAIN ;
-
-
-# Initialize BoostBook support.
-rule init (
- docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not
- # provided, we use DOCBOOK_XSL_DIR from the environment
- # (if available) or look in standard locations.
- # Otherwise, we let the XML processor load the
- # stylesheets remotely.
-
- : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
- # DOCBOOK_DTD_DIR from the environment (if available) or
- # look in standard locations. Otherwise, we let the XML
- # processor load the DTD remotely.
-
- : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
-)
-{
-
- if ! $(.initialized)
- {
- .initialized = true ;
-
- check-boostbook-dir $(boostbook-dir) ;
- find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
-
- # Register generators only if we were called via "using boostbook ; "
- generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ;
- generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ;
- generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ;
- generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
- generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
- generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
- generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
- generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
- generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ;
-
- # The same about Jamfile main target rules.
- IMPORT $(__name__) : boostbook : : boostbook ;
- }
- else
- {
- if $(docbook-xsl-dir)
- {
- modify-config ;
- .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
- check-docbook-xsl-dir ;
- }
- if $(docbook-dtd-dir)
- {
- modify-config ;
- .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
- check-docbook-dtd-dir ;
- }
- if $(boostbook-dir)
- {
- modify-config ;
- check-boostbook-dir $(boostbook-dir) ;
- local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
- local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
- .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
- .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
- check-boostbook-xsl-dir ;
- check-boostbook-dtd-dir ;
- }
- }
-}
-
-rule lock-config ( )
-{
- if ! $(.initialized)
- {
- errors.user-error "BoostBook has not been configured." ;
- }
- if ! $(.config-locked)
- {
- .config-locked = true ;
- }
-}
-
-rule modify-config ( )
-{
- if $(.config-locked)
- {
- errors.user-error "BoostBook configuration cannot be changed after it has been used." ;
- }
-}
-
-rule find-boost-in-registry ( keys * )
-{
- local boost-root = ;
- for local R in $(keys)
- {
- local installed-boost = [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)"
- : "InstallRoot" ] ;
- if $(installed-boost)
- {
- boost-root += [ path.make $(installed-boost) ] ;
- }
- }
- return $(boost-root) ;
-}
-
-rule check-docbook-xsl-dir ( )
-{
- if $(.docbook-xsl-dir)
- {
- if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
- {
- errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
- }
- }
- }
-}
-
-rule check-docbook-dtd-dir ( )
-{
- if $(.docbook-dtd-dir)
- {
- if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
- {
- errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
- }
- }
- }
-}
-
-rule check-boostbook-xsl-dir ( )
-{
- if ! $(.boostbook-xsl-dir)
- {
- errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ;
- }
- else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
- {
- errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
- }
- }
-}
-
-rule check-boostbook-dtd-dir ( )
-{
- if ! $(.boostbook-dtd-dir)
- {
- errors.user-error "error: BoostBook: could not find boostbook DTD." ;
- }
- else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
- {
- errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
- }
- }
-}
-
-rule check-boostbook-dir ( boostbook-dir ? )
-{
- if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
- {
- errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ;
- }
-}
-
-rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
-{
- docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
- docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
- boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
-
- # Look for the boostbook stylesheets relative to BOOST_ROOT
- # and Boost.Build.
- local boost-build-root = [ path.make [ build-system.location ] ] ;
- local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
-
- local boost-root = [ modules.peek : BOOST_ROOT ] ;
- if $(boost-root)
- {
- boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ;
- }
- boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
-
- # Try to find the tools in platform specific locations
- if [ os.name ] = NT
- {
- # If installed by the Boost installer.
- local boost-root = ;
-
- local boost-installer-versions = snapshot cvs 1.33.0 ;
- local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
- local boostpro-installer-versions =
- 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
- 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
-
- local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ;
-
- # Make sure that the most recent version is searched for first
- boost-root += [ sequence.reverse
- [ find-boost-in-registry
- Boost-Consulting.com\\$(boost-consulting-installer-versions)
- boostpro.com\\$(boostpro-installer-versions) ] ] ;
-
- # Plausible locations.
- local root = [ PWD ] ;
- while $(root) != $(root:D) { root = $(root:D) ; }
- root = [ path.make $(root) ] ;
- local search-dirs = ;
- local docbook-search-dirs = ;
- for local p in $(boost-root) {
- search-dirs += [ path.join $(p) tools ] ;
- }
- for local p in $(old-installer-root)
- {
- search-dirs += [ path.join $(p) share ] ;
- docbook-search-dirs += [ path.join $(p) share ] ;
- }
- search-dirs += [ path.join $(root) Boost tools ] ;
- search-dirs += [ path.join $(root) Boost share ] ;
- docbook-search-dirs += [ path.join $(root) Boost share ] ;
-
- docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
- docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
- boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
- }
- else
- {
- # Plausible locations.
-
- local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
- local dtd-versions = 4.2 ;
-
- docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
- docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ;
- docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
-
- docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
- docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ;
- docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ;
-
- boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
-
- # Ubuntu Linux
- docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ;
- docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ;
- }
-
- if $(docbook-xsl-dir)
- {
- .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
- }
- if $(docbook-dtd-dir)
- {
- .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
- }
-
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: Boost.Book: searching XSL/DTD in" ;
- ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ;
- }
- local boostbook-xsl-dir ;
- for local dir in $(boostbook-dir) {
- boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
- }
- local boostbook-dtd-dir ;
- for local dir in $(boostbook-dir) {
- boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
- }
- .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
- .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
-
- check-docbook-xsl-dir ;
- check-docbook-dtd-dir ;
- check-boostbook-xsl-dir ;
- check-boostbook-dtd-dir ;
-}
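
On non-Windows systems the search above amounts to globbing a handful of well-known share directories for docbook-xsl*, docbook-xml* and boostbook* subdirectories and taking the first hit. A rough Python sketch of that branch follows; it is a hedged illustration, not part of Boost.Build, using the paths and patterns listed in the rule.

    import glob
    import os

    SHARE_DIRS = ["/usr/local/share", "/usr/share", "/opt/share", "/opt/local/share"]

    def first_match(patterns):
        # Return the first filesystem entry matching any pattern, or None.
        for pattern in patterns:
            hits = sorted(glob.glob(pattern))
            if hits:
                return hits[0]
        return None

    docbook_xsl_dir = first_match(
        [os.path.join(d, "docbook-xsl*") for d in SHARE_DIRS]
        + ["/usr/share/xml/docbook/stylesheet/nwalsh"])    # Ubuntu location
    docbook_dtd_dir = first_match(
        [os.path.join(d, "docbook-xml*") for d in SHARE_DIRS]
        + ["/usr/share/xml/docbook/schema/dtd/4.2"])       # Ubuntu location
    boostbook_dir = first_match(
        [os.path.join(d, "boostbook*") for d in SHARE_DIRS])
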
-
-rule xsl-dir
-{
- lock-config ;
- return $(.boostbook-xsl-dir) ;
-}
-
-rule dtd-dir
-{
- lock-config ;
- return $(.boostbook-dtd-dir) ;
-}
-
-rule docbook-xsl-dir
-{
- lock-config ;
- return $(.docbook-xsl-dir) ;
-}
-
-rule docbook-dtd-dir
-{
- lock-config ;
- return $(.docbook-dtd-dir) ;
-}
-
-rule dtdxml-to-boostbook ( target : source : properties * )
-{
- lock-config ;
- xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl"
- : $(properties) ;
-}
-
-rule boostbook-to-docbook ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
- xslt $(target) : $(source) $(stylesheet) : $(properties) ;
-}
-
-rule docbook-to-onehtml ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
- xslt $(target) : $(source) $(stylesheet) : $(properties) ;
-}
-
-rule docbook-to-htmldir ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ;
-}
-
-rule docbook-to-xhtmldir ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ;
-}
-
-rule docbook-to-htmlhelp ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ;
-}
-
-rule docbook-to-manpages ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man ;
-}
-
-rule docbook-to-fo ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
- xslt $(target) : $(source) $(stylesheet) : $(properties) ;
-}
-
-rule format-catalog-path ( path )
-{
- local result = $(path) ;
- if [ xsltproc.is-cygwin ]
- {
- if [ os.name ] = NT
- {
-            local drive = [ MATCH ^/(.):(.*)$ : $(path) ] ;
- result = /cygdrive/$(drive[1])$(drive[2]) ;
- }
- }
- else
- {
- if [ os.name ] = CYGWIN
- {
- local native-path = [ path.native $(path) ] ;
- result = [ path.make $(native-path:W) ] ;
- }
- }
- return [ regex.replace $(result) " " "%20" ] ;
-}
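
The rule above has one job: turn a configured directory into something the XML catalog (and xsltproc) will accept, rewriting /c:/... style paths to /cygdrive/c/... when a Cygwin xsltproc is driven from native Windows, converting to native form on Cygwin itself, and percent-encoding spaces in all cases. A small Python sketch of the drive-letter rewrite plus the space escaping (the Cygwin branch is omitted because it relies on bjam's path handling):

    import re

    def format_catalog_path(path):
        # '/c:/Program Files/docbook' -> '/cygdrive/c/Program%20Files/docbook'
        m = re.match(r"^/(.):(.*)$", path)
        if m:
            path = "/cygdrive/{0}{1}".format(m.group(1), m.group(2))
        return path.replace(" ", "%20")
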
-
-rule generate-xml-catalog ( target : sources * : properties * )
-{
- print.output $(target) ;
-
- # BoostBook DTD catalog entry
- local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
- if $(boostbook-dtd-dir)
- {
- boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
- }
-
- print.text
- "<?xml version=\"1.0\"?>"
- "<!DOCTYPE catalog "
- " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
- " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
- "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
- " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
- : true ;
-
- local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
- if ! $(docbook-xsl-dir)
- {
- ECHO "BoostBook warning: no DocBook XSL directory specified." ;
- ECHO " If you have the DocBook XSL stylesheets installed, please " ;
- ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
- ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
- ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
- ECHO " are available here: http://docbook.sourceforge.net/ " ;
- ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
- }
- else
- {
- docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
- print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
- }
-
- local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
- if ! $(docbook-dtd-dir)
- {
- ECHO "BoostBook warning: no DocBook DTD directory specified." ;
- ECHO " If you have the DocBook DTD installed, please set " ;
- ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
- ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
- ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
- ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
- ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
- }
- else
- {
- docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
- print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
- }
-
- print.text "</catalog>" ;
-}
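
The rule emits a standard OASIS XML catalog whose rewriteURI entries redirect the canonical BoostBook and DocBook URLs to the locally installed copies, so the XSLT processor never fetches anything over the network. A minimal Python sketch that writes an equivalent catalog; the directory arguments are placeholders, and unlike the real rule it unconditionally emits all three entries:

    CATALOG = (
        '<?xml version="1.0"?>\n'
        '<!DOCTYPE catalog\n'
        '  PUBLIC "-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN"\n'
        '  "http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd">\n'
        '<catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog">\n'
        '  <rewriteURI uriStartString="http://www.boost.org/tools/boostbook/dtd/"\n'
        '              rewritePrefix="file://{boostbook_dtd}/"/>\n'
        '  <rewriteURI uriStartString="http://docbook.sourceforge.net/release/xsl/current/"\n'
        '              rewritePrefix="file://{docbook_xsl}/"/>\n'
        '  <rewriteURI uriStartString="http://www.oasis-open.org/docbook/xml/4.2/"\n'
        '              rewritePrefix="file://{docbook_dtd}/"/>\n'
        '</catalog>\n'
    )

    def write_catalog(path, boostbook_dtd, docbook_xsl, docbook_dtd):
        with open(path, "w") as f:
            f.write(CATALOG.format(boostbook_dtd=boostbook_dtd,
                                   docbook_xsl=docbook_xsl,
                                   docbook_dtd=docbook_dtd))
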
-
-rule xml-catalog ( )
-{
- if ! $(.xml-catalog)
- {
- # The target is created as part of the root project. But ideally
- # it would be created as part of the boostbook project. This is not
-        # currently possible as such global projects don't inherit things like
- # the build directory.
-
- # Find the root project.
- local root-project = [ project.current ] ;
- root-project = [ $(root-project).project-module ] ;
- while
- [ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config &&
- [ project.attribute $(root-project) parent-module ] != project-config
- {
- root-project = [ project.attribute $(root-project) parent-module ] ;
- }
- .xml-catalog = [ new file-target boostbook_catalog
- : XML
- : [ project.target $(root-project) ]
- : [ new action : boostbook.generate-xml-catalog ]
- :
- ] ;
- .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
- .xml-catalog-file = $(.xml-catalog-file:J=/) ;
- }
- return $(.xml-catalog) $(.xml-catalog-file) ;
-}
-
-class boostbook-generator : generator
-{
- import feature ;
- import virtual-target ;
- import generators ;
- import boostbook ;
-
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- # Generate the catalog, but only once...
- local global-catalog = [ boostbook.xml-catalog ] ;
- local catalog = $(global-catalog[1]) ;
- local catalog-file = $(global-catalog[2]) ;
- local targets ;
-
- # Add the catalog to the property set
- property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
-
- local type = none ;
- local manifest ;
- local format = [ $(property-set).get <format> ] ;
- switch $(format)
- {
- case html :
- {
- type = HTMLDIR ;
- manifest = HTML.manifest ;
- }
- case xhtml :
- {
- type = XHTMLDIR ;
- manifest = HTML.manifest ;
- }
- case htmlhelp :
- {
- type = HTMLHELP ;
- manifest = HTML.manifest ;
- }
-
- case onehtml : type = HTML ;
-
- case man :
- {
- type = MANPAGES ;
- manifest = man.manifest ;
- }
-
- case docbook : type = DOCBOOK ;
- case fo : type = FO ;
- case pdf : type = PDF ;
- case ps : type = PS ;
- case tests : type = TESTS ;
- }
-
- if $(manifest)
- {
- # Create DOCBOOK file from BOOSTBOOK sources.
- local base-target = [ generators.construct $(project)
- : DOCBOOK : $(property-set) : $(sources) ] ;
- base-target = $(base-target[2]) ;
- $(base-target).depends $(catalog) ;
-
- # Generate HTML/PDF/PS from DOCBOOK.
- local target = [ generators.construct $(project) $(name)_$(manifest)
- : $(type)
- : [ $(property-set).add-raw
- <xsl:param>manifest=$(name)_$(manifest) ]
- : $(base-target) ] ;
- local name = [ $(property-set).get <name> ] ;
- name ?= $(format) ;
- $(target[2]).set-path $(name) ;
- $(target[2]).depends $(catalog) ;
-
- targets += $(target[2]) ;
- }
- else {
- local target = [ generators.construct $(project)
- : $(type) : $(property-set) : $(sources) ] ;
-
- if ! $(target)
- {
- errors.error "Cannot build documentation type '$(format)'" ;
- }
- else
- {
- $(target[2]).depends $(catalog) ;
- targets += $(target[2]) ;
- }
- }
-
- return $(targets) ;
- }
-}
-
-generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ;
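
For orientation, the run rule above boils down to a table: the requested <format> property selects the generated target type and, for the directory-producing formats, the manifest file handed to the stylesheets. Restated as a Python mapping (descriptive only, not part of the toolset):

    # <format> value -> (target type, manifest file or None)
    FORMATS = {
        "html":     ("HTMLDIR",  "HTML.manifest"),
        "xhtml":    ("XHTMLDIR", "HTML.manifest"),
        "htmlhelp": ("HTMLHELP", "HTML.manifest"),
        "onehtml":  ("HTML",     None),
        "man":      ("MANPAGES", "man.manifest"),
        "docbook":  ("DOCBOOK",  None),
        "fo":       ("FO",       None),
        "pdf":      ("PDF",      None),
        "ps":       ("PS",       None),
        "tests":    ("TESTS",    None),
    }
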
-
-# Creates a boostbook target.
-rule boostbook ( target-name : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-#############################################################################
-# Dependency scanners
-#############################################################################
-# XInclude scanner. Mostly stolen from c-scanner :)
-# Note that this assumes an "xi" prefix for XIncludes. This isn't always the
-# case for XML documents, but we'll assume it's true for anything we encounter.
-class xinclude-scanner : scanner
-{
- import virtual-target ;
- import path ;
- import scanner ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
- self.includes = $(includes) ;
- }
-
- rule pattern ( )
- {
- return "xi:include[ ]*href=\"([^\"]*)\"" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
-
- NOCARE $(matches) ;
- INCLUDES $(target) : $(matches) ;
- SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
-
- scanner.propagate $(__name__) : $(matches) : $(target) ;
- }
-}
-
-scanner.register xinclude-scanner : xsl:path ;
-type.set-scanner XML : xinclude-scanner ;
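
The scanner's whole trick is the regular expression returned by pattern(): every xi:include href in the XML becomes an implicit dependency, searched for next to the including file. A standalone Python sketch of the same extraction (the Boost.Build bookkeeping around INCLUDES/SEARCH is not reproduced):

    import re

    XINCLUDE_RE = re.compile(r'xi:include[ ]*href="([^"]*)"')

    def scan_xincludes(xml_path):
        # Return the href targets of all <xi:include> references in the file.
        with open(xml_path) as f:
            return XINCLUDE_RE.findall(f.read())
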
-
-rule boostbook-to-tests ( target : source : properties * )
-{
- lock-config ;
- local boost_root = [ modules.peek : BOOST_ROOT ] ;
- local native-path =
- [ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ;
- local stylesheet = $(native-path:S=.xsl) ;
- xslt $(target) : $(source) $(stylesheet)
- : $(properties) <xsl:param>boost.root=$(boost_root)
- ;
-}
-
-
diff --git a/jam-files/boost-build/tools/borland.jam b/jam-files/boost-build/tools/borland.jam
deleted file mode 100644
index 6e43ca93..00000000
--- a/jam-files/boost-build/tools/borland.jam
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright 2005 Dave Abrahams
-# Copyright 2003 Rene Rivera
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for Borland's command line compiler.
-
-import property ;
-import generators ;
-import os ;
-import toolset : flags ;
-import feature : get-values ;
-import type ;
-import common ;
-
-feature.extend toolset : borland ;
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters borland :
- version $(version) ] ;
-
- local command = [ common.get-invocation-command borland : bcc32.exe
- : $(command) ] ;
-
- common.handle-options borland : $(condition) : $(command) : $(options) ;
-
- if $(command)
- {
- command = [ common.get-absolute-tool-path $(command[-1]) ] ;
- }
- root = $(command:D) ;
-
- flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
- flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
- flags borland.link RUN_PATH $(condition) : $(root)/bin ;
- flags borland .root $(condition) : $(root)/bin/ ;
-}
-
-
-# A borland-specific target type
-type.register BORLAND.TDS : tds ;
-
-# Declare generators
-
-generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
-generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
-
-generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
-generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
-generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
-generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
-
-# Declare flags
-
-flags borland.compile OPTIONS <debug-symbols>on : -v ;
-flags borland.link OPTIONS <debug-symbols>on : -v ;
-
-flags borland.compile OPTIONS <optimization>off : -Od ;
-flags borland.compile OPTIONS <optimization>speed : -O2 ;
-flags borland.compile OPTIONS <optimization>space : -O1 ;
-
-if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
-{
- flags borland CFLAGS <inlining>off : -vi- ;
- flags borland CFLAGS <inlining>on : -vi -w-inl ;
- flags borland CFLAGS <inlining>full : -vi -w-inl ;
-}
-else
-{
- flags borland CFLAGS : -vi- ;
-}
-
-flags borland.compile OPTIONS <warnings>off : -w- ;
-flags borland.compile OPTIONS <warnings>all : -w ;
-flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
-
-
-# Deal with various runtime configs...
-
-# This should not be applied to DLLs.
-flags borland OPTIONS <user-interface>console : -tWC ;
-
-# -tWR sets -tW as well, so we turn it off here and then turn it
-# on again later if we need it:
-flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
-flags borland OPTIONS <user-interface>gui : -tW ;
-
-flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
-# Hmm.. not sure what's going on here.
-flags borland OPTIONS : -WM- ;
-flags borland OPTIONS <threading>multi : -tWM ;
-
-
-
-flags borland.compile OPTIONS <cxxflags> ;
-flags borland.compile DEFINES <define> ;
-flags borland.compile INCLUDES <include> ;
-
-flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
-
-#
-# For C++ compiles the following options are turned on by default:
-#
-#   -j5    stop after 5 errors
-#   -g255  allow an unlimited number of warnings
-#   -q     no banner
-#   -c     compile to object
-#   -P     treat source as C++ regardless of file extension
-#   -a8    8 byte alignment; this option is on in the IDE by default
-#          and affects binary compatibility.
-#
-
-# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
-
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
-}
-
-# For C, we don't pass -P flag
-actions compile.c
-{
- "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
-}
-
-
-# Declare flags and action for linking
-toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
-toolset.flags borland.link LIBRARY_PATH <library-path> ;
-toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
-toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
-toolset.flags borland.link LIBRARIES <library-file> ;
-
-flags borland.link OPTIONS <linkflags> ;
-flags borland.link OPTIONS <link>shared : -tWD ;
-
-flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
-flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
-
-
-
-# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
-# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
-# $(BCC_TOOL_PATH) to the path
-# The NEED_IMPLIB variable controls whether we need to invoke implib.
-
-flags borland.archive AROPTIONS <archiveflags> ;
-
-# Declare action for archives. We do not use a response file
-# since it is hard to get "+-" there.
-# The /P256 option increases the 'page' size -- with values that are too low,
-# tlib fails when building large applications.
-# CONSIDER: don't know what 'together' is for...
-actions updated together piecemeal archive
-{
- $(.set-path)$(.root:W)$(.old-path)
- tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
-}
-
-
-if [ os.name ] = CYGWIN
-{
- .set-path = "cmd /S /C set \"PATH=" ;
- .old-path = ";%PATH%\" \"&&\"" ;
-
-
- # Couldn't get TLIB to stop being confused about pathnames
- # containing dashes (it seemed to treat them as option separators
- # when passed through from bash), so we explicitly write the
- # command into a .bat file and execute that. TLIB is also finicky
- # about pathname style! Forward slashes, too, are treated as
- # options.
- actions updated together piecemeal archive
- {
- chdir $(<:D)
- echo +-$(>:BS) > $(<:BS).rsp
- $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
- }
-}
-else if [ os.name ] = NT
-{
- .set-path = "set \"PATH=" ;
- .old-path = ";%PATH%\"
- " ;
-}
-else
-{
- .set-path = "PATH=\"" ;
- .old-path = "\":$PATH
- export PATH
- " ;
-}
-
-RM = [ common.rm-command ] ;
-
-nl = "
-" ;
-
-actions link
-{
- $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
-}
-
-
-actions link.dll bind LIBRARIES RSP
-{
- $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
-}
-
-# It seems impossible to specify output file with directory when compiling
-# asm files using bcc32, so use tasm32 directly.
-# /ml makes all symbol names case-sensitive
-actions asm
-{
- $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
-}
-
diff --git a/jam-files/boost-build/tools/builtin.jam b/jam-files/boost-build/tools/builtin.jam
deleted file mode 100644
index 148e7308..00000000
--- a/jam-files/boost-build/tools/builtin.jam
+++ /dev/null
@@ -1,960 +0,0 @@
-# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
-# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
-# Copyright 2006 Juergen Hunold
-# Copyright 2005 Toon Knapen
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines standard features and rules.
-
-import alias ;
-import "class" : new ;
-import errors ;
-import feature ;
-import generators ;
-import numbers ;
-import os ;
-import path ;
-import print ;
-import project ;
-import property ;
-import regex ;
-import scanner ;
-import sequence ;
-import stage ;
-import symlink ;
-import toolset ;
-import type ;
-import targets ;
-import types/register ;
-import utility ;
-import virtual-target ;
-import message ;
-import convert ;
-
-# FIXME: the following generate module import is not needed here but removing it
-# too hastily will break client code (e.g. the main Boost library Jamroot file)
-# that forgot to import the generate module before calling the generate rule.
-import generate ;
-
-
-.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd
- openbsd osf qnx qnxnto sgi solaris unix unixware windows
- elf # Not actually an OS -- used for targeting bare metal where
- # object format is ELF. This catches both -elf and -eabi gcc
-        # targets as well as other compilers targeting ELF. It is not
-        # clear how often we need to key off ELF specifically as opposed
- # to other bare metal targets, but let's stick with gcc naming.
- ;
-
-# Feature used to determine which OS we're on. New <target-os> and <host-os>
-# features should be used instead.
-local os = [ modules.peek : OS ] ;
-feature.feature os : $(os) : propagated link-incompatible ;
-
-
-# Translates from bjam current OS to the os tags used in host-os and target-os,
-# i.e. returns the running host-os.
-#
-local rule default-host-os ( )
-{
- local host-os ;
- if [ os.name ] in $(.os-names:U)
- {
- host-os = [ os.name ] ;
- }
- else
- {
- switch [ os.name ]
- {
- case NT : host-os = windows ;
- case AS400 : host-os = unix ;
- case MINGW : host-os = windows ;
- case BSDI : host-os = bsd ;
- case COHERENT : host-os = unix ;
- case DRAGONFLYBSD : host-os = bsd ;
- case IRIX : host-os = sgi ;
- case MACOSX : host-os = darwin ;
- case KFREEBSD : host-os = freebsd ;
- case LINUX : host-os = linux ;
- case SUNOS :
- ECHO "SunOS is not a supported operating system." ;
- ECHO "We believe last version of SunOS was released in 1992, " ;
- ECHO "so if you get this message, something is very wrong with configuration logic. " ;
- ECHO "Please report this as a bug. " ;
- EXIT ;
- case * : host-os = unix ;
- }
- }
- return $(host-os:L) ;
-}
-
-
-# The two OS features define a known set of abstract OS names. The host-os is
-# the OS under which bjam is running. Even though this should really be a fixed
-# property we need to list all the values to prevent unknown value errors. Both
-# set the default value to the current OS to account for the default use case of
-# building on the target OS.
-feature.feature host-os : $(.os-names) ;
-feature.set-default host-os : [ default-host-os ] ;
-
-feature.feature target-os : $(.os-names) : propagated link-incompatible ;
-feature.set-default target-os : [ default-host-os ] ;
-
-
-feature.feature toolset : : implicit propagated symmetric ;
-feature.feature stdlib : native : propagated composite ;
-feature.feature link : shared static : propagated ;
-feature.feature runtime-link : shared static : propagated ;
-feature.feature runtime-debugging : on off : propagated ;
-feature.feature optimization : off speed space none : propagated ;
-feature.feature profiling : off on : propagated ;
-feature.feature inlining : off on full : propagated ;
-feature.feature threading : single multi : propagated ;
-feature.feature rtti : on off : propagated ;
-feature.feature exception-handling : on off : propagated ;
-
-# Whether there is support for asynchronous EH (e.g. catching SEGVs).
-feature.feature asynch-exceptions : off on : propagated ;
-
-# Whether all extern "C" functions are considered nothrow by default.
-feature.feature extern-c-nothrow : off on : propagated ;
-
-feature.feature debug-symbols : on off none : propagated ;
-# Controls whether the binary should be stripped -- that is, have
-# everything not necessary for running it removed. This option should
-# not be needed very often. Also, this feature will show up in the
-# target paths of everything, not just binaries. Should fix that
-# when implementing feature relevance.
-feature.feature strip : off on : propagated ;
-feature.feature define : : free ;
-feature.feature undef : : free ;
-feature.feature "include" : : free path ; #order-sensitive ;
-feature.feature cflags : : free ;
-feature.feature cxxflags : : free ;
-feature.feature fflags : : free ;
-feature.feature asmflags : : free ;
-feature.feature linkflags : : free ;
-feature.feature archiveflags : : free ;
-feature.feature version : : free ;
-
-# Generic, i.e. non-language specific, flags for tools.
-feature.feature flags : : free ;
-feature.feature location-prefix : : free ;
-
-
-# The following features are incidental since they have no effect on built
-# products. Not making them incidental will result in problems in corner cases,
-# e.g.:
-#
-# unit-test a : a.cpp : <use>b ;
-# lib b : a.cpp b ;
-#
-# Here, if <use> is not incidental, we would decide we have two targets for
-# a.obj with different properties and complain about it.
-#
-# Note that making a feature incidental does not mean it is ignored. It may be
-# ignored when creating a virtual target, but the rest of the build process
-# will use them.
-feature.feature use : : free dependency incidental ;
-feature.feature dependency : : free dependency incidental ;
-feature.feature implicit-dependency : : free dependency incidental ;
-
-feature.feature warnings :
- on # Enable default/"reasonable" warning level for the tool.
- all # Enable all possible warnings issued by the tool.
- off # Disable all warnings issued by the tool.
- : incidental propagated ;
-
-feature.feature warnings-as-errors :
- off # Do not fail the compilation if there are warnings.
- on # Fail the compilation if there are warnings.
- : incidental propagated ;
-
-# Feature that allows us to configure the maximal template instantiation depth
-# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers
-# actually support this configuration setting.
-#
-# Note that Boost Build currently does not allow defining features that take any
-# positive integral value as a parameter, which is what we need here, so we just
-# define some of the values here and leave it up to the user to extend this set
-# as needed using the feature.extend rule.
-#
-# TODO: This should be upgraded as soon as Boost Build adds support for custom
-# validated feature values or at least features allowing any positive integral
-# value. See the related Boost Build trac ticket #194.
-#
-feature.feature c++-template-depth
- :
- [ numbers.range 64 1024 : 64 ]
- [ numbers.range 20 1000 : 10 ]
- # Maximum template instantiation depth guaranteed for ANSI/ISO C++
- # conforming programs.
- 17
- :
- incidental optional propagated ;
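
In other words the feature accepts 17 plus every multiple of 64 from 64 to 1024 and every multiple of 10 from 20 to 1000. The value set produced by the two numbers.range calls can be written out in one line of Python (assuming, as used here, that numbers.range includes its upper bound):

    # Values accepted by <c++-template-depth>, as enumerated above.
    TEMPLATE_DEPTHS = sorted(set(range(64, 1025, 64)) |
                             set(range(20, 1001, 10)) | {17})
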
-
-feature.feature source : : free dependency incidental ;
-feature.feature library : : free dependency incidental ;
-feature.feature file : : free dependency incidental ;
-feature.feature find-shared-library : : free ; #order-sensitive ;
-feature.feature find-static-library : : free ; #order-sensitive ;
-feature.feature library-path : : free path ; #order-sensitive ;
-
-# Internal feature.
-feature.feature library-file : : free dependency ;
-
-feature.feature name : : free ;
-feature.feature tag : : free ;
-feature.feature search : : free path ; #order-sensitive ;
-feature.feature location : : free path ;
-feature.feature dll-path : : free path ;
-feature.feature hardcode-dll-paths : true false : incidental ;
-
-
-# An internal feature that holds the paths of all dependency shared libraries.
-# On Windows, it is needed so that we can add all those paths to PATH when
-# running applications. On Linux, it is needed to add proper -rpath-link command
-# line options.
-feature.feature xdll-path : : free path ;
-
-# Provides means to specify def-file for windows DLLs.
-feature.feature def-file : : free dependency ;
-
-feature.feature suppress-import-lib : false true : incidental ;
-
-# Internal feature used to store the name of a bjam action to call when building
-# a target.
-feature.feature action : : free ;
-
-# This feature is used to allow specific generators to run. For example, QT
-# tools can only be invoked when the QT library is used. In that case, <allow>qt
-# will be in the usage requirements of the library.
-feature.feature allow : : free ;
-
-# The addressing model to generate code for. Currently a limited set only
-# specifying the bit size of pointers.
-feature.feature address-model : 16 32 64 32_64 : propagated optional ;
-
-# Type of CPU architecture to compile for.
-feature.feature architecture :
- # x86 and x86-64
- x86
-
- # ia64
- ia64
-
- # Sparc
- sparc
-
- # RS/6000 & PowerPC
- power
-
- # MIPS/SGI
- mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
-
- # HP/PA-RISC
- parisc
-
- # Advanced RISC Machines
- arm
-
- # Combined architectures for platforms/toolsets that support building for
- # multiple architectures at once. "combined" would be the default multi-arch
- # for the toolset.
- combined
- combined-x86-power
-
- : propagated optional ;
-
-# The specific instruction set in an architecture to compile.
-feature.feature instruction-set :
- # x86 and x86-64
- native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
- pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe
- conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale
- yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
- athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2
-
- # ia64
- itanium itanium1 merced itanium2 mckinley
-
- # Sparc
- v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
- sparclet tsc701 v9 ultrasparc ultrasparc3
-
- # RS/6000 & PowerPC
- 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
- 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
- power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
-
- # MIPS
- 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
- r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
- vr5000 vr5400 vr5500
-
- # HP/PA-RISC
- 700 7100 7100lc 7200 7300 8000
-
- # Advanced RISC Machines
- armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
-
- : propagated optional ;
-
-# Used to select a specific variant of C++ ABI if the compiler supports several.
-feature.feature c++abi : : propagated optional ;
-
-feature.feature conditional : : incidental free ;
-
-# The value of 'no' prevents building of a target.
-feature.feature build : yes no : optional ;
-
-# Windows-specific features
-
-feature.feature user-interface : console gui wince native auto ;
-
-feature.feature variant : : implicit composite propagated symmetric ;
-
-
-# Declares a new variant.
-#
-# First determines explicit properties for this variant, by refining parents'
-# explicit properties with the passed explicit properties. The result is
-# remembered and will be used if this variant is used as parent.
-#
-# Second, determines the full property set for this variant by adding to the
-# explicit properties default values for all missing non-symmetric properties.
-#
-# Lastly, makes the appropriate value of the 'variant' property expand to the
-# full property set.
-#
-rule variant ( name # Name of the variant
- : parents-or-properties * # Specifies parent variants, if
- # 'explicit-properties' are given, and
- # explicit-properties or parents otherwise.
- : explicit-properties * # Explicit properties.
- )
-{
- local parents ;
- if ! $(explicit-properties)
- {
- if $(parents-or-properties[1]:G)
- {
- explicit-properties = $(parents-or-properties) ;
- }
- else
- {
- parents = $(parents-or-properties) ;
- }
- }
- else
- {
- parents = $(parents-or-properties) ;
- }
-
-    # Supporting multiple base variants would require checking for conflicts between them.
- if $(parents[2])
- {
- errors.error "multiple base variants are not yet supported" ;
- }
-
- local inherited ;
- # Add explicitly specified properties for parents.
- for local p in $(parents)
- {
- # TODO: This check may be made stricter.
- if ! [ feature.is-implicit-value $(p) ]
- {
- errors.error "Invalid base variant" $(p) ;
- }
-
- inherited += $(.explicit-properties.$(p)) ;
- }
- property.validate $(explicit-properties) ;
- explicit-properties = [ property.refine $(inherited)
- : $(explicit-properties) ] ;
-
- # Record explicitly specified properties for this variant. We do this after
- # inheriting parents' properties so they affect other variants derived from
- # this one.
- .explicit-properties.$(name) = $(explicit-properties) ;
-
- feature.extend variant : $(name) ;
- feature.compose <variant>$(name) : $(explicit-properties) ;
-}
-IMPORT $(__name__) : variant : : variant ;
-
-
-variant debug : <optimization>off <debug-symbols>on <inlining>off
- <runtime-debugging>on ;
-variant release : <optimization>speed <debug-symbols>off <inlining>full
- <runtime-debugging>off <define>NDEBUG ;
-variant profile : release : <profiling>on <debug-symbols>on ;
-
-
-class searched-lib-target : abstract-file-target
-{
- rule __init__ ( name
- : project
- : shared ?
- : search *
- : action
- )
- {
- abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
- : $(action) : ;
-
- self.shared = $(shared) ;
- self.search = $(search) ;
- }
-
- rule shared ( )
- {
- return $(self.shared) ;
- }
-
- rule search ( )
- {
- return $(self.search) ;
- }
-
- rule actualize-location ( target )
- {
- NOTFILE $(target) ;
- }
-
- rule path ( )
- {
- }
-}
-
-
-# The generator class for libraries (target type LIB). Depending on properties
-# it will request building of the appropriate specific library type:
-# SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
-#
-class lib-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
-        # The lib generator is composing, and can only be invoked with an
- # explicit name. This check is present in generator.run (and so in
- # builtin.linking-generator) but duplicated here to avoid doing extra
- # work.
- if $(name)
- {
- local properties = [ $(property-set).raw ] ;
- # Determine the needed target type.
- local actual-type ;
- # <source>files can be generated by <conditional>@rule feature
- # in which case we do not consider it a SEARCHED_LIB type.
- if ! <source> in $(properties:G) &&
- ( <search> in $(properties:G) || <name> in $(properties:G) )
- {
- actual-type = SEARCHED_LIB ;
- }
- else if <file> in $(properties:G)
- {
- actual-type = LIB ;
- }
- else if <link>shared in $(properties)
- {
- actual-type = SHARED_LIB ;
- }
- else
- {
- actual-type = STATIC_LIB ;
- }
- property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
- # Construct the target.
- return [ generators.construct $(project) $(name) : $(actual-type)
- : $(property-set) : $(sources) ] ;
- }
- }
-
- rule viable-source-types ( )
- {
- return * ;
- }
-}
-
-
-generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
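
The decision made in lib-generator.run can be summarised as a small dispatch over the requested properties. A hedged Python sketch of that decision (the property names follow the jam code above; this is illustration only, not the Boost.Build implementation):

    def lib_target_type(properties):
        # Pick the concrete library type the way lib-generator.run does.
        grists = {p.split(">")[0] + ">" for p in properties if p.startswith("<")}
        if "<source>" not in grists and ("<search>" in grists or "<name>" in grists):
            return "SEARCHED_LIB"   # system/prebuilt library located by name
        if "<file>" in grists:
            return "LIB"            # explicit library file
        if "<link>shared" in properties:
            return "SHARED_LIB"
        return "STATIC_LIB"

    # lib_target_type(["<name>png", "<link>shared"]) == "SEARCHED_LIB"
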
-
-
-# The implementation of the 'lib' rule. Beyond the standard syntax, the rule
-# also allows a simplified form: "lib a b c ;".
-#
-rule lib ( names + : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- if $(names[2])
- {
- if <name> in $(requirements:G)
- {
- errors.user-error "When several names are given to the 'lib' rule" :
- "it is not allowed to specify the <name> feature." ;
- }
- if $(sources)
- {
- errors.user-error "When several names are given to the 'lib' rule" :
- "it is not allowed to specify sources." ;
- }
- }
-
- # This is a circular module dependency so it must be imported here.
- import targets ;
-
- local project = [ project.current ] ;
- local result ;
-
- for local name in $(names)
- {
- local r = $(requirements) ;
- # Support " lib a ; " and " lib a b c ; " syntax.
- if ! $(sources) && ! <name> in $(requirements:G)
- && ! <file> in $(requirements:G)
- {
- r += <name>$(name) ;
- }
- result += [ targets.main-target-alternative
- [ new typed-target $(name) : $(project) : LIB
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(r) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
- }
- return $(result) ;
-}
-IMPORT $(__name__) : lib : : lib ;
-
-
-class searched-lib-generator : generator
-{
- import property-set ;
-
- rule __init__ ( )
- {
- # The requirements cause the generators to be tried *only* when we're
- # building a lib target with a 'search' feature. This seems ugly --- all
- # we want is to make sure searched-lib-generator is not invoked deep
- # inside transformation search to produce intermediate targets.
- generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if $(name)
- {
- # If 'name' is empty, it means we have not been called to build a
- # top-level target. In this case, we just fail immediately, because
- # searched-lib-generator cannot be used to produce intermediate
- # targets.
-
- local properties = [ $(property-set).raw ] ;
- local shared ;
- if <link>shared in $(properties)
- {
- shared = true ;
- }
-
- local search = [ feature.get-values <search> : $(properties) ] ;
-
- local a = [ new null-action $(property-set) ] ;
- local lib-name = [ feature.get-values <name> : $(properties) ] ;
- lib-name ?= $(name) ;
- local t = [ new searched-lib-target $(lib-name) : $(project)
- : $(shared) : $(search) : $(a) ] ;
- # We return sources for a simple reason. If there is
- # lib png : z : <name>png ;
- # the 'z' target should be returned, so that apps linking to 'png'
- # will link to 'z', too.
- return [ property-set.create <xdll-path>$(search) ]
- [ virtual-target.register $(t) ] $(sources) ;
- }
- }
-}
-
-generators.register [ new searched-lib-generator ] ;
-
-
-class prebuilt-lib-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- local f = [ $(property-set).get <file> ] ;
- return $(f) $(sources) ;
- }
-}
-
-generators.register
- [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
-
-generators.override builtin.prebuilt : builtin.lib-generator ;
-
-class preprocessed-target-class : basic-target
-{
- import generators ;
- rule construct ( name : sources * : property-set )
- {
- local result = [ generators.construct [ project ]
- $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
- if ! $(result)
- {
- result = [ generators.construct [ project ]
- $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
- }
- if ! $(result)
- {
- local s ;
- for x in $(sources)
- {
- s += [ $(x).name ] ;
- }
- local p = [ project ] ;
- errors.user-error
- "In project" [ $(p).name ] :
- "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
- }
- return $(result) ;
- }
-}
-
-rule preprocessed ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
- return [ targets.main-target-alternative
- [ new preprocessed-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(r) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-}
-
-IMPORT $(__name__) : preprocessed : : preprocessed ;
-
-class compile-action : action
-{
- import sequence ;
-
- rule __init__ ( targets * : sources * : action-name : properties * )
- {
- action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
- }
-
-    # For all virtual targets in the same dependency graph as self, i.e. which
- # belong to the same main target, add their directories to the include path.
- #
- rule adjust-properties ( property-set )
- {
- local s = [ $(self.targets[1]).creating-subvariant ] ;
- return [ $(property-set).add-raw
- [ $(s).implicit-includes "include" : H ] ] ;
- }
-}
-
-
-# Declare a special compiler generator. The only thing it does is changing the
-# type used to represent 'action' in the constructed dependency graph to
-# 'compile-action'. That class in turn adds additional include paths to handle
-# cases when a source file includes headers which are generated themselves.
-#
-class C-compiling-generator : generator
-{
- rule __init__ ( id : source-types + : target-types + : requirements *
- : optional-properties * )
- {
- generator.__init__ $(id) : $(source-types) : $(target-types) :
- $(requirements) : $(optional-properties) ;
- }
-
- rule action-class ( )
- {
- return compile-action ;
- }
-}
-
-
-rule register-c-compiler ( id : source-types + : target-types + : requirements *
- : optional-properties * )
-{
- generators.register [ new C-compiling-generator $(id) : $(source-types) :
- $(target-types) : $(requirements) : $(optional-properties) ] ;
-}
-
-# FIXME: this is ugly, should find a better way (we would like client code to
-# register all generators as "generators.some-rule" instead of
-# "some-module.some-rule".)
-#
-IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
-
-
-# The generator class for handling EXE and SHARED_LIB creation.
-#
-class linking-generator : generator
-{
- import path ;
- import project ;
- import property-set ;
- import type ;
-
- rule __init__ ( id
- composing ? : # The generator will be composing if a non-empty
- # string is passed or the parameter is not given. To
- # make the generator non-composing, pass an empty
- # string ("").
- source-types + :
- target-types + :
- requirements * )
- {
- composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types)
- : $(target-types) : $(requirements) ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- sources += [ $(property-set).get <library> ] ;
-
- # Add <library-path> properties for all searched libraries.
- local extra ;
- for local s in $(sources)
- {
- if [ $(s).type ] = SEARCHED_LIB
- {
- local search = [ $(s).search ] ;
- extra += <library-path>$(search) ;
- }
- }
-
-        # It is possible that sources include shared libraries that did not come
-        # from 'lib' targets, e.g. .so files specified as sources. In this case
-        # we have to add extra dll-path properties and propagate extra xdll-path
-        # properties so that applications linking to us will get xdll-path to
-        # those libraries.
- local extra-xdll-paths ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
- {
- # Unfortunately, we do not have a good way to find the path to a
- # file, so use this nasty approach.
- #
- # TODO: This needs to be done better. One thing that is really
- # broken with this is that it does not work correctly with
- # projects having multiple source locations.
- local p = [ $(s).project ] ;
- local location = [ path.root [ $(s).name ]
- [ $(p).get source-location ] ] ;
- extra-xdll-paths += [ path.parent $(location) ] ;
- }
- }
-
- # Hardcode DLL paths only when linking executables.
- # Pros: do not need to relink libraries when installing.
- # Cons: "standalone" libraries (plugins, python extensions) can not
- # hardcode paths to dependent libraries.
- if [ $(property-set).get <hardcode-dll-paths> ] = true
- && [ type.is-derived $(self.target-types[1]) EXE ]
- {
- local xdll-path = [ $(property-set).get <xdll-path> ] ;
- extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
- }
-
- if $(extra)
- {
- property-set = [ $(property-set).add-raw $(extra) ] ;
- }
-
- local result = [ generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
- local ur ;
- if $(result)
- {
- ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
- ur = [ $(ur).add
- [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
- }
- return $(ur) $(result) ;
- }
-
- rule extra-usage-requirements ( created-targets * : property-set )
- {
- local result = [ property-set.empty ] ;
- local extra ;
-
-        # Add appropriate <xdll-path> usage requirements.
- local raw = [ $(property-set).raw ] ;
- if <link>shared in $(raw)
- {
- local paths ;
- local pwd = [ path.pwd ] ;
- for local t in $(created-targets)
- {
- if [ type.is-derived [ $(t).type ] SHARED_LIB ]
- {
- paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
- }
- }
- extra += $(paths:G=<xdll-path>) ;
- }
-
-        # We need to pass <xdll-path> features that we've got from sources,
-        # because if a shared library is built, an exe using it needs to know the
-        # paths to other shared libraries this one depends on in order to be able
-        # to find them all at runtime.
-
-        # Just pass all features in property-set; it is theoretically possible
-        # that we will propagate <xdll-path> features explicitly specified by
-        # the user, but then the user is to blame for using an internal feature.
- local values = [ $(property-set).get <xdll-path> ] ;
- extra += $(values:G=<xdll-path>) ;
-
- if $(extra)
- {
- result = [ property-set.create $(extra) ] ;
- }
- return $(result) ;
- }
-
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local sources2 ; # Sources to pass to inherited rule.
- local properties2 ; # Properties to pass to inherited rule.
- local libraries ; # Library sources.
-
- # Searched libraries are not passed as arguments to the linker but via
- # some option. So, we pass them to the action using a property.
- properties2 = [ $(property-set).raw ] ;
- local fsa ;
- local fst ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
- {
- local name = [ $(s).name ] ;
- if [ $(s).shared ]
- {
- fsa += $(name) ;
- }
- else
- {
- fst += $(name) ;
- }
- }
- else
- {
- sources2 += $(s) ;
- }
- }
- properties2 += <find-shared-library>$(fsa:J=&&)
- <find-static-library>$(fst:J=&&) ;
-
- return [ generator.generated-targets $(sources2)
- : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
- }
-}
-
-
-rule register-linker ( id composing ? : source-types + : target-types +
- : requirements * )
-{
- generators.register [ new linking-generator $(id) $(composing)
- : $(source-types) : $(target-types) : $(requirements) ] ;
-}
-
-
-# The generator class for handling STATIC_LIB creation.
-#
-class archive-generator : generator
-{
- import property-set ;
-
- rule __init__ ( id composing ? : source-types + : target-types +
- : requirements * )
- {
- composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types)
- : $(target-types) : $(requirements) ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- sources += [ $(property-set).get <library> ] ;
-
- local result = [ generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
-        # For static linking, if we get a library in source, we cannot directly
-        # link to it, so we need to cause our dependants to link to that
-        # library. There are two approaches:
- # - adding the library to the list of returned targets.
- # - using the <library> usage requirements.
- # The problem with the first is:
- #
- # lib a1 : : <file>liba1.a ;
- # lib a2 : a2.cpp a1 : <link>static ;
- # install dist : a2 ;
- #
- # here we will try to install 'a1', even though it is not necessary in
- # the general case. With the second approach, even indirect dependants
- # will link to the library, but it should not cause any harm. So, return
- # all LIB sources together with created targets, so that dependants link
- # to them.
- local usage-requirements ;
- if [ $(property-set).get <link> ] = static
- {
- for local t in $(sources)
- {
- if [ type.is-derived [ $(t).type ] LIB ]
- {
- usage-requirements += <library>$(t) ;
- }
- }
- }
-
- usage-requirements = [ property-set.create $(usage-requirements) ] ;
-
- return $(usage-requirements) $(result) ;
- }
-}
-
-
-rule register-archiver ( id composing ? : source-types + : target-types +
- : requirements * )
-{
- generators.register [ new archive-generator $(id) $(composing)
- : $(source-types) : $(target-types) : $(requirements) ] ;
-}
-
-
-# Generator that accepts everything and produces nothing. Useful as a general
-# fallback for toolset-specific actions like PCH generation.
-#
-class dummy-generator : generator
-{
- import property-set ;
-
- rule run ( project name ? : property-set : sources + )
- {
- return [ property-set.empty ] ;
- }
-}
-
-IMPORT $(__name__) : register-linker register-archiver
- : : generators.register-linker generators.register-archiver ;
diff --git a/jam-files/boost-build/tools/builtin.py b/jam-files/boost-build/tools/builtin.py
deleted file mode 100644
index 31a7bffe..00000000
--- a/jam-files/boost-build/tools/builtin.py
+++ /dev/null
@@ -1,718 +0,0 @@
-# Status: minor updates by Steven Watanabe to make gcc work
-#
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-""" Defines standard features and rules.
-"""
-
-import b2.build.targets as targets
-
-import sys
-from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
-from b2.util.utility import *
-from b2.util import path, regex, bjam_signature
-import b2.tools.types
-from b2.manager import get_manager
-
-
-# Records explicit properties for a variant.
-# The key is the variant name.
-__variant_explicit_properties = {}
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __variant_explicit_properties
-
- __variant_explicit_properties = {}
-
-@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"]))
-def variant (name, parents_or_properties, explicit_properties = []):
- """ Declares a new variant.
- First determines explicit properties for this variant, by
- refining parents' explicit properties with the passed explicit
- properties. The result is remembered and will be used if
- this variant is used as parent.
-
- Second, determines the full property set for this variant by
- adding to the explicit properties default values for all properties
-        which are neither present nor symmetric.
-
- Lastly, makes appropriate value of 'variant' property expand
- to the full property set.
- name: Name of the variant
- parents_or_properties: Specifies parent variants, if
- 'explicit_properties' are given,
- and explicit_properties otherwise.
- explicit_properties: Explicit properties.
- """
- parents = []
- if not explicit_properties:
- explicit_properties = parents_or_properties
- else:
- parents = parents_or_properties
-
- inherited = property_set.empty()
- if parents:
-
-        # If we allowed multiple parents, we would have to check for conflicts
-        # between base variants, and there has been no demand, so we do not
-        # bother.
- if len (parents) > 1:
- raise BaseException ("Multiple base variants are not yet supported")
-
- p = parents[0]
- # TODO: the check may be stricter
- if not feature.is_implicit_value (p):
-            raise BaseException ("Invalid base variant '%s'" % p)
-
- inherited = __variant_explicit_properties[p]
-
- explicit_properties = property_set.create_with_validation(explicit_properties)
- explicit_properties = inherited.refine(explicit_properties)
-
-    # Record explicitly specified properties for this variant.
-    # We do this after inheriting parents' properties, so that
-    # they affect other variants derived from this one.
- __variant_explicit_properties[name] = explicit_properties
-
- feature.extend('variant', [name])
- feature.compose ("<variant>" + name, explicit_properties.all())
-
-__os_names = """
- amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
- openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
- vms windows
-""".split()
-
-# Translates from bjam current OS to the os tags used in host-os and target-os,
-# i.e. returns the running host-os.
-#
-def default_host_os():
- host_os = os_name()
- if host_os not in (x.upper() for x in __os_names):
- if host_os == 'NT': host_os = 'windows'
- elif host_os == 'AS400': host_os = 'unix'
- elif host_os == 'MINGW': host_os = 'windows'
- elif host_os == 'BSDI': host_os = 'bsd'
- elif host_os == 'COHERENT': host_os = 'unix'
- elif host_os == 'DRAGONFLYBSD': host_os = 'bsd'
- elif host_os == 'IRIX': host_os = 'sgi'
- elif host_os == 'MACOSX': host_os = 'darwin'
- elif host_os == 'KFREEBSD': host_os = 'freebsd'
- elif host_os == 'LINUX': host_os = 'linux'
- else: host_os = 'unix'
- return host_os.lower()
-
-def register_globals ():
- """ Registers all features and variants declared by this module.
- """
-
- # This feature is used to determine which OS we're on.
- # In future, this may become <target-os> and <host-os>
- # TODO: check this. Compatibility with bjam names? Subfeature for version?
- os = sys.platform
- feature.feature ('os', [os], ['propagated', 'link-incompatible'])
-
-
- # The two OS features define a known set of abstract OS names. The host-os is
- # the OS under which bjam is running. Even though this should really be a fixed
- # property we need to list all the values to prevent unknown value errors. Both
- # set the default value to the current OS to account for the default use case of
- # building on the target OS.
- feature.feature('host-os', __os_names)
- feature.set_default('host-os', default_host_os())
-
- feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
- feature.set_default('target-os', default_host_os())
-
- feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
-
- feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
-
- feature.feature ('link', ['shared', 'static'], ['propagated'])
- feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
- feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
-
-
- feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
- feature.feature ('profiling', ['off', 'on'], ['propagated'])
- feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])
-
- feature.feature ('threading', ['single', 'multi'], ['propagated'])
- feature.feature ('rtti', ['on', 'off'], ['propagated'])
- feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
- feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
- feature.feature ('define', [], ['free'])
- feature.feature ('include', [], ['free', 'path']) #order-sensitive
- feature.feature ('cflags', [], ['free'])
- feature.feature ('cxxflags', [], ['free'])
- feature.feature ('linkflags', [], ['free'])
- feature.feature ('archiveflags', [], ['free'])
- feature.feature ('version', [], ['free'])
-
- feature.feature ('location-prefix', [], ['free'])
-
- feature.feature ('action', [], ['free'])
-
-
- # The following features are incidental, since
- # in themselves they have no effect on build products.
- # Not making them incidental will result in problems in corner
- # cases, for example:
- #
- # unit-test a : a.cpp : <use>b ;
- # lib b : a.cpp b ;
- #
- # Here, if <use> is not incidental, we'll decide we have two
- # targets for a.obj with different properties, and will complain.
- #
- # Note that making a feature incidental does not mean it is ignored. It may
- # be ignored when creating the virtual target, but the rest of the build
- # process will still use it.
- feature.feature ('use', [], ['free', 'dependency', 'incidental'])
- feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
- feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])
-
- feature.feature('warnings', [
- 'on', # Enable default/"reasonable" warning level for the tool.
- 'all', # Enable all possible warnings issued by the tool.
- 'off'], # Disable all warnings issued by the tool.
- ['incidental', 'propagated'])
-
- feature.feature('warnings-as-errors', [
- 'off', # Do not fail the compilation if there are warnings.
- 'on'], # Fail the compilation if there are warnings.
- ['incidental', 'propagated'])
-
- feature.feature ('source', [], ['free', 'dependency', 'incidental'])
- feature.feature ('library', [], ['free', 'dependency', 'incidental'])
- feature.feature ('file', [], ['free', 'dependency', 'incidental'])
- feature.feature ('find-shared-library', [], ['free']) #order-sensitive ;
- feature.feature ('find-static-library', [], ['free']) #order-sensitive ;
- feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
- # Internal feature.
- feature.feature ('library-file', [], ['free', 'dependency'])
-
- feature.feature ('name', [], ['free'])
- feature.feature ('tag', [], ['free'])
- feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
- feature.feature ('location', [], ['free', 'path'])
-
- feature.feature ('dll-path', [], ['free', 'path'])
- feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])
-
-
- # This is an internal feature which holds the paths of all dependency
- # dynamic libraries. On Windows, it's needed so that we can add all
- # those paths to PATH when running applications.
- # On Linux, it's needed to add proper -rpath-link command line options.
- feature.feature ('xdll-path', [], ['free', 'path'])
-
- # Provides a means to specify a .def file for Windows DLLs.
- feature.feature ('def-file', [], ['free', 'dependency'])
-
- # This feature is used to allow specific generators to run.
- # For example, Qt tools can only be invoked when the Qt library
- # is used. In that case, <allow>qt will be in the usage requirements
- # of the library.
- feature.feature ('allow', [], ['free'])
-
- # The addressing model to generate code for. Currently a limited set only
- # specifying the bit size of pointers.
- feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
-
- # Type of CPU architecture to compile for.
- feature.feature('architecture', [
- # x86 and x86-64
- 'x86',
-
- # ia64
- 'ia64',
-
- # Sparc
- 'sparc',
-
- # RS/6000 & PowerPC
- 'power',
-
- # MIPS/SGI
- 'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',
-
- # HP/PA-RISC
- 'parisc',
-
- # Advanced RISC Machines
- 'arm',
-
- # Combined architectures for platforms/toolsets that support building for
- # multiple architectures at once. "combined" would be the default multi-arch
- # for the toolset.
- 'combined',
- 'combined-x86-power'],
-
- ['propagated', 'optional'])
-
- # The specific instruction set in an architecture to compile.
- feature.feature('instruction-set', [
- # x86 and x86-64
- 'i386', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
- 'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'conroe', 'conroe-xe',
- 'conroe-l', 'allendale', 'mermon', 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
- 'yorksfield', 'nehalem', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
- 'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'winchip-c6', 'winchip2', 'c3', 'c3-2',
-
- # ia64
- 'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',
-
- # Sparc
- 'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
- 'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',
-
- # RS/6000 & PowerPC
- '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
- '603', '603e', '604', '604e', '620', '630', '740', '7400',
- '7450', '750', '801', '821', '823', '860', '970', '8540',
- 'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
- 'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
- 'rios1', 'rsc', 'rios2', 'rs64a',
-
- # MIPS
- '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
- 'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
- 'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
- 'vr4111', 'vr4120', 'vr4130', 'vr4300',
- 'vr5000', 'vr5400', 'vr5500',
-
- # HP/PA-RISC
- '700', '7100', '7100lc', '7200', '7300', '8000',
-
- # Advanced RISC Machines
- 'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
- 'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],
-
- ['propagated', 'optional'])
-
- feature.feature('conditional', [], ['incidental', 'free'])
-
- # The value of 'no' prevents building of a target.
- feature.feature('build', ['yes', 'no'], ['optional'])
-
- # Windows-specific features
- feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
- feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
-
-
- variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
- variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
- '<runtime-debugging>off', '<define>NDEBUG'])
- variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
-
- type.register ('H', ['h'])
- type.register ('HPP', ['hpp'], 'H')
- type.register ('C', ['c'])
-
-
-reset ()
-register_globals ()
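The feature and variant declarations above form the module's registration API. As a rough sketch of how further declarations could reuse the same calls, assuming the code runs in the context of this module (the 'sanitizer' feature and 'debug-asan' variant below are hypothetical examples, not part of the module):

# Hypothetical follow-on declarations, reusing feature.feature(name, values,
# attributes) and variant(name, parents, explicit_properties) as defined above.
feature.feature('sanitizer', ['off', 'address'], ['propagated', 'optional'])
variant('debug-asan', ['debug'], ['<sanitizer>address', '<inlining>off'])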
-
-class SearchedLibTarget (virtual_target.AbstractFileTarget):
- def __init__ (self, name, project, shared, real_name, search, action):
- virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
-
- self.shared_ = shared
- self.real_name_ = real_name
- if not self.real_name_:
- self.real_name_ = name
- self.search_ = search
-
- def shared (self):
- return self.shared_
-
- def real_name (self):
- return self.real_name_
-
- def search (self):
- return self.search_
-
- def actualize_location (self, target):
- bjam.call("NOTFILE", target)
-
- def path (self):
- #FIXME: several functions rely on this not being None
- return ""
-
-
-class CScanner (scanner.Scanner):
- def __init__ (self, includes):
- scanner.Scanner.__init__ (self)
-
- self.includes_ = includes
-
- def pattern (self):
- return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
-
- def process (self, target, matches, binding):
-
- angle = regex.transform (matches, "<(.*)>")
- quoted = regex.transform (matches, '"(.*)"')
-
- g = str(id(self))
- b = os.path.normpath(os.path.dirname(binding[0]))
-
- # Attach the binding of the including file to the included targets.
- # When a target is created directly from a virtual target
- # this extra information is unnecessary. But in other
- # cases, it allows us to distinguish between two headers of the
- # same name included from different places.
- # We don't need this extra information for angle includes,
- # since they should not depend on the including file (we can't
- # get a literal "." in the include path).
- g2 = g + "#" + b
-
- g = "<" + g + ">"
- g2 = "<" + g2 + ">"
- angle = [g + x for x in angle]
- quoted = [g2 + x for x in quoted]
-
- all = angle + quoted
- bjam.call("mark-included", target, all)
-
- engine = get_manager().engine()
- engine.set_target_variable(angle, "SEARCH", get_value(self.includes_))
- engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_))
-
- # Just propagate the current scanner to the includes, in the hope
- # that the includes do not change scanners.
- get_manager().scanners().propagate(self, angle + quoted)
-
-scanner.register (CScanner, 'include')
-type.set_scanner ('CPP', CScanner)
-type.set_scanner ('C', CScanner)
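The scanner registered above relies entirely on the regular expression returned by CScanner.pattern to separate angle-bracket includes from quoted ones. A self-contained sketch of what that pattern captures, runnable outside the build system:

import re

# Same pattern as CScanner.pattern() above; group 2 is the angle-bracket
# form, group 3 the quoted form.
INCLUDE_RE = re.compile(r'#[ \t]*include[ ]*(<(.*)>|"(.*)")')

for line in ('#include <vector>', '#include "config.h"'):
    m = INCLUDE_RE.search(line)
    angle, quoted = m.group(2), m.group(3)
    # '#include <vector>'   -> angle='vector',   quoted=None
    # '#include "config.h"' -> angle=None,       quoted='config.h'
    print(line, '->', angle or quoted)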
-
-# Ported to trunk@47077
-class LibGenerator (generators.Generator):
- """ The generator class for libraries (target type LIB). Depending on properties it will
- request building of the appropriate specific type -- SHARED_LIB, STATIC_LIB or
- SEARCHED_LIB.
- """
-
- def __init__(self, id = 'LibGenerator', composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
- generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
-
- def run(self, project, name, prop_set, sources):
-
- # The lib generator is composing, and can only be invoked with an
- # explicit name. This check is present in generator.run (and so in
- # builtin.LinkingGenerator), but we duplicate it here to avoid doing
- # extra work.
- if name:
- properties = prop_set.raw()
- # Determine the needed target type
- actual_type = None
- properties_grist = get_grist(properties)
- if '<source>' not in properties_grist and \
- ('<search>' in properties_grist or '<name>' in properties_grist):
- actual_type = 'SEARCHED_LIB'
- elif '<file>' in properties_grist:
- # A prebuilt library file was specified; keep the generic LIB type.
- actual_type = 'LIB'
- elif '<link>shared' in properties:
- actual_type = 'SHARED_LIB'
- else:
- actual_type = 'STATIC_LIB'
-
- prop_set = prop_set.add_raw(['<main-target-type>LIB'])
-
- # Construct the target.
- return generators.construct(project, name, actual_type, prop_set, sources)
-
- def viable_source_types(self):
- return ['*']
-
-generators.register(LibGenerator())
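The branching in LibGenerator.run above decides which concrete library type to build from the requested properties. A standalone sketch of the same decision over raw property strings (pick_lib_type is our own name, not part of the module):

def pick_lib_type(properties):
    """Mirror LibGenerator.run: map raw property strings such as
    '<name>png' or '<link>shared' to a concrete library target type."""
    grist = set(p.split('>', 1)[0] + '>' for p in properties if p.startswith('<'))
    if '<source>' not in grist and ('<search>' in grist or '<name>' in grist):
        return 'SEARCHED_LIB'
    if '<file>' in grist:
        return 'LIB'          # a prebuilt file keeps the generic type
    if '<link>shared' in properties:
        return 'SHARED_LIB'
    return 'STATIC_LIB'

assert pick_lib_type(['<name>png']) == 'SEARCHED_LIB'
assert pick_lib_type(['<file>libz.a']) == 'LIB'
assert pick_lib_type(['<link>shared']) == 'SHARED_LIB'
assert pick_lib_type(['<link>static']) == 'STATIC_LIB'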
-
-def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
- """The implementation of the 'lib' rule. Beyond the standard syntax, this rule
- allows a simplified form: 'lib a b c ;'."""
-
- if len(names) > 1:
- if any(r.startswith('<name>') for r in requirements):
- get_manager().errors()("When several names are given to the 'lib' rule\n" +
- "it is not allowed to specify the <name> feature.")
-
- if sources:
- get_manager().errors()("When several names are given to the 'lib' rule\n" +
- "it is not allowed to specify sources.")
-
- project = get_manager().projects().current()
- result = []
-
- for name in names:
- r = requirements[:]
-
- # Support " lib a ; " and " lib a b c ; " syntax.
- if not sources and not any(r.startswith("<name>") for r in requirements) \
- and not any(r.startswith("<file") for r in requirements):
- r.append("<name>" + name)
-
- result.append(targets.create_typed_metatarget(name, "LIB", sources,
- r,
- default_build,
- usage_requirements))
- return result
-
-get_manager().projects().add_rule("lib", lib)
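The only non-obvious step in the rule above is how each name in the simplified 'lib a b c ;' form picks up its own <name> requirement. A small sketch of that expansion in isolation (expand_lib_names is a hypothetical helper, not part of the module):

def expand_lib_names(names, sources=(), requirements=()):
    """Per-name requirement expansion performed by the 'lib' rule: when no
    sources and no explicit <name>/<file> requirement are given, each listed
    name becomes its own searched library."""
    expanded = []
    for name in names:
        r = list(requirements)
        if not sources and not any(x.startswith('<name>') or x.startswith('<file')
                                   for x in requirements):
            r.append('<name>' + name)
        expanded.append((name, r))
    return expanded

# expand_lib_names(['z', 'png']) == [('z', ['<name>z']), ('png', ['<name>png'])]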
-
-
-# Updated to trunk@47077
-class SearchedLibGenerator (generators.Generator):
- def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
- # TODO: the comment below looks strange. There are no requirements!
- # The requirements cause the generators to be tried *only* when we're building
- # a lib target and there's a 'search' feature. This seems ugly --- all we want
- # is to make sure SearchedLibGenerator is not invoked deep in the transformation
- # search.
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run(self, project, name, prop_set, sources):
-
- # If name is empty, we are not being called from the top level.
- # In that case, fail immediately, because SearchedLibGenerator
- # cannot be used to produce intermediate targets.
- if not name:
- return None
-
- properties = prop_set.raw ()
- shared = '<link>shared' in properties
-
- a = virtual_target.NullAction (project.manager(), prop_set)
-
- real_name = feature.get_values ('<name>', properties)
- if real_name:
- real_name = real_name[0]
- else:
- real_name = name
- search = feature.get_values('<search>', properties)
- usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
- t = SearchedLibTarget(name, project, shared, real_name, search, a)
-
- # We return sources for a simple reason. If there's
- # lib png : z : <name>png ;
- # the 'z' target should be returned, so that apps linking to
- # 'png' will link to 'z', too.
- return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
-
-generators.register (SearchedLibGenerator ())
-
-### class prebuilt-lib-generator : generator
-### {
-### rule __init__ ( * : * )
-### {
-### generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-### }
-###
-### rule run ( project name ? : prop_set : sources * : multiple ? )
-### {
-### local f = [ $(prop_set).get <file> ] ;
-### return $(f) $(sources) ;
-### }
-### }
-###
-### generators.register
-### [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
-
-
-class CompileAction (virtual_target.Action):
- def __init__ (self, manager, sources, action_name, prop_set):
- virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
-
- def adjust_properties (self, prop_set):
- """ For all virtual targets for the same dependency graph as self,
- i.e. which belong to the same main target, add their directories
- to include path.
- """
- s = self.targets () [0].creating_subvariant ()
-
- return prop_set.add_raw (s.implicit_includes ('include', 'H'))
-
-class CCompilingGenerator (generators.Generator):
- """ Declare a special compiler generator.
- The only thing it does is change the type used to represent
- 'action' in the constructed dependency graph to 'CompileAction'.
- That class in turn adds additional include paths to handle the case
- when a source file includes headers which are themselves generated.
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def action_class (self):
- return CompileAction
-
-def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []):
- g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties)
- return generators.register (g)
-
-
-class LinkingGenerator (generators.Generator):
- """ The generator class for handling EXE and SHARED_LIB creation.
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run (self, project, name, prop_set, sources):
-
- lib_sources = prop_set.get('<library>')
- sources.extend(lib_sources)
-
- # Add <library-path> properties for all searched libraries
- extra = []
- for s in sources:
- if s.type () == 'SEARCHED_LIB':
- search = s.search()
- extra.extend(property.Property('<library-path>', sp) for sp in search)
-
- orig_xdll_path = []
-
- if prop_set.get('<hardcode-dll-paths>') == ['true'] \
- and type.is_derived(self.target_types_ [0], 'EXE'):
- xdll_path = prop_set.get('<xdll-path>')
- orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ]
- # It's possible that we have libraries in sources which did not come
- # from a 'lib' target. For example, libraries which are specified
- # just as filenames in sources. We don't have xdll-path properties
- # for such targets, but still need to add proper dll-path properties.
- for s in sources:
- if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
- # Unfortunately, we don't have a good way to find the path
- # to a file, so use this nasty approach.
- p = s.project()
- location = path.root(s.name(), p.get('source-location'))
- xdll_path.append(path.parent(location))
-
- extra.extend(property.Property('<dll-path>', sp) for sp in xdll_path)
-
- if extra:
- prop_set = prop_set.add_raw (extra)
-
- result = generators.Generator.run(self, project, name, prop_set, sources)
-
- if result:
- ur = self.extra_usage_requirements(result, prop_set)
- ur = ur.add(property_set.create(orig_xdll_path))
- else:
- return None
-
- return(ur, result)
-
- def extra_usage_requirements (self, created_targets, prop_set):
-
- result = property_set.empty ()
- extra = []
-
- # Add appropriate <xdll-path> usage requirements.
- raw = prop_set.raw ()
- if '<link>shared' in raw:
- paths = []
-
- # TODO: is it safe to use the current directory? I think we should use
- # another mechanism to allow this to be run from anywhere.
- pwd = os.getcwd()
-
- for t in created_targets:
- if type.is_derived(t.type(), 'SHARED_LIB'):
- paths.append(path.root(path.make(t.path()), pwd))
-
- extra += replace_grist(paths, '<xdll-path>')
-
- # We need to pass along the <xdll-path> features that we've got from sources,
- # because if a shared library is built, an exe which uses it must know the
- # paths to the other shared libraries this one depends on, to be able to find
- # them all at runtime.
-
- # Just pass all features in the property_set; it's theoretically possible
- # that we'll propagate <xdll-path> features explicitly specified by
- # the user, but then the user is to blame for using an internal feature.
- values = prop_set.get('<xdll-path>')
- extra += replace_grist(values, '<xdll-path>')
-
- if extra:
- result = property_set.create(extra)
-
- return result
-
- def generated_targets (self, sources, prop_set, project, name):
-
- # sources to pass to inherited rule
- sources2 = []
- # sources which are libraries
- libraries = []
-
- # Searched libraries are not passed as arguments to the linker
- # but via an option. So, we pass them to the action
- # via a property.
- fsa = []
- fst = []
- for s in sources:
- if type.is_derived(s.type(), 'SEARCHED_LIB'):
- n = s.real_name()
- if s.shared():
- fsa.append(n)
-
- else:
- fst.append(n)
-
- else:
- sources2.append(s)
-
- add = []
- if fsa:
- add.append("<find-shared-library>" + '&&'.join(fsa))
- if fst:
- add.append("<find-static-library>" + '&&'.join(fst))
-
- spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
- return spawn
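The override above never passes searched libraries to the linker as file arguments; it folds them into two '&&'-joined properties instead. The encoding step in isolation (the helper name is ours):

def searched_lib_properties(shared_names, static_names):
    """Encode searched libraries the way generated_targets does, so the
    toolset can turn them into link-library options later."""
    add = []
    if shared_names:
        add.append('<find-shared-library>' + '&&'.join(shared_names))
    if static_names:
        add.append('<find-static-library>' + '&&'.join(static_names))
    return add

# searched_lib_properties(['z', 'png'], ['m'])
#   -> ['<find-shared-library>z&&png', '<find-static-library>m']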
-
-
-def register_linker(id, source_types, target_types, requirements):
- g = LinkingGenerator(id, True, source_types, target_types, requirements)
- generators.register(g)
-
-class ArchiveGenerator (generators.Generator):
- """ The generator class for handling STATIC_LIB creation.
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run (self, project, name, prop_set, sources):
- sources += prop_set.get ('<library>')
-
- result = generators.Generator.run (self, project, name, prop_set, sources)
-
- return result
-
-### rule register-archiver ( id composing ? : source_types + : target_types + :
-### requirements * )
-### {
-### local g = [ new ArchiveGenerator $(id) $(composing) : $(source_types)
-### : $(target_types) : $(requirements) ] ;
-### generators.register $(g) ;
-### }
-###
-###
-### IMPORT $(__name__) : register-linker register-archiver
-### : : generators.register-linker generators.register-archiver ;
-###
-###
-###
-
-get_manager().projects().add_rule("variant", variant)
-
-import stage
-import symlink
-import message
diff --git a/jam-files/boost-build/tools/cast.jam b/jam-files/boost-build/tools/cast.jam
deleted file mode 100644
index 6c84922f..00000000
--- a/jam-files/boost-build/tools/cast.jam
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright 2005 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines the main target 'cast', used to change the type of a target. For example,
-# in the Qt library one wants two kinds of CPP files -- those that are just compiled
-# and those that are passed through the MOC tool.
-#
-# This is done with:
-#
-# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
-#
-# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
-# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
-# support will run the MOC tool as part of the build process.
-#
-# At the moment, the 'cast' rule only works for non-derived (source) targets.
-#
-# TODO: The following comment is unclear or incorrect. Clean it up.
-# > Another solution would be to add a separate main target 'moc-them' that
-# > would moc all the passed sources, no matter what their type is, but I prefer
-# > cast, as defining a new target type + generator for that type is somewhat
-# > simpler than defining a main target rule.
-
-import "class" : new ;
-import errors ;
-import project ;
-import property-set ;
-import targets ;
-import type ;
-
-
-class cast-target-class : typed-target
-{
- import type ;
-
- rule __init__ ( name : project : type : sources * : requirements * :
- default-build * : usage-requirements * )
- {
- typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local result ;
- for local s in $(source-targets)
- {
- if ! [ class.is-a $(s) : file-target ]
- {
- import errors ;
- errors.user-error Source to the 'cast' rule is not a file! ;
- }
- if [ $(s).action ]
- {
- import errors ;
- errors.user-error Only non-derived targets are allowed for
- 'cast'. : when building [ full-name ] ;
- }
- local r = [ $(s).clone-with-different-type $(self.type) ] ;
- result += [ virtual-target.register $(r) ] ;
- }
- return [ property-set.empty ] $(result) ;
- }
-}
-
-
-rule cast ( name type : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
-
- local real-type = [ type.type-from-rule-name $(type) ] ;
- if ! $(real-type)
- {
- errors.user-error No type corresponds to the main target rule name
- '$(type)' : "Hint: try a lowercase name" ;
- }
-
- targets.main-target-alternative [ new cast-target-class $(name) : $(project)
- : $(real-type)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) :
- $(project) ] ] ;
-}
-
-
-IMPORT $(__name__) : cast : : cast ;
diff --git a/jam-files/boost-build/tools/cast.py b/jam-files/boost-build/tools/cast.py
deleted file mode 100644
index 8f053f11..00000000
--- a/jam-files/boost-build/tools/cast.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Status: ported
-# Base revision: 64432.
-# Copyright 2005-2010 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines the main target 'cast', used to change the type of a target. For example,
-# in the Qt library one wants two kinds of CPP files -- those that are just compiled
-# and those that are passed through the MOC tool.
-#
-# This is done with:
-#
-# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
-#
-# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
-# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
-# support will run the MOC tool as part of the build process.
-#
-# At the moment, the 'cast' rule only works for non-derived (source) targets.
-#
-# TODO: The following comment is unclear or incorrect. Clean it up.
-# > Another solution would be to add a separate main target 'moc-them' that
-# > would moc all the passed sources, no matter what their type is, but I prefer
-# > cast, as defining a new target type + generator for that type is somewhat
-# > simpler than defining a main target rule.
-
-import b2.build.targets as targets
-import b2.build.virtual_target as virtual_target
-
-from b2.manager import get_manager
-from b2.util import bjam_signature
-
-class CastTargetClass(targets.TypedTarget):
-
- def construct(self, name, source_targets, ps):
- result = []
- for s in source_targets:
- if not isinstance(s, virtual_target.FileTarget):
- get_manager().errors()("Source to the 'cast' metatarget is not a file")
-
- if s.action():
- get_manager().errors()("Only non-derived targets allowed as sources for 'cast'.")
-
-
- r = s.clone_with_different_type(self.type())
- result.append(get_manager().virtual_targets().register(r))
-
- return result
-
-
-@bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"], ["usage_requirements", "*"]))
-def cast(name, type, sources, requirements, default_build, usage_requirements):
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- CastTargetClass(name, project, type,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-
-get_manager().projects().add_rule("cast", cast)
diff --git a/jam-files/boost-build/tools/clang-darwin.jam b/jam-files/boost-build/tools/clang-darwin.jam
deleted file mode 100644
index a8abc7d6..00000000
--- a/jam-files/boost-build/tools/clang-darwin.jam
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import clang ;
-import feature : feature ;
-import os ;
-import toolset ;
-import toolset : flags ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-
-feature.extend-subfeature toolset clang : platform : darwin ;
-
-toolset.inherit-generators clang-darwin
- <toolset>clang <toolset-clang:platform>darwin
- : gcc
- # Don't inherit PCH generators. They were not tested, and probably
- # don't work for this compiler.
- : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
- ;
-
-generators.override clang-darwin.prebuilt : builtin.lib-generator ;
-generators.override clang-darwin.prebuilt : builtin.prebuilt ;
-generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
-
-toolset.inherit-rules clang-darwin : gcc ;
-toolset.inherit-flags clang-darwin : gcc
- : <inlining>off <inlining>on <inlining>full <optimization>space
- <warnings>off <warnings>all <warnings>on
- <architecture>x86/<address-model>32
- <architecture>x86/<address-model>64
- ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# vectorization diagnostics
-feature vectorize : off on full ;
-
-# Initializes the clang-darwin toolset.
-# version is optional
-# command (default clang++) is used to invoke the specified clang compiler
-# compile and link options allow you to specify additional command line options for each version
-rule init ( version ? : command * : options * )
-{
- command = [ common.get-invocation-command clang-darwin : clang++
- : $(command) ] ;
-
- # Determine the version
- local command-string = $(command:J=" ") ;
- if $(command)
- {
- version ?= [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- }
-
- local condition = [ common.check-init-parameters clang-darwin
- : version $(version) ] ;
-
- common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags clang-darwin darwin $(condition) ;
-
-}
-
-SPACE = " " ;
-
-flags clang-darwin.compile OPTIONS <cflags> ;
-flags clang-darwin.compile OPTIONS <cxxflags> ;
-# flags clang-darwin.compile INCLUDES <include> ;
-
-# Declare flags and action for compilation.
-toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
-toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
-
-toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
-toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
-toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
-
-toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
-toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
-toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
-toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
-toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
-
-actions compile.c
-{
- "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-flags clang-darwin ARFLAGS <archiveflags> ;
-
-# Default value. Mostly for the sake of clang-linux,
-# which inherits from gcc but does not have the same
-# logic to set the .AR variable. We could put the same
-# logic in clang-linux, but that's hardly worth the trouble
-# as on Linux, 'ar' is always available.
-.AR = ar ;
-
-rule archive ( targets * : sources * : properties * )
-{
- # Always remove archive and start again. Here's rationale from
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a.
- # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
- # My program was crashing with absurd errors.
- # After some debugging I traced it back to the fact that a1.o was *still*
- # in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM
- # onto the archive action. That makes archives fail to build on NT
- # when they have many files because it will no longer execute the
- # action directly and blow the line length limit. Instead we
- # remove the file in a different action, just before the building
- # of the archive.
- #
- local clean.a = $(targets[1])(clean) ;
- TEMPORARY $(clean.a) ;
- NOCARE $(clean.a) ;
- LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
- DEPENDS $(clean.a) : $(sources) ;
- DEPENDS $(targets) : $(clean.a) ;
- common.RmTemps $(clean.a) : $(targets) ;
-}
-
-actions piecemeal archive
-{
- "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
- "ranlib" -cs "$(<)"
-}
-
-flags clang-darwin.link USER_OPTIONS <linkflags> ;
-
-# Declare actions for linking
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since
- # running N links in parallel is just slower.
- JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
diff --git a/jam-files/boost-build/tools/clang-linux.jam b/jam-files/boost-build/tools/clang-linux.jam
deleted file mode 100644
index 036d749e..00000000
--- a/jam-files/boost-build/tools/clang-linux.jam
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright (c) 2003 Michael Stevens
-# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import toolset ;
-import feature ;
-import toolset : flags ;
-
-import clang ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-import type ;
-import numbers ;
-
-feature.extend-subfeature toolset clang : platform : linux ;
-
-toolset.inherit-generators clang-linux
- <toolset>clang <toolset-clang:platform>linux : gcc
- : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
-generators.override clang-linux.prebuilt : builtin.lib-generator ;
-generators.override clang-linux.prebuilt : builtin.prebuilt ;
-generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
-
-# Override default do-nothing generators.
-generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
-generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-type.set-generated-target-suffix PCH
- : <toolset>clang <toolset-clang:platform>linux : pth ;
-
-toolset.inherit-rules clang-linux : gcc ;
-toolset.inherit-flags clang-linux : gcc
- : <inlining>off <inlining>on <inlining>full
- <optimization>space <optimization>speed
- <warnings>off <warnings>all <warnings>on ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
- .debug-configuration = true ;
-}
-
-rule init ( version ? : command * : options * ) {
- command = [ common.get-invocation-command clang-linux : clang++
- : $(command) ] ;
-
- # Determine the version
- local command-string = $(command:J=" ") ;
-
- if $(command) {
- version ?= [ MATCH "version ([0-9.]+)"
- : [ SHELL "$(command-string) --version" ] ] ;
- }
-
- local condition = [ common.check-init-parameters clang-linux
- : version $(version) ] ;
-
- common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags clang-linux gnu $(condition) ;
-}
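Both clang init rules above detect the compiler version by running the configured command and matching a dotted number out of its output (clang-darwin via -dumpversion, clang-linux by parsing --version). A rough Python equivalent of the clang-linux probe (the function name and the use of subprocess are our own, not part of the build system):

import re
import subprocess

def detect_clang_version(command='clang++'):
    """Run the compiler and extract the first 'version X.Y' match,
    mirroring the MATCH/SHELL probe in the init rule above."""
    try:
        out = subprocess.check_output([command, '--version'],
                                      stderr=subprocess.STDOUT).decode()
    except (OSError, subprocess.CalledProcessError):
        return None
    m = re.search(r'version ([0-9.]+)', out)
    return m.group(1) if m else None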
-
-###############################################################################
-# Flags
-
-toolset.flags clang-linux.compile OPTIONS <cflags> ;
-toolset.flags clang-linux.compile OPTIONS <cxxflags> ;
-
-toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
-toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
-
-# note: clang silently ignores some of these inlining options
-toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
-toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
-toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
-
-toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
-toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
-toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
-toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
-toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
-
-###############################################################################
-# C and C++ compilation
-
-rule compile.c++ ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-
- local pth-file = [ on $(<) return $(PCH_FILE) ] ;
-
- if $(pth-file) {
- DEPENDS $(<) : $(pth-file) ;
- compile.c++.with-pch $(targets) : $(sources) ;
- }
- else {
- compile.c++.without-pth $(targets) : $(sources) ;
- }
-}
-
-actions compile.c++.without-pth {
- "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
-}
-
-actions compile.c++.with-pch bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
-}
-
-rule compile.c ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-
- local pth-file = [ on $(<) return $(PCH_FILE) ] ;
-
- if $(pth-file) {
- DEPENDS $(<) : $(pth-file) ;
- compile.c.with-pch $(targets) : $(sources) ;
- }
- else {
- compile.c.without-pth $(targets) : $(sources) ;
- }
-}
-
-actions compile.c.without-pth
-{
- "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c.with-pch bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
-}
-
-###############################################################################
-# PCH emission
-
-rule compile.c++.pch ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c++.pch {
- rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
-}
-
-rule compile.c.pch ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c.pch
-{
- rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
-}
-
-###############################################################################
-# Linking
-
-SPACE = " " ;
-
-rule link ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
-}
-
-actions link bind LIBRARIES {
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
-}
-
-rule link.dll ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
-}
-
-# Differs from 'link' above only by -shared.
-actions link.dll bind LIBRARIES {
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
-}
-
diff --git a/jam-files/boost-build/tools/clang.jam b/jam-files/boost-build/tools/clang.jam
deleted file mode 100644
index e0ac9a55..00000000
--- a/jam-files/boost-build/tools/clang.jam
+++ /dev/null
@@ -1,27 +0,0 @@
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# This is a generic 'clang' toolset. Depending on the current system, it
-# forwards either to the 'clang-linux' or the 'clang-darwin' module.
-
-import feature ;
-import os ;
-import toolset ;
-
-feature.extend toolset : clang ;
-feature.subfeature toolset clang : platform : : propagated link-incompatible ;
-
-rule init ( * : * )
-{
- if [ os.name ] = MACOSX
- {
- toolset.using clang-darwin :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
- else
- {
- toolset.using clang-linux :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-}
diff --git a/jam-files/boost-build/tools/common.jam b/jam-files/boost-build/tools/common.jam
deleted file mode 100644
index ed835a36..00000000
--- a/jam-files/boost-build/tools/common.jam
+++ /dev/null
@@ -1,994 +0,0 @@
-# Copyright 2003, 2005 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2005 Toon Knapen
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Provides actions common to all toolsets, such as creating directories and
-# removing files.
-
-import os ;
-import modules ;
-import utility ;
-import print ;
-import type ;
-import feature ;
-import errors ;
-import path ;
-import sequence ;
-import toolset ;
-import virtual-target ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
-{
- .show-configuration = true ;
-}
-
-# Configurations
-#
-# The following class helps to manage toolset configurations. Each configuration
-# has a unique ID and one or more parameters. A typical example of a unique ID
-# is a condition generated by 'common.check-init-parameters' rule. Other kinds
-# of IDs can be used. Parameters may include any details about the configuration
-# like 'command', 'path', etc.
-#
-# A toolset configuration may be in one of the following states:
-#
-# - registered
-# Configuration has been registered (e.g. explicitly or by auto-detection
-# code) but has not yet been marked as used, i.e. 'toolset.using' rule has
-# not yet been called for it.
-# - used
-# Once called 'toolset.using' rule marks the configuration as 'used'.
-#
-# The main difference between the states above is that while a configuration is
-# 'registered' its options can be freely changed. This is useful in particular
-# for autodetection code - all detected configurations may be safely overwritten
-# by user code.
-
-class configurations
-{
- import errors ;
-
- rule __init__ ( )
- {
- }
-
- # Registers a configuration.
- #
- # Returns 'true' if the configuration has been added and an empty value if
- # it already exists. Reports an error if the configuration is 'used'.
- #
- rule register ( id )
- {
- if $(id) in $(self.used)
- {
- errors.error "common: the configuration '$(id)' is in use" ;
- }
-
- local retval ;
-
- if ! $(id) in $(self.all)
- {
- self.all += $(id) ;
-
- # Indicate that a new configuration has been added.
- retval = true ;
- }
-
- return $(retval) ;
- }
-
- # Mark a configuration as 'used'.
- #
- # Returns 'true' if the state of the configuration has been changed to
- # 'used' and an empty value if the state has not been changed. Reports an
- # error if the configuration is not known.
- #
- rule use ( id )
- {
- if ! $(id) in $(self.all)
- {
- errors.error "common: the configuration '$(id)' is not known" ;
- }
-
- local retval ;
-
- if ! $(id) in $(self.used)
- {
- self.used += $(id) ;
-
- # Indicate that the configuration has been marked as 'used'.
- retval = true ;
- }
-
- return $(retval) ;
- }
-
- # Return all registered configurations.
- #
- rule all ( )
- {
- return $(self.all) ;
- }
-
- # Return all used configurations.
- #
- rule used ( )
- {
- return $(self.used) ;
- }
-
- # Returns the value of a configuration parameter.
- #
- rule get ( id : param )
- {
- return $(self.$(param).$(id)) ;
- }
-
- # Sets the value of a configuration parameter.
- #
- rule set ( id : param : value * )
- {
- self.$(param).$(id) = $(value) ;
- }
-}
-
-
-# The rule for checking toolset parameters. Trailing parameters should all be
-# parameter name/value pairs. The rule will check that each parameter either has
-# a value in each invocation or has no value in each invocation. Also, the rule
-# will check that the combination of all parameter values is unique in all
-# invocations.
-#
-# Each parameter name corresponds to a subfeature. This rule will declare a
-# subfeature the first time a non-empty parameter value is passed and will
-# extend it with all the values.
-#
-# The return value from this rule is a condition to be used for flags settings.
-#
-rule check-init-parameters ( toolset requirement * : * )
-{
- local sig = $(toolset) ;
- local condition = <toolset>$(toolset) ;
- local subcondition ;
- for local index in 2 3 4 5 6 7 8 9
- {
- local name = $($(index)[1]) ;
- local value = $($(index)[2]) ;
-
- if $(value)-is-not-empty
- {
- condition = $(condition)-$(value) ;
- if $(.had-unspecified-value.$(toolset).$(name))
- {
- errors.user-error
- "$(toolset) initialization: parameter '$(name)'"
- "inconsistent" : "no value was specified in earlier"
- "initialization" : "an explicit value is specified now" ;
- }
- # The logic below is for the Intel compiler. It calls this rule with
- # 'intel-linux' and 'intel-win' as the toolset, so we need to get the
- # base part of the toolset name. We can not pass 'intel' as the toolset,
- # because in that case it would be impossible to register versionless
- # intel-linux and intel-win toolsets for a specific version.
- local t = $(toolset) ;
- local m = [ MATCH ([^-]*)- : $(toolset) ] ;
- if $(m)
- {
- t = $(m[1]) ;
- }
- if ! $(.had-value.$(toolset).$(name))
- {
- if ! $(.declared-subfeature.$(t).$(name))
- {
- feature.subfeature toolset $(t) : $(name) : : propagated ;
- .declared-subfeature.$(t).$(name) = true ;
- }
- .had-value.$(toolset).$(name) = true ;
- }
- feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
- subcondition += <toolset-$(t):$(name)>$(value) ;
- }
- else
- {
- if $(.had-value.$(toolset).$(name))
- {
- errors.user-error
- "$(toolset) initialization: parameter '$(name)'"
- "inconsistent" : "an explicit value was specified in an"
- "earlier initialization" : "no value is specified now" ;
- }
- .had-unspecified-value.$(toolset).$(name) = true ;
- }
- sig = $(sig)$(value:E="")- ;
- }
- if $(sig) in $(.all-signatures)
- {
- local message =
- "duplicate initialization of $(toolset) with the following parameters: " ;
- for local index in 2 3 4 5 6 7 8 9
- {
- local p = $($(index)) ;
- if $(p)
- {
- message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
- }
- }
- message += "previous initialization at $(.init-loc.$(sig))" ;
- errors.user-error
- $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
- $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
- }
- .all-signatures += $(sig) ;
- .init-loc.$(sig) = [ errors.nearest-user-location ] ;
-
- # If we have a requirement, this version should only be applied under that
- # condition. To accomplish this we add a toolset requirement that imposes
- # the toolset subcondition, which encodes the version.
- if $(requirement)
- {
- local r = <toolset>$(toolset) $(requirement) ;
- r = $(r:J=,) ;
- toolset.add-requirements $(r):$(subcondition) ;
- }
-
- # We add the requirements, if any, to the condition to scope the toolset
- # variables and options to this specific version.
- condition += $(requirement) ;
-
- if $(.show-configuration)
- {
- ECHO notice: $(condition) ;
- }
- return $(condition:J=/) ;
-}
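Most of check-init-parameters is subfeature bookkeeping; the duplicate-detection part reduces to building a signature from the toolset name plus every parameter value and refusing to accept the same signature twice. A compact sketch of that idea (the class and method names are ours, not part of the module):

class InitSignatures(object):
    """Duplicate-initialization guard in the spirit of check-init-parameters."""

    def __init__(self):
        self._seen = set()

    def check(self, toolset, params):
        # params is a list of (name, value) pairs; value may be None.
        sig = toolset + ''.join((value or '') + '-' for _name, value in params)
        if sig in self._seen:
            raise ValueError('duplicate initialization of ' + toolset)
        self._seen.add(sig)
        return sig

# guard = InitSignatures()
# guard.check('gcc', [('version', '4.7')])   # ok
# guard.check('gcc', [('version', '4.7')])   # raises ValueError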
-
-
-# A helper rule to get the command to invoke some tool. If
-# 'user-provided-command' is not given, tries to find binary named 'tool' in
-# PATH and in the passed 'additional-path'. Otherwise, verifies that the first
-# element of 'user-provided-command' is an existing program.
-#
-# This rule returns the command to be used when invoking the tool. If we can not
-# find the tool, a warning is issued. If 'path-last' is specified, PATH is
-# checked after 'additional-paths' when searching for 'tool'.
-#
-rule get-invocation-command-nodefault ( toolset : tool :
- user-provided-command * : additional-paths * : path-last ? )
-{
- local command ;
- if ! $(user-provided-command)
- {
- command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
- if ! $(command) && $(.debug-configuration)
- {
- ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ;
- ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
- }
- }
- else
- {
- command = [ check-tool $(user-provided-command) ] ;
- if ! $(command) && $(.debug-configuration)
- {
- ECHO "warning: toolset $(toolset) initialization: " ;
- ECHO "warning: can not find user-provided command " '$(user-provided-command)' ;
- ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
- }
- }
-
- return $(command) ;
-}
-
-
-# Same as get-invocation-command-nodefault, except that if no tool is found,
-# returns either the user-provided-command, if present, or the 'tool' parameter.
-#
-rule get-invocation-command ( toolset : tool : user-provided-command * :
- additional-paths * : path-last ? )
-{
- local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
- $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
-
- if ! $(result)
- {
- if $(user-provided-command)
- {
- result = $(user-provided-command) ;
- }
- else
- {
- result = $(tool) ;
- }
- }
- return $(result) ;
-}
-
-
-# Given an invocation command return the absolute path to the command. This
-# works even if command has no path element and was found on the PATH.
-#
-rule get-absolute-tool-path ( command )
-{
- if $(command:D)
- {
- return $(command:D) ;
- }
- else
- {
- local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
- return $(m[1]:D) ;
- }
-}
-
-
-# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
-# If found in PATH, returns 'name' and if found in additional paths, returns
-# absolute name. If the tool is found in several directories, returns the
-# first path found. Otherwise, returns an empty string. If 'path-last' is
-# specified, PATH is searched after 'additional-paths'.
-#
-local rule find-tool ( name : additional-paths * : path-last ? )
-{
- local path = [ path.programs-path ] ;
- local match = [ path.glob $(path) : $(name) $(name).exe ] ;
- local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
-
- local result ;
- if $(path-last)
- {
- result = $(additional-match) ;
- if ! $(result) && $(match)
- {
- result = $(name) ;
- }
- }
- else
- {
- if $(match)
- {
- result = $(name) ;
- }
- else
- {
- result = $(additional-match) ;
- }
- }
- if $(result)
- {
- return [ path.native $(result[1]) ] ;
- }
-}
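A Python approximation of the find-tool lookup above, searching PATH and the additional paths in the order controlled by path-last (the helper name and the use of shutil.which are our own; the real rule also probes a '.exe' suffix on PATH, which this sketch only does for the extra paths):

import os
import shutil

def find_tool(name, additional_paths=(), path_last=False):
    """Return 'name' if it is on PATH, an absolute path if it is found in
    additional_paths, or None -- mirroring the search order of find-tool."""
    def search_additional():
        for directory in additional_paths:
            for candidate in (name, name + '.exe'):
                full = os.path.join(directory, candidate)
                if os.path.isfile(full):
                    return full
        return None

    on_path = shutil.which(name) is not None
    if path_last:
        return search_additional() or (name if on_path else None)
    return name if on_path else search_additional()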
-
-
-# Checks if 'command' can be found either in path or is a full name to an
-# existing file.
-#
-local rule check-tool-aux ( command )
-{
- if $(command:D)
- {
- if [ path.exists $(command) ]
- # Both NT and Cygwin will run .exe files by their unqualified names.
- || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
- # Only NT will run .bat & .cmd files by their unqualified names.
- || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
- [ path.exists $(command).cmd ] ) )
- {
- return $(command) ;
- }
- }
- else
- {
- if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
- {
- return $(command) ;
- }
- }
-}
-
-
-# Checks that a tool can be invoked by 'command'. If command is not an absolute
-# path, checks if it can be found in 'path'. If command is an absolute path,
-# checks that it exists. Returns 'command' if ok or an empty string otherwise.
-#
-local rule check-tool ( xcommand + )
-{
- if [ check-tool-aux $(xcommand[1]) ] ||
- [ check-tool-aux $(xcommand[-1]) ]
- {
- return $(xcommand) ;
- }
-}
-
-
-# Handle common options for toolset, specifically sets the following flag
-# variables:
-# - CONFIG_COMMAND to $(command)
-# - OPTIONS for compile to the value of <compileflags> in $(options)
-# - OPTIONS for compile.c to the value of <cflags> in $(options)
-# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
-# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
-# - OPTIONS for link to the value of <linkflags> in $(options)
-#
-rule handle-options ( toolset : condition * : command * : options * )
-{
- if $(.debug-configuration)
- {
- ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ;
- }
-
- # The last parameter ('unchecked') says it is OK to set flags for another
- # module.
- toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
- : unchecked ;
-
- toolset.flags $(toolset).compile OPTIONS $(condition) :
- [ feature.get-values <compileflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).compile.c OPTIONS $(condition) :
- [ feature.get-values <cflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
- [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
- [ feature.get-values <fflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).link OPTIONS $(condition) :
- [ feature.get-values <linkflags> : $(options) ] : unchecked ;
-}
-
-
-# Returns the location of the "program files" directory on a Windows platform.
-#
-rule get-program-files-dir ( )
-{
- local ProgramFiles = [ modules.peek : ProgramFiles ] ;
- if $(ProgramFiles)
- {
- ProgramFiles = "$(ProgramFiles:J= )" ;
- }
- else
- {
- ProgramFiles = "c:\\Program Files" ;
- }
- return $(ProgramFiles) ;
-}
-
-
-if [ os.name ] = NT
-{
- RM = del /f /q ;
- CP = copy /b ;
- IGNORE = "2>nul >nul & setlocal" ;
- LN ?= $(CP) ;
- # Ugly hack to convince copy to set the timestamp of the
- # destination to the current time by concatenating the
- # source with a nonexistent file. Note that this requires
- # /b (binary) as the default when concatenating files is /a (ascii).
- WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
-}
-else
-{
- RM = rm -f ;
- CP = cp ;
- LN = ln ;
-}
-
-
-rule rm-command ( )
-{
- return $(RM) ;
-}
-
-
-rule copy-command ( )
-{
- return $(CP) ;
-}
-
-
-if "\n" = "n"
-{
- # Escape characters are not supported. Use ugly hacks that won't work,
- # see below.
- nl = "
-" ;
- q = "" ;
-}
-else
-{
- nl = "\n" ;
- q = "\"" ;
-}
-
-# Returns the command needed to set an environment variable on the current
-# platform. The variable setting persists through all following commands and is
-# visible in the environment seen by subsequently executed commands. In other
-# words, on Unix systems, the variable is exported, which is consistent with the
-# only possible behavior on Windows systems.
-#
-rule variable-setting-command ( variable : value )
-{
- if [ os.name ] = NT
- {
- return "set $(variable)=$(value)$(nl)" ;
- }
- else
- {
- # If we don't have escape characters support in bjam, the below blows
- # up on CYGWIN, since the $(nl) variable holds a Windows new-line \r\n
- # sequence that messes up the executed export command which then reports
- # that the passed variable name is incorrect.
- # But we have a check for cygwin in kernel/bootstrap.jam already.
- return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
- }
-}
-
-
-# Returns a command that sets a named shell path variable to the given NATIVE
-# paths on the current platform.
-#
-rule path-variable-setting-command ( variable : paths * )
-{
- local sep = [ os.path-separator ] ;
- return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
-}
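The two rules above reduce to emitting either cmd.exe 'set' syntax or a POSIX assignment plus export, with path variables joined by the platform separator first. A minimal Python sketch under that assumption (the function names are ours):

import os

def variable_setting_command(variable, value, windows=(os.name == 'nt')):
    """Shell text that sets a variable for all subsequent commands."""
    if windows:
        return 'set %s=%s\n' % (variable, value)
    return '%s="%s"\nexport %s\n' % (variable, value, variable)

def path_variable_setting_command(variable, paths, windows=(os.name == 'nt')):
    """Join native paths with the platform separator, then set the variable."""
    separator = ';' if windows else ':'
    return variable_setting_command(variable, separator.join(paths), windows)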
-
-
-# Returns a command that prepends the given paths to the named path variable on
-# the current platform.
-#
-rule prepend-path-variable-command ( variable : paths * )
-{
- return [ path-variable-setting-command $(variable)
- : $(paths) [ os.expand-variable $(variable) ] ] ;
-}
-
-
-# Return a command which can create a file. If 'r' is the result of the invocation,
-# then 'r foobar' will create foobar with unspecified content. What happens if the
-# file already exists is unspecified.
-#
-rule file-creation-command ( )
-{
- if [ os.name ] = NT
- {
- # A few alternative implementations on Windows:
- #
- # 'type NUL >> '
- # That would construct an empty file instead of a file containing
- # a space and an end-of-line marker but it would also not change
- # the target's timestamp in case the file already exists.
- #
- # 'type NUL > '
- # That would construct an empty file instead of a file containing
- # a space and an end-of-line marker but it would also destroy an
- # already existing file by overwriting it with an empty one.
- #
- # I guess the best solution would be to allow Boost Jam to define
- # built-in functions such as 'create a file', 'touch a file' or 'copy a
- # file' which could be used from inside action code. That would allow
- # completely portable operations without this kind of kludge.
- # (22.02.2009.) (Jurko)
- return "echo. > " ;
- }
- else
- {
- return "touch " ;
- }
-}
-
-
-# Returns a command that may be used for 'touching' files. It is not a real
-# 'touch' command on NT because it adds an empty line at the end of the file,
-# but it works with source files.
-#
-rule file-touch-command ( )
-{
- if [ os.name ] = NT
- {
- return "echo. >> " ;
- }
- else
- {
- return "touch " ;
- }
-}
-
-
-rule MkDir
-{
- # If dir exists, do not update it. Do this even for $(DOT).
- NOUPDATE $(<) ;
-
- if $(<) != $(DOT) && ! $($(<)-mkdir)
- {
- # Cheesy gate to prevent multiple invocations on same dir.
- $(<)-mkdir = true ;
-
- # Schedule the mkdir build action.
- common.mkdir $(<) ;
-
- # Prepare a Jam 'dirs' target that can be used to make the build only
- # construct all the target directories.
- DEPENDS dirs : $(<) ;
-
- # Recursively create parent directories. $(<:P) = $(<)'s parent & we
- # recurse until root.
-
- local s = $(<:P) ;
- if [ os.name ] = NT
- {
- switch $(s)
- {
- case *: : s = ;
- case *:\\ : s = ;
- }
- }
-
- if $(s)
- {
- if $(s) != $(<)
- {
- DEPENDS $(<) : $(s) ;
- MkDir $(s) ;
- }
- else
- {
- NOTFILE $(s) ;
- }
- }
- }
-}
-
-
-#actions MkDir1
-#{
-# mkdir "$(<)"
-#}
-
-# The following quick-fix actions should be replaced using the original MkDir1
-# action once Boost Jam gets updated to correctly detect different paths leading
-# up to the same filesystem target and triggers their build action only once.
-# (todo) (04.07.2008.) (Jurko)
-
-if [ os.name ] = NT
-{
- actions mkdir
- {
- if not exist "$(<)\\" mkdir "$(<)"
- }
-}
-else
-{
- actions mkdir
- {
- mkdir -p "$(<)"
- }
-}
-
-actions piecemeal together existing Clean
-{
- $(RM) "$(>)"
-}
-
-
-rule copy
-{
-}
-
-
-actions copy
-{
- $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
-}
-
-
-rule RmTemps
-{
-}
-
-
-actions quietly updated piecemeal together RmTemps
-{
- $(RM) "$(>)" $(IGNORE)
-}
-
-
-actions hard-link
-{
- $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
- $(LN) "$(>)" "$(<)" $(NULL_OUT)
-}
-
-
-# Given a target, as given to a custom tag rule, returns a string formatted
-# according to the passed format. Format is a list of properties that is
-# represented in the result. For each element of format the corresponding target
-# information is obtained and added to the result string. For all but the
-# literal, the format value is taken as the string to prepend to the output
-# to join the item to the rest of the result. If not given, "-" is used as a
-# joiner.
-#
-# The format options can be:
-#
-# <base>[joiner]
-# :: The basename of the target name.
-# <toolset>[joiner]
-# :: The abbreviated toolset tag being used to build the target.
-# <threading>[joiner]
-# :: Indication of a multi-threaded build.
-# <runtime>[joiner]
-# :: Collective tag of the build runtime.
-# <version:/version-feature | X.Y[.Z]/>[joiner]
-# :: Short version tag taken from the given "version-feature" in the
-# build properties. Or if not present, the literal value as the
-# version number.
-# <property:/property-name/>[joiner]
-# :: Direct lookup of the given property-name value in the build
-# properties. /property-name/ is a regular expression. E.g.
-# <property:toolset-.*:flavor> will match every toolset.
-# /otherwise/
-# :: The literal value of the format argument.
-#
-# For example this format:
-#
-# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
-#
-# Might return:
-#
-# boost_thread-vc80-mt-gd-1_33.dll, or
-# boost_regex-vc80-gd-1_33.dll
-#
-# The returned name also has the target type specific prefix and suffix which
-# puts it in a ready form to use as the value from a custom tag rule.
-#
-rule format-name ( format * : name : type ? : property-set )
-{
- local result = "" ;
- for local f in $(format)
- {
- switch $(f:G)
- {
- case <base> :
- local matched = [ MATCH "^(boost.*python)-.*" : $(name) ] ;
- if $(matched) = boost_python || $(matched) = boost_mpi_python
- {
- result += $(name) ;
- }
- else
- {
- result += $(name:B) ;
- }
-
- case <toolset> :
- result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <threading> :
- result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
- : $(property-set) ] ] ;
-
- case <runtime> :
- result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <qt> :
- result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <address-model> :
- result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <version:*> :
- local key = [ MATCH <version:(.*)> : $(f:G) ] ;
- local version = [ $(property-set).get <$(key)> ] ;
- version ?= $(key) ;
- version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
- result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
-
- case <property:*> :
- local key = [ MATCH <property:(.*)> : $(f:G) ] ;
- local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
- if $(p0)
- {
- local p = [ $(property-set).get <$(p0)> ] ;
- if $(p)
- {
- result += [ join-tag $(f:G=) : $(p) ] ;
- }
- }
-
- case * :
- result += $(f:G=) ;
- }
- }
- result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
- $(property-set) ] ;
- return $(result) ;
-}
-
-
-local rule join-tag ( joiner ? : tag ? )
-{
- if ! $(joiner) { joiner = - ; }
- return $(joiner)$(tag) ;
-}
-
-
-local rule toolset-tag ( name : type ? : property-set )
-{
- local tag = ;
-
- local properties = [ $(property-set).raw ] ;
- switch [ $(property-set).get <toolset> ]
- {
- case borland* : tag += bcb ;
- case clang* :
- {
- switch [ $(property-set).get <toolset-clang:platform> ]
- {
- case darwin : tag += clang-darwin ;
- case linux : tag += clang ;
- }
- }
- case como* : tag += como ;
- case cw : tag += cw ;
- case darwin* : tag += xgcc ;
- case edg* : tag += edg ;
- case gcc* :
- {
- switch [ $(property-set).get <toolset-gcc:flavor> ]
- {
- case *mingw* : tag += mgw ;
- case * : tag += gcc ;
- }
- }
- case intel :
- if [ $(property-set).get <toolset-intel:platform> ] = win
- {
- tag += iw ;
- }
- else
- {
- tag += il ;
- }
- case kcc* : tag += kcc ;
- case kylix* : tag += bck ;
- #case metrowerks* : tag += cw ;
- #case mingw* : tag += mgw ;
- case mipspro* : tag += mp ;
- case msvc* : tag += vc ;
- case qcc* : tag += qcc ;
- case sun* : tag += sw ;
- case tru64cxx* : tag += tru ;
- case vacpp* : tag += xlc ;
- }
- local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)"
- : $(properties) ] ;
- # For historical reasons, vc6.0 and vc7.0 use different naming.
- if $(tag) = vc
- {
- if $(version[1]) = 6
- {
- # Cancel minor version.
- version = 6 ;
- }
- else if $(version[1]) = 7 && $(version[2]) = 0
- {
- version = 7 ;
- }
- }
- # On intel, version is not added, because it does not matter and it is the
- # version of vc used as backend that matters. Ideally, we should encode the
- # backend version but that would break compatibility with V1.
- if $(tag) = iw
- {
- version = ;
- }
-
- # On borland, version is not added for compatibility with V1.
- if $(tag) = bcb
- {
- version = ;
- }
-
- tag += $(version) ;
-
- return $(tag:J=) ;
-}
-
-
-local rule threading-tag ( name : type ? : property-set )
-{
- local tag = ;
- local properties = [ $(property-set).raw ] ;
- if <threading>multi in $(properties) { tag = mt ; }
-
- return $(tag:J=) ;
-}
-
-
-local rule runtime-tag ( name : type ? : property-set )
-{
- local tag = ;
-
- local properties = [ $(property-set).raw ] ;
- if <runtime-link>static in $(properties) { tag += s ; }
-
- # This is an ugly thing. In V1, there is code to automatically detect which
- # properties affect a target. So, if <runtime-debugging> does not affect gcc
- # toolset, the tag rules will not even see <runtime-debugging>. Similar
- # functionality in V2 is not implemented yet, so we just check for toolsets
- # known to care about runtime debugging.
- if ( <toolset>msvc in $(properties) ) ||
- ( <stdlib>stlport in $(properties) ) ||
- ( <toolset-intel:platform>win in $(properties) )
- {
- if <runtime-debugging>on in $(properties) { tag += g ; }
- }
-
- if <python-debugging>on in $(properties) { tag += y ; }
- if <variant>debug in $(properties) { tag += d ; }
- if <stdlib>stlport in $(properties) { tag += p ; }
- if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
-
- return $(tag:J=) ;
-}
-
-# Create a tag for the Qt library version
-# "<qt>4.6.0" will result in tag "qt460"
-local rule qt-tag ( name : type ? : property-set )
-{
- local properties = [ $(property-set).get <qt> ] ;
- local version = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)"
- : $(properties) ] ;
- local tag = "qt"$(version:J=) ;
- return $(tag) ;
-}
-
-# Create a tag for the address-model
-# <address-model>64 will simply generate "64"
-local rule address-model-tag ( name : type ? : property-set )
-{
- local tag = ;
- local version = [ $(property-set).get <address-model> ] ;
- return $(version) ;
-}
-
-rule __test__ ( )
-{
- import assert ;
-
- local nl = "
-" ;
-
- local save-os = [ modules.peek os : .name ] ;
-
- modules.poke os : .name : LINUX ;
-
- assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)"
- : path-variable-setting-command PATH : foo bar baz ;
-
- assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)"
- : prepend-path-variable-command PATH : foo bar ;
-
- modules.poke os : .name : NT ;
-
- assert.result "set PATH=foo;bar;baz$(nl)"
- : path-variable-setting-command PATH : foo bar baz ;
-
- assert.result "set PATH=foo;bar;%PATH%$(nl)"
- : prepend-path-variable-command PATH : foo bar ;
-
- modules.poke os : .name : $(save-os) ;
-}
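For orientation before the Python port below: the format-name comment earlier in this file describes how a tag format such as boost_ <base> <toolset> <threading> <runtime> <version:boost-version> is expanded. The following is a minimal, purely illustrative Python sketch of that joiner behaviour; format_library_name and its arguments are invented here and are not part of Boost.Build.

    # Mirrors the joiner semantics of join-tag / format-name described above:
    # every non-empty tag is glued on with "-" unless another joiner is given.
    def join_tag(tag, joiner='-'):
        return joiner + tag if tag else ''

    def format_library_name(base, toolset, threading, runtime, version):
        # Only the X_Y part of an X.Y[.Z] version string is kept, as with
        # the <version:...> format option.
        major, minor = version.split('.')[:2]
        return ('boost_' + base
                + join_tag(toolset)
                + join_tag(threading)
                + join_tag(runtime)
                + join_tag(major + '_' + minor))

    print(format_library_name('thread', 'vc80', 'mt', 'gd', '1.33.1'))
    # -> boost_thread-vc80-mt-gd-1_33

The real rule additionally runs the result through virtual-target.add-prefix-and-suffix, which is what turns this into a ready file name such as boost_thread-vc80-mt-gd-1_33.dll.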
diff --git a/jam-files/boost-build/tools/common.py b/jam-files/boost-build/tools/common.py
deleted file mode 100644
index 612745b8..00000000
--- a/jam-files/boost-build/tools/common.py
+++ /dev/null
@@ -1,840 +0,0 @@
-# Status: being ported by Steven Watanabe
-# Base revision: 47174
-#
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-""" Provides actions common to all toolsets, such as creating directories and
- removing files.
-"""
-
-import re
-import bjam
-import os
-import os.path
-import sys
-
-from b2.build import feature
-from b2.util.utility import *
-from b2.util import path
-
-__re__before_first_dash = re.compile ('([^-]*)-')
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- Note that this must be called _after_ resetting the module 'feature'.
- """
- global __had_unspecified_value, __had_value, __declared_subfeature
- global __init_loc
- global __all_signatures, __debug_configuration, __show_configuration
-
- # Stores toolsets without specified initialization values.
- __had_unspecified_value = {}
-
- # Stores toolsets with specified initialization values.
- __had_value = {}
-
- # Stores toolsets with declared subfeatures.
- __declared_subfeature = {}
-
- # Stores all signatures of the toolsets.
- __all_signatures = {}
-
- # Stores the initialization locations of each toolset
- __init_loc = {}
-
- __debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
- __show_configuration = '--show-configuration' in bjam.variable('ARGV')
-
- global __executable_path_variable
- OS = bjam.call("peek", [], "OS")[0]
- if OS == "NT":
- # On Windows the case and capitalization of PATH is not always predictable, so
- # let's find out what variable name was really set.
- for n in sys.environ:
- if n.lower() == "path":
- __executable_path_variable = n
- break
- else:
- __executable_path_variable = "PATH"
-
- m = {"NT": __executable_path_variable,
- "CYGWIN": "PATH",
- "MACOSX": "DYLD_LIBRARY_PATH",
- "AIX": "LIBPATH"}
- global __shared_library_path_variable
- __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH")
-
-reset()
-
-def shared_library_path_variable():
- return __shared_library_path_variable
-
-# ported from trunk@47174
-class Configurations(object):
- """
- This class helps to manage toolset configurations. Each configuration
- has a unique ID and one or more parameters. A typical example of a unique ID
- is a condition generated by 'common.check-init-parameters' rule. Other kinds
- of IDs can be used. Parameters may include any details about the configuration
- like 'command', 'path', etc.
-
- A toolset configuration may be in one of the following states:
-
- - registered
- Configuration has been registered (e.g. by autodetection code) but has
- not yet been marked as used, i.e. 'toolset.using' rule has not yet been
- called for it.
- - used
- Once called 'toolset.using' rule marks the configuration as 'used'.
-
- The main difference between the states above is that while a configuration is
- 'registered' its options can be freely changed. This is useful in particular
- for autodetection code - all detected configurations may be safely overwritten
- by user code.
- """
-
- def __init__(self):
- self.used_ = set()
- self.all_ = set()
- self.params = {}
-
- def register(self, id):
- """
- Registers a configuration.
-
- Returns True if the configuration has been added and False if
- it already exists. Reports an error if the configuration is 'used'.
- """
- if id in self.used_:
- #FIXME
- errors.error("common: the configuration '$(id)' is in use")
-
-        if id not in self.all_:
-            self.all_.add(id)
-
- # Indicate that a new configuration has been added.
- return True
- else:
- return False
-
- def use(self, id):
- """
- Mark a configuration as 'used'.
-
- Returns True if the state of the configuration has been changed to
- 'used' and False if it the state wasn't changed. Reports an error
- if the configuration isn't known.
- """
- if id not in self.all_:
- #FIXME:
- errors.error("common: the configuration '$(id)' is not known")
-
-        if id not in self.used_:
-            self.used_.add(id)
-
- # indicate that the configuration has been marked as 'used'
- return True
- else:
- return False
-
- def all(self):
- """ Return all registered configurations. """
- return self.all_
-
- def used(self):
- """ Return all used configurations. """
- return self.used_
-
-    def get(self, id, param):
-        """ Returns the value of a configuration parameter. """
-        return self.params.setdefault(param, {}).get(id, None)
-
- def set (self, id, param, value):
- """ Sets the value of a configuration parameter. """
-        self.params.setdefault(param, {})[id] = value
-
-# Ported from trunk@47174
-def check_init_parameters(toolset, requirement, *args):
- """ The rule for checking toolset parameters. Trailing parameters should all be
- parameter name/value pairs. The rule will check that each parameter either has
- a value in each invocation or has no value in each invocation. Also, the rule
- will check that the combination of all parameter values is unique in all
- invocations.
-
- Each parameter name corresponds to a subfeature. This rule will declare a
- subfeature the first time a non-empty parameter value is passed and will
- extend it with all the values.
-
- The return value from this rule is a condition to be used for flags settings.
- """
- # The type checking here is my best guess about
- # what the types should be.
- assert(isinstance(toolset, str))
- assert(isinstance(requirement, str) or requirement is None)
- sig = toolset
- condition = replace_grist(toolset, '<toolset>')
- subcondition = []
-
- for arg in args:
- assert(isinstance(arg, tuple))
- assert(len(arg) == 2)
- name = arg[0]
- value = arg[1]
- assert(isinstance(name, str))
- assert(isinstance(value, str) or value is None)
-
- str_toolset_name = str((toolset, name))
-
- # FIXME: is this the correct translation?
- ### if $(value)-is-not-empty
- if value is not None:
- condition = condition + '-' + value
- if __had_unspecified_value.has_key(str_toolset_name):
- raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \
- "no value was specified in earlier initialization\n" \
- "an explicit value is specified now" % (toolset, name))
-
-            # The logic below is for the intel compiler. It calls this rule
-            # with 'intel-linux' and 'intel-win' as toolset, so we need to
-            # get the base part of the toolset name.
-            # We can't pass 'intel' as toolset, because in that case it would
-            # be impossible to register versionless intel-linux and
-            # intel-win toolsets of a specific version.
- t = toolset
- m = __re__before_first_dash.match(toolset)
- if m:
- t = m.group(1)
-
- if not __had_value.has_key(str_toolset_name):
- if not __declared_subfeature.has_key(str((t, name))):
- feature.subfeature('toolset', t, name, [], ['propagated'])
- __declared_subfeature[str((t, name))] = True
-
- __had_value[str_toolset_name] = True
-
- feature.extend_subfeature('toolset', t, name, [value])
- subcondition += ['<toolset-' + t + ':' + name + '>' + value ]
-
- else:
- if __had_value.has_key(str_toolset_name):
- raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \
- "an explicit value was specified in an earlier initialization\n" \
- "no value is specified now" % (toolset, name))
-
- __had_unspecified_value[str_toolset_name] = True
-
- if value == None: value = ''
-
- sig = sig + value + '-'
-
- if __all_signatures.has_key(sig):
- message = "duplicate initialization of '%s' with the following parameters: " % toolset
-
- for arg in args:
- name = arg[0]
- value = arg[1]
- if value == None: value = '<unspecified>'
-
- message += "'%s' = '%s'\n" % (name, value)
-
- raise BaseException(message)
-
- __all_signatures[sig] = True
- # FIXME
- __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ;
-
- # If we have a requirement, this version should only be applied under that
- # condition. To accomplish this we add a toolset requirement that imposes
- # the toolset subcondition, which encodes the version.
- if requirement:
- r = ['<toolset>' + toolset, requirement]
- r = ','.join(r)
- toolset.add_requirements([r + ':' + c for c in subcondition])
-
- # We add the requirements, if any, to the condition to scope the toolset
- # variables and options to this specific version.
- condition = [condition]
- if requirement:
- condition += [requirement]
-
- if __show_configuration:
- print "notice:", condition
- return ['/'.join(condition)]
-
-# Ported from trunk@47077
-def get_invocation_command_nodefault(
- toolset, tool, user_provided_command=[], additional_paths=[], path_last=False):
- """
- A helper rule to get the command to invoke some tool. If
- 'user-provided-command' is not given, tries to find binary named 'tool' in
- PATH and in the passed 'additional-path'. Otherwise, verifies that the first
- element of 'user-provided-command' is an existing program.
-
- This rule returns the command to be used when invoking the tool. If we can't
- find the tool, a warning is issued. If 'path-last' is specified, PATH is
- checked after 'additional-paths' when searching for 'tool'.
- """
- assert(isinstance(toolset, str))
- assert(isinstance(tool, str))
- assert(isinstance(user_provided_command, list))
- if additional_paths is not None:
- assert(isinstance(additional_paths, list))
-        assert(all(isinstance(path, str) for path in additional_paths))
- assert(isinstance(path_last, bool))
-
- if not user_provided_command:
- command = find_tool(tool, additional_paths, path_last)
- if not command and __debug_configuration:
-            print "warning: toolset", toolset, "initialization: can't find tool", tool
- #FIXME
- #print "warning: initialized from" [ errors.nearest-user-location ] ;
- else:
- command = check_tool(user_provided_command)
- if not command and __debug_configuration:
- print "warning: toolset", toolset, "initialization:"
- print "warning: can't find user-provided command", user_provided_command
- #FIXME
- #ECHO "warning: initialized from" [ errors.nearest-user-location ]
-
- assert(isinstance(command, str))
-
- return command
-
-# ported from trunk@47174
-def get_invocation_command(toolset, tool, user_provided_command = [],
- additional_paths = [], path_last = False):
- """ Same as get_invocation_command_nodefault, except that if no tool is found,
- returns either the user-provided-command, if present, or the 'tool' parameter.
- """
-
- assert(isinstance(toolset, str))
- assert(isinstance(tool, str))
- assert(isinstance(user_provided_command, list))
- if additional_paths is not None:
- assert(isinstance(additional_paths, list))
- assert(all([isinstance(path, str) for path in additional_paths]))
- assert(isinstance(path_last, bool))
-
- result = get_invocation_command_nodefault(toolset, tool,
- user_provided_command,
- additional_paths,
- path_last)
-
- if not result:
- if user_provided_command:
- result = user_provided_command[0]
- else:
- result = tool
-
- assert(isinstance(result, str))
-
- return result
-
-# ported from trunk@47281
-def get_absolute_tool_path(command):
- """
- Given an invocation command,
-    return the absolute path to the command. This works even if the command
-    has no path element and is present in PATH.
- """
- if os.path.dirname(command):
- return os.path.dirname(command)
- else:
- programs = path.programs_path()
- m = path.glob(programs, [command, command + '.exe' ])
- if not len(m):
- print "Could not find:", command, "in", programs
- return os.path.dirname(m[0])
-
-# ported from trunk@47174
-def find_tool(name, additional_paths = [], path_last = False):
- """ Attempts to find tool (binary) named 'name' in PATH and in
- 'additional-paths'. If found in path, returns 'name'. If
- found in additional paths, returns full name. If the tool
- is found in several directories, returns the first path found.
- Otherwise, returns the empty string. If 'path_last' is specified,
- path is checked after 'additional_paths'.
- """
- assert(isinstance(name, str))
- assert(isinstance(additional_paths, list))
- assert(isinstance(path_last, bool))
-
- programs = path.programs_path()
- match = path.glob(programs, [name, name + '.exe'])
- additional_match = path.glob(additional_paths, [name, name + '.exe'])
-
- result = []
- if path_last:
- result = additional_match
- if not result and match:
- result = match
-
- else:
- if match:
- result = match
-
- elif additional_match:
- result = additional_match
-
- if result:
- return path.native(result[0])
- else:
- return ''
-
-#ported from trunk@47281
-def check_tool_aux(command):
- """ Checks if 'command' can be found either in path
- or is a full name to an existing file.
- """
- assert(isinstance(command, str))
- dirname = os.path.dirname(command)
- if dirname:
- if os.path.exists(command):
- return command
- # Both NT and Cygwin will run .exe files by their unqualified names.
- elif on_windows() and os.path.exists(command + '.exe'):
- return command
- # Only NT will run .bat files by their unqualified names.
- elif os_name() == 'NT' and os.path.exists(command + '.bat'):
- return command
- else:
- paths = path.programs_path()
- if path.glob(paths, [command]):
- return command
-
-# ported from trunk@47281
-def check_tool(command):
- """ Checks that a tool can be invoked by 'command'.
- If command is not an absolute path, checks if it can be found in 'path'.
-        If command is an absolute path, checks that it exists. Returns 'command'
- if ok and empty string otherwise.
- """
- assert(isinstance(command, list))
- assert(all(isinstance(c, str) for c in command))
- #FIXME: why do we check the first and last elements????
- if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
- return command
-
-# ported from trunk@47281
-def handle_options(tool, condition, command, options):
- """ Handle common options for toolset, specifically sets the following
- flag variables:
- - CONFIG_COMMAND to 'command'
-        - OPTIONS for compile to the value of <compileflags> in options
- - OPTIONS for compile.c to the value of <cflags> in options
- - OPTIONS for compile.c++ to the value of <cxxflags> in options
- - OPTIONS for compile.fortran to the value of <fflags> in options
-        - OPTIONS for link to the value of <linkflags> in options
- """
- from b2.build import toolset
-
- assert(isinstance(tool, str))
- assert(isinstance(condition, list))
- assert(isinstance(command, str))
- assert(isinstance(options, list))
- assert(command)
- toolset.flags(tool, 'CONFIG_COMMAND', condition, [command])
- toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options))
- toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options))
- toolset.flags(tool + '.compile.c++', 'OPTIONS', condition, feature.get_values('<cxxflags>', options))
- toolset.flags(tool + '.compile.fortran', 'OPTIONS', condition, feature.get_values('<fflags>', options))
- toolset.flags(tool + '.link', 'OPTIONS', condition, feature.get_values('<linkflags>', options))
-
-# ported from trunk@47281
-def get_program_files_dir():
- """ returns the location of the "program files" directory on a windows
- platform
- """
- ProgramFiles = bjam.variable("ProgramFiles")
- if ProgramFiles:
- ProgramFiles = ' '.join(ProgramFiles)
- else:
- ProgramFiles = "c:\\Program Files"
- return ProgramFiles
-
-# ported from trunk@47281
-def rm_command():
- return __RM
-
-# ported from trunk@47281
-def copy_command():
- return __CP
-
-# ported from trunk@47281
-def variable_setting_command(variable, value):
- """
- Returns the command needed to set an environment variable on the current
- platform. The variable setting persists through all following commands and is
- visible in the environment seen by subsequently executed commands. In other
- words, on Unix systems, the variable is exported, which is consistent with the
- only possible behavior on Windows systems.
- """
- assert(isinstance(variable, str))
- assert(isinstance(value, str))
-
- if os_name() == 'NT':
- return "set " + variable + "=" + value + os.linesep
- else:
- # (todo)
- # The following does not work on CYGWIN and needs to be fixed. On
- # CYGWIN the $(nl) variable holds a Windows new-line \r\n sequence that
- # messes up the executed export command which then reports that the
- # passed variable name is incorrect. This is most likely due to the
- # extra \r character getting interpreted as a part of the variable name.
- #
- # Several ideas pop to mind on how to fix this:
- # * One way would be to separate the commands using the ; shell
- # command separator. This seems like the quickest possible
- # solution but I do not know whether this would break code on any
-        #       platforms I have no access to.
- # * Another would be to not use the terminating $(nl) but that would
- # require updating all the using code so it does not simply
- # prepend this variable to its own commands.
- # * I guess the cleanest solution would be to update Boost Jam to
- # allow explicitly specifying \n & \r characters in its scripts
- # instead of always relying only on the 'current OS native newline
- # sequence'.
- #
- # Some code found to depend on this behaviour:
- # * This Boost Build module.
- # * __test__ rule.
- # * path-variable-setting-command rule.
- # * python.jam toolset.
- # * xsltproc.jam toolset.
- # * fop.jam toolset.
- # (todo) (07.07.2008.) (Jurko)
- #
- # I think that this works correctly in python -- Steven Watanabe
- return variable + "=" + value + os.linesep + "export " + variable + os.linesep
-
-def path_variable_setting_command(variable, paths):
- """
- Returns a command to sets a named shell path variable to the given NATIVE
- paths on the current platform.
- """
- assert(isinstance(variable, str))
- assert(isinstance(paths, list))
- sep = os.path.pathsep
- return variable_setting_command(variable, sep.join(paths))
-
-def prepend_path_variable_command(variable, paths):
- """
- Returns a command that prepends the given paths to the named path variable on
- the current platform.
- """
- return path_variable_setting_command(variable,
- paths + os.environ.get(variable, "").split(os.pathsep))
-
-def file_creation_command():
- """
- Return a command which can create a file. If 'r' is result of invocation, then
- 'r foobar' will create foobar with unspecified content. What happens if file
- already exists is unspecified.
- """
- if os_name() == 'NT':
- return "echo. > "
- else:
- return "touch "
-
-#FIXME: global variable
-__mkdir_set = set()
-__re_windows_drive = re.compile(r'^.*:\\?$')  # matches a bare drive such as c: or c:\
-
-def mkdir(engine, target):
- # If dir exists, do not update it. Do this even for $(DOT).
- bjam.call('NOUPDATE', target)
-
- global __mkdir_set
-
- # FIXME: Where is DOT defined?
- #if $(<) != $(DOT) && ! $($(<)-mkdir):
- if target != '.' and target not in __mkdir_set:
- # Cheesy gate to prevent multiple invocations on same dir.
- __mkdir_set.add(target)
-
- # Schedule the mkdir build action.
- if os_name() == 'NT':
- engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, [])
- else:
- engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, [])
-
- # Prepare a Jam 'dirs' target that can be used to make the build only
- # construct all the target directories.
- engine.add_dependency('dirs', target)
-
- # Recursively create parent directories. $(<:P) = $(<)'s parent & we
- # recurse until root.
-
- s = os.path.dirname(target)
- if os_name() == 'NT':
- if(__re_windows_drive.match(s)):
- s = ''
-
- if s:
- if s != target:
- engine.add_dependency(target, s)
- mkdir(engine, s)
- else:
- bjam.call('NOTFILE', s)
-
-__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)')
-
-def format_name(format, name, target_type, prop_set):
- """ Given a target, as given to a custom tag rule, returns a string formatted
- according to the passed format. Format is a list of properties that is
- represented in the result. For each element of format the corresponding target
- information is obtained and added to the result string. For all, but the
- literal, the format value is taken as the as string to prepend to the output
- to join the item to the rest of the result. If not given "-" is used as a
- joiner.
-
- The format options can be:
-
- <base>[joiner]
- :: The basename of the target name.
- <toolset>[joiner]
- :: The abbreviated toolset tag being used to build the target.
- <threading>[joiner]
- :: Indication of a multi-threaded build.
- <runtime>[joiner]
- :: Collective tag of the build runtime.
- <version:/version-feature | X.Y[.Z]/>[joiner]
- :: Short version tag taken from the given "version-feature"
- in the build properties. Or if not present, the literal
- value as the version number.
- <property:/property-name/>[joiner]
- :: Direct lookup of the given property-name value in the
- build properties. /property-name/ is a regular expression.
- e.g. <property:toolset-.*:flavor> will match every toolset.
- /otherwise/
- :: The literal value of the format argument.
-
- For example this format:
-
- boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
-
- Might return:
-
- boost_thread-vc80-mt-gd-1_33.dll, or
- boost_regex-vc80-gd-1_33.dll
-
- The returned name also has the target type specific prefix and suffix which
- puts it in a ready form to use as the value from a custom tag rule.
- """
- assert(isinstance(format, list))
- assert(isinstance(name, str))
-    assert(isinstance(target_type, str) or not target_type)
- # assert(isinstance(prop_set, property_set.PropertySet))
- if type.is_derived(target_type, 'LIB'):
-        result = ""
- for f in format:
- grist = get_grist(f)
- if grist == '<base>':
- result += os.path.basename(name)
- elif grist == '<toolset>':
- result += join_tag(ungrist(f),
- toolset_tag(name, target_type, prop_set))
- elif grist == '<threading>':
- result += join_tag(ungrist(f),
- threading_tag(name, target_type, prop_set))
- elif grist == '<runtime>':
- result += join_tag(ungrist(f),
- runtime_tag(name, target_type, prop_set))
- elif grist.startswith('<version:'):
- key = grist[len('<version:'):-1]
-            version = prop_set.get('<' + key + '>')
-            if not version:
-                version = key
-            else:
-                version = version[0]
-            version = __re_version.match(version)
-            result += join_tag(ungrist(f), version.group(1) + '_' + version.group(2))
- elif grist.startswith('<property:'):
- key = grist[len('<property:'):-1]
- property_re = re.compile('<(' + key + ')>')
- p0 = None
-            for prop in prop_set.raw():
-                match = property_re.match(prop)
-                if match:
-                    p0 = match.group(1)
-                    break
-            if p0:
-                p = prop_set.get('<' + p0 + '>')
-                if p:
-                    assert(len(p) == 1)
-                    result += join_tag(ungrist(f), p[0])
- else:
- result += ungrist(f)
-
- result = virtual_target.add_prefix_and_suffix(
- ''.join(result), target_type, prop_set)
- return result
-
-def join_tag(joiner, tag):
- if not joiner: joiner = '-'
- return joiner + tag
-
-__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)")
-
-def toolset_tag(name, target_type, prop_set):
- tag = ''
-
- properties = prop_set.raw()
- tools = prop_set.get('<toolset>')
-    assert(len(tools) == 1)
- tools = tools[0]
- if tools.startswith('borland'): tag += 'bcb'
- elif tools.startswith('como'): tag += 'como'
- elif tools.startswith('cw'): tag += 'cw'
- elif tools.startswith('darwin'): tag += 'xgcc'
-    elif tools.startswith('edg'): tag += 'edg'
- elif tools.startswith('gcc'):
-        flavor = prop_set.get('<toolset-gcc:flavor>')
-        if flavor and flavor[0].find('mingw') != -1:
- tag += 'mgw'
- else:
- tag += 'gcc'
- elif tools == 'intel':
- if prop_set.get('<toolset-intel:platform>') == ['win']:
- tag += 'iw'
- else:
- tag += 'il'
- elif tools.startswith('kcc'): tag += 'kcc'
- elif tools.startswith('kylix'): tag += 'bck'
- #case metrowerks* : tag += cw ;
- #case mingw* : tag += mgw ;
- elif tools.startswith('mipspro'): tag += 'mp'
- elif tools.startswith('msvc'): tag += 'vc'
- elif tools.startswith('sun'): tag += 'sw'
- elif tools.startswith('tru64cxx'): tag += 'tru'
- elif tools.startswith('vacpp'): tag += 'xlc'
-
-    version = None
-    for prop in properties:
-        match = __re_toolset_version.match(prop)
-        if match:
-            version = match
-            break
- version_string = None
- # For historical reasons, vc6.0 and vc7.0 use different naming.
- if tag == 'vc':
- if version.group(1) == '6':
- # Cancel minor version.
- version_string = '6'
- elif version.group(1) == '7' and version.group(2) == '0':
- version_string = '7'
-
- # On intel, version is not added, because it does not matter and it's the
- # version of vc used as backend that matters. Ideally, we'd encode the
- # backend version but that would break compatibility with V1.
- elif tag == 'iw':
- version_string = ''
-
- # On borland, version is not added for compatibility with V1.
- elif tag == 'bcb':
- version_string = ''
-
-    if version_string is None and version:
-        version_string = version.group(1) + version.group(2)
-
-    if version_string:
-        tag += version_string
-
- return tag
-
-
-def threading_tag(name, target_type, prop_set):
- tag = ''
- properties = prop_set.raw()
- if '<threading>multi' in properties: tag = 'mt'
-
- return tag
-
-
-def runtime_tag(name, target_type, prop_set ):
- tag = ''
-
- properties = prop_set.raw()
- if '<runtime-link>static' in properties: tag += 's'
-
-    # This is an ugly thing. In V1, there's code to automatically detect which
-    # properties affect a target. So, if <runtime-debugging> does not affect gcc
-    # toolset, the tag rules won't even see <runtime-debugging>. Similar
-    # functionality in V2 is not implemented yet, so we just check for toolsets
-    # which are known to care about runtime debugging.
- if '<toolset>msvc' in properties \
- or '<stdlib>stlport' in properties \
- or '<toolset-intel:platform>win' in properties:
- if '<runtime-debugging>on' in properties: tag += 'g'
-
- if '<python-debugging>on' in properties: tag += 'y'
- if '<variant>debug' in properties: tag += 'd'
- if '<stdlib>stlport' in properties: tag += 'p'
- if '<stdlib-stlport:iostream>hostios' in properties: tag += 'n'
-
- return tag
-
-
-## TODO:
-##rule __test__ ( )
-##{
-## import assert ;
-##
-## local nl = "
-##" ;
-##
-## local save-os = [ modules.peek os : .name ] ;
-##
-## modules.poke os : .name : LINUX ;
-##
-## assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)"
-## : path-variable-setting-command PATH : foo bar baz ;
-##
-## assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)"
-## : prepend-path-variable-command PATH : foo bar ;
-##
-## modules.poke os : .name : NT ;
-##
-## assert.result "set PATH=foo;bar;baz$(nl)"
-## : path-variable-setting-command PATH : foo bar baz ;
-##
-## assert.result "set PATH=foo;bar;%PATH%$(nl)"
-## : prepend-path-variable-command PATH : foo bar ;
-##
-## modules.poke os : .name : $(save-os) ;
-##}
-
-def init(manager):
- engine = manager.engine()
-
- engine.register_action("common.MkDir1-quick-fix-for-unix", 'mkdir -p "$(<)"')
- engine.register_action("common.MkDir1-quick-fix-for-windows", 'if not exist "$(<)\\" mkdir "$(<)"')
-
- import b2.tools.make
- import b2.build.alias
-
- global __RM, __CP, __IGNORE, __LN
- # ported from trunk@47281
- if os_name() == 'NT':
- __RM = 'del /f /q'
- __CP = 'copy'
- __IGNORE = '2>nul >nul & setlocal'
- __LN = __CP
- #if not __LN:
- # __LN = CP
- else:
- __RM = 'rm -f'
- __CP = 'cp'
- __IGNORE = ''
- __LN = 'ln'
-
- engine.register_action("common.Clean", __RM + ' "$(>)"',
- flags=['piecemeal', 'together', 'existing'])
- engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"')
- engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE,
- flags=['quietly', 'updated', 'piecemeal', 'together'])
-
- engine.register_action("common.hard-link",
- __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep +
- __LN + ' "$(>)" "$(<)" $(NULL_OUT)')
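As a quick sanity check of the variable_setting_command / prepend_path_variable_command behaviour documented above (and of the expectations in the commented-out __test__ rule), here is an illustrative stand-alone sketch; the helper names are invented and the real functions also consult os_name() and os.linesep.

    # Approximates the shell snippets the rules above emit; "\n" is hard-coded
    # instead of os.linesep to keep the example platform-independent.
    def sketch_variable_setting_command(variable, value, windows=False):
        if windows:
            return "set %s=%s\n" % (variable, value)
        # On Unix the variable is exported so that child processes see it.
        return "%s=%s\nexport %s\n" % (variable, value, variable)

    def sketch_prepend_path_command(variable, paths, windows=False):
        sep = ';' if windows else ':'
        existing = '%' + variable + '%' if windows else '$' + variable
        return sketch_variable_setting_command(
            variable, sep.join(paths + [existing]), windows)

    print(sketch_prepend_path_command('PATH', ['foo', 'bar']))
    # PATH=foo:bar:$PATH
    # export PATH
    print(sketch_prepend_path_command('PATH', ['foo', 'bar'], windows=True))
    # set PATH=foo;bar;%PATH%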
diff --git a/jam-files/boost-build/tools/como-linux.jam b/jam-files/boost-build/tools/como-linux.jam
deleted file mode 100644
index 5c554c8f..00000000
--- a/jam-files/boost-build/tools/como-linux.jam
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# The following #// line will be used by the regression test table generation
-# program as the column heading for HTML tables. Must not include a version
-# number.
-#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
-
-import toolset ;
-import feature ;
-import toolset : flags ;
-import common ;
-import generators ;
-
-import unix ;
-import como ;
-
-feature.extend-subfeature toolset como : platform : linux ;
-
-toolset.inherit-generators como-linux
- <toolset>como <toolset-como:platform>linux : unix ;
-generators.override como-linux.prebuilt : builtin.lib-generator ;
-generators.override como-linux.searched-lib-generator : searched-lib-generator ;
-toolset.inherit-flags como-linux : unix ;
-toolset.inherit-rules como-linux : gcc ;
-
-generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ
- : <toolset>como <toolset-como:platform>linux ;
-generators.register-c-compiler como-linux.compile.c : C : OBJ
- : <toolset>como <toolset-como:platform>linux ;
-
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters como-linux
- : version $(version) ] ;
-
- command = [ common.get-invocation-command como-linux : como
- : $(command) ] ;
-
- common.handle-options como-linux : $(condition) : $(command) : $(options) ;
-}
-
-
-flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ;
-flags como-linux C++FLAGS <exception-handling>on : --exceptions ;
-
-flags como-linux CFLAGS <inlining>off : --no_inlining ;
-flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ;
-
-flags como-linux CFLAGS <optimization>off : -O0 ;
-flags como-linux CFLAGS <optimization>speed : -O3 ;
-flags como-linux CFLAGS <optimization>space : -Os ;
-
-flags como-linux CFLAGS <debug-symbols>on : -g ;
-flags como-linux LINKFLAGS <debug-symbols>on : -g ;
-
-flags como-linux FINDLIBS : m ;
-flags como-linux FINDLIBS : rt ;
-
-flags como-linux CFLAGS <cflags> ;
-flags como-linux C++FLAGS <cxxflags> ;
-flags como-linux DEFINES <define> ;
-flags como-linux UNDEFS <undef> ;
-flags como-linux HDRS <include> ;
-flags como-linux STDHDRS <sysinclude> ;
-flags como-linux LINKFLAGS <linkflags> ;
-flags como-linux ARFLAGS <arflags> ;
-
-flags como-linux.link LIBRARIES <library-file> ;
-flags como-linux.link LINKPATH <library-path> ;
-flags como-linux.link FINDLIBS-ST <find-static-library> ;
-flags como-linux.link FINDLIBS-SA <find-shared-library> ;
-
-flags como-linux.link RPATH <dll-path> ;
-flags como-linux.link RPATH_LINK <xdll-path> ;
-
-
-actions link bind LIBRARIES
-{
- $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
-}
-
-actions link.dll bind LIBRARIES
-{
- $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
-}
-
-actions compile.c
-{
- $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
-}
-
-actions compile.c++
-{
- $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
-}
-
-actions archive
-{
- ar rcu $(<) $(>)
-}
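The init rule above follows the same three-step pattern used by most toolset modules in this tree (check-init-parameters, get-invocation-command, handle-options). Below is a rough Python sketch of that flow, with invented helper names and no claim to match the real module APIs.

    # Illustrative only: builds the flag-scoping condition and resolves the
    # compiler command the way the jam init rules above conceptually do.
    def sketch_toolset_init(toolset, version=None, command=None, options=()):
        # check-init-parameters: a condition such as "<toolset>como-linux-4.3"
        # scopes every flag set later to this particular configuration.
        condition = '<toolset>' + toolset
        if version:
            condition += '-' + version
        # get-invocation-command: prefer the user-supplied command, otherwise
        # fall back to the bare tool name to be looked up on PATH.
        command = command or 'como'
        # handle-options would now record CONFIG_COMMAND plus any <cflags>,
        # <cxxflags> and <linkflags> values under that condition.
        return condition, command, list(options)

    print(sketch_toolset_init('como-linux', '4.3', options=['<cxxflags>-O2']))
    # -> ('<toolset>como-linux-4.3', 'como', ['<cxxflags>-O2'])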
diff --git a/jam-files/boost-build/tools/como-win.jam b/jam-files/boost-build/tools/como-win.jam
deleted file mode 100644
index d21a70d6..00000000
--- a/jam-files/boost-build/tools/como-win.jam
+++ /dev/null
@@ -1,117 +0,0 @@
-# (C) Copyright David Abrahams 2001.
-# (C) Copyright MetaCommunications, Inc. 2004.
-
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# The following #// line will be used by the regression test table generation
-# program as the column heading for HTML tables. Must not include a version
-# number.
-#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
-
-import common ;
-import como ;
-import feature ;
-import generators ;
-import toolset : flags ;
-
-feature.extend-subfeature toolset como : platform : win ;
-
-
-# Initializes the Comeau toolset for Windows. The command is the command which
-# invokes the compiler. You should either set the environment variable
-# COMO_XXX_INCLUDE, where XXX is the backend used (as described in the
-# documentation), or pass it as part of the command, e.g.:
-#
-# using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ;
-#
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters como-win
- : version $(version) ] ;
-
- command = [ common.get-invocation-command como-win : como.exe :
- $(command) ] ;
-
- common.handle-options como-win : $(condition) : $(command) : $(options) ;
-}
-
-generators.register-c-compiler como-win.compile.c++ : CPP : OBJ
- : <toolset>como <toolset-como:platform>win ;
-generators.register-c-compiler como-win.compile.c : C : OBJ
- : <toolset>como <toolset-como:platform>win ;
-
-
-generators.register-linker como-win.link
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : EXE
- : <toolset>como <toolset-como:platform>win ;
-
-# Note that the status of shared library support is not clear, so we do not
-# define the link.dll generator.
-generators.register-archiver como-win.archive
- : OBJ : STATIC_LIB
- : <toolset>como <toolset-como:platform>win ;
-
-
-flags como-win C++FLAGS <exception-handling>off : --no_exceptions ;
-flags como-win C++FLAGS <exception-handling>on : --exceptions ;
-
-flags como-win CFLAGS <inlining>off : --no_inlining ;
-flags como-win CFLAGS <inlining>on <inlining>full : --inlining ;
-
-
-# The following seem to be VC-specific options. At least, when I uncomment
-# them, Comeau with bcc as backend reports that the bcc32 invocation failed.
-#
-#flags como-win CFLAGS <debug-symbols>on : /Zi ;
-#flags como-win CFLAGS <optimization>off : /Od ;
-
-
-flags como-win CFLAGS <cflags> ;
-flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header.
-flags como-win CFLAGS <threading>multi : -D_MT ; # Make sure that our config knows that threading is on.
-flags como-win C++FLAGS <cxxflags> ;
-flags como-win DEFINES <define> ;
-flags como-win UNDEFS <undef> ;
-flags como-win HDRS <include> ;
-flags como-win SYSHDRS <sysinclude> ;
-flags como-win LINKFLAGS <linkflags> ;
-flags como-win ARFLAGS <arflags> ;
-flags como-win NO_WARN <no-warn> ;
-
-#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ;
-#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ;
-
-flags como-win LIBPATH <library-path> ;
-flags como-win LIBRARIES <library-file> ;
-flags como-win FINDLIBS <find-shared-library> ;
-flags como-win FINDLIBS <find-static-library> ;
-
-nl = "
-" ;
-
-
-# For como, we repeat all libraries so that dependencies are always resolved.
-#
-actions link bind LIBRARIES
-{
- $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)"
-}
-
-actions compile.c
-{
- $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)"
-}
-
-actions compile.c++
-{
- $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)"
-}
-
-actions archive
-{
- $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
- lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
-}
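The link and archive actions above funnel the object list through a response file (the @"@(... .rsp ...)" construct), typically to stay under command-line length limits on Windows. A rough Python illustration of the same idea follows; the helper is hypothetical and is not how bjam implements the expansion.

    # Illustrative only: put a long argument list into a response file and pass
    # "@file.rsp" to the tool, which is what the @(...rsp...) expansion achieves.
    import tempfile

    def sketch_response_file_command(tool, args, extra=()):
        rsp = tempfile.NamedTemporaryFile(mode='w', suffix='.rsp', delete=False)
        rsp.write('\n'.join(args))
        rsp.close()
        # The tool re-expands "@file" into the file's contents, so the shell
        # command line itself stays short.
        return [tool] + list(extra) + ['@' + rsp.name]

    print(sketch_response_file_command('como', ['a.obj', 'b.obj'], ['--no_version']))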
diff --git a/jam-files/boost-build/tools/como.jam b/jam-files/boost-build/tools/como.jam
deleted file mode 100644
index 04a05a94..00000000
--- a/jam-files/boost-build/tools/como.jam
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# This is a generic 'como' toolset. Depending on the current system, it
-# forwards either to 'como-linux' or 'como-win' modules.
-
-import feature ;
-import os ;
-import toolset ;
-
-feature.extend toolset : como ;
-feature.subfeature toolset como : platform : : propagated link-incompatible ;
-
-rule init ( * : * )
-{
- if [ os.name ] = LINUX
- {
- toolset.using como-linux :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
- else
- {
- toolset.using como-win :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-
- }
-}
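For completeness, the forwarding that como.jam performs can be pictured with this small sketch; it is illustrative only, and both the platform check and the toolset.using call are approximated.

    # Illustrative only: pick the platform-specific como toolset the way the
    # init rule above does with [ os.name ] = LINUX.
    import platform

    def sketch_como_dispatch(*args):
        target = 'como-linux' if platform.system() == 'Linux' else 'como-win'
        # The jam rule would now run: toolset.using $(target) : <args> ;
        return target, args

    print(sketch_como_dispatch('4.3'))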
diff --git a/jam-files/boost-build/tools/convert.jam b/jam-files/boost-build/tools/convert.jam
deleted file mode 100644
index ac1d7010..00000000
--- a/jam-files/boost-build/tools/convert.jam
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) 2009 Vladimir Prus
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements 'convert' target that takes a bunch of source and
-# tries to convert each one to the specified type.
-#
-# For example:
-#
-# convert objects obj : a.cpp b.cpp ;
-#
-
-import targets ;
-import generators ;
-import project ;
-import type ;
-import "class" : new ;
-
-class convert-target-class : typed-target
-{
- rule __init__ ( name : project : type
- : sources * : requirements * : default-build * : usage-requirements * )
- {
- typed-target.__init__ $(name) : $(project) : $(type)
- : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local r = [ generators.construct $(self.project) : $(self.type)
- : [ property-set.create [ $(property-set).raw ] # [ feature.expand
- <main-target-type>$(self.type) ]
- # ]
- : $(source-targets) ] ;
- if ! $(r)
- {
- errors.error "unable to construct" [ full-name ] ;
- }
-
- return $(r) ;
- }
-
-}
-
-rule convert ( name type : sources * : requirements * : default-build *
- : usage-requirements * )
-{
- local project = [ project.current ] ;
-
- # This is a circular module dependency, so it must be imported here
- modules.import targets ;
- targets.main-target-alternative
- [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ]
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ;
-}
-IMPORT $(__name__) : convert : : convert ;
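Conceptually, a request such as "convert objects obj : a.cpp b.cpp ;" maps the rule-name form of a type to a target type and asks the generators to build one target of that type per source. A toy sketch of that mapping (illustrative only; the real rule goes through targets.main-target-alternative and the generators framework, and type.type-from-rule-name does the actual name translation):

    # Illustrative only: the rule-name form of a type roughly upper-cases to
    # the target type, e.g. "obj" -> "OBJ".
    def sketch_convert(name, rule_name_type, sources):
        target_type = rule_name_type.upper().replace('-', '_')
        return [(source, target_type) for source in sources]

    print(sketch_convert('objects', 'obj', ['a.cpp', 'b.cpp']))
    # -> [('a.cpp', 'OBJ'), ('b.cpp', 'OBJ')]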
diff --git a/jam-files/boost-build/tools/cw-config.jam b/jam-files/boost-build/tools/cw-config.jam
deleted file mode 100644
index 1211b7c0..00000000
--- a/jam-files/boost-build/tools/cw-config.jam
+++ /dev/null
@@ -1,34 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for CodeWarrior toolset. To use, just import this module.
-
-import os ;
-import toolset : using ;
-
-if [ os.name ] = NT
-{
- for local R in 9 8 7
- {
- local cw-path = [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
- : "PATH" ] ;
- local cw-version = [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
- : "VERSION" ] ;
- cw-path ?= [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0"
- : "PATH" ] ;
- cw-version ?= $(R).0 ;
-
- if $(cw-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
- }
- using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
- }
- }
-}
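The registry probing above can be mirrored in Python with the standard _winreg module (Python 2). This sketch is illustrative only, reuses the primary key path from the jam code, omits the fallback key, and is of course only meaningful on Windows.

    # Illustrative only: look up CodeWarrior releases newest-first in the
    # registry, as cw-config.jam does with W32_GETREG.
    import _winreg

    def sketch_find_codewarrior():
        for release in (9, 8, 7):
            key_path = (r"SOFTWARE\Metrowerks\CodeWarrior\Product Versions"
                        r"\CodeWarrior for Windows R%d" % release)
            try:
                key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key_path)
                path = _winreg.QueryValueEx(key, "PATH")[0]
                version = _winreg.QueryValueEx(key, "VERSION")[0]
                return version, path
            except WindowsError:
                continue
        return None

    print(sketch_find_codewarrior())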
diff --git a/jam-files/boost-build/tools/cw.jam b/jam-files/boost-build/tools/cw.jam
deleted file mode 100644
index ddcbfeb2..00000000
--- a/jam-files/boost-build/tools/cw.jam
+++ /dev/null
@@ -1,246 +0,0 @@
-# Copyright (C) Reece H Dunn 2004
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# based on the msvc.jam toolset
-
-import property ;
-import generators ;
-import os ;
-import type ;
-import toolset : flags ;
-import errors : error ;
-import feature : feature get-values ;
-import path ;
-import sequence : unique ;
-import common ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-feature.extend toolset : cw ;
-
-toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ;
-
-nl = "
-" ;
-
-rule init ( version ? : command * : options * )
-{
- # TODO: fix the $(command[1]) = $(compiler) issue
-
- setup = [ get-values <setup> : $(options) ] ;
- setup ?= cwenv.bat ;
- compiler = [ get-values <compiler> : $(options) ] ;
- compiler ?= mwcc ;
- linker = [ get-values <linker> : $(options) ] ;
- linker ?= mwld ;
-
- local condition = [ common.check-init-parameters cw :
- version $(version) ] ;
-
- command = [ common.get-invocation-command cw : mwcc.exe : $(command) :
- [ default-paths $(version) ] ] ;
-
- common.handle-options cw : $(condition) : $(command) : $(options) ;
-
- local root = [ feature.get-values <root> : $(options) ] ;
- if $(command)
- {
- command = [ common.get-absolute-tool-path $(command[-1]) ] ;
- }
- local tool-root = $(command) ;
-
- setup = $(tool-root)\\$(setup) ;
-
- # map the batch file in setup so it can be executed
-
- other-tools = $(tool-root:D) ;
- root ?= $(other-tools:D) ;
-
- flags cw.link RUN_PATH $(condition) :
- "$(root)\\Win32-x86 Support\\Libraries\\Runtime"
- "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ;
-
- setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ;
-
- if [ os.name ] = NT
- {
- setup = $(setup)"
-" ;
- }
- else
- {
- setup = "cmd /S /C "$(setup)" \"&&\" " ;
- }
-
- # bind the setup command to the tool so it can be executed before the
- # command
-
- local prefix = $(setup) ;
-
- flags cw.compile .CC $(condition) : $(prefix)$(compiler) ;
- flags cw.link .LD $(condition) : $(prefix)$(linker) ;
- flags cw.archive .LD $(condition) : $(prefix)$(linker) ;
-
- if [ MATCH ^([89]\\.) : $(version) ]
- {
- if [ os.name ] = NT
- {
- # The runtime libraries
- flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ;
- flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ;
-
- flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ;
- flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ;
-
- flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ;
- flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ;
- }
- }
-}
-
-
-local rule default-paths ( version ? ) # FIXME
-{
- local possible-paths ;
- local ProgramFiles = [ common.get-program-files-dir ] ;
-
- # TODO: add support for cw8 and cw9 detection
-
- local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ;
- possible-paths += $(version-6-path) ;
-
- # perform post-processing
-
- possible-paths
- = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ;
-
- possible-paths += [ modules.peek : PATH Path path ] ;
-
- return $(possible-paths) ;
-}
-
-
-
-
-## declare generators
-
-generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ;
-generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ;
-
-generators.register-linker cw.link
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : EXE
- : <toolset>cw
- ;
-generators.register-linker cw.link.dll
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : SHARED_LIB IMPORT_LIB
- : <toolset>cw
- ;
-
-generators.register-archiver cw.archive
- : OBJ
- : STATIC_LIB
- : <toolset>cw
- ;
-
-## compilation phase
-
-flags cw WHATEVER <toolset-cw:version> ;
-
-flags cw.compile CFLAGS <debug-symbols>on : -g ;
-flags cw.compile CFLAGS <optimization>off : -O0 ;
-flags cw.compile CFLAGS <optimization>speed : -O4,p ;
-flags cw.compile CFLAGS <optimization>space : -O4,s ;
-flags cw.compile CFLAGS <inlining>off : -inline off ;
-flags cw.compile CFLAGS <inlining>on : -inline on ;
-flags cw.compile CFLAGS <inlining>full : -inline all ;
-flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ;
-
-
-flags cw.compile CFLAGS <rtti>on : -RTTI on ;
-flags cw.compile CFLAGS <rtti>off : -RTTI off ;
-
-flags cw.compile CFLAGS <warnings>on : -w on ;
-flags cw.compile CFLAGS <warnings>off : -w off ;
-flags cw.compile CFLAGS <warnings>all : -w all ;
-flags cw.compile CFLAGS <warnings-as-errors>on : -w error ;
-
-flags cw.compile USER_CFLAGS <cflags> : ;
-flags cw.compile.c++ USER_CFLAGS <cxxflags> : ;
-
-flags cw.compile DEFINES <define> ;
-flags cw.compile UNDEFS <undef> ;
-flags cw.compile INCLUDES <include> ;
-
-actions compile.c
-{
- $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
-}
-actions compile.c++
-{
- $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
-}
-
-## linking phase
-
-flags cw.link DEF_FILE <def-file> ;
-
-flags cw LINKFLAGS : -search ;
-flags cw LINKFLAGS <debug-symbols>on : -g ;
-flags cw LINKFLAGS <user-interface>console : -subsystem console ;
-flags cw LINKFLAGS <user-interface>gui : -subsystem windows ;
-flags cw LINKFLAGS <user-interface>wince : -subsystem wince ;
-flags cw LINKFLAGS <user-interface>native : -subsystem native ;
-flags cw LINKFLAGS <user-interface>auto : -subsystem auto ;
-
-flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ;
-
-flags cw.link USER_LINKFLAGS <linkflags> ;
-flags cw.link LINKPATH <library-path> ;
-
-flags cw.link FINDLIBS_ST <find-static-library> ;
-flags cw.link FINDLIBS_SA <find-shared-library> ;
-flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ;
-flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
-
-rule link.dll ( targets + : sources * : properties * )
-{
- DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
-}
-
-if [ os.name ] in NT
-{
- actions archive
- {
- if exist "$(<[1])" DEL "$(<[1])"
- $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
- }
-}
-else # cygwin
-{
- actions archive
- {
- _bbv2_out_="$(<)"
- if test -f "$_bbv2_out_" ; then
- _bbv2_existing_="$(<:W)"
- fi
- $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
- }
-}
-
-actions link bind DEF_FILE
-{
- $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
-}
-
-actions link.dll bind DEF_FILE
-{
- $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
-}
-
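The compile, link, and archive actions above push most of their arguments through an on-the-fly response file (the @"@(....rsp:E=...)" expression) so the CodeWarrior tools are not limited by the Windows command-line length. A minimal Python sketch of that idea (the helper name and tool invocation are illustrative only, not part of this toolset):

import subprocess
import tempfile

def run_with_response_file(tool, fixed_args, long_args):
    # Write the long argument list (objects, -D defines, -I includes, ...)
    # to a temporary .rsp file, one entry per line.
    with tempfile.NamedTemporaryFile("w", suffix=".rsp", delete=False) as rsp:
        rsp.write("\n".join(long_args))
        rsp_name = rsp.name
    # Reference the response file from the actual command line with @file,
    # keeping the command line itself short.
    return subprocess.call([tool] + fixed_args + ["@" + rsp_name])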
diff --git a/jam-files/boost-build/tools/darwin.jam b/jam-files/boost-build/tools/darwin.jam
deleted file mode 100644
index bb6dd45e..00000000
--- a/jam-files/boost-build/tools/darwin.jam
+++ /dev/null
@@ -1,568 +0,0 @@
-# Copyright 2003 Christopher Currie
-# Copyright 2006 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Copyright 2005-2007 Mat Marcus
-# Copyright 2005-2007 Adobe Systems Incorporated
-# Copyright 2007-2010 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
-# for explanation why it's a separate toolset.
-
-import feature : feature ;
-import toolset : flags ;
-import type ;
-import common ;
-import generators ;
-import path : basename ;
-import version ;
-import property-set ;
-import regex ;
-import errors ;
-
-## Use a framework.
-feature framework : : free ;
-
-## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
-feature macosx-version : : propagated link-incompatible symmetric optional ;
-
-## The minimal MacOSX version to target.
-feature macosx-version-min : : propagated optional ;
-
-## A dependency, that is forced to be included in the link.
-feature force-load : : free dependency incidental ;
-
-#############################################################################
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-feature.extend toolset : darwin ;
-import gcc ;
-toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
-
-generators.override darwin.prebuilt : builtin.prebuilt ;
-generators.override darwin.searched-lib-generator : searched-lib-generator ;
-
-# Override default do-nothing generators.
-generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
-generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-type.set-generated-target-suffix PCH : <toolset>darwin : gch ;
-
-toolset.inherit-rules darwin : gcc : localize ;
-toolset.inherit-flags darwin : gcc
- : <runtime-link>static
- <architecture>arm/<address-model>32
- <architecture>arm/<address-model>64
- <architecture>arm/<instruction-set>
- <architecture>x86/<address-model>32
- <architecture>x86/<address-model>64
- <architecture>x86/<instruction-set>
- <architecture>power/<address-model>32
- <architecture>power/<address-model>64
- <architecture>power/<instruction-set> ;
-
-# Options:
-#
-# <root>PATH
-# Platform root path. The common autodetection will set this to
-# "/Developer". And when a command is given it will be set to
-# the corresponding "*.platform/Developer" directory.
-#
-rule init ( version ? : command * : options * : requirement * )
-{
-    # First time around, figure out what the host OSX version is.
- if ! $(.host-osx-version)
- {
- .host-osx-version = [ MATCH "^([0-9.]+)"
- : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
- if $(.debug-configuration)
- {
- ECHO notice: OSX version on this machine is $(.host-osx-version) ;
- }
- }
-
- # - The root directory of the tool install.
- local root = [ feature.get-values <root> : $(options) ] ;
-
- # - The bin directory where to find the commands to execute.
- local bin ;
-
- # - The configured compile driver command.
- local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;
-
- # The version as reported by the compiler
- local real-version ;
-
- # - Autodetect the root and bin dir if not given.
- if $(command)
- {
- bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
- if $(bin) = "/usr/bin"
- {
- root ?= /Developer ;
- }
- else
- {
- local r = $(bin:D) ;
- r = $(r:D) ;
- root ?= $(r) ;
- }
- }
-
- # - Autodetect the version if not given.
- if $(command)
- {
- # - The 'command' variable can have multiple elements. When calling
- # the SHELL builtin we need a single string.
- local command-string = $(command:J=" ") ;
- real-version = [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- version ?= $(real-version) ;
- }
-
- .real-version.$(version) = $(real-version) ;
-
- # - Define the condition for this toolset instance.
- local condition =
- [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;
-
- # - Set the toolset generic common options.
- common.handle-options darwin : $(condition) : $(command) : $(options) ;
-
-    # - GCC 4.0 and higher on Darwin do not have -fcoalesce-templates.
- if $(real-version) < "4.0.0"
- {
- flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
- }
-    # - GCC 4.2 and higher on Darwin do not have -Wno-long-double.
- if $(real-version) < "4.2.0"
- {
- flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
- }
-
- # - Set the link flags common with the GCC toolset.
- gcc.init-link-flags darwin darwin $(condition) ;
-
- # - The symbol strip program.
- local strip ;
- if <striper> in $(options)
- {
-        # We can turn off strip by specifying it as empty, in which case we
-        # switch to using the linker to do the strip.
- flags darwin.link.dll OPTIONS
- $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
- flags darwin.link.dll OPTIONS
- $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
- flags darwin.link OPTIONS
- $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
- flags darwin.link OPTIONS
- $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
- }
- else
- {
-        # Otherwise we need to find a strip program to use, and hence also
-        # tell the link action that we need to use a strip post-process.
- flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
- strip =
- [ common.get-invocation-command darwin
- : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
- flags darwin.link .STRIP $(condition) : $(strip[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using strip for $(condition) at $(strip[1]) ;
- }
- }
-
-    # - The archive builder (libtool is the default, as creating
-    #   archives in darwin is complicated).
- local archiver =
- [ common.get-invocation-command darwin
- : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
- flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using archiver for $(condition) at $(archiver[1]) ;
- }
-
- # - Initialize the SDKs available in the root for this tool.
- local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;
-
- #~ ECHO --- ;
- #~ ECHO --- bin :: $(bin) ;
- #~ ECHO --- root :: $(root) ;
- #~ ECHO --- version :: $(version) ;
- #~ ECHO --- condition :: $(condition) ;
- #~ ECHO --- strip :: $(strip) ;
- #~ ECHO --- archiver :: $(archiver) ;
- #~ ECHO --- sdks :: $(sdks) ;
- #~ ECHO --- ;
- #~ EXIT ;
-}
-
-# Add and set options for a discovered SDK version.
-local rule init-sdk ( condition * : root ? : version + : version-feature ? )
-{
- local rule version-to-feature ( version + )
- {
- switch $(version[1])
- {
- case iphone* :
- {
- return $(version[1])-$(version[2-]:J=.) ;
- }
- case mac* :
- {
- return $(version[2-]:J=.) ;
- }
- case * :
- {
- return $(version:J=.) ;
- }
- }
- }
-
- if $(version-feature)
- {
- if $(.debug-configuration)
- {
- ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ;
- }
-
- # Add the version to the features for specifying them.
- if ! $(version-feature) in [ feature.values macosx-version ]
- {
- feature.extend macosx-version : $(version-feature) ;
- }
- if ! $(version-feature) in [ feature.values macosx-version-min ]
- {
- feature.extend macosx-version-min : $(version-feature) ;
- }
-
- # Set the flags the version needs to compile with, first
- # generic options.
- flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
- : -isysroot $(sdk) ;
- flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
- : -isysroot $(sdk) ;
-
- # Then device variation options.
- switch $(version[1])
- {
- case iphonesim* :
- {
- local N = $(version[2]) ;
- if ! $(version[3]) { N += 00 ; }
- else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
- else { N += 0$(version[3]) ; }
- if ! $(version[4]) { N += 00 ; }
- else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
- else { N += 0$(version[4]) ; }
- N = $(N:J=) ;
- flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
- : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
- flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
- : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
- }
-
- case iphone* :
- {
- flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
- : -miphoneos-version-min=$(version[2-]:J=.) ;
- flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
- : -miphoneos-version-min=$(version[2-]:J=.) ;
- }
-
- case mac* :
- {
- flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
- : -mmacosx-version-min=$(version[2-]:J=.) ;
- flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
- : -mmacosx-version-min=$(version[2-]:J=.) ;
- }
- }
-
- return $(version-feature) ;
- }
- else if $(version[4])
- {
-        # We have a patch version of an SDK. We want to set up both the
-        # minor version and the specific patch version, so we recurse to set
-        # up the minor version and then set up the patch version as well.
- return
- [ init-sdk $(condition) : $(root)
- : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
- [ init-sdk $(condition) : $(root)
- : $(version) : [ version-to-feature $(version) ] ] ;
- }
- else
- {
- # Yes, this is intentionally recursive.
- return
- [ init-sdk $(condition) : $(root)
- : $(version) : [ version-to-feature $(version) ] ] ;
- }
-}
-
-# Determine the MacOSX SDK versions installed and their locations.
-local rule init-available-sdk-versions ( condition * : root ? )
-{
- root ?= /Developer ;
- local sdks-root = $(root)/SDKs ;
- local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
- local result ;
- for local sdk in $(sdks)
- {
- local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ;
- local sdk-platform = $(sdk-match[1]:L) ;
- local sdk-version = $(sdk-match[2-]) ;
- if $(sdk-version)
- {
- switch $(sdk-platform)
- {
- case macosx :
- {
- sdk-version = mac $(sdk-version) ;
- }
- case iphoneos :
- {
- sdk-version = iphone $(sdk-version) ;
- }
- case iphonesimulator :
- {
- sdk-version = iphonesim $(sdk-version) ;
- }
- case * :
- {
- sdk-version = $(sdk-version:J=-) ;
- }
- }
- result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
- }
- }
- return $(result) ;
-}
-
-# Generic options.
-flags darwin.compile OPTIONS <flags> ;
-
-# The following adds objective-c support to darwin.
-# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
-
-generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
-generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
-
-rule setup-address-model ( targets * : sources * : properties * )
-{
- local ps = [ property-set.create $(properties) ] ;
- local arch = [ $(ps).get <architecture> ] ;
- local address-model = [ $(ps).get <address-model> ] ;
- local osx-version = [ $(ps).get <macosx-version> ] ;
- local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
- gcc-version = $(.real-version.$(gcc-version)) ;
- local options ;
-
- local support-ppc64 = 1 ;
-
- osx-version ?= $(.host-osx-version) ;
-
- switch $(osx-version)
- {
- case iphone* :
- {
- support-ppc64 = ;
- }
-
- case * :
- if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
- {
- # When targeting 10.6:
-          #   - gcc 4.2 will give a compiler error if ppc64 compilation is requested
- # - gcc 4.0 will compile fine, somehow, but then fail at link time
- support-ppc64 = ;
- }
- }
- switch $(arch)
- {
- case combined :
- {
- if $(address-model) = 32_64 {
- if $(support-ppc64) {
- options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
- } else {
- # Build 3-way binary
- options = -arch i386 -arch ppc -arch x86_64 ;
- }
- } else if $(address-model) = 64 {
- if $(support-ppc64) {
- options = -arch x86_64 -arch ppc64 ;
- } else {
- errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
- }
- } else {
- options = -arch i386 -arch ppc ;
- }
- }
-
- case x86 :
- {
- if $(address-model) = 32_64 {
- options = -arch i386 -arch x86_64 ;
- } else if $(address-model) = 64 {
- options = -arch x86_64 ;
- } else {
- options = -arch i386 ;
- }
- }
-
- case power :
- {
- if ! $(support-ppc64)
- && ( $(address-model) = 32_64 || $(address-model) = 64 )
- {
- errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
- }
-
- if $(address-model) = 32_64 {
- options = -arch ppc -arch ppc64 ;
- } else if $(address-model) = 64 {
- options = -arch ppc64 ;
- } else {
- options = -arch ppc ;
- }
- }
-
- case arm :
- {
- options = -arch armv6 ;
- }
- }
-
- if $(options)
- {
- OPTIONS on $(targets) += $(options) ;
- }
-}
-
-rule setup-threading ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
-}
-
-rule setup-fpic ( targets * : sources * : properties * )
-{
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
-}
-
-rule compile.m ( targets * : sources * : properties * )
-{
- LANG on $(<) = "-x objective-c" ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.m
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.mm ( targets * : sources * : properties * )
-{
- LANG on $(<) = "-x objective-c++" ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.mm
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# Set the max header padding to allow renaming of libs for installation.
-flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
-
-# To link the static runtime we need to link to all the core runtime libraries.
-flags darwin.link OPTIONS <runtime-link>static
- : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
-
-# Strip as much as possible when optimizing.
-flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
-flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
-
-# Dynamic/shared linking.
-flags darwin.compile OPTIONS <link>shared : -dynamic ;
-
-# Misc options.
-flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ;
-#~ flags darwin.link OPTIONS : -fexceptions ;
-
-# Add the framework names to use.
-flags darwin.link FRAMEWORK <framework> ;
-
-# Add the dependencies that are forced to be included in the link.
-flags darwin.link FORCE_LOAD <force-load> ;
-
-# This flag is useful for debugging the link step;
-# uncomment to see what libtool is doing under the hood.
-#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
-
-_ = " " ;
-
-# set up the -F option to include the paths to any frameworks used.
-local rule prepare-framework-path ( target + )
-{
-    # The -framework option only takes the basename of the framework.
-    # The -F option specifies the directories in which a framework is
-    # searched for. So, if we find a <framework> feature with a path,
-    # we need to generate the corresponding -F option.
- local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ;
-
- # Be sure to generate no -F if there's no path.
- for local framework-path in $(framework-paths)
- {
- if $(framework-path) != ""
- {
- FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
- }
- }
-}
-
-rule link ( targets * : sources * : properties * )
-{
- DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- prepare-framework-path $(<) ;
-}
-
-# Note that using strip without any options was reported to result in broken
-# binaries, at least on OS X 10.5.5, see:
-# http://svn.boost.org/trac/boost/ticket/2347
-# So we pass -S -x.
-actions link bind LIBRARIES FORCE_LOAD
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
- $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
-}
-
-rule link.dll ( targets * : sources * : properties * )
-{
- setup-address-model $(targets) : $(sources) : $(properties) ;
- prepare-framework-path $(<) ;
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
-}
-
-# We use libtool instead of ar to support universal binary linking
-# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
-actions piecemeal archive
-{
- "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
-}
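In the iphonesim case of the init-sdk rule in darwin.jam above, the minimum SDK version is encoded as a zero-padded integer for -D__IPHONE_OS_VERSION_MIN_REQUIRED: the major number followed by two digits each for minor and patch. A small illustrative Python sketch of that encoding (the function name is made up, not part of the toolset):

def encode_min_required(parts):
    # parts is e.g. [3, 1, 2] for SDK 3.1.2; a missing minor/patch counts as 0.
    major = str(parts[0])
    minor = "%02d" % (parts[1] if len(parts) > 1 else 0)
    patch = "%02d" % (parts[2] if len(parts) > 2 else 0)
    return int(major + minor + patch)

# 3.1.2 -> 30102, 3.2 -> 30200, matching the jam logic above.
assert encode_min_required([3, 1, 2]) == 30102
assert encode_min_required([3, 2]) == 30200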
diff --git a/jam-files/boost-build/tools/darwin.py b/jam-files/boost-build/tools/darwin.py
deleted file mode 100644
index c2919606..00000000
--- a/jam-files/boost-build/tools/darwin.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright (C) Christopher Currie 2003. Permission to copy, use,
-# modify, sell and distribute this software is granted provided this
-# copyright notice appears in all copies. This software is provided
-# "as is" without express or implied warranty, and with no claim as to
-# its suitability for any purpose.
-
-# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
-# for explanation why it's a separate toolset.
-
-import common, gcc, builtin
-from b2.build import feature, toolset, type, action, generators
-from b2.util.utility import *
-
-toolset.register ('darwin')
-
-toolset.inherit_generators ('darwin', [], 'gcc')
-toolset.inherit_flags ('darwin', 'gcc')
-toolset.inherit_rules ('darwin', 'gcc')
-
-def init (version = None, command = None, options = None):
- options = to_seq (options)
-
- condition = common.check_init_parameters ('darwin', None, ('version', version))
-
- command = common.get_invocation_command ('darwin', 'g++', command)
-
- common.handle_options ('darwin', condition, command, options)
-
- gcc.init_link_flags ('darwin', 'darwin', condition)
-
-# Darwin has a different shared library suffix
-type.set_generated_target_suffix ('SHARED_LIB', ['<toolset>darwin'], 'dylib')
-
-# we need to be able to tell the type of .dylib files
-type.register_suffixes ('dylib', 'SHARED_LIB')
-
-feature.feature ('framework', [], ['free'])
-
-toolset.flags ('darwin.compile', 'OPTIONS', '<link>shared', ['-dynamic'])
-toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp'])
-toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates'])
-
-toolset.flags ('darwin.link', 'FRAMEWORK', '<framework>')
-
-# This flag is useful for debugging the link step;
-# uncomment to see what libtool is doing under the hood.
-# toolset.flags ('darwin.link.dll', 'OPTIONS', None, ['-Wl,-v'])
-
-action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)'])
-
-# TODO: how to set 'bind LIBRARIES'?
-action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)'])
-
-def darwin_archive (manager, targets, sources, properties):
- pass
-
-action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"'])
diff --git a/jam-files/boost-build/tools/dmc.jam b/jam-files/boost-build/tools/dmc.jam
deleted file mode 100644
index 8af8725a..00000000
--- a/jam-files/boost-build/tools/dmc.jam
+++ /dev/null
@@ -1,134 +0,0 @@
-# Digital Mars C++
-
-# (C) Copyright Christof Meerwald 2003.
-# (C) Copyright Aleksey Gurtovoy 2004.
-# (C) Copyright Arjan Knepper 2006.
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# The following #// line will be used by the regression test table generation
-# program as the column heading for HTML tables. Must not include version number.
-#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a>
-
-import feature generators common ;
-import toolset : flags ;
-import sequence regex ;
-
-feature.extend toolset : dmc ;
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters dmc : version $(version) ] ;
-
- local command = [ common.get-invocation-command dmc : dmc : $(command) ] ;
- command ?= dmc ;
-
- common.handle-options dmc : $(condition) : $(command) : $(options) ;
-
- if $(command)
- {
- command = [ common.get-absolute-tool-path $(command[-1]) ] ;
- }
- root = $(command:D) ;
-
- if $(root)
- {
-        # The DMC linker is sensitive to the direction of slashes, and
-        # won't link if forward slashes are used in the command.
- root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ;
- flags dmc .root $(condition) : $(root)\\bin\\ ;
- }
- else
- {
- flags dmc .root $(condition) : "" ;
- }
-}
-
-
-# Declare generators
-generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ;
-generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ;
-
-generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ;
-generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ;
-generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ;
-
-
-# Declare flags
-# dmc optlink has some limitations on the amount of debug info included. Therefore only line numbers are enabled in debug builds.
-# flags dmc.compile OPTIONS <debug-symbols>on : -g ;
-flags dmc.compile OPTIONS <debug-symbols>on : -gl ;
-flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ;
-flags dmc.link OPTIONS <debug-symbols>off : /PACKF ;
-
-flags dmc.compile OPTIONS <optimization>off : -S -o+none ;
-flags dmc.compile OPTIONS <optimization>speed : -o+time ;
-flags dmc.compile OPTIONS <optimization>space : -o+space ;
-flags dmc.compile OPTIONS <exception-handling>on : -Ae ;
-flags dmc.compile OPTIONS <rtti>on : -Ar ;
-# FIXME:
-# When compiling sources to be linked into a shared lib (dll), the -WD cflag should be used.
-# When compiling sources to be linked into a static lib (lib) or an executable, the -WA cflag should be used.
-# But for some reason the -WD cflag is always in use.
-# flags dmc.compile OPTIONS <link>shared : -WD ;
-# flags dmc.compile OPTIONS <link>static : -WA ;
-
-# Note that these two options actually imply multithreading support on DMC
-# because there is no single-threaded dynamic runtime library. Specifying
-# <threading>multi would be a bad idea, though, because no option would be
-# matched when the build uses the default settings of <runtime-link>dynamic
-# and <threading>single.
-flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ;
-flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ;
-
-flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ;
-flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ;
-flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ;
-flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ;
-
-flags dmc.compile OPTIONS : <cflags> ;
-flags dmc.compile.c++ OPTIONS : <cxxflags> ;
-
-flags dmc.compile DEFINES : <define> ;
-flags dmc.compile INCLUDES : <include> ;
-
-flags dmc.link <linkflags> ;
-flags dmc.archive OPTIONS <arflags> ;
-
-flags dmc LIBPATH <library-path> ;
-flags dmc LIBRARIES <library-file> ;
-flags dmc FINDLIBS <find-library-sa> ;
-flags dmc FINDLIBS <find-library-st> ;
-
-actions together link bind LIBRARIES
-{
- "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
-}
-
-actions together link.dll bind LIBRARIES
-{
- echo LIBRARY "$(<[1])" > $(<[2]:B).def
- echo DESCRIPTION 'A Library' >> $(<[2]:B).def
- echo EXETYPE NT >> $(<[2]:B).def
- echo SUBSYSTEM WINDOWS >> $(<[2]:B).def
- echo CODE EXECUTE READ >> $(<[2]:B).def
- echo DATA READ WRITE >> $(<[2]:B).def
- "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
-}
-
-actions compile.c
-{
- "$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
-}
-
-actions together piecemeal archive
-{
- "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)"
-}
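The link.dll action in dmc.jam above first emits a minimal module-definition (.def) file via echo before invoking the linker. The same file contents could be produced with a small Python helper (illustrative only, not part of the toolset):

def write_def_file(path, library_name):
    # Same directives the dmc.jam action echoes into "$(<[2]:B).def".
    lines = [
        'LIBRARY "%s"' % library_name,
        "DESCRIPTION 'A Library'",
        "EXETYPE NT",
        "SUBSYSTEM WINDOWS",
        "CODE EXECUTE READ",
        "DATA READ WRITE",
    ]
    with open(path, "w") as f:
        f.write("\n".join(lines) + "\n")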
diff --git a/jam-files/boost-build/tools/docutils.jam b/jam-files/boost-build/tools/docutils.jam
deleted file mode 100644
index bf061617..00000000
--- a/jam-files/boost-build/tools/docutils.jam
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for docutils ReStructuredText processing.
-
-import type ;
-import scanner ;
-import generators ;
-import os ;
-import common ;
-import toolset ;
-import path ;
-import feature : feature ;
-import property ;
-
-.initialized = ;
-
-type.register ReST : rst ;
-
-class rst-scanner : common-scanner
-{
- rule __init__ ( paths * )
- {
- common-scanner.__init__ . $(paths) ;
- }
-
- rule pattern ( )
- {
- return "^[ ]*\\.\\.[ ]+include::[ ]+([^
-]+)"
- "^[ ]*\\.\\.[ ]+image::[ ]+([^
-]+)"
- "^[ ]*\\.\\.[ ]+figure::[ ]+([^
-]+)"
- ;
- }
-}
-
-scanner.register rst-scanner : include ;
-type.set-scanner ReST : rst-scanner ;
-
-generators.register-standard docutils.html : ReST : HTML ;
-
-rule init ( docutils-dir ? : tools-dir ? )
-{
- docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
- tools-dir ?= $(docutils-dir)/tools ;
-
- if ! $(.initialized)
- {
- .initialized = true ;
- .docutils-dir = $(docutils-dir) ;
- .tools-dir = $(tools-dir:R="") ;
-
- .setup = [
- common.prepend-path-variable-command PYTHONPATH
- : $(.docutils-dir) $(.docutils-dir)/extras ] ;
- }
-}
-
-rule html ( target : source : properties * )
-{
- if ! [ on $(target) return $(RST2XXX) ]
- {
- local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
- RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
- }
-}
-
-
-feature docutils : : free ;
-feature docutils-html : : free ;
-feature docutils-cmd : : free ;
-toolset.flags docutils COMMON-FLAGS : <docutils> ;
-toolset.flags docutils HTML-FLAGS : <docutils-html> ;
-toolset.flags docutils RST2XXX : <docutils-cmd> ;
-
-actions html
-{
- $(.setup)
- "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
-}
-
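The html action in docutils.jam above runs the configured rst2html.py with PYTHONPATH extended to the docutils checkout and its extras directory (the .setup command built in init). A rough, non-authoritative Python sketch of an equivalent invocation (helper name and interpreter choice are assumptions):

import os
import subprocess

def rst_to_html(rst2html, source, output, docutils_dir):
    # Prepend the docutils checkout and its extras/ directory to PYTHONPATH,
    # mirroring common.prepend-path-variable-command in the init rule above.
    env = dict(os.environ)
    extra = os.pathsep.join([docutils_dir, os.path.join(docutils_dir, "extras")])
    env["PYTHONPATH"] = extra + os.pathsep + env.get("PYTHONPATH", "")
    # Invoke rst2html.py on the source, writing the HTML output.
    subprocess.check_call(["python", rst2html, source, output], env=env)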
diff --git a/jam-files/boost-build/tools/doxproc.py b/jam-files/boost-build/tools/doxproc.py
deleted file mode 100644
index 4cbd5edd..00000000
--- a/jam-files/boost-build/tools/doxproc.py
+++ /dev/null
@@ -1,859 +0,0 @@
-#!/usr/bin/python
-# Copyright 2006 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-'''
-Processing of Doxygen generated XML.
-'''
-
-import os
-import os.path
-import sys
-import time
-import string
-import getopt
-import glob
-import re
-import xml.dom.minidom
-
-
-def usage():
- print '''
-Usage:
- %s options
-
-Options:
- --xmldir Directory with the Doxygen xml result files.
- --output Write the output BoostBook to the given location.
- --id The ID of the top level BoostBook section.
- --title The title of the top level BoostBook section.
- --enable-index Generate additional index sections for classes and
- types.
-''' % ( sys.argv[0] )
-
-
-def get_args( argv = sys.argv[1:] ):
- spec = [
- 'xmldir=',
- 'output=',
- 'id=',
- 'title=',
- 'enable-index',
- 'help' ]
- options = {
- '--xmldir' : 'xml',
- '--output' : None,
- '--id' : 'dox',
- '--title' : 'Doxygen'
- }
- ( option_pairs, other ) = getopt.getopt( argv, '', spec )
- map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs )
-
- if options.has_key( '--help' ):
- usage()
- sys.exit(1)
-
- return {
- 'xmldir' : options['--xmldir'],
- 'output' : options['--output'],
- 'id' : options['--id'],
- 'title' : options['--title'],
- 'index' : options.has_key('--enable-index')
- }
-
-def if_attribute(node, attribute, true_value, false_value=None):
- if node.getAttribute(attribute) == 'yes':
- return true_value
- else:
- return false_value
-
-class Doxygen2BoostBook:
-
- def __init__( self, **kwargs ):
- ##
- self.args = kwargs
- self.args.setdefault('id','')
- self.args.setdefault('title','')
- self.args.setdefault('last_revision', time.asctime())
- self.args.setdefault('index', False)
- self.id = '%(id)s.reference' % self.args
- self.args['id'] = self.id
- #~ This is our template BoostBook document we insert the generated content into.
- self.boostbook = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
-<section id="%(id)s" name="%(title)s" last-revision="%(last_revision)s">
- <title>%(title)s</title>
- <library-reference id="%(id)s.headers">
- <title>Headers</title>
- </library-reference>
- <index id="%(id)s.classes">
- <title>Classes</title>
- </index>
- <index id="%(id)s.index">
- <title>Index</title>
- </index>
-</section>
-''' % self.args )
- self.section = {
- 'headers' : self._getChild('library-reference',id='%(id)s.headers' % self.args),
- 'classes' : self._getChild('index',id='%(id)s.classes' % self.args),
- 'index' : self._getChild('index',id='%(id)s.index' % self.args)
- }
- #~ Remove the index sections if we aren't generating it.
- if not self.args['index']:
- self.section['classes'].parentNode.removeChild(self.section['classes'])
- self.section['classes'].unlink()
- del self.section['classes']
- self.section['index'].parentNode.removeChild(self.section['index'])
- self.section['index'].unlink()
- del self.section['index']
- #~ The symbols, per Doxygen notion, that we translated.
- self.symbols = {}
- #~ Map of Doxygen IDs and BoostBook IDs, so we can translate as needed.
- self.idmap = {}
- #~ Marks generation, to prevent redoing it.
- self.generated = False
-
-    #~ Add a Doxygen-generated XML document to the content we are translating.
- def addDox( self, document ):
- self._translateNode(document.documentElement)
-
- #~ Turns the internal XML tree into an output UTF-8 string.
- def tostring( self ):
- self._generate()
- #~ return self.boostbook.toprettyxml(' ')
- return self.boostbook.toxml('utf-8')
-
-    #~ Does post-processing on the partially generated content to generate additional info
- #~ now that we have the complete source documents.
- def _generate( self ):
- if not self.generated:
- self.generated = True
- symbols = self.symbols.keys()
- symbols.sort()
- #~ Populate the header section.
- for symbol in symbols:
- if self.symbols[symbol]['kind'] in ('header'):
- self.section['headers'].appendChild(self.symbols[symbol]['dom'])
- for symbol in symbols:
- if self.symbols[symbol]['kind'] not in ('namespace', 'header'):
- container = self._resolveContainer(self.symbols[symbol],
- self.symbols[self.symbols[symbol]['header']]['dom'])
- if container.nodeName != 'namespace':
- ## The current BoostBook to Docbook translation doesn't
- ## respect, nor assign, IDs to inner types of any kind.
-                    ## So nuke the ID entry so as not to create bogus links.
- del self.idmap[self.symbols[symbol]['id']]
- container.appendChild(self.symbols[symbol]['dom'])
- self._rewriteIDs(self.boostbook.documentElement)
-
- #~ Rewrite the various IDs from Doxygen references to the newly created
- #~ BoostBook references.
- def _rewriteIDs( self, node ):
- if node.nodeName in ('link'):
- if (self.idmap.has_key(node.getAttribute('linkend'))):
- #~ A link, and we have someplace to repoint it at.
- node.setAttribute('linkend',self.idmap[node.getAttribute('linkend')])
- else:
- #~ A link, but we don't have a generated target for it.
- node.removeAttribute('linkend')
- elif hasattr(node,'hasAttribute') and node.hasAttribute('id') and self.idmap.has_key(node.getAttribute('id')):
- #~ Simple ID, and we have a translation.
- node.setAttribute('id',self.idmap[node.getAttribute('id')])
-        #~ Recurse and iterate: a depth-first traversal, which turns out to be
- #~ left-to-right and top-to-bottom for the document.
- if node.firstChild:
- self._rewriteIDs(node.firstChild)
- if node.nextSibling:
- self._rewriteIDs(node.nextSibling)
-
- def _resolveContainer( self, cpp, root ):
- container = root
- for ns in cpp['namespace']:
- node = self._getChild('namespace',name=ns,root=container)
- if not node:
- node = container.appendChild(
- self._createNode('namespace',name=ns))
- container = node
- for inner in cpp['name'].split('::'):
- node = self._getChild(name=inner,root=container)
- if not node:
- break
- container = node
- return container
-
- def _setID( self, id, name ):
- self.idmap[id] = name.replace('::','.').replace('/','.')
- #~ print '--| setID:',id,'::',self.idmap[id]
-
- #~ Translate a given node within a given context.
- #~ The translation dispatches to a local method of the form
- #~ "_translate[_context0,...,_contextN]", and the keyword args are
- #~ passed along. If there is no translation handling method we
- #~ return None.
- def _translateNode( self, *context, **kwargs ):
- node = None
- names = [ ]
- for c in context:
- if c:
- if not isinstance(c,xml.dom.Node):
- suffix = '_'+c.replace('-','_')
- else:
- suffix = '_'+c.nodeName.replace('-','_')
- node = c
- names.append('_translate')
- names = map(lambda x: x+suffix,names)
- if node:
- for name in names:
- if hasattr(self,name):
- return getattr(self,name)(node,**kwargs)
- return None
-
- #~ Translates the children of the given parent node, appending the results
- #~ to the indicated target. For nodes not translated by the translation method
- #~ it copies the child over and recurses on that child to translate any
- #~ possible interior nodes. Hence this will translate the entire subtree.
- def _translateChildren( self, parent, **kwargs ):
- target = kwargs['target']
- for n in parent.childNodes:
- child = self._translateNode(n,target=target)
- if child:
- target.appendChild(child)
- else:
- child = n.cloneNode(False)
- if hasattr(child,'data'):
- child.data = re.sub(r'\s+',' ',child.data)
- target.appendChild(child)
- self._translateChildren(n,target=child)
-
- #~ Translate the given node as a description, into the description subnode
- #~ of the target. If no description subnode is present in the target it
- #~ is created.
- def _translateDescription( self, node, target=None, tag='description', **kwargs ):
- description = self._getChild(tag,root=target)
- if not description:
- description = target.appendChild(self._createNode(tag))
- self._translateChildren(node,target=description)
- return description
-
- #~ Top level translation of: <doxygen ...>...</doxygen>,
- #~ translates the children.
- def _translate_doxygen( self, node ):
- #~ print '_translate_doxygen:', node.nodeName
- result = []
- for n in node.childNodes:
- newNode = self._translateNode(n)
- if newNode:
- result.append(newNode)
- return result
-
- #~ Top level translation of:
- #~ <doxygenindex ...>
- #~ <compound ...>
- #~ <member ...>
- #~ <name>...</name>
- #~ </member>
- #~ ...
- #~ </compound>
- #~ ...
- #~ </doxygenindex>
- #~ builds the class and symbol sections, if requested.
- def _translate_doxygenindex( self, node ):
- #~ print '_translate_doxygenindex:', node.nodeName
- if self.args['index']:
- entries = []
- classes = []
- #~ Accumulate all the index entries we care about.
- for n in node.childNodes:
- if n.nodeName == 'compound':
- if n.getAttribute('kind') not in ('file','dir','define'):
- cpp = self._cppName(self._getChildData('name',root=n))
- entry = {
- 'name' : cpp['name'],
- 'compoundname' : cpp['compoundname'],
- 'id' : n.getAttribute('refid')
- }
- if n.getAttribute('kind') in ('class','struct'):
- classes.append(entry)
- entries.append(entry)
- for m in n.childNodes:
- if m.nodeName == 'member':
- cpp = self._cppName(self._getChildData('name',root=m))
- entry = {
- 'name' : cpp['name'],
- 'compoundname' : cpp['compoundname'],
- 'id' : n.getAttribute('refid')
- }
- if hasattr(m,'getAttribute') and m.getAttribute('kind') in ('class','struct'):
- classes.append(entry)
- entries.append(entry)
- #~ Put them in a sensible order.
- entries.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower()))
- classes.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower()))
- #~ And generate the BoostBook for them.
- self._translate_index_(entries,target=self.section['index'])
- self._translate_index_(classes,target=self.section['classes'])
- return None
-
- #~ Translate a set of index entries in the BoostBook output. The output
- #~ is grouped into groups of the first letter of the entry names.
- def _translate_index_(self, entries, target=None, **kwargs ):
- i = 0
- targetID = target.getAttribute('id')
- while i < len(entries):
- dividerKey = entries[i]['name'][0].upper()
- divider = target.appendChild(self._createNode('indexdiv',id=targetID+'.'+dividerKey))
- divider.appendChild(self._createText('title',dividerKey))
- while i < len(entries) and dividerKey == entries[i]['name'][0].upper():
- iename = entries[i]['name']
- ie = divider.appendChild(self._createNode('indexentry'))
- ie = ie.appendChild(self._createText('primaryie',iename))
- while i < len(entries) and entries[i]['name'] == iename:
- ie.appendChild(self.boostbook.createTextNode(' ('))
- ie.appendChild(self._createText(
- 'link',entries[i]['compoundname'],linkend=entries[i]['id']))
- ie.appendChild(self.boostbook.createTextNode(')'))
- i += 1
-
- #~ Translate a <compounddef ...>...</compounddef>,
- #~ by retranslating with the "kind" of compounddef.
- def _translate_compounddef( self, node, target=None, **kwargs ):
- return self._translateNode(node,node.getAttribute('kind'))
-
- #~ Translate a <compounddef kind="namespace"...>...</compounddef>. For
-    #~ namespaces we just collect the information for later use, as namespaces
-    #~ are not currently included in the BoostBook format. In the future
- #~ it might be good to generate a namespace index.
- def _translate_compounddef_namespace( self, node, target=None, **kwargs ):
- namespace = {
- 'id' : node.getAttribute('id'),
- 'kind' : 'namespace',
- 'name' : self._getChildData('compoundname',root=node),
- 'brief' : self._getChildData('briefdescription',root=node),
- 'detailed' : self._getChildData('detaileddescription',root=node),
- 'parsed' : False
- }
- if self.symbols.has_key(namespace['name']):
- if not self.symbols[namespace['name']]['parsed']:
- self.symbols[namespace['name']]['parsed'] = True
- #~ for n in node.childNodes:
- #~ if hasattr(n,'getAttribute'):
- #~ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
- else:
- self.symbols[namespace['name']] = namespace
- #~ self._setID(namespace['id'],namespace['name'])
- return None
-
- #~ Translate a <compounddef kind="class"...>...</compounddef>, which
- #~ forwards to the kind=struct as they are the same.
- def _translate_compounddef_class( self, node, target=None, **kwargs ):
- return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs)
-
- #~ Translate a <compounddef kind="struct"...>...</compounddef> into:
- #~ <header id="?" name="?">
- #~ <struct name="?">
- #~ ...
- #~ </struct>
- #~ </header>
- def _translate_compounddef_struct( self, node, tag='struct', target=None, **kwargs ):
- result = None
- includes = self._getChild('includes',root=node)
- if includes:
- ## Add the header into the output table.
- self._translate_compounddef_includes_(includes,includes,**kwargs)
- ## Compounds are the declared symbols, classes, types, etc.
- ## We add them to the symbol table, along with the partial DOM for them
- ## so that they can be organized into the output later.
- compoundname = self._getChildData('compoundname',root=node)
- compoundname = self._cppName(compoundname)
- self._setID(node.getAttribute('id'),compoundname['compoundname'])
- struct = self._createNode(tag,name=compoundname['name'].split('::')[-1])
- self.symbols[compoundname['compoundname']] = {
- 'header' : includes.firstChild.data,
- 'namespace' : compoundname['namespace'],
- 'id' : node.getAttribute('id'),
- 'kind' : tag,
- 'name' : compoundname['name'],
- 'dom' : struct
- }
- ## Add the children which will be the members of the struct.
- for n in node.childNodes:
- self._translateNode(n,target=struct,scope=compoundname['compoundname'])
- result = struct
- return result
-
- #~ Translate a <compounddef ...><includes ...>...</includes></compounddef>,
- def _translate_compounddef_includes_( self, node, target=None, **kwargs ):
- name = node.firstChild.data
- if not self.symbols.has_key(name):
- self._setID(node.getAttribute('refid'),name)
- self.symbols[name] = {
- 'kind' : 'header',
- 'id' : node.getAttribute('refid'),
- 'dom' : self._createNode('header',
- id=node.getAttribute('refid'),
- name=name)
- }
- return None
-
- #~ Translate a <basecompoundref...>...</basecompoundref> into:
- #~ <inherit access="?">
- #~ ...
- #~ </inherit>
- def _translate_basecompoundref( self, ref, target=None, **kwargs ):
- inherit = target.appendChild(self._createNode('inherit',
- access=ref.getAttribute('prot')))
- self._translateChildren(ref,target=inherit)
- return
-
- #~ Translate:
- #~ <templateparamlist>
- #~ <param>
- #~ <type>...</type>
- #~ <declname>...</declname>
- #~ <defname>...</defname>
- #~ <defval>...</defval>
- #~ </param>
- #~ ...
- #~ </templateparamlist>
- #~ Into:
- #~ <template>
- #~ <template-type-parameter name="?" />
- #~ <template-nontype-parameter name="?">
- #~ <type>?</type>
- #~ <default>?</default>
- #~ </template-nontype-parameter>
- #~ </template>
- def _translate_templateparamlist( self, templateparamlist, target=None, **kwargs ):
- template = target.appendChild(self._createNode('template'))
- for param in templateparamlist.childNodes:
- if param.nodeName == 'param':
- type = self._getChildData('type',root=param)
- defval = self._getChild('defval',root=param)
- paramKind = None
- if type in ('class','typename'):
- paramKind = 'template-type-parameter'
- else:
- paramKind = 'template-nontype-parameter'
- templateParam = template.appendChild(
- self._createNode(paramKind,
- name=self._getChildData('declname',root=param)))
- if paramKind == 'template-nontype-parameter':
- template_type = templateParam.appendChild(self._createNode('type'))
- self._translate_type(
- self._getChild('type',root=param),target=template_type)
- if defval:
- value = self._getChildData('ref',root=defval.firstChild)
- if not value:
- value = self._getData(defval)
- templateParam.appendChild(self._createText('default',value))
- return template
-
- #~ Translate:
- #~ <briefdescription>...</briefdescription>
- #~ Into:
- #~ <purpose>...</purpose>
- def _translate_briefdescription( self, brief, target=None, **kwargs ):
- self._translateDescription(brief,target=target,**kwargs)
- return self._translateDescription(brief,target=target,tag='purpose',**kwargs)
-
- #~ Translate:
- #~ <detaileddescription>...</detaileddescription>
- #~ Into:
- #~ <description>...</description>
- def _translate_detaileddescription( self, detailed, target=None, **kwargs ):
- return self._translateDescription(detailed,target=target,**kwargs)
-
- #~ Translate:
- #~ <sectiondef kind="?">...</sectiondef>
- #~ With kind specific translation.
- def _translate_sectiondef( self, sectiondef, target=None, **kwargs ):
- self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs)
-
- #~ Translate non-function sections.
- def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ):
- for n in sectiondef.childNodes:
- if hasattr(n,'getAttribute'):
- self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
- return None
-
- #~ Translate:
- #~ <sectiondef kind="public-type">...</sectiondef>
- def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ):
- return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)
-
- #~ Translate:
-    #~ <sectiondef kind="public-attrib">...</sectiondef>
- def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs):
- return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)
-
- #~ Translate:
- #~ <sectiondef kind="?-func">...</sectiondef>
- #~ All the various function group translations end up here for which
- #~ they are translated into:
- #~ <method-group name="?">
- #~ ...
- #~ </method-group>
- def _translate_sectiondef_func_( self, sectiondef, name='functions', target=None, **kwargs ):
- members = target.appendChild(self._createNode('method-group',name=name))
- for n in sectiondef.childNodes:
- if hasattr(n,'getAttribute'):
- self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs)
- return members
-
- #~ Translate:
- #~ <sectiondef kind="public-func">...</sectiondef>
- def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ):
- return self._translate_sectiondef_func_(sectiondef,
- name='public member functions',target=target,**kwargs)
-
- #~ Translate:
- #~ <sectiondef kind="public-static-func">...</sectiondef>
- def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs):
- return self._translate_sectiondef_func_(sectiondef,
- name='public static functions',target=target,**kwargs)
-
- #~ Translate:
- #~ <sectiondef kind="protected-func">...</sectiondef>
- def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ):
- return self._translate_sectiondef_func_(sectiondef,
- name='protected member functions',target=target,**kwargs)
-
- #~ Translate:
- #~ <sectiondef kind="private-static-func">...</sectiondef>
- def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs):
- return self._translate_sectiondef_func_(sectiondef,
- name='private static functions',target=target,**kwargs)
-
- #~ Translate:
-    #~ <sectiondef kind="private-func">...</sectiondef>
- def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ):
- return self._translate_sectiondef_func_(sectiondef,
- name='private member functions',target=target,**kwargs)
-
- #~ Translate:
- #~ <sectiondef kind="user-defined"><header>...</header>...</sectiondef>
- def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ):
- return self._translate_sectiondef_func_(sectiondef,
- name=self._getChildData('header', root=sectiondef),target=target,**kwargs)
-
- #~ Translate:
- #~ <memberdef kind="typedef" id="?">
- #~ <name>...</name>
- #~ </memberdef>
- #~ To:
- #~ <typedef id="?" name="?">
- #~ <type>...</type>
- #~ </typedef>
- def _translate_memberdef_typedef( self, memberdef, target=None, scope=None, **kwargs ):
- self._setID(memberdef.getAttribute('id'),
- scope+'::'+self._getChildData('name',root=memberdef))
- typedef = target.appendChild(self._createNode('typedef',
- id=memberdef.getAttribute('id'),
- name=self._getChildData('name',root=memberdef)))
- typedef_type = typedef.appendChild(self._createNode('type'))
- self._translate_type(self._getChild('type',root=memberdef),target=typedef_type)
- return typedef
-
- #~ Translate:
- #~ <memberdef kind="function" id="?" const="?" static="?" explicit="?" inline="?">
- #~ <name>...</name>
- #~ </memberdef>
- #~ To:
- #~ <method name="?" cv="?" specifiers="?">
- #~ ...
- #~ </method>
- def _translate_memberdef_function( self, memberdef, target=None, scope=None, **kwargs ):
- name = self._getChildData('name',root=memberdef)
- self._setID(memberdef.getAttribute('id'),scope+'::'+name)
- ## Check if we have some specific kind of method.
- if name == scope.split('::')[-1]:
- kind = 'constructor'
- target = target.parentNode
- elif name == '~'+scope.split('::')[-1]:
- kind = 'destructor'
- target = target.parentNode
- elif name == 'operator=':
- kind = 'copy-assignment'
- target = target.parentNode
- else:
- kind = 'method'
- method = target.appendChild(self._createNode(kind,
- # id=memberdef.getAttribute('id'),
- name=name,
- cv=' '.join([
- if_attribute(memberdef,'const','const','').strip()
- ]),
- specifiers=' '.join([
- if_attribute(memberdef,'static','static',''),
- if_attribute(memberdef,'explicit','explicit',''),
- if_attribute(memberdef,'inline','inline','')
- ]).strip()
- ))
- ## We iterate the children to translate each part of the function.
- for n in memberdef.childNodes:
- self._translateNode(memberdef,'function',n,target=method)
- return method
-
- #~ Translate:
- #~ <memberdef kind="function"...><templateparamlist>...</templateparamlist></memberdef>
- def _translate_memberdef_function_templateparamlist(
- self, templateparamlist, target=None, **kwargs ):
- return self._translate_templateparamlist(templateparamlist,target=target,**kwargs)
-
- #~ Translate:
- #~ <memberdef kind="function"...><type>...</type></memberdef>
- #~ To:
- #~ ...<type>?</type>
- def _translate_memberdef_function_type( self, resultType, target=None, **kwargs ):
- methodType = self._createNode('type')
- self._translate_type(resultType,target=methodType)
- if methodType.hasChildNodes():
- target.appendChild(methodType)
- return methodType
-
- #~ Translate:
- #~ <memberdef kind="function"...><briefdescription>...</briefdescription></memberdef>
- def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ):
- result = self._translateDescription(description,target=target,**kwargs)
-        ## For functions, if we translate the brief docs to the purpose, they end up
- ## right above the regular description. And since we just added the brief to that
- ## on the previous line, don't bother with the repetition.
- # result = self._translateDescription(description,target=target,tag='purpose',**kwargs)
- return result
-
- #~ Translate:
- #~ <memberdef kind="function"...><detaileddescription>...</detaileddescription></memberdef>
- def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ):
- return self._translateDescription(description,target=target,**kwargs)
-
- #~ Translate:
- #~ <memberdef kind="function"...><inbodydescription>...</inbodydescription></memberdef>
- def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ):
- return self._translateDescription(description,target=target,**kwargs)
-
- #~ Translate:
- #~ <memberdef kind="function"...><param>...</param></memberdef>
- def _translate_memberdef_function_param( self, param, target=None, **kwargs ):
- return self._translate_param(param,target=target,**kwargs)
-
- #~ Translate:
- #~ <memberdef kind="variable" id="?">
- #~ <name>...</name>
- #~ <type>...</type>
- #~ </memberdef>
- #~ To:
- #~ <data-member id="?" name="?">
- #~ <type>...</type>
- #~ </data-member>
- def _translate_memberdef_variable( self, memberdef, target=None, scope=None, **kwargs ):
- self._setID(memberdef.getAttribute('id'),
- scope+'::'+self._getChildData('name',root=memberdef))
- data_member = target.appendChild(self._createNode('data-member',
- id=memberdef.getAttribute('id'),
- name=self._getChildData('name',root=memberdef)))
- data_member_type = data_member.appendChild(self._createNode('type'))
- self._translate_type(self._getChild('type',root=memberdef),target=data_member_type)
-
- #~ Translate:
- #~ <memberdef kind="enum" id="?">
- #~ <name>...</name>
- #~ ...
- #~ </memberdef>
- #~ To:
- #~ <enum id="?" name="?">
- #~ ...
- #~ </enum>
- def _translate_memberdef_enum( self, memberdef, target=None, scope=None, **kwargs ):
- self._setID(memberdef.getAttribute('id'),
- scope+'::'+self._getChildData('name',root=memberdef))
- enum = target.appendChild(self._createNode('enum',
- id=memberdef.getAttribute('id'),
- name=self._getChildData('name',root=memberdef)))
- for n in memberdef.childNodes:
- self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs)
- return enum
-
- #~ Translate:
- #~ <memberdef kind="enum"...>
- #~ <enumvalue id="?">
- #~ <name>...</name>
- #~ <initializer>...</initializer>
- #~ </enumvalue>
- #~ </memberdef>
- #~ To:
- #~ <enumvalue id="?" name="?">
- #~ <default>...</default>
- #~ </enumvalue>
- def _translate_memberdef_enum_enumvalue( self, enumvalue, target=None, scope=None, **kwargs ):
- self._setID(enumvalue.getAttribute('id'),
- scope+'::'+self._getChildData('name',root=enumvalue))
- value = target.appendChild(self._createNode('enumvalue',
- id=enumvalue.getAttribute('id'),
- name=self._getChildData('name',root=enumvalue)))
- initializer = self._getChild('initializer',root=enumvalue)
- if initializer:
- self._translateChildren(initializer,
- target=target.appendChild(self._createNode('default')))
- return value
-
- #~ Translate:
- #~ <param>
- #~ <type>...</type>
- #~ <declname>...</declname>
- #~ <defval>...</defval>
- #~ </param>
- #~ To:
- #~ <parameter name="?">
- #~ <paramtype>...</paramtype>
- #~ ...
- #~ </parameter>
- def _translate_param( self, param, target=None, **kwargs):
- parameter = target.appendChild(self._createNode('parameter',
- name=self._getChildData('declname',root=param)))
- paramtype = parameter.appendChild(self._createNode('paramtype'))
- self._translate_type(self._getChild('type',root=param),target=paramtype)
- defval = self._getChild('defval',root=param)
- if defval:
- self._translateChildren(self._getChild('defval',root=param),target=parameter)
- return parameter
-
- #~ Translate:
- #~ <ref kindref="?" ...>...</ref>
- def _translate_ref( self, ref, **kwargs ):
- return self._translateNode(ref,ref.getAttribute('kindref'))
-
- #~ Translate:
- #~ <ref refid="?" kindref="compound">...</ref>
- #~ To:
- #~ <link linkend="?"><classname>...</classname></link>
- def _translate_ref_compound( self, ref, **kwargs ):
- result = self._createNode('link',linkend=ref.getAttribute('refid'))
- classname = result.appendChild(self._createNode('classname'))
- self._translateChildren(ref,target=classname)
- return result
-
- #~ Translate:
- #~ <ref refid="?" kindref="member">...</ref>
- #~ To:
- #~ <link linkend="?">...</link>
- def _translate_ref_member( self, ref, **kwargs ):
- result = self._createNode('link',linkend=ref.getAttribute('refid'))
- self._translateChildren(ref,target=result)
- return result
-
- #~ Translate:
- #~ <type>...</type>
- def _translate_type( self, type, target=None, **kwargs ):
- result = self._translateChildren(type,target=target,**kwargs)
- #~ Filter types to clean up various readability problems, most notably
- #~ with really long types.
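- #~ For example (a descriptive note): a member whose type begins with
- #~ boost::mpl:: or matches boost::enable_if is collapsed below into the
- #~ single emphasized word "unspecified".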
- xml = target.toxml('utf-8');
- if (
- xml.startswith('<type>boost::mpl::') or
- xml.startswith('<type>BOOST_PP_') or
- re.match('<type>boost::(lazy_)?(enable|disable)_if',xml)
- ):
- while target.firstChild:
- target.removeChild(target.firstChild)
- target.appendChild(self._createText('emphasis','unspecified'))
- return result
-
- def _getChild( self, tag = None, id = None, name = None, root = None ):
- if not root:
- root = self.boostbook.documentElement
- for n in root.childNodes:
- found = True
- if tag and found:
- found = found and tag == n.nodeName
- if id and found:
- if n.hasAttribute('id'):
- found = found and n.getAttribute('id') == id
- else:
- found = found and n.hasAttribute('id') and n.getAttribute('id') == id
- if name and found:
- found = found and n.hasAttribute('name') and n.getAttribute('name') == name
- if found:
- #~ print '--|', n
- return n
- return None
-
- def _getChildData( self, tag, **kwargs ):
- return self._getData(self._getChild(tag,**kwargs),**kwargs)
-
- def _getData( self, node, **kwargs ):
- if node:
- text = self._getChild('#text',root=node)
- if text:
- return text.data.strip()
- return ''
-
- def _cppName( self, type ):
- parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':'))
- result = {
- 'compoundname' : parts.group(1),
- 'namespace' : parts.group(1).split('::')[0:-1],
- 'name' : parts.group(1).split('::')[-1],
- 'specialization' : parts.group(2)
- }
- if result['namespace'] and len(result['namespace']) > 0:
- namespace = '::'.join(result['namespace'])
- while (
- len(result['namespace']) > 0 and (
- not self.symbols.has_key(namespace) or
- self.symbols[namespace]['kind'] != 'namespace')
- ):
- result['name'] = result['namespace'].pop()+'::'+result['name']
- namespace = '::'.join(result['namespace'])
- return result
-
- def _createNode( self, tag, **kwargs ):
- result = self.boostbook.createElement(tag)
- for k in kwargs.keys():
- if kwargs[k] != '':
- if k == 'id':
- result.setAttribute('id',kwargs[k])
- else:
- result.setAttribute(k,kwargs[k])
- return result
-
- def _createText( self, tag, data, **kwargs ):
- result = self._createNode(tag,**kwargs)
- data = data.strip()
- if len(data) > 0:
- result.appendChild(self.boostbook.createTextNode(data))
- return result
-
-
-def main( xmldir=None, output=None, id=None, title=None, index=False ):
- #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output)
-
- input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) )
- input.sort()
- translator = Doxygen2BoostBook(id=id, title=title, index=index)
- #~ Feed in the namespaces first to build up the set of namespaces
- #~ and definitions so that lookup is unambiguous when reading in the definitions.
- namespace_files = filter(
- lambda x:
- os.path.basename(x).startswith('namespace'),
- input)
- decl_files = filter(
- lambda x:
- not os.path.basename(x).startswith('namespace') and not os.path.basename(x).startswith('_'),
- input)
- for dox in namespace_files:
- #~ print '--|',os.path.basename(dox)
- translator.addDox(xml.dom.minidom.parse(dox))
- for dox in decl_files:
- #~ print '--|',os.path.basename(dox)
- translator.addDox(xml.dom.minidom.parse(dox))
-
- if output:
- output = open(output,'w')
- else:
- output = sys.stdout
- if output:
- output.write(translator.tostring())
-
-
-main( **get_args() )
diff --git a/jam-files/boost-build/tools/doxygen-config.jam b/jam-files/boost-build/tools/doxygen-config.jam
deleted file mode 100644
index 2cd2ccae..00000000
--- a/jam-files/boost-build/tools/doxygen-config.jam
+++ /dev/null
@@ -1,11 +0,0 @@
-#~ Copyright 2005, 2006 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for Doxygen tools. To use, just import this module.
-
-import toolset : using ;
-
-ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ;
-
-using doxygen ;
diff --git a/jam-files/boost-build/tools/doxygen.jam b/jam-files/boost-build/tools/doxygen.jam
deleted file mode 100644
index 8394848d..00000000
--- a/jam-files/boost-build/tools/doxygen.jam
+++ /dev/null
@@ -1,776 +0,0 @@
-# Copyright 2003, 2004 Douglas Gregor
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Copyright 2006 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines rules to handle generation of various outputs from source
-# files documented with doxygen comments. The supported transformations are:
-#
-# * Source -> Doxygen XML -> BoostBook XML
-# * Source -> Doxygen HTML
-#
-# The type of transformation is selected based on the target requested. For
-# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an
-# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen
-# HTML specifying a target with an ".html" suffix will produce a directory
-# <target> with the Doxygen html files, and a <target>.html file redirecting to
-# that directory.
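-#
-# For example, a Jamfile might declare (a hypothetical sketch, not part of this
-# module):
-#
-#   doxygen autodoc.xml : [ glob include/*.hpp ] : <doxygen:param>EXTRACT_ALL=YES ;
-#   doxygen apidocs.html : [ glob include/*.hpp ] ;
-#
-# The first target produces BoostBook XML, the second a Doxygen HTML directory
-# with a redirecting apidocs.html file.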
-
-import "class" : new ;
-import targets ;
-import feature ;
-import property ;
-import generators ;
-import boostbook ;
-import type ;
-import path ;
-import print ;
-import regex ;
-import stage ;
-import project ;
-import xsltproc ;
-import make ;
-import os ;
-import toolset : flags ;
-import alias ;
-import common ;
-import modules ;
-import project ;
-import utility ;
-import errors ;
-
-
-# Used to specify extra configuration parameters. These get translated
-# into a doxyfile which configures the building of the docs.
-feature.feature doxygen:param : : free ;
-
-# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
-feature.feature prefix : : free ;
-
-# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
-feature.feature reftitle : : free ;
-
-# Which processor to use for various translations from Doxygen.
-feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
-
-# To generate, or not, index sections.
-feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
-
-# The ID for the resulting BoostBook reference section.
-feature.feature doxygen.doxproc.id : : free ;
-
-# The title for the resulting BoostBook reference section.
-feature.feature doxygen.doxproc.title : : free ;
-
-# Location for images when generating XML
-feature.feature doxygen:xml-imagedir : : free ;
-
-# Indicates whether the entire directory should be deleted
-feature.feature doxygen.rmdir : off on : optional incidental ;
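-
-# As a rough illustration (an assumed invocation, not mandated by this module),
-# a build request such as
-#   bjam doxygen.processor=doxproc doxygen.doxproc.index=yes
-# routes the XML-to-BoostBook translation through doxproc.py with index
-# sections enabled.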
-
-# Doxygen configuration input file.
-type.register DOXYFILE : doxyfile ;
-
-# Doxygen XML multi-file output.
-type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
-
-# Doxygen XML coalesced output.
-type.register DOXYGEN_XML : doxygen : XML ;
-
-# Doxygen HTML multifile directory.
-type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
-
-# Redirection HTML file to HTML multifile directory.
-type.register DOXYGEN_HTML : : HTML ;
-
-type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
-
-# Initialize the Doxygen module. Parameters are:
-# name: the name of the 'doxygen' executable. If not specified, the name
-# 'doxygen' will be used
-#
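-# For example, in user-config.jam one might write (a minimal sketch):
-#
-#   using doxygen : /usr/local/bin/doxygen ;
-#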
-rule init ( name ? )
-{
- if ! $(.initialized)
- {
- .initialized = true ;
-
- .doxproc = [ modules.binding $(__name__) ] ;
- .doxproc = $(.doxproc:D)/doxproc.py ;
-
- generators.register-composing doxygen.headers-to-doxyfile
- : H HPP CPP : DOXYFILE ;
- generators.register-standard doxygen.run
- : DOXYFILE : DOXYGEN_XML_MULTIFILE ;
- generators.register-standard doxygen.xml-dir-to-boostbook
- : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
- generators.register-standard doxygen.xml-to-boostbook
- : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
- generators.register-standard doxygen.collect
- : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
- generators.register-standard doxygen.run
- : DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
- generators.register-standard doxygen.html-redirect
- : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
- generators.register-standard doxygen.copy-latex-pngs
- : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
-
- IMPORT $(__name__) : doxygen : : doxygen ;
- }
-
- if $(name)
- {
- modify-config ;
- .doxygen = $(name) ;
- check-doxygen ;
- }
-
- if ! $(.doxygen)
- {
- check-doxygen ;
- }
-}
-
-rule freeze-config ( )
-{
- if ! $(.initialized)
- {
- errors.user-error "doxygen must be initialized before it can be used." ;
- }
- if ! $(.config-frozen)
- {
- .config-frozen = true ;
-
- if [ .is-cygwin ]
- {
- .is-cygwin = true ;
- }
- }
-}
-
-rule modify-config ( )
-{
- if $(.config-frozen)
- {
- errors.user-error "Cannot change doxygen after it has been used." ;
- }
-}
-
-rule check-doxygen ( )
-{
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using doxygen ":" $(.doxygen) ;
- }
- local extra-paths ;
- if [ os.name ] = NT
- {
- local ProgramFiles = [ modules.peek : ProgramFiles ] ;
- if $(ProgramFiles)
- {
- extra-paths = "$(ProgramFiles:J= )" ;
- }
- else
- {
- extra-paths = "C:\\Program Files" ;
- }
- }
- .doxygen = [ common.get-invocation-command doxygen :
- doxygen : $(.doxygen) : $(extra-paths) ] ;
-}
-
-rule name ( )
-{
- freeze-config ;
- return $(.doxygen) ;
-}
-
-rule .is-cygwin ( )
-{
- if [ os.on-windows ]
- {
- local file = [ path.make [ modules.binding $(__name__) ] ] ;
- local dir = [ path.native
- [ path.join [ path.parent $(file) ] doxygen ] ] ;
- local command =
- "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ;
- result = [ SHELL $(command) ] ;
- if [ MATCH "(Parsing file /)" : $(result) ]
- {
- return true ;
- }
- }
-}
-
-# Runs Doxygen on the given Doxygen configuration file (the source) to generate
-# the Doxygen files. The output is dumped according to the settings in the
-# Doxygen configuration file, not according to the target! Because of this, we
-# essentially "touch" the target file, in effect making it look like we have
-# really written something useful to it. Anyone that uses this action must deal
-# with this behavior.
-#
-actions doxygen-action
-{
- $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
-}
-
-
-# Runs the Python doxproc XML processor.
-#
-actions doxproc
-{
- python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
-}
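-
-# With the flags declared further below, this expands to something like (a sketch):
-#   python .../doxproc.py --xmldir=ref-xml --output=ref.boostbook --enable-index --id=ref --title="Reference"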
-
-
-rule translate-path ( path )
-{
- freeze-config ;
- if [ os.on-windows ]
- {
- if [ os.name ] = CYGWIN
- {
- if $(.is-cygwin)
- {
- return $(path) ;
- }
- else
- {
- return $(path:W) ;
- }
- }
- else
- {
- if $(.is-cygwin)
- {
- match = [ MATCH ^(.):(.*) : $(path) ] ;
- if $(match)
- {
- return /cygdrive/$(match[1])$(match[2]:T) ;
- }
- else
- {
- return $(path:T) ;
- }
- }
- else
- {
- return $(path) ;
- }
- }
- }
- else
- {
- return $(path) ;
- }
-}
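-
-# For example (a sketch): with a Cygwin-built doxygen running on Windows, a
-# native path such as c:\build\doc is rewritten to roughly /cygdrive/c/build/doc,
-# while a native doxygen receives the path unchanged.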
-
-
-# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
-# and a property list that may contain <doxygen:param> features.
-#
-rule headers-to-doxyfile ( target : sources * : properties * )
-{
- local text "# Generated by Boost.Build version 2" ;
-
- local output-dir ;
-
- # Translate <doxygen:param> into command line flags.
- for local param in [ feature.get-values <doxygen:param> : $(properties) ]
- {
- local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ;
- if $(namevalue[1]) = OUTPUT_DIRECTORY
- {
- output-dir = [ translate-path
- [ utility.unquote $(namevalue[2]) ] ] ;
- text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
- }
- else
- {
- text += "$(namevalue[1]) = $(namevalue[2])" ;
- }
- }
-
- if ! $(output-dir)
- {
- output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
- text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
- }
-
- local headers = ;
- for local header in $(sources:G=)
- {
- header = [ translate-path $(header) ] ;
- headers += \"$(header)\" ;
- }
-
- # Doxygen generates LaTeX by default, so disable it unconditionally, or at
- # least until someone needs, and hence writes support for, LaTeX output.
- text += "GENERATE_LATEX = NO" ;
- text += "INPUT = $(headers:J= )" ;
- print.output $(target) plain ;
- print.text $(text) : true ;
-}
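-
-# For instance (a sketch), <doxygen:param>QUIET=YES with an OUTPUT_DIRECTORY of
-# /tmp/doc and two headers would produce a doxyfile roughly like:
-#
-#   # Generated by Boost.Build version 2
-#   QUIET = YES
-#   OUTPUT_DIRECTORY = "/tmp/doc"
-#   GENERATE_LATEX = NO
-#   INPUT = "a.hpp" "b.hpp"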
-
-
-# Run Doxygen. See doxygen-action for a description of the strange properties of
-# this rule.
-#
-rule run ( target : source : properties * )
-{
- freeze-config ;
- if <doxygen.rmdir>on in $(properties)
- {
- local output-dir =
- [ path.make
- [ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) :
- $(properties) ] ] ;
- local html-dir =
- [ path.make
- [ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
- $(properties) ] ] ;
- if $(output-dir) && $(html-dir) &&
- [ path.glob $(output-dir) : $(html-dir) ]
- {
- HTMLDIR on $(target) =
- [ path.native [ path.join $(output-dir) $(html-dir) ] ] ;
- rm-htmldir $(target) ;
- }
- }
- doxygen-action $(target) : $(source) ;
- NAME on $(target) = $(.doxygen) ;
- RM on $(target) = [ modules.peek common : RM ] ;
- *.XML on $(target) =
- [ path.native
- [ path.join
- [ path.make [ on $(target) return $(LOCATE) ] ]
- $(target:B:S=)
- *.xml ] ] ;
-}
-
-if [ os.name ] = NT
-{
- RMDIR = rmdir /s /q ;
-}
-else
-{
- RMDIR = rm -rf ;
-}
-
-actions quietly rm-htmldir
-{
- $(RMDIR) $(HTMLDIR)
-}
-
-# The rules below require Boost.Book stylesheets, so we need some code to check
-# that the boostbook module has actually been initialized.
-#
-rule check-boostbook ( )
-{
- if ! [ modules.peek boostbook : .initialized ]
- {
- ECHO "error: the boostbook module is not initialized" ;
- ECHO "error: you've attempted to use the 'doxygen' toolset, " ;
- ECHO "error: which requires Boost.Book," ;
- ECHO "error: but never initialized Boost.Book." ;
- EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ;
- }
-}
-
-
-# Collect the set of Doxygen XML files into a single XML source file that can be
-# handled by an XSLT processor. The source is completely ignored (see
-# doxygen-action), because this action picks up the Doxygen XML index file
-# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL
-# program and take a "-o output.xml" argument (grrrr). The target of the
-# collection will be a single Doxygen XML file.
-#
-rule collect ( target : source : properties * )
-{
- check-boostbook ;
- local collect-xsl-dir
- = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
- local source-path
- = [ path.make [ on $(source) return $(LOCATE) ] ] ;
- local collect-path
- = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
- local native-path
- = [ path.native $(collect-path) ] ;
- local real-source
- = [ path.native [ path.join $(collect-path) index.xml ] ] ;
- xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
- : <xsl:param>doxygen.xml.path=$(native-path) ;
-}
-
-
-# Translate Doxygen XML into BoostBook.
-#
-rule xml-to-boostbook ( target : source : properties * )
-{
- check-boostbook ;
- local xsl-dir = [ boostbook.xsl-dir ] ;
- local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
- doxygen2boostbook.xsl ] ] ;
-
- local xslt-properties = $(properties) ;
- for local prefix in [ feature.get-values <prefix> : $(properties) ]
- {
- xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
- }
- for local title in [ feature.get-values <reftitle> : $(properties) ]
- {
- xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
- }
-
- xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
-}
-
-
-flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ;
-flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
-flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
-
-
-rule xml-dir-to-boostbook ( target : source : properties * )
-{
- DOXPROC on $(target) = $(.doxproc) ;
-
- LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
-
- doxygen.doxproc $(target) : $(source:S=) ;
-}
-
-
-# Generate the HTML redirect to HTML dir index.html file.
-#
-rule html-redirect ( target : source : properties * )
-{
- local uri = "$(target:B)/index.html" ;
- print.output $(target) plain ;
- print.text
-"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
- \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
-<html xmlns=\"http://www.w3.org/1999/xhtml\">
-<head>
- <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
-
- <title></title>
-</head>
-
-<body>
- Automatic redirection failed, please go to <a href=
- \"$(uri)\">$(uri)</a>.
-</body>
-</html>
-"
- : true ;
-}
-
-rule copy-latex-pngs ( target : source : requirements * )
-{
- local directory = [ path.native
- [ feature.get-values <doxygen:xml-imagedir> :
- $(requirements) ] ] ;
-
- local location = [ on $(target) return $(LOCATE) ] ;
-
- local pdf-location =
- [ path.native
- [ path.join
- [ path.make $(location) ]
- [ path.make $(directory) ] ] ] ;
- local html-location =
- [ path.native
- [ path.join
- .
- html
- [ path.make $(directory) ] ] ] ;
-
- common.MkDir $(pdf-location) ;
- common.MkDir $(html-location) ;
-
- DEPENDS $(target) : $(pdf-location) $(html-location) ;
-
- if [ os.name ] = NT
- {
- CP on $(target) = copy /y ;
- FROM on $(target) = \\*.png ;
- TOHTML on $(target) = .\\html\\$(directory) ;
- TOPDF on $(target) = \\$(directory) ;
- }
- else
- {
- CP on $(target) = cp ;
- FROM on $(target) = /*.png ;
- TOHTML on $(target) = ./html/$(directory) ;
- TOPDF on $(target) = $(target:D)/$(directory) ;
- }
-}
-
-actions copy-latex-pngs
-{
- $(CP) $(>:S=)$(FROM) $(TOHTML)
- $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
- echo "Stamped" > "$(<)"
-}
-
-# building latex images for doxygen XML depends
-# on latex, dvips, and ps being in your PATH.
-# This is true for most Unix installs, but
-# not on Win32, where you will need to install
-# MkTex and Ghostscript and add these tools
-# to your path.
-
-actions check-latex
-{
- latex -version >$(<)
-}
-
-actions check-dvips
-{
- dvips -version >$(<)
-}
-
-if [ os.name ] = "NT"
-{
- actions check-gs
- {
- gswin32c -version >$(<)
- }
-}
-else
-{
- actions check-gs
- {
- gs -version >$(<)
- }
-}
-
-rule check-tools ( )
-{
- if ! $(.check-tools-targets)
- {
- # Find the root project.
- local root-project = [ project.current ] ;
- root-project = [ $(root-project).project-module ] ;
- while
- [ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config
- {
- root-project =
- [ project.attribute $(root-project) parent-module ] ;
- }
-
- .latex.check = [ new file-target latex.check
- :
- : [ project.target $(root-project) ]
- : [ new action : doxygen.check-latex ]
- :
- ] ;
- .dvips.check = [ new file-target dvips.check
- :
- : [ project.target $(root-project) ]
- : [ new action : doxygen.check-dvips ]
- :
- ] ;
- .gs.check = [ new file-target gs.check
- :
- : [ project.target $(root-project) ]
- : [ new action : doxygen.check-gs ]
- :
- ] ;
- .check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ;
- }
- return $(.check-tools-targets) ;
-}
-
-project.initialize $(__name__) ;
-project doxygen ;
-
-class doxygen-check-tools-target-class : basic-target
-{
- import doxygen ;
- rule construct ( name : sources * : property-set )
- {
- return [ property-set.empty ] [ doxygen.check-tools ] ;
- }
-}
-
-local project = [ project.current ] ;
-
-targets.main-target-alternative
- [ new doxygen-check-tools-target-class check-tools : $(project)
- : [ targets.main-target-sources : check-tools : no-renaming ]
- : [ targets.main-target-requirements : $(project) ]
- : [ targets.main-target-default-build : $(project) ]
- : [ targets.main-target-usage-requirements : $(project) ]
- ] ;
-
-# User-level rule to generate BoostBook XML from a set of headers via Doxygen.
-#
-rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
-{
- freeze-config ;
- local project = [ project.current ] ;
-
- if $(target:S) = .html
- {
- # Build an HTML directory from the sources.
- local html-location = [ feature.get-values <location> : $(requirements) ] ;
- local output-dir ;
- if [ $(project).get build-dir ]
- {
- # Explicitly specified build dir. Add html at the end.
- output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ;
- }
- else
- {
- # Trim 'bin' from the implicit build dir, for no other reason than backward
- # compatibility.
- output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
- $(html-location:E=html) ] ;
- }
- output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
- local output-dir-native = [ path.native $(output-dir) ] ;
- requirements = [ property.change $(requirements) : <location> ] ;
-
- ## The doxygen configuration file.
- targets.main-target-alternative
- [ new typed-target $(target:S=.tag) : $(project) : DOXYFILE
- : [ targets.main-target-sources $(sources) : $(target:S=.tag) ]
- : [ targets.main-target-requirements $(requirements)
- <doxygen:param>GENERATE_HTML=YES
- <doxygen:param>GENERATE_XML=NO
- <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
- <doxygen:param>HTML_OUTPUT=$(target:B)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target:S=.tag) ;
-
- ## The html directory to generate by running doxygen.
- targets.main-target-alternative
- [ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE
- : $(target:S=.tag)
- : [ targets.main-target-requirements $(requirements)
- <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
- <doxygen:param>HTML_OUTPUT=$(target:B)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target:S=.dir) ;
-
- ## The redirect html file into the generated html.
- targets.main-target-alternative
- [ new typed-target $(target) : $(project) : DOXYGEN_HTML
- : $(target:S=.dir)
- : [ targets.main-target-requirements $(requirements)
- <location>$(output-dir)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- }
- else
- {
- # Build a BoostBook XML file from the sources.
- local location-xml = [ feature.get-values <location> : $(requirements) ] ;
- requirements = [ property.change $(requirements) : <location> ] ;
- local target-xml = $(target:B=$(target:B)-xml) ;
-
- # Check whether we need to build images
- local images-location =
- [ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ;
- if $(images-location)
- {
- doxygen $(target).doxygen-xml-images.html : $(sources)
- : $(requirements)
- <doxygen.rmdir>on
- <doxygen:param>QUIET=YES
- <doxygen:param>WARNINGS=NO
- <doxygen:param>WARN_IF_UNDOCUMENTED=NO
- <dependency>/doxygen//check-tools ;
- $(project).mark-target-as-explicit
- $(target).doxygen-xml-images.html ;
-
- targets.main-target-alternative
- [ new typed-target $(target).doxygen-xml-images
- : $(project) : DOXYGEN_XML_IMAGES
- : $(target).doxygen-xml-images.html
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build)
- : $(project) ]
- ] ;
-
- $(project).mark-target-as-explicit
- $(target).doxygen-xml-images ;
-
- if ! [ regex.match "^(.*/)$" : $(images-location) ]
- {
- images-location = $(images-location)/ ;
- }
-
- requirements +=
- <dependency>$(target).doxygen-xml-images
- <xsl:param>boost.doxygen.formuladir=$(images-location) ;
- }
-
- ## The doxygen configuration file.
- targets.main-target-alternative
- [ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE
- : [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ]
- : [ targets.main-target-requirements $(requirements)
- <doxygen:param>GENERATE_HTML=NO
- <doxygen:param>GENERATE_XML=YES
- <doxygen:param>XML_OUTPUT=$(target-xml)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target-xml:S=.tag) ;
-
- ## The Doxygen XML directory of the processed source files.
- targets.main-target-alternative
- [ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE
- : $(target-xml:S=.tag)
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target-xml:S=.dir) ;
-
- ## The resulting BoostBook file is generated by the processor tool. The
- ## tool can be either xsltproc plus the accompanying XSL scripts, or the
- ## Python doxproc.py script.
- targets.main-target-alternative
- [ new typed-target $(target-xml) : $(project) : BOOSTBOOK
- : $(target-xml:S=.dir)
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target-xml) ;
-
- targets.main-target-alternative
- [ new install-target-class $(target:S=.xml) : $(project)
- : $(target-xml)
- : [ targets.main-target-requirements $(requirements)
- <location>$(location-xml:E=.)
- <name>$(target:S=.xml)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target:S=.xml) ;
-
- targets.main-target-alternative
- [ new alias-target-class $(target) : $(project)
- :
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements)
- <dependency>$(target:S=.xml)
- : $(project) ]
- ] ;
- }
-}
diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile
deleted file mode 100644
index 9b969df9..00000000
--- a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile
+++ /dev/null
@@ -1,3 +0,0 @@
-INPUT = windows-paths-check.hpp
-GENERATE_HTML = NO
-GENERATE_LATEX = NO
diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp
deleted file mode 100644
index e69de29b..00000000
--- a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp
+++ /dev/null
diff --git a/jam-files/boost-build/tools/fop.jam b/jam-files/boost-build/tools/fop.jam
deleted file mode 100644
index c24b8725..00000000
--- a/jam-files/boost-build/tools/fop.jam
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed
-# under the Boost Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-#
-# This module defines rules to handle generation of PDF and
-# PostScript files from XSL Formatting Objects via Apache FOP
-
-import generators ;
-import common ;
-import boostbook ;
-
-generators.register-standard fop.render.pdf : FO : PDF ;
-generators.register-standard fop.render.ps : FO : PS ;
-
-# Initializes the fop toolset.
-#
-rule init ( fop-command ? : java-home ? : java ? )
-{
- local has-command = $(.has-command) ;
-
- if $(fop-command)
- {
- .has-command = true ;
- }
-
- if $(fop-command) || ! $(has-command)
- {
- fop-command = [ common.get-invocation-command fop : fop : $(fop-command)
- : [ modules.peek : FOP_DIR ] ] ;
- }
-
- if $(fop-command)
- {
- .FOP_COMMAND = $(fop-command) ;
- }
-
- if $(java-home) || $(java)
- {
- .FOP_SETUP = ;
-
-
- # JAVA_HOME is the location where java was installed.
-
- if $(java-home)
- {
- .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
- }
-
- # JAVACMD is the location of the java executable, useful for a
- # non-standard java installation, where the executable isn't at
- # $JAVA_HOME/bin/java.
-
- if $(java)
- {
- .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ;
- }
- }
-}
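-
-# Example configuration in user-config.jam (a sketch; paths are assumptions):
-#   using fop : /usr/local/bin/fop : /usr/lib/jvm/default-java ;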
-
-actions render.pdf
-{
- $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<)
-}
-
-actions render.ps
-{
- $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<)
-}
diff --git a/jam-files/boost-build/tools/fortran.jam b/jam-files/boost-build/tools/fortran.jam
deleted file mode 100644
index 37665825..00000000
--- a/jam-files/boost-build/tools/fortran.jam
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (C) 2004 Toon Knapen
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# This file contains common settings for all fortran tools
-#
-
-import "class" : new ;
-import feature : feature ;
-
-import type ;
-import generators ;
-import common ;
-
-type.register FORTRAN : f F for f77 ;
-type.register FORTRAN90 : f90 F90 ;
-
-feature fortran : : free ;
-feature fortran90 : : free ;
-
-class fortran-compiling-generator : generator
-{
- rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
- {
- generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
- }
-}
-
-rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
-{
- local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
- generators.register $(g) ;
-}
-
-class fortran90-compiling-generator : generator
-{
- rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
- {
- generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
- }
-}
-
-rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
-{
- local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
- generators.register $(g) ;
-}
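-
-# A toolset module registers its Fortran compiler through these rules; gcc.jam,
-# for example, calls:
-#   generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;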
-
-# FIXME: this is ugly, should find a better way (we'd want client code to
-# register all generators as "generator.some-rule", not with "some-module.some-rule".)
-IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ;
-IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ;
diff --git a/jam-files/boost-build/tools/gcc.jam b/jam-files/boost-build/tools/gcc.jam
deleted file mode 100644
index f7b0da54..00000000
--- a/jam-files/boost-build/tools/gcc.jam
+++ /dev/null
@@ -1,1185 +0,0 @@
-# Copyright 2001 David Abrahams.
-# Copyright 2002-2006 Rene Rivera.
-# Copyright 2002-2003 Vladimir Prus.
-# Copyright (c) 2005 Reece H. Dunn.
-# Copyright 2006 Ilya Sokolov.
-# Copyright 2007 Roland Schwarz
-# Copyright 2007 Boris Gubenko.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import os ;
-import pch ;
-import property ;
-import property-set ;
-import toolset ;
-import type ;
-import rc ;
-import regex ;
-import set ;
-import unix ;
-import fortran ;
-
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-
-feature.extend toolset : gcc ;
-# feature.subfeature toolset gcc : flavor : : optional ;
-
-toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
-toolset.inherit-flags gcc : unix ;
-toolset.inherit-rules gcc : unix ;
-
-generators.override gcc.prebuilt : builtin.prebuilt ;
-generators.override gcc.searched-lib-generator : searched-lib-generator ;
-
-# Make gcc toolset object files use the "o" suffix on all platforms.
-type.set-generated-target-suffix OBJ : <toolset>gcc : o ;
-type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ;
-type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ;
-
-# Initializes the gcc toolset for the given version. If necessary, command may
-# be used to specify where the compiler is located. The parameter 'options' is a
-# space-delimited list of options, each one specified as
-# <option-name>option-value. Valid option names are: cxxflags, linkflags and
-# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or
-# sun and the default value will be selected based on the current OS.
-# Example:
-# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
-#
-# The compiler command to use is detected in a three step manner:
-# 1) If an explicit command is specified by the user, it will be used and must be available.
-# 2) If only a certain version is specified, it is enforced:
-# - either a command 'g++-VERSION' must be available
-# - or the default command 'g++' must be available and match the exact version.
-# 3) Without user-provided restrictions use default 'g++'
-rule init ( version ? : command * : options * )
-{
- #1): use user-provided command
- local tool-command = ;
- if $(command)
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ;
- if ! $(tool-command)
- {
- errors.error "toolset gcc initialization:" :
- "provided command '$(command)' not found" :
- "initialized from" [ errors.nearest-user-location ] ;
- }
- }
- #2): enforce user-provided version
- else if $(version)
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ;
-
- #2.1) fallback: check whether "g++" reports the requested version
- if ! $(tool-command)
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
- if $(tool-command)
- {
- local tool-command-string = $(tool-command:J=" ") ;
- local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ;
- if $(tool-version) != $(version)
- {
- # Permit a match between the two-digit version specified by the user
- # (e.g. 4.4) and the 3-digit version reported by gcc.
- # Since only two digits are present in the binary name anyway,
- # insisting that the user specify a 3-digit version when
- # configuring Boost.Build, while it is not required on the
- # command line, would be strange.
- local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ;
- if $(stripped) != $(version)
- {
- errors.error "toolset gcc initialization:" :
- "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" :
- "initialized from" [ errors.nearest-user-location ] ;
- tool-command = ;
- }
- # Use full 3-digit version to be compatible with the 'using gcc ;' case
- version = $(tool-version) ;
- }
- }
- else
- {
- errors.error "toolset gcc initialization:" :
- "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" :
- "initialized from" [ errors.nearest-user-location ] ;
- }
- }
- }
- #3) default: no command and no version specified, try using default command "g++"
- else
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
- if ! $(tool-command)
- {
- errors.error "toolset gcc initialization:" :
- "no command provided, default command 'g++' not found" :
- "initialized from" [ errors.nearest-user-location ] ;
- }
- }
-
-
- # Information about the gcc command...
- # The command.
- local command = $(tool-command) ;
- # The root directory of the tool install.
- local root = [ feature.get-values <root> : $(options) ] ;
- # The bin directory where to find the command to execute.
- local bin ;
- # The flavor of compiler.
- local flavor = [ feature.get-values <flavor> : $(options) ] ;
- # Autodetect the root and bin dir if not given.
- if $(command)
- {
- bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
- root ?= $(bin:D) ;
- }
- # The 'command' variable can have multiple elements. When calling
- # the SHELL builtin we need a single string.
- local command-string = $(command:J=" ") ;
- # Autodetect the version and flavor if not given.
- if $(command)
- {
- local machine = [ MATCH "^([^ ]+)"
- : [ SHELL "$(command-string) -dumpmachine" ] ] ;
- version ?= [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- switch $(machine:L)
- {
- case *mingw* : flavor ?= mingw ;
- }
- }
-
- local condition ;
- if $(flavor)
- {
- condition = [ common.check-init-parameters gcc
- : version $(version)
- : flavor $(flavor)
- ] ;
- }
- else
- {
- condition = [ common.check-init-parameters gcc
- : version $(version)
- ] ;
- condition = $(condition) ; #/<toolset-gcc:flavor> ;
- }
-
- common.handle-options gcc : $(condition) : $(command) : $(options) ;
-
- local linker = [ feature.get-values <linker-type> : $(options) ] ;
- # The logic below should actually be keyed on <target-os>
- if ! $(linker)
- {
- if [ os.name ] = OSF
- {
- linker = osf ;
- }
- else if [ os.name ] = HPUX
- {
- linker = hpux ;
- }
- else if [ os.name ] = AIX
- {
- linker = aix ;
- }
- else if [ os.name ] = SOLARIS
- {
- linker = sun ;
- }
- else
- {
- linker = gnu ;
- }
- }
- init-link-flags gcc $(linker) $(condition) ;
-
-
- # If gcc is installed in non-standard location, we'd need to add
- # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
- # rules).
- if $(command)
- {
- # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
- # and all must be added to LD_LIBRARY_PATH. The linker will pick the
- # right ones. Note that we do not provide a clean way to build a 32-bit
- # binary with a 64-bit compiler, but the user can always pass -m32 manually.
- local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
- if $(.debug-configuration)
- {
- ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ;
- }
- toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
- }
-
- # If it's not a system gcc install we should adjust the various programs as
- # needed to prefer using the install specific versions. This is essential
- # for correct use of MinGW and for cross-compiling.
-
- local nl = "
-" ;
-
- # - The archive builder.
- local archiver = [ common.get-invocation-command gcc
- : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ar" ] ] ]
- : [ feature.get-values <archiver> : $(options) ]
- : $(bin)
- : search-path ] ;
- toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ;
- }
-
- # - Ranlib
- local ranlib = [ common.get-invocation-command gcc
- : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ]
- : [ feature.get-values <ranlib> : $(options) ]
- : $(bin)
- : search-path ] ;
- toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ;
- }
-
-
- # - The resource compiler.
- local rc =
- [ common.get-invocation-command-nodefault gcc
- : windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
- local rc-type =
- [ feature.get-values <rc-type> : $(options) ] ;
- rc-type ?= windres ;
- if ! $(rc)
- {
- # If we cannot find an RC compiler we fall back to a null RC compiler that
- # creates empty object files. This allows the same Jamfiles to work
- # across the board. The null RC uses the assembler to create the empty
- # objects, so configure that.
- rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ;
- rc-type = null ;
- }
- rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
-}
-
-if [ os.name ] = NT
-{
- # This causes single-line command invocation to not go through .bat files,
- # thus avoiding command-line length limitations.
- JAMSHELL = % ;
-}
-
-generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
-generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;
-
-# pch support
-
-# The compiler looks for a precompiled header in each directory just before it
-# looks for the include file in that directory. The name searched for is the
-# name specified in the #include directive with ".gch" suffix appended. The
-# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to
-# full name of the header.
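-#
-# For example (a sketch, assuming the pch module's user-level rules): declaring
-#   cpp-pch pch : pch.hpp ;
-# produces pch.hpp.gch, which the compiler then picks up automatically for
-# sources that #include "pch.hpp".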
-
-type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
-
-# GCC-specific pch generator.
-class gcc-pch-generator : pch-generator
-{
- import project ;
- import property-set ;
- import type ;
-
- rule run-pch ( project name ? : property-set : sources + )
- {
- # Find the header in sources. Ignore any CPP sources.
- local header ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] H ]
- {
- header = $(s) ;
- }
- }
-
- # Error handling: Base header file name should be the same as the base
- # precompiled header name.
- local header-name = [ $(header).name ] ;
- local header-basename = $(header-name:B) ;
- if $(header-basename) != $(name)
- {
- local location = [ $(project).project-module ] ;
- errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
- }
-
- local pch-file = [ generator.run $(project) $(name) : $(property-set)
- : $(header) ] ;
-
- # return result of base class and pch-file property as usage-requirements
- return
- [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ]
- $(pch-file)
- ;
- }
-
- # Calls the base version specifying source's name as the name of the created
- # target. As result, the PCH will be named whatever.hpp.gch, and not
- # whatever.gch.
- rule generated-targets ( sources + : property-set : project name ? )
- {
- name = [ $(sources[1]).name ] ;
- return [ generator.generated-targets $(sources)
- : $(property-set) : $(project) $(name) ] ;
- }
-}
-
-# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
-# latter have HPP type, but HPP type is derived from H. The type of compilation
-# is determined entirely by the destination type.
-generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
-generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
-
-# Override default do-nothing generators.
-generators.override gcc.compile.c.pch : pch.default-c-pch-generator ;
-generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
-
-# Declare flags and action for compilation.
-toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ;
-toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags gcc.compile OPTIONS <optimization>space : -Os ;
-
-toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
-toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
-toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
-
-toolset.flags gcc.compile OPTIONS <warnings>off : -w ;
-toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ;
-toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ;
-toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags gcc.compile OPTIONS <profiling>on : -pg ;
-toolset.flags gcc.compile OPTIONS <rtti>off : -fno-rtti ;
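-
-# Taken together, a request such as "optimization=speed inlining=full warnings=all"
-# would, per the mappings above, add roughly -O3 -finline-functions -Wno-inline
-# -Wall -pedantic to the compile command (an illustrative sketch).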
-
-rule setup-fpic ( targets * : sources * : properties * )
-{
- local link = [ feature.get-values link : $(properties) ] ;
- if $(link) = shared
- {
- local target = [ feature.get-values target-os : $(properties) ] ;
-
- # This logic will add -fPIC for all compilations:
- #
- # lib a : a.cpp b ;
- # obj b : b.cpp ;
- # exe c : c.cpp a d ;
- # obj d : d.cpp ;
- #
- # All of this is fine, except that 'd' will be compiled with -fPIC even though
- # it is not needed, as 'd' is used only in an exe. However, it is hard to
- # detect where a target is going to be used. Alternatively, we could set -fPIC
- # only when the main target type is LIB, but then 'b' would be compiled without
- # -fPIC, which would lead to link errors on x86-64. So, compile everything
- # with -fPIC.
- #
- # Yet another alternative would be to create a propagated <sharedable>
- # feature and set it when building shared libraries, but that would be hard
- # to implement and would increase the target path length even more.
-
- # On Windows, -fPIC is the default; specifying it explicitly leads to
- # a warning.
- if $(target) != cygwin && $(target) != windows
- {
- OPTIONS on $(targets) += -fPIC ;
- }
- }
-}
-
-rule setup-address-model ( targets * : sources * : properties * )
-{
- local model = [ feature.get-values address-model : $(properties) ] ;
- if $(model)
- {
- local option ;
- local os = [ feature.get-values target-os : $(properties) ] ;
- if $(os) = aix
- {
- if $(model) = 32
- {
- option = -maix32 ;
- }
- else
- {
- option = -maix64 ;
- }
- }
- else if $(os) = hpux
- {
- if $(model) = 32
- {
- option = -milp32 ;
- }
- else
- {
- option = -mlp64 ;
- }
- }
- else
- {
- if $(model) = 32
- {
- option = -m32 ;
- }
- else if $(model) = 64
- {
- option = -m64 ;
- }
- # For darwin, the model can be 32_64. darwin.jam will handle that
- # on its own.
- }
- OPTIONS on $(targets) += $(option) ;
- }
-}
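-
-# For instance (a sketch), address-model=64 adds -m64 on a typical Linux target,
-# while on AIX it becomes -maix64 and on HP-UX -mlp64, per the cases above.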
-
-
-# FIXME: this should not use os.name.
-if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX && [ os.name ] != AIX
-{
- # OSF does have an option called -soname but it does not seem to work as
- # expected, therefore it has been disabled.
- HAVE_SONAME = "" ;
- SONAME_OPTION = -h ;
-}
-
-# HPUX, for some reason, seems to use '+h', not '-h'.
-if [ os.name ] = HPUX
-{
- HAVE_SONAME = "" ;
- SONAME_OPTION = +h ;
-}
-
-toolset.flags gcc.compile USER_OPTIONS <cflags> ;
-toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
-toolset.flags gcc.compile DEFINES <define> ;
-toolset.flags gcc.compile INCLUDES <include> ;
-toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
-toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ;
-
-rule compile.c++.pch ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c++.pch
-{
- "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c.pch ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c.pch
-{
- "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c++.preprocess ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
- # Some extensions are compiled as C++ by default. For others, we need to
- # pass -x c++. We could always pass -x c++ but distcc does not work with it.
- if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
- {
- LANG on $(<) = "-x c++" ;
- }
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-rule compile.c.preprocess ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
- # If we use the name g++ then the default file suffix -> language mapping does
- # not work, so we have to pass the -x option. Maybe we can work around this by
- # allowing the user to specify both C and C++ compiler names.
- #if $(>:S) != .c
- #{
- LANG on $(<) = "-x c" ;
- #}
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-rule compile.c++ ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
- # Some extensions are compiled as C++ by default. For others, we need to
- # pass -x c++. We could always pass -x c++ but distcc does not work with it.
- if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
- {
- LANG on $(<) = "-x c++" ;
- }
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-
- # Here we want to raise the template-depth parameter value to something
- # higher than the default value of 17. Note that we could do this using the
- # feature.set-default rule but we do not want to set the default value for
- # all toolsets as well.
- #
- # TODO: This 'modified default' has been inherited from some 'older Boost
- # Build implementation' and has most likely been added to make some Boost
- # library parts compile correctly. We should see what exactly prompted this
- # and whether we can get around the problem more locally.
- local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ;
- if ! $(template-depth)
- {
- TEMPLATE_DEPTH on $(<) = 128 ;
- }
-}
-
-rule compile.c ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
- # If we use the name g++ then the default file suffix -> language mapping does
- # not work, so we have to pass the -x option. Maybe we can work around this by
- # allowing the user to specify both C and C++ compiler names.
- #if $(>:S) != .c
- #{
- LANG on $(<) = "-x c" ;
- #}
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-rule compile.fortran ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c++ bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)"
-}
-
-actions compile.c bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++.preprocess bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
-}
-
-actions compile.c.preprocess bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
-}
-
-actions compile.fortran
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.asm ( targets * : sources * : properties * )
-{
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- LANG on $(<) = "-x assembler-with-cpp" ;
-}
-
-actions compile.asm
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# The class which checks that we do not try to use the <runtime-link>static
-# property while creating or using a shared library, since that is not supported
-# by gcc/libc.
-class gcc-linking-generator : unix-linking-generator
-{
- rule run ( project name ? : property-set : sources + )
- {
- # TODO: Replace this with the use of a target-os property.
- local no-static-link = ;
- if [ modules.peek : UNIX ]
- {
- switch [ modules.peek : JAMUNAME ]
- {
- case * : no-static-link = true ;
- }
- }
-
- local properties = [ $(property-set).raw ] ;
- local reason ;
- if $(no-static-link) && <runtime-link>static in $(properties)
- {
- if <link>shared in $(properties)
- {
- reason =
- "On gcc, DLL can't be build with '<runtime-link>static'." ;
- }
- else if [ type.is-derived $(self.target-types[1]) EXE ]
- {
- for local s in $(sources)
- {
- local type = [ $(s).type ] ;
- if $(type) && [ type.is-derived $(type) SHARED_LIB ]
- {
- reason =
- "On gcc, using DLLS together with the"
- "<runtime-link>static options is not possible " ;
- }
- }
- }
- }
- if $(reason)
- {
- ECHO warning:
- $(reason) ;
- ECHO warning:
- "It is suggested to use '<runtime-link>static' together"
- "with '<link>static'." ;
- return ;
- }
- else
- {
- local generated-targets = [ unix-linking-generator.run $(project)
- $(name) : $(property-set) : $(sources) ] ;
- return $(generated-targets) ;
- }
- }
-}
-
-# The set of permissible input types is different on mingw.
-# So, define two sets of generators, with mingw generators
-# selected when target-os=windows.
-
-local g ;
-g = [ new gcc-linking-generator gcc.mingw.link
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : EXE
- : <toolset>gcc <target-os>windows ] ;
-$(g).set-rule-name gcc.link ;
-generators.register $(g) ;
-
-g = [ new gcc-linking-generator gcc.mingw.link.dll
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : IMPORT_LIB SHARED_LIB
- : <toolset>gcc <target-os>windows ] ;
-$(g).set-rule-name gcc.link.dll ;
-generators.register $(g) ;
-
-generators.register
- [ new gcc-linking-generator gcc.link
- : LIB OBJ
- : EXE
- : <toolset>gcc ] ;
-generators.register
- [ new gcc-linking-generator gcc.link.dll
- : LIB OBJ
- : SHARED_LIB
- : <toolset>gcc ] ;
-
-generators.override gcc.mingw.link : gcc.link ;
-generators.override gcc.mingw.link.dll : gcc.link.dll ;
-
-# Cygwin is similar to msvc and mingw in that it uses import libraries.
-# While in simple cases, it can directly link to a shared library,
-# it is believed to be slower, and not always possible. Define cygwin-specific
-# generators here.
-
-g = [ new gcc-linking-generator gcc.cygwin.link
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : EXE
- : <toolset>gcc <target-os>cygwin ] ;
-$(g).set-rule-name gcc.link ;
-generators.register $(g) ;
-
-g = [ new gcc-linking-generator gcc.cygwin.link.dll
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : IMPORT_LIB SHARED_LIB
- : <toolset>gcc <target-os>cygwin ] ;
-$(g).set-rule-name gcc.link.dll ;
-generators.register $(g) ;
-
-generators.override gcc.cygwin.link : gcc.link ;
-generators.override gcc.cygwin.link.dll : gcc.link.dll ;
-
-# Declare flags for linking.
-# First, the common flags.
-toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ;
-toolset.flags gcc.link OPTIONS <profiling>on : -pg ;
-toolset.flags gcc.link USER_OPTIONS <linkflags> ;
-toolset.flags gcc.link LINKPATH <library-path> ;
-toolset.flags gcc.link FINDLIBS-ST <find-static-library> ;
-toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ;
-toolset.flags gcc.link LIBRARIES <library-file> ;
-
-toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
-toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
-
-# For <runtime-link>static we made sure there are no dynamic libraries in the
-# link. On HP-UX not all system libraries exist as archived libraries (for
-# example, there is no libunwind.a), so, on this platform, the -static option
-# cannot be specified.
-if [ os.name ] != HPUX
-{
- toolset.flags gcc.link OPTIONS <runtime-link>static : -static ;
-}
-
-# Now, the vendor specific flags.
-# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun.
-rule init-link-flags ( toolset linker condition )
-{
- switch $(linker)
- {
- case aix :
- {
- #
- # On AIX we *have* to use the native linker.
- #
- # Using -brtl, the AIX linker will look for libraries with both the .a
- # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
- # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
- # file that may contain shared objects and is different from static libs
- # as on Linux.
- #
- # The -bnoipath strips the prepending (relative) path of libraries from
- # the loader section in the target library or executable. Hence, during
- # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
- # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
- # this option, the prepending (relative) path + library name is
- # hard-coded in the loader section, causing *only* this path to be
- # searched during load-time. Note that the AIX linker does not have an
- # -soname equivalent, this is as close as it gets.
- #
- # The above options are definitely for AIX 5.x, and most likely also for
- # AIX 4.x and AIX 6.x. For details about the AIX linker see:
- # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
- #
-
- toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath
- : unchecked ;
- }
-
- case darwin :
- {
- # On Darwin, the -s option to ld does not work unless we pass -static,
- # and passing -static unconditionally is a bad idea. So, do not pass -s
- # at all; darwin.jam will use a separate 'strip' invocation.
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
- }
-
- case gnu :
- {
- # Strip the binary when no debugging is needed. We use --strip-all flag
- # as opposed to -s since icc (intel's compiler) is generally
- # option-compatible with and inherits from the gcc toolset, but does not
- # support -s.
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ;
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
- toolset.flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
- toolset.flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
-
- # gnu ld has the ability to change the search behaviour for libraries
- # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
- # and change search for -l switches that follow them. The following list
- # shows the variants tried; the search stops at the first variant that
- # has a match (see the sketch after this rule).
- # *nix: -Bstatic -lxxx
- # libxxx.a
- #
- # *nix: -Bdynamic -lxxx
- # libxxx.so
- # libxxx.a
- #
- # windows (mingw,cygwin) -Bstatic -lxxx
- # libxxx.a
- # xxx.lib
- #
- # windows (mingw,cygwin) -Bdynamic -lxxx
- # libxxx.dll.a
- # xxx.dll.a
- # libxxx.a
- # xxx.lib
- # cygxxx.dll (*)
- # libxxx.dll
- # xxx.dll
- # libxxx.a
- #
- # (*) This is for cygwin
- # Please note that -Bstatic and -Bdynamic are not a guarantee that a
- # static or dynamic lib indeed gets linked in. The switches only change
- # search patterns!
-
- # On *nix mixing shared libs with static runtime is not a good idea.
- toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared
- : -Wl,-Bstatic : unchecked ;
- toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared
- : -Wl,-Bdynamic : unchecked ;
-
- # On windows allow mixing of static and dynamic libs with static
- # runtime.
- toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows
- : -Wl,-Bstatic : unchecked ;
- toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows
- : -Wl,-Bdynamic : unchecked ;
- toolset.flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows
- : -Wl,-Bstatic : unchecked ;
- }
-
- case hpux :
- {
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
- : -Wl,-s : unchecked ;
- toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
- : -fPIC : unchecked ;
- }
-
- case osf :
- {
- # No --strip-all, just -s.
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
- : -Wl,-s : unchecked ;
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
- : unchecked ;
- # This linker does not support -R.
- toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath
- : unchecked ;
- # -rpath-link is not supported at all.
- }
-
- case sun :
- {
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
- : -Wl,-s : unchecked ;
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
- : unchecked ;
- # The Solaris linker does not have a separate -rpath-link, but allows
- # using -L for the same purpose.
- toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path>
- : unchecked ;
-
- # This permits shared libraries with non-PIC code on Solaris.
- # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
- # following is not needed. Whether -fPIC should be hardcoded, is a
- # separate question.
- # AH, 2004/10/16: it is still necessary because some tests link against
- # static libraries that were compiled without PIC.
- toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
- : -mimpure-text : unchecked ;
- }
-
- case * :
- {
- errors.user-error
- "$(toolset) initialization: invalid linker '$(linker)'" :
- "The value '$(linker)' specified for <linker> is not recognized." :
- "Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'" ;
- }
- }
-}
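For illustration only (not part of gcc.jam): a small Python sketch of the gnu ld
candidate-name search described in the comment inside the 'gnu' case above. The
function name and platform keys are invented for this example.

    # Candidate file names gnu ld tries for "-lxxx", following the list above.
    def ld_candidates(name, mode, platform):
        if platform == 'nix':
            if mode == 'static':
                return ['lib%s.a' % name]
            return ['lib%s.so' % name, 'lib%s.a' % name]
        # 'windows' covers mingw and cygwin; the cyg prefix applies to cygwin only.
        if mode == 'static':
            return ['lib%s.a' % name, '%s.lib' % name]
        return ['lib%s.dll.a' % name, '%s.dll.a' % name, 'lib%s.a' % name,
                '%s.lib' % name, 'cyg%s.dll' % name, 'lib%s.dll' % name,
                '%s.dll' % name, 'lib%s.a' % name]

    print(ld_candidates('xxx', 'dynamic', 'windows'))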
-
-# Enclose the RPATH variable on 'targets' in (double) quotes,
-# unless it's already enclosed in single quotes.
-# This special casing is done because it's common to pass
-# '$ORIGIN' to the linker -- it has to stay in single quotes
-# to prevent expansion by the shell -- and wrapping it in double
-# quotes would defeat the protection the single quotes provide.
-rule quote-rpath ( targets * )
-{
- local r = [ on $(targets[1]) return $(RPATH) ] ;
- if ! [ MATCH "('.*')" : $(r) ]
- {
- r = "\"$(r)\"" ;
- }
- RPATH on $(targets) = $(r) ;
-}
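A minimal Python sketch of the quoting decision above, for illustration only;
the regular expression mirrors the MATCH pattern used in the rule.

    import re

    def quote_rpath(rpath):
        # Leave values that already carry single quotes (e.g. '$ORIGIN') untouched;
        # otherwise wrap the value in double quotes.
        if re.search(r"'.*'", rpath):
            return rpath
        return '"%s"' % rpath

    print(quote_rpath("'$ORIGIN'"))   # left as-is
    print(quote_rpath("/opt/lib"))    # becomes "/opt/lib"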
-
-# Declare actions for linking.
-rule link ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since running N links in
- # parallel is just slower. For now, serialize only gcc links, it might be a
- # good idea to serialize all links.
- JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
- quote-rpath $(targets) ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
-
-}
-
-# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
-# does not have the same logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
-# always available.
-.AR = ar ;
-.RANLIB = ranlib ;
-
-toolset.flags gcc.archive AROPTIONS <archiveflags> ;
-
-rule archive ( targets * : sources * : properties * )
-{
- # Always remove archive and start again. Here is the rationale from
- #
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
- # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
- # errors. After some debugging I traced it back to the fact that a1.o was
- # *still* in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM onto the
- # archive action. That makes archives fail to build on NT when they have
- # many files because it will no longer execute the action directly and blow
- # the line length limit. Instead we remove the file in a different action,
- # just before building the archive.
- #
- local clean.a = $(targets[1])(clean) ;
- TEMPORARY $(clean.a) ;
- NOCARE $(clean.a) ;
- LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
- DEPENDS $(clean.a) : $(sources) ;
- DEPENDS $(targets) : $(clean.a) ;
- common.RmTemps $(clean.a) : $(targets) ;
-}
-
-# Declare action for creating static libraries.
-# The letter 'r' means to add files to the archive with replacement. Since we
-# remove archive, we don't care about replacement, but there's no option "add
-# without replacement".
-# The letter 'c' suppresses the warning in case the archive does not exist yet.
-# That warning is produced only on some platforms, for whatever reason.
-actions piecemeal archive
-{
- "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
- "$(.RANLIB)" "$(<)"
-}
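For illustration, a hedged Python sketch of the same remove-then-recreate
archiving step; it assumes 'ar' and 'ranlib' are on PATH and the file names
are made up.

    import os
    import subprocess

    def build_archive(archive, objects):
        # Remove any stale archive first so renamed or deleted members cannot
        # linger, then add everything with 'r' (replace) and 'c' (no warning
        # if the archive does not exist yet).
        if os.path.exists(archive):
            os.remove(archive)
        subprocess.check_call(['ar', 'rc', archive] + list(objects))
        subprocess.check_call(['ranlib', archive])

    # build_archive('liba.a', ['a1.o', 'a2.o'])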
-
-rule link.dll ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
- quote-rpath $(targets) ;
-}
-
-# Differs from 'link' above only by -shared.
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
-}
-
-rule setup-threading ( targets * : sources * : properties * )
-{
- local threading = [ feature.get-values threading : $(properties) ] ;
- if $(threading) = multi
- {
- local target = [ feature.get-values target-os : $(properties) ] ;
- local option ;
- local libs ;
-
- switch $(target)
- {
- case windows :
- {
- option = -mthreads ;
- }
- case cygwin :
- {
- option = -mthreads ;
- }
- case solaris :
- {
- option = -pthreads ;
- libs = rt ;
- }
- case beos :
- {
- # BeOS has no threading options, so do not set anything here.
- }
- case *bsd :
- {
- option = -pthread ;
- # There is no -lrt on BSD.
- }
- case sgi :
- {
- # gcc on IRIX does not support multi-threading so do not set anything
- # here.
- }
- case darwin :
- {
- # Darwin has no threading options so do not set anything here.
- }
- case * :
- {
- option = -pthread ;
- libs = rt ;
- }
- }
-
- if $(option)
- {
- OPTIONS on $(targets) += $(option) ;
- }
- if $(libs)
- {
- FINDLIBS-SA on $(targets) += $(libs) ;
- }
- }
-}
-
-local rule cpu-flags ( toolset variable : architecture : instruction-set + : values + : default ? )
-{
- if $(default)
- {
- toolset.flags $(toolset) $(variable)
- <architecture>$(architecture)/<instruction-set>
- : $(values) ;
- }
- toolset.flags $(toolset) $(variable)
- <architecture>/<instruction-set>$(instruction-set)
- <architecture>$(architecture)/<instruction-set>$(instruction-set)
- : $(values) ;
-}
-
-# Set architecture/instruction-set options.
-#
-# x86 and compatible
-# The 'native' option appeared in gcc 4.2 so we cannot safely use it
-# as default. Use conservative i386 instead.
-cpu-flags gcc OPTIONS : x86 : native : -march=native ;
-cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 : default ;
-cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
-cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
-cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
-cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ;
-cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ;
-cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ;
-cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ;
-cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ;
-cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ;
-cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ;
-cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ;
-cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ;
-cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ;
-cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ;
-cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
-cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
-cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
-cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
-cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ;
-cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ;
-cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ;
-cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ;
-cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ;
-##
-cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ;
-cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ;
-cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ;
-cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ;
-cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
-cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
-cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
-cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ;
-# Sparc
-cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ;
-cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ;
-cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ;
-cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ;
-cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ;
-cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ;
-cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ;
-cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ;
-cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ;
-cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ;
-cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ;
-cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ;
-cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ;
-cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ;
-cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ;
-# RS/6000 & PowerPC
-cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ;
-cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ;
-cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ;
-cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ;
-cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ;
-cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ;
-cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ;
-cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ;
-cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ;
-cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ;
-cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ;
-cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ;
-cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ;
-cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ;
-cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ;
-cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ;
-cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ;
-cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ;
-cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ;
-cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ;
-cpu-flags gcc OPTIONS : power : power : -mcpu=power ;
-cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ;
-cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ;
-cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ;
-cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ;
-cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ;
-cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ;
-cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ;
-cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ;
-cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ;
-cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ;
-cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ;
-# AIX variant of RS/6000 & PowerPC
-toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X 64" ;
diff --git a/jam-files/boost-build/tools/gcc.py b/jam-files/boost-build/tools/gcc.py
deleted file mode 100644
index 2a3e675e..00000000
--- a/jam-files/boost-build/tools/gcc.py
+++ /dev/null
@@ -1,796 +0,0 @@
-# Status: being ported by Steven Watanabe
-# Base revision: 47077
-# TODO: common.jam needs to be ported
-# TODO: generators.jam needs to have register_c_compiler.
-#
-# Copyright 2001 David Abrahams.
-# Copyright 2002-2006 Rene Rivera.
-# Copyright 2002-2003 Vladimir Prus.
-# Copyright (c) 2005 Reece H. Dunn.
-# Copyright 2006 Ilya Sokolov.
-# Copyright 2007 Roland Schwarz
-# Copyright 2007 Boris Gubenko.
-# Copyright 2008 Steven Watanabe
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import os
-import subprocess
-import re
-
-import bjam
-
-from b2.tools import unix, common, rc, pch, builtin
-from b2.build import feature, type, toolset, generators, property_set
-from b2.util.utility import os_name, on_windows
-from b2.manager import get_manager
-from b2.build.generators import Generator
-from b2.build.toolset import flags
-from b2.util.utility import to_seq
-
-__debug = None
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
- return __debug
-
-feature.extend('toolset', ['gcc'])
-
-
-toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll'])
-toolset.inherit_flags('gcc', 'unix')
-toolset.inherit_rules('gcc', 'unix')
-
-generators.override('gcc.prebuilt', 'builtin.prebuilt')
-generators.override('gcc.searched-lib-generator', 'searched-lib-generator')
-
-# Target naming is determined by types/lib.jam and the settings below this
-# comment.
-#
-# On *nix:
-# libxxx.a static library
-# libxxx.so shared library
-#
-# On windows (mingw):
-# libxxx.lib static library
-# xxx.dll DLL
-# xxx.lib import library
-#
-# On windows (cygwin) i.e. <target-os>cygwin
-# libxxx.a static library
-# xxx.dll DLL
-# libxxx.dll.a import library
-#
-# Note: the user can always override this by using the <tag>@rule.
-# These settings have been chosen so that mingw
-# is in line with msvc naming conventions. For
-# cygwin, the cygwin naming convention has been chosen.
-
-# Make the "o" suffix used for gcc toolset on all
-# platforms
-type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o')
-type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a')
-
-type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a')
-type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib')
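A hedged sketch of the naming table above; the helper and platform keys are
invented for illustration, while the real names come from types/lib.jam plus
the suffix/prefix calls just shown.

    def library_names(basename, platform):
        # Returns (static, shared, import) file names per the conventions above.
        if platform == 'nix':
            return ('lib%s.a' % basename, 'lib%s.so' % basename, None)
        if platform == 'mingw':
            return ('lib%s.lib' % basename, '%s.dll' % basename, '%s.lib' % basename)
        if platform == 'cygwin':
            return ('lib%s.a' % basename, '%s.dll' % basename, 'lib%s.dll.a' % basename)
        raise ValueError(platform)

    print(library_names('foo', 'cygwin'))  # ('libfoo.a', 'foo.dll', 'libfoo.dll.a')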
-
-__machine_match = re.compile('^([^ ]+)')
-__version_match = re.compile('^([0-9.]+)')
-
-def init(version = None, command = None, options = None):
- """
- Initializes the gcc toolset for the given version. If necessary, command may
- be used to specify where the compiler is located. The parameter 'options' is a
- space-delimited list of options, each one specified as
- <option-name>option-value. Valid option names are: cxxflags, linkflags and
- linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
- and the default value will be selected based on the current OS.
- Example:
- using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
- """
-
- options = to_seq(options)
- command = to_seq(command)
-
- # Information about the gcc command...
- # The command.
- command = to_seq(common.get_invocation_command('gcc', 'g++', command))
- # The root directory of the tool install.
- root = feature.get_values('<root>', options)
- # The bin directory where to find the command to execute.
- bin = None
- # The flavor of compiler.
- flavor = feature.get_values('<flavor>', options)
- # Autodetect the root and bin dir if not given.
- if command:
- if not bin:
- bin = common.get_absolute_tool_path(command[-1])
- if not root:
- root = os.path.dirname(bin)
- # Autodetect the version and flavor if not given.
- if command:
- machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0]
- machine = __machine_match.search(machine_info).group(1)
-
- version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0]
- version = __version_match.search(version_info).group(1)
- if not flavor and machine.find('mingw') != -1:
- flavor = 'mingw'
-
- condition = None
- if flavor:
- condition = common.check_init_parameters('gcc', None,
- ('version', version),
- ('flavor', flavor))
- else:
- condition = common.check_init_parameters('gcc', None,
- ('version', version))
-
- if command:
- command = command[0]
-
- common.handle_options('gcc', condition, command, options)
-
- linker = feature.get_values('<linker-type>', options)
- if not linker:
- if os_name() == 'OSF':
- linker = 'osf'
- elif os_name() == 'HPUX':
- linker = 'hpux'
- else:
- linker = 'gnu'
-
- init_link_flags('gcc', linker, condition)
-
- # If gcc is installed in non-standard location, we'd need to add
- # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
- # rules).
- if command:
- # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
- # and all must be added to LD_LIBRARY_PATH. The linker will pick the
- # right ones. Note that we don't provide a clean way to build 32-bit
- # binary with 64-bit compiler, but user can always pass -m32 manually.
- lib_path = [os.path.join(root, 'bin'),
- os.path.join(root, 'lib'),
- os.path.join(root, 'lib32'),
- os.path.join(root, 'lib64')]
- if debug():
- print 'notice: using gcc libraries ::', condition, '::', lib_path
- toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path)
-
- # If it's not a system gcc install we should adjust the various programs as
- # needed to prefer using the install-specific versions. This is essential
- # for correct use of MinGW and for cross-compiling.
-
- # - The archive builder.
- archiver = common.get_invocation_command('gcc',
- 'ar', feature.get_values('<archiver>', options), [bin], path_last=True)
- toolset.flags('gcc.archive', '.AR', condition, [archiver])
- if debug():
- print 'notice: using gcc archiver ::', condition, '::', archiver
-
- # - The resource compiler.
- rc_command = common.get_invocation_command_nodefault('gcc',
- 'windres', feature.get_values('<rc>', options), [bin], path_last=True)
- rc_type = feature.get_values('<rc-type>', options)
-
- if not rc_type:
- rc_type = 'windres'
-
- if not rc_command:
- # If we can't find an RC compiler we fallback to a null RC compiler that
- # creates empty object files. This allows the same Jamfiles to work
- # across the board. The null RC uses the assembler to create the empty
- # objects, so configure that.
- rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True)
- rc_type = 'null'
- rc.configure(rc_command, condition, '<rc-type>' + rc_type)
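As a rough, assumption-laden sketch of the fallback just described (GNU 'as'
assumed to be on PATH), the null resource step amounts to assembling an empty
input into an empty object file so that downstream steps still get something
to link.

    import subprocess
    import tempfile

    def make_null_resource_object(assembler, output):
        # No real resource compiler available: assemble an empty translation
        # unit so the build can proceed with an empty object file.
        with tempfile.NamedTemporaryFile(suffix='.s', delete=False) as src:
            empty_source = src.name
        subprocess.check_call([assembler, '-o', output, empty_source])

    # make_null_resource_object('as', 'dummy_res.o')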
-
-###if [ os.name ] = NT
-###{
-### # This causes single-line command invocation to not go through .bat files,
-### # thus avoiding command-line length limitations.
-### JAMSHELL = % ;
-###}
-
-#FIXME: when register_c_compiler is moved to
-# generators, these should be updated
-builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
-builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
-builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc'])
-
-# pch support
-
-# The compiler looks for a precompiled header in each directory just before it
-# looks for the include file in that directory. The name searched for is the
-# name specified in the #include directive with ".gch" suffix appended. The
-# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to
-# full name of the header.
-
-type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch')
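For illustration only: given '#include "foo.hpp"', gcc first looks for
'foo.hpp.gch' in the same search directory before falling back to the header
itself. A tiny sketch of that lookup:

    import os

    def pch_candidate(header_path):
        # gcc searches for "<header>.gch" before the header itself.
        return header_path + '.gch'

    def resolve(header_path):
        gch = pch_candidate(header_path)
        return gch if os.path.exists(gch) else header_path

    print(pch_candidate('boost/config.hpp'))  # boost/config.hpp.gch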
-
-# GCC-specific pch generator.
-class GccPchGenerator(pch.PchGenerator):
-
- # Inherit the __init__ method
-
- def run_pch(self, project, name, prop_set, sources):
- # Find the header in sources. Ignore any CPP sources.
- header = None
- for s in sources:
- if type.is_derived(s.type, 'H'):
- header = s
-
- # Error handling: Base header file name should be the same as the base
- # precompiled header name.
- header_name = header.name
- header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
- if header_basename != name:
- location = project.project_module
- ###FIXME:
- raise Exception()
- ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
-
- pch_file = Generator.run(self, project, name, prop_set, [header])
-
- # return result of base class and pch-file property as usage-requirements
- # FIXME: what about multiple results from generator.run?
- return (property_set.create('<pch-file>' + pch_file[0], '<cflags>-Winvalid-pch'),
- pch_file)
-
- # Calls the base version specifying source's name as the name of the created
- # target. As a result, the PCH will be named whatever.hpp.gch, and not
- # whatever.gch.
- def generated_targets(self, sources, prop_set, project, name = None):
- name = sources[0].name
- return Generator.generated_targets(self, sources,
- prop_set, project, name)
-
-# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
-# latter have HPP type, but HPP type is derived from H. The type of compilation
-# is determined entirely by the destination type.
-generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ]))
-generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ]))
-
-# Override default do-nothing generators.
-generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator')
-generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator')
-
-flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>'])
-
-# Declare flags and action for compilation
-flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
-flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
-flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
-
-flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
-flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
-flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
-
-flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w'])
-flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall'])
-flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic'])
-flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror'])
-
-flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
-flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
-flags('gcc.compile', 'OPTIONS', ['<rtti>off'], ['-fno-rtti'])
-
-# On cygwin and mingw, gcc generates position independent code by default, and
-# warns if -fPIC is specified. This might not be the right way of checking if
-# we're using cygwin. For example, it's possible to run cygwin gcc from NT
-# shell, or when cross-compiling. But we'll solve that problem when it's time.
-# In that case we'll just add another parameter to 'init' and move this logic
-# inside 'init'.
-if not os_name () in ['CYGWIN', 'NT']:
- # This logic will add -fPIC for all compilations:
- #
- # lib a : a.cpp b ;
- # obj b : b.cpp ;
- # exe c : c.cpp a d ;
- # obj d : d.cpp ;
- #
- # This all is fine, except that 'd' will be compiled with -fPIC even though
- # it's not needed, as 'd' is used only in exe. However, it's hard to detect
- # where a target is going to be used. Alternatively, we can set -fPIC only
- # when the main target type is LIB, but then 'b' will be compiled without -fPIC.
- # In x86-64 that will lead to link errors. So, compile everything with
- # -fPIC.
- #
- # Yet another alternative would be to create propagated <sharedable>
- # feature, and set it when building shared libraries, but that's hard to
- # implement and will increase target path length even more.
- flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC'])
-
-if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX':
- # OSF does have an option called -soname but it doesn't seem to work as
- # expected, therefore it has been disabled.
- HAVE_SONAME = ''
- SONAME_OPTION = '-h'
-
-
-flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>'])
-flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>'])
-flags('gcc.compile', 'DEFINES', [], ['<define>'])
-flags('gcc.compile', 'INCLUDES', [], ['<include>'])
-
-engine = get_manager().engine()
-
-engine.register_action('gcc.compile.c++.pch',
- '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
-
-engine.register_action('gcc.compile.c.pch',
- '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
-
-
-def gcc_compile_cpp(targets, sources, properties):
- # Some extensions are compiled as C++ by default. For others, we need to
- # pass -x c++. We could always pass -x c++ but distcc does not work with it.
- extension = os.path.splitext (sources [0]) [1]
- lang = ''
- if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
- lang = '-x c++'
- get_manager().engine().set_target_variable (targets, 'LANG', lang)
- engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
-
-def gcc_compile_c(targets, sources, properties):
- engine = get_manager().engine()
- # If we use the name g++ then default file suffix -> language mapping does
- # not work. So have to pass -x option. Maybe, we can work around this by
- # allowing the user to specify both C and C++ compiler names.
- #if $(>:S) != .c
- #{
- engine.set_target_variable (targets, 'LANG', '-x c')
- #}
- engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
-
-engine.register_action(
- 'gcc.compile.c++',
- '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
- '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' +
- '-c -o "$(<:W)" "$(>:W)"',
- function=gcc_compile_cpp,
- bound_list=['PCH_FILE'])
-
-engine.register_action(
- 'gcc.compile.c',
- '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' +
- '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
- function=gcc_compile_c,
- bound_list=['PCH_FILE'])
-
-def gcc_compile_asm(targets, sources, properties):
- get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp')
-
-engine.register_action(
- 'gcc.compile.asm',
- '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
- function=gcc_compile_asm)
-
-
-class GccLinkingGenerator(unix.UnixLinkingGenerator):
- """
- The class which checks that we don't try to use the <runtime-link>static
- property while creating or using a shared library, since it's not supported
- by gcc/libc.
- """
- def run(self, project, name, ps, sources):
- # TODO: Replace this with the use of a target-os property.
-
- no_static_link = False
- if bjam.variable('UNIX'):
- no_static_link = True
- ##FIXME: what does this mean?
-## {
-## switch [ modules.peek : JAMUNAME ]
-## {
-## case * : no-static-link = true ;
-## }
-## }
-
- reason = None
- if no_static_link and ps.get('runtime-link') == 'static':
- if ps.get('link') == 'shared':
- reason = "On gcc, DLL can't be build with '<runtime-link>static'."
- elif type.is_derived(self.target_types[0], 'EXE'):
- for s in sources:
- source_type = s.type()
- if source_type and type.is_derived(source_type, 'SHARED_LIB'):
- reason = "On gcc, using DLLS together with the " +\
- "<runtime-link>static options is not possible "
- if reason:
- print 'warning:', reason
- print 'warning:',\
- "It is suggested to use '<runtime-link>static' together",\
- "with '<link>static'." ;
- return
- else:
- generated_targets = unix.UnixLinkingGenerator.run(self, project,
- name, ps, sources)
- return generated_targets
-
-if on_windows():
- flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,'])
- generators.register(
- GccLinkingGenerator('gcc.link', True,
- ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
- [ 'EXE' ],
- [ '<toolset>gcc' ]))
- generators.register(
- GccLinkingGenerator('gcc.link.dll', True,
- ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
- ['IMPORT_LIB', 'SHARED_LIB'],
- ['<toolset>gcc']))
-else:
- generators.register(
- GccLinkingGenerator('gcc.link', True,
- ['LIB', 'OBJ'],
- ['EXE'],
- ['<toolset>gcc']))
- generators.register(
- GccLinkingGenerator('gcc.link.dll', True,
- ['LIB', 'OBJ'],
- ['SHARED_LIB'],
- ['<toolset>gcc']))
-
-# Declare flags for linking.
-# First, the common flags.
-flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
-flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
-flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>'])
-flags('gcc.link', 'LINKPATH', [], ['<library-path>'])
-flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
-flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
-flags('gcc.link', 'LIBRARIES', [], ['<library-file>'])
-
-# For <runtime-link>static we made sure there are no dynamic libraries in the
-# link. On HP-UX not all system libraries exist as archived libraries (for
-# example, there is no libunwind.a), so, on this platform, the -static option
-# cannot be specified.
-if os_name() != 'HPUX':
- flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static'])
-
-# Now, the vendor specific flags.
-# The parameter linker can be either gnu, darwin, osf, hpux or sun.
-def init_link_flags(toolset, linker, condition):
- """
- Now, the vendor specific flags.
- The parameter linker can be either gnu, darwin, osf, hpux or sun.
- """
- toolset_link = toolset + '.link'
- if linker == 'gnu':
- # Strip the binary when no debugging is needed. We use --strip-all flag
- # as opposed to -s since icc (intel's compiler) is generally
- # option-compatible with and inherits from the gcc toolset, but does not
- # support -s.
-
- # FIXME: what does unchecked translate to?
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all']) # : unchecked ;
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
- flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
- flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ;
- flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ;
-
- # gnu ld has the ability to change the search behaviour for libraries
- # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
- # and change search for -l switches that follow them. The following list
- # shows the tried variants.
- # The search stops at the first variant that has a match.
- # *nix: -Bstatic -lxxx
- # libxxx.a
- #
- # *nix: -Bdynamic -lxxx
- # libxxx.so
- # libxxx.a
- #
- # windows (mingw,cygwin) -Bstatic -lxxx
- # libxxx.a
- # xxx.lib
- #
- # windows (mingw,cygwin) -Bdynamic -lxxx
- # libxxx.dll.a
- # xxx.dll.a
- # libxxx.a
- # xxx.lib
- # cygxxx.dll (*)
- # libxxx.dll
- # xxx.dll
- # libxxx.a
- #
- # (*) This is for cygwin
- # Please note that -Bstatic and -Bdynamic are not a guarantee that a
- # static or dynamic lib indeed gets linked in. The switches only change
- # search patterns!
-
- # On *nix mixing shared libs with static runtime is not a good idea.
- flags(toolset_link, 'FINDLIBS-ST-PFX',
- map(lambda x: x + '/<runtime-link>shared', condition),
- ['-Wl,-Bstatic']) # : unchecked ;
- flags(toolset_link, 'FINDLIBS-SA-PFX',
- map(lambda x: x + '/<runtime-link>shared', condition),
- ['-Wl,-Bdynamic']) # : unchecked ;
-
- # On windows allow mixing of static and dynamic libs with static
- # runtime.
- flags(toolset_link, 'FINDLIBS-ST-PFX',
- map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
- ['-Wl,-Bstatic']) # : unchecked ;
- flags(toolset_link, 'FINDLIBS-SA-PFX',
- map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
- ['-Wl,-Bdynamic']) # : unchecked ;
- flags(toolset_link, 'OPTIONS',
- map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
- ['-Wl,-Bstatic']) # : unchecked ;
-
- elif linker == 'darwin':
- # On Darwin, the -s option to ld does not work unless we pass -static,
- # and passing -static unconditionally is a bad idea. So, do not pass -s
- # at all; darwin.jam will use a separate 'strip' invocation.
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
- flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
-
- elif linker == 'osf':
- # No --strip-all, just -s.
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
- # : unchecked ;
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
- # This linker does not support -R.
- flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ;
- # -rpath-link is not supported at all.
-
- elif linker == 'sun':
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
- # : unchecked ;
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
- # The Solaris linker does not have a separate -rpath-link, but allows
- # using -L for the same purpose.
- flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ;
-
- # This permits shared libraries with non-PIC code on Solaris.
- # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
- # following is not needed. Whether -fPIC should be hardcoded, is a
- # separate question.
- # AH, 2004/10/16: it is still necessary because some tests link against
- # static libraries that were compiled without PIC.
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text'])
- # : unchecked ;
-
- elif linker == 'hpux':
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition),
- ['-Wl,-s']) # : unchecked ;
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition),
- ['-fPIC']) # : unchecked ;
-
- else:
- # FIXME:
- errors.user_error(
- "$(toolset) initialization: invalid linker '$(linker)' " +
- "The value '$(linker)' specified for <linker> is not recognized. " +
- "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'")
-
-# Declare actions for linking.
-def gcc_link(targets, sources, properties):
- engine = get_manager().engine()
- engine.set_target_variable(targets, 'SPACE', ' ')
- # Serialize execution of the 'link' action, since running N links in
- # parallel is just slower. For now, serialize only gcc links, it might be a
- # good idea to serialize all links.
- engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
-
-engine.register_action(
- 'gcc.link',
- '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
- '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
- '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' +
- '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
- '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
- '$(OPTIONS) $(USER_OPTIONS)',
- function=gcc_link,
- bound_list=['LIBRARIES'])
-
-# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
-# does not have the same logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
-# always available.
-__AR = 'ar'
-
-flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>'])
-
-def gcc_archive(targets, sources, properties):
- # Always remove archive and start again. Here's rationale from
- #
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
- # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
- # errors. After some debugging I traced it back to the fact that a1.o was
- # *still* in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM onto the
- # archive action. That makes archives fail to build on NT when they have
- # many files because it will no longer execute the action directly and blow
- # the line length limit. Instead we remove the file in a different action,
- # just before building the archive.
- clean = targets[0] + '(clean)'
- bjam.call('TEMPORARY', clean)
- bjam.call('NOCARE', clean)
- engine = get_manager().engine()
- engine.set_target_variable(clean, 'LOCATE', bjam.call('get-target-variable', targets, 'LOCATE'))
- engine.add_dependency(clean, sources)
- engine.add_dependency(targets, clean)
- engine.set_update_action('common.RmTemps', clean, targets)
-
-# Declare action for creating static libraries.
-# The letter 'r' means to add files to the archive with replacement. Since we
-# remove archive, we don't care about replacement, but there's no option "add
-# without replacement".
-# The letter 'c' suppresses the warning in case the archive does not exists yet.
-# That warning is produced only on some platforms, for whatever reasons.
-engine.register_action('gcc.archive',
- '"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"',
- function=gcc_archive,
- flags=['piecemeal'])
-
-def gcc_link_dll(targets, sources, properties):
- engine = get_manager().engine()
- engine.set_target_variable(targets, 'SPACE', ' ')
- engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
- engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME)
- engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION)
-
-engine.register_action(
- 'gcc.link.dll',
- # Differ from 'link' above only by -shared.
- '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
- '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
- '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' +
- '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' +
- '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
- '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
- '$(OPTIONS) $(USER_OPTIONS)',
- function = gcc_link_dll,
- bound_list=['LIBRARIES'])
-
-# Set up threading support. It's somewhat contrived, so perform it at the end,
-# to avoid cluttering other code.
-
-if on_windows():
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
-elif bjam.variable('UNIX'):
- jamuname = bjam.variable('JAMUNAME')
- host_os_name = jamuname[0]
- if host_os_name.startswith('SunOS'):
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
- flags('gcc', 'FINDLIBS-SA', [], ['rt'])
- elif host_os_name == 'BeOS':
- # BeOS has no threading options, don't set anything here.
- pass
- elif host_os_name.endswith('BSD'):
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
- # there is no -lrt on BSD
- elif host_os_name == 'DragonFly':
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
- # there is no -lrt on BSD - DragonFly is a FreeBSD variant,
- # which annoyingly doesn't say it's a *BSD.
- elif host_os_name == 'IRIX':
- # gcc on IRIX does not support multi-threading, don't set anything here.
- pass
- elif host_os_name == 'Darwin':
- # Darwin has no threading options, don't set anything here.
- pass
- else:
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
- flags('gcc', 'FINDLIBS-SA', [], ['rt'])
-
-def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
- #FIXME: for some reason this fails. Probably out of date feature code
-## if default:
-## flags(toolset, variable,
-## ['<architecture>' + architecture + '/<instruction-set>'],
-## values)
- flags(toolset, variable,
- #FIXME: same as above
- [##'<architecture>/<instruction-set>' + instruction_set,
- '<architecture>' + architecture + '/<instruction-set>' + instruction_set],
- values)
-
-# Set architecture/instruction-set options.
-#
-# x86 and compatible
-flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
-flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i386', ['-march=i386'], default=True)
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp'])
-##
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2'])
-# Sparc
-flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32'])
-flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True)
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3'])
-# RS/6000 & PowerPC
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32'])
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64'])
-cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403'])
-cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505'])
-cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601'])
-cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602'])
-cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603'])
-cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e'])
-cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604'])
-cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e'])
-cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620'])
-cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630'])
-cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740'])
-cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400'])
-cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450'])
-cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750'])
-cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801'])
-cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821'])
-cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823'])
-cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860'])
-cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970'])
-cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64'])
-# AIX variant of RS/6000 & PowerPC
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32'])
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64'])
-flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X 64'])
diff --git a/jam-files/boost-build/tools/generate.jam b/jam-files/boost-build/tools/generate.jam
deleted file mode 100644
index 6732fa35..00000000
--- a/jam-files/boost-build/tools/generate.jam
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Declares main target 'generate' used to produce targets by calling a
-# user-provided rule that takes and produces virtual targets.
-
-import "class" : new ;
-import errors ;
-import feature ;
-import project ;
-import property ;
-import property-set ;
-import targets ;
-import regex ;
-
-
-feature.feature generating-rule : : free ;
-
-
-class generated-target-class : basic-target
-{
- import errors ;
- import indirect ;
- import virtual-target ;
-
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources)
- : $(requirements) : $(default-build) : $(usage-requirements) ;
-
- if ! [ $(self.requirements).get <generating-rule> ]
- {
- errors.user-error "The generate rule requires the <generating-rule>"
- "property to be set" ;
- }
- }
-
- rule construct ( name : sources * : property-set )
- {
- local result ;
- local gr = [ $(property-set).get <generating-rule> ] ;
-
- # FIXME: this is a copy-paste from virtual-target.jam. We should add a
- # utility rule to call a rule like this.
- local rule-name = [ MATCH ^@(.*) : $(gr) ] ;
- if $(rule-name)
- {
- if $(gr[2])
- {
- local target-name = [ full-name ] ;
- errors.user-error "Multiple <generating-rule> properties"
- "encountered for target $(target-name)." ;
- }
-
- result = [ indirect.call $(rule-name) $(self.project) $(name)
- : $(property-set) : $(sources) ] ;
-
- if ! $(result)
- {
- ECHO "warning: Unable to construct" [ full-name ] ;
- }
- }
-
- local ur ;
- local targets ;
-
- if $(result)
- {
- if [ class.is-a $(result[1]) : property-set ]
- {
- ur = $(result[1]) ;
- targets = $(result[2-]) ;
- }
- else
- {
- ur = [ property-set.empty ] ;
- targets = $(result) ;
- }
- }
- # FIXME: the following loop should be doable using sequence.transform or
- # some similar utility rule.
- local rt ;
- for local t in $(targets)
- {
- rt += [ virtual-target.register $(t) ] ;
- }
- return $(ur) $(rt) ;
- }
-}
-
-
-rule generate ( name : sources * : requirements * : default-build *
- : usage-requirements * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new generated-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ;
-}
-
-IMPORT $(__name__) : generate : : generate ;
diff --git a/jam-files/boost-build/tools/gettext.jam b/jam-files/boost-build/tools/gettext.jam
deleted file mode 100644
index 99a43ffe..00000000
--- a/jam-files/boost-build/tools/gettext.jam
+++ /dev/null
@@ -1,230 +0,0 @@
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module supports GNU gettext internationalization utilities.
-#
-# It provides two main target rules: 'gettext.catalog', used for
-# creating machine-readable catalogs from translations files, and
-# 'gettext.update', used for update translation files from modified
-# sources.
-#
-# To add i18n support to your application you should follow these
-# steps.
-#
-# - Decide on a file name which will contain translations and
-# what main target name will be used to update it. For example::
-#
-# gettext.update update-russian : russian.po a.cpp my_app ;
-#
-# - Create the initial translation file by running::
-#
-# bjam update-russian
-#
-# - Edit russian.po. For example, you might change fields like LastTranslator.
-#
-# - Create a main target for final message catalog::
-#
-# gettext.catalog russian : russian.po ;
-#
-# The machine-readable catalog will be updated whenever you update
-# "russian.po". The "russian.po" file will be updated only on explicit
-# request. When you're ready to update translations, you should
-#
-# - Run::
-#
-# bjam update-russian
-#
-# - Edit "russian.po" in appropriate editor.
-#
-# The next bjam run will convert "russian.po" into machine-readable form.
-#
-# By default, translations are marked by 'i18n' call. The 'gettext.keyword'
-# feature can be used to alter this.
-
-
-import targets ;
-import property-set ;
-import virtual-target ;
-import "class" : new ;
-import project ;
-import type ;
-import generators ;
-import errors ;
-import feature : feature ;
-import toolset : flags ;
-import regex ;
-
-.path = "" ;
-
-# Initializes the gettext module.
-rule init ( path ? # Path where all tools are located. If not specified,
- # they should be in PATH.
- )
-{
- if $(.initialized) && $(.path) != $(path)
- {
- errors.error "Attempt to reconfigure with different path" ;
- }
- .initialized = true ;
- if $(path)
- {
- .path = $(path)/ ;
- }
-}
-
-# Creates a main target 'name', which, when updated, will cause
-# file 'existing-translation' to be updated with translations
-# extracted from 'sources'. It's possible to specify a main target
-# in sources --- in which case all targets from the dependency graph
-# of those main targets will be scanned, provided they are of the
-# appropriate type. The 'gettext.types' feature can be used to
-# control the types.
-#
-# The target will be updated only if explicitly requested on the
-# command line.
-rule update ( name : existing-translation sources + : requirements * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new typed-target $(name) : $(project) : gettext.UPDATE :
- $(existing-translation) $(sources)
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(name) ;
-}
-
-
-# The human editable source, containing translation.
-type.register gettext.PO : po ;
-# The machine readable message catalog.
-type.register gettext.catalog : mo ;
-# Intermediate type produced by extracting translations from
-# sources.
-type.register gettext.POT : pot ;
-# Pseudo type used to invoke update-translations generator
-type.register gettext.UPDATE ;
-
-# Identifies the keyword that should be used when scanning sources.
-# Default: i18n
-feature gettext.keyword : : free ;
-# Contains space-separated list of sources types which should be scanned.
-# Default: "C CPP"
-feature gettext.types : : free ;
-
-generators.register-standard gettext.compile : gettext.PO : gettext.catalog ;
-
-class update-translations-generator : generator
-{
- import regex : split ;
- import property-set ;
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- # The rule should be called with at least two sources. The first source
- # is the translation (.po) file to update. The remaining sources are targets
- # which should be scanned for new messages. All source files for those targets
- # will be found and passed to the 'xgettext' utility, which extracts the
- # messages for localization. Those messages will be merged to the .po file.
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- local types = [ $(property-set).get <gettext.types> ] ;
- types ?= "C CPP" ;
- types = [ regex.split $(types) " " ] ;
-
- local keywords = [ $(property-set).get <gettext.keyword> ] ;
- property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ;
-
- # First determine the list of sources that must be scanned for
- # messages.
- local all-sources ;
- # CONSIDER: I'm not sure if the logic should be the same as for 'stage':
- # i.e. following dependency properties as well.
- for local s in $(sources[2-])
- {
- all-sources += [ virtual-target.traverse $(s) : : include-sources ] ;
- }
- local right-sources ;
- for local s in $(all-sources)
- {
- if [ $(s).type ] in $(types)
- {
- right-sources += $(s) ;
- }
- }
-
- local .constructed ;
- if $(right-sources)
- {
- # Create the POT file, which will contain list of messages extracted
- # from the sources.
- local extract =
- [ new action $(right-sources) : gettext.extract : $(property-set) ] ;
- local new-messages = [ new file-target $(name) : gettext.POT
- : $(project) : $(extract) ] ;
-
- # Create a notfile target which will update the existing translation file
- # with new messages.
- local a = [ new action $(sources[1]) $(new-messages)
- : gettext.update-po-dispatch ] ;
- local r = [ new notfile-target $(name) : $(project) : $(a) ] ;
- .constructed = [ virtual-target.register $(r) ] ;
- }
- else
- {
- errors.error "No source could be scanned by gettext tools" ;
- }
- return $(.constructed) ;
- }
-}
-generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ;
-
-flags gettext.extract KEYWORD <gettext.keyword> ;
-actions extract
-{
- $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>)
-}
-
-# Does the real updating of the po file. The tricky part is that
-# we're actually updating one of the sources:
-# $(<) is the NOTFILE target we're updating
-# $(>[1]) is the PO file to be really updated.
-# $(>[2]) is the PO file created from sources.
-#
-# When the file to be updated does not exist (during the
-# first run), we need to copy the file created from sources.
-# In all other cases, we need to update the file.
-rule update-po-dispatch
-{
- NOCARE $(>[1]) ;
- gettext.create-po $(<) : $(>) ;
- gettext.update-po $(<) : $(>) ;
- _ on $(<) = " " ;
- ok on $(<) = "" ;
- EXISTING_PO on $(<) = $(>[1]) ;
-}
-
-# Due to fancy interaction of existing and updated, this rule can be called with
-# one source, in which case we copy the lonely source into EXISTING_PO, or with
-# two sources, in which case the action body expands to nothing. I'd really like
-# to have a "missing" action modifier.
-actions quietly existing updated create-po bind EXISTING_PO
-{
- cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
-}
-
-actions updated update-po bind EXISTING_PO
-{
- $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])"
-}
-
-actions gettext.compile
-{
- $(.path)msgfmt -o $(<) $(>)
-}
-
-IMPORT $(__name__) : update : : gettext.update ;
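A minimal usage sketch for the module deleted above (the target and file names are illustrative, not part of the diff): the 'update' rule is exported as 'gettext.update', and the resulting target is only built when requested explicitly on the command line.

    # Hypothetical Jamfile
    using gettext ;            # or: using gettext : /path/to/gettext/bin ;

    exe hello : main.cpp ;

    # Refresh russian.po with messages extracted from the sources of 'hello'.
    # Built only when named explicitly, e.g. "bjam update-russian".
    gettext.update update-russian : russian.po hello ;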
diff --git a/jam-files/boost-build/tools/gfortran.jam b/jam-files/boost-build/tools/gfortran.jam
deleted file mode 100644
index 0aa69b85..00000000
--- a/jam-files/boost-build/tools/gfortran.jam
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (C) 2004 Toon Knapen
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import toolset : flags ;
-import feature ;
-import fortran ;
-
-rule init ( version ? : command * : options * )
-{
-}
-
-# Declare flags and action for compilation
-flags gfortran OPTIONS <fflags> ;
-
-flags gfortran OPTIONS <optimization>off : -O0 ;
-flags gfortran OPTIONS <optimization>speed : -O3 ;
-flags gfortran OPTIONS <optimization>space : -Os ;
-
-flags gfortran OPTIONS <debug-symbols>on : -g ;
-flags gfortran OPTIONS <profiling>on : -pg ;
-
-flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ;
-
-flags gfortran DEFINES <define> ;
-flags gfortran INCLUDES <include> ;
-
-rule compile.fortran
-{
-}
-
-actions compile.fortran
-{
- gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
-}
-
-generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ;
diff --git a/jam-files/boost-build/tools/hp_cxx.jam b/jam-files/boost-build/tools/hp_cxx.jam
deleted file mode 100644
index 86cd783e..00000000
--- a/jam-files/boost-build/tools/hp_cxx.jam
+++ /dev/null
@@ -1,181 +0,0 @@
-# Copyright 2001 David Abrahams.
-# Copyright 2004, 2005 Markus Schoepflin.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# HP CXX compiler
-# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN
-#
-#
-# Notes on this toolset:
-#
-# - Because of very subtle issues with the default ansi mode, strict_ansi mode
-# is used for compilation. One example of things that don't work correctly in
-# the default ansi mode is overload resolution of function templates when
-# mixed with non-template functions.
-#
-# - For template instantiation "-timplicit_local" is used. Previously,
-# "-tlocal" has been tried to avoid the need for a template repository
-# but this doesn't work with manually instantiated templates. "-tweak"
-# has not been used to avoid the stream of warning messages issued by
-# ar or ld when creating a library or linking an application.
-#
-# - Debug symbols are generated with "-g3", as this works both in debug and
-# release mode. When compiling C++ code without optimization, we additionally
-# use "-gall", which generates full symbol table information for all classes,
-# structs, and unions. As this turns off optimization, it can't be used when
-# optimization is needed.
-#
-
-import feature generators common ;
-import toolset : flags ;
-
-feature.extend toolset : hp_cxx ;
-feature.extend c++abi : cxxarm ;
-
-# Inherit from Unix toolset to get library ordering magic.
-toolset.inherit hp_cxx : unix ;
-
-generators.override hp_cxx.prebuilt : builtin.lib-generator ;
-generators.override hp_cxx.prebuilt : builtin.prebuilt ;
-generators.override hp_cxx.searched-lib-generator : searched-lib-generator ;
-
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ;
-
- local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ;
-
- if $(command)
- {
- local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
-
- if $(root)
- {
- flags hp_cxx .root $(condition) : "\"$(root)\"/" ;
- }
- }
- # If we can't find 'cxx' anyway, at least show 'cxx' in the commands
- command ?= cxx ;
-
- common.handle-options hp_cxx : $(condition) : $(command) : $(options) ;
-}
-
-generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ;
-generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ;
-
-
-
-# No static linking as far as I can tell.
-# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
-flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ;
-flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ;
-flags hp_cxx.link OPTIONS <debug-symbols>on : -g ;
-flags hp_cxx.link OPTIONS <debug-symbols>off : -s ;
-
-flags hp_cxx.compile OPTIONS <optimization>off : -O0 ;
-flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ;
-flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ;
-
-# This (undocumented) macro needs to be defined to get all C function
-# overloads required by the C++ standard.
-flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ;
-
-# Added for threading support
-flags hp_cxx.compile OPTIONS <threading>multi : -pthread ;
-flags hp_cxx.link OPTIONS <threading>multi : -pthread ;
-
-flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ;
-flags hp_cxx.compile OPTIONS <optimization>space : -O1 ;
-flags hp_cxx.compile OPTIONS <inlining>off : -inline none ;
-
-# The compiler versions tried (up to V6.5-040) hang when compiling Boost code
-# with full inlining enabled. So leave it at the default level for now.
-#
-# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ;
-
-flags hp_cxx.compile OPTIONS <profiling>on : -pg ;
-flags hp_cxx.link OPTIONS <profiling>on : -pg ;
-
-# Selection of the object model. This flag is needed on both the C++ compiler
-# and linker command line.
-
-# Unspecified ABI translates to '-model ansi' as the most
-# standard-conforming.
-flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ;
-flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ;
-flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ;
-flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ;
-
-# Display a descriptive tag together with each compiler message. This tag can
-# be used by the user to explicitly suppress the compiler message.
-flags hp_cxx.compile OPTIONS : -msg_display_tag ;
-
-flags hp_cxx.compile OPTIONS <cflags> ;
-flags hp_cxx.compile.c++ OPTIONS <cxxflags> ;
-flags hp_cxx.compile DEFINES <define> ;
-flags hp_cxx.compile INCLUDES <include> ;
-flags hp_cxx.link OPTIONS <linkflags> ;
-
-flags hp_cxx.link LIBPATH <library-path> ;
-flags hp_cxx.link LIBRARIES <library-file> ;
-flags hp_cxx.link FINDLIBS-ST <find-static-library> ;
-flags hp_cxx.link FINDLIBS-SA <find-shared-library> ;
-
-flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
-
-actions link bind LIBRARIES
-{
- $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm
-}
-
-# When creating dynamic libraries, we don't want to be warned about unresolved
-# symbols, therefore all unresolved symbols are marked as expected by
-# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
-# chain.
-
-actions link.dll bind LIBRARIES
-{
- $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm
-}
-
-
-# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
-# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
-# is the default, no special flag is needed.
-actions compile.c
-{
- $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
-}
-
-# Note: The compiler is forced to compile the files as C++ (-x cxx) because
-# otherwise it will silently ignore files with no file extension.
-#
-# Note: We deliberately don't suppress any warnings on the compiler command
-# line, the user can always do this in a customized toolset later on.
-
-rule compile.c++
-{
- # We preprocess the TEMPLATE_DEPTH command line option here because we found
- # no way to do it correctly in the actual action code. There we either get
- # the -pending_instantiations parameter when no c++-template-depth property
- # has been specified or we get additional quotes around
- # "-pending_instantiations ".
- local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
- TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ;
-}
-
-actions compile.c++
-{
- $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
-}
-
-# Always create archive from scratch. See the gcc toolset for rationale.
-RM = [ common.rm-command ] ;
-actions together piecemeal archive
-{
- $(RM) "$(<)"
- ar rc $(<) $(>)
-}
diff --git a/jam-files/boost-build/tools/hpfortran.jam b/jam-files/boost-build/tools/hpfortran.jam
deleted file mode 100644
index 96e8d18b..00000000
--- a/jam-files/boost-build/tools/hpfortran.jam
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (C) 2004 Toon Knapen
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import toolset : flags ;
-import feature ;
-import fortran ;
-
-rule init ( version ? : command * : options * )
-{
-}
-
-# Declare flags and action for compilation
-flags hpfortran OPTIONS <optimization>off : -O0 ;
-flags hpfortran OPTIONS <optimization>speed : -O3 ;
-flags hpfortran OPTIONS <optimization>space : -O1 ;
-
-flags hpfortran OPTIONS <debug-symbols>on : -g ;
-flags hpfortran OPTIONS <profiling>on : -pg ;
-
-flags hpfortran DEFINES <define> ;
-flags hpfortran INCLUDES <include> ;
-
-rule compile.fortran
-{
-}
-
-actions compile.fortran
-{
- f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
-}
-
-generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/ifort.jam b/jam-files/boost-build/tools/ifort.jam
deleted file mode 100644
index eb7c1988..00000000
--- a/jam-files/boost-build/tools/ifort.jam
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (C) 2004 Toon Knapen
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import toolset : flags ;
-import feature ;
-import fortran ;
-
-rule init ( version ? : command * : options * )
-{
-}
-
-# Declare flags and action for compilation
-flags ifort OPTIONS <fflags> ;
-
-flags ifort OPTIONS <optimization>off : /Od ;
-flags ifort OPTIONS <optimization>speed : /O3 ;
-flags ifort OPTIONS <optimization>space : /O1 ;
-
-flags ifort OPTIONS <debug-symbols>on : /debug:full ;
-flags ifort OPTIONS <profiling>on : /Qprof_gen ;
-
-flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
-flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
-flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
-flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
-flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
-flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
-
-flags ifort DEFINES <define> ;
-flags ifort INCLUDES <include> ;
-
-rule compile.fortran
-{
-}
-
-actions compile.fortran
-{
- ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)"
-}
-
-generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/intel-darwin.jam b/jam-files/boost-build/tools/intel-darwin.jam
deleted file mode 100644
index aa0fd8fb..00000000
--- a/jam-files/boost-build/tools/intel-darwin.jam
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import intel ;
-import feature : feature ;
-import os ;
-import toolset ;
-import toolset : flags ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-
-feature.extend-subfeature toolset intel : platform : darwin ;
-
-toolset.inherit-generators intel-darwin
- <toolset>intel <toolset-intel:platform>darwin
- : gcc
- # Don't inherit PCH generators. They were not tested, and probably
- # don't work for this compiler.
- : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
- ;
-
-generators.override intel-darwin.prebuilt : builtin.lib-generator ;
-generators.override intel-darwin.prebuilt : builtin.prebuilt ;
-generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
-
-toolset.inherit-rules intel-darwin : gcc ;
-toolset.inherit-flags intel-darwin : gcc
- : <inlining>off <inlining>on <inlining>full <optimization>space
- <warnings>off <warnings>all <warnings>on
- <architecture>x86/<address-model>32
- <architecture>x86/<address-model>64
- ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# vectorization diagnostics
-feature vectorize : off on full ;
-
-# Initializes the intel-darwin toolset
-# version is mandatory
-# name (default icc) is used to invoke the specified intel compiler
-# compile and link options allow you to specify additional command line options for each version
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters intel-darwin
- : version $(version) ] ;
-
- command = [ common.get-invocation-command intel-darwin : icc
- : $(command) : /opt/intel_cc_80/bin ] ;
-
- common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags intel-darwin darwin $(condition) ;
-
- # handle <library-path>
- # local library-path = [ feature.get-values <library-path> : $(options) ] ;
- # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
-
- local root = [ feature.get-values <root> : $(options) ] ;
- local bin ;
- if $(command) || $(root)
- {
- bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
- root ?= $(bin:D) ;
-
- if $(root)
- {
- # Libraries required to run the executable may be in either
- # $(root)/lib (10.1 and earlier)
- # or
-            # $(root)/lib/architecture-name (11.0 and later)
- local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
- if $(.debug-configuration)
- {
- ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
- }
- flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
- }
- }
-
- local m = [ MATCH (..).* : $(version) ] ;
- local n = [ MATCH (.)\\. : $(m) ] ;
- if $(n) {
- m = $(n) ;
- }
-
- local major = $(m) ;
-
- if $(major) = "9" {
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
- }
- else {
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
- }
-
- local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
-
- # wchar_t char_traits workaround for compilers older than 10.2
- if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
- flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
- }
-}
-
-SPACE = " " ;
-
-flags intel-darwin.compile OPTIONS <cflags> ;
-flags intel-darwin.compile OPTIONS <cxxflags> ;
-# flags intel-darwin.compile INCLUDES <include> ;
-
-flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
-
-#
-cpu-type-em64t = prescott nocona ;
-flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ;
-flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ;
-
-flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
-flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
-flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
-
-flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
-flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
-flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
-
-actions compile.c
-{
- "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-flags intel-darwin ARFLAGS <archiveflags> ;
-
-# Default value. Mostly for the sake of intel-linux
-# that inherits from gcc, but does not have the same
-# logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble
-# as on Linux, 'ar' is always available.
-.AR = ar ;
-
-rule archive ( targets * : sources * : properties * )
-{
- # Always remove archive and start again. Here's rationale from
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a.
- # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
- # My program was crashing with absurd errors.
- # After some debugging I traced it back to the fact that a1.o was *still*
- # in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM
- # onto the archive action. That makes archives fail to build on NT
- # when they have many files because it will no longer execute the
- # action directly and blow the line length limit. Instead we
- # remove the file in a different action, just before the building
- # of the archive.
- #
- local clean.a = $(targets[1])(clean) ;
- TEMPORARY $(clean.a) ;
- NOCARE $(clean.a) ;
- LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
- DEPENDS $(clean.a) : $(sources) ;
- DEPENDS $(targets) : $(clean.a) ;
- common.RmTemps $(clean.a) : $(targets) ;
-}
-
-actions piecemeal archive
-{
- "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
- "ranlib" -cs "$(<)"
-}
-
-flags intel-darwin.link USER_OPTIONS <linkflags> ;
-
-# Declare actions for linking
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since
- # running N links in parallel is just slower.
- JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
diff --git a/jam-files/boost-build/tools/intel-linux.jam b/jam-files/boost-build/tools/intel-linux.jam
deleted file mode 100644
index d9164add..00000000
--- a/jam-files/boost-build/tools/intel-linux.jam
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright (c) 2003 Michael Stevens
-# Copyright (c) 2011 Bryce Lelbach
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import toolset ;
-import feature ;
-import toolset : flags ;
-
-import intel ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-import type ;
-import numbers ;
-
-feature.extend-subfeature toolset intel : platform : linux ;
-
-toolset.inherit-generators intel-linux
- <toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ;
-generators.override intel-linux.prebuilt : builtin.lib-generator ;
-generators.override intel-linux.prebuilt : builtin.prebuilt ;
-generators.override intel-linux.searched-lib-generator : searched-lib-generator ;
-
-# Override default do-nothing generators.
-generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ;
-generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ;
-
-toolset.inherit-rules intel-linux : gcc ;
-toolset.inherit-flags intel-linux : gcc
- : <inlining>off <inlining>on <inlining>full
- <optimization>space <optimization>speed
- <warnings>off <warnings>all <warnings>on
- ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# Initializes the intel-linux toolset
-# version is mandatory
-# name (default icpc) is used to invoke the specified intel-linux compiler
-# compile and link options allow you to specify additional command line options for each version
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters intel-linux
- : version $(version) ] ;
-
- if $(.debug-configuration)
- {
- ECHO "notice: intel-linux version is" $(version) ;
- }
-
- local default_path ;
-
- # Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0,
- # aka intel-linux-12.0. In this version, Intel thankfully decides to install
- # to a sane 'intel' folder in /opt.
- if [ MATCH "(12[.]0|12)" : $(version) ]
- { default_path = /opt/intel/bin ; }
- # Intel C++ Compiler 11.1.
- else if [ MATCH "(11[.]1)" : $(version) ]
- { default_path = /opt/intel_cce_11.1.064.x86_64/bin ; }
- # Intel C++ Compiler 11.0.
- else if [ MATCH "(11[.]0|11)" : $(version) ]
- { default_path = /opt/intel_cce_11.0.074.x86_64/bin ; }
- # Intel C++ Compiler 10.1.
- else if [ MATCH "(10[.]1)" : $(version) ]
- { default_path = /opt/intel_cce_10.1.013_x64/bin ; }
- # Intel C++ Compiler 9.1.
- else if [ MATCH "(9[.]1)" : $(version) ]
- { default_path = /opt/intel_cc_91/bin ; }
- # Intel C++ Compiler 9.0.
- else if [ MATCH "(9[.]0|9)" : $(version) ]
- { default_path = /opt/intel_cc_90/bin ; }
- # Intel C++ Compiler 8.1.
- else if [ MATCH "(8[.]1)" : $(version) ]
- { default_path = /opt/intel_cc_81/bin ; }
- # Intel C++ Compiler 8.0 - this used to be the default, so now it's the
- # fallback.
- else
- { default_path = /opt/intel_cc_80/bin ; }
-
- if $(.debug-configuration)
- {
- ECHO "notice: default search path for intel-linux is" $(default_path) ;
- }
-
- command = [ common.get-invocation-command intel-linux : icpc
- : $(command) : $(default_path) ] ;
-
- common.handle-options intel-linux : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags intel-linux gnu $(condition) ;
-
- local root = [ feature.get-values <root> : $(options) ] ;
- local bin ;
- if $(command) || $(root)
- {
- bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
- root ?= $(bin:D) ;
-
- local command-string = $(command:J=" ") ;
- local version-output = [ SHELL "$(command-string) --version" ] ;
- local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ;
- local major = [ MATCH "([0-9]+).*" : $(real-version) ] ;
-
-        # If we failed to determine the major version, use the behaviour for
- # the current compiler.
- if $(major) && [ numbers.less $(major) 10 ]
- {
- flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ;
- flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ;
- flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ;
- flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
- flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
- }
- else if $(major) && [ numbers.less $(major) 11 ]
- {
- flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
- flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
- flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
- flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
- flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
- }
-        else # newer versions of intel do have -Os (at least 11+, don't know about 10)
- {
- flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
- flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
- flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
- flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ;
- flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
- }
-
- if $(root)
- {
- # Libraries required to run the executable may be in either
- # $(root)/lib (10.1 and earlier)
- # or
-            # $(root)/lib/architecture-name (11.0 and later)
- local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
- if $(.debug-configuration)
- {
- ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
- }
- flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ;
- }
- }
-}
-
-SPACE = " " ;
-
-flags intel-linux.compile OPTIONS <warnings>off : -w0 ;
-flags intel-linux.compile OPTIONS <warnings>on : -w1 ;
-flags intel-linux.compile OPTIONS <warnings>all : -w2 ;
-
-rule compile.c++ ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-actions compile.c++ bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-actions compile.c bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c++.pch ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-#
-# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler
-# won't overwrite an existing pch: instead it creates filename$1.pchi, filename$2.pchi
-# etc - which appear not to do anything except take up disk space :-(
-#
-actions compile.c++.pch
-{
- rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
-}
-
-actions compile.fortran
-{
- "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c.pch ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c.pch
-{
- rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
-}
-
-rule link ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
-}
-
-rule link.dll ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
-}
-
-# Differ from 'link' above only by -shared.
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
-}
-
-
-
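As a usage sketch, the init rule above is normally reached from user-config.jam; the version numbers and path below are illustrative only.

    # Hypothetical user-config.jam entries
    using intel-linux : 11.1 ;                        # rely on the default path table above
    using intel-linux : 12.0 : /opt/intel/bin/icpc ;  # or name the compiler command explicitly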
diff --git a/jam-files/boost-build/tools/intel-win.jam b/jam-files/boost-build/tools/intel-win.jam
deleted file mode 100644
index 691b5dce..00000000
--- a/jam-files/boost-build/tools/intel-win.jam
+++ /dev/null
@@ -1,184 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# Importing common is needed because the rules we inherit here depend on it.
-# That is nasty.
-import common ;
-import errors ;
-import feature ;
-import intel ;
-import msvc ;
-import os ;
-import toolset ;
-import generators ;
-import type ;
-
-feature.extend-subfeature toolset intel : platform : win ;
-
-toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
-toolset.inherit-flags intel-win : msvc : : YLOPTION ;
-toolset.inherit-rules intel-win : msvc ;
-
-# Override default do-nothing generators.
-generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
-generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
-generators.override intel-win.compile.rc : rc.compile.resource ;
-generators.override intel-win.compile.mc : mc.compile ;
-
-toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
-
-toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
-
-# Initializes the intel toolset for Windows
-rule init ( version ? : # the compiler version
- command * : # the command to invoke the compiler itself
- options * # Additional option: <compatibility>
- # either 'vc6', 'vc7', 'vc7.1'
- # or 'native'(default).
- )
-{
- local compatibility =
- [ feature.get-values <compatibility> : $(options) ] ;
- local condition = [ common.check-init-parameters intel-win
- : version $(version) : compatibility $(compatibility) ] ;
-
- command = [ common.get-invocation-command intel-win : icl.exe :
- $(command) ] ;
-
- common.handle-options intel-win : $(condition) : $(command) : $(options) ;
-
- local root ;
- if $(command)
- {
- root = [ common.get-absolute-tool-path $(command[-1]) ] ;
- root = $(root)/ ;
- }
-
- local setup ;
- setup = [ GLOB $(root) : iclvars_*.bat ] ;
- if ! $(setup)
- {
- setup = $(root)/iclvars.bat ;
- }
- setup = "call \""$(setup)"\" > nul " ;
-
- if [ os.name ] = NT
- {
- setup = $(setup)"
-" ;
- }
- else
- {
- setup = "cmd /S /C "$(setup)" \"&&\" " ;
- }
-
- toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ;
- toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ;
- toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ;
- toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ;
- toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ;
- toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ;
-
- local m = [ MATCH (.).* : $(version) ] ;
- local major = $(m[1]) ;
-
- local C++FLAGS ;
-
- C++FLAGS += /nologo ;
-
- # Reduce the number of spurious error messages
- C++FLAGS += /Qwn5 /Qwd985 ;
-
- # Enable ADL
- C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too
-
- # Disable Microsoft "secure" overloads in Dinkumware libraries since they
- # cause compile errors with Intel versions 9 and 10.
- C++FLAGS += -D_SECURE_SCL=0 ;
-
- if $(major) > 5
- {
- C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping.
- }
-
- # Add options recognized only by intel7 and above.
- if $(major) >= 7
- {
- C++FLAGS += /Qansi_alias ;
- }
-
- if $(compatibility) = vc6
- {
- C++FLAGS +=
- # Emulate VC6
- /Qvc6
-
- # No wchar_t support in vc6 dinkum library. Furthermore, in vc6
- # compatibility-mode, wchar_t is not a distinct type from unsigned
- # short.
- -DBOOST_NO_INTRINSIC_WCHAR_T
- ;
- }
- else
- {
- if $(major) > 5
- {
- # Add support for wchar_t
- C++FLAGS += /Zc:wchar_t
- # Tell the dinkumware library about it.
- -D_NATIVE_WCHAR_T_DEFINED
- ;
- }
- }
-
- if $(compatibility) && $(compatibility) != native
- {
- C++FLAGS += /Q$(base-vc) ;
- }
- else
- {
- C++FLAGS +=
- -Qoption,cpp,--arg_dep_lookup
- # The following options were intended to disable the Intel compiler's
- # 'bug-emulation' mode, but were later reported to be causing ICE with
- # Intel-Win 9.0. It is not yet clear which options can be safely used.
- # -Qoption,cpp,--const_string_literals
- # -Qoption,cpp,--new_for_init
- # -Qoption,cpp,--no_implicit_typename
- # -Qoption,cpp,--no_friend_injection
- # -Qoption,cpp,--no_microsoft_bugs
- ;
- }
-
- toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
- # By default, when creating PCH, intel adds 'i' to the explicitly
- # specified name of the PCH file. Of course, Boost.Build is not
-# happy when the compiler produces a file other than the one it was asked for.
- # The option below stops this behaviour.
- toolset.flags intel-win CFLAGS : -Qpchi- ;
-
- if ! $(compatibility)
- {
- # If there's no backend version, assume 7.1.
- compatibility = vc7.1 ;
- }
-
- local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ;
- if ! $(extract-version)
- {
- errors.user-error "Invalid value for compatibility option:"
- $(compatibility) ;
- }
-
-    # Depending on the settings, running the tests requires some runtime DLLs.
- toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
-
- msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ;
-}
-
-toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
-
-toolset.flags intel-win YLOPTION ;
-
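A hedged usage sketch for the Windows variant; the version and compatibility value are illustrative.

    # Hypothetical user-config.jam entry; <compatibility> selects the Visual C++
    # backend the Intel compiler should emulate (vc6, vc7, vc7.1 or native).
    using intel-win : 9.1 : : <compatibility>vc7.1 ;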
diff --git a/jam-files/boost-build/tools/intel.jam b/jam-files/boost-build/tools/intel.jam
deleted file mode 100644
index 67038aa2..00000000
--- a/jam-files/boost-build/tools/intel.jam
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# This is a generic 'intel' toolset. Depending on the current
-# system, it forwards to the 'intel-linux', 'intel-darwin' or
-# 'intel-win' modules.
-
-import feature ;
-import os ;
-import toolset ;
-
-feature.extend toolset : intel ;
-feature.subfeature toolset intel : platform : : propagated link-incompatible ;
-
-rule init ( * : * )
-{
- if [ os.name ] = LINUX
- {
- toolset.using intel-linux :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
- else if [ os.name ] = MACOSX
- {
- toolset.using intel-darwin :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
- else
- {
- toolset.using intel-win :
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-}
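For illustration, a single generic entry is enough; the version below is hypothetical.

    # Hypothetical user-config.jam entry; on Linux this forwards to intel-linux,
    # on Mac OS X to intel-darwin, and otherwise to intel-win.
    using intel : 11.1 ;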
diff --git a/jam-files/boost-build/tools/lex.jam b/jam-files/boost-build/tools/lex.jam
deleted file mode 100644
index 75d64131..00000000
--- a/jam-files/boost-build/tools/lex.jam
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-import generators ;
-import feature ;
-import property ;
-
-
-feature.feature flex.prefix : : free ;
-type.register LEX : l ;
-type.register LEX++ : ll ;
-generators.register-standard lex.lex : LEX : C ;
-generators.register-standard lex.lex : LEX++ : CPP ;
-
-rule init ( )
-{
-}
-
-rule lex ( target : source : properties * )
-{
- local r = [ property.select flex.prefix : $(properties) ] ;
- if $(r)
- {
- PREFIX on $(<) = $(r:G=) ;
- }
-}
-
-actions lex
-{
- flex -P$(PREFIX) -o$(<) $(>)
-}
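A small usage sketch (file and prefix names are illustrative): listing a .l or .ll source lets the lex generators above produce the C or C++ scanner, and the free feature sets flex's -P symbol prefix.

    # Hypothetical Jamfile
    exe scanner-demo : main.cpp tokens.l : <flex.prefix>demo_ ;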
diff --git a/jam-files/boost-build/tools/make.jam b/jam-files/boost-build/tools/make.jam
deleted file mode 100644
index 08567285..00000000
--- a/jam-files/boost-build/tools/make.jam
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Douglas Gregor
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'make' main target rule.
-
-import "class" : new ;
-import errors : error ;
-import project ;
-import property ;
-import property-set ;
-import regex ;
-import targets ;
-
-
-class make-target-class : basic-target
-{
- import type regex virtual-target ;
- import "class" : new ;
-
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local action-name = [ $(property-set).get <action> ] ;
- # 'm' will always be set -- we add '@' ourselves in the 'make' rule
- # below.
- local m = [ MATCH ^@(.*) : $(action-name) ] ;
-
- local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
- local t = [ new file-target $(self.name) exact : [ type.type
- $(self.name) ] : $(self.project) : $(a) ] ;
- return [ property-set.empty ] [ virtual-target.register $(t) ] ;
- }
-}
-
-
-# Declares the 'make' main target.
-#
-rule make ( target-name : sources * : generating-rule + : requirements * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
-
-    # The '@' sign causes the feature.jam module to qualify the rule name with the
-    # module name of the current project, if needed.
- local m = [ MATCH ^(@).* : $(generating-rule) ] ;
- if ! $(m)
- {
- generating-rule = @$(generating-rule) ;
- }
- requirements += <action>$(generating-rule) ;
-
- targets.main-target-alternative
- [ new make-target-class $(target-name) : $(project)
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) :
- $(project) ] ] ;
-}
-
-
-IMPORT $(__name__) : make : : make ;
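A minimal sketch of the 'make' rule in use (the action and file names are illustrative): the generating rule is referenced by name, with the '@' prefix added automatically if omitted.

    # Hypothetical Jamfile
    actions generate-version-h
    {
        echo "#define VERSION 1" > $(<)
    }

    make version.h : : @generate-version-h ;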
diff --git a/jam-files/boost-build/tools/make.py b/jam-files/boost-build/tools/make.py
deleted file mode 100644
index 10baa1cb..00000000
--- a/jam-files/boost-build/tools/make.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Status: ported.
-# Base revision: 64068
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Douglas Gregor
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'make' main target rule.
-
-from b2.build.targets import BasicTarget
-from b2.build.virtual_target import Action, FileTarget
-from b2.build import type
-from b2.manager import get_manager
-import b2.build.property_set
-
-
-class MakeTarget(BasicTarget):
-
- def construct(self, name, source_targets, property_set):
-
- action_name = property_set.get("<action>")[0]
- action = Action(get_manager(), source_targets, action_name[1:], property_set)
- target = FileTarget(self.name(), type.type(self.name()),
- self.project(), action, exact=True)
- return [ b2.build.property_set.empty(),
- [self.project().manager().virtual_targets().register(target)]]
-
-def make (target_name, sources, generating_rule,
- requirements=None, usage_requirements=None):
-
- target_name = target_name[0]
- generating_rule = generating_rule[0]
- if generating_rule[0] != '@':
- generating_rule = '@' + generating_rule
-
- if not requirements:
- requirements = []
-
-
- requirements.append("<action>%s" % generating_rule)
-
- m = get_manager()
- targets = m.targets()
- project = m.projects().current()
- engine = m.engine()
- engine.register_bjam_action(generating_rule)
-
- targets.main_target_alternative(MakeTarget(
- target_name, project,
- targets.main_target_sources(sources, target_name),
- targets.main_target_requirements(requirements, project),
- targets.main_target_default_build([], project),
- targets.main_target_usage_requirements(usage_requirements or [], project)))
-
-get_manager().projects().add_rule("make", make)
-
diff --git a/jam-files/boost-build/tools/mc.jam b/jam-files/boost-build/tools/mc.jam
deleted file mode 100644
index 57837773..00000000
--- a/jam-files/boost-build/tools/mc.jam
+++ /dev/null
@@ -1,44 +0,0 @@
-#~ Copyright 2005 Alexey Pakhunov.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for Microsoft message compiler tool.
-# Notes:
-# - there is just the message compiler tool; there is no tool for
-#   extracting message strings from sources
-# - This file allows using the Microsoft message compiler
-#   with any toolset. In msvc.jam, there is a more specific
-#   message compiling action.
-
-import common ;
-import generators ;
-import feature : feature get-values ;
-import toolset : flags ;
-import type ;
-import rc ;
-
-rule init ( )
-{
-}
-
-type.register MC : mc ;
-
-
-# Command line options
-feature mc-input-encoding : ansi unicode : free ;
-feature mc-output-encoding : unicode ansi : free ;
-feature mc-set-customer-bit : no yes : free ;
-
-flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ;
-flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ;
-flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ;
-flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ;
-flags mc.compile MCFLAGS <mc-set-customer-bit>no : ;
-flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ;
-
-generators.register-standard mc.compile : MC : H RC ;
-
-actions compile
-{
- mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
-}
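A hedged sketch of how the generator above is typically exercised (names are illustrative, and this presumes a Windows toolchain with mc available):

    # Hypothetical Jamfile: the .mc source is turned into a header and an .rc
    # file by mc.compile; the free features above tune the mc command line.
    exe eventlog-demo : main.cpp messages.mc : <mc-input-encoding>unicode ;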
diff --git a/jam-files/boost-build/tools/message.jam b/jam-files/boost-build/tools/message.jam
deleted file mode 100644
index 212d8542..00000000
--- a/jam-files/boost-build/tools/message.jam
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2008 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines main target type 'message', that prints a message when built for the
-# first time.
-
-import project ;
-import "class" : new ;
-import targets ;
-import property-set ;
-
-class message-target-class : basic-target
-{
- rule __init__ ( name-and-dir : project : * )
- {
- basic-target.__init__ $(name-and-dir) : $(project) ;
- self.3 = $(3) ;
- self.4 = $(4) ;
- self.5 = $(5) ;
- self.6 = $(6) ;
- self.7 = $(7) ;
- self.8 = $(8) ;
- self.9 = $(9) ;
- self.built = ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- if ! $(self.built)
- {
- for i in 3 4 5 6 7 8 9
- {
- if $(self.$(i))
- {
- ECHO $(self.$(i)) ;
- }
- }
- self.built = 1 ;
- }
-
- return [ property-set.empty ] ;
- }
-}
-
-
-rule message ( name : * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new message-target-class $(name) : $(project)
- : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ;
-}
-IMPORT $(__name__) : message : : message ; \ No newline at end of file
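A one-line usage sketch (the target name and text are illustrative):

    # Hypothetical Jamfile: prints the note once, the first time it is built.
    message build-note : "This variant is built without optional components." ;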
diff --git a/jam-files/boost-build/tools/message.py b/jam-files/boost-build/tools/message.py
deleted file mode 100644
index cc0b946f..00000000
--- a/jam-files/boost-build/tools/message.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Status: ported.
-# Base revision: 64488.
-#
-# Copyright 2008, 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines main target type 'message', that prints a message when built for the
-# first time.
-
-import b2.build.targets as targets
-import b2.build.property_set as property_set
-
-from b2.manager import get_manager
-
-class MessageTargetClass(targets.BasicTarget):
-
- def __init__(self, name, project, *args):
-
- targets.BasicTarget.__init__(self, name, project, [])
- self.args = args
- self.built = False
-
- def construct(self, name, sources, ps):
-
- if not self.built:
- for arg in self.args:
- if type(arg) == type([]):
- arg = " ".join(arg)
- print arg
- self.built = True
-
- return (property_set.empty(), [])
-
-def message(name, *args):
-
- if type(name) == type([]):
- name = name[0]
-
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(MessageTargetClass(*((name, project) + args)))
-
-get_manager().projects().add_rule("message", message)
diff --git a/jam-files/boost-build/tools/midl.jam b/jam-files/boost-build/tools/midl.jam
deleted file mode 100644
index 0aa5dda3..00000000
--- a/jam-files/boost-build/tools/midl.jam
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright (c) 2005 Alexey Pakhunov.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Microsoft Interface Definition Language (MIDL) related routines
-
-import common ;
-import generators ;
-import feature : feature get-values ;
-import os ;
-import scanner ;
-import toolset : flags ;
-import type ;
-
-rule init ( )
-{
-}
-
-type.register IDL : idl ;
-
-# A type library (.tlb) is generated by the MIDL compiler and can be included
-# in the resources of an application (.rc). In order to be found by a resource
-# compiler, its target type should be derived from 'H' - otherwise
-# the property '<implicit-dependency>' will be ignored.
-type.register MSTYPELIB : tlb : H ;
-
-
-# Register scanner for MIDL files
-class midl-scanner : scanner
-{
- import path property-set regex scanner type virtual-target ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
-
- self.includes = $(includes) ;
-
- # List of quoted strings
- self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
-
- # 'import' and 'importlib' directives
- self.re-import = "import"$(self.re-strings)"[ \t]*;" ;
- self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ;
-
- # C preprocessor 'include' directive
- self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ;
- self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
- }
-
- rule pattern ( )
- {
- # Match '#include', 'import' and 'importlib' directives
- return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ;
- local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ;
- local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ;
- local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ;
-
-        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
- local g = [ on $(target) return $(HDRGRIST) ] ;
- local b = [ NORMALIZE_PATH $(binding:D) ] ;
-
-        # Attach the binding of the including file to the included targets.
-        # When a target is created directly from a virtual target,
-        # this extra information is unnecessary. But in other
-        # cases, it allows distinguishing between two headers of the
-        # same name included from different places.
- local g2 = $(g)"#"$(b) ;
-
- included-angle = $(included-angle:G=$(g)) ;
- included-quoted = $(included-quoted:G=$(g2)) ;
- imported = $(imported:G=$(g2)) ;
- imported_tlbs = $(imported_tlbs:G=$(g2)) ;
-
- local all = $(included-angle) $(included-quoted) $(imported) ;
-
- INCLUDES $(target) : $(all) ;
- DEPENDS $(target) : $(imported_tlbs) ;
- NOCARE $(all) $(imported_tlbs) ;
- SEARCH on $(included-angle) = $(self.includes:G=) ;
- SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ;
- SEARCH on $(imported) = $(b) $(self.includes:G=) ;
- SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ;
-
- scanner.propagate
- [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] :
- $(included-angle) $(included-quoted) : $(target) ;
-
- scanner.propagate $(__name__) : $(imported) : $(target) ;
- }
-}
-
-scanner.register midl-scanner : include ;
-type.set-scanner IDL : midl-scanner ;
-
-
-# Command line options
-feature midl-stubless-proxy : yes no : propagated ;
-feature midl-robust : yes no : propagated ;
-
-flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ;
-flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ;
-flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ;
-flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ;
-
-# Architecture-specific options
-architecture-x86 = <architecture> <architecture>x86 ;
-address-model-32 = <address-model> <address-model>32 ;
-address-model-64 = <address-model> <address-model>64 ;
-
-flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ;
-flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ;
-flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ;
-
-
-flags midl.compile.idl DEFINES <define> ;
-flags midl.compile.idl UNDEFS <undef> ;
-flags midl.compile.idl INCLUDES <include> ;
-
-
-generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ;
-
-
-# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
-# depends on the contents of the source IDL file. Calling TOUCH_FILE below ensures
-# that both files will be created so bjam will not try to recreate them
-# constantly.
-TOUCH_FILE = [ common.file-touch-command ] ;
-
-actions compile.idl
-{
- midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")"
- $(TOUCH_FILE) "$(<[4]:W)"
- $(TOUCH_FILE) "$(<[5]:W)"
-}
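A hedged usage sketch (names are illustrative, and this presumes the msvc or intel-win toolset): listing an .idl source lets midl.compile.idl produce the type library, header and generated C stubs registered above.

    # Hypothetical Jamfile
    lib com-server : server.cpp server.idl ;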
diff --git a/jam-files/boost-build/tools/mipspro.jam b/jam-files/boost-build/tools/mipspro.jam
deleted file mode 100644
index 417eaefc..00000000
--- a/jam-files/boost-build/tools/mipspro.jam
+++ /dev/null
@@ -1,145 +0,0 @@
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property ;
-import generators ;
-import os ;
-import toolset : flags ;
-import feature ;
-import fortran ;
-import type ;
-import common ;
-
-feature.extend toolset : mipspro ;
-toolset.inherit mipspro : unix ;
-generators.override mipspro.prebuilt : builtin.lib-generator ;
-generators.override mipspro.searched-lib-generator : searched-lib-generator ;
-
-# Documentation and toolchain description located
-# http://www.sgi.com/products/software/irix/tools/
-
-rule init ( version ? : command * : options * )
-{
- local condition = [
- common.check-init-parameters mipspro : version $(version) ] ;
-
- command = [ common.get-invocation-command mipspro : CC : $(command) ] ;
-
- common.handle-options mipspro : $(condition) : $(command) : $(options) ;
-
- command_c = $(command_c[1--2]) $(command[-1]:B=cc) ;
-
- toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ;
-
- # fortran support
- local command = [
- common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ;
-
- command_f = $(command_f[1--2]) $(command[-1]:B=f77) ;
- toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ;
-
- # set link flags
- flags mipspro.link FINDLIBS-ST : [
- feature.get-values <find-static-library> : $(options) ] : unchecked ;
-
- flags mipspro.link FINDLIBS-SA : [
- feature.get-values <find-shared-library> : $(options) ] : unchecked ;
-}
-
-# Declare generators
-generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ;
-generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ;
-generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ;
-
-cpu-arch-32 =
- <architecture>/<address-model>
- <architecture>/<address-model>32 ;
-
-cpu-arch-64 =
- <architecture>/<address-model>64 ;
-
-flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ;
-flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ;
-
-# Declare flags and actions for compilation
-flags mipspro.compile OPTIONS <debug-symbols>on : -g ;
-# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ;
-flags mipspro.compile OPTIONS <warnings>off : -w ;
-flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning
-flags mipspro.compile OPTIONS <warnings>all : -fullwarn ;
-flags mipspro.compile OPTIONS <optimization>speed : -Ofast ;
-flags mipspro.compile OPTIONS <optimization>space : -O2 ;
-flags mipspro.compile OPTIONS <cflags> : -LANG:std ;
-flags mipspro.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
-flags mipspro.compile.c++ OPTIONS <cxxflags> ;
-flags mipspro.compile DEFINES <define> ;
-flags mipspro.compile INCLUDES <include> ;
-
-
-flags mipspro.compile.fortran OPTIONS <fflags> ;
-
-actions compile.c
-{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.fortran
-{
- "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# Declare flags and actions for linking
-flags mipspro.link OPTIONS <debug-symbols>on : -g ;
-# Strip the binary when no debugging is needed
-# flags mipspro.link OPTIONS <debug-symbols>off : -s ;
-# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ;
-# flags mipspro.link OPTIONS <threading>multi : -mt ;
-
-flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ;
-flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ;
-
-flags mipspro.link OPTIONS <optimization>speed : -Ofast ;
-flags mipspro.link OPTIONS <optimization>space : -O2 ;
-flags mipspro.link OPTIONS <linkflags> ;
-flags mipspro.link LINKPATH <library-path> ;
-flags mipspro.link FINDLIBS-ST <find-static-library> ;
-flags mipspro.link FINDLIBS-SA <find-shared-library> ;
-flags mipspro.link FINDLIBS-SA <threading>multi : pthread ;
-flags mipspro.link LIBRARIES <library-file> ;
-flags mipspro.link LINK-RUNTIME <runtime-link>static : static ;
-flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ;
-flags mipspro.link RPATH <dll-path> ;
-
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm
-}
-
-# Slight mods for dlls
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-}
-
-# Declare action for creating static libraries
-actions piecemeal archive
-{
- ar -cr "$(<)" "$(>)"
-}
diff --git a/jam-files/boost-build/tools/mpi.jam b/jam-files/boost-build/tools/mpi.jam
deleted file mode 100644
index 0fe490be..00000000
--- a/jam-files/boost-build/tools/mpi.jam
+++ /dev/null
@@ -1,583 +0,0 @@
-# Support for the Message Passing Interface (MPI)
-#
-# (C) Copyright 2005, 2006 Trustees of Indiana University
-# (C) Copyright 2005 Douglas Gregor
-#
-# Distributed under the Boost Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
-#
-# Authors: Douglas Gregor
-# Andrew Lumsdaine
-#
-# ==== MPI Configuration ====
-#
-# For many users, MPI support can be enabled simply by adding the following
-# line to your user-config.jam file:
-#
-# using mpi ;
-#
-# This should auto-detect MPI settings based on the MPI wrapper compiler in
-# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
-# has a different name, you can pass the name of the wrapper compiler as the
-# first argument to the mpi module:
-#
-# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
-#
-# If your MPI implementation does not have a wrapper compiler, or the MPI
-# auto-detection code does not work with your MPI's wrapper compiler,
-# you can pass MPI-related options explicitly via the second parameter to the
-# mpi module:
-#
-# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
-# <find-shared-library>mpi <find-shared-library>lam
-# <find-shared-library>dl ;
-#
-# To see the results of MPI auto-detection, pass "--debug-configuration" on
-# the bjam command line.
-#
-# The (optional) third argument configures Boost.MPI for running
-# regression tests. These parameters specify the executable used to
-# launch jobs (default: "mpirun") followed by any necessary arguments
-# to this to run tests and tell the program to expect the number of
-# processors to follow (default: "-np"). With the default parameters,
-# for instance, the test harness will execute, e.g.,
-#
-# mpirun -np 4 all_gather_test
-#
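-# For example (the launcher name here is purely illustrative), a site that
-# launches jobs with mpiexec could instead configure:
-#
-#   using mpi : : : mpiexec -n ;
-#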
-# ==== Linking Against the MPI Libraries ====
-#
-# To link against the MPI libraries, import the "mpi" module and add the
-# following requirement to your target:
-#
-# <library>/mpi//mpi
-#
-# Since MPI support is not always available, you should check
-# "mpi.configured" before trying to link against the MPI libraries.
-
-import "class" : new ;
-import common ;
-import feature : feature ;
-import generators ;
-import os ;
-import project ;
-import property ;
-import testing ;
-import toolset ;
-import type ;
-import path ;
-
-# Make this module a project
-project.initialize $(__name__) ;
-project mpi ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# Assuming the first part of the command line is the given prefix
-# followed by some non-empty value, remove the first argument. Returns
-# either nothing (if there was no prefix or no value) or a pair
-#
-# <name>value rest-of-cmdline
-#
-# This is a subroutine of cmdline_to_features
-rule add_feature ( prefix name cmdline )
-{
- local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
-
- # If there was no value associated with the prefix, abort
- if ! $(match) {
- return ;
- }
-
- local value = $(match[1]) ;
-
- if [ MATCH " +" : $(value) ] {
- value = "\"$(value)\"" ;
- }
-
- return "<$(name)>$(value)" $(match[2]) ;
-}
-
-# Strip any end-of-line characters off the given string and return the
-# result.
-rule strip-eol ( string )
-{
- local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
-
- if $(match)
- {
- return $(match[1]) ;
- }
- else
- {
- return $(string) ;
- }
-}
-
-# Split a command-line into a set of features. Certain kinds of
-# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
-# with their Boost.Build equivalents (e.g., <include>, <define>,
-# <library-path>, <find-library>). All other arguments are introduced
-# using the features in the unknown-features parameter, because we do not
-# know how to deal with them; for instance, an unrecognized flag is passed
-# through as both <cxxflags> and <linkflags> so that both compilation and
-# linking remain correct. The incoming command line should be a string starting
-# with an executable (e.g., "g++ -I/include/path") and may contain any
-# number of command-line arguments thereafter. The result is a list of
-# features corresponding to the given command line, ignoring the
-# executable.
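-#
-# For example (the paths and library name below are illustrative), the command
-# line "mpic++ -I/usr/include/mpi -DMPICH_IGNORE_CXX_SEEK -L/usr/lib -lmpich"
-# would be translated to roughly:
-#
-#   <include>/usr/include/mpi <define>MPICH_IGNORE_CXX_SEEK
-#   <library-path>/usr/lib <find-shared-library>mpich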
-rule cmdline_to_features ( cmdline : unknown-features ? )
-{
- local executable ;
- local features ;
- local otherflags ;
- local result ;
-
- unknown-features ?= <cxxflags> <linkflags> ;
-
- # Pull the executable out of the command line. At this point, the
- # executable is just thrown away.
- local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
- executable = $(match[1]) ;
- cmdline = $(match[2]) ;
-
- # List the prefix/feature pairs that we will be able to transform.
- # Every kind of parameter not mentioned here will be placed in both
- # cxxflags and linkflags, because we don't know where they should go.
- local feature_kinds-D = "define" ;
- local feature_kinds-I = "include" ;
- local feature_kinds-L = "library-path" ;
- local feature_kinds-l = "find-shared-library" ;
-
- while $(cmdline) {
-
- # Check for one of the feature prefixes we know about. If we
- # find one (and the associated value is nonempty), convert it
- # into a feature.
- local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
- local matched ;
- if $(match) && $(match[2]) {
- local prefix = $(match[1]) ;
- if $(feature_kinds$(prefix)) {
- local name = $(feature_kinds$(prefix)) ;
- local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
-
- if $(add) {
-
- if $(add[1]) = <find-shared-library>pthread
- {
- # Uhm. It's not really nice that this MPI implementation
- # uses -lpthread as opposed to -pthread. We do want to
- # set <threading>multi, instead of -lpthread.
- result += "<threading>multi" ;
- MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
- }
- else
- {
- result += $(add[1]) ;
- }
-
- cmdline = $(add[2]) ;
- matched = yes ;
- }
- }
- }
-
- # If we haven't matched a feature prefix, just grab the command-line
- # argument itself. If we can map this argument to a feature
-    # (e.g., -pthread -> <threading>multi), then do so; otherwise, add it
-    # to the list of "other" flags that we don't understand.
- if ! $(matched) {
- match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
- local value = $(match[1]) ;
- cmdline = $(match[2]) ;
-
- # Check for multithreading support
- if $(value) = "-pthread" || $(value) = "-pthreads"
- {
- result += "<threading>multi" ;
-
- # DPG: This is a hack intended to work around a BBv2 bug where
- # requirements propagated from libraries are not checked for
- # conflicts when BBv2 determines which "common" properties to
- # apply to a target. In our case, the <threading>single property
- # gets propagated from the common properties to Boost.MPI
- # targets, even though <threading>multi is in the usage
- # requirements of <library>/mpi//mpi.
- MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
- }
- else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
- otherflags += $(value) ;
- }
- }
- }
-
- # If there are other flags that we don't understand, add them to the
- # result as both <cxxflags> and <linkflags>
- if $(otherflags) {
- for unknown in $(unknown-features)
- {
- result += "$(unknown)$(otherflags:J= )" ;
- }
- }
-
- return $(result) ;
-}
-
-# Determine if it is safe to execute the given shell command by trying
-# to execute it and determining whether the exit code is zero or
-# not. Returns true for an exit code of zero, false otherwise.
-local rule safe-shell-command ( cmdline )
-{
- local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
- return [ MATCH ".*(SSCOK).*" : $(result) ] ;
-}
-
-# Initialize the MPI module.
-rule init ( mpicxx ? : options * : mpirun-with-options * )
-{
- if ! $(options) && $(.debug-configuration)
- {
- ECHO "===============MPI Auto-configuration===============" ;
- }
-
- if ! $(mpicxx) && [ os.on-windows ]
- {
- # Try to auto-configure to the Microsoft Compute Cluster Pack
- local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
- local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
- if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
- }
-
- # Pick up either the 32-bit or 64-bit library, depending on which address
- # model the user has selected. Default to 32-bit.
- options = <include>$(cluster_pack_path)/Include
- <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
- <library-path>$(cluster_pack_path)/Lib/i386
- <find-static-library>msmpi
- <toolset>msvc:<define>_SECURE_SCL=0
- ;
-
- # Setup the "mpirun" equivalent (mpiexec)
- .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
- .mpirun_flags = -n ;
- }
- else if $(.debug-configuration)
- {
- ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
- }
- }
-
- if ! $(options)
- {
- # Try to auto-detect options based on the wrapper compiler
- local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
-
- if ! $(mpicxx) && ! $(command)
- {
- # Try "mpiCC", which is used by MPICH
- command = [ common.get-invocation-command mpi : mpiCC ] ;
- }
-
- if ! $(mpicxx) && ! $(command)
- {
- # Try "mpicxx", which is used by OpenMPI and MPICH2
- command = [ common.get-invocation-command mpi : mpicxx ] ;
- }
-
- local result ;
- local compile_flags ;
- local link_flags ;
-
- if ! $(command)
- {
- # Do nothing: we'll complain later
- }
- # OpenMPI and newer versions of LAM-MPI have -showme:compile and
- # -showme:link.
- else if [ safe-shell-command "$(command) -showme:compile" ] &&
- [ safe-shell-command "$(command) -showme:link" ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
- }
-
- compile_flags = [ SHELL "$(command) -showme:compile" ] ;
- link_flags = [ SHELL "$(command) -showme:link" ] ;
-
- # Prepend COMPILER as the executable name, to match the format of
- # other compilation commands.
- compile_flags = "COMPILER $(compile_flags)" ;
- link_flags = "COMPILER $(link_flags)" ;
- }
- # Look for LAM-MPI's -showme
- else if [ safe-shell-command "$(command) -showme" ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
- }
-
- result = [ SHELL "$(command) -showme" ] ;
- }
- # Look for MPICH
- else if [ safe-shell-command "$(command) -show" ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found MPICH wrapper compiler: $(command)" ;
- }
- compile_flags = [ SHELL "$(command) -compile_info" ] ;
- link_flags = [ SHELL "$(command) -link_info" ] ;
- }
-    # Sun HPC and IBM POE
- else if [ SHELL "$(command) -v 2>/dev/null" ]
- {
- compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
-
- local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
- if $(back)
- {
- # Sun HPC
- if $(.debug-configuration)
- {
- ECHO "Found Sun MPI wrapper compiler: $(command)" ;
- }
-
- compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
- compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
- link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
- link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
- link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
-
- # strip out -v from compile options
- local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
- local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
- link_flags = "$(front) $(back)" ;
- front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
- back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
- link_flags = "$(front) $(back)" ;
- }
- else
- {
-        # IBM POE
- if $(.debug-configuration)
- {
- ECHO "Found IBM MPI wrapper compiler: $(command)" ;
- }
-
- #
- compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
- compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
- local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
- local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
- compile_flags = "$(front) $(back)" ;
- front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
- back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
- compile_flags = "$(front) $(back)" ;
- link_flags = $(compile_flags) ;
-
- # get location of mpif.h from mpxlf
- local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
- f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
- front = [ MATCH "(.*)-v" : $(f_flags) ] ;
- back = [ MATCH "-v(.*)" : $(f_flags) ] ;
- f_flags = "$(front) $(back)" ;
- f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
- f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
- compile_flags = [ strip-eol $(compile_flags) ] ;
- compile_flags = "$(compile_flags) $(f_flags)" ;
- }
- }
-
- if $(result) || $(compile_flags) && $(link_flags)
- {
- if $(result)
- {
- result = [ strip-eol $(result) ] ;
- options = [ cmdline_to_features $(result) ] ;
- }
- else
- {
- compile_flags = [ strip-eol $(compile_flags) ] ;
- link_flags = [ strip-eol $(link_flags) ] ;
-
- # Separately process compilation and link features, then combine
- # them at the end.
- local compile_features = [ cmdline_to_features $(compile_flags)
- : "<cxxflags>" ] ;
- local link_features = [ cmdline_to_features $(link_flags)
- : "<linkflags>" ] ;
- options = $(compile_features) $(link_features) ;
- }
-
- # If requested, display MPI configuration information.
- if $(.debug-configuration)
- {
- if $(result)
- {
- ECHO " Wrapper compiler command line: $(result)" ;
- }
- else
- {
- local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
- : $(compile_flags) ] ;
- ECHO "MPI compilation flags: $(match[2])" ;
- local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
- : $(link_flags) ] ;
- ECHO "MPI link flags: $(match[2])" ;
- }
- }
- }
- else
- {
- if $(command)
- {
- ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
- ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
- }
- else if $(mpicxx)
- {
- ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
- }
- else
- {
- ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
- }
- ECHO "You will need to manually configure MPI support." ;
- }
-
- }
-
- # Find mpirun (or its equivalent) and its flags
- if ! $(.mpirun)
- {
- .mpirun =
- [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
- .mpirun_flags = $(mpirun-with-options[2-]) ;
- .mpirun_flags ?= -np ;
- }
-
- if $(.debug-configuration)
- {
- if $(options)
- {
- echo "MPI build features: " ;
- ECHO $(options) ;
- }
-
- if $(.mpirun)
- {
- echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
- }
-
- ECHO "====================================================" ;
- }
-
- if $(options)
- {
- .configured = true ;
-
- # Set up the "mpi" alias
- alias mpi : : : : $(options) ;
- }
-}
-
-# States whether MPI has been configured.
-rule configured ( )
-{
- return $(.configured) ;
-}
-
-# Returns the "extra" requirements needed to build MPI. These requirements are
-# part of the /mpi//mpi library target, but they need to be added to anything
-# that uses MPI directly to work around bugs in BBv2's propagation of
-# requirements.
-rule extra-requirements ( )
-{
- return $(MPI_EXTRA_REQUIREMENTS) ;
-}
-
-# Support for testing; borrowed from Python
-type.register RUN_MPI_OUTPUT ;
-type.register RUN_MPI : : TEST ;
-
-class mpi-test-generator : generator
-{
- import property-set ;
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- self.composing = true ;
- }
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- # Generate an executable from the sources. This is the executable we will run.
- local executable =
- [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
-
- result =
- [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
- }
-}
-
-# Use mpi-test-generator to generate MPI tests from sources
-generators.register
- [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
-
-generators.register-standard testing.expect-success
- : RUN_MPI_OUTPUT : RUN_MPI ;
-
-# The number of processes to spawn when executing an MPI test.
-feature mpi:processes : : free incidental ;
-
-# The flag settings on testing.capture-output do not apply to
-# mpi.capture-output at the moment, so redo this explicitly.
-toolset.flags mpi.capture-output ARGS <testing.arg> ;
-rule capture-output ( target : sources * : properties * )
-{
- # Use the standard capture-output rule to run the tests
- testing.capture-output $(target) : $(sources[1]) : $(properties) ;
-
- # Determine the number of processes we should run on.
- local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
- num_processes = $(num_processes:G=) ;
-
- # serialize the MPI tests to avoid overloading systems
- JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
-
- # We launch MPI processes using the "mpirun" equivalent specified by the user.
- LAUNCHER on $(target) =
- [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
-}
-
-# Creates a set of test cases to be run through the MPI launcher. The name, sources,
-# and requirements are the same as for any other test generator. However, schedule is
-# a list of numbers, which indicates how many processes each test run will use. For
-# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
-# 7 processes. The name provided is just the base name: the actual tests will be
-# the name followed by a hyphen, then the number of processes.
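-#
-# For example (the test name and schedule here are illustrative), a Jamfile
-# could declare:
-#
-#   mpi-test all_gather_test : all_gather_test.cpp : : 1 2 4 ;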
-rule mpi-test ( name : sources * : requirements * : schedule * )
-{
- sources ?= $(name).cpp ;
- schedule ?= 1 2 3 4 7 8 13 17 ;
-
- local result ;
- for processes in $(schedule)
- {
- result += [ testing.make-test
- run-mpi : $(sources) /boost/mpi//boost_mpi
- : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
- }
- return $(result) ;
-}
diff --git a/jam-files/boost-build/tools/msvc-config.jam b/jam-files/boost-build/tools/msvc-config.jam
deleted file mode 100644
index 6c71e3b0..00000000
--- a/jam-files/boost-build/tools/msvc-config.jam
+++ /dev/null
@@ -1,12 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for the Visual Studio toolset. To use, just import this module.
-
-import toolset : using ;
-
-ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ;
-
-using msvc : all ;
-
diff --git a/jam-files/boost-build/tools/msvc.jam b/jam-files/boost-build/tools/msvc.jam
deleted file mode 100644
index e33a66d2..00000000
--- a/jam-files/boost-build/tools/msvc.jam
+++ /dev/null
@@ -1,1392 +0,0 @@
-# Copyright (c) 2003 David Abrahams.
-# Copyright (c) 2005 Vladimir Prus.
-# Copyright (c) 2005 Alexey Pakhunov.
-# Copyright (c) 2006 Bojan Resnik.
-# Copyright (c) 2006 Ilya Sokolov.
-# Copyright (c) 2007 Rene Rivera
-# Copyright (c) 2008 Jurko Gospodnetic
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-################################################################################
-#
-# MSVC Boost Build toolset module.
-# --------------------------------
-#
-# All toolset versions need to have their location either auto-detected or
-# explicitly specified except for the special 'default' version that expects the
-# environment to find the needed tools or report an error.
-#
-################################################################################
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import mc ;
-import midl ;
-import os ;
-import path ;
-import pch ;
-import property ;
-import rc ;
-import toolset ;
-import type ;
-
-
-type.register MANIFEST : manifest ;
-feature.feature embed-manifest : on off : incidental propagated ;
-
-type.register PDB : pdb ;
-
-################################################################################
-#
-# Public rules.
-#
-################################################################################
-
-# Initialize a specific toolset version configuration. As a result, the path to
-# the compiler and, possibly, program names are set up, and will be used when
-# that version of the compiler is requested. For example, you might have:
-#
-# using msvc : 6.5 : cl.exe ;
-# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
-#
-# The version parameter may be omitted:
-#
-# using msvc : : Z:/foo/bar/cl.exe ;
-#
-# The following keywords have special meanings when specified as versions:
-# - all - all detected but not yet used versions will be marked as used
-# with their default options.
-# - default - this is an equivalent to an empty version.
-#
-# Depending on the supplied version, the detected configurations, and the presence
-# of 'cl.exe' in the path, different results may be achieved. The following table describes
-# the possible scenarios:
-#
-# Nothing "x.y"
-# Passed Nothing "x.y" detected, detected,
-# version detected detected cl.exe in path cl.exe in path
-#
-# default Error Use "x.y" Create "default" Use "x.y"
-# all None Use all None Use all
-# x.y - Use "x.y" - Use "x.y"
-# a.b Error Error Create "a.b" Create "a.b"
-#
-# "x.y" - refers to a detected version;
-# "a.b" - refers to an undetected version.
-#
-# FIXME: Currently the command parameter and the <compiler> property parameter
-# seem to overlap in duties. Remove this duplication. This seems to be related
-# to why someone started preparing to replace init with configure rules.
-#
-rule init (
- # The msvc version being configured. When omitted the tools invoked when no
- # explicit version is given will be configured.
- version ?
-
- # The command used to invoke the compiler. If not specified:
- # - if version is given, default location for that version will be
- # searched
- #
- # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0
- # and 6.* will be searched
- #
- # - if compiler is not found in the default locations, PATH will be
- # searched.
- : command *
-
- # Options may include:
- #
- # All options shared by multiple toolset types as handled by the
- # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>,
- # <fflags> & <linkflags>.
- #
- # <assembler>
- # <compiler>
- # <idl-compiler>
- # <linker>
- # <mc-compiler>
- # <resource-compiler>
- # Exact tool names to be used by this msvc toolset configuration.
- #
- # <compiler-filter>
- # Command through which to pipe the output of running the compiler.
- # For example to pass the output to STLfilt.
- #
- # <setup>
- # Global setup command to invoke before running any of the msvc tools.
- # It will be passed additional option parameters depending on the actual
- # target platform.
- #
- # <setup-amd64>
- # <setup-i386>
- # <setup-ia64>
- # Platform specific setup command to invoke before running any of the
-    #     msvc tools used when building a target for a specific platform, e.g.
- # when building a 32 or 64 bit executable.
- : options *
-)
-{
- if $(command)
- {
- options += <command>$(command) ;
- }
- configure $(version) : $(options) ;
-}
-
-
-# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
-# a part of the 'options' list. See the 'init' rule comment for more detailed
-# information.
-#
-rule configure ( version ? : options * )
-{
- switch $(version)
- {
- case "all" :
- if $(options)
- {
- errors.error "MSVC toolset configuration: options should be"
- "empty when '$(version)' is specified." ;
- }
-
- # Configure (i.e. mark as used) all registered versions.
- local all-versions = [ $(.versions).all ] ;
- if ! $(all-versions)
- {
- if $(.debug-configuration)
- {
- ECHO "notice: [msvc-cfg] Asked to configure all registered"
- "msvc toolset versions when there are none currently"
- "registered." ;
- }
- }
- else
- {
- for local v in $(all-versions)
- {
- # Note that there is no need to skip already configured
- # versions here as this will request configure-really rule
- # to configure the version using default options which will
- # in turn cause it to simply do nothing in case the version
- # has already been configured.
- configure-really $(v) ;
- }
- }
-
- case "default" :
- configure-really : $(options) ;
-
- case * :
- configure-really $(version) : $(options) ;
- }
-}
-
-
-# Sets up flag definitions dependent on the compiler version used.
-# - 'version' is the version of compiler in N.M format.
-# - 'conditions' is the property set to be used as flag conditions.
-# - 'toolset' is the toolset for which flag settings are to be defined.
-# This makes the rule reusable for other msvc-option-compatible compilers.
-#
-rule configure-version-specific ( toolset : version : conditions )
-{
- toolset.push-checking-for-flags-module unchecked ;
-    # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
-    # /Zc:wchar_t options that improve C++ standard conformance, but those
-    # options are off by default. If we are sure that the msvc version is 7.0
-    # or later, add those options explicitly. We can be sure either if the user
-    # specified version 7.* explicitly or if we auto-detected the version ourselves.
- if ! [ MATCH ^(6\\.) : $(version) ]
- {
- toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ;
- toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ;
-
- # Explicitly disable the 'function is deprecated' warning. Some msvc
- # versions have a bug, causing them to emit the deprecation warning even
- # with /W0.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ;
-
- if [ MATCH ^([78]\\.) : $(version) ]
- {
- # 64-bit compatibility warning deprecated since 9.0, see
- # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ;
- }
- }
-
- #
- # Processor-specific optimization.
- #
-
- if [ MATCH ^([67]) : $(version) ]
- {
- # 8.0 deprecates some of the options.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ;
-
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i386 : /G3 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ;
-
- # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
- # tests will fail.
- toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ;
-
- # 7.1 and below have single-threaded static RTL.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
- }
- else
- {
- # 8.0 and above adds some more options.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ;
-
- # 8.0 and above only has multi-threaded static RTL.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
-
- # Specify target machine type so the linker will not need to guess.
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ;
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : /MACHINE:X86 ;
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : /MACHINE:IA64 ;
-
-        # Make sure that a manifest will be generated even if there are no
-        # dependencies to put in it.
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/<embed-manifest>off : /MANIFEST ;
- }
- toolset.pop-checking-for-flags-module ;
-}
-
-
-# Registers this toolset including all of its flags, features & generators. Does
-# nothing on repeated calls.
-#
-rule register-toolset ( )
-{
- if ! msvc in [ feature.values toolset ]
- {
- register-toolset-really ;
- }
-}
-
-
-# Declare action for creating static libraries. If library exists, remove it
-# before adding files. See
-# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
-if [ os.name ] in NT
-{
-    # The 'DEL' command would issue a message to stdout if the file does not
-    # exist, so we need a check.
- actions archive
- {
- if exist "$(<[1])" DEL "$(<[1])"
- $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-}
-else
-{
- actions archive
- {
- $(.RM) "$(<[1])"
- $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-}
-
-
-# For the assembler the following options are turned on by default:
-#
-# -Zp4 align structures to 4 bytes
-# -Cp preserve case of user identifiers
-# -Cx preserve case in publics, externs
-#
-actions compile.asm
-{
- $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
-}
-
-
-rule compile.c ( targets + : sources * : properties * )
-{
- C++FLAGS on $(targets[1]) = ;
- get-rspline $(targets) : -TC ;
- compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-
-rule compile.c.preprocess ( targets + : sources * : properties * )
-{
- C++FLAGS on $(targets[1]) = ;
- get-rspline $(targets) : -TC ;
- preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-
-rule compile.c.pch ( targets + : sources * : properties * )
-{
- C++FLAGS on $(targets[1]) = ;
- get-rspline $(targets[1]) : -TC ;
- get-rspline $(targets[2]) : -TC ;
- local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
- if $(pch-source)
- {
- DEPENDS $(<) : $(pch-source) ;
- compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
- }
- else
- {
- compile-c-c++-pch $(targets) : $(sources) ;
- }
-}
-
-toolset.flags msvc YLOPTION : "-Yl" ;
-
-# Action for running the C/C++ compiler without using precompiled headers.
-#
-# WARNING: Synchronize any changes in this action with intel-win
-#
-# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
-#
-# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
-#
-# 2. When compiling an executable's source files, PDB_NAME is set on a per-source-file basis by rule compile-c-c++.
-#    The linker will pull these into the executable's PDB.
-#
-# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
-# as in this case the compiler must be used to create a single PDB for our library.
-#
-actions compile-c-c++ bind PDB_NAME
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
-}
-
-actions preprocess-c-c++ bind PDB_NAME
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
-}
-
-rule compile-c-c++ ( targets + : sources * )
-{
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
- PDB_NAME on $(<) = $(<:S=.pdb) ;
-}
-
-rule preprocess-c-c++ ( targets + : sources * )
-{
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
- PDB_NAME on $(<) = $(<:S=.pdb) ;
-}
-
-# Action for running the C/C++ compiler using precompiled headers. In addition
-# to whatever else it needs to compile, this action also adds a temporary source
-# .cpp file used to compile the precompiled headers themselves.
-#
-# The global .escaped-double-quote variable is used to avoid messing up Emacs
-# syntax highlighting in the messy N-quoted code below.
-actions compile-c-c++-pch
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
-}
-
-
-# Action for running the C/C++ compiler using precompiled headers. An already
-# built source file for compiling the precompiled headers is expected to be
-# given as one of the source parameters.
-actions compile-c-c++-pch-s
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
-}
-
-
-rule compile.c++ ( targets + : sources * : properties * )
-{
- get-rspline $(targets) : -TP ;
- compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-rule compile.c++.preprocess ( targets + : sources * : properties * )
-{
- get-rspline $(targets) : -TP ;
- preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-
-rule compile.c++.pch ( targets + : sources * : properties * )
-{
- get-rspline $(targets[1]) : -TP ;
- get-rspline $(targets[2]) : -TP ;
- local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
- if $(pch-source)
- {
- DEPENDS $(<) : $(pch-source) ;
- compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
- }
- else
- {
- compile-c-c++-pch $(targets) : $(sources) ;
- }
-}
-
-
-# See midl.jam for details.
-#
-actions compile.idl
-{
- $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
- $(.TOUCH_FILE) "$(<[4]:W)"
- $(.TOUCH_FILE) "$(<[5]:W)"
-}
-
-
-actions compile.mc
-{
- $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
-}
-
-
-actions compile.rc
-{
- $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
-}
-
-
-rule link ( targets + : sources * : properties * )
-{
- if <embed-manifest>on in $(properties)
- {
- msvc.manifest $(targets) : $(sources) : $(properties) ;
- }
-}
-
-rule link.dll ( targets + : sources * : properties * )
-{
- DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
- if <embed-manifest>on in $(properties)
- {
- msvc.manifest.dll $(targets) : $(sources) : $(properties) ;
- }
-}
-
-# Incrementally linking a DLL causes no end of problems: if the actual exports do
-# not change, the import .lib file is never updated. Therefore, the .lib is
-# always out-of-date and gets rebuilt every time. I am not sure that incremental
-# linking is such a great idea in general, but in this case I am sure we do not
-# want it.
-
-# Windows manifest is a new way to specify dependencies on managed DotNet
-# assemblies and Windows native DLLs. The manifests are embedded as resources
-# and are useful in any PE target (both DLL and EXE).
-
-if [ os.name ] in NT
-{
- actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
- }
-
- actions manifest
- {
- if exist "$(<[1]).manifest" (
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
- )
- }
-
- actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
- }
-
- actions manifest.dll
- {
- if exist "$(<[1]).manifest" (
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
- )
- }
-}
-else
-{
- actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-
- actions manifest
- {
- if test -e "$(<[1]).manifest"; then
- $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1"
- fi
- }
-
- actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-
- actions manifest.dll
- {
- if test -e "$(<[1]).manifest"; then
- $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
- fi
- }
-}
-
-# This rule sets up the pdb file that will be used when generating static
-# libraries and the debug-store option is database, so that the compiler
-# puts all debug info into a single .pdb file named after the library
-#
-# Poking at source targets this way is probably not clean, but it's the
-# easiest approach.
-rule archive ( targets + : sources * : properties * )
-{
- PDB_NAME on $(>) = $(<:S=.pdb) ;
-}
-
-################################################################################
-#
-# Classes.
-#
-################################################################################
-
-class msvc-pch-generator : pch-generator
-{
- import property-set ;
-
- rule run-pch ( project name ? : property-set : sources * )
- {
- # Searching for the header and source file in the sources.
- local pch-header ;
- local pch-source ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] H ]
- {
- pch-header = $(s) ;
- }
- else if
- [ type.is-derived [ $(s).type ] CPP ] ||
- [ type.is-derived [ $(s).type ] C ]
- {
- pch-source = $(s) ;
- }
- }
-
- if ! $(pch-header)
- {
- errors.user-error "can not build pch without pch-header" ;
- }
-
- # If we do not have the PCH source - that is fine. We will just create a
- # temporary .cpp file in the action.
-
- local generated = [ generator.run $(project) $(name)
- : [ property-set.create
- # Passing of <pch-source> is a dirty trick, needed because
- # non-composing generators with multiple inputs are subtly
- # broken. For more detailed information see:
- # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
- <pch-source>$(pch-source)
- [ $(property-set).raw ] ]
- : $(pch-header) ] ;
-
- local pch-file ;
- for local g in $(generated)
- {
- if [ type.is-derived [ $(g).type ] PCH ]
- {
- pch-file = $(g) ;
- }
- }
-
- return [ property-set.create <pch-header>$(pch-header)
- <pch-file>$(pch-file) ] $(generated) ;
- }
-}
-
-
-################################################################################
-#
-# Local rules.
-#
-################################################################################
-
-# Detects versions listed in '.known-versions' by checking registry information,
-# environment variables & default paths. Supports both native Windows and
-# Cygwin.
-#
-local rule auto-detect-toolset-versions ( )
-{
- if [ os.name ] in NT CYGWIN
- {
- # Get installation paths from the registry.
- for local i in $(.known-versions)
- {
- if $(.version-$(i)-reg)
- {
- local vc-path ;
- for local x in "" "Wow6432Node\\"
- {
- vc-path += [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
- : "ProductDir" ] ;
- }
-
- if $(vc-path)
- {
- vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ;
- register-configuration $(i) : [ path.native $(vc-path[1]) ] ;
- }
- }
- }
- }
-
- # Check environment and default installation paths.
- for local i in $(.known-versions)
- {
- if ! $(i) in [ $(.versions).all ]
- {
- register-configuration $(i) : [ default-path $(i) ] ;
- }
- }
-}
-
-
-# Worker rule for toolset version configuration. Takes an explicit version id or
-# nothing in case it should configure the default toolset version (the first
-# registered one or a new 'default' one in case no toolset versions have been
-# registered yet).
-#
-local rule configure-really ( version ? : options * )
-{
- local v = $(version) ;
-
- # Decide what the 'default' version is.
- if ! $(v)
- {
- # Take the first registered (i.e. auto-detected) version.
- version = [ $(.versions).all ] ;
- version = $(version[1]) ;
- v = $(version) ;
-
- # Note: 'version' can still be empty at this point if no versions have
- # been auto-detected.
- version ?= "default" ;
- }
-
- # Version alias -> real version number.
- if $(.version-alias-$(version))
- {
- version = $(.version-alias-$(version)) ;
- }
-
- # Check whether the selected configuration is already in use.
- if $(version) in [ $(.versions).used ]
- {
-        # Allow multiple 'toolset.using' calls for the same configuration if
-        # identical sets of options are used.
- if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
- {
- errors.error "MSVC toolset configuration: Toolset version"
- "'$(version)' already configured." ;
- }
- }
- else
- {
- # Register a new configuration.
- $(.versions).register $(version) ;
-
-        # Add user-supplied options to the auto-detected options.
- options = [ $(.versions).get $(version) : options ] $(options) ;
-
- # Mark the configuration as 'used'.
- $(.versions).use $(version) ;
-
- # Generate conditions and save them.
- local conditions = [ common.check-init-parameters msvc : version $(v) ]
- ;
-
- $(.versions).set $(version) : conditions : $(conditions) ;
-
- local command = [ feature.get-values <command> : $(options) ] ;
-
- # If version is specified, we try to search first in default paths, and
- # only then in PATH.
- command = [ common.get-invocation-command msvc : cl.exe : $(command) :
- [ default-paths $(version) ] : $(version) ] ;
-
- common.handle-options msvc : $(conditions) : $(command) : $(options) ;
-
- if ! $(version)
- {
- # Even if version is not explicitly specified, try to detect the
- # version from the path.
- # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
- # 9.0express as 9.0 here.
- if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
- {
- version = 10.0 ;
- }
- else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
- {
- version = 9.0 ;
- }
- else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
- {
- version = 8.0 ;
- }
- else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
- {
- version = 7.1 ;
- }
- else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" :
- $(command) ]
- {
- version = 7.1toolkit ;
- }
- else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ]
- {
- version = 7.0 ;
- }
- else
- {
- version = 6.0 ;
- }
- }
-
- # Generate and register setup command.
-
- local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ;
-
- local cpu = i386 amd64 ia64 ;
- if $(below-8.0)
- {
- cpu = i386 ;
- }
-
- local setup-amd64 ;
- local setup-i386 ;
- local setup-ia64 ;
-
- if $(command)
- {
-            # TODO: Note that if we specify a non-existent toolset version then
-            # this rule may find and use a corresponding compiler executable
-            # belonging to an incorrect toolset version. For example, if you
-            # have only MSVC 7.1 installed, have its executable on the path and
-            # specify that you want Boost Build to use MSVC 9.0, then you would
-            # want Boost Build to report an error, but this may instead cause it
-            # to silently use the MSVC 7.1 compiler even though it thinks it is
-            # using the msvc-9.0 toolset version.
- command = [ common.get-absolute-tool-path $(command[-1]) ] ;
- }
-
- if $(command)
- {
- local parent = [ path.make $(command) ] ;
- parent = [ path.parent $(parent) ] ;
- parent = [ path.native $(parent) ] ;
-
- # Setup will be used if the command name has been specified. If
- # setup is not specified explicitly then a default setup script will
-            # be used instead. Setup scripts may be global or architecture/
-            # platform/cpu specific. Setup options are used only in the case of
- # global setup scripts.
-
- # Default setup scripts provided with different VC distributions:
- #
- # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
- # builds. It was located in the bin folder for the regular version
- # and in the root folder for the free VC 7.1 tools.
- #
- # Later 8.0 & 9.0 versions introduce separate platform specific
- # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
- # located in or under the bin folder. Most also include a global
- # vcvarsall.bat helper script located in the root folder which runs
- # one of the aforementioned vcvars*.bat scripts based on the options
- # passed to it. So far only the version coming with some PlatformSDK
- # distributions does not include this top level script but to
- # support those we need to fall back to using the worker scripts
- # directly in case the top level script can not be found.
-
- local global-setup = [ feature.get-values <setup> : $(options) ] ;
- global-setup = $(global-setup[1]) ;
- if ! $(below-8.0)
- {
- global-setup ?= [ locate-default-setup $(command) : $(parent) :
- vcvarsall.bat ] ;
- }
-
- local default-setup-amd64 = vcvarsx86_amd64.bat ;
- local default-setup-i386 = vcvars32.bat ;
- local default-setup-ia64 = vcvarsx86_ia64.bat ;
-
- # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
- # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
- # mention an x86_IPF option, that seems to be a documentation bug
- # and x86_ia64 is the correct option.
- local default-global-setup-options-amd64 = x86_amd64 ;
- local default-global-setup-options-i386 = x86 ;
- local default-global-setup-options-ia64 = x86_ia64 ;
-
- # When using 64-bit Windows, and targeting 64-bit, it is possible to
- # use a native 64-bit compiler, selected by the "amd64" & "ia64"
- # parameters to vcvarsall.bat. There are two variables we can use --
- # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
- # 'x86' when running 32-bit Windows, no matter which processor is
-            # used, and 'AMD64' on 64-bit Windows running on x86-64 hardware
-            # (either AMD64 or EM64T).
- #
- if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
- {
- default-global-setup-options-amd64 = amd64 ;
- }
- # TODO: The same 'native compiler usage' should be implemented for
- # the Itanium platform by using the "ia64" parameter. For this
- # though we need someone with access to this platform who can find
- # out how to correctly detect this case.
- else if $(somehow-detect-the-itanium-platform)
- {
- default-global-setup-options-ia64 = ia64 ;
- }
-
- local setup-prefix = "call " ;
- local setup-suffix = " >nul"$(.nl) ;
- if ! [ os.name ] in NT
- {
- setup-prefix = "cmd.exe /S /C call " ;
- setup-suffix = " \">nul\" \"&&\" " ;
- }
-
- for local c in $(cpu)
- {
- local setup-options ;
-
- setup-$(c) = [ feature.get-values <setup-$(c)> : $(options) ] ;
-
- if ! $(setup-$(c))-is-not-empty
- {
- if $(global-setup)-is-not-empty
- {
- setup-$(c) = $(global-setup) ;
-
- # If needed we can easily add using configuration flags
- # here for overriding which options get passed to the
- # global setup command for which target platform:
- # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ;
-
- setup-options ?= $(default-global-setup-options-$(c)) ;
- }
- else
- {
- setup-$(c) = [ locate-default-setup $(command) : $(parent) : $(default-setup-$(c)) ] ;
- }
- }
-
- # Cygwin to Windows path translation.
- setup-$(c) = "\""$(setup-$(c):W)"\"" ;
-
- # Append setup options to the setup name and add the final setup
- # prefix & suffix.
- setup-options ?= "" ;
- setup-$(c) = $(setup-prefix)$(setup-$(c):J=" ")" "$(setup-options:J=" ")$(setup-suffix) ;
- }
- }
-
- # Get tool names (if any) and finish setup.
-
- compiler = [ feature.get-values <compiler> : $(options) ] ;
- compiler ?= cl ;
-
- linker = [ feature.get-values <linker> : $(options) ] ;
- linker ?= link ;
-
- resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
- resource-compiler ?= rc ;
-
- # Turn on some options for i386 assembler
- # -coff generate COFF format object file (compatible with cl.exe output)
- local default-assembler-amd64 = ml64 ;
- local default-assembler-i386 = "ml -coff" ;
- local default-assembler-ia64 = ias ;
-
- assembler = [ feature.get-values <assembler> : $(options) ] ;
-
- idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
- idl-compiler ?= midl ;
-
- mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
- mc-compiler ?= mc ;
-
- manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
- manifest-tool ?= mt ;
-
- local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ;
-
- for local c in $(cpu)
- {
- # Setup script is not required in some configurations.
- setup-$(c) ?= "" ;
-
- local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ;
-
- if $(.debug-configuration)
- {
- for local cpu-condition in $(cpu-conditions)
- {
- ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ;
- }
- }
-
- local cpu-assembler = $(assembler) ;
- cpu-assembler ?= $(default-assembler-$(c)) ;
-
- toolset.flags msvc.compile .CC $(cpu-conditions) : $(setup-$(c))$(compiler) /Zm800 -nologo ;
- toolset.flags msvc.compile .RC $(cpu-conditions) : $(setup-$(c))$(resource-compiler) ;
- toolset.flags msvc.compile .ASM $(cpu-conditions) : $(setup-$(c))$(cpu-assembler) -nologo ;
- toolset.flags msvc.link .LD $(cpu-conditions) : $(setup-$(c))$(linker) /NOLOGO /INCREMENTAL:NO ;
- toolset.flags msvc.archive .LD $(cpu-conditions) : $(setup-$(c))$(linker) /lib /NOLOGO ;
- toolset.flags msvc.compile .IDL $(cpu-conditions) : $(setup-$(c))$(idl-compiler) ;
- toolset.flags msvc.compile .MC $(cpu-conditions) : $(setup-$(c))$(mc-compiler) ;
-
- toolset.flags msvc.link .MT $(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ;
-
- if $(cc-filter)
- {
- toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ;
- }
- }
-
- # Set version-specific flags.
- configure-version-specific msvc : $(version) : $(conditions) ;
- }
-}
-
-
-# Returns the default installation path for the given version.
-#
-local rule default-path ( version )
-{
- # Use auto-detected path if possible.
- local path = [ feature.get-values <command> : [ $(.versions).get $(version)
- : options ] ] ;
-
- if $(path)
- {
- path = $(path:D) ;
- }
- else
- {
- # Check environment.
- if $(.version-$(version)-env)
- {
- local vc-path = [ os.environ $(.version-$(version)-env) ] ;
- if $(vc-path)
- {
- vc-path = [ path.make $(vc-path) ] ;
- vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ;
- vc-path = [ path.native $(vc-path) ] ;
-
- path = $(vc-path) ;
- }
- }
-
- # Check default path.
- if ! $(path) && $(.version-$(version)-path)
- {
- path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ;
- }
- }
-
- return $(path) ;
-}
-
-
-# Returns either the default installation path (if 'version' is not empty) or
-# a list of all known default paths (if no version is given).
-#
-local rule default-paths ( version ? )
-{
- local possible-paths ;
-
- if $(version)
- {
- possible-paths += [ default-path $(version) ] ;
- }
- else
- {
- for local i in $(.known-versions)
- {
- possible-paths += [ default-path $(i) ] ;
- }
- }
-
- return $(possible-paths) ;
-}
-
-
-rule get-rspline ( target : lang-opt )
-{
- CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS)
- $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES)
- $(.nl)\"-I$(INCLUDES:W)\" ] ;
-}
-
-class msvc-linking-generator : linking-generator
-{
-    # Calls the base version and, if necessary, also creates targets for the
-    # PDB and manifest files that accompany the main linked target.
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local result = [ linking-generator.generated-targets $(sources)
- : $(property-set) : $(project) $(name) ] ;
-
- if $(result)
- {
- local name-main = [ $(result[0]).name ] ;
- local action = [ $(result[0]).action ] ;
-
- if [ $(property-set).get <debug-symbols> ] = "on"
- {
-                # We force an exact name on the PDB. The reason is tagging -- the tag rule
-                # may reasonably special-case some target types, like SHARED_LIB. The tag
-                # rule will not catch PDB, and it cannot even easily figure out whether a
-                # PDB is paired with a SHARED_LIB, an EXE or something else. Because the PDB
-                # always gets the same name as the main target, with .pdb as the extension,
-                # just force it.
- local target = [ class.new file-target $(name-main:S=.pdb) exact : PDB : $(project) : $(action) ] ;
- local registered-target = [ virtual-target.register $(target) ] ;
- if $(target) != $(registered-target)
- {
- $(action).replace-targets $(target) : $(registered-target) ;
- }
- result += $(registered-target) ;
- }
-
- if [ $(property-set).get <embed-manifest> ] = "off"
- {
-                # The manifest is an evil target: it has .manifest appended to the name of
-                # the main target, including the extension, e.g. a.exe.manifest. We use an
-                # 'exact' name to achieve this effect.
- local target = [ class.new file-target $(name-main).manifest exact : MANIFEST : $(project) : $(action) ] ;
- local registered-target = [ virtual-target.register $(target) ] ;
- if $(target) != $(registered-target)
- {
- $(action).replace-targets $(target) : $(registered-target) ;
- }
- result += $(registered-target) ;
- }
- }
- return $(result) ;
- }
-}
-
-
-
-# Unsafe worker rule for the register-toolset() rule. Must not be called
-# multiple times.
-#
-local rule register-toolset-really ( )
-{
- feature.extend toolset : msvc ;
-
- # Intel and msvc supposedly have link-compatible objects.
- feature.subfeature toolset msvc : vendor : intel : propagated optional ;
-
- # Inherit MIDL flags.
- toolset.inherit-flags msvc : midl ;
-
- # Inherit MC flags.
- toolset.inherit-flags msvc : mc ;
-
- # Dynamic runtime comes only in MT flavour.
- toolset.add-requirements
- <toolset>msvc,<runtime-link>shared:<threading>multi ;
-
- # Declare msvc toolset specific features.
- {
- feature.feature debug-store : object database : propagated ;
- feature.feature pch-source : : dependency free ;
- }
-
- # Declare generators.
- {
- # TODO: Is it possible to combine these? Make the generators
- # non-composing so that they do not convert each source into a separate
- # .rsp file.
- generators.register [ new msvc-linking-generator
- msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ;
- generators.register [ new msvc-linking-generator
- msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ] ;
-
- generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ;
-
- # Using 'register-c-compiler' adds the build directory to INCLUDES.
- generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ;
- generators.override msvc.compile.rc : rc.compile.resource ;
- generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ;
-
- generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ;
- generators.override msvc.compile.idl : midl.compile.idl ;
-
- generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ;
- generators.override msvc.compile.mc : mc.compile ;
-
- # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
- # the latter have their HPP type derived from H. The type of compilation
- # is determined entirely by the destination type.
- generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
- generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
-
- generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
- generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
- }
-
- toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
- toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
- toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
-
- #
- # Declare flags for compilation.
- #
-
- toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ;
- toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ;
-
- toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ;
- toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ;
-
- toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ;
- toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ;
- toolset.flags msvc.compile CFLAGS <optimization>off : /Od ;
- toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ;
- toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ;
- toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ;
-
- toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ;
- toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ;
- toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ;
- toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ;
-
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ;
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ;
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
-
- # By default 8.0 enables rtti support while prior versions disabled it. We
- # simply enable or disable it explicitly so we do not have to depend on this
- # default behaviour.
- toolset.flags msvc.compile CFLAGS <rtti>on : /GR ;
- toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ;
- toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
- toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
-
- toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
- toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
-
- toolset.flags msvc.compile OPTIONS <cflags> : ;
- toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ;
-
- toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ;
-
- toolset.flags msvc.compile DEFINES <define> ;
- toolset.flags msvc.compile UNDEFS <undef> ;
- toolset.flags msvc.compile INCLUDES <include> ;
-
- # Declare flags for the assembler.
- toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ;
-
- toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ;
-
- toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ;
- toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ;
- toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ;
- toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ;
-
- toolset.flags msvc.compile.asm DEFINES <define> ;
-
- # Declare flags for linking.
- {
- toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
- toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
- toolset.flags msvc.link DEF_FILE <def-file> ;
-
- # The linker disables the default optimizations when using /DEBUG so we
- # have to enable them manually for release builds with debug symbols.
- toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ;
-
- toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ;
- toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ;
- toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ;
- toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ;
- toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ;
-
- toolset.flags msvc.link OPTIONS <linkflags> ;
- toolset.flags msvc.link LINKPATH <library-path> ;
-
- toolset.flags msvc.link FINDLIBS_ST <find-static-library> ;
- toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ;
- toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
- toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
- }
-
- toolset.flags msvc.archive AROPTIONS <archiveflags> ;
-}
-
-
-# Locates the requested setup script under the given folder and returns its
-# full path, or nothing if the script cannot be found. If multiple scripts are
-# found, only the first one is returned.
-#
-# TODO: There used to be a code comment for the msvc.init rule stating that we
-# do not correctly detect the location of the vcvars32.bat setup script for
-# the free VC7.1 tools when the user explicitly provides a path. This should
-# be tested, or the whole comment should simply be removed if this toolset
-# version is no longer important.
-#
-local rule locate-default-setup ( command : parent : setup-name )
-{
- local result = [ GLOB $(command) $(parent) : $(setup-name) ] ;
- if $(result[1])
- {
- return $(result[1]) ;
- }
-}
-
-
-# Validates given path, registers found configuration and prints debug
-# information about it.
-#
-local rule register-configuration ( version : path ? )
-{
- if $(path)
- {
- local command = [ GLOB $(path) : cl.exe ] ;
-
- if $(command)
- {
- if $(.debug-configuration)
- {
- ECHO "notice: [msvc-cfg] msvc-$(version) detected, command: '$(command)'" ;
- }
-
- $(.versions).register $(version) ;
- $(.versions).set $(version) : options : <command>$(command) ;
- }
- }
-}
-
-
-################################################################################
-#
-# Startup code executed when loading this module.
-#
-################################################################################
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# Miscellaneous constants.
-.RM = [ common.rm-command ] ;
-.nl = "
-" ;
-.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ;
-.escaped-double-quote = "\"" ;
-.TOUCH_FILE = [ common.file-touch-command ] ;
-
-# List of all registered configurations.
-.versions = [ new configurations ] ;
-
-# Supported CPU architectures.
-.cpu-arch-i386 =
- <architecture>/<address-model>
- <architecture>/<address-model>32
- <architecture>x86/<address-model>
- <architecture>x86/<address-model>32 ;
-
-.cpu-arch-amd64 =
- <architecture>/<address-model>64
- <architecture>x86/<address-model>64 ;
-
-.cpu-arch-ia64 =
- <architecture>ia64/<address-model>
- <architecture>ia64/<address-model>64 ;
-
-
-# Supported CPU types (only Itanium optimization options are supported from
-# VC++ 2005 on). See
-# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
-# detailed information.
-.cpu-type-g5 = i586 pentium pentium-mmx ;
-.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6
- k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ;
-.cpu-type-em64t = prescott nocona conroe conroe-xe conroe-l allendale merom
- merom-xe kentsfield kentsfield-xe penryn wolfdale
- yorkfield nehalem ;
-.cpu-type-amd64 = k8 opteron athlon64 athlon-fx ;
-.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp
- athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ;
-.cpu-type-itanium = itanium itanium1 merced ;
-.cpu-type-itanium2 = itanium2 mckinley ;
-
-
-# Known toolset versions, in order of preference.
-.known-versions = 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ;
-
-# Version aliases.
-.version-alias-6 = 6.0 ;
-.version-alias-6.5 = 6.0 ;
-.version-alias-7 = 7.0 ;
-.version-alias-8 = 8.0 ;
-.version-alias-9 = 9.0 ;
-.version-alias-10 = 10.0 ;
-
-# Names of registry keys containing the Visual C++ installation path (relative
-# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
-.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
-.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ;
-.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ;
-.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ;
-.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ;
-.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ;
-.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
-.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
-.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
-
-# Visual C++ Toolkit 2003 does not store its installation path in the registry.
-# The environment variable 'VCToolkitInstallDir' and the default installation
-# path will be checked instead.
-.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ;
-.version-7.1toolkit-env = VCToolkitInstallDir ;
-
-# Path to the folder containing "cl.exe" relative to the value of the
-# corresponding environment variable.
-.version-7.1toolkit-envpath = "bin" ;
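-
-# Auto-detection can also be bypassed entirely by configuring a specific
-# compiler from user-config.jam or site-config.jam. A minimal sketch (the
-# installation path below is purely illustrative):
-#
-#   using msvc : 10.0 : "C:/Program Files/Microsoft Visual Studio 10.0/VC/bin/cl.exe" ;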
-
-
-# Auto-detect all the available msvc installations on the system.
-auto-detect-toolset-versions ;
-
-
-# And finally trigger the actual Boost Build toolset registration.
-register-toolset ;
diff --git a/jam-files/boost-build/tools/notfile.jam b/jam-files/boost-build/tools/notfile.jam
deleted file mode 100644
index 97a5b0e8..00000000
--- a/jam-files/boost-build/tools/notfile.jam
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2005 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import generators ;
-import project ;
-import targets ;
-import toolset ;
-import type ;
-
-
-type.register NOTFILE_MAIN ;
-
-
-class notfile-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- local action ;
- local action-name = [ $(property-set).get <action> ] ;
-
- local m = [ MATCH ^@(.*) : $(action-name) ] ;
-
- if $(m)
- {
- action = [ new action $(sources) : $(m[1])
- : $(property-set) ] ;
- }
- else
- {
- action = [ new action $(sources) : notfile.run
- : $(property-set) ] ;
- }
- return [ virtual-target.register
- [ new notfile-target $(name) : $(project) : $(action) ] ] ;
- }
-}
-
-
-generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
-
-
-toolset.flags notfile.run ACTION : <action> ;
-
-
-actions run
-{
- $(ACTION)
-}
-
-
-rule notfile ( target-name : action + : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- requirements += <action>$(action) ;
-
- targets.main-target-alternative
- [ new typed-target $(target-name) : $(project) : NOTFILE_MAIN
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-IMPORT $(__name__) : notfile : : notfile ;
diff --git a/jam-files/boost-build/tools/notfile.py b/jam-files/boost-build/tools/notfile.py
deleted file mode 100644
index afbf68fb..00000000
--- a/jam-files/boost-build/tools/notfile.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Status: ported.
-# Base revision: 64429.
-#
-# Copyright (c) 2005-2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-
-import b2.build.type as type
-import b2.build.generators as generators
-import b2.build.virtual_target as virtual_target
-import b2.build.toolset as toolset
-import b2.build.targets as targets
-
-from b2.manager import get_manager
-from b2.util import bjam_signature
-
-type.register("NOTFILE_MAIN")
-
-class NotfileGenerator(generators.Generator):
-
- def run(self, project, name, ps, sources):
- action_name = ps.get('action')[0]
- if action_name[0] == '@':
- action = virtual_target.Action(get_manager(), sources, action_name[1:], ps)
- else:
- action = virtual_target.Action(get_manager(), sources, "notfile.run", ps)
-
- return [get_manager().virtual_targets().register(
- virtual_target.NotFileTarget(name, project, action))]
-
-generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"]))
-
-toolset.flags("notfile.run", "ACTION", [], ["<action>"])
-
-get_manager().engine().register_action("notfile.run", "$(ACTION)")
-
-@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"]))
-def notfile(target_name, action, sources, requirements, default_build):
-
- requirements.append("<action>" + action)
-
- return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements,
- default_build, [])
-
-
-get_manager().projects().add_rule("notfile", notfile)
diff --git a/jam-files/boost-build/tools/package.jam b/jam-files/boost-build/tools/package.jam
deleted file mode 100644
index 198c2231..00000000
--- a/jam-files/boost-build/tools/package.jam
+++ /dev/null
@@ -1,165 +0,0 @@
-# Copyright (c) 2005 Vladimir Prus.
-# Copyright 2006 Rene Rivera.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Provides a mechanism for installing whole packages into a specific directory
-# structure. This is in contrast to the 'install' rule, which installs a number
-# of targets into a single directory and does not care about directory
-# structure at all.
-
-# Example usage:
-#
-# package.install boost : <properties>
-# : <binaries>
-# : <libraries>
-# : <headers>
-# ;
-#
-# This will install binaries, libraries and headers to the 'proper' location,
-# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
-# --includedir.
-#
-# The rule is just a convenient wrapper, avoiding the need to define several
-# 'install' targets.
-#
-# The only install-related feature is <install-source-root>. It applies to
-# headers only and, if present, the paths of headers relative to the source
-# root are retained after installation. If it is not specified, then "." is
-# assumed, so relative paths in headers are always preserved.
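-#
-# Continuing the example above, the package target is then built like any other
-# target, with the install locations overridden on the command line if desired
-# (the paths below are purely illustrative):
-#
-#   bjam boost --prefix=/opt/boost --libdir=/opt/boost/lib64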
-
-import "class" : new ;
-import option ;
-import project ;
-import feature ;
-import property ;
-import stage ;
-import targets ;
-import modules ;
-
-feature.feature install-default-prefix : : free incidental ;
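-
-# A sketch of how this feature is intended to be used: a project may suggest a
-# default installation prefix via the package requirements, while an explicit
-# --prefix given on the command line still takes precedence (names and paths
-# below are hypothetical):
-#
-#   package.install mylib : <install-default-prefix>/opt/mylib : : mylib : mylib.hpp ;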
-
-rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * )
-{
- package-name ?= $(name) ;
- if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
- {
- # If --prefix is explicitly specified on the command line, then we need to
- # wipe away any libdir/includedir settings specified via options in config
- # files.
- option.set bindir : ;
- option.set libdir : ;
- option.set includedir : ;
- }
-
- # If <install-source-root> is not specified, all headers are installed to
- # prefix/include, no matter what their relative path is. Sometimes that is
- # what is needed.
- local install-source-root = [ property.select <install-source-root> :
- $(requirements) ] ;
- install-source-root = $(install-source-root:G=) ;
- requirements = [ property.change $(requirements) : <install-source-root> ] ;
-
- local install-header-subdir = [ property.select <install-header-subdir> :
- $(requirements) ] ;
- install-header-subdir = /$(install-header-subdir:G=) ;
- install-header-subdir ?= "" ;
- requirements = [ property.change $(requirements) : <install-header-subdir> ]
- ;
-
- # First, figure out all locations. Use the default if no prefix option
- # given.
- local prefix = [ get-prefix $(name) : $(requirements) ] ;
-
- # Architecture dependent files.
- local exec-locate = [ option.get exec-prefix : $(prefix) ] ;
-
- # Binaries.
- local bin-locate = [ option.get bindir : $(prefix)/bin ] ;
-
- # Object code libraries.
- local lib-locate = [ option.get libdir : $(prefix)/lib ] ;
-
- # Source header files.
- local include-locate = [ option.get includedir : $(prefix)/include ] ;
-
- stage.install $(name)-bin : $(binaries) : $(requirements)
- <location>$(bin-locate) ;
- alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ;
-
- # Since the install location of shared libraries differs between universe
- # and cygwin, use target alternatives to create different targets. We should
- # have used indirect conditional requirements, but it is awkward to pass
- # bin-locate and lib-locate from there to another rule.
- alias $(name)-lib-shared : $(name)-lib-shared-universe ;
- alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ;
-
- # For shared libraries, we install both the explicitly specified ones and
- # the shared libraries that the installed executables depend on.
- stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements)
- <location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ;
- stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements)
- <location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ;
-
- # For static libraries, we do not care about executable dependencies, since
- # static libraries are already incorporated into them.
- stage.install $(name)-lib-static : $(libraries) : $(requirements)
- <location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ;
- stage.install $(name)-headers : $(headers) : $(requirements)
- <location>$(include-locate)$(install-header-subdir)
- <install-source-root>$(install-source-root) ;
- alias $(name) : $(name)-bin $(name)-lib $(name)-headers ;
-
- local c = [ project.current ] ;
- local project-module = [ $(c).project-module ] ;
- module $(project-module)
- {
- explicit $(1)-bin $(1)-lib $(1)-headers $(1) $(1)-lib-shared $(1)-lib-static
- $(1)-lib-shared-universe $(1)-lib-shared-cygwin ;
- }
-}
-
-rule install-data ( target-name : package-name : data * : requirements * )
-{
- package-name ?= target-name ;
- if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
- {
- # If --prefix is explicitly specified on the command line, then we need to
- # wipe away any settings of datarootdir.
- option.set datarootdir : ;
- }
-
- local prefix = [ get-prefix $(package-name) : $(requirements) ] ;
- local datadir = [ option.get datarootdir : $(prefix)/share ] ;
-
- stage.install $(target-name)
- : $(data)
- : $(requirements) <location>$(datadir)/$(package-name)
- ;
-
- local c = [ project.current ] ;
- local project-module = [ $(c).project-module ] ;
- module $(project-module)
- {
- explicit $(1) ;
- }
-}
-
-local rule get-prefix ( package-name : requirements * )
-{
- local prefix = [ option.get prefix : [ property.select
- <install-default-prefix> : $(requirements) ] ] ;
- prefix = $(prefix:G=) ;
- requirements = [ property.change $(requirements) : <install-default-prefix>
- ] ;
- # Or some likely defaults if neither is given.
- if ! $(prefix)
- {
- if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; }
- else if [ modules.peek : UNIX ] { prefix = /usr/local ; }
- }
- return $(prefix) ;
-}
-
diff --git a/jam-files/boost-build/tools/package.py b/jam-files/boost-build/tools/package.py
deleted file mode 100644
index aa081b4f..00000000
--- a/jam-files/boost-build/tools/package.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# Status: ported
-# Base revision: 64488
-#
-# Copyright (c) 2005, 2010 Vladimir Prus.
-# Copyright 2006 Rene Rivera.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Provides a mechanism for installing whole packages into a specific directory
-# structure. This is in contrast to the 'install' rule, which installs a number
-# of targets into a single directory and does not care about directory
-# structure at all.
-
-# Example usage:
-#
-# package.install boost : <properties>
-# : <binaries>
-# : <libraries>
-# : <headers>
-# ;
-#
-# This will install binaries, libraries and headers to the 'proper' location,
-# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
-# --includedir.
-#
-# The rule is just a convenient wrapper, avoiding the need to define several
-# 'install' targets.
-#
-# The only install-related feature is <install-source-root>. It applies to
-# headers only and, if present, the paths of headers relative to the source
-# root are retained after installation. If it is not specified, then "." is
-# assumed, so relative paths in headers are always preserved.
-
-import b2.build.feature as feature
-import b2.build.property as property
-import b2.util.option as option
-import b2.tools.stage as stage
-
-from b2.build.alias import alias
-
-from b2.manager import get_manager
-
-from b2.util import bjam_signature
-from b2.util.utility import ungrist
-
-
-import os
-
-feature.feature("install-default-prefix", [], ["free", "incidental"])
-
-@bjam_signature((["name", "package_name", "?"], ["requirements", "*"],
- ["binaries", "*"], ["libraries", "*"], ["headers", "*"]))
-def install(name, package_name=None, requirements=[], binaries=[], libraries=[], headers=[]):
-
- requirements = requirements[:]
- binaries = binaries[:]
- libraries = libraries[:]
-
- if not package_name:
- package_name = name
-
- if option.get("prefix"):
- # If --prefix is explicitly specified on the command line, then we need to
- # wipe away any libdir/includedir settings specified via options in config
- # files.
- option.set("bindir", None)
- option.set("libdir", None)
- option.set("includedir", None)
-
- # If <install-source-root> is not specified, all headers are installed to
- # prefix/include, no matter what their relative path is. Sometimes that is
- # what is needed.
- install_source_root = property.select('install-source-root', requirements)
- if install_source_root:
- requirements = property.change(requirements, 'install-source-root', None)
-
- install_header_subdir = property.select('install-header-subdir', requirements)
- if install_header_subdir:
- install_header_subdir = ungrist(install_header_subdir[0])
- requirements = property.change(requirements, 'install-header-subdir', None)
-
- # First, figure out all locations. Use the default if no prefix option
- # given.
- prefix = get_prefix(name, requirements)
-
- # Architecture dependent files.
- exec_locate = option.get("exec-prefix", prefix)
-
- # Binaries.
- bin_locate = option.get("bindir", os.path.join(prefix, "bin"))
-
- # Object code libraries.
- lib_locate = option.get("libdir", os.path.join(prefix, "lib"))
-
- # Source header files.
- include_locate = option.get("includedir", os.path.join(prefix, "include"))
-
- stage.install(name + "-bin", binaries, requirements + ["<location>" + bin_locate])
-
- alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"])
-
- # Since the install location of shared libraries differs between universe
- # and cygwin, use target alternatives to create different targets. We should
- # have used indirect conditional requirements, but it is awkward to pass
- # bin-locate and lib-locate from there to another rule.
- alias(name + "-lib-shared", [name + "-lib-shared-universe"])
- alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["<target-os>cygwin"])
-
- # For shared libraries, we install both the explicitly specified ones and
- # the shared libraries that the installed executables depend on.
- stage.install(name + "-lib-shared-universe", binaries + libraries,
- requirements + ["<location>" + lib_locate, "<install-dependencies>on",
- "<install-type>SHARED_LIB"])
- stage.install(name + "-lib-shared-cygwin", binaries + libraries,
- requirements + ["<location>" + bin_locate, "<install-dependencies>on",
- "<install-type>SHARED_LIB"])
-
- # For static libraries, we do not care about executable dependencies, since
- # static libraries are already incorporated into them.
- stage.install(name + "-lib-static", libraries, requirements +
- ["<location>" + lib_locate, "<install-dependencies>on", "<install-type>STATIC_LIB"])
- stage.install(name + "-headers", headers, requirements \
- + ["<location>" + os.path.join(include_locate, s) for s in install_header_subdir]
- + install_source_root)
-
- alias(name, [name + "-bin", name + "-lib", name + "-headers"])
-
- pt = get_manager().projects().current()
-
- for subname in ["bin", "lib", "headers", "lib-shared", "lib-static", "lib-shared-universe", "lib-shared-cygwin"]:
- pt.mark_targets_as_explicit([name + "-" + subname])
-
-@bjam_signature((["target_name"], ["package_name"], ["data", "*"], ["requirements", "*"]))
-def install_data(target_name, package_name, data, requirements):
- if not package_name:
- package_name = target_name
-
- if option.get("prefix"):
- # If --prefix is explicitly specified on the command line, then we need to
- # wipe away any settings of datarootdir.
- option.set("datarootdir", None)
-
- prefix = get_prefix(package_name, requirements)
- datadir = option.get("datarootdir", os.path.join(prefix, "share"))
-
- stage.install(target_name, data,
- requirements + ["<location>" + os.path.join(datadir, package_name)])
-
- get_manager().projects().current().mark_targets_as_explicit([target_name])
-
-def get_prefix(package_name, requirements):
-
- specified = property.select("install-default-prefix", requirements)
- if specified:
- specified = ungrist(specified[0])
- prefix = option.get("prefix", specified)
- requirements = property.change(requirements, "install-default-prefix", None)
- # Or some likely defaults if neither is given.
- if not prefix:
- if os.name == "nt":
- prefix = "C:\\" + package_name
- elif os.name == "posix":
- prefix = "/usr/local"
-
- return prefix
-
diff --git a/jam-files/boost-build/tools/pathscale.jam b/jam-files/boost-build/tools/pathscale.jam
deleted file mode 100644
index 454e3454..00000000
--- a/jam-files/boost-build/tools/pathscale.jam
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright 2006 Noel Belcourt
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property ;
-import generators ;
-import toolset : flags ;
-import feature ;
-import type ;
-import common ;
-import fortran ;
-
-feature.extend toolset : pathscale ;
-toolset.inherit pathscale : unix ;
-generators.override pathscale.prebuilt : builtin.prebuilt ;
-generators.override pathscale.searched-lib-generator : searched-lib-generator ;
-
-# Documentation and toolchain description are located at
-# http://www.pathscale.com/docs.html
-
-rule init ( version ? : command * : options * )
-{
- command = [ common.get-invocation-command pathscale : pathCC : $(command)
- : /opt/ekopath/bin ] ;
-
- # Determine the version
- local command-string = $(command:J=" ") ;
- if $(command)
- {
- version ?= [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- }
-
- local condition = [ common.check-init-parameters pathscale
- : version $(version) ] ;
-
- common.handle-options pathscale : $(condition) : $(command) : $(options) ;
-
- toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
- [ feature.get-values <fflags> : $(options) ] : unchecked ;
-
- command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
-
- toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
-
- # fortran support
- local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
- local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
- local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
-
- toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
- toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
-
- # always link lib rt to resolve clock_gettime()
- flags pathscale.link FINDLIBS-SA : rt : unchecked ;
-}
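-
-# A minimal configuration sketch for user-config.jam (the version number and
-# command path are purely illustrative; by default pathCC is searched for in
-# PATH and /opt/ekopath/bin):
-#
-#   using pathscale : 3.2 : /opt/ekopath/bin/pathCC ;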
-
-# Declare generators
-generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
-generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
-generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
-generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
-
-# Declare flags and actions for compilation
-flags pathscale.compile OPTIONS <optimization>off : -O0 ;
-flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
-flags pathscale.compile OPTIONS <optimization>space : -Os ;
-
-flags pathscale.compile OPTIONS <inlining>off : -noinline ;
-flags pathscale.compile OPTIONS <inlining>on : -inline ;
-flags pathscale.compile OPTIONS <inlining>full : -inline ;
-
-flags pathscale.compile OPTIONS <warnings>off : -woffall ;
-flags pathscale.compile OPTIONS <warnings>on : -Wall ;
-flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
-flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
-flags pathscale.compile OPTIONS <profiling>on : -pg ;
-flags pathscale.compile OPTIONS <link>shared : -fPIC ;
-flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
-flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
-
-flags pathscale.compile USER_OPTIONS <cflags> ;
-flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
-flags pathscale.compile DEFINES <define> ;
-flags pathscale.compile INCLUDES <include> ;
-
-flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
-flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
-
-actions compile.c
-{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.fortran
-{
- "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.fortran90 ( targets * : sources * : properties * )
-{
- # The SPACE variable is used to insert spaces between targets; it is needed by the action below.
- SPACE on $(targets) = " " ;
- # Serialize execution of the compile.fortran90 action. F90 sources must be
- # compiled in a particular order, so we serialize the build because a
- # parallel F90 compile might fail.
- JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
-}
-
-actions compile.fortran90
-{
- "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
-}
-
-# Declare flags and actions for linking
-flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
-# Strip the binary when no debugging is needed
-flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
-flags pathscale.link OPTIONS <profiling>on : -pg ;
-flags pathscale.link USER_OPTIONS <linkflags> ;
-flags pathscale.link LINKPATH <library-path> ;
-flags pathscale.link FINDLIBS-ST <find-static-library> ;
-flags pathscale.link FINDLIBS-SA <find-shared-library> ;
-flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
-flags pathscale.link LIBRARIES <library-file> ;
-flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
-flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
-flags pathscale.link RPATH <dll-path> ;
-# On gcc, there are separate options for the dll path at runtime and at link
-# time. On Solaris, there is only one, -R, so we have to use it, even though
-# it is a bad idea.
-flags pathscale.link RPATH <xdll-path> ;
-
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
-}
-
-# Slight mods for dlls
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
-}
-
-# Declare action for creating static libraries
-# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
-actions piecemeal archive
-{
- ar $(ARFLAGS) ru "$(<)" "$(>)"
-}
diff --git a/jam-files/boost-build/tools/pch.jam b/jam-files/boost-build/tools/pch.jam
deleted file mode 100644
index 0c6e98fa..00000000
--- a/jam-files/boost-build/tools/pch.jam
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright (c) 2005 Reece H. Dunn.
-# Copyright 2006 Ilya Sokolov
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-##### Using Precompiled Headers (Quick Guide) #####
-#
-# Make precompiled mypch.hpp:
-#
-# import pch ;
-#
-# cpp-pch mypch
-# : # sources
-# mypch.hpp
-# : # requirements
-# <toolset>msvc:<source>mypch.cpp
-# ;
-#
-# Add cpp-pch to sources:
-#
-# exe hello
-# : main.cpp hello.cpp mypch
-# ;
-
-import "class" : new ;
-import type ;
-import feature ;
-import generators ;
-
-type.register PCH : pch ;
-
-type.register C_PCH : : PCH ;
-type.register CPP_PCH : : PCH ;
-
-# Control precompiled header (PCH) generation.
-feature.feature pch :
- on
- off
- : propagated ;
-
-
-feature.feature pch-header : : free dependency ;
-feature.feature pch-file : : free dependency ;
-
-# Base PCH generator. The 'run' method has the logic to prevent this generator
-# from being run unless it's being used for a top-level PCH target.
-class pch-generator : generator
-{
- import property-set ;
-
- rule action-class ( )
- {
- return compile-action ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- if ! $(name)
- {
- # Unless this generator is invoked as the top-most generator for a
- # main target, fail. This allows using the 'H' type as an input type for
- # this generator, while preventing Boost.Build from trying this generator
- # when not explicitly asked for.
- #
- # One bad example is msvc, where the pch generator produces both a PCH
- # target and an OBJ target, so if any header is generated (e.g. by
- # bison, or by msidl), we would try to use the pch generator to get an
- # OBJ from that H, which is completely wrong. By restricting this
- # generator to pch main targets only, this problem is solved.
- }
- else
- {
- local r = [ run-pch $(project) $(name)
- : [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ]
- : $(sources) ] ;
- return [ generators.add-usage-requirements $(r)
- : <define>BOOST_BUILD_PCH_ENABLED ] ;
- }
- }
-
- # This rule must be overridden by the derived classes.
- rule run-pch ( project name ? : property-set : sources + )
- {
- }
-}
-
-
-# NOTE: requirements are empty, default pch generator can be applied when
-# pch=off.
-generators.register
- [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
-generators.register
- [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
diff --git a/jam-files/boost-build/tools/pch.py b/jam-files/boost-build/tools/pch.py
deleted file mode 100644
index 21d3db09..00000000
--- a/jam-files/boost-build/tools/pch.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Status: Being ported by Steven Watanabe
-# Base revision: 47077
-#
-# Copyright (c) 2005 Reece H. Dunn.
-# Copyright 2006 Ilya Sokolov
-# Copyright (c) 2008 Steven Watanabe
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-##### Using Precompiled Headers (Quick Guide) #####
-#
-# Make precompiled mypch.hpp:
-#
-# import pch ;
-#
-# cpp-pch mypch
-# : # sources
-# mypch.hpp
-# : # requirements
-# <toolset>msvc:<source>mypch.cpp
-# ;
-#
-# Add cpp-pch to sources:
-#
-# exe hello
-# : main.cpp hello.cpp mypch
-# ;
-
-from b2.build import type, feature, generators
-
-type.register('PCH', ['pch'])
-type.register('C_PCH', [], 'PCH')
-type.register('CPP_PCH', [], 'PCH')
-
-# Control precompiled header (PCH) generation.
-feature.feature('pch',
- ['on', 'off'],
- ['propagated'])
-
-feature.feature('pch-header', [], ['free', 'dependency'])
-feature.feature('pch-file', [], ['free', 'dependency'])
-
-class PchGenerator(generators.Generator):
- """
- Base PCH generator. The 'run' method has the logic to prevent this generator
- from being run unless it's being used for a top-level PCH target.
- """
- def action_class(self):
- return 'compile-action'
-
- def run(self, project, name, prop_set, sources):
- if not name:
- # Unless this generator is invoked as the top-most generator for a
- # main target, fail. This allows using the 'H' type as an input type for
- # this generator, while preventing Boost.Build from trying this generator
- # when not explicitly asked for.
- #
- # One bad example is msvc, where the pch generator produces both a PCH
- # target and an OBJ target, so if any header is generated (e.g. by
- # bison, or by msidl), we would try to use the pch generator to get an
- # OBJ from that H, which is completely wrong. By restricting this
- # generator to pch main targets only, this problem is solved.
- pass
- else:
- r = self.run_pch(project, name,
- prop_set.add_raw('<define>BOOST_BUILD_PCH_ENABLED'),
- sources)
- return generators.add_usage_requirements(
- r, ['<define>BOOST_BUILD_PCH_ENABLED'])
-
- # This rule must be overridden by the derived classes.
- def run_pch(self, project, name, prop_set, sources):
- pass
-
-#FIXME: dummy-generator in builtins.jam needs to be ported.
-# NOTE: requirements are empty, default pch generator can be applied when
-# pch=off.
-###generators.register(
-### [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
-###generators.register
-### [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
diff --git a/jam-files/boost-build/tools/pgi.jam b/jam-files/boost-build/tools/pgi.jam
deleted file mode 100644
index 3a35c644..00000000
--- a/jam-files/boost-build/tools/pgi.jam
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property ;
-import generators ;
-import os ;
-import toolset : flags ;
-import feature ;
-import fortran ;
-import type ;
-import common ;
-import gcc ;
-
-feature.extend toolset : pgi ;
-toolset.inherit pgi : unix ;
-generators.override pgi.prebuilt : builtin.lib-generator ;
-generators.override pgi.searched-lib-generator : searched-lib-generator ;
-
-# Documentation and toolchain description are located at
-# http://www.pgroup.com/resources/docs.htm
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters pgi : version $(version) ] ;
-
- local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ;
-
- common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
-
- command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ;
-
- toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
-
- flags pgi.compile DEFINES $(condition) :
- [ feature.get-values <define> : $(options) ] : unchecked ;
-
- # IOV_MAX support
- flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ;
-
- # set link flags
- flags pgi.link FINDLIBS-ST : [
- feature.get-values <find-static-library> : $(options) ] : unchecked ;
-
- # always link lib rt to resolve clock_gettime()
- flags pgi.link FINDLIBS-SA : rt [
- feature.get-values <find-shared-library> : $(options) ] : unchecked ;
-
- gcc.init-link-flags pgi gnu $(condition) ;
-}
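-
-# A minimal configuration sketch for user-config.jam (the version number and
-# command path are purely illustrative):
-#
-#   using pgi : 7.2 : /opt/pgi/linux86-64/7.2/bin/pgCC ;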
-
-# Declare generators
-generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
-generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
-generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
-
-# Declare flags and actions for compilation
-flags pgi.compile OPTIONS : -Kieee ;
-flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
-flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
-flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
-flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
-flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ;
-# flags pgi.compile OPTIONS <threading>multi : -mt ;
-
-flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
-flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
-
-flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
-
-flags pgi.compile OPTIONS <cflags> ;
-flags pgi.compile.c++ OPTIONS <cxxflags> ;
-flags pgi.compile DEFINES <define> ;
-flags pgi.compile INCLUDES <include> ;
-
-flags pgi.compile.fortran OPTIONS <fflags> ;
-
-actions compile.c
-{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.fortran
-{
- "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# Declare flags and actions for linking
-flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
-# Strip the binary when no debugging is needed
-flags pgi.link OPTIONS <debug-symbols>off : -s ;
-flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
-flags pgi.link OPTIONS <linkflags> ;
-flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
-flags pgi.link LINKPATH <library-path> ;
-flags pgi.link FINDLIBS-ST <find-static-library> ;
-flags pgi.link FINDLIBS-SA <find-shared-library> ;
-flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
-flags pgi.link LIBRARIES <library-file> ;
-flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
-flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
-flags pgi.link RPATH <dll-path> ;
-
-# On gcc, there are separate options for the dll path at runtime and at link
-# time. On Solaris, there is only one, -R, so we have to use it, even though
-# it is a bad idea.
-flags pgi.link RPATH <xdll-path> ;
-
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-# Reddish can only link statically and, somehow, the presence of -Bdynamic on
-# the link line marks the executable as dynamically linked even though no
-# dynamic libraries are supplied. Yod on redstorm refuses to load a dynamically
-# linked executable, so removing the dynamic link options should get us where
-# we need to be on redstorm.
-# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME)
-}
-
-# Slight mods for dlls
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-}
-
-actions updated together piecemeal pgi.archive
-{
- ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
-}
-
diff --git a/jam-files/boost-build/tools/python-config.jam b/jam-files/boost-build/tools/python-config.jam
deleted file mode 100644
index 40aa825b..00000000
--- a/jam-files/boost-build/tools/python-config.jam
+++ /dev/null
@@ -1,27 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for Python tools and libraries. To use, just import this module.
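-#
-# For example, a user-config.jam or site-config.jam may simply contain the
-# following line (a sketch; the module then registers any Python versions it
-# finds in the registry):
-#
-#   import python-config ;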
-
-import os ;
-import toolset : using ;
-
-if [ os.name ] = NT
-{
- for local R in 2.4 2.3 2.2
- {
- local python-path = [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ;
- local python-version = $(R) ;
-
- if $(python-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ;
- }
- using python : $(python-version) : $(python-path) ;
- }
- }
-}
diff --git a/jam-files/boost-build/tools/python.jam b/jam-files/boost-build/tools/python.jam
deleted file mode 100644
index 97a9f9a5..00000000
--- a/jam-files/boost-build/tools/python.jam
+++ /dev/null
@@ -1,1267 +0,0 @@
-# Copyright 2004 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for Python and the Boost.Python library.
-#
-# This module defines
-#
-# - a project 'python' with a target 'python' in it, that corresponds to the
-# python library
-#
-# - a main target rule 'python-extension' which can be used to build a python
-# extension.
-#
-# Extensions that use Boost.Python must explicitly link to it.
-
-import type ;
-import testing ;
-import generators ;
-import project ;
-import errors ;
-import targets ;
-import "class" : new ;
-import os ;
-import common ;
-import toolset ;
-import regex ;
-import numbers ;
-import string ;
-import property ;
-import sequence ;
-import path ;
-import feature ;
-import set ;
-import builtin ;
-import version ;
-
-
-# Make this module a project.
-project.initialize $(__name__) ;
-project python ;
-
-# Save the project so that if 'init' is called several times we define new
-# targets in the python project, not in whatever project we were called by.
-.project = [ project.current ] ;
-
-# Dynamic linker lib. Necessary to specify it explicitly on some platforms.
-lib dl ;
-# This contains the 'openpty' function needed by python. Again, on some systems
-# it needs to be passed to the linker explicitly.
-lib util ;
-# Python uses pthread symbols.
-lib pthread ;
-# Extra library needed by pthread on some platforms.
-lib rt ;
-
-# The pythonpath feature specifies additional elements for the PYTHONPATH
-# environment variable, set by run-pyd. For example, pythonpath can be used to
-# access Python modules that are part of the product being built, but are not
-# installed in the development system's default paths.
-feature.feature pythonpath : : free optional path ;
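-
-# For example, a requirement such as the following (the path is purely
-# illustrative) makes modules built in the source tree importable when the
-# tests are run:
-#
-#   <pythonpath>/path/to/build/python-modules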
-
-# Initializes the Python toolset. Note that all parameters are optional.
-#
-# - version -- the version of Python to use. Should be in Major.Minor format,
-# for example 2.3. Do not include the subminor version.
-#
-# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter.
-# Alternatively, the installation prefix for Python libraries and includes. If
-# empty, will be guessed from the version, the platform's installation
-# patterns, and the python executables that can be found in PATH.
-#
-# - includes: the include path to Python headers. If empty, will be guessed.
-#
-# - libraries: the path to Python library binaries. If empty, will be guessed.
-# On MacOS/Darwin, you can also pass the path of the Python framework.
-#
-# - condition: if specified, should be a set of properties that are matched
-# against the build configuration when Boost.Build selects a Python
-# configuration to use.
-#
-# - extension-suffix: A string to append to the name of extension modules before
-# the true filename extension. Ordinarily we would just compute this based on
-# the value of the <python-debugging> feature. However ubuntu's python-dbg
-# package uses the windows convention of appending _d to debug-build extension
-# modules. We have no way of detecting ubuntu, or of probing python for the
-# "_d" requirement, and if you configure and build python using
-# --with-pydebug, you'll be using the standard *nix convention. Defaults to ""
-# (or "_d" when targeting windows and <python-debugging> is set).
-#
-# Example usage:
-#
-# using python : 2.3 ;
-# using python : 2.3 : /usr/local/bin/python ;
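-#
-#   # A fuller sketch (all paths illustrative) giving explicit include and
-#   # library directories and restricting the configuration to 64-bit builds:
-#   using python : 2.6 : /usr/bin/python2.6 : /usr/include/python2.6 : /usr/lib : <address-model>64 ;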
-#
-rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
- : condition * : extension-suffix ? )
-{
- project.push-current $(.project) ;
-
- debug-message Configuring python... ;
- for local v in version cmd-or-prefix includes libraries condition
- {
- if $($(v))
- {
- debug-message " user-specified "$(v): \"$($(v))\" ;
- }
- }
-
- configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
-
- project.pop-current ;
-}
-
-# A simpler version of SHELL that grabs stderr as well as stdout, but returns
-# nothing if there was an error.
-#
-local rule shell-cmd ( cmd )
-{
- debug-message running command '$(cmd)" 2>&1"' ;
- x = [ SHELL $(cmd)" 2>&1" : exit-status ] ;
- if $(x[2]) = 0
- {
- return $(x[1]) ;
- }
- else
- {
- return ;
- }
-}
-
-
-# Try to identify Cygwin symlinks. Invoking such a file directly as an NT
-# executable from a native Windows build of bjam would be fatal to the bjam
-# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove
-# that those are not also symlinks. ;-)
-#
-# If a symlink is found returns non-empty; we try to extract the target of the
-# symlink from the file and return that.
-#
-# Note: 1. only works on NT 2. path is a native path.
-local rule is-cygwin-symlink ( path )
-{
- local is-symlink = ;
-
- # Look for a file with the given path having the S attribute set, as cygwin
- # symlinks do. /-C means "do not use thousands separators in file sizes."
- local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ;
-
- if $(dir-listing)
- {
- # Escape any special regex characters in the base part of the path.
- local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ;
-
- # Extract the file's size from the directory listing.
- local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ;
-
- # If the file has a reasonably small size, look for the special symlink
- # identification text.
- if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ]
- {
- local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ;
- if $(link[2]) != 0
- {
- local nl = "
-
-" ;
- is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ;
- if $(is-symlink)
- {
- is-symlink = [ *nix-path-to-native $(is-symlink) ] ;
- is-symlink = $(is-symlink:R=$(path:D)) ;
- }
-
- }
- }
- }
- return $(is-symlink) ;
-}
-
-
-# Append ext to each member of names that does not contain '.'.
-#
-local rule default-extension ( names * : ext * )
-{
- local result ;
- for local n in $(names)
- {
- switch $(n)
- {
- case *.* : result += $(n) ;
- case * : result += $(n)$(ext) ;
- }
- }
- return $(result) ;
-}
-
-
-# Tries to determine whether invoking "cmd" would actually attempt to launch a
-# cygwin symlink.
-#
-# Note: only works on NT.
-#
-local rule invokes-cygwin-symlink ( cmd )
-{
- local dirs = $(cmd:D) ;
- if ! $(dirs)
- {
- dirs = . [ os.executable-path ] ;
- }
- local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ;
- local paths = [ GLOB $(dirs) : $(base) ] ;
- if $(paths)
- {
- # Make sure we have not run into a Cygwin symlink. Invoking such a file
- # as an NT executable would be fatal for the bjam process.
- return [ is-cygwin-symlink $(paths[1]) ] ;
- }
-}
-
-
-local rule debug-message ( message * )
-{
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO notice: [python-cfg] $(message) ;
- }
-}
-
-
-# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and
-# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result
-# found. Also accounts for the fact that on 64-bit machines, 32-bit software has
-# its own area, under SOFTWARE\Wow6432node.
-#
-local rule software-registry-value ( path : data ? )
-{
- local result ;
- for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE
- {
- for local x64elt in "" Wow6432node\\ # Account for 64-bit windows
- {
- if ! $(result)
- {
- result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ;
- }
- }
-
- }
- return $(result) ;
-}
-
-
-.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ;
-.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ;
-
-.working-directory = [ PWD ] ;
-.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ;
-.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ;
-
-
-local rule windows-to-cygwin-path ( path )
-{
- # If path is rooted with a drive letter, rewrite it using the /cygdrive
- # mountpoint.
- local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ;
-
- # Else if path is rooted without a drive letter, use the working directory.
- p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ;
-
- # Else return the path unchanged.
- return $(p:E=$(path:T)) ;
-}
-
-
-# :W only works in Cygwin builds of bjam. This one works on NT builds as well.
-#
-local rule cygwin-to-windows-path ( path )
-{
- path = $(path:R="") ; # strip any trailing slash
-
- local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ;
- if $(drive-letter)
- {
- path = $(drive-letter) ;
- }
- else if $(path:R=/x) = $(path) # already rooted?
- {
- # Look for a cygwin mount that includes each head sequence in $(path).
- local head = $(path) ;
- local tail = "" ;
-
- while $(head)
- {
- local root = [ software-registry-value
- "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ;
-
- if $(root)
- {
- path = $(tail:R=$(root)) ;
- head = ;
- }
- tail = $(tail:R=$(head:D=)) ;
-
- if $(head) = /
- {
- head = ;
- }
- else
- {
- head = $(head:D) ;
- }
- }
- }
- return [ regex.replace $(path:R="") / \\ ] ;
-}
-
-
-# Convert a *nix path to native.
-#
-local rule *nix-path-to-native ( path )
-{
- if [ os.name ] = NT
- {
- path = [ cygwin-to-windows-path $(path) ] ;
- }
- return $(path) ;
-}
-
-
-# Convert an NT path to native.
-#
-local rule windows-path-to-native ( path )
-{
- if [ os.name ] = NT
- {
- return $(path) ;
- }
- else
- {
- return [ windows-to-cygwin-path $(path) ] ;
- }
-}
-
-
-# Return nonempty if path looks like a windows path, i.e. it starts with a drive
-# letter or contains backslashes.
-#
-local rule guess-windows-path ( path )
-{
- return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ;
-}
-
-
-local rule path-to-native ( paths * )
-{
- local result ;
-
- for local p in $(paths)
- {
- if [ guess-windows-path $(p) ]
- {
- result += [ windows-path-to-native $(p) ] ;
- }
- else
- {
- result += [ *nix-path-to-native $(p:T) ] ;
- }
- }
- return $(result) ;
-}
-
-
-# Validate the version string and extract the major/minor part we care about.
-#
-local rule split-version ( version )
-{
- local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
- if ! $(major-minor[2]) || $(major-minor[3])
- {
- ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ;
-
- # Add a zero to account for the missing digit if necessary.
- major-minor += 0 ;
- }
-
- return $(major-minor[1]) $(major-minor[2]) ;
-}
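-
-
-# As a rough illustration (hypothetical inputs): [ split-version 2.7 ] yields
-# "2 7", while [ split-version 2.7.3 ] emits the warning above and also yields
-# "2 7", with the extra ".3" component dropped.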
-
-
-# Build a list of versions from 3.0 down to 1.5. Because bjam can not enumerate
-# registry sub-keys, we have no way of finding a version with a 2-digit minor
-# version, e.g. 2.10 -- let us hope that never happens.
-#
-.version-countdown = ;
-for local v in [ numbers.range 15 30 ]
-{
- .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ;
-}
-
-
-local rule windows-installed-pythons ( version ? )
-{
- version ?= $(.version-countdown) ;
- local interpreters ;
-
- for local v in $(version)
- {
- local install-path = [
- software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ;
-
- if $(install-path)
- {
- install-path = [ windows-path-to-native $(install-path) ] ;
- debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ;
- }
-
- interpreters += $(:E=python:R=$(install-path)) ;
- }
- return $(interpreters) ;
-}
-
-
-local rule darwin-installed-pythons ( version ? )
-{
- version ?= $(.version-countdown) ;
-
- local prefix
- = [ GLOB /System/Library/Frameworks /Library/Frameworks
- : Python.framework ] ;
-
- return $(prefix)/Versions/$(version)/bin/python ;
-}
-
-
-# Assume "python-cmd" invokes a python interpreter and invoke it to extract all
-# the information we care about from its "sys" module. Returns void if
-# unsuccessful.
-#
-local rule probe ( python-cmd )
-{
- # Avoid invoking a Cygwin symlink on NT.
- local skip-symlink ;
- if [ os.name ] = NT
- {
- skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ;
- }
-
- if $(skip-symlink)
- {
- debug-message -------------------------------------------------------------------- ;
- debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ;
- debug-message causing a bjam built for Windows to hang. ;
- debug-message ;
- debug-message If you intend to target a Cygwin build of Python, please ;
- debug-message replace the path to the link with the path to a real executable ;
- debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ;
- debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ;
- debug-message backslashes ;
- debug-message -------------------------------------------------------------------- ;
- }
- else
- {
- # Prepare a List of Python format strings and expressions that can be
- # used to print the constants we want from the sys module.
-
- # We do not really want sys.version since that is a complicated string,
- # so get the information from sys.version_info instead.
- local format = "version=%d.%d" ;
- local exprs = "version_info[0]" "version_info[1]" ;
-
- for local s in $(sys-elements[2-])
- {
- format += $(s)=%s ;
- exprs += $(s) ;
- }
-
- # Invoke Python and ask it for all those values.
- if [ version.check-jam-version 3 1 17 ] || ( [ os.name ] != NT )
- {
- # Prior to version 3.1.17 Boost Jam's SHELL command did not support
- # quoted commands correctly on Windows. This means that on that
- # platform we do not support using a Python command interpreter
- # executable whose path contains a space character.
- python-cmd = \"$(python-cmd)\" ;
- }
- local full-cmd =
- $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ;
-
- local output = [ shell-cmd $(full-cmd) ] ;
- if $(output)
- {
- # Parse the output to get all the results.
- local nl = "
-
-" ;
- for s in $(sys-elements)
- {
- # These variables are expected to be declared local in the
- # caller, so Jam's dynamic scoping will set their values there.
- sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ;
- }
- }
- return $(output) ;
- }
-}
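-
-
-# For reference, with the sys-elements list set up by 'configure' below
-# (version platform prefix exec_prefix executable), the command constructed
-# above is roughly equivalent to (illustration only):
-#
-#   python -c "from sys import *; print('version=%d.%d\nplatform=%s\nprefix=%s\nexec_prefix=%s\nexecutable=%s'
-#       % (version_info[0], version_info[1], platform, prefix, exec_prefix, executable))"
-#
-# whose output is then parsed line by line into sys.version, sys.platform, etc.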
-
-
-# Make sure the "libraries" and "includes" variables (in an enclosing scope)
-# have a value based on the information given.
-#
-local rule compute-default-paths ( target-os : version ? : prefix ? :
- exec-prefix ? )
-{
- exec-prefix ?= $(prefix) ;
-
- if $(target-os) = windows
- {
- # The exec_prefix is where you're supposed to look for machine-specific
- # libraries.
- local default-library-path = $(exec-prefix)\\libs ;
- local default-include-path = $(:E=Include:R=$(prefix)) ;
-
- # If the interpreter was found in a directory called "PCBuild" or
- # "PCBuild8," assume we're looking at a Python built from the source
- # distro, and go up one additional level to the default root. Otherwise,
- # the default root is the directory where the interpreter was found.
-
- # We ask Python itself what the executable path is in case of
- # intermediate symlinks or shell scripts.
- local executable-dir = $(sys.executable:D) ;
-
- if [ MATCH ^(PCBuild) : $(executable-dir:D=) ]
- {
- debug-message "This Python appears to reside in a source distribution;" ;
- debug-message "prepending \""$(executable-dir)"\" to default library search path" ;
-
- default-library-path = $(executable-dir) $(default-library-path) ;
-
- default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ;
-
- debug-message "and \""$(default-include-path[1])"\" to default #include path" ;
- }
-
- libraries ?= $(default-library-path) ;
- includes ?= $(default-include-path) ;
- }
- else
- {
- includes ?= $(prefix)/include/python$(version) ;
-
- local lib = $(exec-prefix)/lib ;
- libraries ?= $(lib)/python$(version)/config $(lib) ;
- }
-}
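-
-# For example, on a typical POSIX layout with prefix /usr and version 2.5,
-# this would default to (illustrative values only):
-#
-#   includes  = /usr/include/python2.5
-#   libraries = /usr/lib/python2.5/config /usr/lib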
-
-# The version of the python interpreter to use.
-feature.feature python : : propagated ;
-feature.feature python.interpreter : : free ;
-
-toolset.flags python.capture-output PYTHON : <python.interpreter> ;
-
-#
-# Support for Python configured --with-pydebug
-#
-feature.feature python-debugging : off on : propagated ;
-builtin.variant debug-python : debug : <python-debugging>on ;
-
-
-# Return a list of candidate commands to try when looking for a Python
-# interpreter. prefix is expected to be a native path.
-#
-local rule candidate-interpreters ( version ? : prefix ? : target-os )
-{
- local bin-path = bin ;
- if $(target-os) = windows
- {
- # On Windows, look in the root directory itself and, to work with the
- # result of a build-from-source, the PCBuild directory.
- bin-path = PCBuild8 PCBuild "" ;
- }
-
- bin-path = $(bin-path:R=$(prefix)) ;
-
- if $(target-os) in windows darwin
- {
- return # Search:
- $(:E=python:R=$(bin-path)) # Relative to the prefix, if any
- python # In the PATH
- [ $(target-os)-installed-pythons $(version) ] # Standard install locations
- ;
- }
- else
- {
- # Search relative to the prefix, or if none supplied, in PATH.
- local unversioned = $(:E=python:R=$(bin-path:E=)) ;
-
- # If a version was specified, look for a python with that specific
- # version appended before looking for one called, simply, "python"
- return $(unversioned)$(version) $(unversioned) ;
- }
-}
-
-
-# Compute system library dependencies for targets linking with static Python
-# libraries.
-#
-# On many systems, Python uses libraries such as pthreads or libdl. Since static
-# libraries carry no library dependency information of their own that the linker
-# can extract, these extra dependencies have to be given explicitly on the link
-# line of the client. The information about these dependencies is packaged into
-# the "python" target below.
-#
-# Even where Python itself uses pthreads, it never allows extension modules to
-# be entered concurrently (unless they explicitly give up the interpreter lock).
-# Therefore, extension modules do not need the efficiency overhead of threadsafe
-# code as produced by <threading>multi, and we handle libpthread along with
-# other libraries here. Note: this optimization is based on an assumption that
-# the compiler generates link-compatible code in both the single- and
-# multi-threaded cases, and that system libraries do not change their ABIs
-# either.
-#
-# Returns a list of usage-requirements that link to the necessary system
-# libraries.
-#
-local rule system-library-dependencies ( target-os )
-{
- switch $(target-os)
- {
- case s[uo][nl]* : # solaris, sun, sunos
- # Add a librt dependency for the gcc toolset on SunOS (the sun
- # toolset adds -lrt unconditionally). While this appears to
- # duplicate the logic already in gcc.jam, it does not as long as
- # we are not forcing <threading>multi.
-
- # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields
- # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem
- # to be the right list for extension modules. For example, on my
- # installation, adding -ldl causes at least one test to fail because
- # the library can not be found and removing it causes no failures.
-
- # Apparently, though, we need to add -lrt for gcc.
- return <toolset>gcc:<library>rt ;
-
- case osf : return <library>pthread <toolset>gcc:<library>rt ;
-
- case qnx* : return ;
- case darwin : return ;
- case windows : return ;
-
- case hpux : return <library>rt ;
- case *bsd : return <library>pthread <toolset>gcc:<library>util ;
-
- case aix : return <library>pthread <library>dl ;
-
- case * : return <library>pthread <library>dl
- <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ;
- }
-}
-
-
-# Declare a target to represent Python's library.
-#
-local rule declare-libpython-target ( version ? : requirements * )
-{
- # Compute the representation of Python version in the name of Python's
- # library file.
- local lib-version = $(version) ;
- if <target-os>windows in $(requirements)
- {
- local major-minor = [ split-version $(version) ] ;
- lib-version = $(major-minor:J="") ;
- if <python-debugging>on in $(requirements)
- {
- lib-version = $(lib-version)_d ;
- }
- }
-
- if ! $(lib-version)
- {
- ECHO *** warning: could not determine Python version, which will ;
- ECHO *** warning: probably prevent us from linking with the python ;
- ECHO *** warning: library. Consider explicitly passing the version ;
- ECHO *** warning: to 'using python'. ;
- }
-
- # Declare it.
- lib python.lib : : <name>python$(lib-version) $(requirements) ;
-}
-
-
-# Implementation of init.
-local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
- condition * : extension-suffix ? )
-{
- local prefix ;
- local exec-prefix ;
- local cmds-to-try ;
- local interpreter-cmd ;
-
- local target-os = [ feature.get-values target-os : $(condition) ] ;
- target-os ?= [ feature.defaults target-os ] ;
- target-os = $(target-os:G=) ;
-
- if $(target-os) = windows && <python-debugging>on in $(condition)
- {
- extension-suffix ?= _d ;
- }
- extension-suffix ?= "" ;
-
- # Normalize and dissect any version number.
- local major-minor ;
- if $(version)
- {
- major-minor = [ split-version $(version) ] ;
- version = $(major-minor:J=.) ;
- }
-
- local cmds-to-try ;
-
- if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ]
- {
- # If the user did not pass a command, whatever we got was a prefix.
- prefix = $(cmd-or-prefix) ;
- cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ;
- }
- else
- {
- # Work with the command the user gave us.
- cmds-to-try = $(cmd-or-prefix) ;
-
- # On Windows, do not nail down the interpreter command just yet in case
- # the user specified something that turns out to be a cygwin symlink,
- # which could bring down bjam if we invoke it.
- if $(target-os) != windows
- {
- interpreter-cmd = $(cmd-or-prefix) ;
- }
- }
-
- # Values to use in case we can not really find anything in the system.
- local fallback-cmd = $(cmds-to-try[1]) ;
- local fallback-version ;
-
- # Anything left to find or check?
- if ! ( $(interpreter-cmd) && $(includes) && $(libraries) )
- {
- # Values to be extracted from python's sys module. These will be set by
- # the probe rule, above, using Jam's dynamic scoping.
- local sys-elements = version platform prefix exec_prefix executable ;
- local sys.$(sys-elements) ;
-
- # Compute the string Python's sys.platform needs to match. If not
- # targeting Windows or cygwin we will assume only native builds can
- # possibly run, so we will not require a match and we leave sys.platform
- # blank.
- local platform ;
- switch $(target-os)
- {
- case windows : platform = win32 ;
- case cygwin : platform = cygwin ;
- }
-
- while $(cmds-to-try)
- {
- # Pop top command.
- local cmd = $(cmds-to-try[1]) ;
- cmds-to-try = $(cmds-to-try[2-]) ;
-
- debug-message Checking interpreter command \"$(cmd)\"... ;
- if [ probe $(cmd) ]
- {
- fallback-version ?= $(sys.version) ;
-
- # Check for version/platform validity.
- for local x in version platform
- {
- if $($(x)) && $($(x)) != $(sys.$(x))
- {
- debug-message ...$(x) "mismatch (looking for"
- $($(x)) but found $(sys.$(x))")" ;
- cmd = ;
- }
- }
-
- if $(cmd)
- {
- debug-message ...requested configuration matched! ;
-
- exec-prefix = $(sys.exec_prefix) ;
-
- compute-default-paths $(target-os) : $(sys.version) :
- $(sys.prefix) : $(sys.exec_prefix) ;
-
- version = $(sys.version) ;
- interpreter-cmd ?= $(cmd) ;
- cmds-to-try = ; # All done.
- }
- }
- else
- {
- debug-message ...does not invoke a working interpreter ;
- }
- }
- }
-
- # Anything left to compute?
- if $(includes) && $(libraries)
- {
- .configured = true ;
- }
- else
- {
- version ?= $(fallback-version) ;
- version ?= 2.5 ;
- exec-prefix ?= $(prefix) ;
- compute-default-paths $(target-os) : $(version) : $(prefix:E=) ;
- }
-
- if ! $(interpreter-cmd)
- {
- fallback-cmd ?= python ;
- debug-message No working Python interpreter found. ;
- if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ]
- {
- interpreter-cmd = $(fallback-cmd) ;
- debug-message falling back to \"$(interpreter-cmd)\" ;
- }
- }
-
- includes = [ path-to-native $(includes) ] ;
- libraries = [ path-to-native $(libraries) ] ;
-
- debug-message "Details of this Python configuration:" ;
- debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ;
- debug-message " include path:" \"$(includes:E=<empty>)\" ;
- debug-message " library path:" \"$(libraries:E=<empty>)\" ;
- if $(target-os) = windows
- {
- debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ;
- }
-
- #
- # End autoconfiguration sequence.
- #
- local target-requirements = $(condition) ;
-
- # Add the version, if any, to the target requirements.
- if $(version)
- {
- if ! $(version) in [ feature.values python ]
- {
- feature.extend python : $(version) ;
- }
- target-requirements += <python>$(version:E=default) ;
- }
-
- target-requirements += <target-os>$(target-os) ;
-
- # See if we can find a framework directory on darwin.
- local framework-directory ;
- if $(target-os) = darwin
- {
- # Search upward for the framework directory.
- local framework-directory = $(libraries[-1]) ;
- while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
- {
- framework-directory = $(framework-directory:D) ;
- }
-
- if $(framework-directory:D=) = Python.framework
- {
- debug-message framework directory is \"$(framework-directory)\" ;
- }
- else
- {
- debug-message "no framework directory found; using library path" ;
- framework-directory = ;
- }
- }
-
- local dll-path = $(libraries) ;
-
- # Make sure that we can find the Python DLL on Windows.
- if ( $(target-os) = windows ) && $(exec-prefix)
- {
- dll-path += $(exec-prefix) ;
- }
-
- #
- # Prepare usage requirements.
- #
- local usage-requirements = [ system-library-dependencies $(target-os) ] ;
- usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ;
- if <python-debugging>on in $(condition)
- {
- if $(target-os) = windows
- {
- # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define
- # Py_DEBUG we will get multiple definition warnings.
- usage-requirements += <define>_DEBUG ;
- }
- else
- {
- usage-requirements += <define>Py_DEBUG ;
- }
- }
-
- # Global, but conditional, requirements to give access to the interpreter
- # for general utilities, like other toolsets, that run Python scripts.
- toolset.add-requirements
- $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ;
-
- # Register the right suffix for extensions.
- register-extension-suffix $(extension-suffix) : $(target-requirements) ;
-
- #
- # Declare the "python" target. This should really be called
- # python_for_embedding.
- #
-
- if $(framework-directory)
- {
- alias python
- :
- : $(target-requirements)
- :
- : $(usage-requirements) <framework>$(framework-directory)
- ;
- }
- else
- {
- declare-libpython-target $(version) : $(target-requirements) ;
-
-        # This is an evil hack. On Windows, when Python is embedded, nothing
- # seems to set up sys.path to include Python's standard library
- # (http://article.gmane.org/gmane.comp.python.general/544986). The evil
- # here, aside from the workaround necessitated by Python's bug, is that:
- #
- # a. we're guessing the location of the python standard library from the
- # location of pythonXX.lib
- #
- # b. we're hijacking the <testing.launcher> property to get the
- # environment variable set up, and the user may want to use it for
- # something else (e.g. launch the debugger).
- local set-PYTHONPATH ;
- if $(target-os) = windows
- {
- set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH :
- $(libraries:D)/Lib ] ;
- }
-
- alias python
- :
- : $(target-requirements)
- :
- # Why python.lib must be listed here instead of along with the
- # system libs is a mystery, but if we do not do it, on cygwin,
- # -lpythonX.Y never appears in the command line (although it does on
- # linux).
- : $(usage-requirements)
- <testing.launcher>$(set-PYTHONPATH)
- <library-path>$(libraries) <library>python.lib
- ;
- }
-
- # On *nix, we do not want to link either Boost.Python or Python extensions
- # to libpython, because the Python interpreter itself provides all those
- # symbols. If we linked to libpython, we would get duplicate symbols. So
- # declare two targets -- one for building extensions and another for
- # embedding.
- #
- # Unlike most *nix systems, Mac OS X's linker does not permit undefined
- # symbols when linking a shared library. So, we still need to link against
- # the Python framework, even when building extensions. Note that framework
- # builds of Python always use shared libraries, so we do not need to worry
- # about duplicate Python symbols.
- if $(target-os) in windows cygwin darwin
- {
- alias python_for_extensions : python : $(target-requirements) ;
- }
- # On AIX we need Python extensions and Boost.Python to import symbols from
- # the Python interpreter. Dynamic libraries opened with dlopen() do not
- # inherit the symbols from the Python interpreter.
- else if $(target-os) = aix
- {
- alias python_for_extensions
- :
- : $(target-requirements)
- :
- : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp
- ;
- }
- else
- {
- alias python_for_extensions
- :
- : $(target-requirements)
- :
- : $(usage-requirements)
- ;
- }
-}
-
-
-rule configured ( )
-{
- return $(.configured) ;
-}
-
-
-type.register PYTHON_EXTENSION : : SHARED_LIB ;
-
-
-local rule register-extension-suffix ( root : condition * )
-{
- local suffix ;
-
- switch [ feature.get-values target-os : $(condition) ]
- {
- case windows : suffix = pyd ;
- case cygwin : suffix = dll ;
- case hpux :
- {
- if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
- {
- suffix = sl ;
- }
- else
- {
- suffix = so ;
- }
- }
- case * : suffix = so ;
- }
-
- type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
-}
-
-
-# Unset 'lib' prefix for PYTHON_EXTENSION
-type.set-generated-target-prefix PYTHON_EXTENSION : : "" ;
-
-
-rule python-extension ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- if [ configured ]
- {
- requirements += <use>/python//python_for_extensions ;
- }
- requirements += <suppress-import-lib>true ;
-
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-IMPORT python : python-extension : : python-extension ;
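-
-# In a Jamfile, a typical invocation looks like (hypothetical target and source
-# names):
-#
-#   python-extension my_ext : my_ext.cpp /boost/python//boost_python ;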
-
-rule py2to3
-{
- common.copy $(>) $(<) ;
- 2to3 $(<) ;
-}
-
-actions 2to3
-{
- 2to3 -wn "$(<)"
- 2to3 -dwn "$(<)"
-}
-
-
-# Support for testing.
-type.register PY : py ;
-type.register RUN_PYD_OUTPUT ;
-type.register RUN_PYD : : TEST ;
-
-
-class python-test-generator : generator
-{
- import set ;
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- self.composing = true ;
- }
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- local pyversion = [ $(property-set).get <python> ] ;
- local python ;
- local other-pythons ;
-
-        # Make a new target that converts the Python source with 2to3 when running under Python 3.
- local rule make-2to3-source ( source )
- {
- if $(pyversion) >= 3.0
- {
- local a = [ new action $(source) : python.py2to3 : $(property-set) ] ;
- local t = [ utility.basename [ $(s).name ] ] ;
- local p = [ new file-target $(t) : PY : $(project) : $(a) ] ;
- return $(p) ;
- }
- else
- {
- return $(source) ;
- }
- }
-
- for local s in $(sources)
- {
- if [ $(s).type ] = PY
- {
- if ! $(python)
- {
- # First Python source ends up on command line.
- python = [ make-2to3-source $(s) ] ;
-
- }
- else
- {
- # Other Python sources become dependencies.
- other-pythons += [ make-2to3-source $(s) ] ;
- }
- }
- }
-
- local extensions ;
- for local s in $(sources)
- {
- if [ $(s).type ] = PYTHON_EXTENSION
- {
- extensions += $(s) ;
- }
- }
-
- local libs ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] LIB ]
- && ! $(s) in $(extensions)
- {
- libs += $(s) ;
- }
- }
-
- local new-sources ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] CPP ]
- {
- local name = [ utility.basename [ $(s).name ] ] ;
- if $(name) = [ utility.basename [ $(python).name ] ]
- {
- name = $(name)_ext ;
- }
- local extension = [ generators.construct $(project) $(name) :
- PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
-
- # The important part of usage requirements returned from
- # PYTHON_EXTENSION generator are xdll-path properties that will
- # allow us to find the python extension at runtime.
- property-set = [ $(property-set).add $(extension[1]) ] ;
-
- # Ignore usage requirements. We're a top-level generator and
- # nobody is going to use what we generate.
- new-sources += $(extension[2-]) ;
- }
- }
-
- property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
-
- result = [ construct-result $(python) $(extensions) $(new-sources) :
- $(project) $(name) : $(property-set) ] ;
- }
-}
-
-
-generators.register
- [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ;
-
-generators.register-standard testing.expect-success
- : RUN_PYD_OUTPUT : RUN_PYD ;
-
-
-# There are two different ways of spelling OS names. One is used for [ os.name ]
-# and the other is used for the <host-os> and <target-os> properties. Until that
-# is remedied, this sets up a crude mapping from the latter to the former, that
-# will work *for the purposes of cygwin/NT cross-builds only*. Could not think
-# of a better name than "translate".
-#
-.translate-os-windows = NT ;
-.translate-os-cygwin = CYGWIN ;
-local rule translate-os ( src-os )
-{
- local x = $(.translate-os-$(src-os)) [ os.name ] ;
- return $(x[1]) ;
-}
-
-
-# Extract the path to a single ".pyd" source. This is used to build the
-# PYTHONPATH for running bpl tests.
-#
-local rule pyd-pythonpath ( source )
-{
- return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
-}
-
-
-# The flag settings on testing.capture-output do not apply to
-# python.capture-output at the moment. Redo this explicitly.
-toolset.flags python.capture-output ARGS <testing.arg> ;
-
-
-rule capture-output ( target : sources * : properties * )
-{
-    # Set up a proper DLL search path. Here, $(sources[1]) is a python module
-    # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to
-    # testing.capture-output, so the RUN_PATH variable on $(sources[2]) is not
-    # consulted. Move it over explicitly.
- RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
-
- PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
- PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
-
-    # After the test is run, we remove the Python module, but not the Python script.
- testing.capture-output $(target) : $(sources[1]) : $(properties) :
- $(sources[2-]) ;
-
- # PYTHONPATH is different; it will be interpreted by whichever Python is
- # invoked and so must follow path rules for the target os. The only OSes
- # where we can run python for other OSes currently are NT and CYGWIN so we
- # only need to handle those cases.
- local target-os = [ feature.get-values target-os : $(properties) ] ;
- # Oddly, host-os is not in properties, so grab the default value.
- local host-os = [ feature.defaults host-os ] ;
- host-os = $(host-os:G=) ;
- if $(target-os) != $(host-os)
- {
- PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path :
- $(PYTHONPATH) ] ;
- }
- local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
- local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
- $(PYTHONPATH:J=$(path-separator)) ] ;
- LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
-}
-
-
-rule bpl-test ( name : sources * : requirements * )
-{
- local s ;
- sources ?= $(name).py $(name).cpp ;
- return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python
- : $(requirements) : $(name) ] ;
-}
-
-
-IMPORT $(__name__) : bpl-test : : bpl-test ;
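-
-# A typical invocation in a test Jamfile (hypothetical name), relying on the
-# default sources <name>.py and <name>.cpp:
-#
-#   bpl-test wrapper_test ;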
diff --git a/jam-files/boost-build/tools/qcc.jam b/jam-files/boost-build/tools/qcc.jam
deleted file mode 100644
index 4f2a4fc1..00000000
--- a/jam-files/boost-build/tools/qcc.jam
+++ /dev/null
@@ -1,236 +0,0 @@
-# Copyright (c) 2001 David Abrahams.
-# Copyright (c) 2002-2003 Rene Rivera.
-# Copyright (c) 2002-2003 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import os ;
-import property ;
-import set ;
-import toolset ;
-import type ;
-import unix ;
-
-feature.extend toolset : qcc ;
-
-toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
-generators.override builtin.lib-generator : qcc.prebuilt ;
-toolset.inherit-flags qcc : unix ;
-toolset.inherit-rules qcc : unix ;
-
-# Initializes the qcc toolset for the given version. If necessary, command may
-# be used to specify where the compiler is located. The parameter 'options' is a
-# space-delimited list of options, each one being specified as
-# <option-name>option-value. Valid option names are: cxxflags, linkflags and
-# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
-# default.
-#
-# Example:
-# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
-#
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters qcc : version $(version) ] ;
- local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
- common.handle-options qcc : $(condition) : $(command) : $(options) ;
-}
-
-
-generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
-generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
-generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
-
-
-# Declare flags for compilation.
-toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
-
-# Declare flags and action for compilation.
-toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
-toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
-
-toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
-toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
-toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
-
-toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
-toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
-toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
-
-toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
-
-toolset.flags qcc.compile OPTIONS <cflags> ;
-toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
-toolset.flags qcc.compile DEFINES <define> ;
-toolset.flags qcc.compile INCLUDES <include> ;
-
-toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
-
-toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
-
-
-rule compile.c++
-{
- # Here we want to raise the template-depth parameter value to something
- # higher than the default value of 17. Note that we could do this using the
- # feature.set-default rule but we do not want to set the default value for
- # all toolsets as well.
- #
- # TODO: This 'modified default' has been inherited from some 'older Boost
- # Build implementation' and has most likely been added to make some Boost
- # library parts compile correctly. We should see what exactly prompted this
- # and whether we can get around the problem more locally.
- local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
- if ! $(template-depth)
- {
- TEMPLATE_DEPTH on $(1) = 128 ;
- }
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.asm
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-
-# A class that checks that we do not try to use the <runtime-link>static
-# property while creating or using a shared library, since this is not
-# supported by qcc/libc.
-#
-class qcc-linking-generator : unix-linking-generator
-{
- rule generated-targets ( sources + : property-set : project name ? )
- {
- if <runtime-link>static in [ $(property-set).raw ]
- {
- local m ;
- if [ id ] = "qcc.link.dll"
- {
-                m = "on qcc, DLL can't be built with <runtime-link>static" ;
- }
- if ! $(m)
- {
- for local s in $(sources)
- {
- local type = [ $(s).type ] ;
- if $(type) && [ type.is-derived $(type) SHARED_LIB ]
- {
-                        m = "on qcc, using DLLs together with the <runtime-link>static option is not possible" ;
- }
- }
- }
- if $(m)
- {
- errors.user-error $(m) : "It is suggested to use"
- "<runtime-link>static together with <link>static." ;
- }
- }
-
- return [ unix-linking-generator.generated-targets
- $(sources) : $(property-set) : $(project) $(name) ] ;
- }
-}
-
-generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
- : <toolset>qcc ] ;
-
-generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
- : SHARED_LIB : <toolset>qcc ] ;
-
-generators.override qcc.prebuilt : builtin.prebuilt ;
-generators.override qcc.searched-lib-generator : searched-lib-generator ;
-
-
-# Declare flags for linking.
-# First, the common flags.
-toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
-toolset.flags qcc.link OPTIONS <profiling>on : -p ;
-toolset.flags qcc.link OPTIONS <linkflags> ;
-toolset.flags qcc.link LINKPATH <library-path> ;
-toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
-toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
-toolset.flags qcc.link LIBRARIES <library-file> ;
-
-toolset.flags qcc.link FINDLIBS-SA : m ;
-
-# For <runtime-link>static we made sure there are no dynamic libraries in the
-# link.
-toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
-
-# Assuming this is just like with gcc.
-toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
-toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
-
-
-# Declare actions for linking.
-#
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since running N links in
-    # parallel is just slower. For now, serialize only qcc links, although it
-    # might be a good idea to serialize all links.
- JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
-}
-
-
-# Always remove archive and start again. Here is the rationale from Andre Hentz:
-# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
-# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
-# After some debugging I traced it back to the fact that a1.o was *still* in
-# liba.a
-RM = [ common.rm-command ] ;
-if [ os.name ] = NT
-{
- RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
-}
-
-
-# Declare the action for creating static libraries. The 'r' letter means to add
-# files to the archive with replacement. Since we remove the archive, we do not
-# care about replacement, but there is no option to "add without replacement".
-# The 'c' letter suppresses warnings in case the archive does not exist yet.
-# That warning is produced only on some platforms, for whatever reason.
-#
-actions piecemeal archive
-{
- $(RM) "$(<)"
- ar rc "$(<)" "$(>)"
-}
-
-
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
-}
-
-
-# Differ from 'link' above only by -shared.
-#
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
-}
diff --git a/jam-files/boost-build/tools/qt.jam b/jam-files/boost-build/tools/qt.jam
deleted file mode 100644
index 8aa7ca26..00000000
--- a/jam-files/boost-build/tools/qt.jam
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2006 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Forwarding toolset file for the Qt GUI library. Forwards to the toolset file
-# for the current version of Qt.
-
-import qt4 ;
-
-rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
-{
- qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ;
-}
-
-
diff --git a/jam-files/boost-build/tools/qt3.jam b/jam-files/boost-build/tools/qt3.jam
deleted file mode 100644
index f82cf0ac..00000000
--- a/jam-files/boost-build/tools/qt3.jam
+++ /dev/null
@@ -1,209 +0,0 @@
-# Copyright 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for the Qt GUI library version 3
-# (http://www.trolltech.com/products/qt3/index.html).
-# For new developments, it is recommended to use Qt4 via the qt4 Boost.Build
-# module.
-
-import modules ;
-import feature ;
-import errors ;
-import type ;
-import "class" : new ;
-import generators ;
-import project ;
-import toolset : flags ;
-
-# Convert this module into a project, so that we can declare targets here.
-project.initialize $(__name__) ;
-project qt3 ;
-
-
-# Initializes the QT support module. The 'prefix' parameter tells where QT is
-# installed. When not given, the QTDIR environment variable should be set.
-#
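-# Example (hypothetical installation prefix):
-#
-#   using qt3 : /usr/local/qt-3.3 ;
-#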
-rule init ( prefix ? )
-{
- if ! $(prefix)
- {
- prefix = [ modules.peek : QTDIR ] ;
- if ! $(prefix)
- {
- errors.error
- "QT installation prefix not given and QTDIR variable is empty" ;
- }
- }
-
- if $(.initialized)
- {
- if $(prefix) != $(.prefix)
- {
- errors.error
-                "Attempt to reinitialize QT with a different installation prefix" ;
- }
- }
- else
- {
- .initialized = true ;
- .prefix = $(prefix) ;
-
- generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ;
- # Note: the OBJ target type here is fake, take a look at
- # qt4.jam/uic-h-generator for explanations that apply in this case as
- # well.
- generators.register [ new moc-h-generator-qt3
- qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ;
-
- # The UI type is defined in types/qt.jam, and UIC_H is only used in
- # qt.jam, but not in qt4.jam, so define it here.
- type.register UIC_H : : H ;
-
- generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ;
-
- # The following generator is used to convert UI files to CPP. It creates
- # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also
- # returns UIC_H target, so that it can be mocced.
- class qt::uic-cpp-generator : generator
- {
- rule __init__ ( )
- {
- generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ;
- }
-
- rule run ( project name ? : properties * : sources + )
- {
- # Consider this:
- # obj test : test_a.cpp : <optimization>off ;
- #
- # This generator will somehow be called in this case, and,
- # will fail -- which is okay. However, if there are <library>
- # properties they will be converted to sources, so the size of
- # 'sources' will be more than 1. In this case, the base generator
- # will just crash -- and that's not good. Just use a quick test
- # here.
-
- local result ;
- if ! $(sources[2])
- {
- # Construct CPP as usual
- result = [ generator.run $(project) $(name)
- : $(properties) : $(sources) ] ;
-
- # If OK, process UIC_H with moc. It's pretty clear that
- # the object generated with UIC will have Q_OBJECT macro.
- if $(result)
- {
- local action = [ $(result[1]).action ] ;
- local sources = [ $(action).sources ] ;
- local mocced = [ generators.construct $(project) $(name)
- : CPP : $(properties) : $(sources[2]) ] ;
- result += $(mocced[2-]) ;
- }
- }
-
- return $(result) ;
- }
- }
-
- generators.register [ new qt::uic-cpp-generator ] ;
-
- # Finally, declare prebuilt target for QT library.
- local usage-requirements =
- <include>$(.prefix)/include
- <dll-path>$(.prefix)/lib
- <library-path>$(.prefix)/lib
- <allow>qt3
- ;
- lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ;
- lib qt : : <name>qt <threading>single : : $(usage-requirements) ;
- }
-}
-
-class moc-h-generator-qt3 : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
- {
- name = [ $(sources[1]).name ] ;
- name = $(name:B) ;
-
- local a = [ new action $(sources[1]) : qt3.moc.cpp :
- $(property-set) ] ;
-
- local target = [
- new file-target $(name) : MOC : $(project) : $(a) ] ;
-
- local r = [ virtual-target.register $(target) ] ;
-
- # Since this generator will return a H target, the linking generator
- # won't use it at all, and won't set any dependency on it. However,
- # we need the target to be seen by bjam, so that the dependency from
- # sources to this generated header is detected -- if Jam does not
- # know about this target, it won't do anything.
- DEPENDS all : [ $(r).actualize ] ;
-
- return $(r) ;
- }
- }
-}
-
-
-# Query the installation directory. This is needed in at least two scenarios.
-# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
-# plugins to the Qt-Tree.
-#
-rule directory
-{
- return $(.prefix) ;
-}
-
-# -f forces moc to include the processed source file. Without it, it would think
-# that .qpp is not a header and would not include it from the generated file.
-#
-actions moc
-{
- $(.prefix)/bin/moc -f $(>) -o $(<)
-}
-
-# When moccing .cpp files, we don't need -f, otherwise generated code will
-# include .cpp and we'll get duplicated symbols.
-#
-actions moc.cpp
-{
- $(.prefix)/bin/moc $(>) -o $(<)
-}
-
-
-space = " " ;
-
-# Sometimes it is necessary to make 'plugins' available during uic invocation.
-# To help with this, we add the paths of all dependency libraries to the uic
-# command line. The intention is that it is possible to write
-#
-# exe a : ... a.ui ... : <uses>some_plugin ;
-#
-# and have everything work. We'd add quite a bunch of unrelated paths but it
-# won't hurt.
-#
-flags qt3.uic-h LIBRARY_PATH <xdll-path> ;
-actions uic-h
-{
- $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH)
-}
-
-
-flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ;
-# The second target is the uic-generated header name. It is placed in the build
-# directory, but we want to include it using only its basename.
-actions uic-cpp
-{
- $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH)
-}
diff --git a/jam-files/boost-build/tools/qt4.jam b/jam-files/boost-build/tools/qt4.jam
deleted file mode 100644
index 71d1b762..00000000
--- a/jam-files/boost-build/tools/qt4.jam
+++ /dev/null
@@ -1,724 +0,0 @@
-# Copyright 2002-2006 Vladimir Prus
-# Copyright 2005 Alo Sarv
-# Copyright 2005-2009 Juergen Hunold
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Qt4 library support module
-#
-# The module attempts to auto-detect QT installation location from QTDIR
-# environment variable; failing that, installation location can be passed as
-# argument:
-#
-# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
-#
-# The module supports code generation from .ui and .qrc files, as well as
-# running the moc preprocessor on headers. Note that you must list all your
-# moc-able headers in sources.
-#
-# Example:
-#
-# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
-# /qt4//QtGui /qt4//QtNetwork ;
-#
-# It's also possible to run moc on cpp sources:
-#
-# import cast ;
-#
-# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
-#
-# When moccing the source file myapp.cpp, you need to include "myapp.moc" from
-# myapp.cpp. When moccing .h files, the output of moc will be automatically
-# compiled and linked in; you do not need any includes.
-#
-# This is consistent with Qt guidelines:
-# http://doc.trolltech.com/4.0/moc.html
-
-import modules ;
-import feature ;
-import errors ;
-import type ;
-import "class" : new ;
-import generators ;
-import project ;
-import toolset : flags ;
-import os ;
-import virtual-target ;
-import scanner ;
-
-# Qt3Support control feature
-#
-# Qt4's configure builds the Qt4 libraries with Qt3Support by default.
-# Autodetection of this is missing, so we disable Qt3Support by default.
-# This prevents the user from inadvertently using a deprecated API.
-#
-# The Qt3Support library can be activated by adding
-# "<qt3support>on" to the requirements.
-#
-# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
-# to get warnings about deprecated Qt3 support functions and classes.
-# Files ported by the "qt3to4" conversion tool contain _tons_ of
-# warnings, so this define is not set as default.
-#
-# Todo: Detect Qt3Support from Qt's configure data.
-# Or add more auto-configuration (like python).
-feature.feature qt3support : off on : propagated link-incompatible ;
-
-# The Qt version used for requirements
-# Valid values are <qt>4.4 or <qt>4.5.0.
-# Auto-detection via qmake sets '<qt>major.minor.patch'
-feature.feature qt : : propagated ;
-
-project.initialize $(__name__) ;
-project qt ;
-
-# Save the project so that we tolerate 'import + using' combo.
-.project = [ project.current ] ;
-
-# Helper utils for easy debug output
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = TRUE ;
-}
-
-local rule debug-message ( message * )
-{
- if $(.debug-configuration) = TRUE
- {
- ECHO notice: [qt4-cfg] $(message) ;
- }
-}
-
-# Capture qmake output line by line
-local rule read-output ( content )
-{
- local lines ;
- local nl = "
-" ;
- local << = "([^$(nl)]*)[$(nl)](.*)" ;
- local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
- while $(line+)
- {
- lines += $(line+[1]) ;
- line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
- }
- return $(lines) ;
-}
-
-# Capture Qt version from qmake
-local rule check-version ( bin_prefix )
-{
- full-cmd = $(bin_prefix)"/qmake -v" ;
- debug-message Running '$(full-cmd)' ;
- local output = [ SHELL $(full-cmd) ] ;
- for line in [ read-output $(output) ]
- {
- # Parse the output to get all the results.
- if [ MATCH "QMake" : $(line) ]
- {
- # Skip first line of output
- }
- else
- {
- temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
- }
- }
- return $(temp) ;
-}
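-
-# For reference, 'qmake -v' typically prints something like (the exact format
-# may vary between Qt releases):
-#
-#   QMake version 2.01a
-#   Using Qt version 4.6.2 in /usr/lib/qt4/lib
-#
-# from which the rule above captures "4 6 2".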
-
-# Validate the version string and extract the major/minor part we care about.
-#
-local rule split-version ( version )
-{
- local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
- if ! $(major-minor[2]) || $(major-minor[3])
- {
- ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
-
- # Add a zero to account for the missing digit if necessary.
- major-minor += 0 ;
- }
-
- return $(major-minor[1]) $(major-minor[2]) ;
-}
-
-# Initialize the QT support module.
-# Parameters:
-# - 'prefix' parameter tells where Qt is installed.
-# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
-# - 'full_inc' optional full path to Qt top-level include directory
-# - 'full_lib' optional full path to Qt library directory
-# - 'version' optional version of Qt, else autodetected via 'qmake -v'
-# - 'condition' optional requirements
-rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
-{
- project.push-current $(.project) ;
-
- debug-message "==== Configuring Qt ... ====" ;
-    for local v in prefix full_bin full_inc full_lib version condition
- {
- if $($(v))
- {
- debug-message " user-specified "$(v): '$($(v))' ;
- }
- }
-
- # Needed as default value
- .prefix = $(prefix) ;
-
-    # Pre-build the paths so that changes can be detected on reinitialization.
- local inc_prefix lib_prefix bin_prefix ;
- if $(full_inc)
- {
- inc_prefix = $(full_inc) ;
- }
- else
- {
- inc_prefix = $(prefix)/include ;
- }
- if $(full_lib)
- {
- lib_prefix = $(full_lib) ;
- }
- else
- {
- lib_prefix = $(prefix)/lib ;
- }
- if $(full_bin)
- {
- bin_prefix = $(full_bin) ;
- }
- else
- {
- bin_prefix = $(prefix)/bin ;
- }
-
- # Globally needed variables
- .incprefix = $(inc_prefix) ;
- .libprefix = $(lib_prefix) ;
- .binprefix = $(bin_prefix) ;
-
- if ! $(.initialized)
- {
- # Make sure this is initialised only once
- .initialized = true ;
-
- # Generates cpp files from header files using "moc" tool
- generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
-
- # The OBJ result type is a fake, 'H' will be really produced. See
- # comments on the generator class, defined below the 'init' function.
- generators.register [ new uic-generator qt4.uic : UI : OBJ :
- <allow>qt4 ] ;
-
- # The OBJ result type is a fake here too.
- generators.register [ new moc-h-generator
- qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
-
- generators.register [ new moc-inc-generator
- qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
-
- # Generates .cpp files from .qrc files.
- generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ;
-
- # dependency scanner for wrapped files.
- type.set-scanner QRC : qrc-scanner ;
-
-        # Save the value of the first occurring prefix.
- .PREFIX = $(prefix) ;
- }
-
- if $(version)
- {
- major-minor = [ split-version $(version) ] ;
- version = $(major-minor:J=.) ;
- }
- else
- {
- version = [ check-version $(bin_prefix) ] ;
- if $(version)
- {
- version = $(version:J=.) ;
- }
- debug-message Detected version '$(version)' ;
- }
-
- local target-requirements = $(condition) ;
-
- # Add the version, if any, to the target requirements.
- if $(version)
- {
- if ! $(version) in [ feature.values qt ]
- {
- feature.extend qt : $(version) ;
- }
- target-requirements += <qt>$(version:E=default) ;
- }
-
- local target-os = [ feature.get-values target-os : $(condition) ] ;
- if ! $(target-os)
- {
- target-os ?= [ feature.defaults target-os ] ;
- target-os = $(target-os:G=) ;
- target-requirements += <target-os>$(target-os) ;
- }
-
- # Build exact requirements for the tools
- local tools-requirements = $(target-requirements:J=/) ;
-
- debug-message "Details of this Qt configuration:" ;
- debug-message " prefix: " '$(prefix:E=<empty>)' ;
- debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
- debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
- debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
- debug-message " target requirements:" '$(target-requirements)' ;
- debug-message " tool requirements: " '$(tools-requirements)' ;
-
- # setup the paths for the tools
- toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
- toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
- toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
-
- # TODO: 2009-02-12: Better support for directories
- # Most likely needed are separate getters for: include,libraries,binaries and sources.
- toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
-
- # Test for a buildable Qt.
- if [ glob $(.prefix)/Jamroot ]
- {
-        .bjam-qt = true ;
-
- # this will declare QtCore (and qtmain on <target-os>windows)
- add-shared-library QtCore ;
- }
- else
- # Setup common pre-built Qt.
- # Special setup for QtCore on which everything depends
- {
- local usage-requirements =
- <include>$(.incprefix)
- <library-path>$(.libprefix)
- <dll-path>$(.libprefix)
- <threading>multi
- <allow>qt4 ;
-
- local suffix ;
-
- # Since Qt-4.2, debug versions on unix have to be built
- # separately and therefore have no suffix.
- .suffix_version = "" ;
- .suffix_debug = "" ;
-
- # Control flag for auto-configuration of the debug libraries.
- # This setup requires Qt 'configure -debug-and-release'.
- # Only available on some platforms.
- # ToDo: 2009-02-12: Maybe throw this away and
- # require separate setup with <variant>debug as condition.
- .have_separate_debug = FALSE ;
-
- # Setup other platforms
- if $(target-os) in windows cygwin
- {
- .have_separate_debug = TRUE ;
-
- # On NT, the libs have "4" suffix, and "d" suffix in debug builds.
- .suffix_version = "4" ;
- .suffix_debug = "d" ;
-
- # On Windows we must link against the qtmain library
- lib qtmain
- : # sources
- : # requirements
- <name>qtmain$(.suffix_debug)
- <variant>debug
- $(target-requirements)
- ;
-
- lib qtmain
- : # sources
- : # requirements
- <name>qtmain
- $(target-requirements)
- ;
- }
- else if $(target-os) = darwin
- {
- # On MacOS X, both debug and release libraries are available.
- .suffix_debug = "_debug" ;
-
- .have_separate_debug = TRUE ;
-
- alias qtmain ;
- }
- else
- {
- alias qtmain : : $(target-requirements) ;
- }
-
- lib QtCore : qtmain
- : # requirements
- <name>QtCore$(.suffix_version)
- $(target-requirements)
- : # default-build
- : # usage-requirements
- <define>QT_CORE_LIB
- <define>QT_NO_DEBUG
- <include>$(.incprefix)/QtCore
- $(usage-requirements)
- ;
-
- if $(.have_separate_debug) = TRUE
- {
- debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
-
- lib QtCore : $(main)
- : # requirements
- <name>QtCore$(.suffix_debug)$(.suffix_version)
- <variant>debug
- $(target-requirements)
- : # default-build
- : # usage-requirements
- <define>QT_CORE_LIB
- <include>$(.incprefix)/QtCore
- $(usage-requirements)
- ;
- }
- }
-
-    # Initializing the remaining libraries follows a canonical pattern.
-    # Parameters: 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include'.
-    # 'include' is only needed for non-canonical include paths.
- add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
- add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
- add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
- add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
-
- add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
- : QT_QT3SUPPORT_LIB QT3_SUPPORT
- : <qt3support>on $(target-requirements) ;
-
- # Dummy target to enable "<qt3support>off" and
- # "<library>/qt//Qt3Support" at the same time. This enables quick
- # switching from one to the other for test/porting purposes.
- alias Qt3Support : : <qt3support>off $(target-requirements) ;
-
- # OpenGl Support
- add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
-
- # SVG-Support (Qt 4.1)
- add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
-
- # Test-Support (Qt 4.1)
- add-shared-library QtTest : QtCore : : $(target-requirements) ;
-
- # Qt designer library
- add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
- add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ;
-
- # Support for dynamic Widgets (Qt 4.1)
- add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
-
- # DBus-Support (Qt 4.2)
- add-shared-library QtDBus : QtXml : : $(target-requirements) ;
-
- # Script-Engine (Qt 4.3)
- add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
-
- # Tools for the Script-Engine (Qt 4.5)
- add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
-
- # WebKit (Qt 4.4)
- add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
-
- # Phonon Multimedia (Qt 4.4)
- add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
-
- # Multimedia engine (Qt 4.6)
- add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
-
- # XmlPatterns-Engine (Qt 4.4)
- add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
-
- # Help-Engine (Qt 4.4)
- add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
-    add-shared-library QtCLucene : QtCore QtSql QtXml : : $(target-requirements) ;
-
- # QML-Engine (Qt 4.7)
- add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ;
-
- # AssistantClient Support
- # Compat library removed in 4.7.0
- # Pre-4.4 help system, use QtHelp for new programs
- if $(version) < "4.7"
- {
- add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
- }
- debug-message "==== Configured Qt-$(version) ====" ;
-
- project.pop-current ;
-}
-
-rule initialized ( )
-{
- return $(.initialized) ;
-}
-
-
-
-# This custom generator is needed because in QT4, UI files are translated only
-# into H files, and no C++ files are created. Further, the H files need not be
-# passed via MOC. The header is used only via inclusion. If we define a standard
-# UI -> H generator, Boost.Build will run MOC on H, and then compile the
-# resulting cpp. It will give a warning, since output from moc will be empty.
-#
-# This generator is declared with a UI -> OBJ signature, so it gets invoked when
-# linking generator tries to convert sources to OBJ, but it produces target of
-# type H. This is non-standard, but allowed. That header won't be mocced.
-#
-class uic-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(name)
- {
-            name = [ $(sources[1]).name ] ;
- name = $(name:B) ;
- }
-
- local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
-
- # The 'ui_' prefix is to match qmake's default behavior.
- local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
-
- local r = [ virtual-target.register $(target) ] ;
-
- # Since this generator will return a H target, the linking generator
- # won't use it at all, and won't set any dependency on it. However, we
- # need the target to be seen by bjam, so that dependency from sources to
- # this generated header is detected -- if jam does not know about this
- # target, it won't do anything.
- DEPENDS all : [ $(r).actualize ] ;
-
- return $(r) ;
- }
-}
-
-
-class moc-h-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
- {
- name = [ $(sources[0]).name ] ;
- name = $(name:B) ;
-
- local a = [ new action $(sources[1]) : qt4.moc.inc :
- $(property-set) ] ;
-
- local target = [ new file-target $(name) : MOC : $(project) : $(a)
- ] ;
-
- local r = [ virtual-target.register $(target) ] ;
-
-            # Since this generator returns a MOC target rather than a plain H,
-            # the linking generator will not use it directly and will not set
-            # any dependency on it. However, we need the target to be seen by
-            # bjam, so that the dependency from the sources to this generated
-            # file is detected -- if jam does not know about this target, it
-            # will not do anything.
- DEPENDS all : [ $(r).actualize ] ;
-
- return $(r) ;
- }
- }
-}
-
-
-class moc-inc-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
- {
- name = [ $(sources[0]).name ] ;
- name = $(name:B) ;
-
- local a = [ new action $(sources[1]) : qt4.moc.inc :
- $(property-set) ] ;
-
- local target = [ new file-target moc_$(name) : CPP : $(project) :
- $(a) ] ;
-
-            # Even though this generator returns a CPP target, we still need
-            # the target to be seen by bjam, so that the dependency from the
-            # sources to the generated file is detected -- if jam does not
-            # know about this target, it will not do anything.
- DEPENDS all : [ $(target).actualize ] ;
-
- return [ virtual-target.register $(target) ] ;
- }
- }
-}
-
-
-# Query the installation directory. This is needed in at least two scenarios.
-# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
-# plugins to the Qt-Tree.
-#
-rule directory
-{
- return $(.PREFIX) ;
-}
-
-# Add a shared Qt library.
-rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
-{
- add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
-}
-
-# Add a static Qt library.
-rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
-{
- add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
-}
-
-# Add a Qt library.
-# Static libs are unversioned, whereas shared libs carry the major version
-# number as a suffix. Creates both release and debug variants on platforms
-# where Qt's configure enables both.
-# Parameters:
-#   - lib-name       Qt library name
-#   - version        Qt major number used as the shared library suffix (e.g. QtCore4.so)
-#   - depends-on     other Qt libraries this one depends on
-#   - usage-defines  defines set by qmake; set them when using this library
-#   - requirements   additional requirements
-#   - include        non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
-rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
-{
- if $(.bjam-qt)
- {
-        # Import the Qt module.
-        # Everything will be set up there.
- alias $(lib-name)
- : $(.prefix)//$(lib-name)
- :
- :
- : <allow>qt4 ;
- }
- else
- {
- local real_include ;
- real_include ?= $(include) ;
- real_include ?= $(lib-name) ;
-
- lib $(lib-name)
- : # sources
- $(depends-on)
- : # requirements
- <name>$(lib-name)$(version)
- $(requirements)
- : # default-build
- : # usage-requirements
- <define>$(usage-defines)
- <include>$(.incprefix)/$(real_include)
- ;
-
- if $(.have_separate_debug) = TRUE
- {
- lib $(lib-name)
- : # sources
- $(depends-on)
- : # requirements
- <name>$(lib-name)$(.suffix_debug)$(version)
- $(requirements)
- <variant>debug
- : # default-build
- : # usage-requirements
- <define>$(usage-defines)
- <include>$(.incprefix)/$(real_include)
- ;
- }
- }
-
- # Make library explicit so that a simple <use>qt4 will not bring in everything.
- # And some components like QtDBus/Phonon may not be available on all platforms.
- explicit $(lib-name) ;
-}
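For illustration, the call add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) from the list above would, on the non-bjam-qt branch, expand to roughly the lib declaration sketched below. The "4" suffix and the include prefix are assumed values for a typical Qt 4.x installation, and $(target-requirements) is omitted for brevity:

    lib QtScript
        : # sources
          QtGui QtXml
        : # requirements
          <name>QtScript4
        : # default-build
        : # usage-requirements
          <define>QT_SCRIPT_LIB
          <include>/path/to/qt4/include/QtScript
        ;
    explicit QtScript ;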
-
-# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
-# The exact match is the last one.
-
-# Get <include> and <defines> from current toolset.
-flags qt4.moc INCLUDES <include> ;
-flags qt4.moc DEFINES <define> ;
-
-# need a newline for expansion of DEFINES and INCLUDES in the response file.
-.nl = "
-" ;
-
-# Processes headers to create Qt MetaObject information. Qt4's moc has its own
-# C++ parser, so pass INCLUDES and DEFINES to it.
-# We use a response file with one INCLUDE/DEFINE per line.
-#
-actions moc
-{
- $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
-}
-
-# When moccing files for include only, we don't need -f, otherwise the generated
-# code will include the .cpp and we'll get duplicated symbols.
-#
-actions moc.inc
-{
- $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
-}
-
-
-# Generates source files from resource files.
-#
-actions rcc
-{
- $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<)
-}
-
-
-# Generates user-interface source from .ui files.
-#
-actions uic
-{
- $(.BINPREFIX[-1])/uic $(>) -o $(<)
-}
-
-
-# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
-# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
-# detailed documentation of the Qt Resource System.
-#
-class qrc-scanner : common-scanner
-{
- rule pattern ( )
- {
- return "<file.*>(.*)</file>" ;
- }
-}
-
-
-# Wrapped files are "included".
-scanner.register qrc-scanner : include ;
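Taken together, the declarations and generators above let a Jamfile consume Qt almost transparently once the toolset is initialized. A minimal, hypothetical usage sketch follows; the installation prefix, the /qt project id, and the target names are assumptions rather than something defined in this file:

    # user-config.jam
    using qt4 : /path/to/qt4 ;

    # Jamfile.v2 -- hello.ui is run through uic and resources.qrc through rcc
    # by the generators registered above; the Qt libraries are the explicit
    # targets declared via add-shared-library.
    exe hello
        : hello.cpp hello.ui resources.qrc
          /qt//QtGui /qt//QtCore
        ;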
diff --git a/jam-files/boost-build/tools/quickbook-config.jam b/jam-files/boost-build/tools/quickbook-config.jam
deleted file mode 100644
index e983a78a..00000000
--- a/jam-files/boost-build/tools/quickbook-config.jam
+++ /dev/null
@@ -1,44 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for the QuickBook tool. To use, just import this module.
-
-import os ;
-import toolset : using ;
-
-if [ os.name ] = NT
-{
- local boost-dir = ;
- for local R in snapshot cvs 1.33.0
- {
- boost-dir += [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)"
- : "InstallRoot" ] ;
- }
- local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ;
- quickbook-path = $(quickbook-path[1]) ;
-
- if $(quickbook-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using quickbook ":" $(quickbook-path) ;
- }
- using quickbook : $(quickbook-path) ;
- }
-}
-else
-{
- local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ;
- quickbook-path = $(quickbook-path[1]) ;
-
- if $(quickbook-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using quickbook ":" $(quickbook-path) ;
- }
- using quickbook : $(quickbook-path) ;
- }
-}
diff --git a/jam-files/boost-build/tools/quickbook.jam b/jam-files/boost-build/tools/quickbook.jam
deleted file mode 100644
index 6de2d42f..00000000
--- a/jam-files/boost-build/tools/quickbook.jam
+++ /dev/null
@@ -1,361 +0,0 @@
-#
-# Copyright (c) 2005 João Abecasis
-# Copyright (c) 2005 Vladimir Prus
-# Copyright (c) 2006 Rene Rivera
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-#
-
-# This toolset defines a generator to translate QuickBook to BoostBook. It can
-# be used to generate nice (!) user documentation in different formats
-# (pdf/html/...), from a single text file with simple markup.
-#
-# The toolset defines the QUICKBOOK type (file extension 'qbk') and
-# a QUICKBOOK to XML (BOOSTBOOK) generator.
-#
-#
-# ===========================================================================
-# Q & A
-# ===========================================================================
-#
-# If you don't know what this is all about, some Q & A will hopefully get you
-# up to speed with QuickBook and this toolset.
-#
-#
-# What is QuickBook ?
-#
-# QuickBook is a WikiWiki style documentation tool geared towards C++
-# documentation using simple rules and markup for simple formatting tasks.
-# QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook
-# documents are simple text files. A single QuickBook document can
-# generate a fully linked set of nice HTML and PostScript/PDF documents
-# complete with images and syntax-colorized source code.
-#
-#
-# Where can I get QuickBook ?
-#
-#    QuickBook can be found in Boost's repository, under the tools/quickbook
-#    directory. It was added there in January 2005, some time after the
-#    release of Boost v1.32.0, and has been an integral part of the Boost
-#    distribution since v1.33.
-#
-# Here's a link to the SVN repository:
-# https://svn.boost.org/svn/boost/trunk/tools/quickbook
-#
-# And to QuickBook's QuickBook-generated docs:
-# http://www.boost.org/doc/libs/release/tools/quickbook/index.html
-#
-#
-# How do I use QuickBook and this toolset in my projects ?
-#
-# The minimal example is:
-#
-# using boostbook ;
-# import quickbook ;
-#
-# boostbook my_docs : my_docs_source.qbk ;
-#
-# where my_docs is a target name and my_docs_source.qbk is a QuickBook
-# file. The documentation format to be generated is determined by the
-# boostbook toolset. By default html documentation should be generated,
-# but you should check BoostBook's docs to be sure.
-#
-#
-# What do I need ?
-#
-# You should start by setting up the BoostBook toolset. Please refer to
-# boostbook.jam and the BoostBook documentation for information on how to
-# do this.
-#
-# A QuickBook executable is also needed. The toolset will generate this
-# executable if it can find the QuickBook sources. The following
-# directories will be searched:
-#
-# BOOST_ROOT/tools/quickbook/
-# BOOST_BUILD_PATH/../../quickbook/
-#
-# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables)
-#
-# If QuickBook sources are not found the toolset will then try to use
-# the shell command 'quickbook'.
-#
-#
-# How do I provide a custom QuickBook executable ?
-#
-# You may put the following in your user-config.jam or site-config.jam:
-#
-# using quickbook : /path/to/quickbook ;
-#
-# or, if 'quickbook' can be found in your PATH,
-#
-# using quickbook : quickbook ;
-#
-#
-# For convenience three alternatives are tried to get a QuickBook executable:
-#
-#   1. If the user points us to a QuickBook executable, that is used.
-#
-# 2. Otherwise, we search for the QuickBook sources and compile QuickBook
-# using the default toolset.
-#
-# 3. As a last resort, we rely on the shell for finding 'quickbook'.
-#
-
-import boostbook ;
-import "class" : new ;
-import feature ;
-import generators ;
-import toolset ;
-import type ;
-import scanner ;
-import project ;
-import targets ;
-import build-system ;
-import path ;
-import common ;
-import errors ;
-
-# The one and only QUICKBOOK type!
-type.register QUICKBOOK : qbk ;
-
-# <quickbook-binary> shell command to run QuickBook
-# <quickbook-binary-dependencies> targets to build QuickBook from sources.
-feature.feature <quickbook-binary> : : free ;
-feature.feature <quickbook-binary-dependencies> : : free dependency ;
-feature.feature <quickbook-define> : : free ;
-feature.feature <quickbook-indent> : : free ;
-feature.feature <quickbook-line-width> : : free ;
-
-
-# quickbook-binary-generator handles generation of the QuickBook executable, by
-# marking it as a dependency for QuickBook docs.
-#
-# If the user supplied a QuickBook command, that will be used.
-#
-# Otherwise we search some sensible places for the QuickBook sources and compile
-# from scratch using the default toolset.
-#
-# As a last resort we rely on the shell to find 'quickbook'.
-#
-class quickbook-binary-generator : generator
-{
- import modules path targets quickbook ;
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- quickbook.freeze-config ;
- # QuickBook invocation command and dependencies.
- local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ;
- local quickbook-binary-dependencies ;
-
- if ! $(quickbook-binary)
- {
- # If the QuickBook source directory was found, mark its main target
- # as a dependency for the current project. Otherwise, try to find
- # 'quickbook' in user's PATH
- local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ;
- if $(quickbook-dir)
- {
- # Get the main-target in QuickBook directory.
- local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ;
-
-                # The first element is the actual target; the second is any
-                # properties found in the target-id. We do not care about the
-                # latter since we passed the id ourselves.
- quickbook-main-target =
- [ $(quickbook-main-target[1]).main-target quickbook ] ;
-
- quickbook-binary-dependencies =
- [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ;
-
- # Ignore usage-requirements returned as first element.
- quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ;
-
- # Some toolsets generate extra targets (e.g. RSP). We must mark
- # all targets as dependencies for the project, but we will only
- # use the EXE target for quickbook-to-boostbook translation.
- for local target in $(quickbook-binary-dependencies)
- {
- if [ $(target).type ] = EXE
- {
- quickbook-binary =
- [ path.native
- [ path.join
- [ $(target).path ]
- [ $(target).name ]
- ]
- ] ;
- }
- }
- }
- }
-
- # Add $(quickbook-binary-dependencies) as a dependency of the current
- # project and set it as the <quickbook-binary> feature for the
- # quickbook-to-boostbook rule, below.
- property-set = [ $(property-set).add-raw
- <dependency>$(quickbook-binary-dependencies)
- <quickbook-binary>$(quickbook-binary)
- <quickbook-binary-dependencies>$(quickbook-binary-dependencies)
- ] ;
-
- return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ;
- }
-}
-
-
-# Define a scanner for tracking QBK include dependencies.
-#
-class qbk-scanner : common-scanner
-{
- rule pattern ( )
- {
- return "\\[[ ]*include[ ]+([^]]+)\\]"
- "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]"
- "\\[[ ]*import[ ]+([^]]+)\\]" ;
- }
-}
-
-
-scanner.register qbk-scanner : include ;
-
-type.set-scanner QUICKBOOK : qbk-scanner ;
-
-
-# Initialization of toolset.
-#
-# Parameters:
-# command ? -> path to QuickBook executable.
-#
-# When no command is supplied, the toolset will search for the QuickBook
-# directory and compile the executable from source. If that fails, we still
-# search the PATH for 'quickbook'.
-#
-rule init (
- command ? # path to the QuickBook executable.
- )
-{
- if $(command)
- {
- if $(.config-frozen)
- {
- errors.user-error "quickbook: configuration cannot be changed after it has been used." ;
- }
- .command = $(command) ;
- }
-}
-
-rule freeze-config ( )
-{
- if ! $(.config-frozen)
- {
- .config-frozen = true ;
-
- # QuickBook invocation command and dependencies.
-
- .quickbook-binary = $(.command) ;
-
- if $(.quickbook-binary)
- {
- # Use user-supplied command.
- .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ;
- }
- else
- {
- # Search for QuickBook sources in sensible places, like
- # $(BOOST_ROOT)/tools/quickbook
- # $(BOOST_BUILD_PATH)/../../quickbook
-
- # And build quickbook executable from sources.
-
- local boost-root = [ modules.peek : BOOST_ROOT ] ;
- local boost-build-path = [ build-system.location ] ;
-
- if $(boost-root)
- {
- .quickbook-dir += [ path.join $(boost-root) tools ] ;
- }
-
- if $(boost-build-path)
- {
- .quickbook-dir += $(boost-build-path)/../.. ;
- }
-
- .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ;
-
- # If the QuickBook source directory was found, mark its main target
- # as a dependency for the current project. Otherwise, try to find
- # 'quickbook' in user's PATH
- if $(.quickbook-dir)
- {
- .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ;
- }
- else
- {
- ECHO "QuickBook warning: The path to the quickbook executable was" ;
- ECHO " not provided. Additionally, couldn't find QuickBook" ;
- ECHO " sources searching in" ;
- ECHO " * BOOST_ROOT/tools/quickbook" ;
- ECHO " * BOOST_BUILD_PATH/../../quickbook" ;
- ECHO " Will now try to find a precompiled executable by searching" ;
- ECHO " the PATH for 'quickbook'." ;
- ECHO " To disable this warning in the future, or to completely" ;
- ECHO " avoid compilation of quickbook, you can explicitly set the" ;
- ECHO " path to a quickbook executable command in user-config.jam" ;
- ECHO " or site-config.jam with the call" ;
- ECHO " using quickbook : /path/to/quickbook ;" ;
-
- # As a last resort, search for 'quickbook' command in path. Note
- # that even if the 'quickbook' command is not found,
- # get-invocation-command will still return 'quickbook' and might
- # generate an error while generating the virtual-target.
-
- .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ;
- }
- }
- }
-}
-
-
-generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ;
-
-
-# <quickbook-binary> shell command to run QuickBook
-# <quickbook-binary-dependencies> targets to build QuickBook from sources.
-toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ;
-toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ;
-toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ;
-toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ;
-toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ;
-toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ;
-
-
-rule quickbook-to-boostbook ( target : source : properties * )
-{
- # Signal dependency of quickbook sources on <quickbook-binary-dependencies>
- # upon invocation of quickbook-to-boostbook.
- DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ;
-}
-
-
-actions quickbook-to-boostbook
-{
- "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)"
-}
-
-
-# Declare a main target to convert a quickbook source into a boostbook XML file.
-#
-rule to-boostbook ( target-name : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new typed-target $(target-name) : $(project) : XML
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
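Because to-boostbook is declared inside this module rather than imported into every project, a Jamfile would qualify it with the module name. The following is a hedged sketch; the executable path and the target names are placeholders:

    # user-config.jam -- point the toolset at a prebuilt QuickBook binary.
    using quickbook : /usr/local/bin/quickbook ;

    # Jamfile.v2 -- convert a .qbk source into BoostBook XML explicitly and
    # hand the result to a boostbook target.
    using boostbook ;
    import quickbook ;

    quickbook.to-boostbook manual-xml : manual.qbk ;
    boostbook manual : manual-xml ;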
diff --git a/jam-files/boost-build/tools/rc.jam b/jam-files/boost-build/tools/rc.jam
deleted file mode 100644
index 9964d339..00000000
--- a/jam-files/boost-build/tools/rc.jam
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-#
-# Copyright (c) 2006 Rene Rivera.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-import generators ;
-import feature ;
-import errors ;
-import scanner ;
-import toolset : flags ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-type.register RC : rc ;
-
-rule init ( )
-{
-}
-
-# Configures a new resource compilation command specific to a condition,
-# usually a toolset selection condition. The possible options are:
-#
-# * <rc-type>(rc|windres) - Indicates the type of options the command
-# accepts.
-#
-# Even though the arguments are all optional, the command is configured only
-# when a command, a condition, and at minimum the rc-type option are given.
-# This is so that callers don't have to check auto-configuration values before
-# calling this, while still getting build failures when the resource compiler
-# cannot be found.
-#
-rule configure ( command ? : condition ? : options * )
-{
- local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
-
- if $(command) && $(condition) && $(rc-type)
- {
- flags rc.compile.resource .RC $(condition) : $(command) ;
- flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
- flags rc.compile.resource DEFINES <define> ;
- flags rc.compile.resource INCLUDES <include> ;
- if $(.debug-configuration)
- {
- ECHO notice: using rc compiler :: $(condition) :: $(command) ;
- }
- }
-}
-
-rule compile.resource ( target : sources * : properties * )
-{
- local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
- rc-type ?= null ;
- compile.resource.$(rc-type) $(target) : $(sources[1]) ;
-}
-
-actions compile.resource.rc
-{
- "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
-}
-
-actions compile.resource.windres
-{
- "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
-}
-
-actions quietly compile.resource.null
-{
- as /dev/null -o "$(<)"
-}
-
-# Since it is common practice to write
-#   exe hello : hello.cpp hello.rc
-# we change the name of the object created from the RC file to avoid a
-# conflict with hello.cpp.
-# The reason we generate OBJ and not RES is that gcc does not seem to like
-# RES files, but works fine with OBJ.
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
-#
-# Using 'register-c-compiler' adds the build directory to INCLUDES
-generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
-
-# Register scanner for resources
-class res-scanner : scanner
-{
- import regex virtual-target path scanner ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
-
- self.includes = $(includes) ;
- }
-
- rule pattern ( )
- {
- return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
- local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
- local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
-
-        # Icons and other includes may be referenced as
-        #
-        # IDR_MAINFRAME ICON "res\\icon.ico"
-        #
-        # so we have to replace the double backslashes with forward slashes.
- res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
-
-        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
- local g = [ on $(target) return $(HDRGRIST) ] ;
- local b = [ NORMALIZE_PATH $(binding:D) ] ;
-
-        # Attach the binding of the including file to the included targets.
-        # When a target is created directly from a virtual target this extra
-        # information is unnecessary, but in other cases it allows us to
-        # distinguish between two headers of the same name included from
-        # different places.
-        # We do not need this extra information for angle includes, since they
-        # should not depend on the including file (we cannot get a literal "."
-        # into the include path).
- local g2 = $(g)"#"$(b) ;
-
- angle = $(angle:G=$(g)) ;
- quoted = $(quoted:G=$(g2)) ;
- res = $(res:G=$(g2)) ;
-
- local all = $(angle) $(quoted) ;
-
- INCLUDES $(target) : $(all) ;
- DEPENDS $(target) : $(res) ;
- NOCARE $(all) $(res) ;
- SEARCH on $(angle) = $(self.includes:G=) ;
- SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
- SEARCH on $(res) = $(b) $(self.includes:G=) ;
-
-        # Just propagate the current scanner to the includes, in the hope
-        # that the includes do not change scanners.
- scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
- }
-}
-
-scanner.register res-scanner : include ;
-type.set-scanner RC : res-scanner ;
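The configure rule above is normally invoked by a toolset module rather than by end users. As a hedged sketch of both sides (the windres command name and the <toolset>gcc condition are assumptions):

    # In a toolset module: register a windres-style resource compiler.
    import rc ;
    rc.configure x86_64-w64-mingw32-windres : <toolset>gcc : <rc-type>windres ;

    # In a Jamfile: the .rc source is matched by the RC type registered above
    # and compiled into an OBJ(%_res) object that gets linked in.
    exe hello : hello.cpp hello.rc ;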
diff --git a/jam-files/boost-build/tools/rc.py b/jam-files/boost-build/tools/rc.py
deleted file mode 100644
index 0b82d231..00000000
--- a/jam-files/boost-build/tools/rc.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Status: being ported by Steven Watanabe
-# Base revision: 47077
-#
-# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-#
-# Copyright (c) 2006 Rene Rivera.
-#
-# Copyright (c) 2008 Steven Watanabe
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-##import type ;
-##import generators ;
-##import feature ;
-##import errors ;
-##import scanner ;
-##import toolset : flags ;
-
-from b2.build import type, toolset, generators, scanner, feature
-from b2.tools import builtin
-from b2.util import regex
-from b2.util.utility import ungrist
-from b2.build.toolset import flags
-from b2.manager import get_manager
-
-import bjam
-import os
-import re
-
-__debug = None
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
- return __debug
-
-type.register('RC', ['rc'])
-
-def init():
- pass
-
-def configure (command = None, condition = None, options = None):
- """
- Configures a new resource compilation command specific to a condition,
- usually a toolset selection condition. The possible options are:
-
- * <rc-type>(rc|windres) - Indicates the type of options the command
- accepts.
-
-    Even though the arguments are all optional, the command is configured only
-    when a command, a condition, and at minimum the rc-type option are given.
-    This is so that callers don't have to check auto-configuration values
-    before calling this, while still getting build failures when the resource
-    compiler cannot be found.
- """
- rc_type = feature.get_values('<rc-type>', options)
- if rc_type:
- assert(len(rc_type) == 1)
- rc_type = rc_type[0]
-
- if command and condition and rc_type:
- flags('rc.compile.resource', '.RC', condition, command)
- flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower())
- flags('rc.compile.resource', 'DEFINES', [], ['<define>'])
- flags('rc.compile.resource', 'INCLUDES', [], ['<include>'])
- if debug():
- print 'notice: using rc compiler ::', condition, '::', command
-
-engine = get_manager().engine()
-
-class RCAction:
- """Class representing bjam action defined from Python.
- The function must register the action to execute."""
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
- if self.function:
- self.function(targets, sources, property_set)
-
-# FIXME: What is the proper way to dispatch actions?
-def rc_register_action(action_name, function = None):
- global engine
- if engine.actions.has_key(action_name):
-        raise Exception("Bjam action %s is already defined" % action_name)
- engine.actions[action_name] = RCAction(action_name, function)
-
-def rc_compile_resource(targets, sources, properties):
- rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE')
- global engine
- engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties)
-
-rc_register_action('rc.compile.resource', rc_compile_resource)
-
-
-engine.register_action(
- 'rc.compile.resource.rc',
- '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"')
-
-engine.register_action(
- 'rc.compile.resource.windres',
- '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"')
-
-# FIXME: this was originally declared quietly
-engine.register_action(
-    'rc.compile.resource.null',
- 'as /dev/null -o "$(<)"')
-
-# Since it is common practice to write
-#   exe hello : hello.cpp hello.rc
-# we change the name of the object created from the RC file to avoid a
-# conflict with hello.cpp.
-# The reason we generate OBJ and not RES is that gcc does not seem to like
-# RES files, but works fine with OBJ.
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
-#
-# Using 'register-c-compiler' adds the build directory to INCLUDES
-# FIXME: switch to generators
-builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], [])
-
-__angle_include_re = "#include[ ]*<([^<]+)>"
-
-# Register scanner for resources
-class ResScanner(scanner.Scanner):
-
- def __init__(self, includes):
-        scanner.Scanner.__init__(self)
- self.includes = includes
-
- def pattern(self):
- return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
- "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
-
- def process(self, target, matches, binding):
-
- angle = regex.transform(matches, "#include[ ]*<([^<]+)>")
- quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"")
- res = regex.transform(matches,
- "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
- "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])
-
-        # Icons and other includes may be referenced as
-        #
-        # IDR_MAINFRAME ICON "res\\icon.ico"
-        #
-        # so we have to replace the double backslashes with forward slashes.
- res = [ re.sub(r'\\\\', '/', match) for match in res ]
-
-        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
- g = bjam.call('get-target-variable', target, 'HDRGRIST')
-        b = os.path.normpath(os.path.dirname(binding))
-
-        # Attach the binding of the including file to the included targets.
-        # When a target is created directly from a virtual target this extra
-        # information is unnecessary, but in other cases it allows us to
-        # distinguish between two headers of the same name included from
-        # different places.
-        # We do not need this extra information for angle includes, since they
-        # should not depend on the including file (we cannot get a literal "."
-        # into the include path).
- g2 = g + "#" + b
-
- g = "<" + g + ">"
- g2 = "<" + g2 + ">"
- angle = [g + x for x in angle]
- quoted = [g2 + x for x in quoted]
- res = [g2 + x for x in res]
-
- all = angle + quoted
-
- bjam.call('mark-included', target, all)
-
- engine = get_manager().engine()
-
- engine.add_dependency(target, res)
- bjam.call('NOCARE', all + res)
- engine.set_target_variable(angle, 'SEARCH', ungrist(self.includes))
- engine.set_target_variable(quoted, 'SEARCH', b + ungrist(self.includes))
- engine.set_target_variable(res, 'SEARCH', b + ungrist(self.includes)) ;
-
-        # Just propagate the current scanner to the includes, in the hope
-        # that the includes do not change scanners.
- get_manager().scanners().propagate(self, angle + quoted)
-
-scanner.register(ResScanner, 'include')
-type.set_scanner('RC', ResScanner)
diff --git a/jam-files/boost-build/tools/stage.jam b/jam-files/boost-build/tools/stage.jam
deleted file mode 100644
index 296e7558..00000000
--- a/jam-files/boost-build/tools/stage.jam
+++ /dev/null
@@ -1,524 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'install' rule, used to copy a set of targets to a
-# single location.
-
-import targets ;
-import "class" : new ;
-import errors ;
-import type ;
-import generators ;
-import feature ;
-import project ;
-import virtual-target ;
-import path ;
-import types/register ;
-
-
-feature.feature <install-dependencies> : off on : incidental ;
-feature.feature <install-type> : : free incidental ;
-feature.feature <install-source-root> : : free path ;
-feature.feature <so-version> : : free incidental ;
-
-# If 'on', version symlinks for shared libraries will not be created. Affects
-# Unix builds only.
-feature.feature <install-no-version-symlinks> : on : optional incidental ;
-
-
-class install-target-class : basic-target
-{
- import feature ;
- import project ;
- import type ;
- import errors ;
- import generators ;
- import path ;
- import stage ;
- import "class" : new ;
- import property ;
- import property-set ;
-
- rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * )
- {
- basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
- $(requirements) : $(default-build) ;
- }
-
- # If <location> is not set, sets it based on the project data.
- #
- rule update-location ( property-set )
- {
- local loc = [ $(property-set).get <location> ] ;
- if ! $(loc)
- {
- loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
- property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
- }
-
- return $(property-set) ;
- }
-
- # Takes a target that is installed and a property set which is used when
- # installing.
- #
- rule adjust-properties ( target : build-property-set )
- {
- local ps-raw ;
- local a = [ $(target).action ] ;
- if $(a)
- {
- local ps = [ $(a).properties ] ;
- ps-raw = [ $(ps).raw ] ;
-
- # Unless <hardcode-dll-paths>true is in properties, which can happen
- # only if the user has explicitly requested it, nuke all <dll-path>
- # properties.
- if [ $(build-property-set).get <hardcode-dll-paths> ] != true
- {
- ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
- }
-
- # If any <dll-path> properties were specified for installing, add
- # them.
- local l = [ $(build-property-set).get <dll-path> ] ;
- ps-raw += $(l:G=<dll-path>) ;
-
- # Also copy <linkflags> feature from current build set, to be used
- # for relinking.
- local l = [ $(build-property-set).get <linkflags> ] ;
- ps-raw += $(l:G=<linkflags>) ;
-
- # Remove the <tag> feature on original targets.
- ps-raw = [ property.change $(ps-raw) : <tag> ] ;
-
- # And <location>. If stage target has another stage target in
- # sources, then we shall get virtual targets with the <location>
- # property set.
- ps-raw = [ property.change $(ps-raw) : <location> ] ;
- }
-
- local d = [ $(build-property-set).get <dependency> ] ;
- ps-raw += $(d:G=<dependency>) ;
-
- local d = [ $(build-property-set).get <location> ] ;
- ps-raw += $(d:G=<location>) ;
-
- local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
- ps-raw += $(ns:G=<install-no-version-symlinks>) ;
-
- local d = [ $(build-property-set).get <install-source-root> ] ;
- # Make the path absolute: we shall use it to compute relative paths and
- # making the path absolute will help.
- if $(d)
- {
- d = [ path.root $(d) [ path.pwd ] ] ;
- ps-raw += $(d:G=<install-source-root>) ;
- }
-
- if $(ps-raw)
- {
- return [ property-set.create $(ps-raw) ] ;
- }
- else
- {
- return [ property-set.empty ] ;
- }
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- source-targets = [ targets-to-stage $(source-targets) :
- $(property-set) ] ;
-
- property-set = [ update-location $(property-set) ] ;
-
- local ename = [ $(property-set).get <name> ] ;
-
- if $(ename) && $(source-targets[2])
- {
- errors.error "When <name> property is used in 'install', only one"
- "source is allowed" ;
- }
-
- local result ;
- for local i in $(source-targets)
- {
- local staged-targets ;
-
- local new-properties = [ adjust-properties $(i) :
- $(property-set) ] ;
-
- # See if something special should be done when staging this type. It
- # is indicated by the presence of a special "INSTALLED_" type.
- local t = [ $(i).type ] ;
- if $(t) && [ type.registered INSTALLED_$(t) ]
- {
- if $(ename)
- {
- errors.error "In 'install': <name> property specified with target that requires relinking." ;
- }
- else
- {
- local targets = [ generators.construct $(self.project)
- $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
- staged-targets += $(targets[2-]) ;
- }
- }
- else
- {
- staged-targets = [ stage.copy-file $(self.project) $(ename) :
- $(i) : $(new-properties) ] ;
- }
-
- if ! $(staged-targets)
- {
-                errors.error "Unable to generate staged version of " [ $(i).str ] ;
- }
-
- for t in $(staged-targets)
- {
- result += [ virtual-target.register $(t) ] ;
- }
- }
-
- return [ property-set.empty ] $(result) ;
- }
-
- # Given the list of source targets explicitly passed to 'stage', returns the
- # list of targets which must be staged.
- #
- rule targets-to-stage ( source-targets * : property-set )
- {
- local result ;
-
- # Traverse the dependencies, if needed.
- if [ $(property-set).get <install-dependencies> ] = "on"
- {
- source-targets = [ collect-targets $(source-targets) ] ;
- }
-
- # Filter the target types, if needed.
- local included-types = [ $(property-set).get <install-type> ] ;
- for local r in $(source-targets)
- {
- local ty = [ $(r).type ] ;
- if $(ty)
- {
- # Do not stage searched libs.
- if $(ty) != SEARCHED_LIB
- {
- if $(included-types)
- {
- if [ include-type $(ty) : $(included-types) ]
- {
- result += $(r) ;
- }
- }
- else
- {
- result += $(r) ;
- }
- }
- }
- else if ! $(included-types)
- {
- # Don't install typeless target if there is an explicit list of
- # allowed types.
- result += $(r) ;
- }
- }
-
- return $(result) ;
- }
-
- # CONSIDER: figure out why we can not use virtual-target.traverse here.
- #
- rule collect-targets ( targets * )
- {
- # Find subvariants
- local s ;
- for local t in $(targets)
- {
- s += [ $(t).creating-subvariant ] ;
- }
- s = [ sequence.unique $(s) ] ;
-
- local result = [ new set ] ;
- $(result).add $(targets) ;
-
- for local i in $(s)
- {
- $(i).all-referenced-targets $(result) ;
- }
- local result2 ;
- for local r in [ $(result).list ]
- {
- if $(r:G) != <use>
- {
- result2 += $(r:G=) ;
- }
- }
- DELETE_MODULE $(result) ;
- result = [ sequence.unique $(result2) ] ;
- }
-
- # Returns true iff 'type' is subtype of some element of 'types-to-include'.
- #
- local rule include-type ( type : types-to-include * )
- {
- local found ;
- while $(types-to-include) && ! $(found)
- {
- if [ type.is-subtype $(type) $(types-to-include[1]) ]
- {
- found = true ;
- }
- types-to-include = $(types-to-include[2-]) ;
- }
-
- return $(found) ;
- }
-}
-
-
-# Creates a copy of target 'source'. The 'properties' object should have a
-# <location> property which specifies where the target must be placed.
-#
-rule copy-file ( project name ? : source : properties )
-{
- name ?= [ $(source).name ] ;
- local relative ;
-
- local new-a = [ new non-scanning-action $(source) : common.copy :
- $(properties) ] ;
- local source-root = [ $(properties).get <install-source-root> ] ;
- if $(source-root)
- {
- # Get the real path of the target. We probably need to strip relative
- # path from the target name at construction.
- local path = [ $(source).path ] ;
- path = [ path.root $(name:D) $(path) ] ;
- # Make the path absolute. Otherwise, it would be hard to compute the
- # relative path. The 'source-root' is already absolute, see the
- # 'adjust-properties' method above.
- path = [ path.root $(path) [ path.pwd ] ] ;
-
- relative = [ path.relative-to $(source-root) $(path) ] ;
- }
-
- # Note: Using $(name:D=$(relative)) might be faster here, but then we would
- # need to explicitly check that relative is not ".", otherwise we might get
- # paths like '<prefix>/boost/.', try to create it and mkdir would obviously
- # fail.
- name = [ path.join $(relative) $(name:D=) ] ;
-
- return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
- $(new-a) ] ;
-}
-
-
-rule symlink ( name : project : source : properties )
-{
- local a = [ new action $(source) : symlink.ln : $(properties) ] ;
- return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
- $(a) ] ;
-}
-
-
-rule relink-file ( project : source : property-set )
-{
- local action = [ $(source).action ] ;
- local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
- "" : $(property-set) ] ;
- return [ $(cloned-action).targets ] ;
-}
-
-
-# Declare installed version of the EXE type. Generator for this type will cause
-# relinking to the new location.
-type.register INSTALLED_EXE : : EXE ;
-
-
-class installed-exe-generator : generator
-{
- import type ;
- import property-set ;
- import modules ;
- import stage ;
-
- rule __init__ ( )
- {
- generator.__init__ install-exe : EXE : INSTALLED_EXE ;
- }
-
- rule run ( project name ? : property-set : source : multiple ? )
- {
- local need-relink ;
-
- if [ $(property-set).get <os> ] in NT CYGWIN ||
- [ $(property-set).get <target-os> ] in windows cygwin
- {
- }
- else
- {
- # See if the dll-path properties are not changed during
- # install. If so, copy, don't relink.
- local a = [ $(source).action ] ;
- local p = [ $(a).properties ] ;
- local original = [ $(p).get <dll-path> ] ;
- local current = [ $(property-set).get <dll-path> ] ;
-
- if $(current) != $(original)
- {
- need-relink = true ;
- }
- }
-
-
- if $(need-relink)
- {
- return [ stage.relink-file $(project)
- : $(source) : $(property-set) ] ;
- }
- else
- {
- return [ stage.copy-file $(project)
- : $(source) : $(property-set) ] ;
- }
- }
-}
-
-
-generators.register [ new installed-exe-generator ] ;
-
-
-# Installing a shared library on Unix might cause the creation of versioned
-# symbolic links.
-type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
-
-
-class installed-shared-lib-generator : generator
-{
- import type ;
- import property-set ;
- import modules ;
- import stage ;
-
- rule __init__ ( )
- {
- generator.__init__ install-shared-lib : SHARED_LIB
- : INSTALLED_SHARED_LIB ;
- }
-
- rule run ( project name ? : property-set : source : multiple ? )
- {
- if [ $(property-set).get <os> ] in NT CYGWIN ||
- [ $(property-set).get <target-os> ] in windows cygwin
- {
- local copied = [ stage.copy-file $(project) : $(source) :
- $(property-set) ] ;
- return [ virtual-target.register $(copied) ] ;
- }
- else
- {
- local a = [ $(source).action ] ;
- local copied ;
- if ! $(a)
- {
- # Non-derived file, just copy.
- copied = [ stage.copy-file $(project) : $(source) :
- $(property-set) ] ;
- }
- else
- {
- local cp = [ $(a).properties ] ;
- local current-dll-path = [ $(cp).get <dll-path> ] ;
- local new-dll-path = [ $(property-set).get <dll-path> ] ;
-
- if $(current-dll-path) != $(new-dll-path)
- {
- # Rpath changed, need to relink.
- copied = [ stage.relink-file $(project) : $(source) :
- $(property-set) ] ;
- }
- else
- {
- copied = [ stage.copy-file $(project) : $(source) :
- $(property-set) ] ;
- }
- }
-
- copied = [ virtual-target.register $(copied) ] ;
-
- local result = $(copied) ;
- # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
- # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
- # symbolic links.
- local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
- : [ $(copied).name ] ] ;
- if $(m)
- {
- # Symlink without version at all is used to make
- # -lsome_library work.
- result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
- $(property-set) ] ;
-
-                # Symlinks such as libfoo.N and libfoo.N.M are used so that the
-                # library can be found at runtime if libfoo.N.M.X has a soname
-                # of libfoo.N. That happens when the library makes some binary
-                # compatibility guarantees. If not, it is possible to skip
-                # those symlinks.
- local suppress =
- [ $(property-set).get <install-no-version-symlinks> ] ;
-
- if $(suppress) != "on"
- {
- result += [ stage.symlink $(m[1]).$(m[2]) : $(project)
- : $(copied) : $(property-set) ] ;
- result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project)
- : $(copied) : $(property-set) ] ;
- }
- }
-
- return $(result) ;
- }
- }
-}
-
-generators.register [ new installed-shared-lib-generator ] ;
-
-
-# Main target rule for 'install'.
-#
-rule install ( name : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- # Unless the user has explicitly asked us to hardcode dll paths, add
- # <hardcode-dll-paths>false in requirements, to override default value.
- if ! <hardcode-dll-paths>true in $(requirements)
- {
- requirements += <hardcode-dll-paths>false ;
- }
-
- if <tag> in $(requirements:G)
- {
- errors.user-error
- "The <tag> property is not allowed for the 'install' rule" ;
- }
-
- targets.main-target-alternative
- [ new install-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-
-IMPORT $(__name__) : install : : install ;
-IMPORT $(__name__) : install : : stage ;
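As a usage sketch of the install rule and the features declared above (the target and directory names are placeholders):

    # Jamfile.v2 -- copy the hello executable and the shared libraries it
    # depends on into dist/, relinking or creating version symlinks as the
    # generators above decide.
    install dist
        : hello
        : <location>dist
          <install-dependencies>on
          <install-type>EXE
          <install-type>SHARED_LIB
        ;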
diff --git a/jam-files/boost-build/tools/stage.py b/jam-files/boost-build/tools/stage.py
deleted file mode 100644
index 25eccbe5..00000000
--- a/jam-files/boost-build/tools/stage.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Status: ported.
-# Base revision 64444.
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'install' rule, used to copy a set of targets to a
-# single location.
-
-import b2.build.feature as feature
-import b2.build.targets as targets
-import b2.build.property as property
-import b2.build.property_set as property_set
-import b2.build.generators as generators
-import b2.build.virtual_target as virtual_target
-
-from b2.manager import get_manager
-from b2.util.sequence import unique
-from b2.util import bjam_signature
-
-import b2.build.type
-
-import os.path
-import re
-import types
-
-feature.feature('install-dependencies', ['off', 'on'], ['incidental'])
-feature.feature('install-type', [], ['free', 'incidental'])
-feature.feature('install-source-root', [], ['free', 'path'])
-feature.feature('so-version', [], ['free', 'incidental'])
-
-# If 'on', version symlinks for shared libraries will not be created. Affects
-# Unix builds only.
-feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental'])
-
-class InstallTargetClass(targets.BasicTarget):
-
- def update_location(self, ps):
- """If <location> is not set, sets it based on the project data."""
-
- loc = ps.get('location')
- if not loc:
- loc = os.path.join(self.project().get('location'), self.name())
- ps = ps.add_raw(["<location>" + loc])
-
- return ps
-
- def adjust_properties(self, target, build_ps):
- a = target.action()
- properties = []
- if a:
- ps = a.properties()
- properties = ps.all()
-
- # Unless <hardcode-dll-paths>true is in properties, which can happen
- # only if the user has explicitly requested it, nuke all <dll-path>
- # properties.
-
- if build_ps.get('hardcode-dll-paths') != ['true']:
- properties = [p for p in properties if p.feature().name() != 'dll-path']
-
- # If any <dll-path> properties were specified for installing, add
- # them.
- properties.extend(build_ps.get_properties('dll-path'))
-
- # Also copy <linkflags> feature from current build set, to be used
- # for relinking.
- properties.extend(build_ps.get_properties('linkflags'))
-
- # Remove the <tag> feature on original targets.
- # And <location>. If stage target has another stage target in
- # sources, then we shall get virtual targets with the <location>
- # property set.
- properties = [p for p in properties
- if not p.feature().name() in ['tag', 'location']]
-
- properties.extend(build_ps.get_properties('dependency'))
-
- properties.extend(build_ps.get_properties('location'))
-
-
- properties.extend(build_ps.get_properties('install-no-version-symlinks'))
-
- d = build_ps.get_properties('install-source-root')
-
- # Make the path absolute: we shall use it to compute relative paths and
- # making the path absolute will help.
- if d:
- p = d[0]
- properties.append(property.Property(p.feature(), os.path.abspath(p.value())))
-
- return property_set.create(properties)
-
-
- def construct(self, name, source_targets, ps):
-
- source_targets = self.targets_to_stage(source_targets, ps)
- ps = self.update_location(ps)
-
- ename = ps.get('name')
- if ename:
- ename = ename[0]
- if ename and len(source_targets) > 1:
- get_manager().errors()("When <name> property is used in 'install', only one source is allowed")
-
- result = []
-
- for i in source_targets:
-
- staged_targets = []
- new_ps = self.adjust_properties(i, ps)
-
- # See if something special should be done when staging this type. It
- # is indicated by the presence of a special "INSTALLED_" type.
- t = i.type()
- if t and b2.build.type.registered("INSTALLED_" + t):
-
- if ename:
- get_manager().errors()("In 'install': <name> property specified with target that requires relinking.")
- else:
- (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t,
- new_ps, [i])
- assert isinstance(r, property_set.PropertySet)
- staged_targets.extend(targets)
-
- else:
- staged_targets.append(copy_file(self.project(), ename, i, new_ps))
-
- if not staged_targets:
- get_manager().errors()("Unable to generate staged version of " + i)
-
- result.extend(get_manager().virtual_targets().register(t) for t in staged_targets)
-
- return (property_set.empty(), result)
-
- def targets_to_stage(self, source_targets, ps):
- """Given the list of source targets explicitly passed to 'stage', returns the
- list of targets which must be staged."""
-
- result = []
-
- # Traverse the dependencies, if needed.
- if ps.get('install-dependencies') == ['on']:
- source_targets = self.collect_targets(source_targets)
-
- # Filter the target types, if needed.
- included_types = ps.get('install-type')
- for r in source_targets:
- ty = r.type()
- if ty:
- # Do not stage searched libs.
- if ty != "SEARCHED_LIB":
- if included_types:
- if self.include_type(ty, included_types):
- result.append(r)
- else:
- result.append(r)
- elif not included_types:
- # Don't install typeless target if there is an explicit list of
- # allowed types.
- result.append(r)
-
- return result
-
- # CONSIDER: figure out why we can not use virtual-target.traverse here.
- #
- def collect_targets(self, targets):
-
- s = [t.creating_subvariant() for t in targets]
- s = unique(s)
-
- result = set(targets)
- for i in s:
- i.all_referenced_targets(result)
-
- result2 = []
- for r in result:
- if isinstance(r, property.Property):
-
- if r.feature().name() != 'use':
- result2.append(r.value())
- else:
- result2.append(r)
- result2 = unique(result2)
- return result2
-
- # Returns true iff 'type' is subtype of some element of 'types-to-include'.
- #
- def include_type(self, type, types_to_include):
- return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include)
-
-# Creates a copy of target 'source'. The 'properties' object should have a
-# <location> property which specifies where the target must be placed.
-#
-def copy_file(project, name, source, ps):
-
- if not name:
- name = source.name()
-
- relative = ""
-
- new_a = virtual_target.NonScanningAction([source], "common.copy", ps)
- source_root = ps.get('install-source-root')
- if source_root:
- source_root = source_root[0]
- # Get the real path of the target. We probably need to strip relative
- # path from the target name at construction.
- path = os.path.join(source.path(), os.path.dirname(name))
- # Make the path absolute. Otherwise, it would be hard to compute the
- # relative path. The 'source-root' is already absolute, see the
- # 'adjust-properties' method above.
- path = os.path.abspath(path)
-
- relative = os.path.relpath(path, source_root)
-
- name = os.path.join(relative, os.path.basename(name))
- return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True)
-
-def symlink(name, project, source, ps):
- a = virtual_target.Action([source], "symlink.ln", ps)
- return virtual_target.FileTarget(name, source.type(), project, a, exact=True)
-
-def relink_file(project, source, ps):
- action = source.action()
- cloned_action = virtual_target.clone_action(action, project, "", ps)
- targets = cloned_action.targets()
- # We relink only on Unix, where exe or shared lib is always a single file.
- assert len(targets) == 1
- return targets[0]
-
-
-# Declare installed version of the EXE type. Generator for this type will cause
-# relinking to the new location.
-b2.build.type.register('INSTALLED_EXE', [], 'EXE')
-
-class InstalledExeGenerator(generators.Generator):
-
- def __init__(self):
- generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE'])
-
- def run(self, project, name, ps, source):
-
- need_relink = False;
-
- if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
- # Never relink
- pass
- else:
- # See if the dll-path properties are not changed during
- # install. If so, copy, don't relink.
- need_relink = ps.get('dll-path') != source[0].action().properties().get('dll-path')
-
- if need_relink:
- return [relink_file(project, source, ps)]
- else:
- return [copy_file(project, None, source[0], ps)]
-
-generators.register(InstalledExeGenerator())
-
-
-# Installing a shared library on Unix might cause the creation of versioned
-# symbolic links.
-b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB')
-
-class InstalledSharedLibGenerator(generators.Generator):
-
- def __init__(self):
- generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB'])
-
- def run(self, project, name, ps, source):
-
- source = source[0]
- if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
- copied = copy_file(project, None, source, ps)
- return [get_manager().virtual_targets().register(copied)]
- else:
- a = source.action()
- if not a:
- # Non-derived file, just copy.
-                copied = copy_file(project, None, source, ps)
- else:
-
- need_relink = ps.get('dll-path') != source.action().properties().get('dll-path')
-
- if need_relink:
- # Rpath changed, need to relink.
- copied = relink_file(project, source, ps)
- else:
- copied = copy_file(project, None, source, ps)
-
- result = [get_manager().virtual_targets().register(copied)]
- # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
- # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
- # symbolic links.
- m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$",
- copied.name());
- if m:
- # Symlink without version at all is used to make
- # -lsome_library work.
- result.append(symlink(m.group(1), project, copied, ps))
-
-                # Symlinks such as libfoo.N and libfoo.N.M are used so that the
-                # library can be found at runtime if libfoo.N.M.X has a soname
-                # of libfoo.N. That happens when the library makes some binary
-                # compatibility guarantees. If not, it is possible to skip
-                # those symlinks.
- if ps.get('install-no-version-symlinks') != ['on']:
-
- result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps))
- result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3),
- project, copied, ps))
-
- return result
-
-generators.register(InstalledSharedLibGenerator())
-
-
-# Main target rule for 'install'.
-#
-@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"], ["usage_requirements", "*"]))
-def install(name, sources, requirements=[], default_build=[], usage_requirements=[]):
-
- requirements = requirements[:]
- # Unless the user has explicitly asked us to hardcode dll paths, add
- # <hardcode-dll-paths>false in requirements, to override default value.
- if not '<hardcode-dll-paths>true' in requirements:
- requirements.append('<hardcode-dll-paths>false')
-
- if any(r.startswith('<tag>') for r in requirements):
- get_manager().errors()("The <tag> property is not allowed for the 'install' rule")
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- InstallTargetClass(name, project,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-get_manager().projects().add_rule("install", install)
-get_manager().projects().add_rule("stage", install)
-
diff --git a/jam-files/boost-build/tools/stlport.jam b/jam-files/boost-build/tools/stlport.jam
deleted file mode 100644
index 62eebda5..00000000
--- a/jam-files/boost-build/tools/stlport.jam
+++ /dev/null
@@ -1,303 +0,0 @@
-# Copyright Gennadiy Rozental
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# STLPort is made usable by means of the 'stdlib' feature. When
-# stdlib=stlport is specified, the default version of STLPort is used,
-# while stdlib=stlport-4.5 selects a specific version.
-# The subfeature value 'hostios' means to use the host compiler's iostreams.
-#
-# The specific STLPort variant is selected by features:
-# The <runtime-link> feature selects between the static and shared library.
-# <runtime-debugging>on selects STLPort with debug symbols
-# and STL debugging.
-# There is no way to use STLPort with debug symbols but without
-# STL debugging.
-
-# TODO: must implement selection of different STLPort installations based
-# on used toolset.
-# Also, finish various flags:
-#
-# This is copied from V1 toolset, "+" means "implemented"
-#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
-#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
-# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
-# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
-# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
-# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
-#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
-#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
-
-
-import feature : feature subfeature ;
-import project ;
-import "class" : new ;
-import targets ;
-import property-set ;
-import common ;
-import type ;
-
-# Make this module into a project.
-project.initialize $(__name__) ;
-project stlport ;
-
-# The problem: how to request using the host compiler's iostreams?
-#
-# Solution 1: A global 'stlport-iostream' feature.
-# That's ugly; a subfeature makes more sense for an stlport-specific thing.
-# Solution 2: Use a subfeature with two values, one of which ("use STLPort iostreams")
-# is the default.
-# The problem is that such a subfeature will appear in target paths, and that's ugly.
-# Solution 3: Use an optional subfeature with only one value.
-
-feature.extend stdlib : stlport ;
-feature.compose <stdlib>stlport : <library>/stlport//stlport ;
-
-# STLport iostreams or native iostreams
-subfeature stdlib stlport : iostream : hostios : optional propagated ;
-
-# STLport extensions
-subfeature stdlib stlport : extensions : noext : optional propagated ;
-
-# STLport anachronisms -- NOT YET SUPPORTED
-# subfeature stdlib stlport : anachronisms : on off ;
-
-# STLport debug allocation -- NOT YET SUPPORTED
-#subfeature stdlib stlport : debug-alloc : off on ;
-
-# Declare a special target class to handle the creation of search-lib-target
-# instances for STLport. We need a special class, because otherwise:
-# - we'd have to declare prebuilt targets for all possible toolsets, and by the
-#   time 'init' is called we don't even know the list of registered toolsets;
-# - when host iostreams are used, we really should produce nothing, which would
-#   be hard/impossible to achieve using prebuilt targets.
-
-class stlport-target-class : basic-target
-{
- import feature project type errors generators ;
- import set : difference ;
-
- rule __init__ ( project : headers ? : libraries * : version ? )
- {
- basic-target.__init__ stlport : $(project) ;
- self.headers = $(headers) ;
- self.libraries = $(libraries) ;
- self.version = $(version) ;
- self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
-
- local requirements ;
- requirements += <stdlib-stlport:version>$(self.version) ;
- self.requirements = [ property-set.create $(requirements) ] ;
- }
-
- rule generate ( property-set )
- {
- # Since this target is built with <stdlib>stlport, it will also
- # have <library>/stlport//stlport in requirements, which will
- # cause a loop in main target references. Remove that property
- # manually.
-
- property-set = [ property-set.create
- [ difference
- [ $(property-set).raw ] :
- <library>/stlport//stlport
- <stdlib>stlport
- ]
- ] ;
- return [ basic-target.generate $(property-set) ] ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- # Deduce the name of stlport library, based on toolset and
- # debug setting.
- local raw = [ $(property-set).raw ] ;
- local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
- local toolset = [ feature.get-values <toolset> : $(raw) ] ;
-
- if $(self.version.5)
- {
- # Version 5.x
-
- # STLport host IO streams are no longer supported, so we always
- # need libraries.
-
- # name: stlport(stl)?[dg]?(_static)?.M.R
- local name = stlport ;
- if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
- {
- name += stl ;
- switch $(toolset)
- {
- case gcc* : name += g ;
- case darwin* : name += g ;
- case * : name += d ;
- }
- }
-
- if [ feature.get-values <runtime-link> : $(raw) ] = "static"
- {
- name += _static ;
- }
-
- # Starting with version 5.2.0, the STLport static libraries no longer
- # include a version number in their name
- local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ;
- if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static"
- {
- name += .$(self.version.5) ;
- }
-
- name = $(name:J=) ;
-
- if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
- {
- #~ Allow explicitly asking to install the STLport lib by
- #~ referring to it directly: /stlport//stlport/<install-dependencies>on
- #~ This allows for install packaging of all libs one might need for
- #~ a standalone distribution.
- import path : make : path-make ;
- local runtime-link
- = [ feature.get-values <runtime-link> : $(raw) ] ;
- local lib-file.props
- = [ property-set.create $(raw) <link>$(runtime-link) ] ;
- local lib-file.prefix
- = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
- local lib-file.suffix
- = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
- lib-file.prefix
- ?= "" "lib" ;
- lib-file.suffix
- ?= "" ;
- local lib-file
- = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
- $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
- lib-file
- = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ;
- lib-file
- = [ $(lib-file).generate "" ] ;
- local lib-file.requirements
- = [ targets.main-target-requirements
- [ $(lib-file.props).raw ] <file>$(lib-file[-1])
- : $(self.project) ] ;
- return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ;
- }
- else
- {
- #~ Otherwise, it's just a regular usage of the library.
- return [ generators.construct
- $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
- }
- }
- else if ! $(hostios) && $(toolset) != msvc
- {
- # We don't need libraries if host iostreams are used. For
- # msvc, automatic library selection will be used.
-
- # name: stlport_<toolset>(_stldebug)?
- local name = stlport ;
- name = $(name)_$(toolset) ;
- if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
- {
- name = $(name)_stldebug ;
- }
-
- return [ generators.construct
- $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
- }
- else
- {
- return [ property-set.empty ] ;
- }
- }
-
- rule compute-usage-requirements ( subvariant )
- {
- local usage-requirements =
- <include>$(self.headers)
- <dll-path>$(self.libraries)
- <library-path>$(self.libraries)
- ;
-
- local rproperties = [ $(subvariant).build-properties ] ;
- # CONSIDER: should this "if" sequence be replaced with
- # some use of 'property-map' class?
- if [ $(rproperties).get <runtime-debugging> ] = "on"
- {
- usage-requirements +=
- <define>_STLP_DEBUG=1
- <define>_STLP_DEBUG_UNINITIALIZED=1 ;
- }
- if [ $(rproperties).get <runtime-link> ] = "shared"
- {
- usage-requirements +=
- <define>_STLP_USE_DYNAMIC_LIB=1 ;
- }
- if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
- {
- usage-requirements +=
- <define>_STLP_NO_EXTENSIONS=1 ;
- }
- if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
- {
- usage-requirements +=
- <define>_STLP_NO_OWN_IOSTREAMS=1
- <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
- }
- if $(self.version.5)
- {
- # Version 5.x
- if [ $(rproperties).get <threading> ] = "single"
- {
- # Since STLport 5 doesn't normally support single-threaded
- # builds, we force STLport 5 into multi-threaded mode. As a
- # result, single-threaded code provided by other libs ends up
- # linking against a multi-threaded lib.
- usage-requirements +=
- <define>_STLP_THREADS=1 ;
- }
- }
-
- return [ property-set.create $(usage-requirements) ] ;
- }
-}
-
-rule stlport-target ( headers ? : libraries * : version ? )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new stlport-target-class $(project) : $(headers) : $(libraries)
- : $(version)
- ] ;
-}
-
-local .version-subfeature-defined ;
-
-# Initialize stlport support.
-rule init (
- version ? :
- headers : # Location of header files
- libraries * # Location of libraries, lib and bin subdirs of STLport.
- )
-{
- # FIXME: need to use common.check-init-parameters here.
- # At the moment, that rule always tries to define subfeature
- # of the 'toolset' feature, while we need to define subfeature
- # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
- if $(version)
- {
- if ! $(.version-subfeature-defined)
- {
- feature.subfeature stdlib stlport : version : : propagated ;
- .version-subfeature-defined = true ;
- }
- feature.extend-subfeature stdlib stlport : version : $(version) ;
- }
-
- # Declare the main target for this STLPort version.
- stlport-target $(headers) : $(libraries) : $(version) ;
-}
-
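
The STLport 5.x library-name deduction performed by the 'construct' rule above can be approximated in a few lines of Python. This is a sketch for illustration only (the function name stlport5_lib_name is made up; the real rule also consults the full property set and, for <install-dependencies>on, GLOBs the library directory):

    import re

    def stlport5_lib_name(version, toolset, runtime_debugging, runtime_link):
        # name: stlport(stl)?[dg]?(_static)?.M.R -- see the comment in 'construct' above.
        name = "stlport"
        if runtime_debugging == "on":
            name += "stl"
            name += "g" if toolset.startswith(("gcc", "darwin")) else "d"
        if runtime_link == "static":
            name += "_static"
        # Starting with 5.2.0 the static libraries drop the version suffix.
        pre_5_2 = re.match(r"^5\.[01](\.|$)", version) is not None
        if pre_5_2 or runtime_link != "static":
            name += "." + re.match(r"^(5\.\d+)", version).group(1)
        return name

    print(stlport5_lib_name("5.2.1", "gcc", "on", "shared"))    # stlportstlg.5.2
    print(stlport5_lib_name("5.2.1", "msvc", "off", "static"))  # stlport_static
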
diff --git a/jam-files/boost-build/tools/sun.jam b/jam-files/boost-build/tools/sun.jam
deleted file mode 100644
index 0ca927d3..00000000
--- a/jam-files/boost-build/tools/sun.jam
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright (C) Christopher Currie 2003. Permission to copy, use,
-# modify, sell and distribute this software is granted provided this
-# copyright notice appears in all copies. This software is provided
-# "as is" without express or implied warranty, and with no claim as
-# to its suitability for any purpose.
-
-import property ;
-import generators ;
-import os ;
-import toolset : flags ;
-import feature ;
-import type ;
-import common ;
-
-feature.extend toolset : sun ;
-toolset.inherit sun : unix ;
-generators.override sun.prebuilt : builtin.lib-generator ;
-generators.override sun.prebuilt : builtin.prebuilt ;
-generators.override sun.searched-lib-generator : searched-lib-generator ;
-
-feature.extend stdlib : sun-stlport ;
-feature.compose <stdlib>sun-stlport
- : <cxxflags>-library=stlport4 <linkflags>-library=stlport4
- ;
-
-rule init ( version ? : command * : options * )
-{
- local condition = [
- common.check-init-parameters sun : version $(version) ] ;
-
- command = [ common.get-invocation-command sun : CC
- : $(command) : "/opt/SUNWspro/bin" ] ;
-
- # Even if the real compiler is not found, put CC on the
- # command line so that the user sees the command line that would have been executed.
- command ?= CC ;
-
- common.handle-options sun : $(condition) : $(command) : $(options) ;
-
- command_c = $(command[1--2]) $(command[-1]:B=cc) ;
-
- toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ;
-}
-
-# Declare generators
-generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ;
-generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ;
-
-# Declare flags and actions for compilation
-flags sun.compile OPTIONS <debug-symbols>on : -g ;
-flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ;
-flags sun.compile OPTIONS <optimization>speed : -xO4 ;
-flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ;
-flags sun.compile OPTIONS <threading>multi : -mt ;
-flags sun.compile OPTIONS <warnings>off : -erroff ;
-flags sun.compile OPTIONS <warnings>on : -erroff=%none ;
-flags sun.compile OPTIONS <warnings>all : -erroff=%none ;
-flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ;
-
-flags sun.compile.c++ OPTIONS <inlining>off : +d ;
-
-# The -m32 and -m64 options are supported starting
-# with Sun Studio 12. On earlier compilers, the
-# 'address-model' feature is not supported and should not
-# be used. Instead, use -xarch=generic64 command line
-# option.
-# See http://svn.boost.org/trac/boost/ticket/1186
-# for details.
-flags sun OPTIONS <address-model>32 : -m32 ;
-flags sun OPTIONS <address-model>64 : -m64 ;
-# On sparc, there's a difference between -Kpic
-# and -KPIC. The first is slightly more efficient,
-# but has limits on the size of the GOT table.
-# For minimal fuss on the user side, we use -KPIC here.
-# See http://svn.boost.org/trac/boost/ticket/1186#comment:6
-# for detailed explanation.
-flags sun OPTIONS <link>shared : -KPIC ;
-
-flags sun.compile OPTIONS <cflags> ;
-flags sun.compile.c++ OPTIONS <cxxflags> ;
-flags sun.compile DEFINES <define> ;
-flags sun.compile INCLUDES <include> ;
-
-actions compile.c
-{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# Declare flags and actions for linking
-flags sun.link OPTIONS <debug-symbols>on : -g ;
-# Strip the binary when no debugging is needed
-flags sun.link OPTIONS <debug-symbols>off : -s ;
-flags sun.link OPTIONS <profiling>on : -xprofile=tcov ;
-flags sun.link OPTIONS <threading>multi : -mt ;
-flags sun.link OPTIONS <linkflags> ;
-flags sun.link LINKPATH <library-path> ;
-flags sun.link FINDLIBS-ST <find-static-library> ;
-flags sun.link FINDLIBS-SA <find-shared-library> ;
-flags sun.link LIBRARIES <library-file> ;
-flags sun.link LINK-RUNTIME <runtime-link>static : static ;
-flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ;
-flags sun.link RPATH <dll-path> ;
-# On gcc, there are separate options for dll path at runtime and
-# link time. On Solaris, there's only one: -R, so we have to use
-# it, even though it's a bad idea.
-flags sun.link RPATH <xdll-path> ;
-
-# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.)
-flags sun.link FINDLIBS-SA : rt ;
-
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-}
-
-# Slight mods for dlls
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-}
-
-# Declare action for creating static libraries
-actions piecemeal archive
-{
- "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)"
-}
-
diff --git a/jam-files/boost-build/tools/symlink.jam b/jam-files/boost-build/tools/symlink.jam
deleted file mode 100644
index b33e8260..00000000
--- a/jam-files/boost-build/tools/symlink.jam
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines the "symlink" special target. 'symlink' targets make symbolic links
-# to the sources.
-
-import targets modules path class os feature project property-set ;
-
-.count = 0 ;
-
-feature.feature symlink-location : project-relative build-relative : incidental ;
-
-# The class representing "symlink" targets.
-#
-class symlink-targets : basic-target
-{
- import numbers modules class property project path ;
-
- rule __init__ (
- project
- : targets *
- : sources *
- )
- {
- # Generate a fake name for now. Need unnamed targets eventually.
- local c = [ modules.peek symlink : .count ] ;
- modules.poke symlink : .count : [ numbers.increment $(c) ] ;
- local fake-name = symlink#$(c) ;
-
- basic-target.__init__ $(fake-name) : $(project) : $(sources) ;
-
- # Remember the targets to map the sources onto. Pad or truncate
- # to fit the sources given.
- self.targets = ;
- for local source in $(sources)
- {
- if $(targets)
- {
- self.targets += $(targets[1]) ;
- targets = $(targets[2-]) ;
- }
- else
- {
- self.targets += $(source) ;
- }
- }
-
- # The virtual targets corresponding to the given targets.
- self.virtual-targets = ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local i = 1 ;
- for local t in $(source-targets)
- {
- local s = $(self.targets[$(i)]) ;
- local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ;
- local vt = [ class.new file-target $(s:D=)
- : [ $(t).type ] : $(self.project) : $(a) ] ;
-
- # Place the symlink in the directory relative to the project
- # location, instead of placing it in the build directory.
- if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative
- {
- $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ;
- }
-
- self.virtual-targets += $(vt) ;
- i = [ numbers.increment $(i) ] ;
- }
- return [ property-set.empty ] $(self.virtual-targets) ;
- }
-}
-
-# Creates a symbolic link from a set of targets to a set of sources.
-# The targets and sources map one to one. The symlinks generated are
-# limited to be the ones given as the sources. That is, the targets
-# are either padded or trimmed to equate to the sources. The padding
-# is done with the name of the corresponding source. For example::
-#
-# symlink : one two ;
-#
-# Is equal to::
-#
-# symlink one two : one two ;
-#
-# Names for symlink are relative to the project location. They cannot
-# include ".." path components.
-rule symlink (
- targets *
- : sources *
- )
-{
- local project = [ project.current ] ;
-
- return [ targets.main-target-alternative
- [ class.new symlink-targets $(project) : $(targets) :
- # Note: inline targets are intentionally not supported for symlink,
- # since it is used to link existing non-local targets.
- $(sources) ] ] ;
-}
-
-rule ln
-{
- local os ;
- if [ modules.peek : UNIX ] { os = UNIX ; }
- else { os ?= [ os.name ] ; }
- # Remember the path to make the link relative to where the symlink is located.
- local path-to-source = [ path.relative-to
- [ path.make [ on $(<) return $(LOCATE) ] ]
- [ path.make [ on $(>) return $(LOCATE) ] ] ] ;
- if $(path-to-source) = .
- {
- PATH_TO_SOURCE on $(<) = "" ;
- }
- else
- {
- PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ;
- }
- ln-$(os) $(<) : $(>) ;
-}
-
-actions ln-UNIX
-{
- ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'
-}
-
-# there is a way to do this; we fall back to a copy for now
-actions ln-NT
-{
- echo "NT symlinks not supported yet, making copy"
- del /f /q "$(<)" 2>nul >nul
- copy "$(>)" "$(<)" $(NULL_OUT)
-}
-
-IMPORT $(__name__) : symlink : : symlink ;
diff --git a/jam-files/boost-build/tools/symlink.py b/jam-files/boost-build/tools/symlink.py
deleted file mode 100644
index 6345ded6..00000000
--- a/jam-files/boost-build/tools/symlink.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Status: ported.
-# Base revision: 64488.
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines the "symlink" special target. 'symlink' targets make symbolic links
-# to the sources.
-
-import b2.build.feature as feature
-import b2.build.targets as targets
-import b2.build.property_set as property_set
-import b2.build.virtual_target as virtual_target
-import b2.build.targets
-
-from b2.manager import get_manager
-
-import bjam
-
-import os
-
-
-feature.feature("symlink-location", ["project-relative", "build-relative"], ["incidental"])
-
-class SymlinkTarget(targets.BasicTarget):
-
- _count = 0
-
- def __init__(self, project, targets, sources):
-
- # Generate a fake name for now. Need unnamed targets eventually.
- fake_name = "symlink#%s" % SymlinkTarget._count
- SymlinkTarget._count = SymlinkTarget._count + 1
-
- b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources)
-
- # Remember the targets to map the sources onto. Pad or truncate
- # to fit the sources given.
- assert len(targets) <= len(sources)
- self.targets = targets[:] + sources[len(targets):]
-
- # The virtual targets corresponding to the given targets.
- self.virtual_targets = []
-
- def construct(self, name, source_targets, ps):
- i = 0
- for t in source_targets:
- s = self.targets[i]
- a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps)
- vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a)
-
- # Place the symlink in the directory relative to the project
- # location, instead of placing it in the build directory.
- if not ps.get('symlink-location') == "project-relative":
- vt.set_path(os.path.join(self.project().get('location'), os.path.dirname(s)))
-
- vt = get_manager().virtual_targets().register(vt)
- self.virtual_targets.append(vt)
- i = i + 1
-
- return (property_set.empty(), self.virtual_targets)
-
-# Creates a symbolic link from a set of targets to a set of sources.
-# The targets and sources map one to one. The symlinks generated are
-# limited to be the ones given as the sources. That is, the targets
-# are either padded or trimmed to equate to the sources. The padding
-# is done with the name of the corresponding source. For example::
-#
-# symlink : one two ;
-#
-# Is equal to::
-#
-# symlink one two : one two ;
-#
-# Names for symlink are relative to the project location. They cannot
-# include ".." path components.
-def symlink(targets, sources):
-
- from b2.manager import get_manager
- t = get_manager().targets()
- p = get_manager().projects().current()
-
- return t.main_target_alternative(
- SymlinkTarget(p, targets,
- # Note: inline targets are intentionally not supported for symlink,
- # since it is used to link existing non-local targets.
- sources))
-
-
-def setup_ln(targets, sources, ps):
-
- source_path = bjam.call("get-target-variable", sources[0], "LOCATE")[0]
- target_path = bjam.call("get-target-variable", targets[0], "LOCATE")[0]
- rel = os.path.relpath(source_path, target_path)
- if rel == ".":
- bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", "")
- else:
- bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", rel)
-
-if os.name == 'nt':
- ln_action = """echo "NT symlinks not supported yet, making copy"
-del /f /q "$(<)" 2>nul >nul
-copy "$(>)" "$(<)" $(NULL_OUT)"""
-else:
- ln_action = "ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'"
-
-get_manager().engine().register_action("symlink.ln", ln_action, function=setup_ln)
-
-get_manager().projects().add_rule("symlink", symlink)
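
The PATH_TO_SOURCE value computed by setup_ln above boils down to a relative-path calculation between the two LOCATE directories. A standalone sketch (the helper path_to_source is hypothetical, not part of the module):

    import os

    def path_to_source(source_locate, target_locate):
        # Same idea as setup_ln: express the link target relative to where the symlink lives.
        rel = os.path.relpath(source_locate, target_locate)
        return "" if rel == "." else rel

    # A symlink placed in 'bin' pointing at a file built in 'bin/gcc/debug':
    print(path_to_source("bin/gcc/debug", "bin"))  # gcc/debug
    # Same directory: the ln action then uses just the bare file name.
    print(path_to_source("bin", "bin"))            # (empty string)

On Unix the registered action then runs roughly ln -f -s 'gcc/debug/libfoo.so' 'libfoo.so'; on NT it falls back to a copy, as the action text above shows.
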
diff --git a/jam-files/boost-build/tools/testing-aux.jam b/jam-files/boost-build/tools/testing-aux.jam
deleted file mode 100644
index 525dafd0..00000000
--- a/jam-files/boost-build/tools/testing-aux.jam
+++ /dev/null
@@ -1,210 +0,0 @@
-# This module is imported by testing.py. The definitions here are
-# too tricky to do in Python
-
-# Causes the 'target' to exist after bjam invocation if and only if all the
-# dependencies were successfully built.
-#
-rule expect-success ( target : dependency + : requirements * )
-{
- **passed** $(target) : $(sources) ;
-}
-IMPORT testing : expect-success : : testing.expect-success ;
-
-# Causes the 'target' to exist after bjam invocation if and only if some of
-# the dependencies were not successfully built.
-#
-rule expect-failure ( target : dependency + : properties * )
-{
- local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
- local marker = $(dependency:G=$(grist)*fail) ;
- (failed-as-expected) $(marker) ;
- FAIL_EXPECTED $(dependency) ;
- LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
- RMOLD $(marker) ;
- DEPENDS $(marker) : $(dependency) ;
- DEPENDS $(target) : $(marker) ;
- **passed** $(target) : $(marker) ;
-}
-IMPORT testing : expect-failure : : testing.expect-failure ;
-
-# The rule/action combination used to report successful passing of a test.
-#
-rule **passed**
-{
- # Force deletion of the target, in case any dependencies failed to build.
- RMOLD $(<) ;
-}
-
-
-# Used to create test files signifying passed tests.
-#
-actions **passed**
-{
- echo passed > "$(<)"
-}
-
-
-# Used to create replacement object files that do not get created during tests
-# that are expected to fail.
-#
-actions (failed-as-expected)
-{
- echo failed as expected > "$(<)"
-}
-
-# Runs executable 'sources' and stores stdout in file 'target'. Unless
-# --preserve-test-targets command line option has been specified, removes the
-# executable. The 'targets-to-remove' parameter controls what should be removed:
-# - if 'none', does not remove anything, ever
-# - if empty, removes 'source'
-# - if non-empty and not 'none', contains a list of sources to remove.
-#
-rule capture-output ( target : source : properties * : targets-to-remove * )
-{
- output-file on $(target) = $(target:S=.output) ;
- LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
-
- # The INCLUDES kill a warning about independent target...
- INCLUDES $(target) : $(target:S=.output) ;
- # but it also puts .output into dependency graph, so we must tell jam it is
- # OK if it cannot find the target or updating rule.
- NOCARE $(target:S=.output) ;
-
- # This has a two-fold effect. First, it adds input files to the dependency
- # graph, preventing a warning. Second, it causes input files to be bound
- # before target is created. Therefore, they are bound using SEARCH setting
- # on them and not LOCATE setting of $(target), as in other case (due to jam
- # bug).
- DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
-
- if $(targets-to-remove) = none
- {
- targets-to-remove = ;
- }
- else if ! $(targets-to-remove)
- {
- targets-to-remove = $(source) ;
- }
-
- if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
- {
- TEMPORARY $(targets-to-remove) ;
- # Set a second action on target that will be executed after capture
- # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
- # always considered succeeded. This is needed for 'run-fail' test. For
- # that test the target will be marked with FAIL_EXPECTED, and without
- # 'ignore' successful execution will be negated and be reported as
- # failure. With 'ignore' we do not detect a case where removing files
- # fails, but it is not likely to happen.
- RmTemps $(target) : $(targets-to-remove) ;
- }
-}
-
-
-if [ os.name ] = NT
-{
- .STATUS = %status% ;
- .SET_STATUS = "set status=%ERRORLEVEL%" ;
- .RUN_OUTPUT_NL = "echo." ;
- .STATUS_0 = "%status% EQU 0 (" ;
- .STATUS_NOT_0 = "%status% NEQ 0 (" ;
- .VERBOSE = "%verbose% EQU 1 (" ;
- .ENDIF = ")" ;
- .SHELL_SET = "set " ;
- .CATENATE = type ;
- .CP = copy ;
-}
-else
-{
- .STATUS = "$status" ;
- .SET_STATUS = "status=$?" ;
- .RUN_OUTPUT_NL = "echo" ;
- .STATUS_0 = "test $status -eq 0 ; then" ;
- .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
- .VERBOSE = "test $verbose -eq 1 ; then" ;
- .ENDIF = "fi" ;
- .SHELL_SET = "" ;
- .CATENATE = cat ;
- .CP = cp ;
-}
-
-
-.VERBOSE_TEST = 0 ;
-if --verbose-test in [ modules.peek : ARGV ]
-{
- .VERBOSE_TEST = 1 ;
-}
-
-
-.RM = [ common.rm-command ] ;
-
-
-actions capture-output bind INPUT_FILES output-file
-{
- $(PATH_SETUP)
- $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
- $(.SET_STATUS)
- $(.RUN_OUTPUT_NL) >> "$(output-file)"
- echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
- if $(.STATUS_0)
- $(.CP) "$(output-file)" "$(<)"
- $(.ENDIF)
- $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
- if $(.STATUS_NOT_0)
- $(.SHELL_SET)verbose=1
- $(.ENDIF)
- if $(.VERBOSE)
- echo ====== BEGIN OUTPUT ======
- $(.CATENATE) "$(output-file)"
- echo ====== END OUTPUT ======
- $(.ENDIF)
- exit $(.STATUS)
-}
-
-IMPORT testing : capture-output : : testing.capture-output ;
-
-
-actions quietly updated ignore piecemeal together RmTemps
-{
- $(.RM) "$(>)"
-}
-
-
-.MAKE_FILE = [ common.file-creation-command ] ;
-
-actions unit-test
-{
- $(PATH_SETUP)
- $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
-}
-
-rule record-time ( target : source : start end user system )
-{
- local src-string = [$(source:G=:J=",")"] " ;
- USER_TIME on $(target) += $(src-string)$(user) ;
- SYSTEM_TIME on $(target) += $(src-string)$(system) ;
-}
-
-# Calling this rule requests that Boost Build time how long it takes to build the
-# 'source' target and display the results both on the standard output and in the
-# 'target' file.
-#
-rule time ( target : source : properties * )
-{
- # Set up rule for recording timing information.
- __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
-
- # Make sure that the source is rebuilt any time we need to retrieve that
- # information.
- REBUILDS $(target) : $(source) ;
-}
-
-
-actions time
-{
- echo user: $(USER_TIME)
- echo system: $(SYSTEM_TIME)
-
- echo user: $(USER_TIME)" seconds" > "$(<)"
- echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
-}
diff --git a/jam-files/boost-build/tools/testing.jam b/jam-files/boost-build/tools/testing.jam
deleted file mode 100644
index c42075b7..00000000
--- a/jam-files/boost-build/tools/testing.jam
+++ /dev/null
@@ -1,581 +0,0 @@
-# Copyright 2005 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module implements regression testing framework. It declares a number of
-# main target rules which perform some action and, if the results are OK,
-# creates an output file.
-#
-# The exact list of rules is:
-# 'compile' -- creates .test file if compilation of sources was
-# successful.
-# 'compile-fail' -- creates .test file if compilation of sources failed.
-# 'run' -- creates .test file if running the executable produced from
-# sources was successful. Also leaves behind .output file
-# with the output from program run.
-# 'run-fail' -- same as above, but .test file is created if running fails.
-#
-# In all cases, presence of .test file is an indication that the test passed.
-# For more convenient reporting, you might want to use C++ Boost regression
-# testing utilities (see http://www.boost.org/more/regression.html).
-#
-# For historical reasons, a 'unit-test' rule is available which has the same
-# syntax as 'exe' and behaves just like 'run'.
-
-# Things to do:
-# - Teach compiler_status to handle Jamfile.v2.
-# Notes:
-# - <no-warn> is not implemented, since it is Como-specific, and it is not
-# clear how to implement it
-# - std::locale-support is not implemented (it is used in one test).
-
-
-import alias ;
-import "class" ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import os ;
-import path ;
-import project ;
-import property ;
-import property-set ;
-import regex ;
-import sequence ;
-import targets ;
-import toolset ;
-import type ;
-import virtual-target ;
-
-
-rule init ( )
-{
-}
-
-
-# Feature controlling the command used to launch test programs.
-feature.feature testing.launcher : : free optional ;
-
-feature.feature test-info : : free incidental ;
-feature.feature testing.arg : : free incidental ;
-feature.feature testing.input-file : : free dependency ;
-
-feature.feature preserve-test-targets : on off : incidental propagated ;
-
-# Register target types.
-type.register TEST : test ;
-type.register COMPILE : : TEST ;
-type.register COMPILE_FAIL : : TEST ;
-type.register RUN_OUTPUT : run ;
-type.register RUN : : TEST ;
-type.register RUN_FAIL : : TEST ;
-type.register LINK_FAIL : : TEST ;
-type.register LINK : : TEST ;
-type.register UNIT_TEST : passed : TEST ;
-
-
-# Declare the rules which create main targets. While the 'type' module already
-# creates rules with the same names for us, we need extra convenience: default
-# name of main target, so write our own versions.
-
-# Helper rule. Create a test target, using basename of first source if no target
-# name is explicitly passed. Remembers the created target in a global variable.
-#
-rule make-test ( target-type : sources + : requirements * : target-name ? )
-{
- target-name ?= $(sources[1]:D=:S=) ;
-
- # Having periods (".") in the target name is problematic because the typed
- # generator will strip the suffix and use the bare name for the file
- # targets. Even though the location-prefix averts problems most times it
- # does not prevent ambiguity issues when referring to the test targets. For
- # example when using the XML log output. So we rename the target to remove
- # the periods, and provide an alias for users.
- local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
-
- local project = [ project.current ] ;
- # The <location-prefix> forces the build system to generate paths in the
- # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
- # post-processing tools to work.
- local t = [ targets.create-typed-target [ type.type-from-rule-name
- $(target-type) ] : $(project) : $(real-name) : $(sources) :
- $(requirements) <location-prefix>$(real-name).test ] ;
-
- # The alias to the real target, per period replacement above.
- if $(real-name) != $(target-name)
- {
- alias $(target-name) : $(t) ;
- }
-
- # Remember the test (for --dump-tests). A good way would be to collect all
- # given a project. This has some technical problems: e.g. we can not call
- # this dump from a Jamfile since projects referred by 'build-project' are
- # not available until the whole Jamfile has been loaded.
- .all-tests += $(t) ;
- return $(t) ;
-}
-
-
-# Note: passing more than one cpp file here is known to fail. Passing a cpp file
-# and a library target works.
-#
-rule compile ( sources + : requirements * : target-name ? )
-{
- return [ make-test compile : $(sources) : $(requirements) : $(target-name) ]
- ;
-}
-
-
-rule compile-fail ( sources + : requirements * : target-name ? )
-{
- return [ make-test compile-fail : $(sources) : $(requirements) :
- $(target-name) ] ;
-}
-
-
-rule link ( sources + : requirements * : target-name ? )
-{
- return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
-}
-
-
-rule link-fail ( sources + : requirements * : target-name ? )
-{
- return [ make-test link-fail : $(sources) : $(requirements) : $(target-name)
- ] ;
-}
-
-
-rule handle-input-files ( input-files * )
-{
- if $(input-files[2])
- {
- # Check that sorting made when creating property-set instance will not
- # change the ordering.
- if [ sequence.insertion-sort $(input-files) ] != $(input-files)
- {
- errors.user-error "Names of input files must be sorted alphabetically"
- : "due to internal limitations" ;
- }
- }
- return <testing.input-file>$(input-files) ;
-}
-
-
-rule run ( sources + : args * : input-files * : requirements * : target-name ? :
- default-build * )
-{
- requirements += <testing.arg>$(args:J=" ") ;
- requirements += [ handle-input-files $(input-files) ] ;
- return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
-}
-
-
-rule run-fail ( sources + : args * : input-files * : requirements * :
- target-name ? : default-build * )
-{
- requirements += <testing.arg>$(args:J=" ") ;
- requirements += [ handle-input-files $(input-files) ] ;
- return [ make-test run-fail : $(sources) : $(requirements) : $(target-name)
- ] ;
-}
-
-
-# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
-IMPORT : alias : : test-suite ;
-
-
-# For all main targets in 'project-module', which are typed targets with type
-# derived from 'TEST', produce some interesting information.
-#
-rule dump-tests
-{
- for local t in $(.all-tests)
- {
- dump-test $(t) ;
- }
-}
-
-
-# Given a project location in normalized form (slashes are forward), compute the
-# name of the Boost library.
-#
-local rule get-library-name ( path )
-{
- # Path is in normalized form, so all slashes are forward.
- local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
- local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
- local match3 = [ MATCH (/status$) : $(path) ] ;
-
- if $(match1) { return $(match1[2]) ; }
- else if $(match2) { return $(match2[2]) ; }
- else if $(match3) { return "" ; }
- else if --dump-tests in [ modules.peek : ARGV ]
- {
- # The 'run' rule and others might be used outside boost. In that case,
- # just return the path, since the 'library name' makes no sense.
- return $(path) ;
- }
-}
-
-
-# Was an XML dump requested?
-.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
-
-
-# Takes a target (instance of 'basic-target') and prints
-# - its type
-# - its name
-# - comments specified via the <test-info> property
-# - relative location of all source from the project root.
-#
-rule dump-test ( target )
-{
- local type = [ $(target).type ] ;
- local name = [ $(target).name ] ;
- local project = [ $(target).project ] ;
-
- local project-root = [ $(project).get project-root ] ;
- local library = [ get-library-name [ path.root [ $(project).get location ]
- [ path.pwd ] ] ] ;
- if $(library)
- {
- name = $(library)/$(name) ;
- }
-
- local sources = [ $(target).sources ] ;
- local source-files ;
- for local s in $(sources)
- {
- if [ class.is-a $(s) : file-reference ]
- {
- local location = [ path.root [ path.root [ $(s).name ]
- [ $(s).location ] ] [ path.pwd ] ] ;
-
- source-files += [ path.relative-to [ path.root $(project-root)
- [ path.pwd ] ] $(location) ] ;
- }
- }
-
- local target-name = [ $(project).get location ] // [ $(target).name ] .test
- ;
- target-name = $(target-name:J=) ;
-
- local r = [ $(target).requirements ] ;
- # Extract values of the <test-info> feature.
- local test-info = [ $(r).get <test-info> ] ;
-
- # If the user requested XML output on the command-line, add the test info to
- # that XML file rather than dumping them to stdout.
- if $(.out-xml)
- {
- local nl = "
-" ;
- .contents on $(.out-xml) +=
- "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
- "$(nl) <target><![CDATA[$(target-name)]]></target>"
- "$(nl) <info><![CDATA[$(test-info)]]></info>"
- "$(nl) <source><![CDATA[$(source-files)]]></source>"
- "$(nl) </test>"
- ;
- }
- else
- {
- # Format them into a single string of quoted strings.
- test-info = \"$(test-info:J=\"\ \")\" ;
-
- ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":"
- \"$(source-files)\" ;
- }
-}
-
-
-# Register generators. Depending on target type, either 'expect-success' or
-# 'expect-failure' rule will be used.
-generators.register-standard testing.expect-success : OBJ : COMPILE ;
-generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
-generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
-generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
-generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
-generators.register-standard testing.expect-success : EXE : LINK ;
-
-# Generator which runs an EXE and captures output.
-generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
-
-# Generator which creates a target if sources run successfully. Differs from RUN
-# in that run output is not captured. The reason why it exists is that the 'run'
-# rule is much better for automated testing, but is not user-friendly (see
-# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
-generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
-
-
-# The action rules called by generators.
-
-# Causes the 'target' to exist after bjam invocation if and only if all the
-# dependencies were successfully built.
-#
-rule expect-success ( target : dependency + : requirements * )
-{
- **passed** $(target) : $(sources) ;
-}
-
-
-# Causes the 'target' to exist after bjam invocation if and only if some of
-# the dependencies were not successfully built.
-#
-rule expect-failure ( target : dependency + : properties * )
-{
- local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
- local marker = $(dependency:G=$(grist)*fail) ;
- (failed-as-expected) $(marker) ;
- FAIL_EXPECTED $(dependency) ;
- LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
- RMOLD $(marker) ;
- DEPENDS $(marker) : $(dependency) ;
- DEPENDS $(target) : $(marker) ;
- **passed** $(target) : $(marker) ;
-}
-
-
-# The rule/action combination used to report successful passing of a test.
-#
-rule **passed**
-{
- # Dump all the tests, if needed. We do it here, since dump should happen
- # only after all Jamfiles have been read, and there is no such place
- # currently defined (but there should be).
- if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
- {
- .dumped-tests = true ;
- dump-tests ;
- }
-
- # Force deletion of the target, in case any dependencies failed to build.
- RMOLD $(<) ;
-}
-
-
-# Used to create test files signifying passed tests.
-#
-actions **passed**
-{
- echo passed > "$(<)"
-}
-
-
-# Used to create replacement object files that do not get created during tests
-# that are expected to fail.
-#
-actions (failed-as-expected)
-{
- echo failed as expected > "$(<)"
-}
-
-
-rule run-path-setup ( target : source : properties * )
-{
- # For testing, we need to make sure that all dynamic libraries needed by the
- # test are found. So, we collect all paths from dependency libraries (via
- # xdll-path property) and add whatever explicit dll-path the user has specified.
- # The resulting paths are added to the environment on each test invocation.
- local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
- dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
- dll-paths += [ on $(source) return $(RUN_PATH) ] ;
- dll-paths = [ sequence.unique $(dll-paths) ] ;
- if $(dll-paths)
- {
- dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
- PATH_SETUP on $(target) = [ common.prepend-path-variable-command
- [ os.shared-library-path-variable ] : $(dll-paths) ] ;
- }
-}
-
-
-local argv = [ modules.peek : ARGV ] ;
-
-toolset.flags testing.capture-output ARGS <testing.arg> ;
-toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
-toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
-
-
-# Runs executable 'sources' and stores stdout in file 'target'. Unless
-# --preserve-test-targets command line option has been specified, removes the
-# executable. The 'targets-to-remove' parameter controls what should be removed:
-# - if 'none', does not remove anything, ever
-# - if empty, removes 'source'
-# - if non-empty and not 'none', contains a list of sources to remove.
-#
-rule capture-output ( target : source : properties * : targets-to-remove * )
-{
- output-file on $(target) = $(target:S=.output) ;
- LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
-
- # The INCLUDES kill a warning about independent target...
- INCLUDES $(target) : $(target:S=.output) ;
- # but it also puts .output into dependency graph, so we must tell jam it is
- # OK if it cannot find the target or updating rule.
- NOCARE $(target:S=.output) ;
-
- # This has a two-fold effect. First, it adds input files to the dependency
- # graph, preventing a warning. Second, it causes input files to be bound
- # before target is created. Therefore, they are bound using SEARCH setting
- # on them and not LOCATE setting of $(target), as in other case (due to jam
- # bug).
- DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
-
- if $(targets-to-remove) = none
- {
- targets-to-remove = ;
- }
- else if ! $(targets-to-remove)
- {
- targets-to-remove = $(source) ;
- }
-
- run-path-setup $(target) : $(source) : $(properties) ;
-
- if [ feature.get-values preserve-test-targets : $(properties) ] = off
- {
- TEMPORARY $(targets-to-remove) ;
- # Set a second action on target that will be executed after capture
- # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
- # always considered succeeded. This is needed for 'run-fail' test. For
- # that test the target will be marked with FAIL_EXPECTED, and without
- # 'ignore' successful execution will be negated and be reported as
- # failure. With 'ignore' we do not detect a case where removing files
- # fails, but it is not likely to happen.
- RmTemps $(target) : $(targets-to-remove) ;
- }
-}
-
-
-if [ os.name ] = NT
-{
- .STATUS = %status% ;
- .SET_STATUS = "set status=%ERRORLEVEL%" ;
- .RUN_OUTPUT_NL = "echo." ;
- .STATUS_0 = "%status% EQU 0 (" ;
- .STATUS_NOT_0 = "%status% NEQ 0 (" ;
- .VERBOSE = "%verbose% EQU 1 (" ;
- .ENDIF = ")" ;
- .SHELL_SET = "set " ;
- .CATENATE = type ;
- .CP = copy ;
-}
-else
-{
- .STATUS = "$status" ;
- .SET_STATUS = "status=$?" ;
- .RUN_OUTPUT_NL = "echo" ;
- .STATUS_0 = "test $status -eq 0 ; then" ;
- .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
- .VERBOSE = "test $verbose -eq 1 ; then" ;
- .ENDIF = "fi" ;
- .SHELL_SET = "" ;
- .CATENATE = cat ;
- .CP = cp ;
-}
-
-
-.VERBOSE_TEST = 0 ;
-if --verbose-test in [ modules.peek : ARGV ]
-{
- .VERBOSE_TEST = 1 ;
-}
-
-
-.RM = [ common.rm-command ] ;
-
-
-actions capture-output bind INPUT_FILES output-file
-{
- $(PATH_SETUP)
- $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
- $(.SET_STATUS)
- $(.RUN_OUTPUT_NL) >> "$(output-file)"
- echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
- if $(.STATUS_0)
- $(.CP) "$(output-file)" "$(<)"
- $(.ENDIF)
- $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
- if $(.STATUS_NOT_0)
- $(.SHELL_SET)verbose=1
- $(.ENDIF)
- if $(.VERBOSE)
- echo ====== BEGIN OUTPUT ======
- $(.CATENATE) "$(output-file)"
- echo ====== END OUTPUT ======
- $(.ENDIF)
- exit $(.STATUS)
-}
-
-
-actions quietly updated ignore piecemeal together RmTemps
-{
- $(.RM) "$(>)"
-}
-
-
-.MAKE_FILE = [ common.file-creation-command ] ;
-
-toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
-toolset.flags testing.unit-test ARGS <testing.arg> ;
-
-
-rule unit-test ( target : source : properties * )
-{
- run-path-setup $(target) : $(source) : $(properties) ;
-}
-
-
-actions unit-test
-{
- $(PATH_SETUP)
- $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
-}
-
-
-IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
- : : compile compile-fail run run-fail link link-fail ;
-
-
-type.register TIME : time ;
-generators.register-standard testing.time : : TIME ;
-
-
-rule record-time ( target : source : start end user system )
-{
- local src-string = [$(source:G=:J=",")"] " ;
- USER_TIME on $(target) += $(src-string)$(user) ;
- SYSTEM_TIME on $(target) += $(src-string)$(system) ;
-}
-
-
-IMPORT testing : record-time : : testing.record-time ;
-
-
-# Calling this rule requests that Boost Build time how long it takes to build the
-# 'source' target and display the results both on the standard output and in the
-# 'target' file.
-#
-rule time ( target : source : properties * )
-{
- # Set up rule for recording timing information.
- __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
-
- # Make sure that the source is rebuilt any time we need to retrieve that
- # information.
- REBUILDS $(target) : $(source) ;
-}
-
-
-actions time
-{
- echo user: $(USER_TIME)
- echo system: $(SYSTEM_TIME)
-
- echo user: $(USER_TIME)" seconds" > "$(<)"
- echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
-}
diff --git a/jam-files/boost-build/tools/testing.py b/jam-files/boost-build/tools/testing.py
deleted file mode 100644
index 3b53500c..00000000
--- a/jam-files/boost-build/tools/testing.py
+++ /dev/null
@@ -1,342 +0,0 @@
-# Status: ported, except for --out-xml
-# Base revision: 64488
-#
-# Copyright 2005 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module implements regression testing framework. It declares a number of
-# main target rules which perform some action and, if the results are OK,
-# creates an output file.
-#
-# The exact list of rules is:
-# 'compile' -- creates .test file if compilation of sources was
-# successful.
-# 'compile-fail' -- creates .test file if compilation of sources failed.
-# 'run' -- creates .test file if running the executable produced from
-# sources was successful. Also leaves behind .output file
-# with the output from program run.
-# 'run-fail' -- same as above, but .test file is created if running fails.
-#
-# In all cases, presence of .test file is an indication that the test passed.
-# For more convenient reporting, you might want to use C++ Boost regression
-# testing utilities (see http://www.boost.org/more/regression.html).
-#
-# For historical reasons, a 'unit-test' rule is available which has the same
-# syntax as 'exe' and behaves just like 'run'.
-
-# Things to do:
-# - Teach compiler_status to handle Jamfile.v2.
-# Notes:
-# - <no-warn> is not implemented, since it is Como-specific, and it is not
-# clear how to implement it
-# - std::locale-support is not implemented (it is used in one test).
-
-import b2.build.feature as feature
-import b2.build.type as type
-import b2.build.targets as targets
-import b2.build.generators as generators
-import b2.build.toolset as toolset
-import b2.tools.common as common
-import b2.util.option as option
-import b2.build_system as build_system
-
-
-
-from b2.manager import get_manager
-from b2.util import stem, bjam_signature
-from b2.util.sequence import unique
-
-import bjam
-
-import re
-import os.path
-import sys
-
-def init():
- pass
-
-# Feature controlling the command used to launch test programs.
-feature.feature("testing.launcher", [], ["free", "optional"])
-
-feature.feature("test-info", [], ["free", "incidental"])
-feature.feature("testing.arg", [], ["free", "incidental"])
-feature.feature("testing.input-file", [], ["free", "dependency"])
-
-feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"])
-
-# Register target types.
-type.register("TEST", ["test"])
-type.register("COMPILE", [], "TEST")
-type.register("COMPILE_FAIL", [], "TEST")
-
-type.register("RUN_OUTPUT", ["run"])
-type.register("RUN", [], "TEST")
-type.register("RUN_FAIL", [], "TEST")
-
-type.register("LINK", [], "TEST")
-type.register("LINK_FAIL", [], "TEST")
-type.register("UNIT_TEST", ["passed"], "TEST")
-
-__all_tests = []
-
-# Declare the rules which create main targets. While the 'type' module already
-# creates rules with the same names for us, we need extra convenience: default
-# name of main target, so write our own versions.
-
-# Helper rule. Create a test target, using basename of first source if no target
-# name is explicitly passed. Remembers the created target in a global variable.
-def make_test(target_type, sources, requirements, target_name=None):
-
- if not target_name:
- target_name = stem(os.path.basename(sources[0]))
-
- # Having periods (".") in the target name is problematic because the typed
- # generator will strip the suffix and use the bare name for the file
- # targets. Even though the location-prefix averts problems most times it
- # does not prevent ambiguity issues when referring to the test targets. For
- # example when using the XML log output. So we rename the target to remove
- # the periods, and provide an alias for users.
- real_name = target_name.replace(".", "~")
-
- project = get_manager().projects().current()
- # The <location-prefix> forces the build system to generate paths in the
- # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
- # post-processing tools to work.
- t = get_manager().targets().create_typed_target(
- type.type_from_rule_name(target_type), project, real_name, sources,
- requirements + ["<location-prefix>" + real_name + ".test"], [], [])
-
- # The alias to the real target, per period replacement above.
- if real_name != target_name:
- get_manager().projects().project_rules().all_names_["alias"](
- target_name, [t])
-
- # Remember the test (for --dump-tests). A good way would be to collect all
- # given a project. This has some technical problems: e.g. we can not call
- # this dump from a Jamfile since projects referred by 'build-project' are
- # not available until the whole Jamfile has been loaded.
- __all_tests.append(t)
- return t
-
-
-# Note: passing more than one cpp file here is known to fail. Passing a cpp file
-# and a library target works.
-#
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def compile(sources, requirements, target_name=None):
- return make_test("compile", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def compile_fail(sources, requirements, target_name=None):
- return make_test("compile-fail", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def link(sources, requirements, target_name=None):
- return make_test("link", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def link_fail(sources, requirements, target_name=None):
- return make_test("link-fail", sources, requirements, target_name)
-
-def handle_input_files(input_files):
- if len(input_files) > 1:
- # Check that sorting made when creating property-set instance will not
- # change the ordering.
- if sorted(input_files) != input_files:
- get_manager().errors()("Names of input files must be sorted alphabetically\n" +
- "due to internal limitations")
- return ["<testing.input-file>" + f for f in input_files]
-
-@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
- ["requirements", "*"], ["target_name", "?"],
- ["default_build", "*"]))
-def run(sources, args, input_files, requirements, target_name=None, default_build=[]):
- if args:
- requirements.append("<testing.arg>" + " ".join(args))
- requirements.extend(handle_input_files(input_files))
- return make_test("run", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
- ["requirements", "*"], ["target_name", "?"],
- ["default_build", "*"]))
-def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]):
- if args:
- requirements.append("<testing.arg>" + " ".join(args))
- requirements.extend(handle_input_files(input_files))
- return make_test("run-fail", sources, requirements, target_name)
-
-# Register all the rules
-for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]:
- get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_")))
-
-# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
-from b2.build.alias import alias
-get_manager().projects().add_rule("test-suite", alias)
-
-# For all main targets in 'project-module', which are typed targets with type
-# derived from 'TEST', produce some interesting information.
-#
-def dump_tests():
- for t in __all_tests:
- dump_test(t)
-
-# Given a project location in normalized form (slashes are forward), compute the
-# name of the Boost library.
-#
-__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)")
-__ln2 = re.compile("/(tools|libs)/(.*)$")
-__ln3 = re.compile("(/status$)")
-def get_library_name(path):
-
- path = path.replace("\\", "/")
- match1 = __ln1.match(path)
- match2 = __ln2.match(path)
- match3 = __ln3.match(path)
-
- if match1:
- return match1.group(2)
- elif match2:
- return match2.group(2)
- elif match3:
- return ""
- elif option.get("dump-tests", False, True):
- # The 'run' rule and others might be used outside boost. In that case,
- # just return the path, since the 'library name' makes no sense.
- return path
-
-# Was an XML dump requested?
-__out_xml = option.get("out-xml", False, True)
-
-# Takes a target (instance of 'basic-target') and prints
-# - its type
-# - its name
-# - comments specified via the <test-info> property
-# - relative location of all source from the project root.
-#
-def dump_test(target):
- type = target.type()
- name = target.name()
- project = target.project()
-
- project_root = project.get('project-root')
- library = get_library_name(os.path.abspath(project.get('location')))
- if library:
- name = library + "/" + name
-
- sources = target.sources()
- source_files = []
- for s in sources:
- if isinstance(s, targets.FileReference):
- location = os.path.abspath(os.path.join(s.location(), s.name()))
- source_files.append(os.path.relpath(location, os.path.abspath(project_root)))
-
- target_name = project.get('location') + "//" + target.name() + ".test"
-
- test_info = target.requirements().get('test-info')
- test_info = " ".join('"' + ti + '"' for ti in test_info)
-
- # If the user requested XML output on the command-line, add the test info to
- # that XML file rather than dumping them to stdout.
- #if $(.out-xml)
- #{
-# local nl = "
-#" ;
-# .contents on $(.out-xml) +=
-# "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
-# "$(nl) <target><![CDATA[$(target-name)]]></target>"
-# "$(nl) <info><![CDATA[$(test-info)]]></info>"
-# "$(nl) <source><![CDATA[$(source-files)]]></source>"
-# "$(nl) </test>"
-# ;
-# }
-# else
-
- source_files = " ".join('"' + s + '"' for s in source_files)
- if test_info:
- print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files)
- else:
- print 'boost-test(%s) "%s" : %s' % (type, name, source_files)
-
-# Register generators. Depending on the target type, either the 'expect-success'
-# or the 'expect-failure' rule will be used.
-generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"])
-generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"])
-generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"])
-generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"])
-generators.register_standard("testing.expect-success", ["EXE"], ["LINK"])
-generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"])
-
-# Generator which runs an EXE and captures output.
-generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"])
-
-# Generator which creates a target if sources run successfully. Differs from RUN
-# in that run output is not captured. The reason why it exists is that the 'run'
-# rule is much better for automated testing, but is not user-friendly (see
-# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
-generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"])
-
-# FIXME: if those calls are after bjam.call, then bjam will crash
-# when toolset.flags calls bjam.caller.
-toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"])
-toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"])
-toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"])
-
-toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"])
-toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"])
-
-type.register("TIME", ["time"])
-generators.register_standard("testing.time", [], ["TIME"])
-
-
-# The following code sets up actions for this module. It's pretty convoluted,
-# but the basic point is that most of the actions are defined by Jam code
-# contained in testing-aux.jam, which we load into a Jam module named 'testing'.
-
-def run_path_setup(target, sources, ps):
-
- # For testing, we need to make sure that all dynamic libraries needed by the
- # test are found. So, we collect all paths from dependency libraries (via
- # xdll-path property) and add whatever explicit dll-path user has specified.
- # The resulting paths are added to the environment on each test invocation.
- dll_paths = ps.get('dll-path')
- dll_paths.extend(ps.get('xdll-path'))
- dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH"))
- dll_paths = unique(dll_paths)
- if dll_paths:
- bjam.call("set-target-variable", target, "PATH_SETUP",
- common.prepend_path_variable_command(
- common.shared_library_path_variable(), dll_paths))
-
-def capture_output_setup(target, sources, ps):
- run_path_setup(target, sources, ps)
-
- if ps.get('preserve-test-targets') == ['off']:
- bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1")
-
-get_manager().engine().register_bjam_action("testing.capture-output",
- capture_output_setup)
-
-
-path = os.path.dirname(get_manager().projects().loaded_tool_module_path_[__name__])
-import b2.util.os_j
-get_manager().projects().project_rules()._import_rule("testing", "os.name",
- b2.util.os_j.name)
-import b2.tools.common
-get_manager().projects().project_rules()._import_rule("testing", "common.rm-command",
- b2.tools.common.rm_command)
-get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command",
- b2.tools.common.file_creation_command)
-
-bjam.call("load", "testing", os.path.join(path, "testing-aux.jam"))
-
-
-for name in ["expect-success", "expect-failure", "time"]:
- get_manager().engine().register_bjam_action("testing." + name)
-
-get_manager().engine().register_bjam_action("testing.unit-test",
- run_path_setup)
-
-if option.get("dump-tests", False, True):
- build_system.add_pre_build_hook(dump_tests)
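
As a side note, the boost-test lines printed by dump_test above follow a simple format; the short standalone sketch below (a hypothetical helper, not part of the deleted module) reproduces that formatting for both the with- and without-test-info cases.

def format_boost_test_line(test_type, name, source_files, test_info=()):
    # Mirrors the two print statements at the end of dump_test above.
    sources = " ".join('"%s"' % s for s in source_files)
    if test_info:
        info = " ".join('"%s"' % ti for ti in test_info)
        return 'boost-test(%s) "%s" [%s] : %s' % (test_type, name, info, sources)
    return 'boost-test(%s) "%s" : %s' % (test_type, name, sources)

# Example (hypothetical values):
# format_boost_test_line("RUN", "regex/captures_test", ["captures_test.cpp"])
# -> 'boost-test(RUN) "regex/captures_test" : "captures_test.cpp"'
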
diff --git a/jam-files/boost-build/tools/types/__init__.py b/jam-files/boost-build/tools/types/__init__.py
deleted file mode 100644
index f972b714..00000000
--- a/jam-files/boost-build/tools/types/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-__all__ = [
- 'asm',
- 'cpp',
- 'exe',
- 'html',
- 'lib',
- 'obj',
- 'rsp',
-]
-
-def register_all ():
- for i in __all__:
- m = __import__ (__name__ + '.' + i)
- reg = i + '.register ()'
- #exec (reg)
-
-# TODO: (PF) I thought these would be imported automatically. Does anyone know why they aren't?
-register_all ()
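
The TODO above notes that the submodules are not imported automatically, and the exec call that would invoke each register() is commented out. A hedged sketch of what register_all appears intended to do (the package path b2.tools.types is an assumption here, not taken from this diff):

import importlib

def register_all(package="b2.tools.types",
                 names=("asm", "cpp", "exe", "html", "lib", "obj", "rsp")):
    # Import each type module and invoke its register() function; the modules
    # shown in this diff also call register() at import time, so the explicit
    # call is belt and braces.
    for name in names:
        module = importlib.import_module(package + "." + name)
        if hasattr(module, "register"):
            module.register()
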
diff --git a/jam-files/boost-build/tools/types/asm.jam b/jam-files/boost-build/tools/types/asm.jam
deleted file mode 100644
index a340db36..00000000
--- a/jam-files/boost-build/tools/types/asm.jam
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright Craig Rodrigues 2005. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-type ASM : s S asm ;
diff --git a/jam-files/boost-build/tools/types/asm.py b/jam-files/boost-build/tools/types/asm.py
deleted file mode 100644
index b4e1c30e..00000000
--- a/jam-files/boost-build/tools/types/asm.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright Craig Rodrigues 2005.
-# Copyright (c) 2008 Steven Watanabe
-#
-# Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register():
- type.register_type('ASM', ['s', 'S', 'asm'])
-
-register()
diff --git a/jam-files/boost-build/tools/types/cpp.jam b/jam-files/boost-build/tools/types/cpp.jam
deleted file mode 100644
index 3159cdd7..00000000
--- a/jam-files/boost-build/tools/types/cpp.jam
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright David Abrahams 2004.
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Copyright 2010 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-import type ;
-import scanner ;
-
-class c-scanner : scanner
-{
- import path ;
- import regex ;
- import scanner ;
- import sequence ;
- import virtual-target ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
-
- for local i in $(includes)
- {
- self.includes += [ sequence.transform path.native
- : [ regex.split $(i:G=) "&&" ] ] ;
- }
- }
-
- rule pattern ( )
- {
- return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
- angle = [ sequence.transform path.native : $(angle) ] ;
- local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
- quoted = [ sequence.transform path.native : $(quoted) ] ;
-
- # CONSIDER: the new scoping rule seem to defeat "on target" variables.
- local g = [ on $(target) return $(HDRGRIST) ] ;
- local b = [ NORMALIZE_PATH $(binding:D) ] ;
-
- # Attach binding of including file to included targets. When a target is
- # directly created from virtual target this extra information is
- # unnecessary. But in other cases, it allows us to distinguish between
- # two headers of the same name included from different places. We do not
- # need this extra information for angle includes, since they should not
- # depend on including file (we can not get literal "." in include path).
- local g2 = $(g)"#"$(b) ;
-
- angle = $(angle:G=$(g)) ;
- quoted = $(quoted:G=$(g2)) ;
-
- local all = $(angle) $(quoted) ;
-
- INCLUDES $(target) : $(all) ;
- NOCARE $(all) ;
- SEARCH on $(angle) = $(self.includes:G=) ;
- SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
-
- # Just propagate the current scanner to includes in hope that includes
- # do not change scanners.
- scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
-
- ISFILE $(angle) $(quoted) ;
- }
-}
-
-scanner.register c-scanner : include ;
-
-type.register CPP : cpp cxx cc ;
-type.register H : h ;
-type.register HPP : hpp : H ;
-type.register C : c ;
-
-# In most cases where a CPP file or an H file is a source of some action, we
-# should rebuild the result if any of the files included by CPP/H change. One
-# case where this is not needed is installation, which is handled specially.
-type.set-scanner CPP : c-scanner ;
-type.set-scanner C : c-scanner ;
-# One case where scanning of H/HPP files is necessary is PCH generation -- if
-# any header included by an HPP file being precompiled changes, we need to
-# recompile the header.
-type.set-scanner H : c-scanner ;
-type.set-scanner HPP : c-scanner ;
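
The c-scanner pattern above captures angle-bracket and quoted includes in separate groups; a minimal Python illustration (not the Boost.Build scanner itself) of applying the same regular expression:

import re

# The same pattern used by the c-scanner rule above.
INCLUDE_RE = re.compile(r'#[ \t]*include[ ]*(<(.*)>|"(.*)")')

def scan_includes(text):
    """Return (angle, quoted) include names found in a source buffer."""
    angle, quoted = [], []
    for match in INCLUDE_RE.finditer(text):
        if match.group(2) is not None:
            angle.append(match.group(2))
        else:
            quoted.append(match.group(3))
    return angle, quoted

# scan_includes('#include <vector>\n#include "config.hpp"\n')
# -> (['vector'], ['config.hpp'])
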
diff --git a/jam-files/boost-build/tools/types/cpp.py b/jam-files/boost-build/tools/types/cpp.py
deleted file mode 100644
index 7b56111c..00000000
--- a/jam-files/boost-build/tools/types/cpp.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register ():
- type.register_type ('CPP', ['cpp', 'cxx', 'cc'])
-
-register ()
diff --git a/jam-files/boost-build/tools/types/exe.jam b/jam-files/boost-build/tools/types/exe.jam
deleted file mode 100644
index 47109513..00000000
--- a/jam-files/boost-build/tools/types/exe.jam
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-
-type.register EXE ;
-type.set-generated-target-suffix EXE : <target-os>windows : "exe" ;
-type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ;
diff --git a/jam-files/boost-build/tools/types/exe.py b/jam-files/boost-build/tools/types/exe.py
deleted file mode 100644
index a4935e24..00000000
--- a/jam-files/boost-build/tools/types/exe.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register ():
- type.register_type ('EXE', ['exe'], None, ['NT', 'CYGWIN'])
- type.register_type ('EXE', [], None, [])
-
-register ()
diff --git a/jam-files/boost-build/tools/types/html.jam b/jam-files/boost-build/tools/types/html.jam
deleted file mode 100644
index 5cd337d0..00000000
--- a/jam-files/boost-build/tools/types/html.jam
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-type HTML : html ;
diff --git a/jam-files/boost-build/tools/types/html.py b/jam-files/boost-build/tools/types/html.py
deleted file mode 100644
index 63af4d90..00000000
--- a/jam-files/boost-build/tools/types/html.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register ():
- type.register_type ('HTML', ['html'])
-
-register ()
diff --git a/jam-files/boost-build/tools/types/lib.jam b/jam-files/boost-build/tools/types/lib.jam
deleted file mode 100644
index 854ab8fd..00000000
--- a/jam-files/boost-build/tools/types/lib.jam
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import type ; # for set-generated-target-suffix
-import os ;
-
-# The following naming scheme is used for libraries.
-#
-# On *nix:
-# libxxx.a static library
-# libxxx.so shared library
-#
-# On windows (msvc)
-# libxxx.lib static library
-# xxx.dll DLL
-# xxx.lib import library
-#
-# On windows (mingw):
-# libxxx.a static library
-# libxxx.dll DLL
-# libxxx.dll.a import library
-#
-# On cygwin i.e. <target-os>cygwin
-# libxxx.a static library
-# cygxxx.dll DLL
-# libxxx.dll.a import library
-#
-
-type.register LIB ;
-
-# FIXME: should not register both extensions on both platforms.
-type.register STATIC_LIB : a lib : LIB ;
-
-# The 'lib' prefix is used everywhere
-type.set-generated-target-prefix STATIC_LIB : : lib ;
-
-# Use '.lib' suffix for windows
-type.set-generated-target-suffix STATIC_LIB : <target-os>windows : lib ;
-
-# Except with gcc.
-type.set-generated-target-suffix STATIC_LIB : <toolset>gcc <target-os>windows : a ;
-
-# Use xxx.lib for import libs
-type IMPORT_LIB : : STATIC_LIB ;
-type.set-generated-target-prefix IMPORT_LIB : : "" ;
-type.set-generated-target-suffix IMPORT_LIB : : lib ;
-
-# Except with gcc (mingw or cygwin), where we use libxxx.dll.a
-type.set-generated-target-prefix IMPORT_LIB : <toolset>gcc : lib ;
-type.set-generated-target-suffix IMPORT_LIB : <toolset>gcc : dll.a ;
-
-type.register SHARED_LIB : so dll dylib : LIB ;
-
-# Both mingw and cygwin use libxxx.dll naming scheme.
-# On Linux, use "lib" prefix
-type.set-generated-target-prefix SHARED_LIB : : lib ;
-# But don't use it on windows
-type.set-generated-target-prefix SHARED_LIB : <target-os>windows : "" ;
-# But use it again on mingw
-type.set-generated-target-prefix SHARED_LIB : <toolset>gcc <target-os>windows : lib ;
-# And use 'cyg' on cygwin
-type.set-generated-target-prefix SHARED_LIB : <target-os>cygwin : cyg ;
-
-
-type.set-generated-target-suffix SHARED_LIB : <target-os>windows : dll ;
-type.set-generated-target-suffix SHARED_LIB : <target-os>cygwin : dll ;
-type.set-generated-target-suffix SHARED_LIB : <target-os>darwin : dylib ;
-
-type SEARCHED_LIB : : LIB ;
-# This is needed so that when we create a target of SEARCHED_LIB
-# type, there's no prefix or suffix automatically added.
-type.set-generated-target-prefix SEARCHED_LIB : : "" ;
-type.set-generated-target-suffix SEARCHED_LIB : : "" ;
diff --git a/jam-files/boost-build/tools/types/lib.py b/jam-files/boost-build/tools/types/lib.py
deleted file mode 100644
index d0ec1fb5..00000000
--- a/jam-files/boost-build/tools/types/lib.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Status: ported
-# Base revision: 64456.
-# Copyright David Abrahams 2004.
-# Copyright Vladimir Prus 2010.
-# Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import b2.build.type as type
-
-# The following naming scheme is used for libraries.
-#
-# On *nix:
-# libxxx.a static library
-# libxxx.so shared library
-#
-# On windows (msvc)
-# libxxx.lib static library
-# xxx.dll DLL
-# xxx.lib import library
-#
-# On windows (mingw):
-# libxxx.a static library
-# libxxx.dll DLL
-# libxxx.dll.a import library
-#
-# On cygwin i.e. <target-os>cygwin
-# libxxx.a static library
-# cygxxx.dll DLL
-# libxxx.dll.a import library
-#
-
-type.register('LIB')
-
-# FIXME: should not register both extensions on both platforms.
-type.register('STATIC_LIB', ['a', 'lib'], 'LIB')
-
-# The 'lib' prefix is used everywhere
-type.set_generated_target_prefix('STATIC_LIB', [], 'lib')
-
-# Use '.lib' suffix for windows
-type.set_generated_target_suffix('STATIC_LIB', ['<target-os>windows'], 'lib')
-
-# Except with gcc.
-type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>windows'], 'a')
-
-# Use xxx.lib for import libs
-type.register('IMPORT_LIB', [], 'STATIC_LIB')
-type.set_generated_target_prefix('IMPORT_LIB', [], '')
-type.set_generated_target_suffix('IMPORT_LIB', [], 'lib')
-
-# Except with gcc (mingw or cygwin), where we use libxxx.dll.a
-type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc'], 'lib')
-type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc'], 'dll.a')
-
-type.register('SHARED_LIB', ['so', 'dll', 'dylib'], 'LIB')
-
-# Both mingw and cygwin use libxxx.dll naming scheme.
-# On Linux, use "lib" prefix
-type.set_generated_target_prefix('SHARED_LIB', [], 'lib')
-# But don't use it on windows
-type.set_generated_target_prefix('SHARED_LIB', ['<target-os>windows'], '')
-# But use it again on mingw
-type.set_generated_target_prefix('SHARED_LIB', ['<toolset>gcc', '<target-os>windows'], 'lib')
-# And use 'cyg' on cygwin
-type.set_generated_target_prefix('SHARED_LIB', ['<target-os>cygwin'], 'cyg')
-
-
-type.set_generated_target_suffix('SHARED_LIB', ['<target-os>windows'], 'dll')
-type.set_generated_target_suffix('SHARED_LIB', ['<target-os>cygwin'], 'dll')
-type.set_generated_target_suffix('SHARED_LIB', ['<target-os>darwin'], 'dylib')
-
-type.register('SEARCHED_LIB', [], 'LIB')
-# This is needed so that when we create a target of SEARCHED_LIB
-# type, there's no prefix or suffix automatically added.
-type.set_generated_target_prefix('SEARCHED_LIB', [], '')
-type.set_generated_target_suffix('SEARCHED_LIB', [], '')
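
For illustration, the naming scheme documented in the comments above can be summarized in a small table-driven helper (the platform labels are just keys for this sketch, not Boost.Build features):

def shared_lib_filename(name, platform):
    # Prefix/suffix pairs taken from the comment block above.
    scheme = {
        "linux":         ("lib", ".so"),
        "windows-msvc":  ("",    ".dll"),
        "windows-mingw": ("lib", ".dll"),
        "cygwin":        ("cyg", ".dll"),
        "darwin":        ("lib", ".dylib"),
    }
    prefix, suffix = scheme[platform]
    return prefix + name + suffix

# shared_lib_filename("z", "cygwin") -> "cygz.dll"
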
diff --git a/jam-files/boost-build/tools/types/obj.jam b/jam-files/boost-build/tools/types/obj.jam
deleted file mode 100644
index 6afbcaa6..00000000
--- a/jam-files/boost-build/tools/types/obj.jam
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-
-type.register OBJ : o obj ;
-type.set-generated-target-suffix OBJ : <target-os>windows : obj ;
-type.set-generated-target-suffix OBJ : <target-os>cygwin : obj ;
diff --git a/jam-files/boost-build/tools/types/obj.py b/jam-files/boost-build/tools/types/obj.py
deleted file mode 100644
index e61e99a8..00000000
--- a/jam-files/boost-build/tools/types/obj.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register ():
- type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN'])
- type.register_type ('OBJ', ['o'])
-
-register ()
diff --git a/jam-files/boost-build/tools/types/objc.jam b/jam-files/boost-build/tools/types/objc.jam
deleted file mode 100644
index 709cbd0c..00000000
--- a/jam-files/boost-build/tools/types/objc.jam
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright Rene Rivera 2008, 2010.
-# Distributed under the Boost Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-import type ;
-import scanner ;
-import types/cpp ;
-
-class objc-scanner : c-scanner
-{
- rule __init__ ( includes * )
- {
- c-scanner.__init__ $(includes) ;
- }
-
- rule pattern ( )
- {
- return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ;
- }
-}
-
-scanner.register objc-scanner : include ;
-
-type.register OBJECTIVE_C : m ;
-type.register OBJECTIVE_CPP : mm ;
-type.set-scanner OBJECTIVE_C : objc-scanner ;
-type.set-scanner OBJECTIVE_CPP : objc-scanner ;
diff --git a/jam-files/boost-build/tools/types/preprocessed.jam b/jam-files/boost-build/tools/types/preprocessed.jam
deleted file mode 100644
index c9187ba6..00000000
--- a/jam-files/boost-build/tools/types/preprocessed.jam
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright Steven Watanabe 2011
-# Distributed under the Boost Software License Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-
-type.register PREPROCESSED_C : i : C ;
-type.register PREPROCESSED_CPP : ii : CPP ;
diff --git a/jam-files/boost-build/tools/types/qt.jam b/jam-files/boost-build/tools/types/qt.jam
deleted file mode 100644
index 6d1dfbd4..00000000
--- a/jam-files/boost-build/tools/types/qt.jam
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright Vladimir Prus 2005. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-type UI : ui ;
-type QRC : qrc ;
-type MOCCABLE_CPP ;
-type MOCCABLE_H ;
-# Result of running moc.
-type MOC : moc : H ;
diff --git a/jam-files/boost-build/tools/types/register.jam b/jam-files/boost-build/tools/types/register.jam
deleted file mode 100644
index 203992ca..00000000
--- a/jam-files/boost-build/tools/types/register.jam
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# This module's job is to automatically import all the type
-# registration modules in its directory.
-import type os path modules ;
-
-# Register the given type on the specified OSes, or on remaining OSes
-# if os is not specified. This rule is injected into each of the type
-# modules for the sake of convenience.
-local rule type ( type : suffixes * : base-type ? : os * )
-{
- if ! [ type.registered $(type) ]
- {
- if ( ! $(os) ) || [ os.name ] in $(os)
- {
- type.register $(type) : $(suffixes) : $(base-type) ;
- }
- }
-}
-
-.this-module's-file = [ modules.binding $(__name__) ] ;
-.this-module's-dir = [ path.parent $(.this-module's-file) ] ;
-.sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ;
-.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ;
-
-# A loop over all modules in this directory
-for m in $(.sibling-modules)
-{
- m = [ path.basename $(m) ] ;
- m = types/$(m) ;
-
- # Inject the type rule into the new module
- IMPORT $(__name__) : type : $(m) : type ;
- import $(m) ;
-}
-
-
diff --git a/jam-files/boost-build/tools/types/rsp.jam b/jam-files/boost-build/tools/types/rsp.jam
deleted file mode 100644
index bdf8a7c9..00000000
--- a/jam-files/boost-build/tools/types/rsp.jam
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-type RSP : rsp ;
diff --git a/jam-files/boost-build/tools/types/rsp.py b/jam-files/boost-build/tools/types/rsp.py
deleted file mode 100644
index ccb379e9..00000000
--- a/jam-files/boost-build/tools/types/rsp.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register ():
- type.register_type ('RSP', ['rsp'])
-
-register ()
diff --git a/jam-files/boost-build/tools/unix.jam b/jam-files/boost-build/tools/unix.jam
deleted file mode 100644
index 75949851..00000000
--- a/jam-files/boost-build/tools/unix.jam
+++ /dev/null
@@ -1,224 +0,0 @@
-# Copyright (c) 2004 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This file implements linking semantics common to all unixes. On unix, static
-# libraries must be specified in a fixed order on the linker command line.
-# Generators declared here store information about the order and use it properly.
-
-import feature ;
-import "class" : new ;
-import generators ;
-import type ;
-import set ;
-import order ;
-import builtin ;
-
-class unix-linking-generator : linking-generator
-{
- import property-set ;
- import type ;
- import unix ;
-
- rule __init__ ( id
- composing ? : # Specify if generator is composing. The generator will be
- # composing if non-empty string is passed, or parameter is
- # not given. To make generator non-composing, pass empty
- # string ("")
- source-types + : target-types + :
- requirements * )
- {
- composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
- $(requirements) ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- local result = [ linking-generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
- unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
-
- return $(result) ;
- }
-
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local sources2 ;
- local libraries ;
- for local l in $(sources)
- {
- if [ type.is-derived [ $(l).type ] LIB ]
- {
- libraries += $(l) ;
- }
- else
- {
- sources2 += $(l) ;
- }
- }
-
- sources = $(sources2) [ unix.order-libraries $(libraries) ] ;
-
- return [ linking-generator.generated-targets $(sources) : $(property-set)
- : $(project) $(name) ] ;
- }
-
-}
-
-class unix-archive-generator : archive-generator
-{
- import unix ;
-
- rule __init__ ( id composing ? : source-types + : target-types + :
- requirements * )
- {
- composing ?= true ;
- archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
- $(requirements) ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- local result = [ archive-generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
- unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
-
- return $(result) ;
-
- }
-}
-
-class unix-searched-lib-generator : searched-lib-generator
-{
- import unix ;
- rule __init__ ( * : * )
- {
- generator.__init__
- $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule optional-properties ( )
- {
- return $(self.requirements) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- local result = [ searched-lib-generator.run $(project) $(name)
- : $(property-set) : $(sources) ] ;
-
- unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
-
- return $(result) ;
- }
-}
-
-class unix-prebuilt-lib-generator : generator
-{
- import unix ;
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- local f = [ $(property-set).get <file> ] ;
- unix.set-library-order-aux $(f) : $(sources) ;
- return $(f) $(sources) ;
- }
-}
-
-generators.register
- [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB
- : <file> <toolset>unix ] ;
-
-generators.override unix.prebuilt : builtin.lib-generator ;
-
-
-# Declare generators
-generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE
- : <toolset>unix ] ;
-
-generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB
- : <toolset>unix ] ;
-
-generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB
- : <toolset>unix ] ;
-
-generators.register [ new unix-searched-lib-generator
- unix.searched-lib-generator : : SEARCHED_LIB : <toolset>unix ] ;
-
-
-# Derived toolsets must specify their own actions.
-actions link {
-}
-
-actions link.dll {
-}
-
-actions archive {
-}
-
-actions searched-lib-generator {
-}
-
-actions prebuilt {
-}
-
-
-
-
-
-.order = [ new order ] ;
-
-rule set-library-order-aux ( from * : to * )
-{
- for local f in $(from)
- {
- for local t in $(to)
- {
- if $(f) != $(t)
- {
- $(.order).add-pair $(f) $(t) ;
- }
- }
- }
-}
-
-rule set-library-order ( sources * : property-set : result * )
-{
- local used-libraries ;
- local deps = [ $(property-set).dependency ] ;
- for local l in $(sources) $(deps:G=)
- {
- if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
- {
- used-libraries += $(l) ;
- }
- }
-
- local created-libraries ;
- for local l in $(result)
- {
- if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
- {
- created-libraries += $(l) ;
- }
- }
-
- created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ;
- set-library-order-aux $(created-libraries) : $(used-libraries) ;
-}
-
-rule order-libraries ( libraries * )
-{
- local r = [ $(.order).order $(libraries) ] ;
- return $(r) ;
-}
- \ No newline at end of file
diff --git a/jam-files/boost-build/tools/unix.py b/jam-files/boost-build/tools/unix.py
deleted file mode 100644
index d409c2e4..00000000
--- a/jam-files/boost-build/tools/unix.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright (c) 2004 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-""" This file implements linking semantics common to all unixes. On unix, static
- libraries must be specified in a fixed order on the linker command line. Generators
- declared there store information about the order and use it properly.
-"""
-
-import builtin
-from b2.build import generators, type
-from b2.util.utility import *
-from b2.util import set, sequence
-
-class UnixLinkingGenerator (builtin.LinkingGenerator):
-
- def __init__ (self, id, composing, source_types, target_types, requirements):
- builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements)
-
- def run (self, project, name, prop_set, sources):
- result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources)
- if result:
- set_library_order (project.manager (), sources, prop_set, result [1])
-
- return result
-
- def generated_targets (self, sources, prop_set, project, name):
- sources2 = []
- libraries = []
- for l in sources:
- if type.is_derived (l.type (), 'LIB'):
- libraries.append (l)
-
- else:
- sources2.append (l)
-
- sources = sources2 + order_libraries (libraries)
-
- return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name)
-
-
-class UnixArchiveGenerator (builtin.ArchiveGenerator):
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run (self, project, name, prop_set, sources):
- result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources)
- set_library_order(project.manager(), sources, prop_set, result)
- return result
-
-class UnixSearchedLibGenerator (builtin.SearchedLibGenerator):
-
- def __init__ (self):
- builtin.SearchedLibGenerator.__init__ (self)
-
- def optional_properties (self):
- return self.requirements ()
-
- def run (self, project, name, prop_set, sources, multiple):
- result = SearchedLibGenerator.run (project, name, prop_set, sources, multiple)
-
- set_library_order (sources, prop_set, result)
-
- return result
-
-class UnixPrebuiltLibGenerator (generators.Generator):
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run (self, project, name, prop_set, sources, multiple):
- f = prop_set.get ('<file>')
- set_library_order_aux (f, sources)
- return (f, sources)
-
-### # The derived toolset must specify their own rules and actions.
-# FIXME: restore?
-# action.register ('unix.prebuilt', None, None)
-
-
-generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['<file>', '<toolset>unix']))
-
-
-
-
-
-### # Declare generators
-### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE
-### : <toolset>unix ] ;
-generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['<toolset>unix']))
-
-### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB
-### : <toolset>unix ] ;
-###
-### generators.register [ new UnixSearchedLibGenerator
-### unix.SearchedLibGenerator : : SEARCHED_LIB : <toolset>unix ] ;
-###
-###
-### # The derived toolset must specify their own actions.
-### actions link {
-### }
-###
-### actions link.dll {
-### }
-
-def unix_archive (manager, targets, sources, properties):
- pass
-
-# FIXME: restore?
-#action.register ('unix.archive', unix_archive, [''])
-
-### actions searched-lib-generator {
-### }
-###
-### actions prebuilt {
-### }
-
-
-from b2.util.order import Order
-__order = Order ()
-
-def set_library_order_aux (from_libs, to_libs):
- for f in from_libs:
- for t in to_libs:
- if f != t:
- __order.add_pair (f, t)
-
-def set_library_order (manager, sources, prop_set, result):
- used_libraries = []
- deps = prop_set.dependency ()
-
- sources.extend(d.value() for d in deps)
- sources = sequence.unique(sources)
-
- for l in sources:
- if l.type () and type.is_derived (l.type (), 'LIB'):
- used_libraries.append (l)
-
- created_libraries = []
- for l in result:
- if l.type () and type.is_derived (l.type (), 'LIB'):
- created_libraries.append (l)
-
- created_libraries = set.difference (created_libraries, used_libraries)
- set_library_order_aux (created_libraries, used_libraries)
-
-def order_libraries (libraries):
- return __order.order (libraries)
-
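
set_library_order_aux above records pairwise "f must come before t" constraints and order_libraries later asks the Order object for a consistent sequence. The following self-contained sketch shows the idea with a naive ordering routine (this is not the b2.util.order implementation):

def order_with_constraints(items, pairs):
    """Reorder items so that for every (a, b) in pairs, a appears before b."""
    remaining = list(items)
    result = []
    while remaining:
        for item in remaining:
            # Emit an item only when nothing that must precede it is still pending.
            if not any(b == item and a in remaining for (a, b) in pairs):
                result.append(item)
                remaining.remove(item)
                break
        else:
            # A constraint cycle: fall back to the original order for the rest.
            result.extend(remaining)
            break
    return result

# order_with_constraints(["main.o", "libbar", "libfoo"], [("libfoo", "libbar")])
# -> ['main.o', 'libfoo', 'libbar']
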
diff --git a/jam-files/boost-build/tools/vacpp.jam b/jam-files/boost-build/tools/vacpp.jam
deleted file mode 100644
index f4080fc0..00000000
--- a/jam-files/boost-build/tools/vacpp.jam
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Copyright Toon Knapen 2004.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# Boost.Build V2 toolset for the IBM XL C++ compiler
-#
-
-import toolset : flags ;
-import feature ;
-import common ;
-import generators ;
-import os ;
-
-feature.extend toolset : vacpp ;
-toolset.inherit vacpp : unix ;
-generators.override vacpp.prebuilt : builtin.prebuilt ;
-generators.override vacpp.searched-lib-generator : searched-lib-generator ;
-
-# Configure the vacpp toolset
-rule init ( version ? : command * : options * )
-{
- local condition = [
- common.check-init-parameters vacpp : version $(version) ] ;
-
- command = [ common.get-invocation-command vacpp : xlC
- : $(command) : "/usr/vacpp/bin/xlC" ] ;
-
- common.handle-options vacpp : $(condition) : $(command) : $(options) ;
-}
-
-# Declare generators
-generators.register-c-compiler vacpp.compile.c : C : OBJ : <toolset>vacpp ;
-generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : <toolset>vacpp ;
-
-# Allow C++ style comments in C files
-flags vacpp CFLAGS : -qcpluscmt ;
-
-# Declare flags
-flags vacpp CFLAGS <optimization>off : -qNOOPTimize ;
-flags vacpp CFLAGS <optimization>speed : -O3 -qstrict ;
-flags vacpp CFLAGS <optimization>space : -O2 -qcompact ;
-
-# Discretionary inlining (not recommended)
-flags vacpp CFLAGS <inlining>off : -qnoinline ;
-flags vacpp CFLAGS <inlining>on : -qinline ;
-#flags vacpp CFLAGS <inlining>full : -qinline ;
-flags vacpp CFLAGS <inlining>full : ;
-
-# Exception handling
-flags vacpp C++FLAGS <exception-handling>off : -qnoeh ;
-flags vacpp C++FLAGS <exception-handling>on : -qeh ;
-
-# Run-time Type Identification
-flags vacpp C++FLAGS <rtti>off : -qnortti ;
-flags vacpp C++FLAGS <rtti>on : -qrtti ;
-
-# Enable 64-bit memory addressing model
-flags vacpp CFLAGS <address-model>64 : -q64 ;
-flags vacpp LINKFLAGS <address-model>64 : -q64 ;
-flags vacpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ;
-
-# Use absolute path when generating debug information
-flags vacpp CFLAGS <debug-symbols>on : -g -qfullpath ;
-flags vacpp LINKFLAGS <debug-symbols>on : -g -qfullpath ;
-flags vacpp LINKFLAGS <debug-symbols>off : -s ;
-
-if [ os.name ] = AIX
-{
- flags vacpp.compile C++FLAGS : -qfuncsect ;
-
- # The -bnoipath strips the prepending (relative) path of libraries from
- # the loader section in the target library or executable. Hence, during
- # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
- # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
- # this option, the prepending (relative) path + library name is
- # hard-coded in the loader section, causing *only* this path to be
- # searched during load-time. Note that the AIX linker does not have an
- # -soname equivalent, this is as close as it gets.
- #
-    # The above options are definitely for AIX 5.x, and most likely also for
- # AIX 4.x and AIX 6.x. For details about the AIX linker see:
- # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
- #
- flags vacpp.link LINKFLAGS <link>shared : -bnoipath ;
-
- # Run-time linking
- flags vacpp.link EXE-LINKFLAGS <link>shared : -brtl ;
-}
-else
-{
- # Linux PPC
- flags vacpp.compile CFLAGS <link>shared : -qpic=large ;
- flags vacpp FINDLIBS : rt ;
-}
-
-# Profiling
-flags vacpp CFLAGS <profiling>on : -pg ;
-flags vacpp LINKFLAGS <profiling>on : -pg ;
-
-flags vacpp.compile OPTIONS <cflags> ;
-flags vacpp.compile.c++ OPTIONS <cxxflags> ;
-flags vacpp DEFINES <define> ;
-flags vacpp UNDEFS <undef> ;
-flags vacpp HDRS <include> ;
-flags vacpp STDHDRS <sysinclude> ;
-flags vacpp.link OPTIONS <linkflags> ;
-flags vacpp ARFLAGS <arflags> ;
-
-flags vacpp LIBPATH <library-path> ;
-flags vacpp NEEDLIBS <library-file> ;
-flags vacpp FINDLIBS <find-shared-library> ;
-flags vacpp FINDLIBS <find-static-library> ;
-
-# Select the compiler name according to the threading model.
-flags vacpp VA_C_COMPILER <threading>single : xlc ;
-flags vacpp VA_C_COMPILER <threading>multi : xlc_r ;
-flags vacpp VA_CXX_COMPILER <threading>single : xlC ;
-flags vacpp VA_CXX_COMPILER <threading>multi : xlC_r ;
-
-SPACE = " " ;
-
-flags vacpp.link.dll HAVE_SONAME <target-os>linux : "" ;
-
-actions vacpp.link bind NEEDLIBS
-{
- $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
-}
-
-actions vacpp.link.dll bind NEEDLIBS
-{
- xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
-}
-
-actions vacpp.compile.c
-{
- $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
-}
-
-actions vacpp.compile.c++
-{
- $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
-}
-
-actions updated together piecemeal vacpp.archive
-{
- ar $(ARFLAGS) ru "$(<)" "$(>)"
-}
diff --git a/jam-files/boost-build/tools/whale.jam b/jam-files/boost-build/tools/whale.jam
deleted file mode 100644
index 9335ff0c..00000000
--- a/jam-files/boost-build/tools/whale.jam
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright (C) Vladimir Prus 2002-2005.
-
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This module implements support for Whale/Dolphin/WD parser/lexer tools.
-# See http://www.cs.queensu.ca/home/okhotin/whale/ for details.
-#
-# There are three interesting target types:
-# - WHL (the parser sources), that are converted to CPP and H
-# - DLP (the lexer sources), that are converted to CPP and H
-# - WD (combined parser/lexer sources), that are converted to WHL + DLP
-
-import type ;
-import generators ;
-import path ;
-import "class" : new ;
-import errors ;
-
-rule init ( path # path to the Whale/Dolphin/WD binaries
- )
-{
- if $(.configured) && $(.path) != $(path)
- {
- errors.user-error "Attempt to reconfigure Whale support" :
- "Previously configured with path \"$(.path:E=<empty>)\"" :
- "Now configuring with path \"$(path:E=<empty>)\"" ;
-
- }
- .configured = true ;
- .path = $(path) ;
-
- .whale = [ path.join $(path) whale ] ;
- .dolphin = [ path.join $(path) dolphin ] ;
- .wd = [ path.join $(path) wd ] ;
-}
-
-
-# Declare the types.
-type.register WHL : whl ;
-type.register DLP : dlp ;
-type.register WHL_LR0 : lr0 ;
-type.register WD : wd ;
-
-# Declare standard generators.
-generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ;
-generators.register-standard whale.dolphin : DLP : CPP H ;
-generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ;
-
-# The conversions defined above are ambiguous when we generate CPP from WD.
-# We can either go via WHL type, or via DLP type.
-# The following custom generator handles this by running both conversions.
-
-class wd-to-cpp : generator
-{
- rule __init__ ( * : * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) ;
- }
-
- rule run ( project name ? : property-set : source * )
- {
- if ! $(source[2])
- {
- local new-sources ;
- if ! [ $(source).type ] in WHL DLP
- {
- local r1 = [ generators.construct $(project) $(name)
- : WHL : $(property-set) : $(source) ] ;
- local r2 = [ generators.construct $(project) $(name)
- : DLP : $(property-set) : $(source) ] ;
-
- new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
- }
- else
- {
- new-sources = $(source) ;
- }
-
- local result ;
- for local i in $(new-sources)
- {
- local t = [ generators.construct $(project) $(name) : CPP
- : $(property-set) : $(i) ] ;
- result += $(t[2-]) ;
- }
- return $(result) ;
- }
- }
-
-}
-
-
-generators.override whale.wd-to-cpp : whale.whale ;
-generators.override whale.wd-to-cpp : whale.dolphin ;
-
-
-generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ;
-
-
-actions whale
-{
- $(.whale) -d $(<[1]:D) $(>)
-}
-
-actions dolphin
-{
- $(.dolphin) -d $(<[1]:D) $(>)
-}
-
-actions wd
-{
- $(.wd) -d $(<[1]:D) -g $(>)
-}
-
diff --git a/jam-files/boost-build/tools/xlf.jam b/jam-files/boost-build/tools/xlf.jam
deleted file mode 100644
index e7fcc608..00000000
--- a/jam-files/boost-build/tools/xlf.jam
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (C) 2004 Toon Knapen
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# toolset configuration for the IBM Fortran compiler (xlf)
-#
-
-import toolset : flags ;
-import feature ;
-import fortran ;
-
-rule init ( version ? : command * : options * )
-{
-}
-
-# Declare flags and action for compilation
-flags xlf OPTIONS <optimization>off : -O0 ;
-flags xlf OPTIONS <optimization>speed : -O3 ;
-flags xlf OPTIONS <optimization>space : -Os ;
-
-flags xlf OPTIONS <debug-symbols>on : -g ;
-flags xlf OPTIONS <profiling>on : -pg ;
-
-flags xlf DEFINES <define> ;
-flags xlf INCLUDES <include> ;
-
-rule compile-fortran
-{
-}
-
-actions compile-fortran
-{
- xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)"
-}
-
-generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/xsltproc-config.jam b/jam-files/boost-build/tools/xsltproc-config.jam
deleted file mode 100644
index de54a2eb..00000000
--- a/jam-files/boost-build/tools/xsltproc-config.jam
+++ /dev/null
@@ -1,37 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for the xsltproc tool. To use, just import this module.
-
-import os ;
-import toolset : using ;
-
-if [ os.name ] = NT
-{
- local xsltproc-path = [ GLOB [ modules.peek : PATH ] "C:\\Boost\\bin" : xsltproc\.exe ] ;
- xsltproc-path = $(xsltproc-path[1]) ;
-
- if $(xsltproc-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
- }
- using xsltproc : $(xsltproc-path) ;
- }
-}
-else
-{
- local xsltproc-path = [ GLOB [ modules.peek : PATH ] : xsltproc ] ;
- xsltproc-path = $(xsltproc-path[1]) ;
-
- if $(xsltproc-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
- }
- using xsltproc : $(xsltproc-path) ;
- }
-}
diff --git a/jam-files/boost-build/tools/xsltproc.jam b/jam-files/boost-build/tools/xsltproc.jam
deleted file mode 100644
index 96f5170b..00000000
--- a/jam-files/boost-build/tools/xsltproc.jam
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# This module defines rules to apply an XSLT stylesheet to an XML file using the
-# xsltproc driver, part of libxslt.
-#
-# Note: except for 'init', this module does not provide any rules for end
-# users.
-
-import feature ;
-import regex ;
-import sequence ;
-import common ;
-import os ;
-import modules ;
-import path ;
-import errors ;
-
-feature.feature xsl:param : : free ;
-feature.feature xsl:path : : free ;
-feature.feature catalog : : free ;
-
-
-# Initialize xsltproc support. The parameters are:
-# xsltproc: The xsltproc executable
-#
-rule init ( xsltproc ? )
-{
- if $(xsltproc)
- {
- modify-config ;
- .xsltproc = $(xsltproc) ;
- check-xsltproc ;
- }
-}
-
-rule freeze-config ( )
-{
- if ! $(.config-frozen)
- {
- .config-frozen = true ;
- .xsltproc ?= [ modules.peek : XSLTPROC ] ;
- .xsltproc ?= xsltproc ;
- check-xsltproc ;
- .is-cygwin = [ .is-cygwin $(.xsltproc) ] ;
- }
-}
-
-rule modify-config
-{
- if $(.config-frozen)
- {
- errors.user-error "xsltproc: Cannot change xsltproc command after it has been used." ;
- }
-}
-
-rule check-xsltproc ( )
-{
- if $(.xsltproc)
- {
- local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] ;
- if $(status[2]) != "0"
- {
- errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ;
- }
- }
-}
-
-# Returns a non-empty string if a cygwin xsltproc binary was specified.
-rule is-cygwin ( )
-{
- freeze-config ;
- return $(.is-cygwin) ;
-}
-
-rule .is-cygwin ( xsltproc )
-{
- if [ os.on-windows ]
- {
- local file = [ path.make [ modules.binding $(__name__) ] ] ;
- local dir = [ path.native
- [ path.join [ path.parent $(file) ] xsltproc ] ] ;
- if [ os.name ] = CYGWIN
- {
- dir = $(dir:W) ;
- }
- local command =
- "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ;
- local status = [ SHELL $(command) : no-output : exit-status ] ;
- if $(status[2]) != "0"
- {
- return true ;
- }
- }
-}
-
-rule compute-xslt-flags ( target : properties * )
-{
- local flags ;
-
- # Raw flags.
- flags += [ feature.get-values <flags> : $(properties) ] ;
-
- # Translate <xsl:param> into command line flags.
- for local param in [ feature.get-values <xsl:param> : $(properties) ]
- {
- local namevalue = [ regex.split $(param) "=" ] ;
- flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
- }
-
- # Translate <xsl:path>.
- for local path in [ feature.get-values <xsl:path> : $(properties) ]
- {
- flags += --path \"$(path:G=)\" ;
- }
-
- # Take care of implicit dependencies.
- local other-deps ;
- for local dep in [ feature.get-values <implicit-dependency> : $(properties) ]
- {
- other-deps += [ $(dep:G=).creating-subvariant ] ;
- }
-
- local implicit-target-directories ;
- for local dep in [ sequence.unique $(other-deps) ]
- {
- implicit-target-directories += [ $(dep).all-target-directories ] ;
- }
-
- for local dir in $(implicit-target-directories)
- {
- flags += --path \"$(dir:T)\" ;
- }
-
- return $(flags) ;
-}
-
-
-local rule .xsltproc ( target : source stylesheet : properties * : dirname ? : action )
-{
- freeze-config ;
- STYLESHEET on $(target) = $(stylesheet) ;
- FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ;
- NAME on $(target) = $(.xsltproc) ;
-
- for local catalog in [ feature.get-values <catalog> : $(properties) ]
- {
- CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : $(catalog:T) ] ;
- }
-
- if [ os.on-windows ] && ! [ is-cygwin ]
- {
- action = $(action).windows ;
- }
-
- $(action) $(target) : $(source) ;
-}
-
-
-rule xslt ( target : source stylesheet : properties * )
-{
- return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : xslt-xsltproc ] ;
-}
-
-
-rule xslt-dir ( target : source stylesheet : properties * : dirname )
-{
- return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : $(dirname) : xslt-xsltproc-dir ] ;
-}
-
-actions xslt-xsltproc.windows
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
-}
-
-
-actions xslt-xsltproc bind STYLESHEET
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
-}
-
-
-actions xslt-xsltproc-dir.windows bind STYLESHEET
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
-}
-
-
-actions xslt-xsltproc-dir bind STYLESHEET
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
-}
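
The compute-xslt-flags rule above turns <xsl:param> and <xsl:path> properties into xsltproc command-line arguments; a rough Python equivalent of that translation step (illustrative only, with invented example values):

def xslt_flags(params=(), paths=()):
    flags = []
    for param in params:                      # e.g. "boost.root=../.."
        name, _, value = param.partition("=")
        flags += ["--stringparam", name, value]
    for path in paths:
        flags += ["--path", path]
    return flags

# xslt_flags(params=["chapter.autolabel=1"], paths=["doc/xml"])
# -> ['--stringparam', 'chapter.autolabel', '1', '--path', 'doc/xml']
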
diff --git a/jam-files/boost-build/tools/xsltproc/included.xsl b/jam-files/boost-build/tools/xsltproc/included.xsl
deleted file mode 100644
index ef86394a..00000000
--- a/jam-files/boost-build/tools/xsltproc/included.xsl
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
- Copyright (c) 2010 Steven Watanabe
-
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
- -->
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
- version="1.0">
-</xsl:stylesheet>
diff --git a/jam-files/boost-build/tools/xsltproc/test.xml b/jam-files/boost-build/tools/xsltproc/test.xml
deleted file mode 100644
index 57c8ba18..00000000
--- a/jam-files/boost-build/tools/xsltproc/test.xml
+++ /dev/null
@@ -1,2 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<root/>
diff --git a/jam-files/boost-build/tools/xsltproc/test.xsl b/jam-files/boost-build/tools/xsltproc/test.xsl
deleted file mode 100644
index a142c91d..00000000
--- a/jam-files/boost-build/tools/xsltproc/test.xsl
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
- Copyright (c) 2010 Steven Watanabe
-
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
- -->
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
- version="1.0">
- <xsl:include href="included.xsl"/>
-</xsl:stylesheet>
diff --git a/jam-files/boost-build/tools/zlib.jam b/jam-files/boost-build/tools/zlib.jam
deleted file mode 100644
index f9138fd5..00000000
--- a/jam-files/boost-build/tools/zlib.jam
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports the zlib library
-#
-# After 'using zlib', the following targets are available:
-#
-# /zlib//zlib -- The zlib library
-
-
-# In addition to its direct purpose of supporting zlib, this module also
-# serves as a canonical example of how third-party configuration works
-# in Boost.Build. The operation is as follows:
-#
-# - For each 'using zlib : condition ... : ...' we create a target alternative
-# for zlib, with the specified condition.
-# - There's one target alternative for 'zlib' with no specific condition
-# properties.
-#
-# Two invocations of 'using zlib' with the same condition but different
-# properties are not permitted, e.g.:
-#
-# using zlib : condition <target-os>windows : include foo ;
-# using zlib : condition <target-os>windows : include bar ;
-#
-# is in error. One exception is the empty condition: 'using' without any
-# parameters is overridable. That is:
-#
-# using zlib ;
-# using zlib : include foo ;
-#
-# is OK; the first 'using' is simply ignored. The same holds if the order of the
-# statements is reversed.
-#
-# When 'zlib' target is built, a target alternative is selected as usual for
-# Boost.Build. The selected alternative is a custom target class, which:
-#
-# - calls ac.find-include-path to find header path. If explicit path is provided
-# in 'using', only that path is checked, and if no header is found there, error
-# is emitted. Otherwise, we check a directory specified using ZLIB_INCLUDE
-# environment variable, and failing that, in standard directories.
-# [TODO: document sysroot handling]
-# - calls ac.find-library to find the library, in an identical fashion.
-#
-
-import project ;
-import ac ;
-import errors ;
-import "class" : new ;
-import targets ;
-
-project.initialize $(__name__) ;
-project = [ project.current ] ;
-project zlib ;
-
-header = zlib.h ;
-names = z zlib zll zdll ;
-
-.default-alternative = [ new ac-library zlib : $(project) ] ;
-$(.default-alternative).set-header $(header) ;
-$(.default-alternative).set-default-names $(names) ;
-targets.main-target-alternative $(.default-alternative) ;
-
-rule init ( * : * )
-{
- if ! $(condition)
- {
- # Special case the no-condition case so that 'using' without parameters
- # can mix with more specific 'using'.
- $(.default-alternative).reconfigure $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
- else
- {
- # FIXME: consider if we should allow overriding definitions for a given
- # condition -- e.g. project-config.jam might want to override whatever is
- # in user-config.jam.
- local mt = [ new ac-library zlib : $(project)
- : $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- $(mt).set-header $(header) ;
- $(mt).set-default-names $(names) ;
- targets.main-target-alternative $(mt) ;
- }
-}
-
-
-
-
-
-
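
The comments above describe the alternative-selection rule: at most one 'using zlib' per condition, with a parameter-less call never conflicting regardless of order. A hypothetical Python sketch of that bookkeeping (names invented for illustration, not Boost.Build code):

_alternatives = {}

def using_zlib(condition=(), properties=()):
    key, props = tuple(condition), tuple(properties)
    if key in _alternatives and _alternatives[key] != props:
        if key == () and (not props or not _alternatives[key]):
            # A bare 'using zlib ;' never conflicts: whichever call carries
            # properties wins, regardless of the order of the two statements.
            props = props or _alternatives[key]
        else:
            raise ValueError("zlib is already configured for this condition")
    _alternatives[key] = props
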
diff --git a/jam-files/boost-build/user-config.jam b/jam-files/boost-build/user-config.jam
deleted file mode 100644
index fbbf13fd..00000000
--- a/jam-files/boost-build/user-config.jam
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright 2003, 2005 Douglas Gregor
-# Copyright 2004 John Maddock
-# Copyright 2002, 2003, 2004, 2007 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This file is used to configure your Boost.Build installation. You can modify
-# this file in place, or you can place it in a permanent location so that it
-# does not get overwritten should you get a new version of Boost.Build. See:
-#
-# http://www.boost.org/boost-build2/doc/html/bbv2/overview/configuration.html
-#
-# for documentation about possible permanent locations.
-
-# This file specifies which toolsets (C++ compilers), libraries, and other
-# tools are available. Often, you should be able to just uncomment existing
-# example lines and adjust them to taste. The complete list of supported tools,
-# and configuration instructions can be found at:
-#
-# http://boost.org/boost-build2/doc/html/bbv2/reference/tools.html
-#
-
-# This file uses Jam language syntax to describe available tools. Mostly,
-# there are 'using' lines that contain the name of the used tools, and
-# parameters to pass to those tools -- where parameters are separated by
-# semicolons. Important syntax notes:
-#
-# - Both ':' and ';' must be separated from other tokens by whitespace
-# - The '\' symbol is a quote character, so when specifying Windows paths you
-# should use '/' or '\\' instead.
-#
-# More details about the syntax can be found at:
-#
-# http://boost.org/boost-build2/doc/html/bbv2/advanced.html#bbv2.advanced.jam_language
-#
-
-# ------------------
-# GCC configuration.
-# ------------------
-
-# Configure gcc (default version).
-# using gcc ;
-
-# Configure specific gcc version, giving alternative name to use.
-# using gcc : 3.2 : g++-3.2 ;
-
-
-# -------------------
-# MSVC configuration.
-# -------------------
-
-# Configure msvc (default version, searched for in standard locations and PATH).
-# using msvc ;
-
-# Configure specific msvc version (searched for in standard locations and PATH).
-# using msvc : 8.0 ;
-
-
-# ----------------------
-# Borland configuration.
-# ----------------------
-# using borland ;
-
-
-# ----------------------
-# STLPort configuration.
-# ----------------------
-
-# Configure specifying location of STLPort headers. Libraries must be either
-# not needed or available to the compiler by default.
-# using stlport : : /usr/include/stlport ;
-
-# Configure specifying location of both headers and libraries explicitly.
-# using stlport : : /usr/include/stlport /usr/lib ;
-
-
-# -----------------
-# QT configuration.
-# -----------------
-
-# Configure assuming QTDIR gives the installation prefix.
-# using qt ;
-
-# Configure with an explicit installation prefix.
-# using qt : /usr/opt/qt ;
-
-# ---------------------
-# Python configuration.
-# ---------------------
-
-# Configure specific Python version.
-# using python : 3.1 : /usr/bin/python3 : /usr/include/python3.1 : /usr/lib ;
diff --git a/jam-files/boost-build/util/__init__.py b/jam-files/boost-build/util/__init__.py
deleted file mode 100644
index f80fe70e..00000000
--- a/jam-files/boost-build/util/__init__.py
+++ /dev/null
@@ -1,136 +0,0 @@
-
-import bjam
-import re
-import types
-
-# Decorator that specifies the bjam-side prototype for a Python function.
-def bjam_signature(s):
-
- def wrap(f):
- f.bjam_signature = s
- return f
-
- return wrap
-
-def metatarget(f):
-
- f.bjam_signature = (["name"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"], ["usage_requirements", "*"])
- return f
-
-class cached(object):
-
- def __init__(self, function):
- self.function = function
- self.cache = {}
-
- def __call__(self, *args):
- try:
- return self.cache[args]
- except KeyError:
- v = self.function(*args)
- self.cache[args] = v
- return v
-
- def __get__(self, instance, type):
- return types.MethodType(self, instance, type)
-
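The cached class above is a memoizing decorator: results are keyed on the positional arguments, and __get__ binds it so it also works on methods under the Python 2 runtime this module targets. A minimal usage sketch, assuming the b2.util import path used elsewhere in this tree (see indirect.py below):

    from b2.util import cached

    class Demo(object):
        @cached
        def area(self, w, h):
            # The body runs only for argument tuples not seen before.
            return w * h

    d = Demo()
    d.area(2, 3)   # computes and caches 6
    d.area(2, 3)   # returns the cached 6 without calling the body again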
-def unquote(s):
- if s and s[0] == '"' and s[-1] == '"':
- return s[1:-1]
- else:
- return s
-
-_extract_jamfile_and_rule = re.compile("(Jamfile<.*>)%(.*)")
-
-def qualify_jam_action(action_name, context_module):
-
- if action_name.startswith("###"):
- # Callable exported from Python. Don't touch
- return action_name
- elif _extract_jamfile_and_rule.match(action_name):
- # Rule is already in indirect format
- return action_name
- else:
- ix = action_name.find('.')
- if ix != -1 and action_name[:ix] == context_module:
- return context_module + '%' + action_name[ix+1:]
-
- return context_module + '%' + action_name
-
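A rough illustration (assumed inputs, not from the original tests) of how qualify_jam_action above normalizes action names: plain and module-prefixed names become module%rule references, while names already qualified or exported from Python pass through unchanged.

    from b2.util import qualify_jam_action

    qualify_jam_action("link", "gcc")       # -> "gcc%link"
    qualify_jam_action("gcc.link", "gcc")   # -> "gcc%link" (prefix matches the module)
    qualify_jam_action("###_5", "gcc")      # -> "###_5"    (Python-exported callable)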
-
-def set_jam_action(name, *args):
-
- m = _extract_jamfile_and_rule.match(name)
- if m:
- args = ("set-update-action-in-module", m.group(1), m.group(2)) + args
- else:
- args = ("set-update-action", name) + args
-
- return bjam.call(*args)
-
-
-def call_jam_function(name, *args):
-
- m = _extract_jamfile_and_rule.match(name)
- if m:
- args = ("call-in-module", m.group(1), m.group(2)) + args
- return bjam.call(*args)
- else:
- return bjam.call(*((name,) + args))
-
-__value_id = 0
-__python_to_jam = {}
-__jam_to_python = {}
-
-def value_to_jam(value, methods=False):
- """Makes a token to refer to a Python value inside Jam language code.
-
- The token is merely a string that can be passed around in Jam code and
- eventually passed back. For example, we might want to pass PropertySet
- instance to a tag function and it might eventually call back
- to virtual_target.add_suffix_and_prefix, passing the same instance.
-
- For values that are classes, we'll also make class methods callable
- from Jam.
-
- Note that this is necessary to make a bit more of the existing Jamfiles work.
- This trick should not be used too much, or else the performance benefits of
- the Python port will be lost.
- """
-
- global __value_id
-
- r = __python_to_jam.get(value, None)
- if r:
- return r
-
- exported_name = '###_' + str(__value_id)
- __value_id = __value_id + 1
- __python_to_jam[value] = exported_name
- __jam_to_python[exported_name] = value
-
- if methods and type(value) == types.InstanceType:
- for field_name in dir(value):
- field = getattr(value, field_name)
- if callable(field) and not field_name.startswith("__"):
- bjam.import_rule("", exported_name + "." + field_name, field)
-
- return exported_name
-
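A short sketch (with hypothetical values) of the token round-trip described in the docstring above: a Python object is handed to Jam as an opaque "###_N" string and recovered on the way back, while ordinary strings pass through untouched.

    from b2.util import value_to_jam, jam_to_value_maybe

    ps = object()                  # stands in for e.g. a PropertySet instance
    token = value_to_jam(ps)       # an opaque string such as "###_0"
    assert value_to_jam(ps) == token            # same value, same token
    assert jam_to_value_maybe(token) is ps      # the token maps back to the value
    assert jam_to_value_maybe("release") == "release"  # plain strings unchanged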
-def record_jam_to_value_mapping(jam_value, python_value):
- __jam_to_python[jam_value] = python_value
-
-def jam_to_value_maybe(jam_value):
-
- if type(jam_value) == type(""):
- return __jam_to_python.get(jam_value, jam_value)
- else:
- return jam_value
-
-def stem(filename):
- i = filename.find('.')
- if i != -1:
- return filename[0:i]
- else:
- return filename
diff --git a/jam-files/boost-build/util/assert.jam b/jam-files/boost-build/util/assert.jam
deleted file mode 100644
index abedad52..00000000
--- a/jam-files/boost-build/util/assert.jam
+++ /dev/null
@@ -1,336 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-import modules ;
-
-
-################################################################################
-#
-# Private implementation details.
-#
-################################################################################
-
-# Rule added as a replacement for the regular Jam = operator but which does not
-# ignore trailing empty string elements.
-#
-local rule exact-equal-test ( lhs * : rhs * )
-{
- local lhs_extended = $(lhs) xxx ;
- local rhs_extended = $(rhs) xxx ;
- if $(lhs_extended) = $(rhs_extended)
- {
- return true ;
- }
-}
-
-
-# Two lists are considered set-equal if they contain the same elements, ignoring
-# duplicates and ordering.
-#
-local rule set-equal-test ( set1 * : set2 * )
-{
- if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) )
- {
- return true ;
- }
-}
-
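The set-equality used here ignores ordering and duplicates: each list must be contained in the other. A Python sketch of the same relation (an illustration only, not part of the Jam module):

    def set_equal(a, b):
        # Mutual containment: order and duplicates do not matter.
        return all(x in b for x in a) and all(y in a for y in b)

    set_equal(["a", "b", "c"], ["c", "b", "a"])       # True
    set_equal(["a", "b", "c", "a"], ["a", "b", "c"])  # True, duplicates ignored
    set_equal(["a", "b"], ["a", "b", "c"])            # False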
-
-################################################################################
-#
-# Public interface.
-#
-################################################################################
-
-# Assert the equality of A and B, ignoring trailing empty string elements.
-#
-rule equal ( a * : b * )
-{
- if $(a) != $(b)
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\"
- (ignoring trailing empty strings) ;
- }
-}
-
-
-# Assert that the result of calling RULE-NAME on the given arguments has a false
-# logical value (is either an empty list or all empty strings).
-#
-rule false ( rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
- : $(7) : $(8) : $(9) ] ;
- }
-
- if $(result)
- {
- errors.error-skip-frames 3 assertion failure: Expected false result from
- "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
- $(5) : $(6) : $(7) : $(8) : $(9) ] "]" : Got: "[" \"$(result)\" "]" ;
- }
-}
-
-
-# Assert that ELEMENT is present in LIST.
-#
-rule "in" ( element : list * )
-{
- if ! $(element) in $(list)
- {
- errors.error-skip-frames 3 assertion failure: Expected \"$(element)\" in
- "[" \"$(list)\" "]" ;
- }
-}
-
-
-# Assert the inequality of A and B, ignoring trailing empty string elements.
-#
-rule not-equal ( a * : b * )
-{
- if $(a) = $(b)
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\"
- (ignoring trailing empty strings) ;
- }
-}
-
-
-# Assert that ELEMENT is not present in LIST.
-#
-rule not-in ( element : list * )
-{
- if $(element) in $(list)
- {
- errors.error-skip-frames 3 assertion failure: Did not expect
- \"$(element)\" in "[" \"$(list)\" "]" ;
- }
-}
-
-
-# Assert the inequality of A and B as sets.
-#
-rule not-set-equal ( a * : b * )
-{
- if [ set-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
- and "[" \"$(b)\" "]" to not be equal as sets ;
- }
-}
-
-
-# Assert that A and B are not exactly equal, not ignoring trailing empty string
-# elements.
-#
-rule not-exact-equal ( a * : b * )
-{
- if [ exact-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" ;
- }
-}
-
-
-# Assert that EXPECTED is the result of calling RULE-NAME with the given
-# arguments.
-#
-rule result ( expected * : rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
- : $(8) : $(9) ] ;
- }
-
- if ! [ exact-equal-test $(result) : $(expected) ]
- {
- errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
- errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
- $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
- \"$(result)\" "]" ;
- }
-}
-
-
-# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored)
-# to the result of calling RULE-NAME with the given arguments. Note that rules
-# called this way may accept at most 8 parameters.
-#
-rule result-set-equal ( expected * : rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
- : $(8) : $(9) ] ;
- }
-
- if ! [ set-equal-test $(result) : $(expected) ]
- {
- errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
- errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
- $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
- \"$(result)\" "]" ;
- }
-}
-
-
-# Assert the equality of A and B as sets.
-#
-rule set-equal ( a * : b * )
-{
- if ! [ set-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
- and "[" \"$(b)\" "]" to be equal as sets ;
- }
-}
-
-
-# Assert that the result of calling RULE-NAME on the given arguments has a true
-# logical value (is neither an empty list nor all empty strings).
-#
-rule true ( rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
- : $(7) : $(8) : $(9) ] ;
- }
-
- if ! $(result)
- {
- errors.error-skip-frames 3 assertion failure: Expected true result from
- "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
- $(5) : $(6) : $(7) : $(8) : $(9) ] "]" ;
- }
-}
-
-
-# Assert the exact equality of A and B, not ignoring trailing empty string
-# elements.
-#
-rule exact-equal ( a * : b * )
-{
- if ! [ exact-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" ;
- }
-}
-
-
-# Assert that the given variable is not an empty list.
-#
-rule variable-not-empty ( name )
-{
- local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ;
- if ! $(value)-is-not-empty
- {
- errors.error-skip-frames 3 assertion failure: Expected variable
- \"$(name)\" not to be an empty list ;
- }
-}
-
-
-rule __test__ ( )
-{
- # Helper rule used to avoid test duplication related to different list
- # equality test rules.
- #
- local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? )
- {
- local not-equality-assert = not-$(equality-assert) ;
-
- # When the given equality test is expected to ignore trailing empty
- # strings some of the test results should be inverted.
- local not-equality-assert-i = not-$(equality-assert) ;
- if $(ignore-trailing-empty-strings)
- {
- not-equality-assert-i = $(equality-assert) ;
- }
-
- $(equality-assert) : ;
- $(equality-assert) "" "" : "" "" ;
- $(not-equality-assert-i) : "" "" ;
- $(equality-assert) x : x ;
- $(not-equality-assert) : x ;
- $(not-equality-assert) "" : x ;
- $(not-equality-assert) "" "" : x ;
- $(not-equality-assert-i) x : x "" ;
- $(equality-assert) x "" : x "" ;
- $(not-equality-assert) x : "" x ;
- $(equality-assert) "" x : "" x ;
-
- $(equality-assert) 1 2 3 : 1 2 3 ;
- $(not-equality-assert) 1 2 3 : 3 2 1 ;
- $(not-equality-assert) 1 2 3 : 1 5 3 ;
- $(not-equality-assert) 1 2 3 : 1 "" 3 ;
- $(not-equality-assert) 1 2 3 : 1 1 2 3 ;
- $(not-equality-assert) 1 2 3 : 1 2 2 3 ;
- $(not-equality-assert) 1 2 3 : 5 6 7 ;
-
- # Extra variables used here just to make sure Boost Jam or Boost Build
- # do not handle lists with empty strings differently depending on
- # whether they are literals or stored in variables.
-
- local empty = ;
- local empty-strings = "" "" ;
- local x-empty-strings = x "" "" ;
- local empty-strings-x = "" "" x ;
-
- $(equality-assert) : $(empty) ;
- $(not-equality-assert-i) "" : $(empty) ;
- $(not-equality-assert-i) "" "" : $(empty) ;
- $(not-equality-assert-i) : $(empty-strings) ;
- $(not-equality-assert-i) "" : $(empty-strings) ;
- $(equality-assert) "" "" : $(empty-strings) ;
- $(equality-assert) $(empty) : $(empty) ;
- $(equality-assert) $(empty-strings) : $(empty-strings) ;
- $(not-equality-assert-i) $(empty) : $(empty-strings) ;
- $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ;
- $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ;
- $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ;
- $(not-equality-assert-i) x : $(x-empty-strings) ;
- $(not-equality-assert) x : $(empty-strings-x) ;
- $(not-equality-assert-i) x : $(x-empty-strings) ;
- $(not-equality-assert-i) x "" : $(x-empty-strings) ;
- $(equality-assert) x "" "" : $(x-empty-strings) ;
- $(not-equality-assert) x : $(empty-strings-x) ;
- $(not-equality-assert) "" x : $(empty-strings-x) ;
- $(equality-assert) "" "" x : $(empty-strings-x) ;
- }
-
-
- # ---------------
- # Equality tests.
- # ---------------
-
- run-equality-test equal : ignore-trailing-empty-strings ;
- run-equality-test exact-equal ;
-
-
- # -------------------------
- # assert.set-equal() tests.
- # -------------------------
-
- set-equal : ;
- not-set-equal "" "" : ;
- set-equal "" "" : "" ;
- set-equal "" "" : "" "" ;
- set-equal a b c : a b c ;
- set-equal a b c : b c a ;
- set-equal a b c a : a b c ;
- set-equal a b c : a b c a ;
- not-set-equal a b c : a b c d ;
- not-set-equal a b c d : a b c ;
-}
diff --git a/jam-files/boost-build/util/container.jam b/jam-files/boost-build/util/container.jam
deleted file mode 100644
index dd496393..00000000
--- a/jam-files/boost-build/util/container.jam
+++ /dev/null
@@ -1,339 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003 Rene Rivera
-# Copyright 2002, 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Various container classes.
-
-# Base for container objects. This lets us construct recursive structures. That
-# is, containers with containers in them, specifically so we can tell literal
-# values from node values.
-#
-class node
-{
- rule __init__ (
- value ? # Optional value to set node to initially.
- )
- {
- self.value = $(value) ;
- }
-
- # Set the value of this node, passing nothing will clear it.
- #
- rule set ( value * )
- {
- self.value = $(value) ;
- }
-
- # Get the value of this node.
- #
- rule get ( )
- {
- return $(self.value) ;
- }
-}
-
-
-# A simple vector. Interface mimics the C++ std::vector and std::list, with the
-# exception that indices are one (1) based, following the Jam standard.
-#
-# TODO: Possibly add assertion checks.
-#
-class vector : node
-{
- import numbers ;
- import utility ;
- import sequence ;
-
- rule __init__ (
- values * # Initial contents of vector.
- )
- {
- node.__init__ ;
- self.value = $(values) ;
- }
-
- # Get the value of the first element.
- #
- rule front ( )
- {
- return $(self.value[1]) ;
- }
-
- # Get the value of the last element.
- #
- rule back ( )
- {
- return $(self.value[-1]) ;
- }
-
- # Get the value of the element at the given index, one based. Access to
- # elements of recursive structures is supported directly. Specifying
- # additional index values recursively accesses the elements as containers.
- # For example: [ $(v).at 1 : 2 ] would retrieve the second element of our
- # first element, assuming the first element is a container.
- #
- rule at (
- index # The element index, one based.
- : * # Additional indices to access recursively.
- )
- {
- local r = $(self.value[$(index)]) ;
- if $(2)
- {
- r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- }
- return $(r) ;
- }
-
- # Get the value contained in the given element. This has the same
- # functionality and interface as "at" but in addition gets the value of the
- # referenced element, assuming it is a "node".
- #
- rule get-at (
- index # The element index, one based.
- : * # Additional indices to access recursively.
- )
- {
- local r = $(self.value[$(index)]) ;
- if $(2)
- {
- r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- }
- return [ $(r).get ] ;
- }
-
- # Insert the given value into the front of the vector pushing the rest of
- # the elements back.
- #
- rule push-front (
- value # Value to become first element.
- )
- {
- self.value = $(value) $(self.value) ;
- }
-
- # Remove the front element from the vector. Does not return the value. No
- # effect if vector is empty.
- #
- rule pop-front ( )
- {
- self.value = $(self.value[2-]) ;
- }
-
- # Add the given value at the end of the vector.
- #
- rule push-back (
- value # Value to become back element.
- )
- {
- self.value += $(value) ;
- }
-
- # Remove the back element from the vector. Does not return the value. No
- # effect if vector is empty.
- #
- rule pop-back ( )
- {
- self.value = $(self.value[1--2]) ;
- }
-
- # Insert the given value at the given index, one based. The values at and to
- # the right of the index are pushed back to make room for the new value.
- # If the index is past the end of the vector, the element is added to the
- # end.
- #
- rule insert (
- index # The index to insert at, one based.
- : value # The value to insert.
- )
- {
- local left = $(self.value[1-$(index)]) ;
- local right = $(self.value[$(index)-]) ;
- if $(right)-is-not-empty
- {
- left = $(left[1--2]) ;
- }
- self.value = $(left) $(value) $(right) ;
- }
-
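A small Python sketch (not the Jam implementation) of the one-based insert semantics described above: the new value ends up at position index, and an index past the end simply appends.

    def insert(values, index, value):
        i = min(index - 1, len(values))   # convert the 1-based index, clamp to the end
        return values[:i] + [value] + values[i:]

    insert(["a", "b"], 2, "x")    # ['a', 'x', 'b']
    insert(["a", "b"], 999, "x")  # ['a', 'b', 'x']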
- # Remove one or more elements from the vector. The range is inclusive, and
- # not specifying an end is equivalent to the [start, start] range.
- #
- rule erase (
- start # Index of first element to remove.
- end ? # Optional, index of last element to remove.
- )
- {
- end ?= $(start) ;
- local left = $(self.value[1-$(start)]) ;
- left = $(left[1--2]) ;
- local right = $(self.value[$(end)-]) ;
- right = $(right[2-]) ;
- self.value = $(left) $(right) ;
- }
-
- # Remove all elements from the vector.
- #
- rule clear ( )
- {
- self.value = ;
- }
-
- # The number of elements in the vector.
- #
- rule size ( )
- {
- return [ sequence.length $(self.value) ] ;
- }
-
- # Returns "true" if there are NO elements in the vector, empty otherwise.
- #
- rule empty ( )
- {
- if ! $(self.value)-is-not-empty
- {
- return true ;
- }
- }
-
- # Returns the textual representation of content.
- #
- rule str ( )
- {
- return "[" [ sequence.transform utility.str : $(self.value) ] "]" ;
- }
-
- # Sorts the vector in place, calling 'utility.less' for comparisons.
- #
- rule sort ( )
- {
- self.value = [ sequence.insertion-sort $(self.value) : utility.less ] ;
- }
-
- # Returns true if the content is equal to the content of the other vector. Uses
- # 'utility.equal' for comparison.
- #
- rule equal ( another )
- {
- local mismatch ;
- local size = [ size ] ;
- if $(size) = [ $(another).size ]
- {
- for local i in [ numbers.range 1 $(size) ]
- {
- if ! [ utility.equal [ at $(i) ] [ $(another).at $(i) ] ]
- {
- mismatch = true ;
- }
- }
- }
- else
- {
- mismatch = true ;
- }
-
- if ! $(mismatch)
- {
- return true ;
- }
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
-
- local v1 = [ new vector ] ;
- assert.true $(v1).equal $(v1) ;
- assert.true $(v1).empty ;
- assert.result 0 : $(v1).size ;
- assert.result "[" "]" : $(v1).str ;
- $(v1).push-back b ;
- $(v1).push-front a ;
- assert.result "[" a b "]" : $(v1).str ;
- assert.result a : $(v1).front ;
- assert.result b : $(v1).back ;
- $(v1).insert 2 : d ;
- $(v1).insert 2 : c ;
- $(v1).insert 4 : f ;
- $(v1).insert 4 : e ;
- $(v1).pop-back ;
- assert.result 5 : $(v1).size ;
- assert.result d : $(v1).at 3 ;
- $(v1).pop-front ;
- assert.result c : $(v1).front ;
- assert.false $(v1).empty ;
- $(v1).erase 3 4 ;
- assert.result 2 : $(v1).size ;
-
- local v2 = [ new vector q w e r t y ] ;
- assert.result 6 : $(v2).size ;
- $(v1).push-back $(v2) ;
- assert.result 3 : $(v1).size ;
- local v2-alias = [ $(v1).back ] ;
- assert.result e : $(v2-alias).at 3 ;
- $(v1).clear ;
- assert.true $(v1).empty ;
- assert.false $(v2-alias).empty ;
- $(v2).pop-back ;
- assert.result t : $(v2-alias).back ;
-
- local v3 = [ new vector ] ;
- $(v3).push-back [ new vector 1 2 3 4 5 ] ;
- $(v3).push-back [ new vector a b c ] ;
- assert.result "[" "[" 1 2 3 4 5 "]" "[" a b c "]" "]" : $(v3).str ;
- $(v3).push-back [ new vector [ new vector x y z ] [ new vector 7 8 9 ] ] ;
- assert.result 1 : $(v3).at 1 : 1 ;
- assert.result b : $(v3).at 2 : 2 ;
- assert.result a b c : $(v3).get-at 2 ;
- assert.result 7 8 9 : $(v3).get-at 3 : 2 ;
-
- local v4 = [ new vector 4 3 6 ] ;
- $(v4).sort ;
- assert.result 3 4 6 : $(v4).get ;
- assert.false $(v4).equal $(v3) ;
-
- local v5 = [ new vector 3 4 6 ] ;
- assert.true $(v4).equal $(v5) ;
- # Check that vectors of different sizes are considered non-equal.
- $(v5).pop-back ;
- assert.false $(v4).equal $(v5) ;
-
- local v6 = [ new vector [ new vector 1 2 3 ] ] ;
- assert.true $(v6).equal [ new vector [ new vector 1 2 3 ] ] ;
-
- local v7 = [ new vector 111 222 333 ] ;
- assert.true $(v7).equal $(v7) ;
- $(v7).insert 4 : 444 ;
- assert.result 111 222 333 444 : $(v7).get ;
- $(v7).insert 999 : xxx ;
- assert.result 111 222 333 444 xxx : $(v7).get ;
-
- local v8 = [ new vector "" "" "" ] ;
- assert.true $(v8).equal $(v8) ;
- assert.false $(v8).empty ;
- assert.result 3 : $(v8).size ;
- assert.result "" : $(v8).at 1 ;
- assert.result "" : $(v8).at 2 ;
- assert.result "" : $(v8).at 3 ;
- assert.result : $(v8).at 4 ;
- $(v8).insert 2 : 222 ;
- assert.result 4 : $(v8).size ;
- assert.result "" 222 "" "" : $(v8).get ;
- $(v8).insert 999 : "" ;
- assert.result 5 : $(v8).size ;
- assert.result "" 222 "" "" "" : $(v8).get ;
- $(v8).insert 999 : xxx ;
- assert.result 6 : $(v8).size ;
- assert.result "" 222 "" "" "" xxx : $(v8).get ;
-
- # Regression test for a bug causing vector.equal to compare only the first
- # and the last element in the given vectors.
- local v9 = [ new vector 111 xxx 222 ] ;
- local v10 = [ new vector 111 yyy 222 ] ;
- assert.false $(v9).equal $(v10) ;
-}
diff --git a/jam-files/boost-build/util/doc.jam b/jam-files/boost-build/util/doc.jam
deleted file mode 100644
index a7515588..00000000
--- a/jam-files/boost-build/util/doc.jam
+++ /dev/null
@@ -1,997 +0,0 @@
-# Copyright 2002, 2005 Dave Abrahams
-# Copyright 2002, 2003, 2006 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Documentation system; handles --help requests.
-# It defines rules that attach documentation to modules, rules, and variables.
-# Collects and generates documentation for the various parts of the build
-# system. The documentation is collected from comments integrated into the code.
-
-import modules ;
-import print ;
-import set ;
-import container ;
-import "class" ;
-import sequence ;
-import path ;
-
-
-# The type of output to generate.
-# "console" is formated text echoed to the console (the default);
-# "text" is formated text appended to the output file;
-# "html" is HTML output to the file.
-#
-help-output = console ;
-
-
-# The file to output documentation to when generating "text" or "html" help.
-# This is without extension as the extension is determined by the type of
-# output.
-#
-help-output-file = help ;
-
-# Whether to include local rules in help output.
-#
-.option.show-locals ?= ;
-
-# When showing documentation for a module, whether to also generate
-# automatically the detailed docs for each item in the module.
-#
-.option.detailed ?= ;
-
-# Generate debug output as the help is generated and modules are parsed.
-#
-.option.debug ?= ;
-
-# Enable or disable a documentation option.
-#
-local rule set-option (
- option # The option name.
- : value ? # Enabled (non-empty), or disabled (empty)
-)
-{
- .option.$(option) = $(value) ;
-}
-
-
-# Set the type of output.
-#
-local rule set-output ( type )
-{
- help-output = $(type) ;
-}
-
-
-# Set the output to a file.
-#
-local rule set-output-file ( file )
-{
- help-output-file = $(file) ;
-}
-
-
-# Extracts the brief comment from a complete comment. The brief comment is the
-# first sentence.
-#
-local rule brief-comment (
- docs * # The comment documentation.
-)
-{
- local d = $(docs:J=" ") ;
- local p = [ MATCH ".*([.])$" : $(d) ] ;
- if ! $(p) { d = $(d)"." ; }
- d = $(d)" " ;
- local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ;
- local brief = $(m[1]) ;
- while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ]
- {
- m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ;
- brief += $(m[1]) ;
- }
- return $(brief:J="") ;
-}
-
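In other words, the brief comment runs up to the first '.' that is followed by whitespace, with a trailing '.' added when the comment has none. A Python sketch of that behaviour (an approximation, not the Jam code):

    import re

    def brief_comment(docs):
        text = " ".join(docs)
        if not text.endswith("."):
            text += "."
        # The first '.' followed by whitespace (or the end) closes the brief part.
        return re.match(r"(.*?\.)(?:\s|$)", text).group(1)

    brief_comment(["First sentence. Second sentence."])  # 'First sentence.'
    brief_comment(["No trailing stop"])                  # 'No trailing stop.'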
-
-# Specifies the documentation for the current module.
-#
-local rule set-module-doc (
- module-name ? # The name of the module to document.
- : docs * # The documentation for the module.
-)
-{
- module-name ?= * ;
-
- $(module-name).brief = [ brief-comment $(docs) ] ;
- $(module-name).docs = $(docs) ;
-
- if ! $(module-name) in $(documented-modules)
- {
- documented-modules += $(module-name) ;
- }
-}
-
-
-# Specifies the copyright for the current module.
-#
-local rule set-module-copyright (
- module-name ? # The name of the module to document.
- : copyright * # The copyright for the module.
-)
-{
- module-name ?= * ;
-
- $(module-name).copy-brief = [ brief-comment $(copyright) ] ;
- $(module-name).copy-docs = $(copyright) ;
-
- if ! $(module-name) in $(documented-modules)
- {
- documented-modules += $(module-name) ;
- }
-}
-
-
-# Specifies the documentation for a rule in the current module. If called in the
-# global module, this documents a global rule.
-#
-local rule set-rule-doc (
- name # The name of the rule.
- module-name ? # The name of the module to document.
- is-local ? # Whether the rule is local to the module.
- : docs * # The documentation for the rule.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
- $(module-name).$(name).docs = $(docs) ;
- $(module-name).$(name).is-local = $(is-local) ;
-
- if ! $(name) in $($(module-name).rules)
- {
- $(module-name).rules += $(name) ;
- }
-}
-
-
-# Specifies a class; this will turn a rule into a class.
-#
-local rule set-class-doc (
- name # The name of the class.
- module-name ? # The name of the module to document.
- : super-name ? # The super class name.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).is-class = true ;
- $(module-name).$(name).super-name = $(super-name) ;
- $(module-name).$(name).class-rules =
- [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ;
- $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ;
-
- $(module-name).classes += $(name) ;
- $(module-name).class-rules += $($(module-name).$(name).class-rules) ;
- $(module-name).rules =
- [ set.difference $($(module-name).rules) :
- $(name) $($(module-name).$(name).class-rules) ] ;
-}
-
-
-# Set the argument call signature of a rule.
-#
-local rule set-rule-arguments-signature (
- name # The name of the rule.
- module-name ? # The name of the module to document.
- : signature * # The arguments signature.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).signature = $(signature) ;
-}
-
-
-# Specifies the documentation for an argument of a rule.
-#
-local rule set-argument-doc (
- name # The name of the argument.
- qualifier # Argument syntax qualifier, "*", "+", etc.
- rule-name # The name of the rule.
- module-name ? # The optional name of the module.
- : docs * # The documentation.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ;
- $(module-name).$(rule-name).args.$(name).docs = $(docs) ;
-
- if ! $(name) in $($(module-name).$(rule-name).args)
- {
- $(module-name).$(rule-name).args += $(name) ;
- }
-}
-
-
-# Specifies the documentation for a variable in the current module. If called in
-# the global module, the global variable is documented.
-#
-local rule set-variable-doc (
- name # The name of the variable.
- default # The default value.
- initial # The initial value.
- module-name ? # The name of the module to document.
- : docs * # The documentation for the variable.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
- $(module-name).$(name).default = $(default) ;
- $(module-name).$(name).initial = $(initial) ;
- $(module-name).$(name).docs = $(docs) ;
-
- if ! $(name) in $($(module-name).variables)
- {
- $(module-name).variables += $(name) ;
- }
-}
-
-
-# Generates a general description of the documentation and help system.
-#
-local rule print-help-top ( )
-{
- print.section "General command line usage" ;
-
- print.text " bjam [options] [properties] [targets]
-
- Options, properties and targets can be specified in any order.
- " ;
-
- print.section "Important Options" ;
-
- print.list-start ;
- print.list-item "--clean Remove targets instead of building" ;
- print.list-item "-a Rebuild everything" ;
- print.list-item "-n Don't execute the commands, only print them" ;
- print.list-item "-d+2 Show commands as they are executed" ;
- print.list-item "-d0 Supress all informational messages" ;
- print.list-item "-q Stop at first error" ;
- print.list-item "--debug-configuration Diagnose configuration" ;
- print.list-item "--debug-building Report which targets are built with what properties" ;
- print.list-item "--debug-generator Diagnose generator search/execution" ;
- print.list-end ;
-
- print.section "Further Help"
- The following options can be used to obtain additional documentation.
- ;
-
- print.list-start ;
- print.list-item "--help-options Print more obscure command line options." ;
- print.list-item "--help-internal Boost.Build implementation details." ;
- print.list-item "--help-doc-options Implementation details doc formatting." ;
- print.list-end ;
-}
-
-
-# Generate Jam/Boost.Jam command usage information.
-#
-local rule print-help-usage ( )
-{
- print.section "Boost.Jam Usage"
- "bjam [ options... ] targets..."
- ;
- print.list-start ;
- print.list-item -a;
- Build all targets, even if they are current. ;
- print.list-item -fx;
- Read '"x"' as the Jamfile for building instead of searching for the
- Boost.Build system. ;
- print.list-item -jx;
- Run up to '"x"' commands concurrently. ;
- print.list-item -n;
- Do not execute build commands. Instead print out the commands as they
- would be executed if building. ;
- print.list-item -ox;
- Output the used build commands to file '"x"'. ;
- print.list-item -q;
- Quit as soon as a build failure is encountered. Without this option
- Boost.Jam will continue building as many targets as it can. ;
- print.list-item -sx=y;
- Sets a Jam variable '"x"' to the value '"y"', overriding any value that
- variable would have from the environment. ;
- print.list-item -tx;
- Rebuild the target '"x"', even if it is up-to-date. ;
- print.list-item -v;
- Display the version of bjam. ;
- print.list-item --x;
- Any option not explicitly handled by Boost.Jam remains available to
- build scripts using the '"ARGV"' variable. ;
- print.list-item -dn;
- Enables output of diagnostic messages. The debug level '"n"' and all
- below it are enabled by this option. ;
- print.list-item -d+n;
- Enables output of diagnostic messages. Only the output for debug level
- '"n"' is enabled. ;
- print.list-end ;
- print.section "Debug Levels"
- Each debug level shows a different set of information. Usually with
- higher levels producing more verbose information. The following levels
- are supported: ;
- print.list-start ;
- print.list-item 0;
- Turn off all diagnostic output. Only errors are reported. ;
- print.list-item 1;
- Show the actions taken for building targets, as they are executed. ;
- print.list-item 2;
- Show "quiet" actions and display all action text, as they are executed. ;
- print.list-item 3;
- Show dependency analysis, and target/source timestamps/paths. ;
- print.list-item 4;
- Show arguments of shell invocations. ;
- print.list-item 5;
- Show rule invocations and variable expansions. ;
- print.list-item 6;
- Show directory/header file/archive scans, and attempts at binding to targets. ;
- print.list-item 7;
- Show variable settings. ;
- print.list-item 8;
- Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ;
- print.list-item 9;
- Show variable manipulation, scanner tokens, and memory usage. ;
- print.list-item 10;
- Show execution times for rules. ;
- print.list-item 11;
- Show parsing progress of Jamfiles. ;
- print.list-item 12;
- Show graph for target dependencies. ;
- print.list-item 13;
- Show changes in target status (fate). ;
- print.list-end ;
-}
-
-
-# Generates description of options controlling the help system. This
-# automatically reads the options as all variables in the doc module of the form
-# ".option.*".
-#
-local rule print-help-options (
- module-name # The doc module.
-)
-{
- print.section "Help Options"
- These are all the options available for controlling the help system in
- various ways. Options can be enabled or disabled with
- '"--help-enable-<option>"', and "'--help-disable-<option>'"
- respectively.
- ;
- local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ;
- if $(options-to-list)
- {
- print.list-start ;
- for local option in [ sequence.insertion-sort $(options-to-list) ]
- {
- local def = disabled ;
- if $($(module-name)..option.$(option).default) != "(empty)"
- {
- def = enabled ;
- }
- print.list-item $(option): $($(module-name)..option.$(option).docs)
- Default is $(def). ;
- }
- print.list-end ;
- }
-}
-
-
-# Generate brief documentation for all the known items in the section for a
-# module. Possible sections are: "rules", and "variables".
-#
-local rule print-help-module-section (
- module # The module name.
- section # rules or variables.
- : section-head # The title of the section.
- section-description * # The detailed description of the section.
-)
-{
- if $($(module).$(section))
- {
- print.section $(section-head) $(section-description) ;
- print.list-start ;
- for local item in [ sequence.insertion-sort $($(module).$(section)) ]
- {
- local show = ;
- if ! $($(module).$(item).is-local)
- {
- show = yes ;
- }
- if $(.option.show-locals)
- {
- show = yes ;
- }
- if $(show)
- {
- print.list-item $(item): $($(module).$(item).brief) ;
- }
- }
- print.list-end ;
- }
-}
-
-
-# Generate documentation for all possible modules. We attempt to list all known
-# modules together with a brief description of each.
-#
-local rule print-help-all (
- ignored # Usually the module name, but is ignored here.
-)
-{
- print.section "Modules"
- "These are all the known modules. Use --help <module> to get more"
- "detailed information."
- ;
- if $(documented-modules)
- {
- print.list-start ;
- for local module-name in [ sequence.insertion-sort $(documented-modules) ]
- {
- # The brief docs for each module.
- print.list-item $(module-name): $($(module-name).brief) ;
- }
- print.list-end ;
- }
- # The documentation for each module when details are requested.
- if $(documented-modules) && $(.option.detailed)
- {
- for local module-name in [ sequence.insertion-sort $(documented-modules) ]
- {
- # The brief docs for each module.
- print-help-module $(module-name) ;
- }
- }
-}
-
-
-# Generate documentation for a module. Basic information about the module is
-# generated.
-#
-local rule print-help-module (
- module-name # The module to generate docs for.
-)
-{
- # Print the docs.
- print.section "Module '$(module-name)'" $($(module-name).docs) ;
-
- # Print out the documented classes.
- print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
- Use --help $(module-name).<class-name> to get more information. ;
-
- # Print out the documented rules.
- print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
- Use --help $(module-name).<rule-name> to get more information. ;
-
- # Print out the documented variables.
- print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
- Use --help $(module-name).<variable-name> to get more information. ;
-
- # Print out all the same information, but in detailed form.
- if $(.option.detailed)
- {
- print-help-classes $(module-name) ;
- print-help-rules $(module-name) ;
- print-help-variables $(module-name) ;
- }
-}
-
-
-# Generate documentation for a set of rules in a module.
-#
-local rule print-help-rules (
- module-name # Module of the rules.
- : name * # Optional list of rules to describe.
-)
-{
- name ?= $($(module-name).rules) ;
- if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ]
- {
- # Print out the given rules.
- for local rule-name in [ sequence.insertion-sort $(name) ]
- {
- if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local)
- {
- local signature = $($(module-name).$(rule-name).signature:J=" ") ;
- signature ?= "" ;
- print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'"
- $($(module-name).$(rule-name).docs) ;
- if $($(module-name).$(rule-name).args)
- {
- print.list-start ;
- for local arg-name in $($(module-name).$(rule-name).args)
- {
- print.list-item $(arg-name): $($(module-name).$(rule-name).args.$(arg-name).docs) ;
- }
- print.list-end ;
- }
- }
- }
- }
-}
-
-
-# Generate documentation for a set of classes in a module.
-#
-local rule print-help-classes (
- module-name # Module of the classes.
- : name * # Optional list of classes to describe.
-)
-{
- name ?= $($(module-name).classes) ;
- if [ set.intersection $(name) : $($(module-name).classes) ]
- {
- # Print out the given classes.
- for local class-name in [ sequence.insertion-sort $(name) ]
- {
- if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local)
- {
- local signature = $($(module-name).$(class-name).signature:J=" ") ;
- signature ?= "" ;
- print.section "Class '$(module-name).$(class-name) ( $(signature) )'"
- $($(module-name).$(class-name).docs)
- "Inherits from '"$($(module-name).$(class-name).super-name)"'." ;
- if $($(module-name).$(class-name).args)
- {
- print.list-start ;
- for local arg-name in $($(module-name).$(class-name).args)
- {
- print.list-item $(arg-name): $($(module-name).$(class-name).args.$(arg-name).docs) ;
- }
- print.list-end ;
- }
- }
-
- # Print out the documented rules of the class.
- print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
- Use --help $(module-name).<rule-name> to get more information. ;
-
- # Print out all the rules if details are requested.
- if $(.option.detailed)
- {
- print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ;
- }
- }
- }
-}
-
-
-# Generate documentation for a set of variables in a module.
-#
-local rule print-help-variables (
- module-name ? # Module of the variables.
- : name * # Optional list of variables to describe.
-)
-{
- name ?= $($(module-name).variables) ;
- if [ set.intersection $(name) : $($(module-name).variables) ]
- {
- # Print out the given variables.
- for local variable-name in [ sequence.insertion-sort $(name) ]
- {
- print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ;
- if $($(module-name).$(variable-name).default) ||
- $($(module-name).$(variable-name).initial)
- {
- print.list-start ;
- if $($(module-name).$(variable-name).default)
- {
- print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ;
- }
- if $($(module-name).$(variable-name).initial)
- {
- print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ;
- }
- print.list-end ;
- }
- }
- }
-}
-
-
-# Generate documentation for a project.
-#
-local rule print-help-project (
- unused ?
- : jamfile * # The project Jamfile.
-)
-{
- if $(jamfile<$(jamfile)>.docs)
- {
- # Print the docs.
- print.section "Project-specific help"
- Project has jamfile at $(jamfile) ;
-
- print.lines $(jamfile<$(jamfile)>.docs) "" ;
- }
-}
-
-
-# Generate documentation for a config file.
-#
-local rule print-help-config (
- unused ?
- : type # The type of configuration file user or site.
- config-file # The configuration Jamfile.
-)
-{
- if $(jamfile<$(config-file)>.docs)
- {
- # Print the docs.
- print.section "Configuration help"
- Configuration file at $(config-file) ;
-
- print.lines $(jamfile<$(config-file)>.docs) "" ;
- }
-}
-
-
-ws = " " ;
-
-# Extract the text from a block of comments.
-#
-local rule extract-comment (
- var # The name of the variable to extract from.
-)
-{
- local comment = ;
- local line = $($(var)[1]) ;
- local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
- while $(l[1]) && $($(var))
- {
- if $(l[2]) { comment += [ MATCH "^[$(ws)]?(.*)$" : $(l[2]) ] ; }
- else { comment += "" ; }
- $(var) = $($(var)[2-]) ;
- line = $($(var)[1]) ;
- l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
- }
- return $(comment) ;
-}
-
-
-# Extract a single line of Jam syntax, ignoring any comments.
-#
-local rule extract-syntax (
- var # The name of the variable to extract from.
-)
-{
- local syntax = ;
- local line = $($(var)[1]) ;
- while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var))
- {
- local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ;
- if $(m) && ! $(m) = ""
- {
- syntax = $(m) ;
- }
- $(var) = $($(var)[2-]) ;
- line = $($(var)[1]) ;
- }
- return $(syntax) ;
-}
-
-
-# Extract the next token; this is either a single Jam construct or a comment,
-# returned as a single token.
-#
-local rule extract-token (
- var # The name of the variable to extract from.
-)
-{
- local parts = ;
- while ! $(parts)
- {
- parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ;
- if ! $(parts)
- {
- $(var) = $($(var)[2-]) ;
- }
- }
- local token = ;
- if [ MATCH "^(#)" : $(parts[1]) ]
- {
- token = $(parts:J=" ") ;
- $(var) = $($(var)[2-]) ;
- }
- else
- {
- token = $(parts[1]) ;
- $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ;
- }
- return $(token) ;
-}
-
-
-# Scan for a rule declaration as the next item in the variable.
-#
-local rule scan-rule (
- syntax ? # The first part of the text which contains the rule declaration.
- : var # The name of the variable to extract from.
-)
-{
- local rule-parts =
- [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ;
- if $(rule-parts[1])
- {
- # Mark as doc for rule.
- local rule-name = $(rule-parts[2]) ;
- if $(scope-name)
- {
- rule-name = $(scope-name).$(rule-name) ;
- }
- local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ;
- if $(comment-block)
- {
- set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ;
- }
- # Parse args of rule.
- $(var) = $(rule-parts[3-]) $($(var)) ;
- set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ;
- # Scan within this rules scope.
- local scope-level = [ extract-token $(var) ] ;
- local scope-name = $(rule-name) ;
- while $(scope-level)
- {
- local comment-block = [ extract-comment $(var) ] ;
- local syntax-block = [ extract-syntax $(var) ] ;
- if [ scan-rule $(syntax-block) : $(var) ]
- {
- }
- else if [ MATCH "^(\\{)" : $(syntax-block) ]
- {
- scope-level += "{" ;
- }
- else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
- {
- scope-level = $(scope-level[2-]) ;
- }
- }
-
- return true ;
- }
-}
-
-
-# Scan the arguments of a rule.
-#
-local rule scan-rule-arguments (
- var # The name of the variable to extract from.
-)
-{
- local arg-syntax = ;
- local token = [ extract-token $(var) ] ;
- while $(token) != "(" && $(token) != "{"
- {
- token = [ extract-token $(var) ] ;
- }
- if $(token) != "{"
- {
- token = [ extract-token $(var) ] ;
- }
- local arg-signature = ;
- while $(token) != ")" && $(token) != "{"
- {
- local arg-name = ;
- local arg-qualifier = " " ;
- local arg-doc = ;
- if $(token) = ":"
- {
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- }
- arg-name = $(token) ;
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- if [ MATCH "^([\\*\\+\\?])" : $(token) ]
- {
- arg-qualifier = $(token) ;
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- }
- if $(token) = ":"
- {
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- }
- if [ MATCH "^(#)" : $(token) ]
- {
- $(var) = $(token) $($(var)) ;
- arg-doc = [ extract-comment $(var) ] ;
- token = [ extract-token $(var) ] ;
- }
- set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ;
- }
- while $(token) != "{"
- {
- token = [ extract-token $(var) ] ;
- }
- $(var) = "{" $($(var)) ;
- arg-signature ?= "" ;
- return $(arg-signature) ;
-}
-
-
-# Scan for a variable declaration.
-#
-local rule scan-variable (
- syntax ? # The first part of the text which contains the variable declaration.
- : var # The name of the variable to extract from.
-)
-{
- # [1] = name, [2] = assignment operator, [3] = value(s)
- local var-parts =
- [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ;
- if $(var-parts)
- {
- local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ;
- local default-value = "" ;
- local initial-value = "" ;
- if $(var-parts[2]) = "?="
- {
- default-value = $(value) ;
- default-value ?= "(empty)" ;
- }
- else
- {
- initial-value = $(value) ;
- initial-value ?= "(empty)" ;
- }
- if $(comment-block)
- {
- set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ;
- }
- return true ;
- }
-}
-
-
-# Scan a class declaration.
-#
-local rule scan-class (
- syntax ? # The syntax text for the class declaration.
-)
-{
- # [1] = class?, [2] = name, [3] = superclass
- local class-parts =
- [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]+:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ;
- if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class"
- {
- set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ;
- }
-}
-
-
-# Scan a module file for documentation comments. This also invokes any actions
-# assigned to the module. The actions are the rules that do the actual output of
-# the documentation. This rule is invoked as the header scan rule for the module
-# file.
-#
-rule scan-module (
- target # The module file.
- : text * # The text in the file, one item per line.
- : action * # Rule to call to output docs for the module.
-)
-{
- if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; }
- local module-name = $(target:B) ;
- local module-documented = ;
- local comment-block = ;
- local syntax-block = ;
- # This is a hack because we cannot get the last line of a file if it happens
- # to not have a new-line termination.
- text += "}" ;
- while $(text)
- {
- comment-block = [ extract-comment text ] ;
- syntax-block = [ extract-syntax text ] ;
- if $(.option.debug)
- {
- ECHO "HELP:" comment block; '$(comment-block)' ;
- ECHO "HELP:" syntax block; '$(syntax-block)' ;
- }
- if [ scan-rule $(syntax-block) : text ] { }
- else if [ scan-variable $(syntax-block) : text ] { }
- else if [ scan-class $(syntax-block) ] { }
- else if [ MATCH .*([cC]opyright).* : $(comment-block:J=" ") ]
- {
- # mark as the copy for the module.
- set-module-copyright $(module-name) : $(comment-block) ;
- }
- else if $(action[1]) in "print-help-project" "print-help-config"
- && ! $(jamfile<$(target)>.docs)
- {
- # special module docs for the project jamfile.
- jamfile<$(target)>.docs = $(comment-block) ;
- }
- else if ! $(module-documented)
- {
- # document the module.
- set-module-doc $(module-name) : $(comment-block) ;
- module-documented = true ;
- }
- }
- if $(action)
- {
- $(action[1]) $(module-name) : $(action[2-]) ;
- }
-}
-
-
-# Import scan-module to global scope, so that it is available during header
-# scanning phase.
-#
-IMPORT $(__name__) : scan-module : : doc.scan-module ;
-
-
-# Read in a file using the SHELL builtin and return the individual lines as
-# would be done for header scanning.
-#
-local rule read-file (
- file # The file to read in.
-)
-{
- file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ;
- if ! $(.file<$(file)>.lines)
- {
- local content ;
- switch [ modules.peek : OS ]
- {
- case NT :
- content = [ SHELL "TYPE \"$(file)\"" ] ;
-
- case * :
- content = [ SHELL "cat \"$(file)\"" ] ;
- }
- local lines ;
- local nl = "
-" ;
- local << = "([^$(nl)]*)[$(nl)](.*)" ;
- local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
- while $(line+)
- {
- lines += $(line+[1]) ;
- line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
- }
- .file<$(file)>.lines = $(lines) ;
- }
- return $(.file<$(file)>.lines) ;
-}
-
-
-# Add a scan action to perform to generate the help documentation. The action
-# rule is passed the name of the module as the first argument. The second
-# argument(s) are optional and passed directly as specified here.
-#
-local rule do-scan (
- modules + # The modules to scan and perform the action on.
- : action * # The action rule, plus the secondary arguments to pass to the action rule.
-)
-{
- if $(help-output) = text
- {
- print.output $(help-output-file).txt plain ;
- ALWAYS $(help-output-file).txt ;
- DEPENDS all : $(help-output-file).txt ;
- }
- if $(help-output) = html
- {
- print.output $(help-output-file).html html ;
- ALWAYS $(help-output-file).html ;
- DEPENDS all : $(help-output-file).html ;
- }
- for local module-file in $(modules[1--2])
- {
- scan-module $(module-file) : [ read-file $(module-file) ] ;
- }
- scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ;
-}
diff --git a/jam-files/boost-build/util/indirect.jam b/jam-files/boost-build/util/indirect.jam
deleted file mode 100644
index ec63f192..00000000
--- a/jam-files/boost-build/util/indirect.jam
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import modules ;
-import numbers ;
-
-
-# The pattern that indirect rules must match: module%rule
-.pattern = ^([^%]*)%([^%]+)$ ;
-
-
-#
-# Type checking rules.
-#
-local rule indirect-rule ( x )
-{
- if ! [ MATCH $(.pattern) : $(x) ]
- {
- return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
- }
-}
-
-
-# Make an indirect rule which calls the given rule. If context is supplied it is
-# expected to be the module in which to invoke the rule by the 'call' rule
-# below. Otherwise, the rule will be invoked in the module of this rule's
-# caller.
-#
-rule make ( rulename bound-args * : context ? )
-{
- context ?= [ CALLER_MODULE ] ;
- context ?= "" ;
- return $(context)%$(rulename) $(bound-args) ;
-}
-
-
-# Make an indirect rule which calls the given rule. 'rulename' may be a
-# qualified rule; if so it is returned unchanged. Otherwise, if frames is not
-# supplied, the result will be invoked (by 'call', below) in the module of the
-# caller. Otherwise, frames > 1 specifies additional call frames to back up in
-# order to find the module context.
-#
-rule make-qualified ( rulename bound-args * : frames ? )
-{
- if [ MATCH $(.pattern) : $(rulename) ]
- {
- return $(rulename) $(bound-args) ;
- }
- else
- {
- frames ?= 1 ;
- # If the rule name includes a Jamfile module, grab it.
- local module-context = [ MATCH ^(Jamfile<[^>]*>)\\..* : $(rulename) ] ;
-
- if ! $(module-context)
- {
- # Take the first dot-separated element as module name. This disallows
- # module names with dots, but allows rule names with dots.
- module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ;
- }
- module-context ?= [ CALLER_MODULE $(frames) ] ;
- return [ make $(rulename) $(bound-args) : $(module-context) ] ;
- }
-}
-
-
-# Returns the module name in which the given indirect rule will be invoked.
-#
-rule get-module ( [indirect-rule] x )
-{
- local m = [ MATCH $(.pattern) : $(x) ] ;
- if ! $(m[1])
- {
- m = ;
- }
- return $(m[1]) ;
-}
-
-
-# Returns the rulename that will be called when x is invoked.
-#
-rule get-rule ( [indirect-rule] x )
-{
- local m = [ MATCH $(.pattern) : $(x) ] ;
- return $(m[2]) ;
-}
-
-
-# Invoke the given indirect-rule.
-#
-rule call ( [indirect-rule] r args * : * )
-{
- return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args)
- : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
-}
-
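A Python sketch (an assumption mirroring the pattern above) of the module%rule convention: an indirect reference is just the context module and the rule name joined by '%', and splitting it back out tells 'call' where to dispatch.

    import re

    _pattern = re.compile(r"^([^%]*)%([^%]+)$")

    def make(rulename, context=""):
        return "%s%%%s" % (context, rulename)

    def get_module(x):
        return _pattern.match(x).group(1) or None   # empty module means global

    def get_rule(x):
        return _pattern.match(x).group(2)

    ref = make("do-link", "Jamfile</home/project>")
    get_module(ref)   # 'Jamfile</home/project>'
    get_rule(ref)     # 'do-link'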
-
-rule __test__
-{
- import assert ;
-
- rule foo-barr! ( x )
- {
- assert.equal $(x) : x ;
- }
-
- assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
- assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
-
- call [ make foo-barr! ] x ;
- call [ make foo-barr! x ] ;
- call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
-}
diff --git a/jam-files/boost-build/util/indirect.py b/jam-files/boost-build/util/indirect.py
deleted file mode 100644
index 78fa8994..00000000
--- a/jam-files/boost-build/util/indirect.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Status: minimally ported. This module is not supposed to be used much
-# with Boost.Build/Python.
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.util import call_jam_function, bjam_signature
-
-def call(*args):
- a1 = args[0]
- name = a1[0]
- a1tail = a1[1:]
- call_jam_function(name, *((a1tail,) + args[1:]))
diff --git a/jam-files/boost-build/util/logger.py b/jam-files/boost-build/util/logger.py
deleted file mode 100644
index de652129..00000000
--- a/jam-files/boost-build/util/logger.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright Pedro Ferreira 2005. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import sys
-
-class NullLogger:
- def __init__ (self):
- self.indent_ = ''
-
- def log (self, source_name, *args):
- if self.on () and self.interesting (source_name):
- self.do_log (self.indent_)
- for i in args:
- self.do_log (i)
- self.do_log ('\n')
-
- def increase_indent (self):
- if self.on ():
- self.indent_ += ' '
-
- def decrease_indent (self):
- if self.on () and len (self.indent_) > 4:
- self.indent_ = self.indent_ [:-4]
-
- def do_log (self, *args):
- pass
-
- def interesting (self, source_name):
- return False
-
- def on (self):
- return True
-
-class TextLogger (NullLogger):
- def __init__ (self):
- NullLogger.__init__ (self)
-
- def do_log (self, arg):
- sys.stdout.write (str (arg))
-
- def interesting (self, source_name):
- return True
-
- def on (self):
- return True
diff --git a/jam-files/boost-build/util/numbers.jam b/jam-files/boost-build/util/numbers.jam
deleted file mode 100644
index 665347d3..00000000
--- a/jam-files/boost-build/util/numbers.jam
+++ /dev/null
@@ -1,218 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-
-
-rule trim-leading-zeroes ( value )
-{
- return [ CALC $(value) + 0 ] ;
-}
-
-
-rule check ( numbers * )
-{
- for local n in $(numbers)
- {
- switch $(n)
- {
- case *[^0-9]* :
- errors.error $(n) "in" $(numbers) : is not a number ;
- }
- }
-}
-
-
-rule increment ( number )
-{
- return [ CALC $(number) + 1 ] ;
-}
-
-
-rule decrement ( number )
-{
- return [ CALC $(number) - 1 ] ;
-}
-
-
-rule range ( start finish ? : step ? )
-{
- if ! $(finish)
- {
- finish = $(start) ;
- start = 1 ;
- }
- step ?= 1 ;
-
- check $(start) $(finish) $(step) ;
-
- if $(finish) != 0
- {
- local result ;
- while [ less $(start) $(finish) ] || $(start) = $(finish)
- {
- result += $(start) ;
- start = [ CALC $(start) + $(step) ] ;
- }
- return $(result) ;
- }
-}
-
-
-rule less ( n1 n2 )
-{
- switch [ CALC $(n2) - $(n1) ]
- {
- case [1-9]* : return true ;
- }
-}
-
-
-rule log10 ( number )
-{
- switch $(number)
- {
- case *[^0-9]* : errors.error $(number) is not a number ;
- case 0 : errors.error can't take log of zero ;
- case [1-9] : return 0 ;
- case [1-9]? : return 1 ;
- case [1-9]?? : return 2 ;
- case [1-9]??? : return 3 ;
- case [1-9]???? : return 4 ;
- case [1-9]????? : return 5 ;
- case [1-9]?????? : return 6 ;
- case [1-9]??????? : return 7 ;
- case [1-9]???????? : return 8 ;
- case [1-9]????????? : return 9 ;
- case * :
- {
- import sequence ;
- import string ;
- local chars = [ string.chars $(number) ] ;
- while $(chars[1]) = 0
- {
- chars = $(chars[2-]) ;
- }
- if ! $(chars)
- {
- errors.error can't take log of zero ;
- }
- else
- {
- return [ decrement [ sequence.length $(chars) ] ] ;
- }
- }
- }
-}
-
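The log10 rule above exploits decimal notation: for a positive integer, the integer part of log10 is the digit count minus one, once leading zeroes are stripped. A Python sketch of the same idea (illustration only):

    def log10_floor(number):
        digits = number.lstrip("0")
        if not digits:
            raise ValueError("can't take log of zero")
        return len(digits) - 1

    log10_floor("99")       # 1
    log10_floor("100")      # 2
    log10_floor("0001000")  # 3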
-
-rule __test__ ( )
-{
- import assert ;
-
- assert.result 1 : increment 0 ;
- assert.result 2 : increment 1 ;
- assert.result 1 : decrement 2 ;
- assert.result 0 : decrement 1 ;
- assert.result 50 : increment 49 ;
- assert.result 49 : decrement 50 ;
- assert.result 99 : increment 98 ;
- assert.result 99 : decrement 100 ;
- assert.result 100 : increment 99 ;
- assert.result 999 : decrement 1000 ;
- assert.result 1000 : increment 999 ;
-
- assert.result 1 2 3 : range 3 ;
- assert.result 1 2 3 4 5 6 7 8 9 10 11 12 : range 12 ;
- assert.result 3 4 5 6 7 8 9 10 11 : range 3 11 ;
- assert.result : range 0 ;
- assert.result 1 4 7 10 : range 10 : 3 ;
- assert.result 2 4 6 8 10 : range 2 10 : 2 ;
- assert.result 25 50 75 100 : range 25 100 : 25 ;
-
- assert.result 0 : trim-leading-zeroes 0 ;
- assert.result 1234 : trim-leading-zeroes 1234 ;
- assert.result 123456 : trim-leading-zeroes 0000123456 ;
- assert.result 1000123456 : trim-leading-zeroes 1000123456 ;
- assert.result 10000 : trim-leading-zeroes 10000 ;
- assert.result 10000 : trim-leading-zeroes 00010000 ;
-
- assert.true less 1 2 ;
- assert.true less 1 12 ;
- assert.true less 1 21 ;
- assert.true less 005 217 ;
- assert.false less 0 0 ;
- assert.false less 03 3 ;
- assert.false less 3 03 ;
- assert.true less 005 217 ;
- assert.true less 0005 217 ;
- assert.true less 5 00217 ;
-
- # TEMPORARILY disabled, because nested "try"/"catch" do not work and I do
- # not have the time to fix that right now.
- if $(0)
- {
- try ;
- {
- decrement 0 ;
- }
- catch can't decrement zero! ;
-
- try ;
- {
- check foo ;
- }
- catch : not a number ;
-
- try ;
- {
- increment foo ;
- }
- catch : not a number ;
-
- try ;
- {
- log10 0 ;
- }
- catch can't take log of zero ;
-
- try ;
- {
- log10 000 ;
- }
- catch can't take log of zero ;
-
- }
-
- assert.result 0 : log10 1 ;
- assert.result 0 : log10 9 ;
- assert.result 1 : log10 10 ;
- assert.result 1 : log10 99 ;
- assert.result 2 : log10 100 ;
- assert.result 2 : log10 101 ;
- assert.result 2 : log10 125 ;
- assert.result 2 : log10 999 ;
- assert.result 3 : log10 1000 ;
- assert.result 10 : log10 12345678901 ;
-
- for local x in [ range 75 110 : 5 ]
- {
- for local y in [ range $(x) 111 : 3 ]
- {
- if $(x) != $(y)
- {
- assert.true less $(x) $(y) ;
- }
- }
- }
-
- for local x in [ range 90 110 : 2 ]
- {
- for local y in [ range 80 $(x) : 4 ]
- {
- assert.false less $(x) $(y) ;
- }
- }
-}
diff --git a/jam-files/boost-build/util/option.jam b/jam-files/boost-build/util/option.jam
deleted file mode 100644
index f6dc3752..00000000
--- a/jam-files/boost-build/util/option.jam
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright (c) 2005 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import modules ;
-
-# Set a value for a named option, to be used when not overridden on the command
-# line.
-rule set ( name : value ? )
-{
- .option.$(name) = $(value) ;
-}
-
-rule get ( name : default-value ? : implied-value ? )
-{
- local m = [ MATCH --$(name)=(.*) : [ modules.peek : ARGV ] ] ;
- if $(m)
- {
- return $(m[1]) ;
- }
- else
- {
- m = [ MATCH (--$(name)) : [ modules.peek : ARGV ] ] ;
- if $(m) && $(implied-value)
- {
- return $(implied-value) ;
- }
- else if $(.option.$(name))
- {
- return $(.option.$(name)) ;
- }
- else
- {
- return $(default-value) ;
- }
- }
-}
-
-
-# Check command-line args as soon as possible. For each option, try to load a
-# module named after the option. If that succeeds, invoke the 'process' rule in
-# that module. The rule may return "true" to indicate that the regular build
-# process should not be attempted.
-#
-# Options take the general form of: --<name>[=<value>] [<value>]
-#
-rule process ( )
-{
- local ARGV = [ modules.peek : ARGV ] ;
- local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ;
-
- local dont-build ;
- local args = $(ARGV) ;
- while $(args)
- {
- local arg = [ MATCH ^--(.*) : $(args[1]) ] ;
- while $(args[2-]) && ! $(arg)
- {
- args = $(args[2-]) ;
- arg = [ MATCH ^--(.*) : $(args[1]) ] ;
- }
- args = $(args[2-]) ;
-
- if $(arg)
- {
- local split = [ MATCH ^(([^-=]+)[^=]*)(=?)(.*)$ : $(arg) ] ;
- local full-name = $(split[1]) ;
- local prefix = $(split[2]) ;
- local values ;
-
- if $(split[3])
- {
- values = $(split[4]) ;
- }
- if $(args) && ! [ MATCH ^(--).* : $(args[1]) ]
- {
- values += $(args[1]) ;
- args = $(args[2-]) ;
- }
-
- # Look in the 'options' subdirectories of BOOST_BUILD_PATH for modules
- # matching the full option name and then its prefix.
- local plugin-dir = options ;
- local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) :
- $(full-name).jam $(prefix).jam ] ;
-
- if $(option-files)
- {
- # Load the file into a module named for the option.
- local f = $(option-files[1]) ;
- local module-name = --$(f:D=:S=) ;
- modules.load $(module-name) : $(f:D=) : $(f:D) ;
-
- # If there is a process rule, call it with the full option name
- # and its value (if any). If there was no "=" in the option, the
- # value will be empty.
- if process in [ RULENAMES $(module-name) ]
- {
- dont-build += [ modules.call-in $(module-name) : process
- --$(full-name) : $(values) ] ;
- }
- }
- }
- }
-
- return $(dont-build) ;
-}
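The option syntax handled above, --<name>[=<value>] [<value>], can be illustrated with a small hedged Python sketch (parse_options is a hypothetical helper, not part of Boost.Build):

def parse_options(argv):
    # Collect --name=value, --name value and bare --name flags, following the
    # general form described in the comment above.
    options, i = {}, 0
    while i < len(argv):
        arg = argv[i]
        if arg.startswith("--"):
            name, sep, value = arg[2:].partition("=")
            if not sep and i + 1 < len(argv) and not argv[i + 1].startswith("--"):
                value = argv[i + 1]
                i += 1
            options[name] = value
        i += 1
    return options

print(parse_options(["--toolset=gcc", "--with-python", "2.7"]))
# {'toolset': 'gcc', 'with-python': '2.7'}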
diff --git a/jam-files/boost-build/util/option.py b/jam-files/boost-build/util/option.py
deleted file mode 100644
index 47d6abdf..00000000
--- a/jam-files/boost-build/util/option.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2005-2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import sys
-import re
-import b2.util.regex
-
-options = {}
-
-# Set a value for a named option, to be used when not overridden on the command
-# line.
-def set(name, value=None):
-
- global options
-
- options[name] = value
-
-def get(name, default_value=None, implied_value=None):
-
- global options
-
- matches = b2.util.regex.transform(sys.argv, "--" + re.escape(name) + "=(.*)")
- if matches:
- return matches[-1]
- else:
- m = b2.util.regex.transform(sys.argv, "--(" + re.escape(name) + ")")
- if m and implied_value:
- return implied_value
- elif options.has_key(name) and options[name] != None:
- return options[name]
- else:
- return default_value
diff --git a/jam-files/boost-build/util/order.jam b/jam-files/boost-build/util/order.jam
deleted file mode 100644
index a74fc8c8..00000000
--- a/jam-files/boost-build/util/order.jam
+++ /dev/null
@@ -1,169 +0,0 @@
-# Copyright (C) 2003 Vladimir Prus
-# Use, modification, and distribution is subject to the Boost Software
-# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
-# at http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines a class which allows ordering arbitrary objects with
-# regard to an arbitrary binary relation.
-#
-# The primary use case is the gcc toolset, which is sensitive to library order:
-# if library 'a' uses symbols from library 'b', then 'a' must be present before
-# 'b' on the linker's command line.
-#
-# This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris
-# LD (and for Solaris toolset as well), the order always matters.
-#
-# So, we need to store order requirements and then order libraries according to
-# them. It is not possible to use the dependency graph as order requirements.
-# What we need is a "use symbols" relationship while dependency graph provides
-# the "needs to be updated" relationship.
-#
-# For example::
-# lib a : a.cpp b;
-# lib b ;
-#
-# For static linking, library 'a' need not depend on 'b'. However, it should
-# still come before 'b' on the command line.
-
-class order
-{
- rule __init__ ( )
- {
- }
-
- # Adds the constraint that 'first' should precede 'second'.
- rule add-pair ( first second )
- {
- .constraints += $(first)--$(second) ;
- }
- NATIVE_RULE class@order : add-pair ;
-
- # Given a list of objects, reorder them so that the constraints specified by
- # 'add-pair' are satisfied.
- #
- # The algorithm was adopted from an awk script by Nikita Youshchenko
- # (yoush at cs dot msu dot su)
- rule order ( objects * )
- {
- # The algorithm used is the same as standard transitive closure, except
- # that we're not keeping in-degree for all vertices, but rather removing
- # edges.
- local result ;
- if $(objects)
- {
- local constraints = [ eliminate-unused-constraits $(objects) ] ;
-
- # Find some library that nobody depends upon and add it to the
- # 'result' array.
- local obj ;
- while $(objects)
- {
- local new_objects ;
- while $(objects)
- {
- obj = $(objects[1]) ;
- if [ has-no-dependents $(obj) : $(constraints) ]
- {
- # Emulate break ;
- new_objects += $(objects[2-]) ;
- objects = ;
- }
- else
- {
- new_objects += $(obj) ;
- obj = ;
- objects = $(objects[2-]) ;
- }
- }
-
- if ! $(obj)
- {
- errors.error "Circular order dependencies" ;
- }
- # No problem with placing first.
- result += $(obj) ;
- # Remove all constraints where 'obj' comes first, since they are
- # already satisfied.
- constraints = [ remove-satisfied $(constraints) : $(obj) ] ;
-
- # Add the remaining objects for further processing on the next
- # iteration
- objects = $(new_objects) ;
- }
-
- }
- return $(result) ;
- }
- NATIVE_RULE class@order : order ;
-
- # Eliminate constraints which mention objects not in 'objects'. In
- # graph-theory terms, this is finding a subgraph induced by ordered
- # vertices.
- rule eliminate-unused-constraits ( objects * )
- {
- local result ;
- for local c in $(.constraints)
- {
- local m = [ MATCH (.*)--(.*) : $(c) ] ;
- if $(m[1]) in $(objects) && $(m[2]) in $(objects)
- {
- result += $(c) ;
- }
- }
- return $(result) ;
- }
-
- # Returns true if there's no constraint in 'constraints' where 'obj' comes
- # second.
- rule has-no-dependents ( obj : constraints * )
- {
- local failed ;
- while $(constraints) && ! $(failed)
- {
- local c = $(constraints[1]) ;
- local m = [ MATCH (.*)--(.*) : $(c) ] ;
- if $(m[2]) = $(obj)
- {
- failed = true ;
- }
- constraints = $(constraints[2-]) ;
- }
- if ! $(failed)
- {
- return true ;
- }
- }
-
- rule remove-satisfied ( constraints * : obj )
- {
- local result ;
- for local c in $(constraints)
- {
- local m = [ MATCH (.*)--(.*) : $(c) ] ;
- if $(m[1]) != $(obj)
- {
- result += $(c) ;
- }
- }
- return $(result) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import "class" : new ;
- import assert ;
-
- c1 = [ new order ] ;
- $(c1).add-pair l1 l2 ;
-
- assert.result l1 l2 : $(c1).order l1 l2 ;
- assert.result l1 l2 : $(c1).order l2 l1 ;
-
- $(c1).add-pair l2 l3 ;
- assert.result l1 l2 : $(c1).order l2 l1 ;
- $(c1).add-pair x l2 ;
- assert.result l1 l2 : $(c1).order l2 l1 ;
- assert.result l1 l2 l3 : $(c1).order l2 l3 l1 ;
-}
diff --git a/jam-files/boost-build/util/order.py b/jam-files/boost-build/util/order.py
deleted file mode 100644
index 4e67b3f1..00000000
--- a/jam-files/boost-build/util/order.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright (C) 2003 Vladimir Prus
-# Use, modification, and distribution is subject to the Boost Software
-# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
-# at http://www.boost.org/LICENSE_1_0.txt)
-
-class Order:
- """Allows ordering arbitrary objects with regard to an arbitrary binary relation.
-
- The primary use case is the gcc toolset, which is sensitive to
- library order: if library 'a' uses symbols from library 'b',
- then 'a' must be present before 'b' on the linker's command line.
-
- This requirement can be lifted for gcc with GNU ld, but for gcc with
- Solaris LD (and for Solaris toolset as well), the order always matters.
-
- So, we need to store order requirements and then order libraries
- according to them. It is not possible to use the dependency graph as
- order requirements. What we need is a "use symbols" relationship
- while the dependency graph provides the "needs to be updated" relationship.
-
- For example::
- lib a : a.cpp b;
- lib b ;
-
- For static linking, the 'a' library need not depend on 'b'. However, it
- still should come before 'b' on the command line.
- """
-
- def __init__ (self):
- self.constraints_ = []
-
- def add_pair (self, first, second):
- """ Adds the constraint that 'first' should precede 'second'.
- """
- self.constraints_.append ((first, second))
-
- def order (self, objects):
- """ Given a list of objects, reorder them so that the constraints specified
- by 'add_pair' are satisfied.
-
- The algorithm was adopted from an awk script by Nikita Youshchenko
- (yoush at cs dot msu dot su)
- """
- # The algorithm used is the same as standard transitive closure,
- # except that we're not keeping in-degree for all vertices, but
- # rather removing edges.
- result = []
-
- if not objects:
- return result
-
- constraints = self.__eliminate_unused_constraits (objects)
-
- # Find some library that nobody depends upon and add it to
- # the 'result' array.
- obj = None
- while objects:
- new_objects = []
- while objects:
- obj = objects [0]
-
- if self.__has_no_dependents (obj, constraints):
- # Emulate break ;
- new_objects.extend (objects [1:])
- objects = []
-
- else:
- new_objects.append (obj)
- obj = None
- objects = objects [1:]
-
- if not obj:
- raise BaseException ("Circular order dependencies")
-
- # No problem with placing first.
- result.append (obj)
-
- # Remove all constraints where 'obj' comes first,
- # since they are already satisfied.
- constraints = self.__remove_satisfied (constraints, obj)
-
- # Add the remaining objects for further processing
- # on the next iteration
- objects = new_objects
-
- return result
-
- def __eliminate_unused_constraits (self, objects):
- """ Eliminate constraints which mention objects not in 'objects'.
- In graph-theory terms, this is finding subgraph induced by
- ordered vertices.
- """
- result = []
- for c in self.constraints_:
- if c [0] in objects and c [1] in objects:
- result.append (c)
-
- return result
-
- def __has_no_dependents (self, obj, constraints):
- """ Returns true if there's no constraint in 'constraints' where
- 'obj' comes second.
- """
- failed = False
- while constraints and not failed:
- c = constraints [0]
-
- if c [1] == obj:
- failed = True
-
- constraints = constraints [1:]
-
- return not failed
-
- def __remove_satisfied (self, constraints, obj):
- result = []
- for c in constraints:
- if c [0] != obj:
- result.append (c)
-
- return result
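A short usage sketch of the Order class above (the library names are made up for illustration, and the sketch assumes the class defined above is importable):

order = Order()
order.add_pair("libA", "libB")   # libA uses symbols from libB
order.add_pair("libB", "libC")

# Reorders the list so that every 'first' precedes its 'second'.
print(order.order(["libC", "libA", "libB"]))   # ['libA', 'libB', 'libC']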
diff --git a/jam-files/boost-build/util/os.jam b/jam-files/boost-build/util/os.jam
deleted file mode 100644
index daef27f7..00000000
--- a/jam-files/boost-build/util/os.jam
+++ /dev/null
@@ -1,171 +0,0 @@
-# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import modules ;
-import string ;
-
-
-# Return the value(s) of the given environment variable(s) at the time bjam was
-# invoked.
-rule environ ( variable-names + )
-{
- return [ modules.peek .ENVIRON : $(variable-names) ] ;
-}
-
-.name = [ modules.peek : OS ] ;
-.platform = [ modules.peek : OSPLAT ] ;
-.version = [ modules.peek : OSVER ] ;
-
-
-local rule constant ( c : os ? )
-{
- os ?= $(.name) ;
- # First look for a platform-specific name, then the general value.
- local variables = .$(c)-$(os) .$(c) ;
- local result = $($(variables)) ;
- return $(result[1]) ;
-}
-
-rule get-constant ( os ? )
-{
- # Find the name of the constant being accessed, which is equal to the name
- # used to invoke us.
- local bt = [ BACKTRACE 1 ] ;
- local rulename = [ MATCH ([^.]*)$ : $(bt[4]) ] ;
- return [ constant $(rulename) : $(os) ] ;
-}
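The trick above, deducing which constant to return from the name of the invoking rule via BACKTRACE, has a rough Python analogue using the inspect module. This is a hedged sketch; the names CONSTANTS, _get_constant and path_separator are hypothetical:

import inspect

CONSTANTS = {"path-separator": ":", "executable-suffix": ""}

def _get_constant():
    # Look up the constant named after the function that called us,
    # mirroring the BACKTRACE-based dispatch in get-constant above.
    caller = inspect.stack()[1].function
    return CONSTANTS[caller.replace("_", "-")]

def path_separator():
    return _get_constant()

print(path_separator())   # ':'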
-
-
-# export all the common constants
-.constants = name platform version shared-library-path-variable path-separator executable-path-variable executable-suffix ;
-for local constant in $(.constants)
-{
- IMPORT $(__name__) : get-constant : $(__name__) : $(constant) ;
-}
-EXPORT $(__name__) : $(.constants) ;
-
-.executable-path-variable-NT = PATH ;
-# On Windows the case and capitalization of PATH is not always predictable, so
-# let's find out what variable name was really set.
-if $(.name) = NT
-{
- for local n in [ VARNAMES .ENVIRON ]
- {
- if $(n:L) = path
- {
- .executable-path-variable-NT = $(n) ;
- }
- }
-}
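The same idea, finding the real capitalization of the PATH variable by scanning the environment, sketched in Python (illustrative only; real_path_variable is a hypothetical helper):

import os

def real_path_variable():
    # On Windows the variable may be set as 'Path', 'PATH', etc.;
    # return whichever spelling is actually present.
    for name in os.environ:
        if name.lower() == "path":
            return name
    return "PATH"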
-
-# Specific constants for various platforms. There's no need to define any
-# constant whose value would be the same as the default, below.
-.shared-library-path-variable-NT = $(.executable-path-variable-NT) ;
-.path-separator-NT = ";" ;
-.expand-variable-prefix-NT = % ;
-.expand-variable-suffix-NT = % ;
-.executable-suffix-NT = .exe ;
-
-.shared-library-path-variable-CYGWIN = PATH ;
-
-.shared-library-path-variable-MACOSX = DYLD_LIBRARY_PATH ;
-
-.shared-library-path-variable-AIX = LIBPATH ;
-
-# Default constants
-.shared-library-path-variable = LD_LIBRARY_PATH ;
-.path-separator = ":" ;
-.expand-variable-prefix = $ ;
-.expand-variable-suffix = "" ;
-.executable-path-variable = PATH ;
-.executable-suffix = "" ;
-
-
-# Return a list of the directories in the PATH. Yes, that information is (sort
-# of) available in the global module, but jam code can change those values, and
-# it isn't always clear what case/capitalization to use when looking. This rule
-# is a more reliable way to get there.
-rule executable-path ( )
-{
- return [ string.words [ environ [ constant executable-path-variable ] ]
- : [ constant path-separator ] ] ;
-}
-
-
-# Initialize the list of home directories for the current user depending on the
-# OS.
-if $(.name) = NT
-{
- local home = [ environ HOMEDRIVE HOMEPATH ] ;
- .home-directories = $(home[1])$(home[2]) [ environ HOME ] [ environ USERPROFILE ] ;
-}
-else
-{
- .home-directories = [ environ HOME ] ;
-}
-
-
-# Can't use 'constant' mechanism because it only returns 1-element values.
-rule home-directories ( )
-{
- return $(.home-directories) ;
-}
-
-
-# Return the string needed to represent the expansion of the named shell
-# variable.
-rule expand-variable ( variable )
-{
- local prefix = [ constant expand-variable-prefix ] ;
- local suffix = [ constant expand-variable-suffix ] ;
- return $(prefix)$(variable)$(suffix) ;
-}
-
-
-# Returns true if running on Windows, whether in Cygwin or not.
-rule on-windows ( )
-{
- local result ;
- if [ modules.peek : NT ]
- {
- result = true ;
- }
- else if [ modules.peek : UNIX ]
- {
- switch [ modules.peek : JAMUNAME ]
- {
- case CYGWIN* :
- {
- result = true ;
- }
- }
- }
- return $(result) ;
-}
-
-
-if ! [ on-windows ]
-{
- .on-unix = 1 ;
-}
-
-
-rule on-unix
-{
- return $(.on-unix) ;
-}
-
-
-rule __test__
-{
- import assert ;
- if ! ( --quiet in [ modules.peek : ARGV ] )
- {
- ECHO os: name= [ name ] ;
- ECHO os: version= [ version ] ;
- }
- assert.true name ;
-}
diff --git a/jam-files/boost-build/util/os_j.py b/jam-files/boost-build/util/os_j.py
deleted file mode 100644
index f44cca62..00000000
--- a/jam-files/boost-build/util/os_j.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Status: stub, just enough to make tests work.
-#
-# Named os_j to avoid conflicts with standard 'os'. See
-# project.py:import for special-casing.
-#
-# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import bjam
-
-__OS = bjam.call("peek", [], "OS")[0]
-
-# Return Jam's name for the OS so that existing code does not break
-# when faced with Python naming.
-def name():
- return __OS
diff --git a/jam-files/boost-build/util/path.jam b/jam-files/boost-build/util/path.jam
deleted file mode 100644
index ea26b816..00000000
--- a/jam-files/boost-build/util/path.jam
+++ /dev/null
@@ -1,934 +0,0 @@
-# Copyright Vladimir Prus 2002-2006.
-# Copyright Dave Abrahams 2003-2004.
-# Copyright Rene Rivera 2003-2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Performs various path manipulations. Paths are always in a 'normalized'
-# representation. In it, a path may be either:
-#
-# - '.', or
-#
-# - ['/'] [ ( '..' '/' )* (token '/')* token ]
-#
-# In plain English, a path can be rooted, '..' elements are allowed only at the
-# beginning, and it never ends in a slash, except for a path consisting of a
-# slash only.
-
-import errors ;
-import modules ;
-import regex ;
-import sequence ;
-import set ;
-import version ;
-
-
-os = [ modules.peek : OS ] ;
-if [ modules.peek : UNIX ]
-{
- local uname = [ modules.peek : JAMUNAME ] ;
- switch $(uname)
- {
- case CYGWIN* : os = CYGWIN ;
- case * : os = UNIX ;
- }
-}
-
-
-# Converts the native path into normalized form.
-#
-rule make ( native )
-{
- return [ make-$(os) $(native) ] ;
-}
-
-
-# Builds native representation of the path.
-#
-rule native ( path )
-{
- return [ native-$(os) $(path) ] ;
-}
-
-
-# Tests if a path is rooted.
-#
-rule is-rooted ( path )
-{
- return [ MATCH "^(/)" : $(path) ] ;
-}
-
-
-# Tests if a path has a parent.
-#
-rule has-parent ( path )
-{
- if $(path) != /
- {
- return 1 ;
- }
- else
- {
- return ;
- }
-}
-
-
-# Returns the path without any directory components.
-#
-rule basename ( path )
-{
- return [ MATCH "([^/]+)$" : $(path) ] ;
-}
-
-
-# Returns parent directory of the path. If no parent exists, error is issued.
-#
-rule parent ( path )
-{
- if [ has-parent $(path) ]
- {
- if $(path) = .
- {
- return .. ;
- }
- else
- {
- # Strip everything at the end of path up to and including the last
- # slash.
- local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
-
- # Did we strip what we shouldn't?
- if $(result[2]) = ".."
- {
- return $(path)/.. ;
- }
- else
- {
- if ! $(result[1])
- {
- if [ is-rooted $(path) ]
- {
- result = / ;
- }
- else
- {
- result = . ;
- }
- }
- return $(result[1]) ;
- }
- }
- }
- else
- {
- errors.error "Path '$(path)' has no parent" ;
- }
-}
-
-
-# Returns path2 such that "[ join path path2 ] = .". The path may not contain
-# ".." element or be rooted.
-#
-rule reverse ( path )
-{
- if $(path) = .
- {
- return $(path) ;
- }
- else
- {
- local tokens = [ regex.split $(path) "/" ] ;
- local tokens2 ;
- for local i in $(tokens)
- {
- tokens2 += .. ;
- }
- return [ sequence.join $(tokens2) : "/" ] ;
- }
-}
-
-
-# Concatenates the passed path elements. Generates an error if any element other
-# than the first one is rooted. Skips any empty or undefined path elements.
-#
-rule join ( elements + )
-{
- if ! $(elements[2-])
- {
- return $(elements[1]) ;
- }
- else
- {
- for local e in $(elements[2-])
- {
- if [ is-rooted $(e) ]
- {
- errors.error only the first element may be rooted ;
- }
- }
- if [ version.check-jam-version 3 1 17 ]
- {
- return [ NORMALIZE_PATH "$(elements)" ] ;
- }
- else
- {
- # Boost Jam prior to version 3.1.17 had problems with its
- # NORMALIZE_PATH rule in case you passed it a leading backslash
- # instead of a slash, in some cases when you sent it an empty
- # initial path element and possibly some others. At least some of
- # those cases were being hit and relied upon when calling this rule
- # from the path.make-NT rule.
- if ! $(elements[1]) && $(elements[2])
- {
- return [ NORMALIZE_PATH "/" "$(elements[2-])" ] ;
- }
- else
- {
- return [ NORMALIZE_PATH "$(elements)" ] ;
- }
- }
- }
-}
-
-
-# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged.
-#
-rule root ( path root )
-{
- if [ is-rooted $(path) ]
- {
- return $(path) ;
- }
- else
- {
- return [ join $(root) $(path) ] ;
- }
-}
-
-
-# Returns the current working directory.
-#
-rule pwd ( )
-{
- if ! $(.pwd)
- {
- .pwd = [ make [ PWD ] ] ;
- }
- return $(.pwd) ;
-}
-
-
-# Returns the list of files matching the given pattern in the specified
-# directory. Both directories and patterns are supplied as portable paths. Each
-# pattern should be a non-absolute path and can't contain "." or ".." elements.
-# Each slash-separated element of a pattern can contain the following special
-# characters:
-# - '?', which matches any single character
-# - '*', which matches an arbitrary number of characters.
-# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and
-# only if e1 matches p1, e2 matches p2 and so on.
-#
-# For example:
-# [ glob . : *.cpp ]
-# [ glob . : */build/Jamfile ]
-#
-rule glob ( dirs * : patterns + : exclude-patterns * )
-{
- local result ;
- local real-patterns ;
- local real-exclude-patterns ;
- for local d in $(dirs)
- {
- for local p in $(patterns)
- {
- local pattern = [ path.root $(p) $(d) ] ;
- real-patterns += [ path.native $(pattern) ] ;
- }
-
- for local p in $(exclude-patterns)
- {
- local pattern = [ path.root $(p) $(d) ] ;
- real-exclude-patterns += [ path.native $(pattern) ] ;
- }
- }
-
- local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
- inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
- local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
- exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
-
- return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ]
- ;
-}
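The element-wise pattern matching described above ('?' and '*' apply within each slash-separated element) can be sketched in Python with fnmatch; matches_pattern is a hypothetical helper, not part of this module:

import fnmatch

def matches_pattern(path, pattern):
    # Each element of 'path' must match the corresponding element of
    # 'pattern'; '*' and '?' never cross a '/' boundary here.
    pe, ge = path.split("/"), pattern.split("/")
    return len(pe) == len(ge) and all(
        fnmatch.fnmatchcase(e, p) for e, p in zip(pe, ge))

print(matches_pattern("src/build/Jamfile", "*/build/Jamfile"))  # True
print(matches_pattern("src/doc/Jamfile", "*/build/Jamfile"))    # False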
-
-
-# Recursive version of GLOB. Builds the glob of files while also searching in
-# the subdirectories of the given roots. An optional set of exclusion patterns
-# will filter out the matching entries from the result. The exclusions also
-# apply to the subdirectory scanning, such that directories that match the
-# exclusion patterns will not be searched.
-#
-rule glob-tree ( roots * : patterns + : exclude-patterns * )
-{
- return [ sequence.transform path.make : [ .glob-tree [ sequence.transform
- path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ;
-}
-
-
-local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
-{
- local excluded ;
- if $(exclude-patterns)
- {
- excluded = [ GLOB $(roots) : $(exclude-patterns) ] ;
- }
- local result = [ set.difference [ GLOB $(roots) : $(patterns) ] :
- $(excluded) ] ;
- local subdirs ;
- for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ]
- {
- if ! ( $(d:D=) in . .. ) && ! [ CHECK_IF_FILE $(d) ]
- {
- subdirs += $(d) ;
- }
- }
- if $(subdirs)
- {
- result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ]
- ;
- }
- return $(result) ;
-}
-
-
-# Returns true if the specified file exists.
-#
-rule exists ( file )
-{
- return [ path.glob $(file:D) : $(file:D=) ] ;
-}
-NATIVE_RULE path : exists ;
-
-
-# Finds out the absolute name of the path and returns the list of all its
-# parents, starting with the immediate one. Parents are returned as relative
-# names. If 'upper_limit' is specified, directories above it will be pruned.
-#
-rule all-parents ( path : upper_limit ? : cwd ? )
-{
- cwd ?= [ pwd ] ;
- local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ;
-
- if ! $(upper_limit)
- {
- upper_limit = / ;
- }
- local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ;
-
- # Leave only elements in 'path_ele' below 'upper_ele'.
- while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) )
- {
- upper_ele = $(upper_ele[2-]) ;
- path_ele = $(path_ele[2-]) ;
- }
-
- # Have all upper elements been removed ?
- if $(upper_ele)
- {
- errors.error "$(upper_limit) is not prefix of $(path)" ;
- }
-
- # Create the relative paths to parents, number of elements in 'path_ele'.
- local result ;
- for local i in $(path_ele)
- {
- path = [ parent $(path) ] ;
- result += $(path) ;
- }
- return $(result) ;
-}
-
-
-# Search for 'pattern' in parent directories of 'dir', up till and including
-# 'upper_limit', if it is specified, or till the filesystem root otherwise.
-#
-rule glob-in-parents ( dir : patterns + : upper-limit ? )
-{
- local result ;
- local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
-
- while $(parent-dirs) && ! $(result)
- {
- result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
- parent-dirs = $(parent-dirs[2-]) ;
- }
- return $(result) ;
-}
-
-
-# Assuming 'child' is a subdirectory of 'parent', return the relative path from
-# 'parent' to 'child'.
-#
-rule relative ( child parent : no-error ? )
-{
- local not-a-child ;
- if $(parent) = "."
- {
- return $(child) ;
- }
- else
- {
- local split1 = [ regex.split $(parent) / ] ;
- local split2 = [ regex.split $(child) / ] ;
-
- while $(split1)
- {
- if $(split1[1]) = $(split2[1])
- {
- split1 = $(split1[2-]) ;
- split2 = $(split2[2-]) ;
- }
- else
- {
- not-a-child = true ;
- split1 = ;
- }
- }
- if $(split2)
- {
- if $(not-a-child)
- {
- if $(no-error)
- {
- return not-a-child ;
- }
- else
- {
- errors.error $(child) is not a subdir of $(parent) ;
- }
- }
- else
- {
- return [ join $(split2) ] ;
- }
- }
- else
- {
- return "." ;
- }
- }
-}
-
-
-# Returns the minimal path to path2 that is relative to path1.
-#
-rule relative-to ( path1 path2 )
-{
- local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
- local split1 = [ regex.split $(path1) / ] ;
- local split2 = [ regex.split $(path2) / ] ;
-
- while $(split1) && $(root_1)
- {
- if $(split1[1]) = $(split2[1])
- {
- root_1 = $(root_1[2-]) ;
- split1 = $(split1[2-]) ;
- split2 = $(split2[2-]) ;
- }
- else
- {
- split1 = ;
- }
- }
- return [ join . $(root_1) $(split2) ] ;
-}
-
-
-# Returns the list of paths which are used by the operating system for looking
-# up programs.
-#
-rule programs-path ( )
-{
- local result ;
- local raw = [ modules.peek : PATH Path path ] ;
- for local p in $(raw)
- {
- if $(p)
- {
- result += [ path.make $(p) ] ;
- }
- }
- return $(result) ;
-}
-
-rule makedirs ( path )
-{
- local result = true ;
- local native = [ native $(path) ] ;
- if ! [ exists $(native) ]
- {
- if [ makedirs [ parent $(path) ] ]
- {
- if ! [ MAKEDIR $(native) ]
- {
- errors.error "Could not create directory '$(path)'" ;
- result = ;
- }
- }
- }
- return $(result) ;
-}
-
-# Converts native Windows paths into our internal canonic path representation.
-# Supports 'invalid' paths containing multiple successive path separator
-# characters.
-#
-# TODO: Check and if needed add support for Windows 'X:file' path format where
-# the file is located in the current folder on drive X.
-#
-rule make-NT ( native )
-{
- local result ;
-
- if [ version.check-jam-version 3 1 17 ]
- {
- result = [ NORMALIZE_PATH $(native) ] ;
- }
- else
- {
- # This old implementation is really fragile due to the not so clear way
- # the NORMALIZE_PATH rule worked in Boost.Jam versions prior to 3.1.17.
- # E.g. path.join would mostly ignore empty path elements but would root
- # the joined path in case the initial two path elements were empty, or
- # some similar accidental weirdness.
- result = [ path.join [ regex.split $(native) "[/\\]" ] ] ;
- }
-
- # We need to add an extra '/' in front in case this is a rooted Windows path
- # starting with a drive letter and not a path separator character since the
- # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter
- # and treats it as a regular folder name.
- if [ regex.match "(^.:)" : $(native) ]
- {
- result = /$(result) ;
- }
-
- return $(result) ;
-}
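A rough Python sketch of the two conversions performed above: unifying path separators and rooting drive-letter paths with a leading '/'. It deliberately skips the '.'/'..' resolution that NORMALIZE_PATH handles, and make_nt_sketch is a hypothetical name:

import re

def make_nt_sketch(native):
    # Collapse any run of '/' or '\' into a single '/', drop a trailing
    # separator, and prefix drive-letter paths with '/', as described above.
    p = re.sub(r"[\\/]+", "/", native).rstrip("/") or "."
    if re.match(r"^.:", native):
        p = "/" + p
    return p

print(make_nt_sketch("D:\\My Documents"))  # '/D:/My Documents'
print(make_nt_sketch("foo\\bar\\"))        # 'foo/bar'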
-
-
-rule native-NT ( path )
-{
- local result ;
- if [ is-rooted $(path) ] && ! [ regex.match "^/(.:)" : $(path) ]
- {
- result = $(path) ;
- }
- else
- {
- result = [ MATCH "^/?(.*)" : $(path) ] ;
- }
- result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
- return $(result) ;
-}
-
-
-rule make-UNIX ( native )
-{
- # VP: I have no idea how 'native' can be empty here! But it can!
- if ! $(native)
- {
- errors.error "Empty path passed to 'make-UNIX'" ;
- }
- else
- {
- return [ NORMALIZE_PATH $(native:T) ] ;
- }
-}
-
-
-rule native-UNIX ( path )
-{
- return $(path) ;
-}
-
-
-rule make-CYGWIN ( path )
-{
- return [ make-NT $(path) ] ;
-}
-
-
-rule native-CYGWIN ( path )
-{
- local result = $(path) ;
- if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path.
- {
- result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'.
- }
- return [ native-UNIX $(result) ] ;
-}
-
-
-# split-path-VMS: splits input native path into device dir file (each part is
-# optional).
-#
-# example:
-#
-# dev:[dir]file.c => dev: [dir] file.c
-#
-rule split-path-VMS ( native )
-{
- local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
- local device = $(matches[1]) ;
- local dir = $(matches[2]) ;
- local file = $(matches[3]) ;
-
- return $(device) $(dir) $(file) ;
-}
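The device/dir/file split illustrated above (dev:[dir]file.c => dev: [dir] file.c) translates almost directly to a Python regular expression. A hedged sketch with a hypothetical helper name:

import re

def split_path_vms(native):
    # Optional 'device:', optional '[dir]', and the remaining file part.
    m = re.match(r"([a-zA-Z0-9_-]+:)?(\[[^\]]*\])?(.*)$", native)
    return tuple(part or "" for part in m.groups())

print(split_path_vms("dev:[dir]file.c"))  # ('dev:', '[dir]', 'file.c')
print(split_path_vms("[dir]file.c"))      # ('', '[dir]', 'file.c')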
-
-
-# Converts a native VMS path into a portable path spec.
-#
-# Does not handle current-device absolute paths such as "[dir]File.c" as it is
-# not clear how to represent them in the portable path notation.
-#
-# Adds a trailing dot (".") to the file part if no extension is present (helps
-# when converting it back into native path).
-#
-rule make-VMS ( native )
-{
- if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
- {
- errors.error "Can't handle default-device absolute paths: " $(native) ;
- }
-
- local parts = [ split-path-VMS $(native) ] ;
- local device = $(parts[1]) ;
- local dir = $(parts[2]) ;
- local file = $(parts[3]) ;
- local elems ;
-
- if $(device)
- {
- #
- # rooted
- #
- elems = /$(device) ;
- }
-
- if $(dir) = "[]"
- {
- #
- # Special case: current directory
- #
- elems = $(elems) "." ;
- }
- else if $(dir)
- {
- dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
- local dir_parts = [ regex.split $(dir) \\. ] ;
-
- if $(dir_parts[1]) = ""
- {
- #
- # Relative path
- #
- dir_parts = $(dir_parts[2--1]) ;
- }
-
- #
- # replace "parent-directory" parts (- => ..)
- #
- dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
-
- elems = $(elems) $(dir_parts) ;
- }
-
- if $(file)
- {
- if ! [ MATCH (\\.) : $(file) ]
- {
- #
- # Always add "." to end of non-extension file.
- #
- file = $(file). ;
- }
- elems = $(elems) $(file) ;
- }
-
- local portable = [ path.join $(elems) ] ;
-
- return $(portable) ;
-}
-
-
-# Converts a portable path spec into a native VMS path.
-#
-# Relies on having at least one dot (".") included in the file name to be able
-# to differentiate it from the directory part.
-#
-rule native-VMS ( path )
-{
- local device = "" ;
- local dir = $(path) ;
- local file = "" ;
- local native ;
- local split ;
-
- #
- # Has device ?
- #
- if [ is-rooted $(dir) ]
- {
- split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
- device = $(split[1]) ;
- dir = $(split[2]) ;
- }
-
- #
- # Has file ?
- #
- # This is no exact science, just guess work:
- #
- # If the last part of the current path spec
- # includes some chars, followed by a dot,
- # optionally followed by more chars -
- # then it is a file (keep your fingers crossed).
- #
- split = [ regex.split $(dir) / ] ;
- local maybe_file = $(split[-1]) ;
-
- if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
- {
- file = $(maybe_file) ;
- dir = [ sequence.join $(split[1--2]) : / ] ;
- }
-
- #
- # Has dir spec ?
- #
- if $(dir) = "."
- {
- dir = "[]" ;
- }
- else if $(dir)
- {
- dir = [ regex.replace $(dir) \\.\\. - ] ;
- dir = [ regex.replace $(dir) / . ] ;
-
- if $(device) = ""
- {
- #
- # Relative directory
- #
- dir = "."$(dir) ;
- }
- dir = "["$(dir)"]" ;
- }
-
- native = [ sequence.join $(device) $(dir) $(file) ] ;
-
- return $(native) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import errors : try catch ;
-
- assert.true is-rooted "/" ;
- assert.true is-rooted "/foo" ;
- assert.true is-rooted "/foo/bar" ;
- assert.result : is-rooted "." ;
- assert.result : is-rooted "foo" ;
- assert.result : is-rooted "foo/bar" ;
-
- assert.true has-parent "foo" ;
- assert.true has-parent "foo/bar" ;
- assert.true has-parent "." ;
- assert.result : has-parent "/" ;
-
- assert.result "." : basename "." ;
- assert.result ".." : basename ".." ;
- assert.result "foo" : basename "foo" ;
- assert.result "foo" : basename "bar/foo" ;
- assert.result "foo" : basename "gaz/bar/foo" ;
- assert.result "foo" : basename "/gaz/bar/foo" ;
-
- assert.result "." : parent "foo" ;
- assert.result "/" : parent "/foo" ;
- assert.result "foo/bar" : parent "foo/bar/giz" ;
- assert.result ".." : parent "." ;
- assert.result ".." : parent "../foo" ;
- assert.result "../../foo" : parent "../../foo/bar" ;
-
- assert.result "." : reverse "." ;
- assert.result ".." : reverse "foo" ;
- assert.result "../../.." : reverse "foo/bar/giz" ;
-
- assert.result "foo" : join "foo" ;
- assert.result "/foo" : join "/" "foo" ;
- assert.result "foo/bar" : join "foo" "bar" ;
- assert.result "foo/bar" : join "foo/giz" "../bar" ;
- assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
- assert.result ".." : join "." ".." ;
- assert.result ".." : join "foo" "../.." ;
- assert.result "../.." : join "../foo" "../.." ;
- assert.result "/foo" : join "/bar" "../foo" ;
- assert.result "foo/giz" : join "foo/giz" "." ;
- assert.result "." : join lib2 ".." ;
- assert.result "/" : join "/a" ".." ;
-
- assert.result /a/b : join /a/b/c .. ;
-
- assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
- assert.result "giz" : join "foo" ".." "giz" ;
- assert.result "foo/giz" : join "foo" "." "giz" ;
-
- try ;
- {
- join "a" "/b" ;
- }
- catch only first element may be rooted ;
-
- local CWD = "/home/ghost/build" ;
- assert.result : all-parents . : . : $(CWD) ;
- assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
- assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
- assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
-
- local CWD = "/home/ghost" ;
- assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
- assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
-
- assert.result "c/d" : relative "a/b/c/d" "a/b" ;
- assert.result "foo" : relative "foo" "." ;
-
- local save-os = [ modules.peek path : os ] ;
- modules.poke path : os : NT ;
-
- assert.result "foo/bar/giz" : make "foo/bar/giz" ;
- assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
- assert.result "foo" : make "foo/" ;
- assert.result "foo" : make "foo\\" ;
- assert.result "foo" : make "foo/." ;
- assert.result "foo" : make "foo/bar/.." ;
- assert.result "foo" : make "foo/bar/../" ;
- assert.result "foo" : make "foo/bar/..\\" ;
- assert.result "foo/bar" : make "foo/././././bar" ;
- assert.result "/foo" : make "\\foo" ;
- assert.result "/D:/My Documents" : make "D:\\My Documents" ;
- assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
-
- # Test processing 'invalid' paths containing multiple successive path
- # separators.
- assert.result "foo" : make "foo//" ;
- assert.result "foo" : make "foo///" ;
- assert.result "foo" : make "foo\\\\" ;
- assert.result "foo" : make "foo\\\\\\" ;
- assert.result "/foo" : make "//foo" ;
- assert.result "/foo" : make "///foo" ;
- assert.result "/foo" : make "\\\\foo" ;
- assert.result "/foo" : make "\\\\\\foo" ;
- assert.result "/foo" : make "\\/\\/foo" ;
- assert.result "foo/bar" : make "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ;
- assert.result "foo" : make "foo/bar//.." ;
- assert.result "foo/bar" : make "foo/bar/giz//.." ;
- assert.result "foo/giz" : make "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ;
- assert.result "../../../foo" : make "..///.//..///.//..////foo///" ;
-
- # Test processing 'invalid' rooted paths with too many '..' path elements
- # that would place them before the root.
- assert.result : make "/.." ;
- assert.result : make "/../" ;
- assert.result : make "/../." ;
- assert.result : make "/.././" ;
- assert.result : make "/foo/../bar/giz/.././././../../." ;
- assert.result : make "/foo/../bar/giz/.././././../.././" ;
- assert.result : make "//foo/../bar/giz/.././././../../." ;
- assert.result : make "//foo/../bar/giz/.././././../.././" ;
- assert.result : make "\\\\foo/../bar/giz/.././././../../." ;
- assert.result : make "\\\\foo/../bar/giz/.././././../.././" ;
- assert.result : make "/..///.//..///.//..////foo///" ;
-
- assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
- assert.result "foo" : native "foo" ;
- assert.result "\\foo" : native "/foo" ;
- assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
-
- modules.poke path : os : UNIX ;
-
- assert.result "foo/bar/giz" : make "foo/bar/giz" ;
- assert.result "/sub1" : make "/sub1/." ;
- assert.result "/sub1" : make "/sub1/sub2/.." ;
- assert.result "sub1" : make "sub1/." ;
- assert.result "sub1" : make "sub1/sub2/.." ;
- assert.result "/foo/bar" : native "/foo/bar" ;
-
- modules.poke path : os : VMS ;
-
- #
- # Don't really need to poke os before these
- #
- assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
- assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
- assert.result "disk:" "" "" : split-path-VMS "disk:" ;
- assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
- assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
- assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
- assert.result "" "" "file" : split-path-VMS "file" ;
- assert.result "" "" "" : split-path-VMS "" ;
-
- #
- # Special case: current directory
- #
- assert.result "" "[]" "" : split-path-VMS "[]" ;
- assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
- assert.result "" "[]" "file" : split-path-VMS "[]file" ;
- assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
-
- #
- # Make portable paths
- #
- assert.result "/disk:" : make "disk:" ;
- assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
- assert.result "foo" : make "[.foo]" ;
- assert.result "foo" : make "[.foo.bar.-]" ;
- assert.result ".." : make "[.-]" ;
- assert.result ".." : make "[-]" ;
- assert.result "." : make "[]" ;
- assert.result "giz.h" : make "giz.h" ;
- assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
- assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
- assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
-
- #
- # Special case (adds '.' to end of file w/o extension to
- # disambiguate from directory in portable path spec).
- #
- assert.result "Jamfile." : make "Jamfile" ;
- assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
- assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
-
- #
- # Make native paths
- #
- assert.result "disk:" : native "/disk:" ;
- assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
- assert.result "[.foo]" : native "foo" ;
- assert.result "[.-]" : native ".." ;
- assert.result "[.foo.-]" : native "foo/.." ;
- assert.result "[]" : native "." ;
- assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
- assert.result "giz.h" : native "giz.h" ;
- assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
- assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
-
- modules.poke path : os : $(save-os) ;
-}
diff --git a/jam-files/boost-build/util/path.py b/jam-files/boost-build/util/path.py
deleted file mode 100644
index 222b96bf..00000000
--- a/jam-files/boost-build/util/path.py
+++ /dev/null
@@ -1,904 +0,0 @@
-# Status: this module is ported on demand by whoever needs something
-# from it. Functionality that is not needed by the Python port will
-# be dropped.
-
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# Performs various path manipulations. Paths are always in a 'normalized'
-# representation. In it, a path may be either:
-#
-# - '.', or
-#
-# - ['/'] [ ( '..' '/' )* (token '/')* token ]
-#
-# In plain English, a path can be rooted, '..' elements are allowed only
-# at the beginning, and it never ends in a slash, except for a path
-# consisting of a slash only.
-
-import os.path
-from utility import to_seq
-from glob import glob as builtin_glob
-
-from b2.util import bjam_signature
-
-@bjam_signature((["path", "root"],))
-def root (path, root):
- """ If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
- """
- if os.path.isabs (path):
- return path
- else:
- return os.path.join (root, path)
-
-@bjam_signature((["native"],))
-def make (native):
- """ Converts the native path into normalized form.
- """
- # TODO: make os selection here.
- return make_UNIX (native)
-
-def make_UNIX (native):
-
- # VP: I have no idea how 'native' can be empty here! But it can!
- assert (native)
-
- return os.path.normpath (native)
-
-@bjam_signature((["path"],))
-def native (path):
- """ Builds a native representation of the path.
- """
- # TODO: make os selection here.
- return native_UNIX (path)
-
-def native_UNIX (path):
- return path
-
-
-def pwd ():
- """ Returns the current working directory.
- # TODO: is it a good idea to use the current dir? Some use-cases
- may not allow us to depend on the current dir.
- """
- return make (os.getcwd ())
-
-def is_rooted (path):
- """ Tests if a path is rooted.
- """
- return path and path [0] == '/'
-
-
-###################################################################
-# Still to port.
-# Original lines are prefixed with "# "
-#
-# # Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# # distribute this software is granted provided this copyright notice appears in
-# # all copies. This software is provided "as is" without express or implied
-# # warranty, and with no claim as to its suitability for any purpose.
-#
-# # Performs various path manipulations. Path are always in a 'normilized'
-# # representation. In it, a path may be either:
-# #
-# # - '.', or
-# #
-# # - ['/'] [ ( '..' '/' )* (token '/')* token ]
-# #
-# # In plain english, path can be rooted, '..' elements are allowed only
-# # at the beginning, and it never ends in slash, except for path consisting
-# # of slash only.
-#
-# import modules ;
-# import sequence ;
-# import regex ;
-# import errors : error ;
-#
-#
-# os = [ modules.peek : OS ] ;
-# if [ modules.peek : UNIX ]
-# {
-# local uname = [ modules.peek : JAMUNAME ] ;
-# switch $(uname)
-# {
-# case CYGWIN* :
-# os = CYGWIN ;
-#
-# case * :
-# os = UNIX ;
-# }
-# }
-#
-# #
-# # Tests if a path is rooted.
-# #
-# rule is-rooted ( path )
-# {
-# return [ MATCH "^(/)" : $(path) ] ;
-# }
-#
-# #
-# # Tests if a path has a parent.
-# #
-# rule has-parent ( path )
-# {
-# if $(path) != / {
-# return 1 ;
-# } else {
-# return ;
-# }
-# }
-#
-# #
-# # Returns the path without any directory components.
-# #
-# rule basename ( path )
-# {
-# return [ MATCH "([^/]+)$" : $(path) ] ;
-# }
-#
-# #
-# # Returns parent directory of the path. If no parent exists, error is issued.
-# #
-# rule parent ( path )
-# {
-# if [ has-parent $(path) ] {
-#
-# if $(path) = . {
-# return .. ;
-# } else {
-#
-# # Strip everything at the end of path up to and including
-# # the last slash
-# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
-#
-# # Did we strip what we shouldn't?
-# if $(result[2]) = ".." {
-# return $(path)/.. ;
-# } else {
-# if ! $(result[1]) {
-# if [ is-rooted $(path) ] {
-# result = / ;
-# } else {
-# result = . ;
-# }
-# }
-# return $(result[1]) ;
-# }
-# }
-# } else {
-# error "Path '$(path)' has no parent" ;
-# }
-# }
-#
-# #
-# # Returns path2 such that "[ join path path2 ] = .".
-# # The path may not contain ".." element or be rooted.
-# #
-# rule reverse ( path )
-# {
-# if $(path) = .
-# {
-# return $(path) ;
-# }
-# else
-# {
-# local tokens = [ regex.split $(path) "/" ] ;
-# local tokens2 ;
-# for local i in $(tokens) {
-# tokens2 += .. ;
-# }
-# return [ sequence.join $(tokens2) : "/" ] ;
-# }
-# }
-#
-# #
-# # Auxillary rule: does all the semantic of 'join', except for error cheching.
-# # The error checking is separated because this rule is recursive, and I don't
-# # like the idea of checking the same input over and over.
-# #
-# local rule join-imp ( elements + )
-# {
-# return [ NORMALIZE_PATH $(elements:J="/") ] ;
-# }
-#
-# #
-# # Contanenates the passed path elements. Generates an error if
-# # any element other than the first one is rooted.
-# #
-# rule join ( elements + )
-# {
-# if ! $(elements[2])
-# {
-# return $(elements[1]) ;
-# }
-# else
-# {
-# for local e in $(elements[2-])
-# {
-# if [ is-rooted $(e) ]
-# {
-# error only first element may be rooted ;
-# }
-# }
-# return [ join-imp $(elements) ] ;
-# }
-# }
-
-
-def glob (dirs, patterns):
- """ Returns the list of files matching the given pattern in the
- specified directory. Both directories and patterns are
- supplied as portable paths. Each pattern should be a non-absolute
- path and can't contain "." or ".." elements. Each slash-separated
- element of a pattern can contain the following special characters:
- - '?', which matches any single character
- - '*', which matches an arbitrary number of characters.
- A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
- if and only if e1 matches p1, e2 matches p2 and so on.
-
- For example:
- [ glob . : *.cpp ]
- [ glob . : */build/Jamfile ]
- """
-# {
-# local result ;
-# if $(patterns:D)
-# {
-# # When a pattern has a directory element, we first glob for
-# # directory, and then glob for file name is the found directories.
-# for local p in $(patterns)
-# {
-# # First glob for directory part.
-# local globbed-dirs = [ glob $(dirs) : $(p:D) ] ;
-# result += [ glob $(globbed-dirs) : $(p:D="") ] ;
-# }
-# }
-# else
-# {
-# # When a pattern has not directory, we glob directly.
-# # Take care of special ".." value. The "GLOB" rule simply ignores
-# # the ".." element (and ".") element in directory listings. This is
-# # needed so that
-# #
-# # [ glob libs/*/Jamfile ]
-# #
-# # don't return
-# #
-# # libs/../Jamfile (which is the same as ./Jamfile)
-# #
-# # On the other hand, when ".." is explicitly present in the pattern
-# # we need to return it.
-# #
-# for local dir in $(dirs)
-# {
-# for local p in $(patterns)
-# {
-# if $(p) != ".."
-# {
-# result += [ sequence.transform make
-# : [ GLOB [ native $(dir) ] : $(p) ] ] ;
-# }
-# else
-# {
-# result += [ path.join $(dir) .. ] ;
-# }
-# }
-# }
-# }
-# return $(result) ;
-# }
-#
-
-# TODO: (PF) I replaced the code above by this. I think it should work but needs to be tested.
- result = []
- dirs = to_seq (dirs)
- patterns = to_seq (patterns)
-
- splitdirs = []
- for dir in dirs:
- splitdirs += dir.split (os.pathsep)
-
- for dir in splitdirs:
- for pattern in patterns:
- p = os.path.join (dir, pattern)
- import glob
- result.extend (glob.glob (p))
- return result
-
-#
-# Find out the absolute name of path and returns the list of all the parents,
-# starting with the immediate one. Parents are returned as relative names.
-# If 'upper_limit' is specified, directories above it will be pruned.
-#
-def all_parents(path, upper_limit=None, cwd=None):
-
- if not cwd:
- cwd = os.getcwd()
-
- path_abs = os.path.join(cwd, path)
-
- if upper_limit:
- upper_limit = os.path.join(cwd, upper_limit)
-
- result = []
- while path_abs and path_abs != upper_limit:
- (head, tail) = os.path.split(path)
- path = os.path.join(path, "..")
- result.append(path)
- path_abs = head
-
- if upper_limit and path_abs != upper_limit:
- raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path))
-
- return result
-
-# Search for 'pattern' in parent directories of 'dir', up till and including
-# 'upper_limit', if it is specified, or till the filesystem root otherwise.
-#
-def glob_in_parents(dir, patterns, upper_limit=None):
-
- result = []
- parent_dirs = all_parents(dir, upper_limit)
-
- for p in parent_dirs:
- result = glob(p, patterns)
- if result: break
-
- return result
-
-#
-# #
-# # Assuming 'child' is a subdirectory of 'parent', return the relative
-# # path from 'parent' to 'child'
-# #
-# rule relative ( child parent )
-# {
-# if $(parent) = "."
-# {
-# return $(child) ;
-# }
-# else
-# {
-# local split1 = [ regex.split $(parent) / ] ;
-# local split2 = [ regex.split $(child) / ] ;
-#
-# while $(split1)
-# {
-# if $(split1[1]) = $(split2[1])
-# {
-# split1 = $(split1[2-]) ;
-# split2 = $(split2[2-]) ;
-# }
-# else
-# {
-# errors.error $(child) is not a subdir of $(parent) ;
-# }
-# }
-# return [ join $(split2) ] ;
-# }
-# }
-#
-# # Returns the minimal path to path2 that is relative path1.
-# #
-# rule relative-to ( path1 path2 )
-# {
-# local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
-# local split1 = [ regex.split $(path1) / ] ;
-# local split2 = [ regex.split $(path2) / ] ;
-#
-# while $(split1) && $(root_1)
-# {
-# if $(split1[1]) = $(split2[1])
-# {
-# root_1 = $(root_1[2-]) ;
-# split1 = $(split1[2-]) ;
-# split2 = $(split2[2-]) ;
-# }
-# else
-# {
-# split1 = ;
-# }
-# }
-# return [ join . $(root_1) $(split2) ] ;
-# }
-
-# Returns the list of paths which are used by the operating system
-# for looking up programs
-def programs_path ():
- raw = []
- names = ['PATH', 'Path', 'path']
-
- for name in names:
- raw.append(os.environ.get (name, ''))
-
- result = []
- for elem in raw:
- if elem:
- for p in elem.split(os.path.pathsep):
- result.append(make(p))
-
- return result
-
-# rule make-NT ( native )
-# {
-# local tokens = [ regex.split $(native) "[/\\]" ] ;
-# local result ;
-#
-# # Handle paths ending with slashes
-# if $(tokens[-1]) = ""
-# {
-# tokens = $(tokens[1--2]) ; # discard the empty element
-# }
-#
-# result = [ path.join $(tokens) ] ;
-#
-# if [ regex.match "(^.:)" : $(native) ]
-# {
-# result = /$(result) ;
-# }
-#
-# if $(native) = ""
-# {
-# result = "." ;
-# }
-#
-# return $(result) ;
-# }
-#
-# rule native-NT ( path )
-# {
-# local result = [ MATCH "^/?(.*)" : $(path) ] ;
-# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
-# return $(result) ;
-# }
-#
-# rule make-CYGWIN ( path )
-# {
-# return [ make-NT $(path) ] ;
-# }
-#
-# rule native-CYGWIN ( path )
-# {
-# local result = $(path) ;
-# if [ regex.match "(^/.:)" : $(path) ] # win absolute
-# {
-# result = [ MATCH "^/?(.*)" : $(path) ] ; # remove leading '/'
-# }
-# return [ native-UNIX $(result) ] ;
-# }
-#
-# #
-# # split-VMS: splits input native path into
-# # device dir file (each part is optional),
-# # example:
-# #
-# # dev:[dir]file.c => dev: [dir] file.c
-# #
-# rule split-path-VMS ( native )
-# {
-# local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
-# local device = $(matches[1]) ;
-# local dir = $(matches[2]) ;
-# local file = $(matches[3]) ;
-#
-# return $(device) $(dir) $(file) ;
-# }
-#
-# #
-# # Converts a native VMS path into a portable path spec.
-# #
-# # Does not handle current-device absolute paths such
-# # as "[dir]File.c" as it is not clear how to represent
-# # them in the portable path notation.
-# #
-# # Adds a trailing dot (".") to the file part if no extension
-# # is present (helps when converting it back into native path).
-# #
-# rule make-VMS ( native )
-# {
-# if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
-# {
-# errors.error "Can't handle default-device absolute paths: " $(native) ;
-# }
-#
-# local parts = [ split-path-VMS $(native) ] ;
-# local device = $(parts[1]) ;
-# local dir = $(parts[2]) ;
-# local file = $(parts[3]) ;
-# local elems ;
-#
-# if $(device)
-# {
-# #
-# # rooted
-# #
-# elems = /$(device) ;
-# }
-#
-# if $(dir) = "[]"
-# {
-# #
-# # Special case: current directory
-# #
-# elems = $(elems) "." ;
-# }
-# else if $(dir)
-# {
-# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
-# local dir_parts = [ regex.split $(dir) \\. ] ;
-#
-# if $(dir_parts[1]) = ""
-# {
-# #
-# # Relative path
-# #
-# dir_parts = $(dir_parts[2--1]) ;
-# }
-#
-# #
-# # replace "parent-directory" parts (- => ..)
-# #
-# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
-#
-# elems = $(elems) $(dir_parts) ;
-# }
-#
-# if $(file)
-# {
-# if ! [ MATCH (\\.) : $(file) ]
-# {
-# #
-# # Always add "." to end of non-extension file
-# #
-# file = $(file). ;
-# }
-# elems = $(elems) $(file) ;
-# }
-#
-# local portable = [ path.join $(elems) ] ;
-#
-# return $(portable) ;
-# }
-#
-# #
-# # Converts a portable path spec into a native VMS path.
-# #
-# # Relies on having at least one dot (".") included in the file
-# # name to be able to differentiate it ftom the directory part.
-# #
-# rule native-VMS ( path )
-# {
-# local device = "" ;
-# local dir = $(path) ;
-# local file = "" ;
-# local native ;
-# local split ;
-#
-# #
-# # Has device ?
-# #
-# if [ is-rooted $(dir) ]
-# {
-# split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
-# device = $(split[1]) ;
-# dir = $(split[2]) ;
-# }
-#
-# #
-# # Has file ?
-# #
-# # This is no exact science, just guess work:
-# #
-# # If the last part of the current path spec
-# # includes some chars, followed by a dot,
-# # optionally followed by more chars -
-# # then it is a file (keep your fingers crossed).
-# #
-# split = [ regex.split $(dir) / ] ;
-# local maybe_file = $(split[-1]) ;
-#
-# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
-# {
-# file = $(maybe_file) ;
-# dir = [ sequence.join $(split[1--2]) : / ] ;
-# }
-#
-# #
-# # Has dir spec ?
-# #
-# if $(dir) = "."
-# {
-# dir = "[]" ;
-# }
-# else if $(dir)
-# {
-# dir = [ regex.replace $(dir) \\.\\. - ] ;
-# dir = [ regex.replace $(dir) / . ] ;
-#
-# if $(device) = ""
-# {
-# #
-# # Relative directory
-# #
-# dir = "."$(dir) ;
-# }
-# dir = "["$(dir)"]" ;
-# }
-#
-# native = [ sequence.join $(device) $(dir) $(file) ] ;
-#
-# return $(native) ;
-# }
-#
-#
-# rule __test__ ( ) {
-#
-# import assert ;
-# import errors : try catch ;
-#
-# assert.true is-rooted "/" ;
-# assert.true is-rooted "/foo" ;
-# assert.true is-rooted "/foo/bar" ;
-# assert.result : is-rooted "." ;
-# assert.result : is-rooted "foo" ;
-# assert.result : is-rooted "foo/bar" ;
-#
-# assert.true has-parent "foo" ;
-# assert.true has-parent "foo/bar" ;
-# assert.true has-parent "." ;
-# assert.result : has-parent "/" ;
-#
-# assert.result "." : basename "." ;
-# assert.result ".." : basename ".." ;
-# assert.result "foo" : basename "foo" ;
-# assert.result "foo" : basename "bar/foo" ;
-# assert.result "foo" : basename "gaz/bar/foo" ;
-# assert.result "foo" : basename "/gaz/bar/foo" ;
-#
-# assert.result "." : parent "foo" ;
-# assert.result "/" : parent "/foo" ;
-# assert.result "foo/bar" : parent "foo/bar/giz" ;
-# assert.result ".." : parent "." ;
-# assert.result ".." : parent "../foo" ;
-# assert.result "../../foo" : parent "../../foo/bar" ;
-#
-#
-# assert.result "." : reverse "." ;
-# assert.result ".." : reverse "foo" ;
-# assert.result "../../.." : reverse "foo/bar/giz" ;
-#
-# assert.result "foo" : join "foo" ;
-# assert.result "/foo" : join "/" "foo" ;
-# assert.result "foo/bar" : join "foo" "bar" ;
-# assert.result "foo/bar" : join "foo/giz" "../bar" ;
-# assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
-# assert.result ".." : join "." ".." ;
-# assert.result ".." : join "foo" "../.." ;
-# assert.result "../.." : join "../foo" "../.." ;
-# assert.result "/foo" : join "/bar" "../foo" ;
-# assert.result "foo/giz" : join "foo/giz" "." ;
-# assert.result "." : join lib2 ".." ;
-# assert.result "/" : join "/a" ".." ;
-#
-# assert.result /a/b : join /a/b/c .. ;
-#
-# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
-# assert.result "giz" : join "foo" ".." "giz" ;
-# assert.result "foo/giz" : join "foo" "." "giz" ;
-#
-# try ;
-# {
-# join "a" "/b" ;
-# }
-# catch only first element may be rooted ;
-#
-# local CWD = "/home/ghost/build" ;
-# assert.result : all-parents . : . : $(CWD) ;
-# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
-# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
-# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
-#
-# local CWD = "/home/ghost" ;
-# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
-# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
-#
-# assert.result "c/d" : relative "a/b/c/d" "a/b" ;
-# assert.result "foo" : relative "foo" "." ;
-#
-# local save-os = [ modules.peek path : os ] ;
-# modules.poke path : os : NT ;
-#
-# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
-# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
-# assert.result "foo" : make "foo/." ;
-# assert.result "foo" : make "foo/bar/.." ;
-# assert.result "/D:/My Documents" : make "D:\\My Documents" ;
-# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
-#
-# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
-# assert.result "foo" : native "foo" ;
-# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
-#
-# modules.poke path : os : UNIX ;
-#
-# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
-# assert.result "/sub1" : make "/sub1/." ;
-# assert.result "/sub1" : make "/sub1/sub2/.." ;
-# assert.result "sub1" : make "sub1/." ;
-# assert.result "sub1" : make "sub1/sub2/.." ;
-# assert.result "/foo/bar" : native "/foo/bar" ;
-#
-# modules.poke path : os : VMS ;
-#
-# #
-# # Don't really need to poke os before these
-# #
-# assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
-# assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
-# assert.result "disk:" "" "" : split-path-VMS "disk:" ;
-# assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
-# assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
-# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
-# assert.result "" "" "file" : split-path-VMS "file" ;
-# assert.result "" "" "" : split-path-VMS "" ;
-#
-# #
-# # Special case: current directory
-# #
-# assert.result "" "[]" "" : split-path-VMS "[]" ;
-# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
-# assert.result "" "[]" "file" : split-path-VMS "[]file" ;
-# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
-#
-# #
-# # Make portable paths
-# #
-# assert.result "/disk:" : make "disk:" ;
-# assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
-# assert.result "foo" : make "[.foo]" ;
-# assert.result "foo" : make "[.foo.bar.-]" ;
-# assert.result ".." : make "[.-]" ;
-# assert.result ".." : make "[-]" ;
-# assert.result "." : make "[]" ;
-# assert.result "giz.h" : make "giz.h" ;
-# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
-# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
-# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
-#
-# #
-# # Special case (adds '.' to end of file w/o extension to
-# # disambiguate from directory in portable path spec).
-# #
-# assert.result "Jamfile." : make "Jamfile" ;
-# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
-# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
-#
-# #
-# # Make native paths
-# #
-# assert.result "disk:" : native "/disk:" ;
-# assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
-# assert.result "[.foo]" : native "foo" ;
-# assert.result "[.-]" : native ".." ;
-# assert.result "[.foo.-]" : native "foo/.." ;
-# assert.result "[]" : native "." ;
-# assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
-# assert.result "giz.h" : native "giz.h" ;
-# assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
-# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
-#
-# modules.poke path : os : $(save-os) ;
-#
-# }
-
-#
-
-
-#def glob(dir, patterns):
-# result = []
-# for pattern in patterns:
-# result.extend(builtin_glob(os.path.join(dir, pattern)))
-# return result
-
-def glob(dirs, patterns, exclude_patterns=None):
-    """Returns the list of files matching the given patterns in the
-    specified directories. Both directories and patterns are
-    supplied as portable paths. Each pattern should be a non-absolute
-    path and cannot contain '.' or '..' elements. Each slash-separated
-    element of a pattern can contain the following special characters:
-    -  '?', which matches any single character
-    -  '*', which matches an arbitrary number of characters.
- A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
- if and only if e1 matches p1, e2 matches p2 and so on.
- For example:
- [ glob . : *.cpp ]
- [ glob . : */build/Jamfile ]
- """
-
- assert(isinstance(patterns, list))
- assert(isinstance(dirs, list))
-
- if not exclude_patterns:
- exclude_patterns = []
- else:
- assert(isinstance(exclude_patterns, list))
-
- real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
- real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns
- for d in dirs]
-
- inc = [os.path.normpath(name) for p in real_patterns
- for name in builtin_glob(p)]
- exc = [os.path.normpath(name) for p in real_exclude_patterns
- for name in builtin_glob(p)]
- return [x for x in inc if x not in exc]
-
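For orientation, the two bracketed calls in the docstring correspond to ordinary standard-library globs like the following sketch (the directory layout is hypothetical, so the prints may simply show empty lists):

import glob

# [ glob . : *.cpp ]           -> every .cpp file directly in the current directory
print(glob.glob("./*.cpp"))
# [ glob . : */build/Jamfile ] -> a Jamfile inside any immediate */build/ subdirectory
print(glob.glob("./*/build/Jamfile"))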
-def glob_tree(roots, patterns, exclude_patterns=None):
- """Recursive version of GLOB. Builds the glob of files while
- also searching in the subdirectories of the given roots. An
- optional set of exclusion patterns will filter out the
- matching entries from the result. The exclusions also apply
- to the subdirectory scanning, such that directories that
- match the exclusion patterns will not be searched."""
-
- if not exclude_patterns:
- exclude_patterns = []
-
- result = glob(roots, patterns, exclude_patterns)
- subdirs = [s for s in glob(roots, ["*"]) if s != "." and s != ".." and os.path.isdir(s)]
- if subdirs:
- result.extend(glob_tree(subdirs, patterns, exclude_patterns))
-
- return result
-
-def glob_in_parents(dir, patterns, upper_limit=None):
-    """Recursive version of GLOB which globs all parent directories
- of dir until the first match is found. Returns an empty result if no match
- is found"""
-
- assert(isinstance(dir, str))
- assert(isinstance(patterns, list))
-
- result = []
-
- absolute_dir = os.path.join(os.getcwd(), dir)
- absolute_dir = os.path.normpath(absolute_dir)
- while absolute_dir:
- new_dir = os.path.split(absolute_dir)[0]
- if new_dir == absolute_dir:
- break
- result = glob([new_dir], patterns)
- if result:
- break
- absolute_dir = new_dir
-
- return result
-
-
-# The relpath functionality is written by
-# Cimarron Taylor
-def split(p, rest=[]):
- (h,t) = os.path.split(p)
- if len(h) < 1: return [t]+rest
- if len(t) < 1: return [h]+rest
- return split(h,[t]+rest)
-
-def commonpath(l1, l2, common=[]):
- if len(l1) < 1: return (common, l1, l2)
- if len(l2) < 1: return (common, l1, l2)
- if l1[0] != l2[0]: return (common, l1, l2)
- return commonpath(l1[1:], l2[1:], common+[l1[0]])
-
-def relpath(p1, p2):
- (common,l1,l2) = commonpath(split(p1), split(p2))
- p = []
- if len(l1) > 0:
- p = [ '../' * len(l1) ]
- p = p + l2
- if p:
- return os.path.join( *p )
- else:
- return "."
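For reference, the standard library's os.path.relpath covers the same ground as these hand-rolled helpers, though with the argument order reversed: relpath(p1, p2) above answers "how to get from p1 to p2", while os.path.relpath(target, start) answers "how to get from start to target". A small, purely lexical sanity check with made-up paths:

import os

print(os.path.relpath("/a/b/x", "/a/b/c/d"))            # ../../x
print(os.path.relpath("/home/user", "/home/user/src"))  # ..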
diff --git a/jam-files/boost-build/util/print.jam b/jam-files/boost-build/util/print.jam
deleted file mode 100644
index 708d21ab..00000000
--- a/jam-files/boost-build/util/print.jam
+++ /dev/null
@@ -1,488 +0,0 @@
-# Copyright 2003 Douglas Gregor
-# Copyright 2002, 2003, 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Utilities for generating format-independent output. Using these
-# helps generate documentation for, at minimum, plain/console
-# and html output.
-
-import modules ;
-import numbers ;
-import string ;
-import regex ;
-import "class" ;
-import scanner ;
-import path ;
-
-# The current output target. Defaults to console.
-output-target = console ;
-
-# The current output type. Defaults to plain. Other possible values are "html".
-output-type = plain ;
-
-# Whitespace.
-.whitespace = [ string.whitespace ] ;
-
-
-# Set the target and type of output to generate. This sets both the destination
-# output and the type of docs to generate to that output. The target can be
-# either a file or "console" for echoing to the console. If the type of output
-# is not specified it defaults to plain text.
-#
-rule output (
- target # The target file or device; file or "console".
- type ? # The type of output; "plain" or "html".
-)
-{
- type ?= plain ;
- if $(output-target) != $(target)
- {
- output-target = $(target) ;
- output-type = $(type) ;
- if $(output-type) = html
- {
- text
- "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">"
- "<html>"
- "<head>"
- "</head>"
- "<body link=\"#0000ff\" vlink=\"#800080\">"
- : true
- : prefix ;
- text
- "</body>"
- "</html>"
- :
- : suffix ;
- }
- }
-}
-
-
-# Generate a section with a description. The type of output can be controlled by
-# the value of the 'output-type' variable.
-#
-rule section (
- name # The name of the section.
- description * # A number of description lines.
-)
-{
- if $(output-type) = plain
- {
- lines [ split-at-words $(name): ] ;
- lines ;
- }
- else if $(output-type) = html
- {
- name = [ escape-html $(name) ] ;
- text <h3>$(name)</h3> <p> ;
- }
- local pre = ;
- while $(description)
- {
- local paragraph = ;
- while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; }
- if $(pre)
- {
- while $(description) && (
- $(pre) = " $(description[1])" ||
- ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] )
- )
- { paragraph += $(description[1]) ; description = $(description[2-]) ; }
- while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; }
- pre = ;
- if $(output-type) = plain
- {
- lines $(paragraph) "" : " " " " ;
- }
- else if $(output-type) = html
- {
- text <blockquote> ;
- lines $(paragraph) ;
- text </blockquote> ;
- }
- }
- else
- {
- while $(description) && ! [ string.is-whitespace $(description[1]) ]
- { paragraph += $(description[1]) ; description = $(description[2-]) ; }
- if $(paragraph[1]) = :: && ! $(paragraph[2])
- {
- pre = " " ;
- }
- if $(paragraph[1]) = ::
- {
- if $(output-type) = plain
- {
- lines $(paragraph[2-]) "" : " " " " ;
- lines ;
- }
- else if $(output-type) = html
- {
- text <blockquote> ;
- lines $(paragraph[2-]) ;
- text </blockquote> ;
- }
- }
- else
- {
- local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ;
- local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ;
- p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ;
- if $(p[3]) = ::
- {
- pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ;
- if ! $(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) $(p[1]): ; }
- else { paragraph = $(paragraph[1--2]) $(p[1]) ; }
- if $(output-type) = plain
- {
- lines [ split-at-words " " $(paragraph) ] : " " " " ;
- lines ;
- }
- else if $(output-type) = html
- {
- text </p> <p> [ escape-html $(paragraph) ] ;
- }
- }
- else
- {
- if $(output-type) = plain
- {
- lines [ split-at-words " " $(paragraph) ] : " " " " ;
- lines ;
- }
- else if $(output-type) = html
- {
- text </p> <p> [ escape-html $(paragraph) ] ;
- }
- }
- }
- }
- }
- if $(output-type) = html
- {
- text </p> ;
- }
-}
-
-
-# Generate the start of a list of items. The type of output can be controlled by
-# the value of the 'output-type' variable.
-#
-rule list-start ( )
-{
- if $(output-type) = plain
- {
- }
- else if $(output-type) = html
- {
- text <ul> ;
- }
-}
-
-
-# Generate an item in a list. The type of output can be controlled by the value
-# of the 'output-type' variable.
-#
-rule list-item (
- item + # The item to list.
-)
-{
- if $(output-type) = plain
- {
- lines [ split-at-words "*" $(item) ] : " " " " ;
- }
- else if $(output-type) = html
- {
- text <li> [ escape-html $(item) ] </li> ;
- }
-}
-
-
-# Generate the end of a list of items. The type of output can be controlled by
-# the value of the 'output-type' variable.
-#
-rule list-end ( )
-{
- if $(output-type) = plain
- {
- lines ;
- }
- else if $(output-type) = html
- {
- text </ul> ;
- }
-}
-
-
-# Split the given text into separate lines, word-wrapping to a margin. The
-# default margin is 78 characters.
-#
-rule split-at-words (
- text + # The text to split.
- : margin ? # An optional margin, default is 78.
-)
-{
- local lines = ;
- text = [ string.words $(text:J=" ") ] ;
- text = $(text:J=" ") ;
- margin ?= 78 ;
- local char-match-1 = ".?" ;
- local char-match = "" ;
- while $(margin) != 0
- {
- char-match = $(char-match)$(char-match-1) ;
- margin = [ numbers.decrement $(margin) ] ;
- }
- while $(text)
- {
- local s = "" ;
- local t = "" ;
- # divide s into the first X characters and the rest
- s = [ MATCH "^($(char-match))(.*)" : $(text) ] ;
-
- if $(s[2])
- {
- # split the first half at a space
- t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ;
- }
- else
- {
- t = $(s) ;
- }
-
- if ! $(t[2])
- {
- t += "" ;
- }
-
- text = $(t[2])$(s[2]) ;
- lines += $(t[1]) ;
- }
- return $(lines) ;
-}
-
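A rough Python equivalent of this greedy wrap can be had from the standard textwrap module; this is only an illustration of the idea, and its handling of words longer than the margin differs from the rule above:

import textwrap

def split_at_words(text, margin=78):
    # Greedy word wrap: pack whole words onto a line until the margin is reached.
    return textwrap.wrap(" ".join(text.split()), width=margin) or [""]

print(split_at_words("one two three", margin=8))  # ['one two', 'three']
print(split_at_words("one two three", margin=5))  # ['one', 'two', 'three']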
-
-# Generate a set of fixed lines. Each single item passed in is output on a
-# separate line. For console this just echoes each line, but for html this will
-# split them with <br>.
-#
-rule lines (
- text * # The lines of text.
- : indent ? # Optional indentation prepended to each line after the first one.
- outdent ? # Optional indentation to prepend to the first line.
-)
-{
- text ?= "" ;
- indent ?= "" ;
- outdent ?= "" ;
- if $(output-type) = plain
- {
- text $(outdent)$(text[1]) $(indent)$(text[2-]) ;
- }
- else if $(output-type) = html
- {
- local indent-chars = [ string.chars $(indent) ] ;
- indent = "" ;
- for local c in $(indent-chars)
- {
- if $(c) = " " { c = "&nbsp;" ; }
- else if $(c) = " " { c = "&nbsp;&nbsp;&nbsp;&nbsp;" ; }
- indent = $(indent)$(c) ;
- }
- local html-text = [ escape-html $(text) : "&nbsp;" ] ;
- text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ;
- }
-}
-
-
-# Output text directly to the current target. When writing to a file, one can
-# indicate whether the text should go into the "prefix", the "body" (the
-# default), or the "suffix" of the file. This is independent of the actual
-# execution order of the text rule. This rule invokes a single action, only
-# once, which performs the build of the file. Therefore actions on the target
-# outside of this rule will happen entirely before and/or after all output
-# using this rule.
-#
-rule text (
- strings * # The strings of text to output.
- : overwrite ? # true to overwrite the output (if it is a file)
- : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for a file).
-)
-{
- prefix-body-suffix ?= body ;
- if $(output-target) = console
- {
- if ! $(strings)
- {
- ECHO ;
- }
- else
- {
- for local s in $(strings)
- {
- ECHO $(s) ;
- }
- }
- }
- if ! $($(output-target).did-action)
- {
- $(output-target).did-action = yes ;
- $(output-target).text-prefix = ;
- $(output-target).text-body = ;
- $(output-target).text-suffix = ;
-
- nl on $(output-target) = "
-" ;
- text-redirect on $(output-target) = ">>" ;
- if $(overwrite)
- {
- text-redirect on $(output-target) = ">" ;
- }
- text-content on $(output-target) = ;
-
- text-action $(output-target) ;
-
- if $(overwrite) && $(output-target) != console
- {
- check-for-update $(output-target) ;
- }
- }
- $(output-target).text-$(prefix-body-suffix) += $(strings) ;
- text-content on $(output-target) =
- $($(output-target).text-prefix)
- $($(output-target).text-body)
- $($(output-target).text-suffix) ;
-}
-
-
-# Outputs the text to the current targets, after word-wrapping it.
-#
-rule wrapped-text ( text + )
-{
- local lines = [ split-at-words $(text) ] ;
- text $(lines) ;
-}
-
-
-# Escapes text into html/xml printable equivalents. Does not know about tags and
-# therefore tags fed into this will also be escaped. Currently escapes space,
-# "<", ">", and "&".
-#
-rule escape-html (
- text + # The text to escape.
- : space ? # What to replace spaces with, defaults to " ".
-)
-{
- local html-text = ;
- while $(text)
- {
- local html = $(text[1]) ;
- text = $(text[2-]) ;
- html = [ regex.replace $(html) "&" "&amp;" ] ;
- html = [ regex.replace $(html) "<" "&lt;" ] ;
- html = [ regex.replace $(html) ">" "&gt;" ] ;
- if $(space)
- {
- html = [ regex.replace $(html) " " "$(space)" ] ;
- }
- html-text += $(html) ;
- }
- return $(html-text) ;
-}
-
-
-# Outputs the text strings collected by the text rule to the output file.
-#
-actions quietly text-action
-{
- @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)"
-}
-
-
-rule get-scanner ( )
-{
- if ! $(.scanner)
- {
- .scanner = [ class.new print-scanner ] ;
- }
- return $(.scanner) ;
-}
-
-
-# The following code to update print targets when their contents
-# change is a horrible hack. It basically creates a target which
-# binds to this file (print.jam) and installs a scanner on it
-# which reads the target and compares its contents to the new
-# contents that we're writing.
-#
-rule check-for-update ( target )
-{
- local scanner = [ get-scanner ] ;
- local file = [ path.native [ modules.binding $(__name__) ] ] ;
- local g = [ MATCH <(.*)> : $(target:G) ] ;
- local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ;
- DEPENDS $(target) : $(dependency-target) ;
- SEARCH on $(dependency-target) = $(file:D) ;
- ISFILE $(dependency-target) ;
- NOUPDATE $(dependency-target) ;
- base on $(dependency-target) = $(target) ;
- scanner.install $(scanner) : $(dependency-target) none ;
- return $(dependency-target) ;
-}
-
-
-class print-scanner : scanner
-{
- import path ;
- import os ;
-
- rule pattern ( )
- {
- return "(One match...)" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local base = [ on $(target) return $(base) ] ;
- local nl = [ on $(base) return $(nl) ] ;
- local text-content = [ on $(base) return $(text-content) ] ;
- local dir = [ on $(base) return $(LOCATE) ] ;
- if $(dir)
- {
- dir = [ path.make $(dir) ] ;
- }
- local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ;
- local actual-content ;
- if [ os.name ] = NT
- {
- actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ;
- }
- else
- {
- actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ;
- }
- if $(text-content:J=$(nl)) != $(actual-content)
- {
- ALWAYS $(base) ;
- }
- }
-}
-
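Stripped of the bjam scanner machinery, the idea is simply "rewrite the file only when its contents would actually change", so that dependent targets are not rebuilt needlessly. A minimal Python sketch of that check (the file name in the commented call is hypothetical):

import os

def write_if_changed(path, new_text):
    # Skip the write (and the resulting timestamp bump) when nothing changed.
    if os.path.exists(path):
        with open(path) as f:
            if f.read() == new_text:
                return False
    with open(path, "w") as f:
        f.write(new_text)
    return True

# write_if_changed("generated_docs.txt", "hello\n")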
-
-rule __test__ ( )
-{
- import assert ;
-
- assert.result one two three : split-at-words one two three : 5 ;
- assert.result "one two" three : split-at-words one two three : 8 ;
- assert.result "one two" three : split-at-words one two three : 9 ;
- assert.result "one two three" : split-at-words one two three ;
-
- # VP, 2004-12-03 The following test fails for some reason, so commenting it
- # out.
- #assert.result "one&nbsp;two&nbsp;three" "&amp;&lt;&gt;" :
- # escape-html "one two three" "&<>" ;
-}
diff --git a/jam-files/boost-build/util/regex.jam b/jam-files/boost-build/util/regex.jam
deleted file mode 100644
index 234c36f6..00000000
--- a/jam-files/boost-build/util/regex.jam
+++ /dev/null
@@ -1,193 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2003 Douglas Gregor
-# Copyright 2003 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# Returns a list of the following substrings:
-# 1) from beginning till the first occurrence of 'separator' or till the end,
-# 2) between each occurrence of 'separator' and the next occurrence,
-# 3) from the last occurrence of 'separator' till the end.
-# If no separator is present, the result will contain only one element.
-#
-
-rule split ( string separator )
-{
- local result ;
- local s = $(string) ;
-
-    # Break pieces off 's' until it has no separators left.
- local match = 1 ;
- while $(match)
- {
- match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
- if $(match)
- {
- match += "" ; # in case 3rd item was empty - works around MATCH bug
- result = $(match[3]) $(result) ;
- s = $(match[1]) ;
- }
- }
- # Combine the remaining part at the beginning, which does not have
- # separators, with the pieces broken off. Note that the rule's signature
- # does not allow the initial s to be empty.
- return $(s) $(result) ;
-}
-
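Python's re.split produces the same left-to-right decomposition, including the empty leading pieces exercised by the __test__ rule below; the separator is a regular expression in both cases:

import re

print(re.split("/", "a/b/c"))    # ['a', 'b', 'c']
print(re.split("/", "/a//b/c"))  # ['', 'a', '', 'b', 'c']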
-
-# Returns the concatenated results of applying regex.split to every element of
-# the list using the separator pattern.
-#
-rule split-list ( list * : separator )
-{
- local result ;
- for s in $(list)
- {
- result += [ split $(s) $(separator) ] ;
- }
- return $(result) ;
-}
-
-
-# Match string against pattern, and return the elements indicated by indices.
-#
-rule match ( pattern : string : indices * )
-{
- indices ?= 1 2 3 4 5 6 7 8 9 ;
- local x = [ MATCH $(pattern) : $(string) ] ;
- return $(x[$(indices)]) ;
-}
-
-
-# Matches all elements of 'list' against the 'pattern' and returns a list of
-# elements indicated by indices of all successful matches. If 'indices' is
-# omitted, returns a list of the first parenthesised groups of all successful
-# matches.
-#
-rule transform ( list * : pattern : indices * )
-{
- indices ?= 1 ;
- local result ;
- for local e in $(list)
- {
- local m = [ MATCH $(pattern) : $(e) ] ;
- if $(m)
- {
- result += $(m[$(indices)]) ;
- }
- }
- return $(result) ;
-}
-
-NATIVE_RULE regex : transform ;
-
-
-# Escapes all of the characters in symbols using the escape symbol escape-symbol
-# for the given string, and returns the escaped string.
-#
-rule escape ( string : symbols : escape-symbol )
-{
- local result = "" ;
- local m = 1 ;
- while $(m)
- {
- m = [ MATCH ^([^$(symbols)]*)([$(symbols)])(.*) : $(string) ] ;
- if $(m)
- {
- m += "" ; # Supposedly a bug fix; borrowed from regex.split
- result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ;
- string = $(m[3]) ;
- }
- }
- string ?= "" ;
- result = "$(result)$(string)" ;
- return $(result) ;
-}
-
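As a plain-Python illustration of the same escaping scheme (prefix every character listed in 'symbols' with the escape symbol), reproducing one of the module's own test cases:

def escape(string, symbols, escape_symbol):
    # Prefix each character that appears in 'symbols' with the escape symbol.
    return "".join(escape_symbol + c if c in symbols else c for c in string)

print(escape('<?xml version="1.0">', '&|()<>^', '^'))  # ^<?xml version="1.0"^>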
-
-# Replaces occurrences of a match string in a given string and returns the new
-# string. The match string can be a regex expression.
-#
-rule replace (
- string # The string to modify.
- match # The characters to replace.
- replacement # The string to replace with.
- )
-{
- local result = "" ;
- local parts = 1 ;
- while $(parts)
- {
- parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
- if $(parts)
- {
- parts += "" ;
- result = "$(replacement)$(parts[3])$(result)" ;
- string = $(parts[1]) ;
- }
- }
- string ?= "" ;
- result = "$(string)$(result)" ;
- return $(result) ;
-}
-
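In Python the equivalent is a one-line re.sub; mirroring one of the test cases below, with the match treated as a regular expression in both cases:

import re

print(re.sub(" ", "&nbsp;", "string string "))  # string&nbsp;string&nbsp;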
-
-# Replaces occurrences of a match string in a given list of strings and returns
-# a list of new strings. The match string can be a regex expression.
-#
-# list - the list of strings to modify.
-# match - the search expression.
-# replacement - the string to replace with.
-#
-rule replace-list ( list * : match : replacement )
-{
- local result ;
- for local e in $(list)
- {
- result += [ replace $(e) $(match) $(replacement) ] ;
- }
- return $(result) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- assert.result a b c : split "a/b/c" / ;
- assert.result "" a b c : split "/a/b/c" / ;
- assert.result "" "" a b c : split "//a/b/c" / ;
- assert.result "" a "" b c : split "/a//b/c" / ;
- assert.result "" a "" b c "" : split "/a//b/c/" / ;
- assert.result "" a "" b c "" "" : split "/a//b/c//" / ;
-
- assert.result a c b d
- : match (.)(.)(.)(.) : abcd : 1 3 2 4 ;
-
- assert.result a b c d
- : match (.)(.)(.)(.) : abcd ;
-
- assert.result ababab cddc
- : match ((ab)*)([cd]+) : abababcddc : 1 3 ;
-
- assert.result a.h c.h
- : transform <a.h> \"b.h\" <c.h> : <(.*)> ;
-
- assert.result a.h b.h c.h
- : transform <a.h> \"b.h\" <c.h> : <([^>]*)>|\"([^\"]*)\" : 1 2 ;
-
- assert.result "^<?xml version=\"1.0\"^>"
- : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ;
-
- assert.result "<?xml version=\\\"1.0\\\">"
- : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ;
-
- assert.result "string&nbsp;string&nbsp;" : replace "string string " " " "&nbsp;" ;
- assert.result "&nbsp;string&nbsp;string" : replace " string string" " " "&nbsp;" ;
- assert.result "string&nbsp;&nbsp;string" : replace "string string" " " "&nbsp;" ;
- assert.result "-" : replace "&" "&" "-" ;
-
- assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ;
-}
diff --git a/jam-files/boost-build/util/regex.py b/jam-files/boost-build/util/regex.py
deleted file mode 100644
index 29e26ecf..00000000
--- a/jam-files/boost-build/util/regex.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-import re
-
-def transform (list, pattern, indices = [1]):
-    """ Matches all elements of 'list' against the 'pattern'
-    and returns a list of the elements indicated by indices of
-    all successful matches. If 'indices' is omitted, returns
-    a list of the first parenthesised groups of all successful
-    matches.
- """
- result = []
-
- for e in list:
- m = re.match (pattern, e)
-
- if m:
- for i in indices:
- result.append (m.group (i))
-
- return result
-
diff --git a/jam-files/boost-build/util/sequence.jam b/jam-files/boost-build/util/sequence.jam
deleted file mode 100644
index 73919a65..00000000
--- a/jam-files/boost-build/util/sequence.jam
+++ /dev/null
@@ -1,335 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import assert ;
-import numbers ;
-import modules ;
-
-
-# Note that algorithms in this module execute largely in the caller's module
-# namespace, so that local rules can be used as function objects. Also note that
-# most predicates can be multi-element lists. In that case, all but the first
-# element are prepended to the first argument which is passed to the rule named
-# by the first element.
-
-
-# Return the elements e of $(sequence) for which [ $(predicate) e ] has a
-# non-null value.
-#
-rule filter ( predicate + : sequence * )
-{
- local caller = [ CALLER_MODULE ] ;
- local result ;
-
- for local e in $(sequence)
- {
- if [ modules.call-in $(caller) : $(predicate) $(e) ]
- {
- result += $(e) ;
- }
- }
- return $(result) ;
-}
-
-
-# Return a new sequence consisting of [ $(function) $(e) ] for each element e of
-# $(sequence).
-#
-rule transform ( function + : sequence * )
-{
- local caller = [ CALLER_MODULE ] ;
- local result ;
-
- for local e in $(sequence)
- {
- result += [ modules.call-in $(caller) : $(function) $(e) ] ;
- }
- return $(result) ;
-}
-
-
-rule reverse ( s * )
-{
- local r ;
- for local x in $(s)
- {
- r = $(x) $(r) ;
- }
- return $(r) ;
-}
-
-
-rule less ( a b )
-{
- if $(a) < $(b)
- {
- return true ;
- }
-}
-
-
-# Insertion-sort s using the BinaryPredicate ordered.
-#
-rule insertion-sort ( s * : ordered * )
-{
- if ! $(ordered)
- {
- return [ SORT $(s) ] ;
- }
- else
- {
- local caller = [ CALLER_MODULE ] ;
- ordered ?= sequence.less ;
- local result = $(s[1]) ;
- if $(ordered) = sequence.less
- {
- local head tail ;
- for local x in $(s[2-])
- {
- head = ;
- tail = $(result) ;
- while $(tail) && ( $(tail[1]) < $(x) )
- {
- head += $(tail[1]) ;
- tail = $(tail[2-]) ;
- }
- result = $(head) $(x) $(tail) ;
- }
- }
- else
- {
- for local x in $(s[2-])
- {
- local head tail ;
- tail = $(result) ;
- while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ]
- {
- head += $(tail[1]) ;
- tail = $(tail[2-]) ;
- }
- result = $(head) $(x) $(tail) ;
- }
- }
-
- return $(result) ;
- }
-}
-
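The same stable insertion sort with an optional ordering predicate, sketched in Python purely for illustration (the predicate defaults to "<", as sequence.less does above):

def insertion_sort(seq, ordered=lambda a, b: a < b):
    # Insert each element after everything already placed that orders before it.
    result = []
    for x in seq:
        i = 0
        while i < len(result) and ordered(result[i], x):
            i += 1
        result.insert(i, x)
    return result

print(insertion_sort([9, 6, 5, 3, 8, 7, 1, 2, 4]))                      # ascending
print(insertion_sort([9, 6, 5, 3, 8, 7, 1, 2, 4], lambda a, b: a > b))  # descending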
-
-# Merge two ordered sequences using the BinaryPredicate ordered.
-#
-rule merge ( s1 * : s2 * : ordered * )
-{
- ordered ?= sequence.less ;
- local result__ ;
- local caller = [ CALLER_MODULE ] ;
-
- while $(s1) && $(s2)
- {
- if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ]
- {
- result__ += $(s1[1]) ;
- s1 = $(s1[2-]) ;
- }
- else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ]
- {
- result__ += $(s2[1]) ;
- s2 = $(s2[2-]) ;
- }
- else
- {
- s2 = $(s2[2-]) ;
- }
-
- }
- result__ += $(s1) ;
- result__ += $(s2) ;
-
- return $(result__) ;
-}
-
-
-# Join the elements of s into one long string. If joint is supplied, it is used
-# as a separator.
-#
-rule join ( s * : joint ? )
-{
- joint ?= "" ;
- return $(s:J=$(joint)) ;
-}
-
-
-# Find the length of any sequence.
-#
-rule length ( s * )
-{
- local result = 0 ;
- for local i in $(s)
- {
- result = [ CALC $(result) + 1 ] ;
- }
- return $(result) ;
-}
-
-
-rule unique ( list * : stable ? )
-{
- local result ;
- local prev ;
- if $(stable)
- {
- for local f in $(list)
- {
- if ! $(f) in $(result)
- {
- result += $(f) ;
- }
- }
- }
- else
- {
- for local i in [ SORT $(list) ]
- {
- if $(i) != $(prev)
- {
- result += $(i) ;
- }
- prev = $(i) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns the maximum number in 'elements'. Uses 'ordered' for comparisons or
-# 'numbers.less' if none is provided.
-#
-rule max-element ( elements + : ordered ? )
-{
- ordered ?= numbers.less ;
-
- local max = $(elements[1]) ;
- for local e in $(elements[2-])
- {
- if [ $(ordered) $(max) $(e) ]
- {
- max = $(e) ;
- }
- }
- return $(max) ;
-}
-
-
-# Returns all of 'elements' for which the corresponding element in the parallel
-# list 'ranks' is equal to the maximum value in 'ranks'.
-#
-rule select-highest-ranked ( elements * : ranks * )
-{
- if $(elements)
- {
- local max-rank = [ max-element $(ranks) ] ;
- local result ;
- while $(elements)
- {
- if $(ranks[1]) = $(max-rank)
- {
- result += $(elements[1]) ;
- }
- elements = $(elements[2-]) ;
- ranks = $(ranks[2-]) ;
- }
- return $(result) ;
- }
-}
-NATIVE_RULE sequence : select-highest-ranked ;
-
-
-rule __test__ ( )
-{
- # Use a unique module so we can test the use of local rules.
- module sequence.__test__
- {
- import assert ;
- import sequence ;
-
- local rule is-even ( n )
- {
- if $(n) in 0 2 4 6 8
- {
- return true ;
- }
- }
-
- assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ;
-
- # Test that argument binding works.
- local rule is-equal-test ( x y )
- {
- if $(x) = $(y)
- {
- return true ;
- }
- }
-
- assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ;
-
- local rule append-x ( n )
- {
- return $(n)x ;
- }
-
- assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ;
-
- local rule repeat2 ( x )
- {
- return $(x) $(x) ;
- }
-
- assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ;
-
- local rule test-greater ( a b )
- {
- if $(a) > $(b)
- {
- return true ;
- }
- }
- assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ;
- assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ;
- assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ;
- assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ;
- assert.result 1 2 3 : sequence.merge 1 2 3 : ;
- assert.result 1 : sequence.merge 1 : 1 ;
-
- assert.result foo-bar-baz : sequence.join foo bar baz : - ;
- assert.result substandard : sequence.join sub stan dard ;
- assert.result 3.0.1 : sequence.join 3.0.1 : - ;
-
- assert.result 0 : sequence.length ;
- assert.result 3 : sequence.length a b c ;
- assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ;
-
- assert.result 1 : sequence.length a ;
- assert.result 10 : sequence.length a b c d e f g h i j ;
- assert.result 11 : sequence.length a b c d e f g h i j k ;
- assert.result 12 : sequence.length a b c d e f g h i j k l ;
-
- local p2 = x ;
- for local i in 1 2 3 4 5 6 7 8
- {
- p2 = $(p2) $(p2) ;
- }
- assert.result 256 : sequence.length $(p2) ;
-
- assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ;
-
- assert.result 5 : sequence.max-element 1 3 5 0 4 ;
-
- assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ;
-
- assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ;
- }
-}
diff --git a/jam-files/boost-build/util/sequence.py b/jam-files/boost-build/util/sequence.py
deleted file mode 100644
index 1d32efd2..00000000
--- a/jam-files/boost-build/util/sequence.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-import operator
-
-def unique (values, stable=False):
- if stable:
- s = set()
- r = []
- for v in values:
- if not v in s:
- r.append(v)
- s.add(v)
- return r
- else:
- return list(set(values))
-
-def max_element (elements, ordered = None):
- """ Returns the maximum number in 'elements'. Uses 'ordered' for comparisons,
-        or '<' if none is provided.
- """
- if not ordered: ordered = operator.lt
-
- max = elements [0]
- for e in elements [1:]:
- if ordered (max, e):
- max = e
-
- return max
-
-def select_highest_ranked (elements, ranks):
-    """ Returns all of 'elements' for which the corresponding element in the
-        parallel list 'ranks' is equal to the maximum value in 'ranks'.
- """
- if not elements:
- return []
-
- max_rank = max_element (ranks)
-
- result = []
- while elements:
- if ranks [0] == max_rank:
- result.append (elements [0])
-
- elements = elements [1:]
- ranks = ranks [1:]
-
- return result
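A terser way to express the same selection, shown only as an illustration and reproducing the values used in the Jam __test__ rule:

def select_highest_ranked(elements, ranks):
    # Keep the elements whose parallel rank equals the maximum rank.
    if not elements:
        return []
    top = max(ranks)
    return [e for e, r in zip(elements, ranks) if r == top]

print(select_highest_ranked(["e-1", "e-3", "h-3", "m-2"], [1, 3, 3, 2]))  # ['e-3', 'h-3']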
diff --git a/jam-files/boost-build/util/set.jam b/jam-files/boost-build/util/set.jam
deleted file mode 100644
index fc179134..00000000
--- a/jam-files/boost-build/util/set.jam
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-class set
-{
- rule __init__ ( )
- {
- }
-
- rule add ( elements * )
- {
- for local e in $(elements)
- {
- if ! $($(e))
- {
- $(e) = 1 ;
- self.result += $(e) ;
- }
- }
- }
-
- rule contains ( element )
- {
- return $($(element)) ;
- }
-
- rule list ( )
- {
- return $(self.result) ;
- }
-}
-
-
-
-# Returns the elements of set1 that are not in set2.
-#
-rule difference ( set1 * : set2 * )
-{
- local result = ;
- for local element in $(set1)
- {
- if ! ( $(element) in $(set2) )
- {
- result += $(element) ;
- }
- }
- return $(result) ;
-}
-
-NATIVE_RULE set : difference ;
-
-
-# Returns all the items appearing in both set1 & set2.
-#
-rule intersection ( set1 * : set2 * )
-{
- local result ;
- for local v in $(set1)
- {
- if $(v) in $(set2)
- {
- result += $(v) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns whether set1 & set2 contain the same elements. Note that this ignores
-# any element ordering differences as well as any element duplication.
-#
-rule equal ( set1 * : set2 * )
-{
- if $(set1) in $(set2) && ( $(set2) in $(set1) )
- {
- return true ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- assert.result 0 1 4 6 8 9 : difference 0 1 2 3 4 5 6 7 8 9 : 2 3 5 7 ;
- assert.result 2 5 7 : intersection 0 1 2 4 5 6 7 8 9 : 2 3 5 7 ;
-
- assert.true equal : ;
- assert.true equal 1 1 2 3 : 3 2 2 1 ;
- assert.false equal 2 3 : 3 2 2 1 ;
-}
diff --git a/jam-files/boost-build/util/set.py b/jam-files/boost-build/util/set.py
deleted file mode 100644
index dc7cf328..00000000
--- a/jam-files/boost-build/util/set.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-from utility import to_seq
-
-def difference (b, a):
- """ Returns the elements of B that are not in A.
- """
- result = []
- for element in b:
- if not element in a:
- result.append (element)
-
- return result
-
-def intersection (set1, set2):
- """ Removes from set1 any items which don't appear in set2 and returns the result.
- """
- result = []
- for v in set1:
- if v in set2:
- result.append (v)
- return result
-
-def contains (small, large):
- """ Returns true iff all elements of 'small' exist in 'large'.
- """
- small = to_seq (small)
- large = to_seq (large)
-
- for s in small:
- if not s in large:
- return False
- return True
-
-def equal (a, b):
- """ Returns True iff 'a' contains the same elements as 'b', irrespective of their order.
- # TODO: Python 2.4 has a proper set class.
- """
- return contains (a, b) and contains (b, a)
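As the TODO in the docstring notes, the built-in set type makes these helpers one-liners, at the cost of losing the input order and duplicates that the list-based versions preserve; a minimal illustration with made-up values:

a, b = [0, 1, 2, 3, 4], [2, 3, 5, 7]
print(set(b) - set(a))                          # {5, 7}: elements of b not in a
print(set(a) & set(b))                          # {2, 3}: intersection
print(set([1, 1, 2, 3]) == set([3, 2, 2, 1]))   # True: order and duplication ignored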
diff --git a/jam-files/boost-build/util/string.jam b/jam-files/boost-build/util/string.jam
deleted file mode 100644
index a39ed119..00000000
--- a/jam-files/boost-build/util/string.jam
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright 2002 Dave Abrahams
-# Copyright 2002, 2003 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import regex ;
-
-
-# Characters considered whitespace, as a list.
-.whitespace-chars = " " " " "
-" ;
-
-# Characters considered whitespace, as a single string.
-.whitespace = $(.whitespace-chars:J="") ;
-
-
-# Returns the canonical set of whitespace characters, as a list.
-#
-rule whitespace-chars ( )
-{
- return $(.whitespace-chars) ;
-}
-
-
-# Returns the canonical set of whitespace characters, as a single string.
-#
-rule whitespace ( )
-{
- return $(.whitespace) ;
-}
-
-
-# Splits the given string into a list of strings composed of each character of
-# the string in sequence.
-#
-rule chars (
- string # The string to split.
- )
-{
- local result ;
- while $(string)
- {
- local s = [ MATCH (.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.*) : $(string) ] ;
- string = $(s[9]) ;
- result += $(s[1-8]) ;
- }
-
- # Trim off empty strings.
- while $(result[1]) && ! $(result[-1])
- {
- result = $(result[1--2]) ;
- }
-
- return $(result) ;
-}
-
-
-# Apply a set of standard transformations to string to produce an abbreviation
-# no more than 5 characters long.
-#
-rule abbreviate ( string )
-{
- local r = $(.abbreviated-$(string)) ;
- if $(r)
- {
- return $(r) ;
- }
- # Anything less than 4 characters gets no abbreviation.
- else if ! [ MATCH (....) : $(string) ]
- {
- .abbreviated-$(string) = $(string) ;
- return $(string) ;
- }
- else
- {
- # Separate the initial letter in case it's a vowel.
- local s1 = [ MATCH ^(.)(.*) : $(string) ] ;
-
- # Drop trailing "ing".
- local s2 = [ MATCH ^(.*)ing$ : $(s1[2]) ] ;
- s2 ?= $(s1[2]) ;
-
- # Reduce all doubled characters to one.
- local last = "" ;
- for local c in [ chars $(s2) ]
- {
- if $(c) != $(last)
- {
- r += $(c) ;
- last = $(c) ;
- }
- }
- s2 = $(r:J="") ;
-
- # Chop all vowels out of the remainder.
- s2 = [ regex.replace $(s2) [AEIOUaeiou] "" ] ;
-
- # Shorten remaining consonants to 4 characters.
- s2 = [ MATCH ^(.?.?.?.?) : $(s2) ] ;
-
- # Glue the initial character back on to the front.
- s2 = $(s1[1])$(s2) ;
-
- .abbreviated-$(string) = $(s2) ;
- return $(s2) ;
- }
-}
-
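The abbreviation recipe above translates almost line for line into Python; this sketch skips the caching the Jam rule does but otherwise follows the same steps:

import re

def abbreviate(s):
    # Keep short names as-is; otherwise keep the first letter, drop a trailing
    # "ing", collapse runs of repeated letters, strip vowels, and keep at most
    # four of the remaining characters.
    if len(s) < 4:
        return s
    head, rest = s[0], s[1:]
    rest = re.sub(r"ing$", "", rest)
    rest = re.sub(r"(.)\1+", r"\1", rest)
    rest = re.sub(r"[AEIOUaeiou]", "", rest)
    return head + rest[:4]

for word in ["runtime", "debugging", "asynchronous", "pop"]:
    print(word, "->", abbreviate(word))  # rntm, dbg, async, pop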
-
-# Concatenates the given strings, inserting the given separator between each
-# string.
-#
-rule join (
- strings * # The strings to join.
- : separator ? # The optional separator.
- )
-{
- separator ?= "" ;
- return $(strings:J=$(separator)) ;
-}
-
-
-# Split a string into whitespace separated words.
-#
-rule words (
- string # The string to split.
- : whitespace * # Optional, characters to consider as whitespace.
- )
-{
- whitespace = $(whitespace:J="") ;
- whitespace ?= $(.whitespace) ;
- local w = ;
- while $(string)
- {
- string = [ MATCH "^[$(whitespace)]*([^$(whitespace)]*)(.*)" : $(string) ] ;
- if $(string[1]) && $(string[1]) != ""
- {
- w += $(string[1]) ;
- }
- string = $(string[2]) ;
- }
- return $(w) ;
-}
-
-
-# Check that the given string is composed entirely of whitespace.
-#
-rule is-whitespace (
- string ? # The string to test.
- )
-{
- if ! $(string) { return true ; }
- else if $(string) = "" { return true ; }
- else if [ MATCH "^([$(.whitespace)]+)$" : $(string) ] { return true ; }
- else { return ; }
-}
-
-rule __test__ ( )
-{
- import assert ;
- assert.result a b c : chars abc ;
-
- assert.result rntm : abbreviate runtime ;
- assert.result ovrld : abbreviate overload ;
- assert.result dbg : abbreviate debugging ;
- assert.result async : abbreviate asynchronous ;
- assert.result pop : abbreviate pop ;
- assert.result aaa : abbreviate aaa ;
- assert.result qck : abbreviate quack ;
- assert.result sttc : abbreviate static ;
-
- # Check boundary cases.
- assert.result a : chars a ;
- assert.result : chars "" ;
- assert.result a b c d e f g h : chars abcdefgh ;
- assert.result a b c d e f g h i : chars abcdefghi ;
- assert.result a b c d e f g h i j : chars abcdefghij ;
- assert.result a b c d e f g h i j k : chars abcdefghijk ;
-
- assert.result a//b/c/d : join a "" b c d : / ;
- assert.result abcd : join a "" b c d ;
-
- assert.result a b c : words "a b c" ;
-
- assert.true is-whitespace " " ;
- assert.false is-whitespace " a b c " ;
- assert.true is-whitespace "" ;
- assert.true is-whitespace ;
-}
diff --git a/jam-files/boost-build/util/utility.jam b/jam-files/boost-build/util/utility.jam
deleted file mode 100644
index c46747f5..00000000
--- a/jam-files/boost-build/util/utility.jam
+++ /dev/null
@@ -1,235 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : is-instance ;
-import errors ;
-
-
-# For all elements of 'list' which do not already have 'suffix', add 'suffix'.
-#
-rule apply-default-suffix ( suffix : list * )
-{
- local result ;
- for local i in $(list)
- {
- if $(i:S) = $(suffix)
- {
- result += $(i) ;
- }
- else
- {
- result += $(i)$(suffix) ;
- }
- }
- return $(result) ;
-}
-
-
-# If 'name' contains a dot, returns the part before the last dot. If 'name'
-# contains no dot, returns it unmodified.
-#
-rule basename ( name )
-{
- if $(name:S)
- {
- name = $(name:B) ;
- }
- return $(name) ;
-}
-
-
-# Return the file of the caller of the rule that called caller-file.
-#
-rule caller-file ( )
-{
- local bt = [ BACKTRACE ] ;
- return $(bt[9]) ;
-}
-
-
-# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal'
-# method. Uses ordinary jam's comparison otherwise.
-#
-rule equal ( a b )
-{
- if [ is-instance $(a) ]
- {
- return [ $(a).equal $(b) ] ;
- }
- else
- {
- if $(a) = $(b)
- {
- return true ;
- }
- }
-}
-
-
-# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less'
-# method. Uses ordinary jam's comparison otherwise.
-#
-rule less ( a b )
-{
- if [ is-instance $(a) ]
- {
- return [ $(a).less $(b) ] ;
- }
- else
- {
- if $(a) < $(b)
- {
- return true ;
- }
- }
-}
-
-
-# Returns the textual representation of the argument. If it is a class
-# instance, calls its 'str' method. Otherwise, returns the argument.
-#
-rule str ( value )
-{
- if [ is-instance $(value) ]
- {
- return [ $(value).str ] ;
- }
- else
- {
- return $(value) ;
- }
-}
-
-
-# Accepts a list of gristed values and returns them ungristed. Reports an error
-# in case any of the passed parameters is not gristed, i.e. surrounded in angle
-# brackets < and >.
-#
-rule ungrist ( names * )
-{
- local result ;
- for local name in $(names)
- {
- local stripped = [ MATCH ^<(.*)>$ : $(name) ] ;
- if ! $(stripped)
- {
- errors.error "in ungrist $(names) : $(name) is not of the form <.*>" ;
- }
- result += $(stripped) ;
- }
- return $(result) ;
-}
-
-
-# If the passed value is quoted, unquotes it. Otherwise returns the value
-# unchanged.
-#
-rule unquote ( value ? )
-{
- local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ;
- if $(match-result)
- {
- return $(match-result[2]) ;
- }
- else
- {
- return $(value) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
- import errors : try catch ;
-
- assert.result 123 : str 123 ;
-
- class test-class__
- {
- rule __init__ ( ) { }
- rule str ( ) { return "str-test-class" ; }
- rule less ( a ) { return "yes, of course!" ; }
- rule equal ( a ) { return "not sure" ; }
- }
-
- assert.result "str-test-class" : str [ new test-class__ ] ;
- assert.true less 1 2 ;
- assert.false less 2 1 ;
- assert.result "yes, of course!" : less [ new test-class__ ] 1 ;
- assert.true equal 1 1 ;
- assert.false equal 1 2 ;
- assert.result "not sure" : equal [ new test-class__ ] 1 ;
-
- assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib
- ;
-
- assert.result foo : basename foo ;
- assert.result foo : basename foo.so ;
- assert.result foo.so : basename foo.so.1 ;
-
- assert.result : unquote ;
- assert.result "" : unquote "" ;
- assert.result foo : unquote foo ;
- assert.result \"foo : unquote \"foo ;
- assert.result foo\" : unquote foo\" ;
- assert.result foo : unquote \"foo\" ;
- assert.result \"foo\" : unquote \"\"foo\"\" ;
-
- assert.result : ungrist ;
- assert.result foo : ungrist <foo> ;
- assert.result <foo> : ungrist <<foo>> ;
- assert.result foo bar : ungrist <foo> <bar> ;
-
- try ;
- {
- ungrist "" ;
- }
- catch "in ungrist : is not of the form <.*>" ;
-
- try ;
- {
- ungrist <> ;
- }
- catch "in ungrist <> : <> is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo ;
- }
- catch "in ungrist foo : foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist <foo ;
- }
- catch "in ungrist <foo : <foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo> ;
- }
- catch "in ungrist foo> : foo> is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo bar ;
- }
- catch "in ungrist foo : foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo <bar> ;
- }
- catch "in ungrist foo : foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist <foo> bar ;
- }
- catch "in ungrist bar : bar is not of the form <.*>" ;
-}
diff --git a/jam-files/boost-build/util/utility.py b/jam-files/boost-build/util/utility.py
deleted file mode 100644
index afea765b..00000000
--- a/jam-files/boost-build/util/utility.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-""" Utility functions to add/remove/get grists.
-    Grists are strings enclosed in angle brackets (<>) that are used as prefixes. See Jam for more information.
-"""
-
-import re
-import os
-import bjam
-from b2.exceptions import *
-
-__re_grist_and_value = re.compile (r'(<[^>]*>)(.*)')
-__re_grist_content = re.compile ('^<(.*)>$')
-__re_backslash = re.compile (r'\\')
-
-def to_seq (value):
- """ If value is a sequence, returns it.
- If it is a string, returns a sequence with value as its sole element.
- """
- if not value:
- return []
-
- if isinstance (value, str):
- return [value]
-
- else:
- return value
-
-def replace_references_by_objects (manager, refs):
- objs = []
- for r in refs:
- objs.append (manager.get_object (r))
- return objs
-
-def add_grist (features):
- """ Transform a string by bracketing it with "<>". If already bracketed, does nothing.
- features: one string or a sequence of strings
- return: the gristed string, if features is a string, or a sequence of gristed strings, if features is a sequence
- """
-
- def grist_one (feature):
- if feature [0] != '<' and feature [len (feature) - 1] != '>':
- return '<' + feature + '>'
- else:
- return feature
-
- if isinstance (features, str):
- return grist_one (features)
- else:
- return [ grist_one (feature) for feature in features ]
-
-def replace_grist (features, new_grist):
- """ Replaces the grist of a string by a new one.
- Returns the string with the new grist.
- """
- def replace_grist_one (name, new_grist):
- split = __re_grist_and_value.match (name)
- if not split:
- return new_grist + name
- else:
- return new_grist + split.group (2)
-
- if isinstance (features, str):
- return replace_grist_one (features, new_grist)
- else:
- return [ replace_grist_one (feature, new_grist) for feature in features ]
-
-def get_value (property):
- """ Gets the value of a property, that is, the part following the grist, if any.
- """
- return replace_grist (property, '')
-
-def get_grist (value):
- """ Returns the grist of a string.
- If value is a sequence, does it for every value and returns the result as a sequence.
- """
- def get_grist_one (name):
- split = __re_grist_and_value.match (name)
- if not split:
- return ''
- else:
- return split.group (1)
-
- if isinstance (value, str):
- return get_grist_one (value)
- else:
- return [ get_grist_one (v) for v in value ]
-
-def ungrist (value):
- """ Returns the value without grist.
- If value is a sequence, does it for every value and returns the result as a sequence.
- """
- def ungrist_one (value):
- stripped = __re_grist_content.match (value)
- if not stripped:
- raise BaseException ("in ungrist: '%s' is not of the form <.*>" % value)
-
- return stripped.group (1)
-
- if isinstance (value, str):
- return ungrist_one (value)
- else:
- return [ ungrist_one (v) for v in value ]
-
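A condensed, illustration-only version of the grist accessors above, using the same regular-expression idea (the sample property value is made up):

import re

_grist_and_value = re.compile(r"(<[^>]*>)(.*)")

def get_grist(name):
    # The leading <...> part of a property, or "" if there is none.
    m = _grist_and_value.match(name)
    return m.group(1) if m else ""

def get_value(name):
    # The part of a property after the grist.
    m = _grist_and_value.match(name)
    return m.group(2) if m else name

print(get_grist("<include>/usr/local"))  # <include>
print(get_value("<include>/usr/local"))  # /usr/local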
-def replace_suffix (name, new_suffix):
- """ Replaces the suffix of name by new_suffix.
- If no suffix exists, the new one is added.
- """
- split = os.path.splitext (name)
- return split [0] + new_suffix
-
-def forward_slashes (s):
- """ Converts all backslashes to forward slashes.
- """
- return __re_backslash.sub ('/', s)
-
-
-def split_action_id (id):
- """ Splits an id in the toolset and specific rule parts. E.g.
- 'gcc.compile.c++' returns ('gcc', 'compile.c++')
- """
- split = id.split ('.', 1)
- toolset = split [0]
- name = ''
- if len (split) > 1:
- name = split [1]
- return (toolset, name)
-
-def os_name ():
- result = bjam.variable("OS")
- assert(len(result) == 1)
- return result[0]
-
-def platform ():
- return bjam.variable("OSPLAT")
-
-def os_version ():
- return bjam.variable("OSVER")
-
-def on_windows ():
- """ Returns true if running on windows, whether in cygwin or not.
- """
- if bjam.variable("NT"):
- return True
-
- elif bjam.variable("UNIX"):
-
- uname = bjam.variable("JAMUNAME")
- if uname and uname[0].startswith("CYGWIN"):
- return True
-
- return False
diff --git a/jam-files/engine/Jambase b/jam-files/engine/Jambase
deleted file mode 100644
index 94f8fbde..00000000
--- a/jam-files/engine/Jambase
+++ /dev/null
@@ -1,2473 +0,0 @@
-#
-# /+\
-# +\ Copyright 1993, 2000 Christopher Seiwald.
-# \+/
-#
-# This file is part of Jam - see jam.c for Copyright information.
-#
-
-# This file is ALSO:
-# Copyright 2001-2004 David Abrahams.
-# Copyright 2002-2004 Rene Rivera.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-if $(NT)
-{
- SLASH ?= \\ ;
-}
-SLASH ?= / ;
-
-
-# Glob for patterns in the directories starting from the given start directory,
-# up to and including the root of the file-system. We stop globbing as soon as
-# we find at least one match.
-#
-rule find-to-root ( dir : patterns + )
-{
- local globs = [ GLOB $(dir) : $(patterns) ] ;
- while ! $(globs) && $(dir:P) != $(dir)
- {
- dir = $(dir:P) ;
- globs = [ GLOB $(dir) : $(patterns) ] ;
- }
- return $(globs) ;
-}
-
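The same upward search, sketched in Python for illustration only (it is not part of the Jambase; the pattern in the demo call is simply the file this rule is typically used to locate):

import glob
import os

def find_to_root(directory, patterns):
    # Walk from 'directory' up to the filesystem root and return the first
    # non-empty set of matches for any of the patterns.
    directory = os.path.abspath(directory)
    while True:
        hits = [m for p in patterns for m in glob.glob(os.path.join(directory, p))]
        if hits:
            return hits
        parent = os.path.dirname(directory)
        if parent == directory:
            return []
        directory = parent

print(find_to_root(".", ["boost-build.jam"]))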
-
-# This global will hold the location of the user's boost-build.jam file.
-.boost-build-file = ;
-
-# This global will hold the location of the build system bootstrap file.
-.bootstrap-file = ;
-
-# Remember the value of $(BOOST_BUILD_PATH) supplied to us by the user.
-BOOST_BUILD_PATH.user-value = $(BOOST_BUILD_PATH) ;
-
-# On Unix only, when BOOST_BUILD_PATH is not supplied by the user, set it to a
-# sensible default value. This allows Boost.Build to work without any
-# environment variables, which is good in itself and also required by the Debian
-# Policy.
-if ! $(BOOST_BUILD_PATH) && $(UNIX)
-{
- BOOST_BUILD_PATH = /usr/share/boost-build ;
-}
-
-
-rule _poke ( module-name ? : variables + : value * )
-{
- module $(<)
- {
- $(>) = $(3) ;
- }
-}
-
-
-# This rule can be invoked from an optional user's boost-build.jam file to both
-# indicate where to find the build system files, and to load them. The path
-# indicated is relative to the location of the boost-build.jam file.
-#
-rule boost-build ( dir ? )
-{
- if $(.bootstrap-file)
- {
- ECHO "Error: Illegal attempt to re-bootstrap the build system by invoking" ;
- ECHO ;
- ECHO " 'boost-build" $(dir) ";'" ;
- ECHO ;
- EXIT "Please consult the documentation at 'http://www.boost.org'." ;
- }
-
- # Add the given directory to the path so we can find the build system. If
- # dir is empty, has no effect.
- BOOST_BUILD_PATH = $(dir:R=$(.boost-build-file:D)) $(BOOST_BUILD_PATH) ;
-
- # We might have just modified the *global* value of BOOST_BUILD_PATH. The
- # code that loads the rest of Boost.Build, in particular the site-config.jam
- # and user-config.jam configuration files uses os.environ, so we need to
- # update the value there.
- _poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
-
- # Try to find the build system bootstrap file 'bootstrap.jam'.
- local bootstrap-file = [ GLOB $(BOOST_BUILD_PATH) : bootstrap.jam ] ;
- .bootstrap-file = $(bootstrap-file[1]) ;
-
- # There is no bootstrap.jam we can find, exit with an error.
- if ! $(.bootstrap-file)
- {
- ECHO "Unable to load Boost.Build: could not find build system." ;
- ECHO --------------------------------------------------------- ;
- ECHO "$(.boost-build-file) attempted to load the build system by invoking" ;
- ECHO ;
- ECHO " 'boost-build" $(dir) ";'" ;
- ECHO ;
- ECHO "but we were unable to find \"bootstrap.jam\" in the specified directory" ;
- ECHO "or in BOOST_BUILD_PATH (searching "$(BOOST_BUILD_PATH:J=", ")")." ;
- ECHO ;
- EXIT "Please consult the documentation at 'http://www.boost.org'." ;
- }
-
- if [ MATCH .*(--debug-configuration).* : $(ARGV) ]
- {
- ECHO "notice: loading Boost.Build from"
- [ NORMALIZE_PATH $(.bootstrap-file:D) ] ;
- }
-
- # Load the build system, now that we know where to start from.
- include $(.bootstrap-file) ;
-}
-
-
-if [ MATCH .*(b2).* : $(ARGV[1]:BL) ]
- || [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]
- || $(BOOST_ROOT) # A temporary measure so Jam works with Boost.Build v1.
-{
- # We attempt to load "boost-build.jam" by searching from the current
- # invocation directory up to the root of the file-system.
- #
- # boost-build.jam is expected to invoke the "boost-build" rule to load the
- # Boost.Build files.
-
- local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;
- local self = [ SELF_PATH ] ;
- local boost-build-relative = ../../share/boost-build ;
- local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;
-
- local boost-build-files =
- [ find-to-root [ PWD ] : boost-build.jam ]
- [ GLOB $(self-based-path) : boost-build.jam ]
- # Another temporary measure so Jam works with Boost.Build v1.
- [ GLOB $(search-path) : boost-build.jam ] ;
-
- .boost-build-file = $(boost-build-files[1]) ;
-
- # If there is no boost-build.jam we can find, exit with an error and
- # some information.
- if ! $(.boost-build-file)
- {
- ECHO "Unable to load Boost.Build: could not find \"boost-build.jam\"" ;
- ECHO --------------------------------------------------------------- ;
-
- if ! [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]
- {
- ECHO "BOOST_ROOT must be set, either in the environment, or " ;
- ECHO "on the command-line with -sBOOST_ROOT=..., to the root" ;
- ECHO "of the boost installation." ;
- ECHO ;
- }
-
- ECHO "Attempted search from" [ PWD ] "up to the root" ;
- ECHO "at" $(self-based-path) ;
- ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
- EXIT "Please consult the documentation at 'http://www.boost.org'." ;
- }
-
- if [ MATCH .*(--debug-configuration).* : $(ARGV) ]
- {
- ECHO "notice: found boost-build.jam at"
- [ NORMALIZE_PATH $(.boost-build-file) ] ;
- }
-
- # Now load the boost-build.jam to get the build system loaded. This
- # incidentally loads the user's Jamfile and attempts to build targets.
- #
- # We also set it up so we can tell whether we are loading the new V2 system
- # or the old V1 system.
- include $(.boost-build-file) ;
-
- # Check that, at minimum, the bootstrap file was found.
- if ! $(.bootstrap-file)
- {
- ECHO "Unable to load Boost.Build" ;
- ECHO -------------------------- ;
- ECHO "\"$(.boost-build-file)\" was found by searching from" [ PWD ] "up to the root" ;
- ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
- ECHO ;
- ECHO "However, it failed to call the \"boost-build\" rule to indicate" ;
- ECHO "the location of the build system." ;
- ECHO ;
- EXIT "Please consult the documentation at 'http://www.boost.org'." ;
- }
-}
-else
-{
-
-#
-# JAMBASE - jam 2.3 ruleset providing make(1)-like functionality
-#
-# Supports UNIX, NT, and VMS.
-#
-# 12/27/93 (seiwald) - perturb library sources with SOURCE_GRIST
-# 04/18/94 (seiwald) - use '?=' when setting OS specific vars
-# 04/21/94 (seiwald) - do RmTemps together
-# 05/05/94 (seiwald) - all supported C compilers support -o: relegate
-# RELOCATE as an option; set Ranlib to "" to disable it
-# 06/01/94 (seiwald) - new 'actions existing' to do existing sources
-# 08/25/94 (seiwald) - new ObjectCcFlags rule to append to per-target CCFLAGS
-# 08/29/94 (seiwald) - new ObjectHdrs rule to append to per-target HDRS
-# 09/19/94 (seiwald) - LinkLibraries and Undefs now append
-# - Rule names downshifted.
-# 10/06/94 (seiwald) - Dumb yyacc stuff moved into Jamfile.
-# 10/14/94 (seiwald) - (Crude) support for .s, .C, .cc, .cpp, and .f files.
-# 01/08/95 (seiwald) - Shell now handled with awk, not sed
-# 01/09/95 (seiwald) - Install* now take dest directory as target
-# 01/10/95 (seiwald) - All entries sorted.
-# 01/10/95 (seiwald) - NT support moved in, with LauraW's help.
-# 01/10/95 (seiwald) - VMS support moved in.
-# 02/06/95 (seiwald) - ObjectC++Flags and SubDirC++Flags added.
-# 02/07/95 (seiwald) - Iron out when HDRSEARCH uses "" or SEARCH_SOURCE.
-# 02/08/95 (seiwald) - SubDir works on VMS.
-# 02/14/95 (seiwald) - MkDir and entourage.
-# 04/30/95 (seiwald) - Use install -c flag so that it copies, not moves.
-# 07/10/95 (taylor) - Support for Microsoft C++.
-# 11/21/96 (peterk) - Support for BeOS
-# 07/19/99 (sickel) - Support for Mac OS X Server (and maybe client)
-# 02/18/00 (belmonte)- Support for Cygwin.
-
-# Special targets defined in this file:
-#
-# all - parent of first, shell, files, lib, exe
-# first - first dependency of 'all', for potential initialization
-# shell - parent of all Shell targets
-# files - parent of all File targets
-# lib - parent of all Library targets
-# exe - parent of all Main targets
-# dirs - parent of all MkDir targets
-# clean - removes all Shell, File, Library, and Main targets
-# uninstall - removes all Install targets
-#
-
-# Rules defined by this file:
-#
-# as obj.o : source.s ; .s -> .o
-# Bulk dir : files ; populate directory with many files
-# Cc obj.o : source.c ; .c -> .o
-# C++ obj.o : source.cc ; .cc -> .o
-# Clean clean : sources ; remove sources with 'jam clean'
-# File dest : source ; copy file
-# Fortran obj.o : source.f ; .f -> .o
-# GenFile source.c : program args ; make custom file
-# Hardlink target : source ; make link from source to target
-# HdrRule source : headers ; handle #includes
-# InstallInto dir : sources ; install any files
-# InstallBin dir : sources ; install binaries
-# InstallLib dir : sources ; install files
-# InstallFile dir : sources ; install files
-# InstallMan dir : sources ; install man pages
-# InstallShell dir : sources ; install shell scripts
-# Lex source.c : source.l ; .l -> .c
-# Library lib : source ; archive library from compiled sources
-# LibraryFromObjects lib : objects ; archive library from objects
-# LinkLibraries images : libraries ; bag libraries onto Mains
-# Main image : source ; link executable from compiled sources
-# MainFromObjects image : objects ; link executable from objects
-# MkDir dir ; make a directory, if not there
-# Object object : source ; compile object from source
-# ObjectCcFlags source : flags ; add compiler flags for object
-# ObjectC++Flags source : flags ; add compiler flags for object
-# ObjectHdrs source : dirs ; add include directories for object
-# Objects sources ; compile sources
-# RmTemps target : sources ; remove temp sources after target made
-# Setuid images ; mark executables Setuid
-# SubDir TOP d1 d2 ... ; start a subdirectory Jamfile
-# SubDirCcFlags flags ; add compiler flags until next SubDir
-# SubDirC++Flags flags ; add compiler flags until next SubDir
-# SubDirHdrs dirs ; add include dirs until next SubDir
-# SubInclude TOP d1 d2 ... ; include a subdirectory Jamfile
-# Shell exe : source ; make a shell executable
-# Undefines images : symbols ; save undef's for linking
-# UserObject object : source ; handle unknown suffixes for Object
-# Yacc source.c : source.y ; .y -> .c
-#
-# Utility rules that have no side effects (not supported):
-#
-# FAppendSuffix f1 f2 ... : $(SUF) ; return $(<) with suffixes
-# FConcat value ... ; return concatenated values
-# FDirName d1 d2 ... ; return path from root to dir
-# FGrist d1 d2 ... ; return d1!d2!...
-# FGristFiles value ; return $(value:G=$(SOURCE_GRIST))
-# FGristSourceFiles value ; return $(value:G=$(SOURCE_GRIST))
-# FRelPath d1 : d2 ; return rel path from d1 to d2
-# FSubDir d1 d2 ... ; return path to root
-#
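
The rule signatures listed above combine into a small user Jamfile. A minimal sketch, with invented target and source names, relying on the defaults defined later in this file:

    # Hypothetical Jamfile exercising the rules listed above.
    Main myprog : main.c util.c ;       # link executable from compiled sources
    Library libmy : helper.c ;          # archive library from compiled sources
    LinkLibraries myprog : libmy ;      # bag the library onto the Main target
    InstallBin $(BINDIR) : myprog ;     # installed by 'jam install'
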
-
-
-# Brief review of the jam language:
-#
-# Statements:
-# rule RULE - statements to process a rule
-# actions RULE - system commands to carry out target update
-#
-# Modifiers on actions:
-# together - multiple instances of same rule on target get executed
-# once with their sources ($(>)) concatenated
-# updated - refers to updated sources ($(>)) only
-# ignore - ignore return status of command
-# quietly - don't trace its execution unless verbose
-# piecemeal - iterate command each time with a small subset of $(>)
-# existing - refers to currently existing sources ($(>)) only
-# bind vars - subject to binding before expanding in actions
-#
-# Special rules:
-# ALWAYS - always build a target
-# DEPENDS - builds the dependency graph
-# ECHO - blurt out targets on stdout
-# EXIT - blurt out targets and exit
-# INCLUDES - marks sources as headers for target (a codependency)
-# NOCARE - don't panic if the target can't be built
-# NOUPDATE - create the target if needed but never update it
-# NOTFILE - ignore the timestamp of the target (it's not a file)
-# TEMPORARY - target need not be present if sources haven't changed
-#
-# Special variables set by jam:
-# $(<) - targets of a rule (to the left of the :)
-# $(>) - sources of a rule (to the right of the :)
-# $(xxx) - true on xxx (UNIX, VMS, NT, OS2, MAC)
-# $(OS) - name of OS - varies wildly
-# $(JAMVERSION) - version number (2.3)
-#
-# Special variables used by jam:
-# SEARCH - where to find something (used during binding and actions)
-# LOCATE - where to plop something not found with SEARCH
-# HDRRULE - rule to call to handle include files
-# HDRSCAN - egrep regex to extract include files
-#
-# Special targets:
-# all - default if none given on command line
-#
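
A toy rule/actions pair using only what the review above introduces ($(<), $(>), DEPENDS, and an actions block) might look like the following sketch; the names are invented, and the real File rule defined later in this file follows the same shape:

    rule CopyExample
    {
        # the target depends on its source
        DEPENDS $(<) : $(>) ;
    }
    actions CopyExample
    {
        $(CP) $(>) $(<)
    }
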
-
-# Initialize variables
-#
-
-#
-# OS specific variable settings
-#
-if $(NT)
-{
- # the list of supported toolsets on Windows NT and Windows 95/98
- #
- local SUPPORTED_TOOLSETS = "BORLANDC" "VC7" "VISUALC" "VISUALC16" "INTELC" "WATCOM"
- "MINGW" "LCC" ;
-
- # this variable holds the current toolset
- #
- TOOLSET = "" ;
-
- # if the JAM_TOOLSET environment variable is defined, check that it is
- # one of our supported values
- #
- if $(JAM_TOOLSET)
- {
- local t ;
-
- for t in $(SUPPORTED_TOOLSETS)
- {
- $(t) = $($(t):J=" ") ; # reconstitute paths with spaces in them
- if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }
- }
-
- if ! $(TOOLSET)
- {
- ECHO "The JAM_TOOLSET environment variable is defined but its value" ;
- ECHO "is invalid, please use one of the following:" ;
- ECHO ;
-
- for t in $(SUPPORTED_TOOLSETS) { ECHO " " $(t) ; }
- EXIT ;
- }
- }
-
- # if TOOLSET is empty, we'll try to detect the toolset from other
- # environment variables to remain backwards compatible with Jam 2.3
- #
- if ! $(TOOLSET)
- {
- if $(BCCROOT)
- {
- TOOLSET = BORLANDC ;
- BORLANDC = $(BCCROOT:J=" ") ;
- }
- else if $(MSVC)
- {
- TOOLSET = VISUALC16 ;
- VISUALC16 = $(MSVC:J=" ") ;
- }
- else if $(MSVCNT)
- {
- TOOLSET = VISUALC ;
- VISUALC = $(MSVCNT:J=" ") ;
- }
- else if $(MSVCDir)
- {
- TOOLSET = VISUALC ;
- VISUALC = $(MSVCDir:J=" ") ;
- }
- else if $(MINGW)
- {
- TOOLSET = MINGW ;
- }
- else
- {
- ECHO "Jam cannot be run because, either:" ;
- ECHO " a. You didn't set BOOST_ROOT to indicate the root of your" ;
- ECHO " Boost installation." ;
- ECHO " b. You are trying to use stock Jam but didn't indicate which" ;
- ECHO " compilation toolset to use. To do so, follow these simple" ;
- ECHO " instructions:" ;
- ECHO ;
- ECHO " - define one of the following environment variable, with the" ;
- ECHO " appropriate value according to this list:" ;
- ECHO ;
- ECHO " Variable Toolset Description" ;
- ECHO ;
- ECHO " BORLANDC Borland C++ BC++ install path" ;
- ECHO " VISUALC Microsoft Visual C++ VC++ install path" ;
- ECHO " VISUALC16 Microsoft Visual C++ 16 bit VC++ 16 bit install" ;
- ECHO " INTELC Intel C/C++ IC++ install path" ;
- ECHO " WATCOM Watcom C/C++ Watcom install path" ;
- ECHO " MINGW MinGW (gcc) MinGW install path" ;
- ECHO " LCC Win32-LCC LCC-Win32 install path" ;
- ECHO ;
- ECHO " - define the JAM_TOOLSET environment variable with the *name*" ;
- ECHO " of the toolset variable you want to use." ;
- ECHO ;
- ECHO " e.g.: set VISUALC=C:\\Visual6" ;
- ECHO " set JAM_TOOLSET=VISUALC" ;
- EXIT ;
- }
- }
-
- CP ?= copy ;
- RM ?= del /f/q ;
- SLASH ?= \\ ;
- SUFLIB ?= .lib ;
- SUFOBJ ?= .obj ;
- SUFEXE ?= .exe ;
-
- if $(TOOLSET) = BORLANDC
- {
- ECHO "Compiler is Borland C++" ;
-
- AR ?= tlib /C /P64 ;
- CC ?= bcc32 ;
- CCFLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus ;
- C++ ?= bcc32 ;
- C++FLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus -P ;
- LINK ?= $(CC) ;
- LINKFLAGS ?= $(CCFLAGS) ;
- STDLIBPATH ?= $(BORLANDC)\\lib ;
- STDHDRS ?= $(BORLANDC)\\include ;
- NOARSCAN ?= true ;
- }
- else if $(TOOLSET) = VISUALC16
- {
- ECHO "Compiler is Microsoft Visual C++ 16 bit" ;
-
- AR ?= lib /nologo ;
- CC ?= cl /nologo ;
- CCFLAGS ?= /D \"WIN\" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= $(CC) ;
- LINKFLAGS ?= $(CCFLAGS) ;
- LINKLIBS ?=
- \"$(VISUALC16)\\lib\\mlibce.lib\"
- \"$(VISUALC16)\\lib\\oldnames.lib\"
- ;
- LINKLIBS ?= ;
- NOARSCAN ?= true ;
- OPTIM ?= "" ;
- STDHDRS ?= $(VISUALC16)\\include ;
- UNDEFFLAG ?= "/u _" ;
- }
- else if $(TOOLSET) = VISUALC
- {
- ECHO "Compiler is Microsoft Visual C++" ;
-
- AR ?= lib ;
- AS ?= masm386 ;
- CC ?= cl /nologo ;
- CCFLAGS ?= "" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= link /nologo ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= \"$(VISUALC)\\lib\\advapi32.lib\"
- # $(VISUALC)\\lib\\libc.lib
- # $(VISUALC)\\lib\\oldnames.lib
- \"$(VISUALC)\\lib\\gdi32.lib\"
- \"$(VISUALC)\\lib\\user32.lib\"
- \"$(VISUALC)\\lib\\kernel32.lib\" ;
- OPTIM ?= "" ;
- STDHDRS ?= $(VISUALC)\\include ;
- UNDEFFLAG ?= "/u _" ;
- }
- else if $(TOOLSET) = VC7
- {
- ECHO "Compiler is Microsoft Visual C++ .NET" ;
-
- AR ?= lib ;
- AS ?= masm386 ;
- CC ?= cl /nologo ;
- CCFLAGS ?= "" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= link /nologo ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= \"$(VISUALC)\\PlatformSDK\\lib\\advapi32.lib\"
- # $(VISUALC)\\lib\\libc.lib
- # $(VISUALC)\\lib\\oldnames.lib
- \"$(VISUALC)\\PlatformSDK\\lib\\gdi32.lib\"
- \"$(VISUALC)\\PlatformSDK\\lib\\user32.lib\"
- \"$(VISUALC)\\PlatformSDK\\lib\\kernel32.lib\" ;
- OPTIM ?= "" ;
- STDHDRS ?= \"$(VISUALC)\\include\"
- \"$(VISUALC)\\PlatformSDK\\include\" ;
- UNDEFFLAG ?= "/u _" ;
- }
- else if $(TOOLSET) = INTELC
- {
- ECHO "Compiler is Intel C/C++" ;
-
- if ! $(VISUALC)
- {
- ECHO "As a special exception, when using the Intel C++ compiler, you need" ;
- ECHO "to define the VISUALC environment variable to indicate the location" ;
- ECHO "of your Visual C++ installation. Aborting.." ;
- EXIT ;
- }
-
- AR ?= lib ;
- AS ?= masm386 ;
- CC ?= icl /nologo ;
- CCFLAGS ?= "" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= link /nologo ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= $(VISUALC)\\lib\\advapi32.lib
- # $(VISUALC)\\lib\\libc.lib
- # $(VISUALC)\\lib\\oldnames.lib
- $(VISUALC)\\lib\\kernel32.lib
- ;
- OPTIM ?= "" ;
- STDHDRS ?= $(INTELC)\include $(VISUALC)\\include ;
- UNDEFFLAG ?= "/u _" ;
- }
- else if $(TOOLSET) = WATCOM
- {
- ECHO "Compiler is Watcom C/C++" ;
-
- AR ?= wlib ;
- CC ?= wcc386 ;
- CCFLAGS ?= /zq /DWIN32 /I$(WATCOM)\\h ; # zq=quiet
- C++ ?= wpp386 ;
- C++FLAGS ?= $(CCFLAGS) ;
- CP ?= copy ;
- DOT ?= . ;
- DOTDOT ?= .. ;
- LINK ?= wcl386 ;
- LINKFLAGS ?= /zq ; # zq=quiet
- LINKLIBS ?= ;
- MV ?= move ;
- NOARSCAN ?= true ;
- OPTIM ?= ;
- RM ?= del /f ;
- SLASH ?= \\ ;
- STDHDRS ?= $(WATCOM)\\h $(WATCOM)\\h\\nt ;
- SUFEXE ?= .exe ;
- SUFLIB ?= .lib ;
- SUFOBJ ?= .obj ;
- UNDEFFLAG ?= "/u _" ;
- }
- else if $(TOOLSET) = MINGW
- {
- ECHO "Compiler is GCC with Mingw" ;
-
- AR ?= ar -ru ;
- CC ?= gcc ;
- CCFLAGS ?= "" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= $(CC) ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= "" ;
- OPTIM ?= ;
- SUFOBJ = .o ;
- SUFLIB = .a ;
- SLASH = / ;
-# NOARSCAN ?= true ;
- }
- else if $(TOOLSET) = LCC
- {
- ECHO "Compiler is Win32-LCC" ;
-
- AR ?= lcclib ;
- CC ?= lcc ;
- CCFLAGS ?= "" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= lcclnk ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= "" ;
- OPTIM ?= ;
- NOARSCAN = true ;
- }
- else
- {
-#
-# XXX: We need better comments here !!
-#
- EXIT On NT, set BCCROOT, MSVCNT, MINGW or MSVC to the root of the
- Borland or Microsoft directories. ;
- }
-
-}
-else if $(OS2)
-{
- # the list of supported toolsets on OS/2
- #
- local SUPPORTED_TOOLSETS = "EMX" "WATCOM" ;
-
- # this variable holds the current toolset
- #
- TOOLSET = "" ;
-
- # if the JAM_TOOLSET environment variable is defined, check that it is
- # one of our supported values
- #
- if $(JAM_TOOLSET)
- {
- local t ;
-
- for t in $(SUPPORTED_TOOLSETS)
- {
- $(t) = $($(t):J=" ") ; # reconstitute paths with spaces in them
- if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }
- }
-
- if ! $(TOOLSET)
- {
- ECHO "The JAM_TOOLSET environment variable is defined but its value" ;
- ECHO "is invalid, please use one of the following:" ;
- ECHO ;
-
- for t in $(SUPPORTED_TOOLSETS) { ECHO " " $(t) ; }
- EXIT ;
- }
- }
-
- # if TOOLSET is empty, we'll try to detect the toolset from other
- # environment variables to remain backwards compatible with Jam 2.3
- #
- if ! $(TOOLSET)
- {
- if $(watcom)
- {
- WATCOM = $(watcom:J=" ") ;
- TOOLSET = WATCOM ;
- }
- else
- {
- ECHO "Jam cannot be run because you didn't indicate which compilation toolset" ;
- ECHO "to use. To do so, follow these simple instructions:" ;
- ECHO ;
- ECHO " - define one of the following environment variable, with the" ;
- ECHO " appropriate value according to this list:" ;
- ECHO ;
- ECHO " Variable Toolset Description" ;
- ECHO ;
- ECHO " WATCOM Watcom C/C++ Watcom install path" ;
- ECHO " EMX EMX (gcc) EMX install path" ;
- ECHO " VISUALAGE IBM Visual Age C/C++ VisualAge install path" ;
- ECHO ;
- ECHO " - define the JAM_TOOLSET environment variable with the *name*" ;
- ECHO " of the toolset variable you want to use." ;
- ECHO ;
- ECHO " e.g.: set WATCOM=C:\WATCOM" ;
- ECHO " set JAM_TOOLSET=WATCOM" ;
- ECHO ;
- EXIT ;
- }
- }
-
- RM = del /f ;
- CP = copy ;
- MV ?= move ;
- DOT ?= . ;
- DOTDOT ?= .. ;
- SUFLIB ?= .lib ;
- SUFOBJ ?= .obj ;
- SUFEXE ?= .exe ;
-
- if $(TOOLSET) = WATCOM
- {
- AR ?= wlib ;
- BINDIR ?= \\os2\\apps ;
- CC ?= wcc386 ;
- CCFLAGS ?= /zq /DOS2 /I$(WATCOM)\\h ; # zq=quiet
- C++ ?= wpp386 ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= wcl386 ;
- LINKFLAGS ?= /zq ; # zq=quiet
- LINKLIBS ?= ;
- NOARSCAN ?= true ;
- OPTIM ?= ;
- SLASH ?= \\ ;
- STDHDRS ?= $(WATCOM)\\h ;
- UNDEFFLAG ?= "/u _" ;
- }
- else if $(TOOLSET) = EMX
- {
- ECHO "Compiler is GCC-EMX" ;
- AR ?= ar -ru ;
- CC ?= gcc ;
- CCFLAGS ?= "" ;
- C++ ?= $(CC) ;
- C++FLAGS ?= $(CCFLAGS) ;
- LINK ?= $(CC) ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= "" ;
- OPTIM ?= ;
- SUFOBJ = .o ;
- SUFLIB = .a ;
- UNDEFFLAG ?= "-U" ;
- SLASH = / ;
-# NOARSCAN ?= true ;
- }
- else
- {
- # should never happen
- EXIT "Sorry, but the $(JAM_TOOLSET) toolset isn't supported for now" ;
- }
-}
-else if $(VMS)
-{
- C++ ?= cxx ;
- C++FLAGS ?= ;
- CC ?= cc ;
- CCFLAGS ?= ;
- CHMOD ?= set file/prot= ;
- CP ?= copy/replace ;
- CRELIB ?= true ;
- DOT ?= [] ;
- DOTDOT ?= [-] ;
- EXEMODE ?= (w:e) ;
- FILEMODE ?= (w:r) ;
- HDRS ?= ;
- LINK ?= link ;
- LINKFLAGS ?= "" ;
- LINKLIBS ?= ;
- MKDIR ?= create/dir ;
- MV ?= rename ;
- OPTIM ?= "" ;
- RM ?= delete ;
- RUNVMS ?= mcr ;
- SHELLMODE ?= (w:er) ;
- SLASH ?= . ;
- STDHDRS ?= decc$library_include ;
- SUFEXE ?= .exe ;
- SUFLIB ?= .olb ;
- SUFOBJ ?= .obj ;
-
- switch $(OS)
- {
- case OPENVMS : CCFLAGS ?= /stand=vaxc ;
- case VMS : LINKLIBS ?= sys$library:vaxcrtl.olb/lib ;
- }
-}
-else if $(MAC)
-{
- local OPT ;
-
- CW ?= "{CW}" ;
-
- MACHDRS ?=
- "$(UMACHDRS):Universal:Interfaces:CIncludes"
- "$(CW):MSL:MSL_C:MSL_Common:Include"
- "$(CW):MSL:MSL_C:MSL_MacOS:Include" ;
-
- MACLIBS ?=
- "$(CW):MacOS Support:Universal:Libraries:StubLibraries:Interfacelib"
- "$(CW):MacOS Support:Universal:Libraries:StubLibraries:Mathlib" ;
-
- MPWLIBS ?=
- "$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib"
- "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW.Lib" ;
-
- MPWNLLIBS ?=
- "$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib"
- "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW(NL).Lib" ;
-
- SIOUXHDRS ?= ;
-
- SIOUXLIBS ?=
- "$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL RuntimePPC.lib"
- "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL SIOUX.PPC.Lib"
- "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC.Lib" ;
-
- C++ ?= mwcppc ;
- C++FLAGS ?= -w off -nomapcr ;
- CC ?= mwcppc ;
- CCFLAGS ?= -w off -nomapcr ;
- CP ?= duplicate -y ;
- DOT ?= ":" ;
- DOTDOT ?= "::" ;
- HDRS ?= $(MACHDRS) $(MPWHDRS) ;
- LINK ?= mwlinkppc ;
- LINKFLAGS ?= -mpwtool -warn ;
- LINKLIBS ?= $(MACLIBS) $(MPWLIBS) ;
- MKDIR ?= newfolder ;
- MV ?= rename -y ;
- NOARSCAN ?= true ;
- OPTIM ?= ;
- RM ?= delete -y ;
- SLASH ?= ":" ;
- STDHDRS ?= ;
- SUFLIB ?= .lib ;
- SUFOBJ ?= .o ;
-}
-else if $(OS) = BEOS && $(METROWERKS)
-{
- AR ?= mwld -xml -o ;
- BINDIR ?= /boot/apps ;
- CC ?= mwcc ;
- CCFLAGS ?= -nosyspath ;
- C++ ?= $(CC) ;
- C++FLAGS ?= -nosyspath ;
- FORTRAN ?= "" ;
- LIBDIR ?= /boot/develop/libraries ;
- LINK ?= mwld ;
- LINKFLAGS ?= "" ;
- MANDIR ?= /boot/documentation/"Shell Tools"/HTML ;
- NOARSCAN ?= true ;
- STDHDRS ?= /boot/develop/headers/posix ;
-}
-else if $(OS) = BEOS
-{
- BINDIR ?= /boot/apps ;
- CC ?= gcc ;
- C++ ?= $(CC) ;
- FORTRAN ?= "" ;
- LIBDIR ?= /boot/develop/libraries ;
- LINK ?= gcc ;
- LINKLIBS ?= -lnet ;
- NOARSCAN ?= true ;
- STDHDRS ?= /boot/develop/headers/posix ;
-}
-else if $(UNIX)
-{
- switch $(OS)
- {
- case AIX :
- LINKLIBS ?= -lbsd ;
-
- case AMIGA :
- CC ?= gcc ;
- YACC ?= "bison -y" ;
-
- case CYGWIN :
- CC ?= gcc ;
- CCFLAGS += -D__cygwin__ ;
- LEX ?= flex ;
- RANLIB ?= "" ;
- SUFEXE ?= .exe ;
- YACC ?= "bison -y" ;
-
- case DGUX :
- RANLIB ?= "" ;
- RELOCATE ?= true ;
-
- case HPUX :
- YACC = ;
- CFLAGS += -Ae ;
- CCFLAGS += -Ae ;
- RANLIB ?= "" ;
-
- case INTERIX :
- CC ?= gcc ;
- RANLIB ?= "" ;
-
- case IRIX :
- RANLIB ?= "" ;
-
- case MPEIX :
- CC ?= gcc ;
- C++ ?= gcc ;
- CCFLAGS += -D_POSIX_SOURCE ;
- HDRS += /usr/include ;
- RANLIB ?= "" ;
- NOARSCAN ?= true ;
- NOARUPDATE ?= true ;
-
- case MVS :
- RANLIB ?= "" ;
-
- case NEXT :
- AR ?= libtool -o ;
- RANLIB ?= "" ;
-
- case MACOSX :
- AR ?= libtool -o ;
- C++ ?= c++ ;
- MANDIR ?= /usr/local/share/man ;
- RANLIB ?= "" ;
-
- case NCR :
- RANLIB ?= "" ;
-
- case PTX :
- RANLIB ?= "" ;
-
- case QNX :
- AR ?= wlib ;
- CC ?= cc ;
- CCFLAGS ?= -Q ; # quiet
- C++ ?= $(CC) ;
- C++FLAGS ?= -Q ; # quiet
- LINK ?= $(CC) ;
- LINKFLAGS ?= -Q ; # quiet
- NOARSCAN ?= true ;
- RANLIB ?= "" ;
-
- case SCO :
- RANLIB ?= "" ;
- RELOCATE ?= true ;
-
- case SINIX :
- RANLIB ?= "" ;
-
- case SOLARIS :
- RANLIB ?= "" ;
- AR ?= "/usr/ccs/bin/ar ru" ;
-
- case UNICOS :
- NOARSCAN ?= true ;
- OPTIM ?= -O0 ;
-
- case UNIXWARE :
- RANLIB ?= "" ;
- RELOCATE ?= true ;
- }
-
- # UNIX defaults
-
- CCFLAGS ?= ;
- C++FLAGS ?= $(CCFLAGS) ;
- CHMOD ?= chmod ;
- CHGRP ?= chgrp ;
- CHOWN ?= chown ;
- LEX ?= lex ;
- LINKFLAGS ?= $(CCFLAGS) ;
- LINKLIBS ?= ;
- OPTIM ?= -O ;
- RANLIB ?= ranlib ;
- YACC ?= yacc ;
- YACCFILES ?= y.tab ;
- YACCFLAGS ?= -d ;
-}
-
-#
-# General defaults; a lot like UNIX
-#
-
- AR ?= ar ru ;
- AS ?= as ;
- ASFLAGS ?= ;
- AWK ?= awk ;
- BINDIR ?= /usr/local/bin ;
- C++ ?= cc ;
- C++FLAGS ?= ;
- CC ?= cc ;
- CCFLAGS ?= ;
- CP ?= cp -f ;
- CRELIB ?= ;
- DOT ?= . ;
- DOTDOT ?= .. ;
- EXEMODE ?= 711 ;
- FILEMODE ?= 644 ;
- FORTRAN ?= f77 ;
- FORTRANFLAGS ?= ;
- HDRS ?= ;
- INSTALLGRIST ?= installed ;
- JAMFILE ?= Jamfile ;
- JAMRULES ?= Jamrules ;
- LEX ?= ;
- LIBDIR ?= /usr/local/lib ;
- LINK ?= $(CC) ;
- LINKFLAGS ?= ;
- LINKLIBS ?= ;
- LN ?= ln ;
- MANDIR ?= /usr/local/man ;
- MKDIR ?= mkdir ;
- MV ?= mv -f ;
- OPTIM ?= ;
- RCP ?= rcp ;
- RM ?= rm -f ;
- RSH ?= rsh ;
- SED ?= sed ;
- SHELLHEADER ?= "#!/bin/sh" ;
- SHELLMODE ?= 755 ;
- SLASH ?= / ;
- STDHDRS ?= /usr/include ;
- SUFEXE ?= "" ;
- SUFLIB ?= .a ;
- SUFOBJ ?= .o ;
- UNDEFFLAG ?= "-u _" ;
- YACC ?= ;
- YACCFILES ?= ;
- YACCFLAGS ?= ;
-
- HDRPATTERN =
- "^[ ]*#[ ]*include[ ]*[<\"]([^\">]*)[\">].*$" ;
-
- OSFULL = $(OS)$(OSVER)$(OSPLAT) $(OS)$(OSPLAT) $(OS)$(OSVER) $(OS) ;
-
-
-#
-# Base dependencies - first for "bootstrap" kinds of rules
-#
-
-DEPENDS all : shell files lib exe obj ;
-DEPENDS all shell files lib exe obj : first ;
-NOTFILE all first shell files lib exe obj dirs clean uninstall ;
-ALWAYS clean uninstall ;
-
-#
-# Rules
-#
-
-rule As
-{
- DEPENDS $(<) : $(>) ;
- ASFLAGS on $(<) += $(ASFLAGS) $(SUBDIRASFLAGS) ;
-}
-
-rule Bulk
-{
- local i ;
-
- for i in $(>)
- {
- File $(i:D=$(<)) : $(i) ;
- }
-}
-
-rule Cc
-{
- local _h ;
-
- DEPENDS $(<) : $(>) ;
-
- # Just to clarify here: this sets the per-target CCFLAGS to
- # be the current value of (global) CCFLAGS and SUBDIRCCFLAGS.
-
- CCFLAGS on $(<) += $(CCFLAGS) $(SUBDIRCCFLAGS) ;
-
- # If the compiler's -o flag doesn't work, relocate the .o
-
- if $(RELOCATE)
- {
- CcMv $(<) : $(>) ;
- }
-
- _h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;
-
- if $(VMS) && $(_h)
- {
- SLASHINC on $(<) = "/inc=(" $(_h[1]) ,$(_h[2-]) ")" ;
- }
- else if $(MAC) && $(_h)
- {
- local _i _j ;
- _j = $(_h[1]) ;
- for _i in $(_h[2-])
- {
- _j = $(_j),$(_i) ;
- }
- MACINC on $(<) = \"$(_j)\" ;
- }
-}
-
-rule C++
-{
- local _h ;
-
- DEPENDS $(<) : $(>) ;
- C++FLAGS on $(<) += $(C++FLAGS) $(SUBDIRC++FLAGS) ;
-
- if $(RELOCATE)
- {
- CcMv $(<) : $(>) ;
- }
-
- _h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;
-
- if $(VMS) && $(_h)
- {
- SLASHINC on $(<) = "/inc=(" $(_h[1]) ,$(_h[2-]) ")" ;
- }
- else if $(MAC) && $(_h)
- {
- local _i _j ;
- _j = $(_h[1]) ;
- for _i in $(_h[2-])
- {
- _j = $(_j),$(_i) ;
- }
- MACINC on $(<) = \"$(_j)\" ;
- }
-}
-
-rule Chmod
-{
- if $(CHMOD) { Chmod1 $(<) ; }
-}
-
-rule File
-{
- DEPENDS files : $(<) ;
- DEPENDS $(<) : $(>) ;
- SEARCH on $(>) = $(SEARCH_SOURCE) ;
- MODE on $(<) = $(FILEMODE) ;
- Chmod $(<) ;
-}
-
-rule Fortran
-{
- DEPENDS $(<) : $(>) ;
-}
-
-rule GenFile
-{
- local _t = [ FGristSourceFiles $(<) ] ;
- local _s = [ FAppendSuffix $(>[1]) : $(SUFEXE) ] ;
- Depends $(_t) : $(_s) $(>[2-]) ;
- GenFile1 $(_t) : $(_s) $(>[2-]) ;
- Clean clean : $(_t) ;
-}
-
-rule GenFile1
-{
- MakeLocate $(<) : $(LOCATE_SOURCE) ;
- SEARCH on $(>) = $(SEARCH_SOURCE) ;
-}
-
-rule HardLink
-{
- DEPENDS files : $(<) ;
- DEPENDS $(<) : $(>) ;
- SEARCH on $(>) = $(SEARCH_SOURCE) ;
-}
-
-rule HdrMacroFile
-{
- # HdrMacroFile file ;
- #
- # this rule is used to indicate that a given file contains definitions
- # for filename macros (e.g. "#define MYFILE_H <myfile.h>") that can
- # later be used in #include statements in the rest of the source
- #
- # these files must be parsed before any make is tried.
- #
- HDRMACRO $(<) ;
-}
-
-rule HdrRule
-{
- # HdrRule source : headers ;
-
- # N.B. This rule is called during binding, potentially after
- # the fate of many targets has been determined, and must be
- # used with caution: don't add dependencies to unrelated
- # targets, and don't set variables on $(<).
-
- # Tell Jam that anything depending on $(<) also depends on $(>),
- # set SEARCH so Jam can find the headers, but then say we don't
- # care if we can't actually find the headers (they may have been
- # within ifdefs).
-
- local s ;
-
- if $(HDRGRIST)
- {
- s = $(>:G=$(HDRGRIST)) ;
- } else {
- s = $(>) ;
- }
-
- INCLUDES $(<) : $(s) ;
- SEARCH on $(s) = $(HDRSEARCH) ;
- NOCARE $(s) ;
-
- # Propagate on $(<) to $(>)
-
- HDRSEARCH on $(s) = $(HDRSEARCH) ;
- HDRSCAN on $(s) = $(HDRSCAN) ;
- HDRRULE on $(s) = $(HDRRULE) ;
- HDRGRIST on $(s) = $(HDRGRIST) ;
-}
-
-rule InstallInto
-{
- # InstallInto dir : sources ;
-
- local i t ;
-
- t = $(>:G=$(INSTALLGRIST)) ;
-
- # Arrange for jam install
- # Arrange for jam uninstall
- # sources are in SEARCH_SOURCE
- # targets are in dir
-
- Depends install : $(t) ;
- Clean uninstall : $(t) ;
- SEARCH on $(>) = $(SEARCH_SOURCE) ;
- MakeLocate $(t) : $(<) ;
-
- # For each source, make gristed target name
- # and Install, Chmod, Chown, and Chgrp
-
- for i in $(>)
- {
- local tt = $(i:G=$(INSTALLGRIST)) ;
-
- Depends $(tt) : $(i) ;
- Install $(tt) : $(i) ;
- Chmod $(tt) ;
-
- if $(OWNER) && $(CHOWN)
- {
- Chown $(tt) ;
- OWNER on $(tt) = $(OWNER) ;
- }
-
- if $(GROUP) && $(CHGRP)
- {
- Chgrp $(tt) ;
- GROUP on $(tt) = $(GROUP) ;
- }
- }
-}
-
-rule InstallBin
-{
- local _t = [ FAppendSuffix $(>) : $(SUFEXE) ] ;
-
- InstallInto $(<) : $(_t) ;
- MODE on $(_t:G=installed) = $(EXEMODE) ;
-}
-
-rule InstallFile
-{
- InstallInto $(<) : $(>) ;
- MODE on $(>:G=installed) = $(FILEMODE) ;
-}
-
-rule InstallLib
-{
- InstallInto $(<) : $(>) ;
- MODE on $(>:G=installed) = $(FILEMODE) ;
-}
-
-rule InstallMan
-{
- # Really this just strips the . from the suffix
-
- local i s d ;
-
- for i in $(>)
- {
- switch $(i:S)
- {
- case .1 : s = 1 ; case .2 : s = 2 ; case .3 : s = 3 ;
- case .4 : s = 4 ; case .5 : s = 5 ; case .6 : s = 6 ;
- case .7 : s = 7 ; case .8 : s = 8 ; case .l : s = l ;
- case .n : s = n ; case .man : s = 1 ;
- }
-
- d = man$(s) ;
-
- InstallInto $(d:R=$(<)) : $(i) ;
- }
-
- MODE on $(>:G=installed) = $(FILEMODE) ;
-}
-
-rule InstallShell
-{
- InstallInto $(<) : $(>) ;
- MODE on $(>:G=installed) = $(SHELLMODE) ;
-}
-
-rule Lex
-{
- LexMv $(<) : $(>) ;
- DEPENDS $(<) : $(>) ;
- MakeLocate $(<) : $(LOCATE_SOURCE) ;
- Clean clean : $(<) ;
-}
-
-rule Library
-{
- LibraryFromObjects $(<) : $(>:S=$(SUFOBJ)) ;
- Objects $(>) ;
-}
-
-rule LibraryFromObjects
-{
- local _i _l _s ;
-
- # Add grist to file names
-
- _s = [ FGristFiles $(>) ] ;
- _l = $(<:S=$(SUFLIB)) ;
-
- # library depends on its member objects
-
- if $(KEEPOBJS)
- {
- DEPENDS obj : $(_s) ;
- }
- else
- {
- DEPENDS lib : $(_l) ;
- }
-
- # Set LOCATE for the library and its contents. The bound
- # value shows up as $(NEEDLIBS) on the Link actions.
- # For compatibility, we only do this if the library doesn't
- # already have a path.
-
- if ! $(_l:D)
- {
- MakeLocate $(_l) $(_l)($(_s:BS)) : $(LOCATE_TARGET) ;
- }
-
- if $(NOARSCAN)
- {
- # If we can't scan the library to timestamp its contents,
- # we have to just make the library depend directly on the
- # on-disk object files.
-
- DEPENDS $(_l) : $(_s) ;
- }
- else
- {
- # If we can scan the library, we make the library depend
- # on its members and each member depend on the on-disk
- # object file.
-
- DEPENDS $(_l) : $(_l)($(_s:BS)) ;
-
- for _i in $(_s)
- {
- DEPENDS $(_l)($(_i:BS)) : $(_i) ;
- }
- }
-
- Clean clean : $(_l) ;
-
- if $(CRELIB) { CreLib $(_l) : $(_s[1]) ; }
-
- Archive $(_l) : $(_s) ;
-
- if $(RANLIB) { Ranlib $(_l) ; }
-
- # If we can't scan the library, we have to leave the .o's around.
-
- if ! ( $(NOARSCAN) || $(KEEPOBJS) ) { RmTemps $(_l) : $(_s) ; }
-}
-
-rule Link
-{
- MODE on $(<) = $(EXEMODE) ;
- Chmod $(<) ;
-}
-
-rule LinkLibraries
-{
- # make library dependencies of target
- # set NEEDLIBS variable used by 'actions Main'
-
- local _t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;
-
- DEPENDS $(_t) : $(>:S=$(SUFLIB)) ;
- NEEDLIBS on $(_t) += $(>:S=$(SUFLIB)) ;
-}
-
-rule Main
-{
- MainFromObjects $(<) : $(>:S=$(SUFOBJ)) ;
- Objects $(>) ;
-}
-
-rule MainFromObjects
-{
- local _s _t ;
-
- # Add grist to file names
- # Add suffix to exe
-
- _s = [ FGristFiles $(>) ] ;
- _t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;
-
- if $(_t) != $(<)
- {
- DEPENDS $(<) : $(_t) ;
- NOTFILE $(<) ;
- }
-
- # make compiled sources a dependency of target
-
- DEPENDS exe : $(_t) ;
- DEPENDS $(_t) : $(_s) ;
- MakeLocate $(_t) : $(LOCATE_TARGET) ;
-
- Clean clean : $(_t) ;
-
- Link $(_t) : $(_s) ;
-}
-
-rule MakeLocate
-{
- if $(>)
- {
- LOCATE on $(<) = $(>) ;
- Depends $(<) : $(>[1]) ;
- MkDir $(>[1]) ;
- }
-}
-
-rule MkDir
-{
- # If dir exists, don't update it
- # Do this even for $(DOT).
-
- NOUPDATE $(<) ;
-
- if $(<) != $(DOT) && ! $($(<)-mkdir)
- {
- local s ;
-
- # Cheesy gate to prevent multiple invocations on same dir
- # MkDir1 has the actions
- # Arrange for jam dirs
-
- $(<)-mkdir = true ;
- MkDir1 $(<) ;
- Depends dirs : $(<) ;
-
- # Recursively make parent directories.
- # $(<:P) = $(<)'s parent, & we recurse until root
-
- s = $(<:P) ;
-
- if $(NT)
- {
- switch $(s)
- {
- case *: : s = ;
- case *:\\ : s = ;
- }
- }
-
- if $(s) && $(s) != $(<)
- {
- Depends $(<) : $(s) ;
- MkDir $(s) ;
- }
- else if $(s)
- {
- NOTFILE $(s) ;
- }
-
- }
-}
-
-rule Object
-{
- local h ;
-
- # locate object and search for source, if wanted
-
- Clean clean : $(<) ;
-
- MakeLocate $(<) : $(LOCATE_TARGET) ;
- SEARCH on $(>) = $(SEARCH_SOURCE) ;
-
- # Save HDRS for -I$(HDRS) on compile.
- # We shouldn't need -I$(SEARCH_SOURCE) as cc can find headers
- # in the .c file's directory, but generated .c files (from
- # yacc, lex, etc) are located in $(LOCATE_TARGET), possibly
- # different from $(SEARCH_SOURCE).
-
- HDRS on $(<) = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;
-
- # handle #includes for source: Jam scans for headers with
- # the regexp pattern $(HDRSCAN) and then invokes $(HDRRULE)
- # with the scanned file as the target and the found headers
- # as the sources. HDRSEARCH is the value of SEARCH used for
- # the found header files. Finally, if jam must deal with
- # header files of the same name in different directories,
- # they can be distinguished with HDRGRIST.
-
- # $(h) is where cc first looks for #include "foo.h" files.
- # If the source file is in a distant directory, look there.
- # Else, look in "" (the current directory).
-
- if $(SEARCH_SOURCE)
- {
- h = $(SEARCH_SOURCE) ;
- }
- else
- {
- h = "" ;
- }
-
- HDRRULE on $(>) = HdrRule ;
- HDRSCAN on $(>) = $(HDRPATTERN) ;
- HDRSEARCH on $(>) = $(HDRS) $(SUBDIRHDRS) $(h) $(STDHDRS) ;
- HDRGRIST on $(>) = $(HDRGRIST) ;
-
- # if source is not .c, generate .c with specific rule
-
- switch $(>:S)
- {
- case .asm : As $(<) : $(>) ;
- case .c : Cc $(<) : $(>) ;
- case .C : C++ $(<) : $(>) ;
- case .cc : C++ $(<) : $(>) ;
- case .cpp : C++ $(<) : $(>) ;
- case .f : Fortran $(<) : $(>) ;
- case .l : Cc $(<) : $(<:S=.c) ;
- Lex $(<:S=.c) : $(>) ;
- case .s : As $(<) : $(>) ;
- case .y : Cc $(<) : $(<:S=.c) ;
- Yacc $(<:S=.c) : $(>) ;
- case * : UserObject $(<) : $(>) ;
- }
-}
-
-
-rule ObjectCcFlags
-{
- CCFLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;
-}
-
-rule ObjectC++Flags
-{
- C++FLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;
-}
-
-rule ObjectHdrs
-{
- HDRS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;
-}
-
-rule Objects
-{
- local _i ;
-
- for _i in [ FGristFiles $(<) ]
- {
- Object $(_i:S=$(SUFOBJ)) : $(_i) ;
- DEPENDS obj : $(_i:S=$(SUFOBJ)) ;
- }
-}
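
The per-object tweak rules above (ObjectCcFlags, ObjectC++Flags, ObjectHdrs) operate on the gristed object name derived from the source. A small sketch of how they are typically combined with Objects, using invented file names and flags:

    Objects foo.c bar.c ;
    ObjectCcFlags foo.c : -DDEBUG ;          # extra compiler flags for foo.o only
    ObjectHdrs bar.c : /usr/local/include ;  # extra include directory for bar.o only
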
-
-rule RmTemps
-{
- TEMPORARY $(>) ;
-}
-
-rule Setuid
-{
- MODE on [ FAppendSuffix $(<) : $(SUFEXE) ] = 4711 ;
-}
-
-rule Shell
-{
- DEPENDS shell : $(<) ;
- DEPENDS $(<) : $(>) ;
- SEARCH on $(>) = $(SEARCH_SOURCE) ;
- MODE on $(<) = $(SHELLMODE) ;
- Clean clean : $(<) ;
- Chmod $(<) ;
-}
-
-rule SubDir
-{
- local _r _s ;
-
- #
- # SubDir TOP d1 [ ... ]
- #
- # This introduces a Jamfile that is part of a project tree
- # rooted at $(TOP). It (only once) includes the project-specific
- # rules file $(TOP)/Jamrules and then sets search & locate stuff.
- #
- # If the variable $(TOPRULES) is set (where TOP is the first arg
- # to SubDir), that file is included instead of $(TOP)/Jamrules.
- #
- # d1 ... are the directory elements that lead to this directory
- # from $(TOP). We construct the system dependent path from these
- # directory elements in order to set search & locate stuff.
- #
-
- if ! $($(<[1]))
- {
- if ! $(<[1])
- {
- EXIT SubDir syntax error ;
- }
-
- $(<[1]) = [ FSubDir $(<[2-]) ] ;
- }
-
- #
- # If $(TOP)/Jamrules hasn't been included, do so.
- #
-
- if ! $($(<[1])-included)
- {
- # Gated entry.
-
- $(<[1])-included = TRUE ;
-
- # File is $(TOPRULES) or $(TOP)/Jamrules.
-
- _r = $($(<[1])RULES) ;
-
- if ! $(_r)
- {
- _r = $(JAMRULES:R=$($(<[1]))) ;
- }
-
- # Include it.
-
- include $(_r) ;
- }
-
- # Get path to current directory from root using SubDir.
- # Save dir tokens for other potential uses.
-
- _s = [ FDirName $(<[2-]) ] ;
- SUBDIR = $(_s:R=$($(<[1]))) ;
- SUBDIR_TOKENS = $(<[2-]) ;
-
- # Now set up SEARCH_SOURCE, LOCATE_TARGET, SOURCE_GRIST
- # These can be reset if needed. For example, if the source
- # directory should not hold object files, LOCATE_TARGET can
- # subsequently be redefined.
-
- SEARCH_SOURCE = $(SUBDIR) ;
- LOCATE_SOURCE = $(ALL_LOCATE_TARGET) $(SUBDIR) ;
- LOCATE_TARGET = $(ALL_LOCATE_TARGET) $(SUBDIR) ;
- SOURCE_GRIST = [ FGrist $(<[2-]) ] ;
-
- # Reset per-directory ccflags, hdrs
-
- SUBDIRCCFLAGS = ;
- SUBDIRC++FLAGS = ;
- SUBDIRHDRS = ;
-}
-
-rule SubDirCcFlags
-{
- SUBDIRCCFLAGS += $(<) ;
-}
-
-rule SubDirC++Flags
-{
- SUBDIRC++FLAGS += $(<) ;
-}
-
-rule SubDirHdrs
-{
- SUBDIRHDRS += $(<) ;
-}
-
-rule SubInclude
-{
- local _s ;
-
- # That's
- # SubInclude TOP d1 [ d2 [ d3 [ d4 ] ] ]
- #
- # to include a subdirectory's Jamfile.
-
- if ! $($(<[1]))
- {
- EXIT Top level of source tree has not been set with $(<[1]) ;
- }
-
- _s = [ FDirName $(<[2-]) ] ;
-
- include $(JAMFILE:D=$(_s):R=$($(<[1]))) ;
-}
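
Putting the two rules above together: a Jamfile inside a project tree rooted at TOP usually starts with SubDir and descends with SubInclude. A sketch with invented directory and target names:

    # src/Jamfile (hypothetical)
    SubDir TOP src ;
    SubInclude TOP src lib ;

    # src/lib/Jamfile (hypothetical)
    SubDir TOP src lib ;
    Library libfoo : foo.c bar.c ;
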
-
-rule Undefines
-{
- UNDEFS on [ FAppendSuffix $(<) : $(SUFEXE) ] += $(UNDEFFLAG)$(>) ;
-}
-
-rule UserObject
-{
- EXIT "Unknown suffix on" $(>) "- see UserObject rule in Jamfile(5)." ;
-}
-
-rule Yacc
-{
- local _h ;
-
- _h = $(<:BS=.h) ;
-
- # Some places don't have a yacc.
-
- MakeLocate $(<) $(_h) : $(LOCATE_SOURCE) ;
-
- if $(YACC)
- {
- DEPENDS $(<) $(_h) : $(>) ;
- Yacc1 $(<) $(_h) : $(>) ;
- YaccMv $(<) $(_h) : $(>) ;
- Clean clean : $(<) $(_h) ;
- }
-
- # Make sure someone includes $(_h) else it will be a deadly independent
- # target.
- INCLUDES $(<) : $(_h) ;
-}
-
-#
-# Utility rules; no side effects on these.
-#
-
-rule FGrist
-{
- # Turn individual elements in $(<) into grist.
-
- local _g _i ;
-
- _g = $(<[1]) ;
-
- for _i in $(<[2-])
- {
- _g = $(_g)!$(_i) ;
- }
-
- return $(_g) ;
-}
-
-rule FGristFiles
-{
- if ! $(SOURCE_GRIST)
- {
- return $(<) ;
- }
- else
- {
- return $(<:G=$(SOURCE_GRIST)) ;
- }
-}
-
-rule FGristSourceFiles
-{
- # Produce source file names with grist in them,
- # if SOURCE_GRIST is set.
-
- # Leave header files alone, because they have a global
- # visibility.
-
- if ! $(SOURCE_GRIST)
- {
- return $(<) ;
- }
- else
- {
- local _i _o ;
-
- for _i in $(<)
- {
- switch $(_i)
- {
- case *.h : _o += $(_i) ;
- case * : _o += $(_i:G=$(SOURCE_GRIST)) ;
- }
- }
-
- return $(_o) ;
- }
-}
-
-rule FConcat
-{
- # Puts the variables together, removing spaces.
-
- local _t _r ;
-
- _r = $(<[1]) ;
-
- for _t in $(<[2-])
- {
- _r = $(_r)$(_t) ;
- }
-
- return $(_r) ;
-}
-
-rule FSubDir
-{
- local _i _d ;
-
- # If $(<) is the list of directory tokens from the root to the current
- # directory, compute the path (using ../../ etc) back to that root
- # directory and return it.
-
- if ! $(<[1])
- {
- _d = $(DOT) ;
- }
- else
- {
- _d = $(DOTDOT) ;
-
- for _i in $(<[2-])
- {
- _d = $(_d:R=$(DOTDOT)) ;
- }
- }
-
- return $(_d) ;
-}
-
-rule FDirName
-{
- local _s _i ;
-
- # Turn individual elements in $(<) into a usable path.
-
- if ! $(<)
- {
- _s = $(DOT) ;
- }
- else if $(VMS)
- {
- # This handles the following cases:
- # a -> [.a]
- # a b c -> [.a.b.c]
- # x: -> x:
- # x: a -> x:[a]
- # x:[a] b -> x:[a.b]
-
- switch $(<[1])
- {
- case *:* : _s = $(<[1]) ;
- case \\[*\\] : _s = $(<[1]) ;
- case * : _s = [.$(<[1])] ;
- }
-
- for _i in [.$(<[2-])]
- {
- _s = $(_i:R=$(_s)) ;
- }
- }
- else if $(MAC)
- {
- _s = $(DOT) ;
-
- for _i in $(<)
- {
- _s = $(_i:R=$(_s)) ;
- }
- }
- else
- {
- _s = $(<[1]) ;
-
- for _i in $(<[2-])
- {
- _s = $(_i:R=$(_s)) ;
- }
- }
-
- return $(_s) ;
-}
-
-
-rule _makeCommon
-{
- # strip common initial elements
-
- if $($(<)[1]) && $($(<)[1]) = $($(>)[1])
- {
- $(<) = $($(<)[2-]) ;
- $(>) = $($(>)[2-]) ;
- _makeCommon $(<) : $(>) ;
- }
-}
-
-
-rule FRelPath
-{
- local _l _r ;
-
- # first strip off common parts
-
- _l = $(<) ;
- _r = $(>) ;
-
- _makeCommon _l : _r ;
-
- # now make path to root and path down
-
- _l = [ FSubDir $(_l) ] ;
- _r = [ FDirName $(_r) ] ;
-
- # Concatenate and save
-
- # XXX This should be better
-
- if $(_r) = $(DOT) {
- return $(_l) ;
- } else {
- return $(_r:R=$(_l)) ;
- }
-}
-
-rule FAppendSuffix
-{
- # E.g., "FAppendSuffix yacc lex foo.bat : $(SUFEXE) ;"
- # returns (yacc,lex,foo.bat) on Unix and
- # (yacc.exe,lex.exe,foo.bat) on NT.
-
- if $(>)
- {
- local _i _o ;
-
- for _i in $(<)
- {
- if $(_i:S)
- {
- _o += $(_i) ;
- }
- else
- {
- _o += $(_i:S=$(>)) ;
- }
- }
- return $(_o) ;
- }
- else
- {
- return $(<) ;
- }
-}
-
-rule unmakeDir
-{
- if $(>[1]:D) && $(>[1]:D) != $(>[1]) && $(>[1]:D) != \\\\
- {
- unmakeDir $(<) : $(>[1]:D) $(>[1]:BS) $(>[2-]) ;
- }
- else
- {
- $(<) = $(>) ;
- }
-}
-
-
-rule FConvertToSlashes
-{
- local _d _s _i ;
-
- unmakeDir _d : $(<) ;
-
- _s = $(_d[1]) ;
- for _i in $(_d[2-])
- {
- _s = $(_s)/$(_i) ;
- }
- return $(_s) ;
-}
-
-
-#
-# Actions
-#
-
-#
-# First the defaults
-#
-
-actions updated together piecemeal Archive
-{
- $(AR) $(<) $(>)
-}
-
-actions As
-{
- $(AS) $(ASFLAGS) -I$(HDRS) -o $(<) $(>)
-}
-
-actions C++
-{
- $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)
-}
-
-actions Cc
-{
- $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)
-}
-
-actions Chgrp
-{
- $(CHGRP) $(GROUP) $(<)
-}
-
-actions Chmod1
-{
- $(CHMOD) $(MODE) $(<)
-}
-
-actions Chown
-{
- $(CHOWN) $(OWNER) $(<)
-}
-
-actions piecemeal together existing Clean
-{
- $(RM) $(>)
-}
-
-actions File
-{
- $(CP) $(>) $(<)
-}
-
-actions GenFile1
-{
- $(>[1]) $(<) $(>[2-])
-}
-
-actions Fortran
-{
- $(FORTRAN) $(FORTRANFLAGS) -o $(<) $(>)
-}
-
-actions HardLink
-{
- $(RM) $(<) && $(LN) $(>) $(<)
-}
-
-actions Install
-{
- $(CP) $(>) $(<)
-}
-
-actions Lex
-{
- $(LEX) $(>)
-}
-
-actions LexMv
-{
- $(MV) lex.yy.c $(<)
-}
-
-actions Link bind NEEDLIBS
-{
- $(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
-}
-
-actions MkDir1
-{
- $(MKDIR) $(<)
-}
-
-actions together Ranlib
-{
- $(RANLIB) $(<)
-}
-
-actions quietly updated piecemeal together RmTemps
-{
- $(RM) $(>)
-}
-
-actions Shell
-{
- $(AWK) '
- NR == 1 { print "$(SHELLHEADER)" }
- NR == 1 && /^[#:]/ { next }
- /^##/ { next }
- { print }
- ' < $(>) > $(<)
-}
-
-actions Yacc1
-{
- $(YACC) $(YACCFLAGS) $(>)
-}
-
-actions YaccMv
-{
- $(MV) $(YACCFILES).c $(<[1])
- $(MV) $(YACCFILES).h $(<[2])
-}
-
-#
-# RELOCATE - for compilers with broken -o flags
-#
-
-if $(RELOCATE)
-{
- actions C++
- {
- $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) $(>)
- }
-
- actions Cc
- {
- $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) $(>)
- }
-
- actions ignore CcMv
- {
- [ $(<) != $(>:BS=$(SUFOBJ)) ] && $(MV) $(>:BS=$(SUFOBJ)) $(<)
- }
-}
-
-#
-# NOARUPDATE - can't update an archive
-#
-
-if $(NOARUPDATE)
-{
- actions Archive
- {
- $(AR) $(<) $(>)
- }
-}
-
-#
-# NT specific actions
-#
-
-if $(NT)
-{
- if $(TOOLSET) = VISUALC || $(TOOLSET) = VC7 || $(TOOLSET) = INTELC
- {
- actions updated together piecemeal Archive
- {
- if exist $(<) set _$(<:B)_=$(<)
- $(AR) /out:$(<) %_$(<:B)_% $(>)
- }
-
- actions As
- {
- $(AS) /Ml /p /v /w2 $(>) $(<) ,nul,nul;
- }
-
- actions Cc
- {
- $(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) $(>)
- }
-
- actions C++
- {
- $(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) /Tp$(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
- }
- }
- else if $(TOOLSET) = VISUALC16
- {
- actions updated together piecemeal Archive
- {
- $(AR) $(<) -+$(>)
- }
-
- actions Cc
- {
- $(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) $(>)
- }
-
- actions C++
- {
- $(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /Tp$(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
- }
- }
- else if $(TOOLSET) = BORLANDC
- {
- actions updated together piecemeal Archive
- {
- $(AR) $(<) -+$(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) -e$(<) $(LINKFLAGS) $(UNDEFS) -L$(LINKLIBS) $(NEEDLIBS) $(>)
- }
-
- actions C++
- {
- $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
- }
-
- actions Cc
- {
- $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
- }
-
- }
- else if $(TOOLSET) = MINGW
- {
- actions together piecemeal Archive
- {
- $(AR) $(<) $(>:T)
- }
-
- actions Cc
- {
- $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
- }
-
- actions C++
- {
- $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
- }
- }
- else if $(TOOLSET) = WATCOM
- {
- actions together piecemeal Archive
- {
- $(AR) $(<) +-$(>)
- }
-
- actions Cc
- {
- $(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
- }
-
- actions C++
- {
- $(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
- }
-
- actions Shell
- {
- $(CP) $(>) $(<)
- }
- }
- else if $(TOOLSET) = LCC
- {
- actions together piecemeal Archive
- {
- $(AR) /out:$(<) $(>)
- }
-
- actions Cc
- {
- $(CC) $(CCFLAGS) $(OPTIM) -Fo$(<) -I$(HDRS) $(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
- }
-
- actions Shell
- {
- $(CP) $(>) $(<)
- }
- }
-}
-
-#
-# OS2 specific actions
-#
-
-else if $(OS2)
-{
- if $(TOOLSET) = WATCOM
- {
- actions together piecemeal Archive
- {
- $(AR) $(<) +-$(>)
- }
-
- actions Cc
- {
- $(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
- }
-
- actions C++
- {
- $(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
- }
-
- actions Shell
- {
- $(CP) $(>) $(<)
- }
- }
- else if $(TOOLSET) = EMX
- {
- actions together piecemeal Archive
- {
- $(AR) $(<) $(>:T)
- }
-
- actions Cc
- {
- $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
- }
-
- actions C++
- {
- $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
- }
- }
-}
-
-#
-# VMS specific actions
-#
-
-else if $(VMS)
-{
- actions updated together piecemeal Archive
- {
- lib/replace $(<) $(>[1]) ,$(>[2-])
- }
-
- actions Cc
- {
- $(CC)/obj=$(<) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>)
- }
-
- actions C++
- {
- $(C++)/obj=$(<) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>)
- }
-
- actions piecemeal together existing Clean
- {
- $(RM) $(>[1]);* ,$(>[2-]);*
- }
-
- actions together quietly CreLib
- {
- if f$search("$(<)") .eqs. "" then lib/create $(<)
- }
-
- actions GenFile1
- {
- mcr $(>[1]) $(<) $(>[2-])
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK)/exe=$(<) $(LINKFLAGS) $(>[1]) ,$(>[2-]) ,$(NEEDLIBS)/lib ,$(LINKLIBS)
- }
-
- actions quietly updated piecemeal together RmTemps
- {
- $(RM) $(>[1]);* ,$(>[2-]);*
- }
-
- actions Shell
- {
- $(CP) $(>) $(<)
- }
-}
-
-#
-# Mac specific actions
-#
-
-else if $(MAC)
-{
- actions together Archive
- {
- $(LINK) -library -o $(<) $(>)
- }
-
- actions Cc
- {
- set -e MWCincludes $(MACINC)
- $(CC) -o $(<) $(CCFLAGS) $(OPTIM) $(>)
- }
-
- actions C++
- {
- set -e MWCincludes $(MACINC)
- $(CC) -o $(<) $(C++FLAGS) $(OPTIM) $(>)
- }
-
- actions Link bind NEEDLIBS
- {
- $(LINK) -o $(<) $(LINKFLAGS) $(>) $(NEEDLIBS) "$(LINKLIBS)"
- }
-}
-
-#
-# Backwards compatibility with jam 1, where rules were uppercased.
-#
-
-rule BULK { Bulk $(<) : $(>) ; }
-rule FILE { File $(<) : $(>) ; }
-rule HDRRULE { HdrRule $(<) : $(>) ; }
-rule INSTALL { Install $(<) : $(>) ; }
-rule LIBRARY { Library $(<) : $(>) ; }
-rule LIBS { LinkLibraries $(<) : $(>) ; }
-rule LINK { Link $(<) : $(>) ; }
-rule MAIN { Main $(<) : $(>) ; }
-rule SETUID { Setuid $(<) ; }
-rule SHELL { Shell $(<) : $(>) ; }
-rule UNDEFINES { Undefines $(<) : $(>) ; }
-
-# Old INSTALL* didn't take dest directory.
-
-rule INSTALLBIN { InstallBin $(BINDIR) : $(<) ; }
-rule INSTALLLIB { InstallLib $(LIBDIR) : $(<) ; }
-rule INSTALLMAN { InstallMan $(MANDIR) : $(<) ; }
-
-# Compatibility with jam 2.2.
-
-rule addDirName { $(<) += [ FDirName $(>) ] ; }
-rule makeDirName { $(<) = [ FDirName $(>) ] ; }
-rule makeGristedName { $(<) = [ FGristSourceFiles $(>) ] ; }
-rule makeRelPath { $(<[1]) = [ FRelPath $(<[2-]) : $(>) ] ; }
-rule makeSuffixed { $(<[1]) = [ FAppendSuffix $(>) : $(<[2]) ] ; }
-
-#
-# Now include the user's Jamfile.
-#
-
-{
- if $(JAMFILE) { include $(JAMFILE) ; }
-}
-
-}
diff --git a/jam-files/engine/boost-jam.spec b/jam-files/engine/boost-jam.spec
deleted file mode 100644
index bc572fc9..00000000
--- a/jam-files/engine/boost-jam.spec
+++ /dev/null
@@ -1,64 +0,0 @@
-Name: boost-jam
-Version: 3.1.19
-Summary: Build tool
-Release: 1
-Source: %{name}-%{version}.tgz
-
-License: Boost Software License, Version 1.0
-Group: Development/Tools
-URL: http://www.boost.org
-Packager: Rene Rivera <grafik@redshift-software.com>
-BuildRoot: /var/tmp/%{name}-%{version}.root
-
-%description
-Boost Jam is a build tool based on FTJam, which in turn is based on
-Perforce Jam. It contains significant improvements made to facilitate
-its use in the Boost Build System, but should be backward compatible
-with Perforce Jam.
-
-Authors:
- Perforce Jam : Christopher Seiwald
- FT Jam : David Turner
- Boost Jam : David Abrahams
-
-Copyright:
- /+\
- +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- \+/
- License is hereby granted to use this software and distribute it
- freely, as long as this copyright notice is retained and modifications
- are clearly marked.
- ALL WARRANTIES ARE HEREBY DISCLAIMED.
-
-Also:
- Copyright 2001-2006 David Abrahams.
- Copyright 2002-2006 Rene Rivera.
- Copyright 2003-2006 Vladimir Prus.
-
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-%prep
-%setup -n %{name}-%{version}
-
-%build
-LOCATE_TARGET=bin ./build.sh $BOOST_JAM_TOOLSET
-
-%install
-rm -rf $RPM_BUILD_ROOT
-mkdir -p $RPM_BUILD_ROOT%{_bindir}
-mkdir -p $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
-install -m 755 bin/bjam $RPM_BUILD_ROOT%{_bindir}/bjam-%{version}
-ln -sf bjam-%{version} $RPM_BUILD_ROOT%{_bindir}/bjam
-cp -R *.html *.png *.css LICENSE*.txt images jam $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
-
-find $RPM_BUILD_ROOT -name CVS -type d -exec rm -r {} \;
-
-%files
-%defattr(-,root,root)
-%attr(755,root,root) /usr/bin/*
-%doc %{_docdir}/%{name}-%{version}
-
-
-%clean
-rm -rf $RPM_BUILD_ROOT
diff --git a/jam-files/engine/boost-no-inspect b/jam-files/engine/boost-no-inspect
deleted file mode 100644
index 8a06f3a7..00000000
--- a/jam-files/engine/boost-no-inspect
+++ /dev/null
@@ -1 +0,0 @@
-this is really out of our hands, so tell inspect to ignore this directory \ No newline at end of file
diff --git a/jam-files/engine/build.bat b/jam-files/engine/build.bat
deleted file mode 100644
index f927b769..00000000
--- a/jam-files/engine/build.bat
+++ /dev/null
@@ -1,532 +0,0 @@
-@ECHO OFF
-
-REM ~ Copyright 2002-2007 Rene Rivera.
-REM ~ Distributed under the Boost Software License, Version 1.0.
-REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-setlocal
-goto Start
-
-
-:Set_Error
-color 00
-goto :eof
-
-
-:Clear_Error
-ver >nul
-goto :eof
-
-
-:Error_Print
-REM Output an error message and set the errorlevel to indicate failure.
-setlocal
-ECHO ###
-ECHO ### %1
-ECHO ###
-ECHO ### You can specify the toolset as the argument, i.e.:
-ECHO ### .\build.bat msvc
-ECHO ###
-ECHO ### Toolsets supported by this script are: borland, como, gcc, gcc-nocygwin,
-ECHO ### intel-win32, metrowerks, mingw, msvc, vc7, vc8, vc9, vc10
-ECHO ###
-call :Set_Error
-endlocal
-goto :eof
-
-
-:Test_Path
-REM Tests for the presence of the given file (executable) in the directories in
-REM the PATH environment variable. Additionally sets FOUND_PATH to the path of
-REM the found file.
-call :Clear_Error
-setlocal
-set test=%~$PATH:1
-endlocal
-if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1
-goto :eof
-
-
-:Test_Option
-REM Tests whether the given string is in the form of an option: "--*"
-call :Clear_Error
-setlocal
-set test=%1
-if not defined test (
- call :Set_Error
- goto Test_Option_End
-)
-set test=###%test%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-if not "-" == "%test:~1,1%" call :Set_Error
-:Test_Option_End
-endlocal
-goto :eof
-
-
-:Test_Empty
-REM Tests whether the given string is empty
-call :Clear_Error
-setlocal
-set test=%1
-if not defined test (
- call :Clear_Error
- goto Test_Empty_End
-)
-set test=###%test%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-if not "" == "%test%" call :Set_Error
-:Test_Empty_End
-endlocal
-goto :eof
-
-
-:Call_If_Exists
-if EXIST %1 call %*
-goto :eof
-
-
-:Guess_Toolset
-REM Try and guess the toolset to bootstrap the build with...
-REM Sets BOOST_JAM_TOOLSET to the first found toolset.
-REM May also set BOOST_JAM_TOOLSET_ROOT to the
-REM location of the found toolset.
-
-call :Clear_Error
-call :Test_Empty %ProgramFiles%
-if not errorlevel 1 set ProgramFiles=C:\Program Files
-
-call :Clear_Error
-if NOT "_%VS100COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc10"
- set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 10.0\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc10"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 10.0\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS90COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc9"
- set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 9.0\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc9"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 9.0\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS80COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc8"
- set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 8\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc8"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 8\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS71COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%\..\..\VC7\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VCINSTALLDIR%_" == "__" (
- REM %VCINSTALLDIR% is also set for VC9 (and probably VC8)
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%VCINSTALLDIR%\VC7\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET\VC7\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET\VC7\"
- goto :eof)
-call :Clear_Error
-if NOT "_%MSVCDir%_" == "__" (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio\VC98\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio\VC98\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual C++\VC98\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual C++\VC98\"
- goto :eof)
-call :Clear_Error
-call :Test_Path cl.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- goto :eof)
-call :Clear_Error
-call :Test_Path vcvars32.bat
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=msvc"
- call "%FOUND_PATH%VCVARS32.BAT"
- set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
- goto :eof)
-call :Clear_Error
-if EXIST "C:\Borland\BCC55\Bin\bcc32.exe" (
- set "BOOST_JAM_TOOLSET=borland"
- set "BOOST_JAM_TOOLSET_ROOT=C:\Borland\BCC55\"
- goto :eof)
-call :Clear_Error
-call :Test_Path bcc32.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=borland"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- goto :eof)
-call :Clear_Error
-call :Test_Path icl.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=intel-win32"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- goto :eof)
-call :Clear_Error
-if EXIST "C:\MinGW\bin\gcc.exe" (
- set "BOOST_JAM_TOOLSET=mingw"
- set "BOOST_JAM_TOOLSET_ROOT=C:\MinGW\"
- goto :eof)
-call :Clear_Error
-if NOT "_%CWFolder%_" == "__" (
- set "BOOST_JAM_TOOLSET=metrowerks"
- set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
- goto :eof )
-call :Clear_Error
-call :Test_Path mwcc.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=metrowerks"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\..\"
- goto :eof)
-call :Clear_Error
-call :Error_Print "Could not find a suitable toolset."
-goto :eof
-
-
-:Guess_Yacc
-REM Tries to find bison or yacc in common places so we can build the grammar.
-call :Clear_Error
-call :Test_Path yacc.exe
-if not errorlevel 1 (
- set "YACC=yacc -d"
- goto :eof)
-call :Clear_Error
-call :Test_Path bison.exe
-if not errorlevel 1 (
- set "YACC=bison -d --yacc"
- goto :eof)
-call :Clear_Error
-if EXIST "C:\Program Files\GnuWin32\bin\bison.exe" (
- set "YACC=C:\Program Files\GnuWin32\bin\bison.exe" -d --yacc
- goto :eof)
-call :Clear_Error
-call :Error_Print "Could not find Yacc to build the Jam grammar."
-goto :eof
-
-
-:Start
-set BOOST_JAM_TOOLSET=
-set BOOST_JAM_ARGS=
-
-REM If no arguments guess the toolset;
-REM or if first argument is an option guess the toolset;
-REM otherwise the argument is the toolset to use.
-call :Clear_Error
-call :Test_Empty %1
-if not errorlevel 1 (
- call :Guess_Toolset
- if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
-)
-
-call :Clear_Error
-call :Test_Option %1
-if not errorlevel 1 (
- call :Guess_Toolset
- if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
-)
-
-call :Clear_Error
-set BOOST_JAM_TOOLSET=%1
-shift
-goto Setup_Toolset
-
-
-:Setup_Toolset
-REM Set up the toolset command and options. This bit of code needs to be
-REM flexible enough to handle both the case where the toolset was guessed
-REM and found, and the case where the toolset was indicated in the command
-REM arguments.
-REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way
-REM because in BAT variables are substituted only once during a single
-REM command. A complete "if ... ( commands ) else ( commands )"
-REM is a single command, even though it's in multiple lines here.
-:Setup_Args
-call :Clear_Error
-call :Test_Empty %1
-if not errorlevel 1 goto Config_Toolset
-call :Clear_Error
-call :Test_Option %1
-if errorlevel 1 (
- set BOOST_JAM_ARGS=%BOOST_JAM_ARGS% %1
- shift
- goto Setup_Args
-)
-:Config_Toolset
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_metrowerks_" goto Skip_METROWERKS
-if NOT "_%CWFolder%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
- )
-set "PATH=%BOOST_JAM_TOOLSET_ROOT%Other Metrowerks Tools\Command Line Tools;%PATH%"
-set "BOOST_JAM_CC=mwcc -runtime ss -cwd include -DNT -lkernel32.lib -ladvapi32.lib -luser32.lib"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_METROWERKS
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_msvc_" goto Skip_MSVC
-if NOT "_%MSVCDir%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
- )
-call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
-if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- )
-set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_MSVC
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc7_" goto Skip_VC7
-if NOT "_%VS71COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%..\..\VC7\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC7
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc8_" goto Skip_VC8
-if NOT "_%VS80COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC8
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc9_" goto Skip_VC9
-if NOT "_%VS90COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC9
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc10_" goto Skip_VC10
-if NOT "_%VS100COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC10
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_borland_" goto Skip_BORLAND
-if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- call :Test_Path bcc32.exe )
-if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- ) )
-if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%Bin;%PATH%"
- )
-set "BOOST_JAM_CC=bcc32 -WC -w- -q -I%BOOST_JAM_TOOLSET_ROOT%Include -L%BOOST_JAM_TOOLSET_ROOT%Lib /DNT -nbootstrap"
-set "BOOST_JAM_OPT_JAM=-ejam0"
-set "BOOST_JAM_OPT_MKJAMBASE=-emkjambasejam0"
-set "BOOST_JAM_OPT_YYACC=-eyyacc0"
-set "_known_=1"
-:Skip_BORLAND
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_como_" goto Skip_COMO
-set "BOOST_JAM_CC=como -DNT"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_COMO
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc_" goto Skip_GCC
-set "BOOST_JAM_CC=gcc -DNT"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_GCC
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc-nocygwin_" goto Skip_GCC_NOCYGWIN
-set "BOOST_JAM_CC=gcc -DNT -mno-cygwin"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_GCC_NOCYGWIN
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_intel-win32_" goto Skip_INTEL_WIN32
-set "BOOST_JAM_CC=icl -DNT /nologo kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_INTEL_WIN32
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_mingw_" goto Skip_MINGW
-if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- )
-set "BOOST_JAM_CC=gcc -DNT"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_MINGW
-call :Clear_Error
-if "_%_known_%_" == "__" (
- call :Error_Print "Unknown toolset: %BOOST_JAM_TOOLSET%"
-)
-if errorlevel 1 goto Finish
-
-echo ###
-echo ### Using '%BOOST_JAM_TOOLSET%' toolset.
-echo ###
-
-set YYACC_SOURCES=yyacc.c
-set MKJAMBASE_SOURCES=mkjambase.c
-set BJAM_SOURCES=
-set BJAM_SOURCES=%BJAM_SOURCES% command.c compile.c debug.c execnt.c expand.c filent.c glob.c hash.c
-set BJAM_SOURCES=%BJAM_SOURCES% hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c
-set BJAM_SOURCES=%BJAM_SOURCES% newstr.c option.c output.c parse.c pathunix.c regexp.c
-set BJAM_SOURCES=%BJAM_SOURCES% rules.c scan.c search.c subst.c timestamp.c variable.c modules.c
-set BJAM_SOURCES=%BJAM_SOURCES% strings.c filesys.c builtins.c md5.c pwd.c class.c w32_getreg.c native.c
-set BJAM_SOURCES=%BJAM_SOURCES% modules/set.c modules/path.c modules/regex.c
-set BJAM_SOURCES=%BJAM_SOURCES% modules/property-set.c modules/sequence.c modules/order.c
-
-set BJAM_UPDATE=
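REM Note on the ###-wrapping used below: %1 may arrive quoted or unquoted,
REM so the value is wrapped in ### sentinels and the patterns "###, ###"
REM and ### are then removed, which strips any surrounding quotes before
REM the argument is compared with --update.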
-:Check_Update
-call :Test_Empty %1
-if not errorlevel 1 goto Check_Update_End
-call :Clear_Error
-setlocal
-set test=%1
-set test=###%test%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-if "%test%" == "--update" set BJAM_UPDATE=update
-endlocal
-shift
-if not "_%BJAM_UPDATE%_" == "_update_" goto Check_Update
-:Check_Update_End
-if "_%BJAM_UPDATE%_" == "_update_" (
- if not exist ".\bootstrap\jam0.exe" (
- set BJAM_UPDATE=
- )
-)
-
-@echo ON
-@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Bootstrap
-if exist bootstrap rd /S /Q bootstrap
-md bootstrap
-@if not exist jamgram.y goto Bootstrap_GrammarPrep
-@if not exist jamgramtab.h goto Bootstrap_GrammarPrep
-@goto Skip_GrammarPrep
-:Bootstrap_GrammarPrep
-%BOOST_JAM_CC% %BOOST_JAM_OPT_YYACC% %YYACC_SOURCES%
-@if not exist ".\bootstrap\yyacc0.exe" goto Skip_GrammarPrep
-.\bootstrap\yyacc0 jamgram.y jamgramtab.h jamgram.yy
-:Skip_GrammarPrep
-@if not exist jamgram.c goto Bootstrap_GrammarBuild
-@if not exist jamgram.h goto Bootstrap_GrammarBuild
-@goto Skip_GrammarBuild
-:Bootstrap_GrammarBuild
-@echo OFF
-if "_%YACC%_" == "__" (
- call :Guess_Yacc
-)
-if errorlevel 1 goto Finish
-@echo ON
-%YACC% jamgram.y
-@if errorlevel 1 goto Finish
-del /f jamgram.c
-rename y.tab.c jamgram.c
-del /f jamgram.h
-rename y.tab.h jamgram.h
-:Skip_GrammarBuild
-@echo ON
-@if exist jambase.c goto Skip_Jambase
-%BOOST_JAM_CC% %BOOST_JAM_OPT_MKJAMBASE% %MKJAMBASE_SOURCES%
-@if not exist ".\bootstrap\mkjambase0.exe" goto Skip_Jambase
-.\bootstrap\mkjambase0 jambase.c Jambase
-:Skip_Jambase
-%BOOST_JAM_CC% %BOOST_JAM_OPT_JAM% %BJAM_SOURCES%
-:Skip_Bootstrap
-@if not exist ".\bootstrap\jam0.exe" goto Skip_Jam
-@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Clean
-.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " clean
-:Skip_Clean
-@set args=%*
-@echo OFF
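REM The loop below peels characters off the front of the captured command
REM line until it is empty or the (de-quoted) remainder starts with "-",
REM so only the trailing options, and not the leading toolset name, are
REM forwarded to jam0 below.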
-:Set_Args
-setlocal
-call :Test_Empty %args%
-if not errorlevel 1 goto Set_Args_End
-set test=###%args:~0,2%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-set test=%test:~0,1%
-if "-" == "%test%" goto Set_Args_End
-endlocal
-set args=%args:~1%
-goto Set_Args
-:Set_Args_End
-@echo ON
-.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " %args%
-:Skip_Jam
-
-:Finish
diff --git a/jam-files/engine/build.jam b/jam-files/engine/build.jam
deleted file mode 100644
index 266b07a1..00000000
--- a/jam-files/engine/build.jam
+++ /dev/null
@@ -1,1070 +0,0 @@
-#~ Copyright 2002-2007 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Clean env vars of any "extra" empty values.
-for local v in ARGV CC CFLAGS LIBS
-{
- local values ;
- for local x in $($(v))
- {
- if $(x) != ""
- {
- values += $(x) ;
- }
- }
- $(v) = $(values) ;
-}
-
-# Platform related specifics.
-if $(OS) = NT { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; }
-else if $(OS) = OS2 { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; }
-else if $(OS) = VMS { rule .path { return "[.$(<:J=/)]" ; } }
-else if $(OS) = MAC { rule .path { return ":$(<:J=\:)" ; } }
-else { rule .path { return "$(<:J=/)" ; } }
-if $(OS) = VMS { . = "_" ; }
-else { . = "." ; }
-./ ?= "" ;
-
-# Info about what we are building.
-_VERSION_ = 3 1 19 ;
-NAME = boost-jam ;
-VERSION = $(_VERSION_:J=$(.)) ;
-RELEASE = 1 ;
-LICENSE = LICENSE_1_0 ;
-
-# Generate development debug binaries?
-if --debug in $(ARGV)
-{
- debug = true ;
-}
-
-if --profile in $(ARGV)
-{
- profile = true ;
-}
-
-# Attempt to generate and/or build the grammar?
-if --grammar in $(ARGV)
-{
- grammar = true ;
-}
-
-# Do we need to add a default build type argument?
-if ! ( --release in $(ARGV) ) &&
- ! ( --debug in $(ARGV) ) &&
- ! ( --profile in $(ARGV) )
-{
- ARGV += --release ;
-}
-
-# Enable, and configure, Python hooks.
-with-python = ;
-python-location = [ MATCH --with-python=(.*) : $(ARGV) ] ;
-if $(python-location)
-{
- with-python = true ;
-}
-if $(with-python)
-{
- if $(OS) = NT
- {
- --python-include = [ .path $(python-location) include ] ;
- --python-lib = ;
- for local v in 26 25 24 23 22
- {
- --python-lib ?=
- [ GLOB [ .path $(python-location) libs ] : "python$(v).lib" ]
- [ GLOB $(python-location) [ .path $(python-location) libs ]
- $(Path) $(PATH) $(path) : "python$(v).dll" ]
- ;
- if ! $(--python-lib[2])
- {
- --python-lib = ;
- }
- }
- --python-lib = $(--python-lib[1]) ;
- }
- else if $(OS) = MACOSX
- {
- --python-include = [ .path $(python-location) Headers ] ;
- --python-lib = $(python-location) Python ;
- }
- else
- {
- --python-include = ;
- --python-lib = ;
- for local v in 2.6 2.5 2.4 2.3 2.2
- {
- local inc = [ GLOB [ .path $(python-location) include ] : python$(v) ] ;
- local lib = [ GLOB [ .path $(python-location) lib ] : libpython$(v)* ] ;
- if $(inc) && $(lib)
- {
- --python-include ?= $(inc) ;
- --python-lib ?= $(lib[1]:D) python$(v) ;
- }
- }
- }
-}
-
-# Boehm GC?
-if --gc in $(ARGV)
-{
- --boehm-gc = true ;
-}
-if $(--boehm-gc)
-{
- --extra-include += [ .path [ PWD ] "boehm_gc" "include" ] ;
-}
-
-# Duma?
-if --duma in $(ARGV)
-{
- --duma = true ;
-}
-if $(--duma)
-{
- --extra-include += [ .path [ PWD ] "duma" ] ;
-}
-
-# An explicit root for the toolset? (trim spaces)
-toolset-root = [ MATCH --toolset-root=(.*) : $(ARGV) ] ;
-{
- local t = [ MATCH "[ ]*(.*)" : $(toolset-root:J=" ") ] ;
- toolset-root = ;
- while $(t)
- {
- t = [ MATCH "([^ ]+)([ ]*)(.*)" : $(t) ] ;
- toolset-root += $(t[1]) ;
- if $(t[3]) { toolset-root += $(t[2]) ; }
- t = $(t[3]) ;
- }
- toolset-root = $(toolset-root:J="") ;
-}
-
-# Configure the implemented toolsets. These are minimal
-# commands and options to compile the full Jam. When
-# adding new toolsets make sure to add them to the
-# "known" list also.
-
-rule toolset ( name command .type ? : opt.out + : opt.define * : flags * : linklibs * )
-{
- .type ?= "" ;
- tool.$(name)$(.type).cc ?= $(command) ;
- tool.$(name)$(.type).opt.out ?= $(opt.out) ;
- tool.$(name)$(.type).opt.define ?= $(opt.define) ;
- tool.$(name)$(.type).flags ?= $(flags) ;
- tool.$(name)$(.type).linklibs ?= $(linklibs) ;
- if ! $(name) in $(toolsets) { toolsets += $(name) ; }
-}
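# Purely as an illustration (this is not a supported toolset), a
# hypothetical "mycc" compiler driver would be registered through the
# rule above along the same lines as the entries below:
#~ toolset mycc mycc : "-o " : -D
#~     : [ opt --release : -O2 ] [ opt --debug : -g ]
#~     -I$(--python-include) -I$(--extra-include)
#~     : -L$(--python-lib[1]) -l$(--python-lib[2]) ;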
-
-rule if-os ( os + : yes-opt * : no-opt * )
- { if $(os) in $(OS) { return $(yes-opt) ; } else { return $(no-opt) ; } }
-
-rule opt ( type : yes-opt * : no-opt * )
- { if $(type) in $(ARGV) { return $(yes-opt) ; } else { return $(no-opt) ; } }
-
-## HP-UX aCC compiler
-toolset acc cc : "-o " : -D
- : -Ae
- [ opt --release : -s -O3 ]
- [ opt --debug : -g -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Borland C++ 5.5.x
-toolset borland bcc32 : -e -n : /D
- : -WC -w- -q "-I$(toolset-root)Include" "-L$(toolset-root)Lib"
- [ opt --release : -O2 -vi -w-inl ]
- [ opt --debug : -v -Od -vi- ]
- -I$(--python-include) -I$(--extra-include)
- : $(--python-lib[1]) ;
-## Generic Unix cc
-if ! $(CC) { CC = cc ; }
-toolset cc $(CC) : "-o " : -D
- : $(CFLAGS)
- [ opt --release : -s -O ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : $(LIBS) -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Comeau C/C++ 4.x
-toolset como como : "-o " : -D
- : --c
- [ opt --release : --inlining ]
- [ opt --debug : --no_inlining ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Clang Linux 2.8+
-toolset clang clang : "-o " : -D
- : -Wno-unused -Wno-format
- [ opt --release : -Os ]
- [ opt --debug : -g -O0 -fno-inline ]
- [ opt --profile : -finline-functions -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## MacOSX Darwin, using GCC 2.9.x, 3.x
-toolset darwin cc : "-o " : -D
- :
- [ opt --release : -Wl,-x -O3 -finline-functions ]
- [ opt --debug : -g -O0 -fno-inline -pg ]
- [ opt --profile : -Wl,-x -O3 -finline-functions -g -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## GCC 2.x, 3.x, 4.x
-toolset gcc gcc : "-o " : -D
- : -pedantic -fno-strict-aliasing
- [ opt --release : [ opt --symbols : -g : -s ] -O3 ]
- [ opt --debug : -g -O0 -fno-inline ]
- -I$(--python-include) -I$(--extra-include) -Wno-long-long
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## GCC 2.x, 3.x on CYGWIN but without cygwin1.dll
-toolset gcc-nocygwin gcc : "-o " : -D
- : -s -O3 -mno-cygwin
- [ opt --release : -finline-functions ]
- [ opt --debug : -s -O3 -fno-inline -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Intel C/C++ for Darwin
-toolset intel-darwin icc : "-o " : -D
- :
- [ opt --release : -O3 ]
- [ opt --debug : -g -O0 -p ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Intel C/C++ for Linux
-toolset intel-linux icc : "-o " : -D
- :
- [ opt --release : -Xlinker -s -O3 ]
- [ opt --debug : -g -O0 -p ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Intel C/C++ for Win32
-toolset intel-win32 icl : /Fe : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /GB ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## KCC ?
-toolset kcc KCC : "-o " : -D
- :
- [ opt --release : -s +K2 ]
- [ opt --debug : -g +K0 ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Borland Kylix
-toolset kylix bc++ : -o : -D
- : -tC -q
- [ opt --release : -O2 -vi -w-inl ]
- [ opt --debug : -v -Od -vi- ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Metrowerks CodeWarrior 8.x
-{
-    # Even though CW can compile all files at once, it crashes when it tries to do so on the bjam sources.
- local mwcc = ; if $(OS) = NT { mwcc = mwcc ; } else { mwcc = mwc$(OSPLAT:L) ; }
- mwcc ?= mwcc ;
- toolset metrowerks $(mwcc) : "-o " : -D
- : -c -lang c -subsystem console -cwd include
- [ opt --release : -runtime ss -opt full -inline all ]
- [ opt --debug : -runtime ssd -opt none -inline off ]
- -I$(--python-include) -I$(--extra-include) ;
- toolset metrowerks $(mwcc) .link : "-o " :
- : -subsystem console -lkernel32.lib -ladvapi32.lib -luser32.lib
- [ opt --release : -runtime ss ]
- [ opt --debug : -runtime ssd ]
- : $(--python-lib[1]) ;
-}
-## MINGW GCC
-toolset mingw gcc : "-o " : -D
- :
- [ opt --release : -s -O3 -finline-functions ]
- [ opt --debug : -g -O0 -fno-inline -pg ]
- -I$(--python-include) -I$(--extra-include)
- : $(--python-lib[2]) ;
-## MIPS Pro
-toolset mipspro cc : "-o " : -D
- :
- [ opt --release : -s -O3 -g0 -INLINE:none ]
- [ opt --debug : -g -O0 -INLINE ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Microsoft Visual Studio C++ 6.x
-toolset msvc cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
- [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## QNX 6.x GCC 3.x/2.95.3
-toolset qcc qcc : "-o " : -D
- : -Wc,-pedantic -Wc,-fno-strict-aliasing
- [ opt --release : [ opt --symbols : -g ] -O3 -Wc,-finline-functions ]
- [ opt --debug : -g -O0 -Wc,-fno-inline ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Qlogic Pathscale 2.4
-toolset pathscale pathcc : "-o " : -D
- :
- [ opt --release : -s -Ofast -O3 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Portland Group Pgi 6.2
-toolset pgi pgcc : "-o " : -D
- :
- [ opt --release : -s -O3 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Sun Workshop 6 C++
-toolset sun cc : "-o " : -D
- :
- [ opt --release : -s -fast -xO4 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Sun Workshop 6 C++ (old alias)
-toolset sunpro cc : "-o " : -D
- :
- [ opt --release : -s -fast -xO4 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Compaq Alpha CXX
-toolset tru64cxx cc : "-o " : -D
- :
- [ opt --release : -s -O5 -inline speed ]
- [ opt --debug : -g -O0 -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## IBM VisualAge C++
-toolset vacpp xlc : "-o " : -D
- :
- [ opt --release : -s -O3 -qstrict -qinline ]
- [ opt --debug : -g -qNOOPTimize -qnoinline -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) [ if-os AIX : -bmaxdata:0x40000000 ] ;
-## Microsoft Visual C++ .NET 7.x
-toolset vc7 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
- [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## Microsoft Visual C++ 2005
-toolset vc8 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## Microsoft Visual C++ 2008
-toolset vc9 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## Microsoft Visual C++ 2010
-toolset vc10 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## VMS/OpenVMS DEC C
-toolset vmsdecc cc : /OBJECT= : "/DEFINES=(" "," ")"
- : /STANDARD=VAXC /PREFIX_LIBRARY_ENTRIES=ALL_ENTRIES
- [ opt --release : /OPTIMIZE /NODEBUG ]
- [ opt --debug : /NOOPTIMIZE /DEBUG ]
- ;
-toolset vmsdecc link .link : /EXECUTABLE= :
- : /NOMAP
- [ opt --release : /NODEBUG ]
- [ opt --debug : /DEBUG ]
- ;
-
-# First set the build commands and options according to the
-# preset toolset.
-toolset = [ MATCH --toolset=(.*) : $(ARGV) ] ;
-if ! $(toolset)
-{
- # For some reason, the following test does not catch empty toolset.
- ECHO "###" ;
- ECHO "###" No toolset specified. Please use --toolset option. ;
- ECHO "###" ;
- ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
- EXIT "###" ;
-}
-if ! $(toolset) in $(toolsets)
-{
- ECHO "###" ;
- ECHO "###" Unknown toolset: $(toolset) ;
- ECHO "###" ;
- ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
- EXIT "###" ;
-}
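# How tool.<toolset>.opt.out is split up below: a single value (e.g. "-o "
# for gcc) simply becomes --out; with several values, a repeated first
# element (e.g. /Fe /Fe /Fd /Fo for msvc) yields --out plus the remaining
# --dir options, otherwise the first element is treated as --bin and the
# rest as --dir.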
---cc = $(tool.$(toolset).cc) ;
-if $(tool.$(toolset).opt.out[2])
-{
- if $(tool.$(toolset).opt.out[1]) = $(tool.$(toolset).opt.out[2])
- {
- --out = $(tool.$(toolset).opt.out[1]) ;
- --dir = $(tool.$(toolset).opt.out[3-]) ;
- }
- else
- {
- --bin = $(tool.$(toolset).opt.out[1]) ;
- --dir = $(tool.$(toolset).opt.out[2-]) ;
- }
-}
-else
-{
- --out = $(tool.$(toolset).opt.out) ;
-}
---def = $(tool.$(toolset).opt.define) ;
---flags = $(tool.$(toolset).flags) ;
---defs = $(tool.$(toolset).defines) ;
---libs = $(tool.$(toolset).linklibs) ;
-if $(tool.$(toolset).link.cc)
-{
- --link = $(tool.$(toolset).link.cc) ;
- if $(tool.$(toolset).link.opt.out[2])
- {
- if $(tool.$(toolset).link.opt.out[1]) = $(tool.$(toolset).link.opt.out[2])
- {
- --link-out = $(tool.$(toolset).link.opt.out[1]) ;
- --link-dir = $(tool.$(toolset).link.opt.out[3-]) ;
- }
- else
- {
- --link-bin = $(tool.$(toolset).link.opt.out[1]) ;
- --link-dir = $(tool.$(toolset).link.opt.out[2-]) ;
- }
- }
- else
- {
- --link-out = $(tool.$(toolset).link.opt.out) ;
- }
- --link-def = $(tool.$(toolset).link.opt.define) ;
- --link-flags = $(tool.$(toolset).link.flags) ;
- --link-defs = $(tool.$(toolset).link.defines) ;
- --link-libs = $(tool.$(toolset).link.linklibs) ;
-}
-
-# Put executables in platform-specific subdirectory.
-locate-target = $(LOCATE_TARGET) ;
-if $(OS) = VMS
-{
- locate-target ?= bin$(.)vms ;
- platform = vms ;
-}
-else if $(OS) = MAC
-{
- locate-target ?= bin$(.)$(OS:L)$(OSPLAT:L) ;
- platform = $(OS:L)$(OSPLAT:L) ;
-}
-else if $(OSPLAT)
-{
- locate-target ?= bin$(.)$(OS:L)$(OSPLAT:L) ;
- platform = $(OS:L)$(OSPLAT:L) ;
-}
-else
-{
- locate-target ?= bin$(.)$(OS:L) ;
- platform = $(OS:L) ;
-}
-if $(debug)
-{
- locate-target = [ .path $(locate-target)$(.)debug ] ;
-}
-if $(profile)
-{
- locate-target = [ .path $(locate-target)$(.)profile ] ;
-}
-else
-{
- locate-target = [ .path $(locate-target) ] ;
-}
-
-if --show-locate-target in $(ARGV)
-{
- ECHO $(locate-target) ;
-}
-
-# We have some different files for UNIX, VMS, and NT.
-jam.source =
- command.c compile.c debug.c expand.c glob.c
- hash.c hcache.c headers.c hdrmacro.c
- jam.c jambase.c jamgram.c
- lists.c make.c make1.c mem.c newstr.c
- option.c output.c parse.c regexp.c rules.c
- scan.c search.c subst.c w32_getreg.c
- timestamp.c variable.c modules.c strings.c filesys.c
- builtins.c pwd.c class.c native.c md5.c modules/set.c
- modules/path.c modules/regex.c modules/property-set.c
- modules/sequence.c modules/order.c
- ;
-if $(OS) = NT
-{
- jam.source += execnt.c filent.c pathunix.c ;
-}
-else if $(OS) = OS2
-{
- jam.source += execunix.c fileos2.c pathunix.c ;
-}
-else if $(OS) = VMS
-{
- jam.source += execvms.c filevms.c pathvms.c ;
-}
-else if $(OS) = MAC
-{
- jam.source += execmac.c filemac.c pathmac.c ;
-}
-else
-{
- jam.source += execunix.c fileunix.c pathunix.c ;
-}
-
-# Debug assertions, or not.
-if ! $(debug) || --noassert in $(ARGV)
-{
- --defs += NDEBUG ;
-}
-
-# Enable some optional features.
---defs += OPT_HEADER_CACHE_EXT ;
---defs += OPT_GRAPH_DEBUG_EXT ;
---defs += OPT_SEMAPHORE ;
---defs += OPT_AT_FILES ;
---defs += OPT_DEBUG_PROFILE ;
-
-# Bug fixes
---defs += OPT_FIX_TARGET_VARIABLES_EXT ;
-#~ --defs += OPT_NO_EXTERNAL_VARIABLE_SPLIT ;
-
-# Improvements
---defs += OPT_IMPROVED_PATIENCE_EXT ;
-
-# Use Boehm GC memory allocator?
-if $(--boehm-gc)
-{
- --defs += OPT_BOEHM_GC ;
- if $(debug)
- {
- --defs += GC_DEBUG ;
- }
-}
-
-if $(--duma)
-{
- --defs += OPT_DUMA ;
-}
-
-if ( $(OS) = NT ) && ! NT in $(--defs)
-{
- --defs += NT ;
-}
-if $(OS) = VMS
-{
- --defs += VMS ;
-}
---defs += YYSTACKSIZE=5000 ;
-
-if $(with-python)
-{
- --defs += HAVE_PYTHON ;
-}
-
-if $(debug)
-{
- --defs += BJAM_NEWSTR_NO_ALLOCATE ;
-}
-
-
-# The basic symbolic targets...
-NOTFILE all clean dist ;
-ALWAYS clean ;
-
-# Utility rules and actions...
-rule .clean
-{
- [DELETE] clean : $(<) ;
-}
-if $(OS) = NT { actions piecemeal together existing [DELETE] {
- del /F /Q "$(>)"
-} }
-if $(UNIX) = true { actions piecemeal together existing [DELETE] {
- rm -f "$(>)"
-} }
-if $(OS) = VMS { actions piecemeal together existing [DELETE] {
- DELETE $(>[--2]:J=";*, ") $(>[-1]);*
-} }
-if $(OS) = NT {
- --chmod+w = "attrib -r " ;
-}
-if $(UNIX) = true {
- --chmod+w = "chmod +w " ;
-}
-if $(OS) = VMS {
- --chmod+w = "SET FILE/PROT=(S:RWED) " ;
-}
-
-rule .mkdir
-{
- NOUPDATE $(<) ;
- if $(<:P) { DEPENDS $(<) : $(<:P) ; .mkdir $(<:P) ; }
- if ! $(md<$(<)>) { [MKDIR] $(<) ; md<$(<)> = - ; }
-}
-if $(OS) = NT { actions [MKDIR] {
- md "$(<)"
-} }
-if $(UNIX) = true { actions [MKDIR] {
- mkdir "$(<)"
-} }
-if $(OS) = VMS { actions [MKDIR] {
- CREATE/DIR $(<J=", ")
-} }
-
-rule .exe
-{
- local exe = $(<) ;
- if $(OS) = NT || ( $(UNIX) = true && $(OS) = CYGWIN ) || $(OS) = VMS { exe = $(exe:S=.exe) ; }
- LOCATE on $(exe) = $(locate-target) ;
- DEPENDS all : $(exe) ;
- .mkdir $(locate-target) ;
- if $(--link)
- {
- local objs = ;
- for local s in $(>)
- {
- # Translate any subdir elements into a simple file name.
- local o = [ MATCH "([^/]+)[/]?(.+)" : $(s) ] ;
- o = $(o:J=_) ;
- o = $(o:S=.o) ;
- objs += $(o) ;
- LOCATE on $(o) = $(locate-target) ;
- DEPENDS $(exe) : $(o) ;
- DEPENDS $(o) : $(s) ;
- DEPENDS $(o) : $(locate-target) ;
- [COMPILE] $(o) : $(s) ;
- .clean $(o) ;
- }
- DEPENDS $(exe) : $(objs) ;
- DEPENDS $(exe) : $(locate-target) ;
- [COMPILE.LINK] $(exe) : $(objs) ;
- .clean $(exe) ;
- }
- else
- {
- DEPENDS $(exe) : $(>) ;
- DEPENDS $(exe) : $(locate-target) ;
- [COMPILE] $(exe) : $(>) ;
- .clean $(exe) ;
- }
- return $(exe) ;
-}
-if ! $(--def[2]) { actions [COMPILE] {
- "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def)$(--defs)" "$(--flags)" "$(--libs)" "$(>)"
-} }
-else { actions [COMPILE] {
- "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def[1])$(--defs:J=$(--def[2]))$(--def[3])" "$(--flags)" "$(--libs)" "$(>)"
-} }
-if $(OS) = VMS { actions [COMPILE.LINK] {
- "$(--link)" $(--link-bin)$(<:D=) $(--link-dir)$(<:D)$(./) $(--link-out)$(<) $(--link-def)$(--link-defs) $(--link-flags) "$(--link-libs)" $(>J=", ")
-} }
-else { actions [COMPILE.LINK] {
- "$(--link)" "$(--link-bin)$(<:D=)" "$(--link-dir)$(<:D)$(./)" "$(--link-out)$(<)" "$(--link-def)$(--link-defs)" "$(--link-flags)" "$(--link-libs)" "$(>)"
-} }
-
-rule .link
-{
- DEPENDS all : $(<) ;
- DEPENDS $(<) : $(>) ;
- [LINK] $(<) : $(>) ;
- .clean $(<) ;
-}
-if $(OS) = NT { actions [LINK] {
- copy "$(>)" "$(<)"
-} }
-if $(UNIX) = true { actions [LINK] {
- ln -fs "$(>)" "$(<)"
-} }
-if $(OS) = VMS { actions [LINK] {
- COPY/REPLACE $(>) $(<)
-} }
-
-rule .copy
-{
- DEPENDS all : $(<) ;
- DEPENDS $(<) : $(>) ;
- [COPY] $(<) : $(>) ;
- .clean $(<) ;
-}
-
-# Will be redefined later.
-actions [COPY]
-{
-}
-
-
-rule .move
-{
- DEPENDS $(<) : $(>) ;
- [MOVE] $(<) : $(>) ;
-}
-if $(OS) = NT { actions [MOVE] {
- del /f "$(<)"
- rename "$(>)" "$(<)"
-} }
-if $(UNIX) = true { actions [MOVE] {
- mv -f "$(>)" "$(<)"
-} }
-if $(OS) = VMS { actions [MOVE] {
- RENAME "$(>)" "$(<)"
-} }
-
-# Generate the grammar tokens table, and the real yacc grammar.
-rule .yyacc
-{
- local exe = [ .exe yyacc : yyacc.c ] ;
- NOUPDATE $(exe) ;
- DEPENDS $(<) : $(exe) $(>) ;
- LEAVES $(<) ;
- yyacc.exe on $(<) = $(exe:R=$(locate-target)) ;
- [YYACC] $(<) : $(>) ;
-}
-actions [YYACC] {
- $(--chmod+w)$(<[1])
- $(--chmod+w)$(<[2])
- "$(yyacc.exe)" "$(<)" "$(>)"
-}
-if $(grammar)
-{
- .yyacc jamgram.y jamgramtab.h : jamgram.yy ;
-}
-else if $(debug)
-{
- .exe yyacc : yyacc.c ;
-}
-
-# How to build the grammar.
-if $(OS) = NT
-{
- SUFEXE = .exe ;
- # try some other likely spellings...
- PATH ?= $(Path) ;
- PATH ?= $(path) ;
-}
-SUFEXE ?= "" ;
-
-yacc ?= [ GLOB $(PATH) : yacc$(SUFEXE) ] ;
-yacc ?= [ GLOB $(PATH) : bison$(SUFEXE) ] ;
-yacc ?= [ GLOB "$(ProgramFiles:J= )\\GnuWin32\\bin" "C:\\Program Files\\GnuWin32\\bin" : bison$(SUFEXE) ] ;
-yacc = $(yacc[1]) ;
-switch $(yacc:D=:S=)
-{
- case bison : yacc += -d --yacc ;
- case yacc : yacc += -d ;
-}
-if $(debug) && $(yacc)
-{
- yacc += -t -v ;
-}
-yacc += $(YACCFLAGS) ;
-
-rule .yacc
-{
- DEPENDS $(<) : $(>) ;
- LEAVES $(<) ;
- [YACC] $(<) : $(>) ;
-}
-if $(OS) = NT { actions [YACC] {
- "$(yacc)" "$(>)"
- if not errorlevel 1 (
- del /f "$(<[1])"
- rename y.tab$(<[1]:S) "$(<[1])"
- del /f $(<[2])
- rename y.tab$(<[2]:S) "$(<[2])"
- ) else set _error_ =
-} }
-if $(UNIX) = true { actions [YACC] {
- if ` "$(yacc)" "$(>)" ` ; then
- mv -f y.tab$(<[1]:S) "$(<[1])"
- mv -f y.tab$(<[2]:S) "$(<[2])"
- else
- exit 1
- fi
-} }
-if $(OS) = VMS { actions [YACC] {
- IF "$(yacc)" $(>)
- THEN
- RENAME y_tab$(<[1]:S) $(<[1])
- RENAME y_tab$(<[2]:S) $(<[2])
- ENDIF
-} }
-if $(grammar) && ! $(yacc)
-{
- EXIT "Could not find the 'yacc' tool, and therefore can not build the grammar." ;
-}
-if $(grammar) && $(yacc)
-{
- .yacc jamgram.c jamgram.h : jamgram.y ;
-}
-
-# How to build the compiled-in jambase.
-rule .mkjambase
-{
- local exe = [ .exe mkjambase : mkjambase.c ] ;
- DEPENDS $(<) : $(exe) $(>) ;
- LEAVES $(<) ;
- mkjambase.exe on $(<) = $(exe:R=$(locate-target)) ;
- [MKJAMBASE] $(<) : $(>) ;
-}
-actions [MKJAMBASE] {
- $(--chmod+w)$(<)
- $(mkjambase.exe) "$(<)" "$(>)"
-}
-if $(debug)
-{
- .mkjambase jambase.c : Jambase ;
-}
-
-# How to build Jam.
-rule .jam
-{
- $(>).exe = [ .exe $(>) : $(jam.source) ] ;
- DEPENDS all : $($(>).exe) ;
-
- # Make a copy under the old name.
- $(<).exe = $(<:S=$($(>).exe:S)) ;
- LOCATE on $($(<).exe) = $(locate-target) ;
- .copy $($(<).exe) : $($(>).exe) ;
- DEPENDS all : $($(<).exe) ;
-}
-.jam bjam : b2 ;
-
-
-# Scan sources for header dependencies.
-# WARNING: Yes, those are *REAL TABS* below. DO NOT CHANGE them to
-# spaces under any circumstances!! The tabs indenting this comment
-# are here so that anyone in the mood to replace tabs hits this
-# comment first and hopefully notices their error.
-rule .scan
-{
- HDRRULE on $(<:D=) = .hdr.scan ;
- HDRSCAN on $(<:D=) = "^[ ]*#[ ]*include[ ]*([<\"][^\">]*[\">]).*$" ;
-}
-rule .hdr.scan
-{
- local hdrs = [ GLOB . : $(>:D=) ] ;
- INCLUDES $(<:D=) : $(hdrs:D=) ;
- HDRRULE on $(>:D=) = .hdr.scan ;
- HDRSCAN on $(>:D=) = "^[ ]*#[ ]*include[ ]*([<\"][^\">]*[\">]).*$" ;
-}
-.scan [ GLOB . : *.c ] ;
-
-# Distribution making from here on out. Assumes that
-# the docs are already built as html at ../doc/html. If
-# they aren't, then the docs are not included in the dist
-# archive.
-dist.license =
- [ GLOB . : $(LICENSE).txt ]
- ;
-dist.license = $(dist.license:D=)
- [ GLOB [ .path .. .. .. ] : $(LICENSE).txt ]
- [ GLOB [ .path .. boost ] : $(LICENSE).txt ] ;
-dist.docs =
- [ GLOB . : *.png *.css *.html ]
- ;
-dist.docs = $(dist.docs:D=)
- [ GLOB [ .path images ] : *.png ]
- [ GLOB [ .path jam ] : *.html ]
- ;
-dist.source =
- [ GLOB . : *.c *.h ]
- ;
-dist.source = $(dist.source:D=)
- $(dist.license[1])
- $(dist.docs)
- build.jam build.bat build.sh build_vms.com
- Jambase
- jamgram.y jamgram.yy
- [ .path modules set.c ]
- [ .path modules path.c ]
- [ .path modules regex.c ]
- [ .path modules property-set.c ]
- [ .path modules sequence.c ]
- [ .path modules order.c ]
- [ GLOB [ .path boehm_gc ] : * ]
- [ GLOB [ .path boehm_gc include ] : * ]
- [ GLOB [ .path boehm_gc include private ] : * ]
- [ GLOB [ .path boehm_gc cord ] : * ]
- [ GLOB [ .path boehm_gc Mac_files ] : * ]
- [ GLOB [ .path boehm_gc tests ] : * ]
- [ GLOB [ .path boehm_gc doc ] : * ]
- ;
-dist.bin =
- bjam
- ;
-dist.bin =
- $(dist.license[1])
- $(dist.bin:S=$(bjam.exe:S))
- ;
-
-if $(OS) = NT
-{
- zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7z.exe" ] ;
- zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7zn.exe" ] ;
- zip ?= [ GLOB $(PATH) : zip.exe ] ;
- zip ?= zip ;
- zip = $(zip[1]) ;
- switch $(zip:D=:S=)
- {
- case 7z* : zip += a -r -tzip -mx=9 ;
- case zip : zip += -9r ;
- }
- actions piecemeal [PACK] {
- "$(zip)" "$(<)" "$(>)"
- }
- actions piecemeal [ZIP] {
- "$(zip)" "$(<)" "$(>)"
- }
- actions piecemeal [COPY] {
- copy /Y "$(>)" "$(<)" >NUL:
- }
-}
-if $(UNIX) = true
-{
- tar ?= [ GLOB $(PATH) : star bsdtar tar ] ;
- tar = $(tar[1]) ;
- switch $(tar:D=:S=)
- {
- case star : tar += -c artype=pax -D -d -to-stdout ;
- case * : tar += -c -f - ;
- }
- actions [PACK] {
- "$(tar)" "$(>)" | gzip -c9 > "$(<)"
- }
- #~ actions [PACK] {
- #~ tar cf "$(<:S=.tar)" "$(>)"
- #~ }
- actions [ZIP] {
- gzip -c9 "$(>)" > "$(<)"
- }
- actions [COPY] {
- cp -Rpf "$(>)" "$(<)"
- }
-}
-
-# The single binary, compressed.
-rule .binary
-{
- local zip = ;
- if $(OS) = NT { zip = $($(<).exe:S=.zip) ; }
- if $(UNIX) = true { zip = $($(<).exe:S=.tgz) ; }
- zip = $(zip:S=)-$(VERSION)-$(RELEASE)-$(platform)$(zip:S) ;
- DEPENDS $(zip) : $($(<).exe) ;
- DEPENDS dist : $(zip) ;
- #~ LOCATE on $(zip) = $(locate-target) ;
- if $(OS) = NT { [ZIP] $(zip) : $($(<).exe) ; }
- if $(UNIX) = true { [PACK] $(zip) : $($(<).exe) ; }
- .clean $(zip) ;
-}
-
-# Package some file.
-rule .package ( dst-dir : src-files + )
-{
- local dst-files ;
- local src-files-actual ;
- for local src-path in $(src-files)
- {
- if ! [ GLOB $(src-path:P) : $(src-path:B) ] || [ CHECK_IF_FILE $(src-path) ]
- {
- local src-subdir = $(src-path:D) ;
- local src-file = $(src-path) ;
- while $(src-subdir:D) { src-subdir = $(src-subdir:D) ; }
- if $(src-subdir) = ".."
- {
- src-file = $(src-file:D=) ;
- }
- dst-files += $(src-file:R=$(dst-dir)) ;
- src-files-actual += $(src-path) ;
- }
- }
-
- local pack = ;
- if $(OS) = NT { pack = $(dst-dir).zip ; }
- if $(UNIX) = true { pack = $(dst-dir).tgz ; }
-
- DEPENDS dist : $(pack) ;
- DEPENDS $(pack) : $(dst-files) ;
-
- local dst-files-queue = $(dst-files) ;
- for local src-path in $(src-files-actual)
- {
- local dst-file = $(dst-files-queue[1]) ;
- dst-files-queue = $(dst-files-queue[2-]) ;
- DEPENDS $(dst-file) : $(src-path) $(dst-file:D) ;
- .mkdir $(dst-file:D) ;
-
- [COPY] $(dst-file) : $(src-path) ;
- .clean $(dst-file) ;
- }
-
- [PACK] $(pack) : $(dst-files) ;
- .clean $(pack) ;
-}
-
-# RPM distro file.
-rpm-tool = [ GLOB $(PATH) : "rpmbuild" ] ;
-rpm-tool ?= [ GLOB $(PATH) : "rpm" ] ;
-rpm-tool = $(rpm-tool[1]) ;
-rule .rpm ( name : source )
-{
- local rpm-arch = ;
- switch $(OSPLAT)
- {
- case X86 : rpm-arch ?= i386 ;
- case PPC : rpm-arch ?= ppc ;
- case AXP : rpm-arch ?= alpha ;
-        # no guarantee for these:
- case IA64 : rpm-arch ?= ia64 ;
- case ARM : rpm-arch ?= arm ;
- case SPARC : rpm-arch ?= sparc ;
- case * : rpm-arch ?= other ;
- }
- local target = $(name)-rpm ;
- NOTFILE $(target) ;
- DEPENDS dist : $(target) ;
- DEPENDS $(target) : $(name).$(rpm-arch).rpm $(name).src.rpm ;
- DEPENDS $(name).$(rpm-arch).rpm : $(source) ;
- DEPENDS $(name).src.rpm : $(name).$(rpm-arch).rpm ;
- docs on $(target) = $(dist.docs:J=" ") ;
- arch on $(target) = $(rpm-arch) ;
- if $(rpm-arch) = ppc { target-opt on $(target) = --target= ; }
- else { target-opt on $(target) = "--target " ; }
- [RPM] $(target) : $(source) ;
- .clean $(name).$(rpm-arch).rpm $(name).src.rpm ;
-}
-actions [RPM] {
- set -e
- export BOOST_JAM_TOOLSET="$(toolset)"
- $(rpm-tool) -ta $(target-opt)$(arch) $(>) | tee rpm.out
- cp `grep -e '^Wrote:' rpm.out | sed 's/^Wrote: //'` .
- rm -f rpm.out
-}
-
-# The distribution targets. Don't bother with the targets if
-# a distribution build was not requested.
-if dist in $(ARGV)
-{
- #~ .binary bjam ;
- .package $(NAME)-$(VERSION) : $(dist.source) ;
- .package $(NAME)-$(VERSION)-$(RELEASE)-$(platform) : $(dist.bin) ;
- if $(rpm-tool)
- {
- #~ .rpm $(NAME)-$(VERSION)-$(RELEASE) : $(NAME)-$(VERSION).tgz ;
- }
-}
diff --git a/jam-files/engine/build.sh b/jam-files/engine/build.sh
deleted file mode 100755
index f1fb806d..00000000
--- a/jam-files/engine/build.sh
+++ /dev/null
@@ -1,303 +0,0 @@
-#!/bin/sh
-
-#~ Copyright 2002-2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Reset the toolset.
-BOOST_JAM_TOOLSET=
-
-# Run a command, echoing it before doing so. Also check the exit
-# status and quit if there was an error.
-echo_run ()
-{
- echo "$@"
- $@
- r=$?
- if test $r -ne 0 ; then
- exit $r
- fi
-}
-
-# Print an error message, and exit with a status of 1.
-error_exit ()
-{
- echo "###"
- echo "###" "$@"
- echo "###"
- echo "### You can specify the toolset as the argument, i.e.:"
- echo "### ./build.sh gcc"
- echo "###"
- echo "### Toolsets supported by this script are:"
- echo "### acc, como, darwin, gcc, intel-darwin, intel-linux, kcc, kylix,"
- echo "### mipspro, mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp"
- echo "###"
-    echo "### A special toolset, cc, is available which is used as a fallback"
-    echo "### when a more specific toolset is not found and the cc command is"
-    echo "### detected. The 'cc' toolset will use the CC, CFLAGS, and LIBS"
-    echo "### environment variables, if present."
- echo "###"
- exit 1
-}
-
-# Check that a command is in the PATH.
-test_path ()
-{
- if `command -v command 1>/dev/null 2>/dev/null`; then
- command -v $1 1>/dev/null 2>/dev/null
- else
- hash $1 1>/dev/null 2>/dev/null
- fi
-}
-
-# Check that the OS name, as returned by "uname", is as given.
-test_uname ()
-{
- if test_path uname; then
- test `uname` = $*
- fi
-}
-
-# Try and guess the toolset to bootstrap the build with...
-Guess_Toolset ()
-{
- if test -r /mingw/bin/gcc ; then
- BOOST_JAM_TOOLSET=mingw
- BOOST_JAM_TOOLSET_ROOT=/mingw/
- elif test_uname Darwin ; then BOOST_JAM_TOOLSET=darwin
- elif test_uname IRIX ; then BOOST_JAM_TOOLSET=mipspro
- elif test_uname IRIX64 ; then BOOST_JAM_TOOLSET=mipspro
- elif test_uname OSF1 ; then BOOST_JAM_TOOLSET=tru64cxx
- elif test_uname QNX && test_path qcc ; then BOOST_JAM_TOOLSET=qcc
- elif test_path gcc ; then BOOST_JAM_TOOLSET=gcc
- elif test_path icc ; then BOOST_JAM_TOOLSET=intel-linux
- elif test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0
- elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80
- elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
- elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
- elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
- elif test_path pgcc ; then BOOST_JAM_TOOLSET=pgi
- elif test_path pathcc ; then BOOST_JAM_TOOLSET=pathscale
- elif test_path xlc ; then BOOST_JAM_TOOLSET=vacpp
- elif test_path como ; then BOOST_JAM_TOOLSET=como
- elif test_path KCC ; then BOOST_JAM_TOOLSET=kcc
- elif test_path bc++ ; then BOOST_JAM_TOOLSET=kylix
- elif test_path aCC ; then BOOST_JAM_TOOLSET=acc
- elif test_uname HP-UX ; then BOOST_JAM_TOOLSET=acc
- elif test -r /opt/SUNWspro/bin/cc ; then
- BOOST_JAM_TOOLSET=sunpro
- BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
- # Test for "cc" as the default fallback.
- elif test_path $CC ; then BOOST_JAM_TOOLSET=cc
- elif test_path cc ; then
- BOOST_JAM_TOOLSET=cc
- CC=cc
- fi
- if test "$BOOST_JAM_TOOLSET" = "" ; then
- error_exit "Could not find a suitable toolset."
- fi
-}
-
-# The one option we support in the invocation
-# is the name of the toolset to force building
-# with.
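# For example (illustrative, matching the cases below):
#     ./build.sh                   guess the toolset and build
#     ./build.sh gcc               force the gcc toolset
#     ./build.sh --guess-toolset   print the guessed toolset name and exit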
-case "$1" in
- --guess-toolset) Guess_Toolset ; echo "$BOOST_JAM_TOOLSET" ; exit 1 ;;
- -*) Guess_Toolset ;;
- ?*) BOOST_JAM_TOOLSET=$1 ; shift ;;
- *) Guess_Toolset ;;
-esac
-BOOST_JAM_OPT_JAM="-o bootstrap/jam0"
-BOOST_JAM_OPT_MKJAMBASE="-o bootstrap/mkjambase0"
-BOOST_JAM_OPT_YYACC="-o bootstrap/yyacc0"
-case $BOOST_JAM_TOOLSET in
- mingw)
- if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/gcc ; then
- export PATH=${BOOST_JAM_TOOLSET_ROOT}bin:$PATH
- fi
- BOOST_JAM_CC="gcc -DNT"
- ;;
-
- gcc)
- BOOST_JAM_CC=gcc
- ;;
-
- darwin)
- BOOST_JAM_CC=cc
- ;;
-
- intel-darwin)
- BOOST_JAM_CC=icc
- ;;
-
- intel-linux)
- if test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0/
- elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80/
- elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
- elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
- elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
- fi
- if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh ; then
- # iccvars doesn't change LD_RUN_PATH. We adjust LD_RUN_PATH
- # here in order not to have to rely on ld.so.conf knowing the
- # icc library directory. We do this before running iccvars.sh
- # in order to allow a user to add modifications to LD_RUN_PATH
- # in iccvars.sh.
- if test -z "${LD_RUN_PATH}"; then
- LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib"
- else
- LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib:${LD_RUN_PATH}"
- fi
- export LD_RUN_PATH
- . ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh
- fi
- BOOST_JAM_CC=icc
- ;;
-
- vacpp)
- BOOST_JAM_CC=xlc
- ;;
-
- como)
- BOOST_JAM_CC="como --c"
- ;;
-
- kcc)
- BOOST_JAM_CC=KCC
- ;;
-
- kylix)
- BOOST_JAM_CC=bc++
- ;;
-
- mipspro)
- BOOST_JAM_CC=cc
- ;;
-
- pathscale)
- BOOST_JAM_CC=pathcc
- ;;
-
- pgi)
- BOOST_JAM_CC=pgcc
- ;;
-
- sun*)
- if test -z "${BOOST_JAM_TOOLSET_ROOT}" -a -r /opt/SUNWspro/bin/cc ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
- fi
- if test -r "${BOOST_JAM_TOOLSET_ROOT}bin/cc" ; then
- PATH=${BOOST_JAM_TOOLSET_ROOT}bin:${PATH}
- export PATH
- fi
- BOOST_JAM_CC=cc
- ;;
-
- clang*)
- BOOST_JAM_CC="clang -Wno-unused -Wno-format"
- BOOST_JAM_TOOLSET=clang
- ;;
-
- tru64cxx)
- BOOST_JAM_CC=cc
- ;;
-
- acc)
- BOOST_JAM_CC="cc -Ae"
- ;;
-
- cc)
- if test -z "$CC" ; then CC=cc ; fi
- BOOST_JAM_CC=$CC
- BOOST_JAM_OPT_JAM="$BOOST_JAM_OPT_JAM $CFLAGS $LIBS"
- BOOST_JAM_OPT_MKJAMBASE="$BOOST_JAM_OPT_MKJAMBASE $CFLAGS $LIBS"
- BOOST_JAM_OPT_YYACC="$BOOST_JAM_OPT_YYACC $CFLAGS $LIBS"
- ;;
-
- qcc)
- BOOST_JAM_CC=qcc
- ;;
-
- *)
- error_exit "Unknown toolset: $BOOST_JAM_TOOLSET"
- ;;
-esac
-
-echo "###"
-echo "### Using '$BOOST_JAM_TOOLSET' toolset."
-echo "###"
-
-YYACC_SOURCES="yyacc.c"
-MKJAMBASE_SOURCES="mkjambase.c"
-BJAM_SOURCES="\
- command.c compile.c debug.c expand.c glob.c hash.c\
- hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c\
- newstr.c option.c output.c parse.c pathunix.c pathvms.c regexp.c\
- rules.c scan.c search.c subst.c timestamp.c variable.c modules.c\
- strings.c filesys.c builtins.c pwd.c class.c native.c md5.c w32_getreg.c\
- modules/set.c modules/path.c modules/regex.c modules/property-set.c\
- modules/sequence.c modules/order.c"
-case $BOOST_JAM_TOOLSET in
- mingw)
- BJAM_SOURCES="${BJAM_SOURCES} execnt.c filent.c"
- ;;
-
- *)
- BJAM_SOURCES="${BJAM_SOURCES} execunix.c fileunix.c"
- ;;
-esac
-
-BJAM_UPDATE=
-if test "$1" = "--update" -o "$2" = "--update" -o "$3" = "--update" -o "$4" = "--update" ; then
- BJAM_UPDATE="update"
-fi
-if test "${BJAM_UPDATE}" = "update" -a ! -x "./bootstrap/jam0" ; then
- BJAM_UPDATE=
-fi
-
-if test "${BJAM_UPDATE}" != "update" ; then
- echo_run rm -rf bootstrap
- echo_run mkdir bootstrap
- if test ! -r jamgram.y -o ! -r jamgramtab.h ; then
- echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_YYACC} ${YYACC_SOURCES}
- if test -x "./bootstrap/yyacc0" ; then
- echo_run ./bootstrap/yyacc0 jamgram.y jamgramtab.h jamgram.yy
- fi
- fi
- if test ! -r jamgram.c -o ! -r jamgram.h ; then
- if test_path yacc ; then YACC="yacc -d"
- elif test_path bison ; then YACC="bison -y -d --yacc"
- fi
- echo_run $YACC jamgram.y
- mv -f y.tab.c jamgram.c
- mv -f y.tab.h jamgram.h
- fi
- if test ! -r jambase.c ; then
- echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_MKJAMBASE} ${MKJAMBASE_SOURCES}
- if test -x "./bootstrap/mkjambase0" ; then
- echo_run ./bootstrap/mkjambase0 jambase.c Jambase
- fi
- fi
- echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_JAM} ${BJAM_SOURCES}
-fi
-if test -x "./bootstrap/jam0" ; then
- if test "${BJAM_UPDATE}" != "update" ; then
- echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" clean
- fi
- echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" "$@"
-fi
diff --git a/jam-files/engine/build_vms.com b/jam-files/engine/build_vms.com
deleted file mode 100644
index 965b6342..00000000
--- a/jam-files/engine/build_vms.com
+++ /dev/null
@@ -1,105 +0,0 @@
-$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson.
-$ ! Distributed under the Boost Software License, Version 1.0.
-$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-$ !
-$ ! bootstrap build script for Jam
-$ !
-$ SAY :== WRITE SYS$OUTPUT
-$ !
-$ ON WARNING THEN CONTINUE
-$ !
-$ IF "" .NES. F$SEARCH("[.bootstrap_vms]*.*")
-$ THEN
-$   SAY "Cleaning previous bootstrap files..."
-$ !
-$ SET FILE/PROTECTION=(S:RWED) [.bootstrap_vms]*.*;*
-$ DELETE [.bootstrap_vms]*.*;*
-$ ENDIF
-$ !
-$ IF "" .NES. F$SEARCH("bootstrap_vms.dir")
-$ THEN
-$   SAY "Removing previous bootstrap directory..."
-$ !
-$ SET FILE/PROT=(S:RWED) bootstrap_vms.dir
-$ DELETE bootstrap_vms.dir;
-$ ENDIF
-$ !
-$ SAY "Creating bootstrap directory..."
-$ !
-$ CREATE/DIR [.bootstrap_vms]
-$ !
-$ SAY "Building bootstrap jam..."
-$ !
-$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC /PREFIX_LIBRARY_ENTRIES=ALL_ENTRIES "
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]builtins.obj builtins.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]command.obj command.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]compile.obj compile.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]execvms.obj execvms.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]expand.obj expand.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]filesys.obj filesys.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]filevms.obj filevms.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]glob.obj glob.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]hash.obj hash.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]hdrmacro.obj hdrmacro.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]headers.obj headers.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]jam.obj jam.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]jambase.obj jambase.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]jamgram.obj jamgram.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]lists.obj lists.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]make.obj make.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]make1.obj make1.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]modules.obj modules.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]newstr.obj newstr.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]option.obj option.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]parse.obj parse.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]pathvms.obj pathvms.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]pwd.obj pwd.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]regexp.obj regexp.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]rules.obj rules.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]scan.obj scan.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]search.obj search.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]strings.obj strings.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]subst.obj subst.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]timestamp.obj timestamp.c
-$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]variable.obj variable.c
-$ link -
- /EXECUTABLE=[.bootstrap_vms]jam0.exe -
- [.bootstrap_vms]builtins.obj, -
- [.bootstrap_vms]command.obj, -
- [.bootstrap_vms]compile.obj, -
- [.bootstrap_vms]execvms.obj, -
- [.bootstrap_vms]expand.obj, -
- [.bootstrap_vms]filesys.obj, -
- [.bootstrap_vms]filevms.obj, -
- [.bootstrap_vms]glob.obj, -
- [.bootstrap_vms]hash.obj, -
- [.bootstrap_vms]hdrmacro.obj, -
- [.bootstrap_vms]headers.obj, -
- [.bootstrap_vms]jam.obj, -
- [.bootstrap_vms]jambase.obj, -
- [.bootstrap_vms]jamgram.obj, -
- [.bootstrap_vms]lists.obj, -
- [.bootstrap_vms]make.obj, -
- [.bootstrap_vms]make1.obj, -
- [.bootstrap_vms]modules.obj, -
- [.bootstrap_vms]newstr.obj, -
- [.bootstrap_vms]option.obj, -
- [.bootstrap_vms]parse.obj, -
- [.bootstrap_vms]pathvms.obj, -
- [.bootstrap_vms]pwd.obj, -
- [.bootstrap_vms]regexp.obj, -
- [.bootstrap_vms]rules.obj, -
- [.bootstrap_vms]scan.obj, -
- [.bootstrap_vms]search.obj, -
- [.bootstrap_vms]strings.obj, -
- [.bootstrap_vms]subst.obj, -
- [.bootstrap_vms]timestamp.obj, -
- [.bootstrap_vms]variable.obj
-$ !
-$ SAY "Cleaning any previous build..."
-$ !
-$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset=vmsdecc clean
-$ !
-$ SAY "Building Boost.Jam..."
-$ !
-$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset=vmsdecc
diff --git a/jam-files/engine/builtins.c b/jam-files/engine/builtins.c
deleted file mode 100644
index b28a484e..00000000
--- a/jam-files/engine/builtins.c
+++ /dev/null
@@ -1,2310 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "builtins.h"
-#include "rules.h"
-#include "filesys.h"
-#include "newstr.h"
-#include "regexp.h"
-#include "frames.h"
-#include "hash.h"
-#include "strings.h"
-#include "pwd.h"
-#include "pathsys.h"
-#include "make.h"
-#include "hdrmacro.h"
-#include "compile.h"
-#include "native.h"
-#include "variable.h"
-#include "timestamp.h"
-#include "md5.h"
-#include <ctype.h>
-
-#if defined(USE_EXECUNIX)
-# include <sys/types.h>
-# include <sys/wait.h>
-#else
-/*
-    NT does not have wait() and the associated macros; it uses the return
-    value of system() instead. Status code groups are documented at
-    http://msdn.microsoft.com/en-gb/library/ff565436.aspx
-*/
-# define WIFEXITED(w) (((w) & 0XFFFFFF00) == 0)
-# define WEXITSTATUS(w)(w)
-#endif
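/*
    Illustrative use of the substitutes above (assumed, mirroring the POSIX
    pattern): given an int status returned for a finished command,
        if ( WIFEXITED( status ) ) rc = WEXITSTATUS( status );
    works unchanged on both NT and UNIX.
*/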
-
-/*
- * builtins.c - builtin jam rules
- *
- * External routines:
- *
- * load_builtin() - define builtin rules
- *
- * Internal routines:
- *
- * builtin_depends() - DEPENDS/INCLUDES rule.
- * builtin_echo() - ECHO rule.
- * builtin_exit() - EXIT rule.
- * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule.
- * builtin_glob() - GLOB rule.
- * builtin_match() - MATCH rule.
- *
- * 01/10/01 (seiwald) - split from compile.c
- */
-
-
-/*
- * compile_builtin() - define builtin rules
- */
-
-#define P0 (PARSE *)0
-#define C0 (char *)0
-
-#if defined( OS_NT ) || defined( OS_CYGWIN )
- LIST * builtin_system_registry ( PARSE *, FRAME * );
- LIST * builtin_system_registry_names( PARSE *, FRAME * );
-#endif
-
-int glob( char * s, char * c );
-
-void backtrace ( FRAME * );
-void backtrace_line ( FRAME * );
-void print_source_line( PARSE * );
-
-
-RULE * bind_builtin( char * name, LIST * (* f)( PARSE *, FRAME * ), int flags, char * * args )
-{
- argument_list* arg_list = 0;
-
- if ( args )
- {
- arg_list = args_new();
- lol_build( arg_list->data, args );
- }
-
- return new_rule_body( root_module(), name, arg_list,
- parse_make( f, P0, P0, P0, C0, C0, flags ), 1 );
-}
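/*
 * Note on the "args" arrays passed to bind_builtin() below: each one is a
 * null-terminated argument specification in which plain words name
 * parameters, "?" marks an optional value, "*" zero-or-more and "+"
 * one-or-more values, and ":" separates successive argument lists.
 */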
-
-
-RULE * duplicate_rule( char * name, RULE * other )
-{
- return import_rule( other, root_module(), name );
-}
-
-
-void load_builtins()
-{
- duplicate_rule( "Always",
- bind_builtin( "ALWAYS",
- builtin_flags, T_FLAG_TOUCHED, 0 ) );
-
- duplicate_rule( "Depends",
- bind_builtin( "DEPENDS",
- builtin_depends, 0, 0 ) );
-
- duplicate_rule( "echo",
- duplicate_rule( "Echo",
- bind_builtin( "ECHO",
- builtin_echo, 0, 0 ) ) );
-
- {
- char * args[] = { "message", "*", ":", "result-value", "?", 0 };
- duplicate_rule( "exit",
- duplicate_rule( "Exit",
- bind_builtin( "EXIT",
- builtin_exit, 0, args ) ) );
- }
-
- {
- char * args[] = { "directories", "*", ":", "patterns", "*", ":", "case-insensitive", "?", 0 };
- duplicate_rule( "Glob",
- bind_builtin( "GLOB", builtin_glob, 0, args ) );
- }
-
- {
- char * args[] = { "patterns", "*", 0 };
- bind_builtin( "GLOB-RECURSIVELY",
- builtin_glob_recursive, 0, args );
- }
-
- duplicate_rule( "Includes",
- bind_builtin( "INCLUDES",
- builtin_depends, 1, 0 ) );
-
- {
- char * args[] = { "targets", "*", ":", "targets-to-rebuild", "*", 0 };
- bind_builtin( "REBUILDS",
- builtin_rebuilds, 0, args );
- }
-
- duplicate_rule( "Leaves",
- bind_builtin( "LEAVES",
- builtin_flags, T_FLAG_LEAVES, 0 ) );
-
- duplicate_rule( "Match",
- bind_builtin( "MATCH",
- builtin_match, 0, 0 ) );
-
- {
- char * args[] = { "string", ":", "delimiters" };
- bind_builtin( "SPLIT_BY_CHARACTERS",
- builtin_split_by_characters, 0, 0 );
- }
-
- duplicate_rule( "NoCare",
- bind_builtin( "NOCARE",
- builtin_flags, T_FLAG_NOCARE, 0 ) );
-
- duplicate_rule( "NOTIME",
- duplicate_rule( "NotFile",
- bind_builtin( "NOTFILE",
- builtin_flags, T_FLAG_NOTFILE, 0 ) ) );
-
- duplicate_rule( "NoUpdate",
- bind_builtin( "NOUPDATE",
- builtin_flags, T_FLAG_NOUPDATE, 0 ) );
-
- duplicate_rule( "Temporary",
- bind_builtin( "TEMPORARY",
- builtin_flags, T_FLAG_TEMP, 0 ) );
-
- bind_builtin( "ISFILE",
- builtin_flags, T_FLAG_ISFILE, 0 );
-
- duplicate_rule( "HdrMacro",
- bind_builtin( "HDRMACRO",
- builtin_hdrmacro, 0, 0 ) );
-
- /* FAIL_EXPECTED is used to indicate that the result of a target build
-     * action should be inverted (ok <=> fail); this can be useful when
- * performing test runs from Jamfiles.
- */
- bind_builtin( "FAIL_EXPECTED",
- builtin_flags, T_FLAG_FAIL_EXPECTED, 0 );
-
- bind_builtin( "RMOLD",
- builtin_flags, T_FLAG_RMOLD, 0 );
-
- {
- char * args[] = { "targets", "*", 0 };
- bind_builtin( "UPDATE",
- builtin_update, 0, args );
- }
-
- {
- char * args[] = { "targets", "*",
- ":", "log", "?",
- ":", "ignore-minus-n", "?",
- ":", "ignore-minus-q", "?", 0 };
- bind_builtin( "UPDATE_NOW",
- builtin_update_now, 0, args );
- }
-
- {
- char * args[] = { "string", "pattern", "replacements", "+", 0 };
- duplicate_rule( "subst",
- bind_builtin( "SUBST",
- builtin_subst, 0, args ) );
- }
-
- {
- char * args[] = { "module", "?", 0 };
- bind_builtin( "RULENAMES",
- builtin_rulenames, 0, args );
- }
-
-
- {
- char * args[] = { "module", "?", 0 };
- bind_builtin( "VARNAMES",
- builtin_varnames, 0, args );
- }
-
- {
- char * args[] = { "module", "?", 0 };
- bind_builtin( "DELETE_MODULE",
- builtin_delete_module, 0, args );
- }
-
- {
- char * args[] = { "source_module", "?",
- ":", "source_rules", "*",
- ":", "target_module", "?",
- ":", "target_rules", "*",
- ":", "localize", "?", 0 };
- bind_builtin( "IMPORT",
- builtin_import, 0, args );
- }
-
- {
- char * args[] = { "module", "?", ":", "rules", "*", 0 };
- bind_builtin( "EXPORT",
- builtin_export, 0, args );
- }
-
- {
- char * args[] = { "levels", "?", 0 };
- bind_builtin( "CALLER_MODULE",
- builtin_caller_module, 0, args );
- }
-
- {
- char * args[] = { "levels", "?", 0 };
- bind_builtin( "BACKTRACE",
- builtin_backtrace, 0, args );
- }
-
- {
- char * args[] = { 0 };
- bind_builtin( "PWD",
- builtin_pwd, 0, args );
- }
-
- {
- char * args[] = { "target", "*", ":", "path", "*", 0 };
- bind_builtin( "SEARCH_FOR_TARGET",
- builtin_search_for_target, 0, args );
- }
-
- {
- char * args[] = { "modules_to_import", "+", ":", "target_module", "?", 0 };
- bind_builtin( "IMPORT_MODULE",
- builtin_import_module, 0, args );
- }
-
- {
- char * args[] = { "module", "?", 0 };
- bind_builtin( "IMPORTED_MODULES",
- builtin_imported_modules, 0, args );
- }
-
- {
- char * args[] = { "instance_module", ":", "class_module", 0 };
- bind_builtin( "INSTANCE",
- builtin_instance, 0, args );
- }
-
- {
- char * args[] = { "sequence", "*", 0 };
- bind_builtin( "SORT",
- builtin_sort, 0, args );
- }
-
- {
- char * args[] = { "path_parts", "*", 0 };
- bind_builtin( "NORMALIZE_PATH",
- builtin_normalize_path, 0, args );
- }
-
- {
- char * args[] = { "args", "*", 0 };
- bind_builtin( "CALC",
- builtin_calc, 0, args );
- }
-
- {
- char * args[] = { "module", ":", "rule", 0 };
- bind_builtin( "NATIVE_RULE",
- builtin_native_rule, 0, args );
- }
-
- {
- char * args[] = { "module", ":", "rule", ":", "version", 0 };
- bind_builtin( "HAS_NATIVE_RULE",
- builtin_has_native_rule, 0, args );
- }
-
- {
- char * args[] = { "module", "*", 0 };
- bind_builtin( "USER_MODULE",
- builtin_user_module, 0, args );
- }
-
- {
- char * args[] = { 0 };
- bind_builtin( "NEAREST_USER_LOCATION",
- builtin_nearest_user_location, 0, args );
- }
-
- {
- char * args[] = { "file", 0 };
- bind_builtin( "CHECK_IF_FILE",
- builtin_check_if_file, 0, args );
- }
-
-#ifdef HAVE_PYTHON
- {
- char * args[] = { "python-module", ":", "function", ":",
- "jam-module", ":", "rule-name", 0 };
- bind_builtin( "PYTHON_IMPORT_RULE",
- builtin_python_import_rule, 0, args );
- }
-#endif
-
-# if defined( OS_NT ) || defined( OS_CYGWIN )
- {
- char * args[] = { "key_path", ":", "data", "?", 0 };
- bind_builtin( "W32_GETREG",
- builtin_system_registry, 0, args );
- }
-
- {
- char * args[] = { "key_path", ":", "result-type", 0 };
- bind_builtin( "W32_GETREGNAMES",
- builtin_system_registry_names, 0, args );
- }
-# endif
-
- {
- char * args[] = { "command", ":", "*", 0 };
- duplicate_rule( "SHELL",
- bind_builtin( "COMMAND",
- builtin_shell, 0, args ) );
- }
-
- {
- char * args[] = { "string", 0 };
- bind_builtin( "MD5",
- builtin_md5, 0, args ) ;
- }
-
- {
- char * args[] = { "name", ":", "mode", 0 };
- bind_builtin( "FILE_OPEN",
- builtin_file_open, 0, args );
- }
-
- {
- char * args[] = { "string", ":", "width", 0 };
- bind_builtin( "PAD",
- builtin_pad, 0, args );
- }
-
- {
- char * args[] = { "targets", "*", 0 };
- bind_builtin( "PRECIOUS",
- builtin_precious, 0, args );
- }
-
- {
- char * args [] = { 0 };
- bind_builtin( "SELF_PATH", builtin_self_path, 0, args );
- }
-
- {
- char * args [] = { "path", 0 };
- bind_builtin( "MAKEDIR", builtin_makedir, 0, args );
- }
-
- /* Initialize builtin modules. */
- init_set();
- init_path();
- init_regex();
- init_property_set();
- init_sequence();
- init_order();
-}
-
-
-/*
- * builtin_calc() - CALC rule.
- *
- * The CALC rule performs simple mathematical operations on two arguments.
- */
-
-LIST * builtin_calc( PARSE * parse, FRAME * frame )
-{
- LIST * arg = lol_get( frame->args, 0 );
-
- LIST * result = 0;
- long lhs_value;
- long rhs_value;
- long result_value;
- char buffer [ 16 ];
- char const * lhs;
- char const * op;
- char const * rhs;
-
- if ( arg == 0 ) return L0;
- lhs = arg->string;
-
- arg = list_next( arg );
- if ( arg == 0 ) return L0;
- op = arg->string;
-
- arg = list_next( arg );
- if ( arg == 0 ) return L0;
- rhs = arg->string;
-
- lhs_value = atoi( lhs );
- rhs_value = atoi( rhs );
-
- if ( strcmp( "+", op ) == 0 )
- {
- result_value = lhs_value + rhs_value;
- }
- else if ( strcmp( "-", op ) == 0 )
- {
- result_value = lhs_value - rhs_value;
- }
- else
- {
- return L0;
- }
-
- sprintf( buffer, "%ld", result_value );
- result = list_new( result, newstr( buffer ) );
- return result;
-}
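A minimal sketch of how CALC is invoked from a Jamfile (variable names are hypothetical); the two operands and the operator are passed as separate list elements:

    local sum = [ CALC 2 + 3 ] ;    # sum is "5"
    local diff = [ CALC 10 - 4 ] ;  # diff is "6"
    ECHO $(sum) $(diff) ;           # prints "5 6"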
-
-
-/*
- * builtin_depends() - DEPENDS/INCLUDES rule.
- *
- * The DEPENDS/INCLUDES builtin rule appends each of the listed sources to the
- * dependency/includes list of each of the listed targets. It binds both the
- * targets and sources as TARGETs.
- */
-
-LIST * builtin_depends( PARSE * parse, FRAME * frame )
-{
- LIST * targets = lol_get( frame->args, 0 );
- LIST * sources = lol_get( frame->args, 1 );
- LIST * l;
-
- for ( l = targets; l; l = list_next( l ) )
- {
- TARGET * t = bindtarget( l->string );
-
- /* If doing INCLUDES, switch to the TARGET's include */
- /* TARGET, creating it if needed. The internal include */
- /* TARGET shares the name of its parent. */
-
- if ( parse->num )
- {
- if ( !t->includes )
- {
- t->includes = copytarget( t );
- t->includes->original_target = t;
- }
- t = t->includes;
- }
-
- t->depends = targetlist( t->depends, sources );
- }
-
- /* Enter reverse links */
- for ( l = sources; l; l = list_next( l ) )
- {
- TARGET * s = bindtarget( l->string );
- s->dependants = targetlist( s->dependants, targets );
- }
-
- return L0;
-}
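For illustration (target and file names are hypothetical), the two spellings of this builtin are used from a Jamfile like so:

    DEPENDS app.o : app.c ;    # app.o is out of date whenever app.c changes
    INCLUDES app.c : app.h ;   # whatever depends on app.c also depends on app.h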
-
-
-/*
- * builtin_rebuilds() - REBUILDS rule.
- *
- * The REBUILDS builtin rule appends each of the listed rebuild-targets in its
- * second argument to the rebuilds list of each of the listed targets in its
- * first argument.
- */
-
-LIST * builtin_rebuilds( PARSE * parse, FRAME * frame )
-{
- LIST * targets = lol_get( frame->args, 0 );
- LIST * rebuilds = lol_get( frame->args, 1 );
- LIST * l;
-
- for ( l = targets; l; l = list_next( l ) )
- {
- TARGET * t = bindtarget( l->string );
- t->rebuilds = targetlist( t->rebuilds, rebuilds );
- }
-
- return L0;
-}
-
-
-/*
- * builtin_echo() - ECHO rule.
- *
- * The ECHO builtin rule echoes the targets to the user. No other actions are
- * taken.
- */
-
-LIST * builtin_echo( PARSE * parse, FRAME * frame )
-{
- list_print( lol_get( frame->args, 0 ) );
- printf( "\n" );
- fflush( stdout );
- return L0;
-}
-
-
-/*
- * builtin_exit() - EXIT rule.
- *
- * The EXIT builtin rule echoes the targets to the user and exits the program
- * with a failure status.
- */
-
-LIST * builtin_exit( PARSE * parse, FRAME * frame )
-{
- list_print( lol_get( frame->args, 0 ) );
- printf( "\n" );
- if ( lol_get( frame->args, 1 ) )
- {
- exit( atoi( lol_get( frame->args, 1 )->string ) );
- }
- else
- {
- exit( EXITBAD ); /* yeech */
- }
- return L0;
-}
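A hedged usage sketch (the variable name is hypothetical); the optional second argument selects the process exit status:

    if ! $(TOOLSET)
    {
        EXIT "error: no toolset configured" : 2 ;   # prints the message, exits with status 2
    }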
-
-
-/*
- * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule.
- *
- * Builtin_flags() marks the target with the appropriate flag, for use by make0().
- * It binds each target as a TARGET.
- */
-
-LIST * builtin_flags( PARSE * parse, FRAME * frame )
-{
- LIST * l = lol_get( frame->args, 0 );
- for ( ; l; l = list_next( l ) )
- bindtarget( l->string )->flags |= parse->num;
- return L0;
-}
-
-
-/*
- * builtin_glob() - GLOB rule.
- */
-
-struct globbing
-{
- LIST * patterns;
- LIST * results;
- LIST * case_insensitive;
-};
-
-
-static void downcase_inplace( char * p )
-{
- for ( ; *p; ++p )
- *p = tolower( *p );
-}
-
-
-static void builtin_glob_back
-(
- void * closure,
- char * file,
- int status,
- time_t time
-)
-{
- PROFILE_ENTER( BUILTIN_GLOB_BACK );
-
- struct globbing * globbing = (struct globbing *)closure;
- LIST * l;
- PATHNAME f;
- string buf[ 1 ];
-
- /* Null out directory for matching. We wish we had file_dirscan() pass up a
- * PATHNAME.
- */
- path_parse( file, &f );
- f.f_dir.len = 0;
-
- /* For globbing, we unconditionally ignore current and parent directory
- * items. Since these items always exist, there is no reason why a caller of
- * GLOB would want to see them. We could also change file_dirscan(), but
- * then paths with embedded "." and ".." would not work anywhere.
- */
- if ( !strcmp( f.f_base.ptr, "." ) || !strcmp( f.f_base.ptr, ".." ) )
- {
- PROFILE_EXIT( BUILTIN_GLOB_BACK );
- return;
- }
-
- string_new( buf );
- path_build( &f, buf, 0 );
-
- if ( globbing->case_insensitive )
- downcase_inplace( buf->value );
-
- for ( l = globbing->patterns; l; l = l->next )
- {
- if ( !glob( l->string, buf->value ) )
- {
- globbing->results = list_new( globbing->results, newstr( file ) );
- break;
- }
- }
-
- string_free( buf );
-
- PROFILE_EXIT( BUILTIN_GLOB_BACK );
-}
-
-
-static LIST * downcase_list( LIST * in )
-{
- LIST * result = 0;
-
- string s[ 1 ];
- string_new( s );
-
- while ( in )
- {
- string_copy( s, in->string );
- downcase_inplace( s->value );
- result = list_append( result, list_new( 0, newstr( s->value ) ) );
- in = in->next;
- }
-
- string_free( s );
- return result;
-}
-
-
-LIST * builtin_glob( PARSE * parse, FRAME * frame )
-{
- LIST * l = lol_get( frame->args, 0 );
- LIST * r = lol_get( frame->args, 1 );
-
- struct globbing globbing;
-
- globbing.results = L0;
- globbing.patterns = r;
-
- globbing.case_insensitive
-# if defined( OS_NT ) || defined( OS_CYGWIN )
- = l; /* Always case-insensitive if any files can be found. */
-# else
- = lol_get( frame->args, 2 );
-# endif
-
- if ( globbing.case_insensitive )
- globbing.patterns = downcase_list( r );
-
- for ( ; l; l = list_next( l ) )
- file_dirscan( l->string, builtin_glob_back, &globbing );
-
- if ( globbing.case_insensitive )
- list_free( globbing.patterns );
-
- return globbing.results;
-}
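Illustrative usage (directory names are hypothetical): the first list names directories to scan, the second the patterns to match, and a non-empty third argument (here the literal string true) requests case-insensitive matching:

    local headers = [ GLOB include/foo include/bar : *.h *.hpp ] ;
    local any-case = [ GLOB . : *.txt : true ] ;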
-
-
-static int has_wildcards( char const * str )
-{
- size_t const index = strcspn( str, "[]*?" );
- return str[ index ] == '\0' ? 0 : 1;
-}
-
-
-/*
- * If 'file' exists, append 'file' to 'list'. Returns 'list'.
- */
-
-static LIST * append_if_exists( LIST * list, char * file )
-{
- time_t time;
- timestamp( file, &time );
- return time > 0
- ? list_new( list, newstr( file ) )
- : list;
-}
-
-
-LIST * glob1( char * dirname, char * pattern )
-{
- LIST * plist = list_new( L0, pattern );
- struct globbing globbing;
-
- globbing.results = L0;
- globbing.patterns = plist;
-
- globbing.case_insensitive
-# if defined( OS_NT ) || defined( OS_CYGWIN )
- = plist; /* always case-insensitive if any files can be found */
-# else
- = L0;
-# endif
-
- if ( globbing.case_insensitive )
- globbing.patterns = downcase_list( plist );
-
- file_dirscan( dirname, builtin_glob_back, &globbing );
-
- if ( globbing.case_insensitive )
- list_free( globbing.patterns );
-
- list_free( plist );
-
- return globbing.results;
-}
-
-
-LIST * glob_recursive( char * pattern )
-{
- LIST * result = L0;
-
- /* Check if there are metacharacters in the pattern. */
- if ( !has_wildcards( pattern ) )
- {
- /* No metacharacters. Check if the path exists. */
- result = append_if_exists(result, pattern);
- }
- else
- {
- /* Have metacharacters in the pattern. Split into dir/name. */
- PATHNAME path[ 1 ];
- path_parse( pattern, path );
-
- if ( path->f_dir.ptr )
- {
- LIST * dirs = L0;
- string dirname[ 1 ];
- string basename[ 1 ];
- string_new( dirname );
- string_new( basename );
-
- string_append_range( dirname, path->f_dir.ptr,
- path->f_dir.ptr + path->f_dir.len );
-
- path->f_grist.ptr = 0;
- path->f_grist.len = 0;
- path->f_dir.ptr = 0;
- path->f_dir.len = 0;
- path_build( path, basename, 0 );
-
- dirs = has_wildcards( dirname->value )
- ? glob_recursive( dirname->value )
- : list_new( dirs, dirname->value );
-
- if ( has_wildcards( basename->value ) )
- {
- for ( ; dirs; dirs = dirs->next )
- result = list_append( result, glob1( dirs->string,
- basename->value ) );
- }
- else
- {
- string file_string[ 1 ];
- string_new( file_string );
-
- /* No wildcard in basename. */
- for ( ; dirs; dirs = dirs->next )
- {
- path->f_dir.ptr = dirs->string;
- path->f_dir.len = strlen( dirs->string );
- path_build( path, file_string, 0 );
-
- result = append_if_exists( result, file_string->value );
-
- string_truncate( file_string, 0 );
- }
-
- string_free( file_string );
- }
-
- string_free( dirname );
- string_free( basename );
- }
- else
- {
- /* No directory, just a pattern. */
- result = list_append( result, glob1( ".", pattern ) );
- }
- }
-
- return result;
-}
-
-
-LIST * builtin_glob_recursive( PARSE * parse, FRAME * frame )
-{
- LIST * result = L0;
- LIST * l = lol_get( frame->args, 0 );
- for ( ; l; l = l->next )
- result = list_append( result, glob_recursive( l->string ) );
- return result;
-}
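Unlike GLOB, GLOB-RECURSIVELY also accepts wildcards in the directory part of each pattern; a hedged sketch:

    local sources = [ GLOB-RECURSIVELY src/*/*.cpp ] ;   # *.cpp files in each immediate subdirectory of src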
-
-
-/*
- * builtin_match() - MATCH rule, regexp matching.
- */
-
-LIST * builtin_match( PARSE * parse, FRAME * frame )
-{
- LIST * l;
- LIST * r;
- LIST * result = 0;
-
- string buf[ 1 ];
- string_new( buf );
-
- /* For each pattern */
-
- for ( l = lol_get( frame->args, 0 ); l; l = l->next )
- {
- /* Result is cached and intentionally never freed. */
- regexp * re = regex_compile( l->string );
-
- /* For each string to match against. */
- for ( r = lol_get( frame->args, 1 ); r; r = r->next )
- {
- if ( regexec( re, r->string ) )
- {
- int i;
- int top;
-
- /* Find highest parameter */
-
- for ( top = NSUBEXP; top-- > 1; )
- if ( re->startp[ top ] )
- break;
-
- /* And add all parameters up to highest onto list. */
- /* Must have parameters to have results! */
- for ( i = 1; i <= top; ++i )
- {
- string_append_range( buf, re->startp[ i ], re->endp[ i ] );
- result = list_new( result, newstr( buf->value ) );
- string_truncate( buf, 0 );
- }
- }
- }
- }
-
- string_free( buf );
- return result;
-}
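Illustrative usage (the input string is made up): each parenthesised subexpression of a matching pattern contributes one element to the result:

    local version = [ MATCH "([0-9]+)[.]([0-9]+)" : "libfoo-1.42" ] ;
    ECHO $(version) ;   # prints "1 42"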
-
-LIST * builtin_split_by_characters( PARSE * parse, FRAME * frame )
-{
- LIST * l1 = lol_get( frame->args, 0 );
- LIST * l2 = lol_get( frame->args, 1 );
-
- LIST * result = 0;
-
- char* s = strdup (l1->string);
- char* delimiters = l2->string;
- char* t;
-
- t = strtok (s, delimiters);
- while (t)
- {
- result = list_new(result, newstr(t));
- t = strtok (NULL, delimiters);
- }
-
- free (s);
-
- return result;
-}
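A minimal sketch of usage; every character of the second argument acts as a delimiter and empty tokens are dropped (strtok() semantics):

    local parts = [ SPLIT_BY_CHARACTERS "a,b;;c" : ",;" ] ;
    ECHO $(parts) ;   # prints "a b c"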
-
-LIST * builtin_hdrmacro( PARSE * parse, FRAME * frame )
-{
- LIST * l = lol_get( frame->args, 0 );
-
- for ( ; l; l = list_next( l ) )
- {
- TARGET * t = bindtarget( l->string );
-
- /* Scan file for header filename macro definitions. */
- if ( DEBUG_HEADER )
- printf( "scanning '%s' for header file macro definitions\n",
- l->string );
-
- macro_headers( t );
- }
-
- return L0;
-}
-
-
-/*
- * builtin_rulenames() - RULENAMES ( MODULE ? ).
- *
- * Returns a list of the non-local rule names in the given MODULE. If MODULE is
- * not supplied, returns the list of rule names in the global module.
- */
-
-static void add_rule_name( void * r_, void * result_ )
-{
- RULE * r = (RULE *)r_;
- LIST * * result = (LIST * *)result_;
- if ( r->exported )
- *result = list_new( *result, copystr( r->name ) );
-}
-
-
-LIST * builtin_rulenames( PARSE * parse, FRAME * frame )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- LIST * result = L0;
- module_t * source_module = bindmodule( arg0 ? arg0->string : 0 );
-
- if ( source_module->rules )
- hashenumerate( source_module->rules, add_rule_name, &result );
- return result;
-}
-
-
-/*
- * builtin_varnames() - VARNAMES ( MODULE ? ).
- *
- * Returns a list of the variable names in the given MODULE. If MODULE is not
- * supplied, returns the list of variable names in the global module.
- */
-
-/* Helper function for builtin_varnames() below. Used with hashenumerate();
- * prepends the key of each element to the result list.
- */
-static void add_hash_key( void * np, void * result_ )
-{
- LIST * * result = (LIST * *)result_;
- *result = list_new( *result, copystr( *(char * *)np ) );
-}
-
-
-static struct hash * get_running_module_vars()
-{
- struct hash * dummy;
- struct hash * vars = NULL;
- /* Get the global variables pointer (that of the currently running module).
- */
- var_hash_swap( &vars );
- dummy = vars;
- /* Put the global variables pointer in its right place. */
- var_hash_swap( &dummy );
- return vars;
-}
-
-
-LIST * builtin_varnames( PARSE * parse, FRAME * frame )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- LIST * result = L0;
- module_t * source_module = bindmodule( arg0 ? arg0->string : 0 );
-
- /* The running module _always_ has its 'variables' member set to NULL due to
- * the way enter_module() and var_hash_swap() work.
- */
- struct hash * vars = source_module == frame->module
- ? get_running_module_vars()
- : source_module->variables;
-
- if ( vars )
- hashenumerate( vars, add_hash_key, &result );
- return result;
-}
-
-
-/*
- * builtin_delete_module() - MODULE ?.
- *
- * Clears all rules and variables from the given module.
- */
-
-LIST * builtin_delete_module( PARSE * parse, FRAME * frame )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- LIST * result = L0;
- module_t * source_module = bindmodule( arg0 ? arg0->string : 0 );
- delete_module( source_module );
- return result;
-}
-
-
-static void unknown_rule( FRAME * frame, char * key, char * module_name, char * rule_name )
-{
- backtrace_line( frame->prev );
- printf( "%s error: rule \"%s\" unknown in module \"%s\"\n", key, rule_name, module_name );
- backtrace( frame->prev );
- exit( 1 );
-}
-
-
-/*
- * builtin_import() - IMPORT
- * (
- * SOURCE_MODULE ? :
- * SOURCE_RULES * :
- * TARGET_MODULE ? :
- * TARGET_RULES * :
- * LOCALIZE ?
- * )
- *
- * The IMPORT rule imports rules from the SOURCE_MODULE into the TARGET_MODULE
- * as local rules. If either SOURCE_MODULE or TARGET_MODULE is not supplied, it
- * refers to the global module. SOURCE_RULES specifies which rules from the
- * SOURCE_MODULE to import; TARGET_RULES specifies the names to give those rules
- * in TARGET_MODULE. If SOURCE_RULES contains a name which doesn't correspond to
- * a rule in SOURCE_MODULE, or if it contains a different number of items than
- * TARGET_RULES, an error is issued. If LOCALIZE is specified, the rules will be
- * executed in TARGET_MODULE, with corresponding access to its module local
- * variables.
- */
-
-LIST * builtin_import( PARSE * parse, FRAME * frame )
-{
- LIST * source_module_list = lol_get( frame->args, 0 );
- LIST * source_rules = lol_get( frame->args, 1 );
- LIST * target_module_list = lol_get( frame->args, 2 );
- LIST * target_rules = lol_get( frame->args, 3 );
- LIST * localize = lol_get( frame->args, 4 );
-
- module_t * target_module =
- bindmodule( target_module_list ? target_module_list->string : 0 );
- module_t * source_module =
- bindmodule( source_module_list ? source_module_list->string : 0 );
-
- LIST * source_name;
- LIST * target_name;
-
- for ( source_name = source_rules, target_name = target_rules;
- source_name && target_name;
- source_name = list_next( source_name ),
- target_name = list_next( target_name ) )
- {
- RULE r_;
- RULE * r = &r_;
- RULE * imported;
- r_.name = source_name->string;
-
- if ( !source_module->rules ||
- !hashcheck( source_module->rules, (HASHDATA * *)&r ) )
- unknown_rule( frame, "IMPORT", source_module->name, r_.name );
-
- imported = import_rule( r, target_module, target_name->string );
- if ( localize )
- imported->module = target_module;
- /* This rule is really part of some other module. Just refer to it here,
- * but do not let it out.
- */
- imported->exported = 0;
- }
-
- if ( source_name || target_name )
- {
- backtrace_line( frame->prev );
- printf( "import error: length of source and target rule name lists don't match!\n" );
- printf( " source: " );
- list_print( source_rules );
- printf( "\n target: " );
- list_print( target_rules );
- printf( "\n" );
- backtrace( frame->prev );
- exit( 1 );
- }
-
- return L0;
-}
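A hedged sketch (module and rule names are hypothetical): import rule 'helper' from module 'utils' into the global module under the name 'util-helper', localized so it runs with the target module's variables:

    IMPORT utils : helper : : util-helper : localize ;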
-
-
-/*
- * builtin_export() - EXPORT ( MODULE ? : RULES * ).
- *
- * The EXPORT rule marks RULES from MODULE as non-local (and thus
- * exportable). If an element of RULES does not name a rule in MODULE, an error
- * is issued.
- */
-
-LIST * builtin_export( PARSE * parse, FRAME * frame )
-{
- LIST * module_list = lol_get( frame->args, 0 );
- LIST * rules = lol_get( frame->args, 1 );
- module_t * m = bindmodule( module_list ? module_list->string : 0 );
-
- for ( ; rules; rules = list_next( rules ) )
- {
- RULE r_;
- RULE * r = &r_;
- r_.name = rules->string;
-
- if ( !m->rules || !hashcheck( m->rules, (HASHDATA * *)&r ) )
- unknown_rule( frame, "EXPORT", m->name, r_.name );
-
- r->exported = 1;
- }
- return L0;
-}
-
-
-/*
- * get_source_line() - Retrieve the file and line number that should be
- * indicated for a given procedure in debug output or an error backtrace.
- */
-
-static void get_source_line( PARSE * procedure, char * * file, int * line )
-{
- if ( procedure )
- {
- char * f = procedure->file;
- int l = procedure->line;
- if ( !strcmp( f, "+" ) )
- {
- f = "jambase.c";
- l += 3;
- }
- *file = f;
- *line = l;
- }
- else
- {
- *file = "(builtin)";
- *line = -1;
- }
-}
-
-
-void print_source_line( PARSE * p )
-{
- char * file;
- int line;
-
- get_source_line( p, &file, &line );
- if ( line < 0 )
- printf( "(builtin):" );
- else
- printf( "%s:%d:", file, line );
-}
-
-
-/*
- * backtrace_line() - print a single line of error backtrace for the given
- * frame.
- */
-
-void backtrace_line( FRAME * frame )
-{
- if ( frame == 0 )
- {
- printf( "(no frame):" );
- }
- else
- {
- print_source_line( frame->procedure );
- printf( " in %s\n", frame->rulename );
- }
-}
-
-
-/*
- * backtrace() - Print the entire backtrace from the given frame to the Jambase
- * which invoked it.
- */
-
-void backtrace( FRAME * frame )
-{
- if ( !frame ) return;
- while ( ( frame = frame->prev ) )
- backtrace_line( frame );
-}
-
-
-/*
- * builtin_backtrace() - A Jam version of the backtrace function, taking an
- * optional frame-count ('levels') argument and returning a list of quadruples
- * FILENAME LINE MODULE RULENAME describing each frame. Note that the module
- * name is always followed by a period.
- */
-
-LIST * builtin_backtrace( PARSE * parse, FRAME * frame )
-{
- LIST * levels_arg = lol_get( frame->args, 0 );
- int levels = levels_arg ? atoi( levels_arg->string ) : ( (unsigned int)(-1) >> 1 ) ;
-
- LIST * result = L0;
- for ( ; ( frame = frame->prev ) && levels ; --levels )
- {
- char * file;
- int line;
- char buf[32];
- get_source_line( frame->procedure, &file, &line );
- sprintf( buf, "%d", line );
- result = list_new( result, newstr( file ) );
- result = list_new( result, newstr( buf ) );
- result = list_new( result, newstr( frame->module->name ) );
- result = list_new( result, newstr( frame->rulename ) );
- }
- return result;
-}
-
-
-/*
- * builtin_caller_module() - CALLER_MODULE ( levels ? )
- *
- * If levels is not supplied, returns the name of the module of the rule which
- * called the one calling this one. If levels is supplied, it is interpreted as
- * an integer specifying a number of additional levels of call stack to traverse
- * in order to locate the module in question. If no such module exists, returns
- * the empty list. Also returns the empty list when the module in question is
- * the global module. This rule is needed for implementing module import
- * behavior.
- */
-
-LIST * builtin_caller_module( PARSE * parse, FRAME * frame )
-{
- LIST * levels_arg = lol_get( frame->args, 0 );
- int levels = levels_arg ? atoi( levels_arg->string ) : 0 ;
-
- int i;
- for ( i = 0; ( i < levels + 2 ) && frame->prev; ++i )
- frame = frame->prev;
-
- if ( frame->module == root_module() )
- return L0;
-
- {
- LIST * result;
- string name;
- string_copy( &name, frame->module->name );
- string_pop_back( &name );
- result = list_new( L0, newstr(name.value) );
- string_free( &name );
- return result;
- }
-}
-
-
-/*
- * Return the current working directory.
- *
- * Usage: pwd = [ PWD ] ;
- */
-
-LIST * builtin_pwd( PARSE * parse, FRAME * frame )
-{
- return pwd();
-}
-
-
-/*
- * Adds targets to the list of targets that jam will attempt to update.
- */
-
-LIST * builtin_update( PARSE * parse, FRAME * frame )
-{
- LIST * result = list_copy( L0, targets_to_update() );
- LIST * arg1 = lol_get( frame->args, 0 );
- clear_targets_to_update();
- for ( ; arg1; arg1 = list_next( arg1 ) )
- mark_target_for_updating( newstr( arg1->string ) );
- return result;
-}
-
-extern int anyhow;
-int last_update_now_status;
-
-/* Takes a list of target names as its first argument and immediately
- updates them.
- The second parameter, if specified, is the descriptor (converted to a string)
- of a log file to which all build output is redirected.
- The third parameter, if non-empty, specifies that the -n option should have
- no effect -- that is, all out-of-date targets should be rebuilt.
- The fourth parameter, if non-empty, specifies that the -q option should be
- ignored as well.
-*/
-LIST * builtin_update_now( PARSE * parse, FRAME * frame )
-{
- LIST * targets = lol_get( frame->args, 0 );
- LIST * log = lol_get( frame->args, 1 );
- LIST * force = lol_get (frame->args, 2);
- LIST * continue_ = lol_get(frame->args, 3);
- int status = 0;
- int original_stdout;
- int original_stderr;
- int n;
- int targets_count;
- const char** targets2;
- int i;
- int original_noexec;
- int original_quitquick;
-
-
- if (log)
- {
- int fd = atoi(log->string);
- /* Temporarily redirect stdout and stderr to the log file. */
- original_stdout = dup (1);
- original_stderr = dup (2);
- dup2 (fd, 1);
- dup2 (fd, 2);
- }
-
- if (force)
- {
- original_noexec = globs.noexec;
- globs.noexec = 0;
- original_quitquick = globs.quitquick;
- globs.quitquick = 0;
- }
-
- if (continue_)
- {
- original_quitquick = globs.quitquick;
- globs.quitquick = 0;
- }
-
- targets_count = list_length( targets );
- targets2 = (const char * *)BJAM_MALLOC( targets_count * sizeof( char * ) );
- for (i = 0 ; targets; targets = list_next( targets ) )
- targets2[ i++ ] = targets->string;
- status |= make( targets_count, targets2, anyhow);
- BJAM_FREE( (void *)targets2 ); /* free the array allocated above; 'targets' is NULL here */
-
- if (force)
- {
- globs.noexec = original_noexec;
- globs.quitquick = original_quitquick;
- }
-
- if (continue_)
- {
- globs.quitquick = original_quitquick;
- }
-
- if (log)
- {
- /* Flush whatever stdio might have buffered, while descriptors
- 1 and 2 still refer to the log file. */
- fflush (stdout);
- fflush (stderr);
- dup2 (original_stdout, 1);
- dup2 (original_stderr, 2);
- close (original_stdout);
- close (original_stderr);
- }
-
- last_update_now_status = status;
-
- if (status == 0)
- return list_new (L0, newstr ("ok"));
- else
- return L0;
-}
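A hedged example of driving an immediate build from a Jamfile, logging to a file opened with FILE_OPEN (target and file names are hypothetical):

    local fd = [ FILE_OPEN build.log : w ] ;
    local ok = [ UPDATE_NOW my-target : $(fd) : ignore-minus-n ] ;
    if ! $(ok)
    {
        ECHO "building my-target failed" ;
    }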
-
-LIST * builtin_search_for_target( PARSE * parse, FRAME * frame )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- LIST * arg2 = lol_get( frame->args, 1 );
- TARGET * t = search_for_target( arg1->string, arg2 );
- return list_new( L0, t->name );
-}
-
-
-LIST * builtin_import_module( PARSE * parse, FRAME * frame )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- LIST * arg2 = lol_get( frame->args, 1 );
- module_t * m = arg2 ? bindmodule( arg2->string ) : root_module();
- import_module( arg1, m );
- return L0;
-}
-
-
-LIST * builtin_imported_modules( PARSE * parse, FRAME * frame )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- return imported_modules( bindmodule( arg0 ? arg0->string : 0 ) );
-}
-
-
-LIST * builtin_instance( PARSE * parse, FRAME * frame )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- LIST * arg2 = lol_get( frame->args, 1 );
- module_t * const instance = bindmodule( arg1->string );
- module_t * const class_module = bindmodule( arg2->string );
- instance->class_module = class_module;
- return L0;
-}
-
-
-LIST * builtin_sort( PARSE * parse, FRAME * frame )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- return list_sort( arg1 );
-}
-
-
-LIST * builtin_normalize_path( PARSE * parse, FRAME * frame )
-{
- LIST * arg = lol_get( frame->args, 0 );
-
- /* First, we iterate over all '/'-separated elements, starting from the end
- * of the string. If we see '..', we remove the preceding path element. If we
- * see '.', we remove it. The removal is done by overwriting the data with '\1'
- * characters in the string. After the whole string has been processed, we do
- * a second pass, removing all the inserted '\1' characters.
- */
-
- string in[ 1 ];
- string out[ 1 ];
- /* Last character of the part of string still to be processed. */
- char * end;
- /* Working pointer. */
- char * current;
- /* Number of '..' elements seen and not processed yet. */
- int dotdots = 0;
- int rooted = 0;
- char * result = 0;
-
- /* Make a copy of input: we should not change it. Prepend a '/' before it as
- * a guard for the algorithm later on and remember whether it was originally
- * rooted or not.
- */
- string_new( in );
- string_push_back( in, '/' );
- for ( ; arg; arg = list_next( arg ) )
- {
- if ( arg->string[ 0 ] != '\0' )
- {
- if ( in->size == 1 )
- rooted = ( ( arg->string[ 0 ] == '/' ) ||
- ( arg->string[ 0 ] == '\\' ) );
- else
- string_append( in, "/" );
- string_append( in, arg->string );
- }
- }
-
- /* Convert \ into /. On Windows, paths using / and \ are equivalent, and we
- * want this function to produce a canonical representation.
- */
- for ( current = in->value, end = in->value + in->size;
- current < end; ++current )
- if ( *current == '\\' )
- *current = '/';
-
- /* Now we remove any extra path elements by overwriting them with '\1'
- * characters and count how many unused '..' path elements there are
- * remaining. Note that each remaining path element always starts with
- * a '/' character.
- */
- for ( end = in->value + in->size - 1; end >= in->value; )
- {
- /* Set 'current' to the next occurrence of '/', which always exists. */
- for ( current = end; *current != '/'; --current );
-
- if ( current == end )
- {
- /* Found a trailing or duplicate '/'. Remove it. */
- *current = '\1';
- }
- else if ( ( end - current == 1 ) && ( *(current + 1) == '.' ) )
- {
- /* Found '/.'. Remove them all. */
- *current = '\1';
- *(current + 1) = '\1';
- }
- else if ( ( end - current == 2 ) && ( *(current + 1) == '.' ) && ( *(current + 2) == '.' ) )
- {
- /* Found '/..'. Remove them all. */
- *current = '\1';
- *(current + 1) = '\1';
- *(current + 2) = '\1';
- ++dotdots;
- }
- else if ( dotdots )
- {
- memset( current, '\1', end - current + 1 );
- --dotdots;
- }
- end = current - 1;
- }
-
- string_new( out );
-
- /* Now we know that we need to add exactly dotdots '..' path elements to the
- * front and that our string is either empty or has a '/' as its first
- * significant character. If we have any dotdots remaining then the passed
- * path must not have been rooted, or else it is invalid and we return an
- * empty list.
- */
- if ( dotdots )
- {
- if ( rooted ) return L0;
- do
- string_append( out, "/.." );
- while ( --dotdots );
- }
-
- /* Now we actually remove all the path characters marked for removal. */
- for ( current = in->value; *current; ++current )
- if ( *current != '\1' )
- string_push_back( out, *current );
-
- /* Here we know that our string contains no '\1' characters and is either
- * empty or has a '/' as its initial character. If the original path was not
- * rooted and we have a non-empty path we need to drop the initial '/'. If
- * the original path was rooted and we have an empty path we need to add
- * back the '/'.
- */
- result = newstr( out->size ? out->value + !rooted : ( rooted ? "/" : "." ) );
-
- string_free( out );
- string_free( in );
-
- return list_new( 0, result );
-}
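Illustrative behaviour: the arguments are joined with '/' and then '.' and '..' elements are collapsed:

    ECHO [ NORMALIZE_PATH a/b/../c ./d ] ;   # prints "a/c/d"
    ECHO [ NORMALIZE_PATH /x/./y .. ] ;      # prints "/x"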
-
-
-LIST * builtin_native_rule( PARSE * parse, FRAME * frame )
-{
- LIST * module_name = lol_get( frame->args, 0 );
- LIST * rule_name = lol_get( frame->args, 1 );
-
- module_t * module = bindmodule( module_name->string );
-
- native_rule_t n;
- native_rule_t * np = &n;
- n.name = rule_name->string;
- if ( module->native_rules && hashcheck( module->native_rules, (HASHDATA * *)&np ) )
- {
- new_rule_body( module, np->name, np->arguments, np->procedure, 1 );
- }
- else
- {
- backtrace_line( frame->prev );
- printf( "error: no native rule \"%s\" defined in module \"%s\"\n",
- n.name, module->name );
- backtrace( frame->prev );
- exit( 1 );
- }
- return L0;
-}
-
-
-LIST * builtin_has_native_rule( PARSE * parse, FRAME * frame )
-{
- LIST * module_name = lol_get( frame->args, 0 );
- LIST * rule_name = lol_get( frame->args, 1 );
- LIST * version = lol_get( frame->args, 2 );
-
- module_t * module = bindmodule( module_name->string );
-
- native_rule_t n;
- native_rule_t * np = &n;
- n.name = rule_name->string;
- if ( module->native_rules && hashcheck( module->native_rules, (HASHDATA * *)&np ) )
- {
- int expected_version = atoi( version->string );
- if ( np->version == expected_version )
- return list_new( 0, newstr( "true" ) );
- }
- return L0;
-}
-
-
-LIST * builtin_user_module( PARSE * parse, FRAME * frame )
-{
- LIST * module_name = lol_get( frame->args, 0 );
- for ( ; module_name; module_name = module_name->next )
- {
- module_t * m = bindmodule( module_name->string );
- m->user_module = 1;
- }
- return L0;
-}
-
-
-LIST * builtin_nearest_user_location( PARSE * parse, FRAME * frame )
-{
- FRAME * nearest_user_frame =
- frame->module->user_module ? frame : frame->prev_user;
- if ( !nearest_user_frame )
- return L0;
-
- {
- LIST * result = 0;
- char * file;
- int line;
- char buf[32];
-
- get_source_line( nearest_user_frame->procedure, &file, &line );
- sprintf( buf, "%d", line );
- result = list_new( result, newstr( file ) );
- result = list_new( result, newstr( buf ) );
- return result;
- }
-}
-
-
-LIST * builtin_check_if_file( PARSE * parse, FRAME * frame )
-{
- LIST * name = lol_get( frame->args, 0 );
- return file_is_file( name->string ) == 1
- ? list_new( 0, newstr( "true" ) )
- : L0 ;
-}
-
-
-LIST * builtin_md5( PARSE * parse, FRAME * frame )
-{
- LIST * l = lol_get( frame->args, 0 );
- char* s = l->string;
-
- md5_state_t state;
- md5_byte_t digest[16];
- char hex_output[16*2 + 1];
-
- int di;
-
- md5_init(&state);
- md5_append(&state, (const md5_byte_t *)s, strlen(s));
- md5_finish(&state, digest);
-
- for (di = 0; di < 16; ++di)
- sprintf(hex_output + di * 2, "%02x", digest[di]);
-
- return list_new (0, newstr(hex_output));
-}
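For illustration, MD5 hashes its single string argument and returns the hex digest:

    ECHO [ MD5 "hello" ] ;   # prints "5d41402abc4b2a76b9719d911017c592"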
-
-LIST *builtin_file_open( PARSE *parse, FRAME *frame )
-{
- char* name = lol_get(frame->args, 0)->string;
- char* mode = lol_get(frame->args, 1)->string;
- int fd;
- char buffer[sizeof("4294967295")];
-
- if (strcmp(mode, "w") == 0)
- {
- fd = open(name, O_WRONLY|O_CREAT|O_TRUNC, 0666);
- }
- else
- {
- fd = open(name, O_RDONLY);
- }
-
- if (fd != -1)
- {
- sprintf(buffer, "%d", fd);
- return list_new(L0, newstr(buffer));
- }
- else
- {
- return L0;
- }
-}
-
-LIST *builtin_pad( PARSE *parse, FRAME *frame )
-{
- char *string = lol_get(frame->args, 0)->string;
- char *width_s = lol_get(frame->args, 1)->string;
-
- int current = strlen (string);
- int desired = atoi(width_s);
- if (current >= desired)
- return list_new (L0, string);
- else
- {
- char *buffer = malloc (desired + 1);
- int i;
- LIST *result;
-
- strcpy (buffer, string);
- for (i = current; i < desired; ++i)
- buffer[i] = ' ';
- buffer[desired] = '\0';
- result = list_new (L0, newstr (buffer));
- free (buffer);
- return result;
- }
-}
-
-LIST *builtin_precious( PARSE *parse, FRAME *frame )
-{
- LIST* targets = lol_get(frame->args, 0);
-
- for ( ; targets; targets = list_next( targets ) )
- {
- TARGET* t = bindtarget (targets->string);
- t->flags |= T_FLAG_PRECIOUS;
- }
-
- return L0;
-}
-
-LIST *builtin_self_path( PARSE *parse, FRAME *frame )
-{
- extern char *saved_argv0;
- char *p = executable_path (saved_argv0);
- if (p)
- {
- LIST* result = list_new (0, newstr (p));
- free(p);
- return result;
- }
- else
- {
- return L0;
- }
-}
-
-LIST *builtin_makedir( PARSE *parse, FRAME *frame )
-{
- LIST *path = lol_get(frame->args, 0);
-
- if (file_mkdir(path->string) == 0)
- {
- LIST *result = list_new (0, newstr(path->string));
- return result;
- }
- else
- {
- return L0;
- }
-}
-
-#ifdef HAVE_PYTHON
-
-LIST * builtin_python_import_rule( PARSE * parse, FRAME * frame )
-{
- static int first_time = 1;
- char * python_module = lol_get( frame->args, 0 )->string;
- char * python_function = lol_get( frame->args, 1 )->string;
- char * jam_module = lol_get( frame->args, 2 )->string;
- char * jam_rule = lol_get( frame->args, 3 )->string;
-
- PyObject * pName;
- PyObject * pModule;
- PyObject * pDict;
- PyObject * pFunc;
-
- if ( first_time )
- {
- /* At the first invocation, we add the value of the global
- * EXTRA_PYTHONPATH to the sys.path Python variable.
- */
- LIST * extra = 0;
- module_t * outer_module = frame->module;
-
- first_time = 0;
-
- if ( outer_module != root_module() )
- {
- exit_module( outer_module );
- enter_module( root_module() );
- }
-
- extra = var_get( "EXTRA_PYTHONPATH" );
-
- if ( outer_module != root_module() )
- {
- exit_module( root_module() );
- enter_module( outer_module );
- }
-
- for ( ; extra; extra = extra->next )
- {
- string buf[ 1 ];
- string_new( buf );
- string_append( buf, "import sys\nsys.path.append(\"" );
- string_append( buf, extra->string );
- string_append( buf, "\")\n" );
- PyRun_SimpleString( buf->value );
- string_free( buf );
- }
- }
-
- pName = PyString_FromString( python_module );
- pModule = PyImport_Import( pName );
- Py_DECREF( pName );
-
- if ( pModule != NULL )
- {
- pDict = PyModule_GetDict( pModule );
- pFunc = PyDict_GetItemString( pDict, python_function );
-
- if ( pFunc && PyCallable_Check( pFunc ) )
- {
- module_t * m = bindmodule( jam_module );
- RULE * r = bindrule( jam_rule, m );
-
- /* Make pFunc owned. */
- Py_INCREF( pFunc );
-
- r->python_function = pFunc;
- }
- else
- {
- if ( PyErr_Occurred() )
- PyErr_Print();
- fprintf( stderr, "Cannot find function \"%s\"\n", python_function );
- }
- Py_DECREF( pModule );
- }
- else
- {
- PyErr_Print();
- fprintf( stderr, "Failed to load \"%s\"\n", python_module );
- }
- return L0;
-
-}
-
-#endif
-
-void lol_build( LOL * lol, char * * elements )
-{
- LIST * l = L0;
- lol_init( lol );
-
- while ( elements && *elements )
- {
- if ( !strcmp( *elements, ":" ) )
- {
- lol_add( lol, l );
- l = L0 ;
- }
- else
- {
- l = list_new( l, newstr( *elements ) );
- }
- ++elements;
- }
-
- if ( l != L0 )
- lol_add( lol, l );
-}
-
-
-#ifdef HAVE_PYTHON
-
-/*
- * Calls the bjam rule specified by the name passed in 'args'. The name is
- * looked up in the context of bjam's 'python_interface' module. Returns the
- * list of strings returned by the rule.
- */
-
-PyObject* bjam_call( PyObject * self, PyObject * args )
-{
- FRAME inner[ 1 ];
- LIST * result;
- PARSE * p;
- char * rulename;
-
- /* Build up the list of arg lists. */
- frame_init( inner );
- inner->prev = 0;
- inner->prev_user = 0;
- inner->module = bindmodule( "python_interface" );
- inner->procedure = 0;
-
- /* Extract the rule name and arguments from 'args'. */
-
- /* PyTuple_GetItem returns borrowed reference. */
- rulename = PyString_AsString( PyTuple_GetItem( args, 0 ) );
- {
- int i = 1;
- int size = PyTuple_Size( args );
- for ( ; i < size; ++i )
- {
- PyObject * a = PyTuple_GetItem( args, i );
- if ( PyString_Check( a ) )
- {
- lol_add( inner->args, list_new( 0, newstr(
- PyString_AsString( a ) ) ) );
- }
- else if ( PySequence_Check( a ) )
- {
- LIST * l = 0;
- int s = PySequence_Size( a );
- int i = 0;
- for ( ; i < s; ++i )
- {
- /* PySequence_GetItem returns new reference. */
- PyObject * e = PySequence_GetItem( a, i );
- char * s = PyString_AsString( e );
- if ( !s )
- {
- printf( "Invalid parameter type passed from Python\n" );
- exit( 1 );
- }
- l = list_new( l, newstr( s ) );
- Py_DECREF( e );
- }
- lol_add( inner->args, l );
- }
- }
- }
-
- result = evaluate_rule( rulename, inner );
-
- frame_free( inner );
-
- /* Convert the bjam list into a Python list result. */
- {
- PyObject * pyResult = PyList_New( list_length( result ) );
- int i = 0;
- while ( result )
- {
- PyList_SetItem( pyResult, i, PyString_FromString( result->string ) );
- result = list_next( result );
- i += 1;
- }
- list_free( result );
- return pyResult;
- }
-}
-
-
-/*
- * Accepts four arguments:
- * - module name,
- * - rule name,
- * - Python callable,
- * - (optional) bjam language function signature.
- * Creates a bjam rule with the specified name in the specified module, which
- * will invoke the Python callable.
- */
-
-PyObject * bjam_import_rule( PyObject * self, PyObject * args )
-{
- char * module;
- char * rule;
- PyObject * func;
- PyObject * bjam_signature = NULL;
- module_t * m;
- RULE * r;
-
- if ( !PyArg_ParseTuple( args, "ssO|O:import_rule",
- &module, &rule, &func, &bjam_signature ) )
- return NULL;
-
- if ( !PyCallable_Check( func ) )
- {
- PyErr_SetString( PyExc_RuntimeError,
- "Non-callable object passed to bjam.import_rule" );
- return NULL;
- }
-
- m = bindmodule( *module ? module : 0 );
- r = bindrule( rule, m );
-
- /* Make pFunc owned. */
- Py_INCREF( func );
-
- r->python_function = func;
- r->arguments = 0;
-
- if (bjam_signature)
- {
- argument_list * arg_list = args_new();
- Py_ssize_t i;
-
- Py_ssize_t s = PySequence_Size (bjam_signature);
- for (i = 0; i < s; ++i)
- {
- PyObject* v = PySequence_GetItem (bjam_signature, i);
- lol_add(arg_list->data, list_from_python (v));
- Py_DECREF(v);
- }
- r->arguments = arg_list;
- }
-
- Py_INCREF( Py_None );
- return Py_None;
-}
-
-
-/*
- * Accepts four arguments:
- * - an action name
- * - an action body
- * - a list of variables that will be bound inside the action,
- * - integer flags.
- * Defines an action on the bjam side.
- */
-
-PyObject * bjam_define_action( PyObject * self, PyObject * args )
-{
- char * name;
- char * body;
- module_t * m;
- PyObject * bindlist_python;
- int flags;
- LIST * bindlist = L0;
- int n;
- int i;
-
- if ( !PyArg_ParseTuple( args, "ssO!i:define_action", &name, &body,
- &PyList_Type, &bindlist_python, &flags ) )
- return NULL;
-
- n = PyList_Size( bindlist_python );
- for ( i = 0; i < n; ++i )
- {
- PyObject * next = PyList_GetItem( bindlist_python, i );
- if ( !PyString_Check( next ) )
- {
- PyErr_SetString( PyExc_RuntimeError,
- "bind list has non-string type" );
- return NULL;
- }
- bindlist = list_new( bindlist, newstr( PyString_AsString( next ) ) );
- }
-
- new_rule_actions( root_module(), name, newstr( body ), bindlist, flags );
-
- Py_INCREF( Py_None );
- return Py_None;
-}
-
-
-/*
- * Returns the value of a variable in root Jam module.
- */
-
-PyObject * bjam_variable( PyObject * self, PyObject * args )
-{
- char * name;
- LIST * value;
- PyObject * result;
- int i;
-
- if ( !PyArg_ParseTuple( args, "s", &name ) )
- return NULL;
-
- enter_module( root_module() );
- value = var_get( name );
- exit_module( root_module() );
-
- result = PyList_New( list_length( value ) );
- for ( i = 0; value; value = list_next( value ), ++i )
- PyList_SetItem( result, i, PyString_FromString( value->string ) );
-
- return result;
-}
-
-
-PyObject * bjam_backtrace( PyObject * self, PyObject * args )
-{
- PyObject * result = PyList_New( 0 );
- struct frame * f = frame_before_python_call;
-
- for ( ; f = f->prev; )
- {
- PyObject * tuple = PyTuple_New( 4 );
- char * file;
- int line;
- char buf[ 32 ];
-
- get_source_line( f->procedure, &file, &line );
- sprintf( buf, "%d", line );
-
- /* PyTuple_SetItem steals reference. */
- PyTuple_SetItem( tuple, 0, PyString_FromString( file ) );
- PyTuple_SetItem( tuple, 1, PyString_FromString( buf ) );
- PyTuple_SetItem( tuple, 2, PyString_FromString( f->module->name ) );
- PyTuple_SetItem( tuple, 3, PyString_FromString( f->rulename ) );
-
- PyList_Append( result, tuple );
- Py_DECREF( tuple );
- }
- return result;
-}
-
-PyObject * bjam_caller( PyObject * self, PyObject * args )
-{
- PyObject *result = PyString_FromString(
- frame_before_python_call->prev->module->name);
- return result;
-}
-
-#endif /* #ifdef HAVE_PYTHON */
-
-
-#ifdef HAVE_POPEN
-
-#if defined(_MSC_VER) || defined(__BORLANDC__)
- #define popen windows_popen_wrapper
- #define pclose _pclose
-
- /*
- * This wrapper is a workaround for a funny _popen() feature on Windows
- * where it eats external quotes in some cases. The bug seems to be related
- * to the quote stripping functionality used by the Windows cmd.exe
- * interpreter when its /S is not specified.
- *
- * Cleaned up quote from the cmd.exe help screen as displayed on Windows XP
- * SP3:
- *
- * 1. If all of the following conditions are met, then quote characters on
- * the command line are preserved:
- *
- * - no /S switch
- * - exactly two quote characters
- * - no special characters between the two quote characters, where
- * special is one of: &<>()@^|
- * - there are one or more whitespace characters between the two quote
- * characters
- * - the string between the two quote characters is the name of an
- * executable file.
- *
- * 2. Otherwise, old behavior is to see if the first character is a quote
- * character and if so, strip the leading character and remove the last
- * quote character on the command line, preserving any text after the
- * last quote character.
- *
- * This causes some commands containing quotes not to be executed correctly.
- * For example:
- *
- * "\Long folder name\aaa.exe" --name="Jurko" --no-surname
- *
- * would get its outermost quotes stripped and would be executed as:
- *
- * \Long folder name\aaa.exe" --name="Jurko --no-surname
- *
- * which would report an error about '\Long' not being a valid command.
- *
- * cmd.exe help seems to indicate it would be enough to add an extra space
- * character in front of the command to avoid this but this does not work,
- * most likely due to the shell first stripping all leading whitespace
- * characters from the command.
- *
- * The solution implemented here is to quote the whole command in case it
- * contains any quote characters. Note though that this will not work correctly
- * should Windows ever 'fix' this feature.
- * (03.06.2008.) (Jurko)
- */
- static FILE * windows_popen_wrapper( char * command, char * mode )
- {
- int extra_command_quotes_needed = ( strchr( command, '"' ) != 0 );
- string quoted_command;
- FILE * result;
-
- if ( extra_command_quotes_needed )
- {
- string_new( &quoted_command );
- string_append( &quoted_command, "\"" );
- string_append( &quoted_command, command );
- string_append( &quoted_command, "\"" );
- command = quoted_command.value;
- }
-
- result = _popen( command, "r" );
-
- if ( extra_command_quotes_needed )
- string_free( &quoted_command );
-
- return result;
- }
-#endif
-
-
-static char * rtrim(char *s)
-{
- char *p = s;
- while(*p) ++p;
- for(--p; p >= s && isspace(*p); *p-- = 0);
- return s;
-}
-
-LIST * builtin_shell( PARSE * parse, FRAME * frame )
-{
- LIST * command = lol_get( frame->args, 0 );
- LIST * result = 0;
- string s;
- int ret;
- char buffer[ 1024 ];
- FILE * p = NULL;
- int exit_status = -1;
- int exit_status_opt = 0;
- int no_output_opt = 0;
- int strip_eol_opt = 0;
-
- /* Process the variable args options. */
- {
- int a = 1;
- LIST * arg = lol_get( frame->args, a );
- while ( arg )
- {
- if ( strcmp( "exit-status", arg->string ) == 0 )
- {
- exit_status_opt = 1;
- }
- else if ( strcmp( "no-output", arg->string ) == 0 )
- {
- no_output_opt = 1;
- }
- else if ( strcmp("strip-eol", arg->string) == 0 )
- {
- strip_eol_opt = 1;
- }
- arg = lol_get( frame->args, ++a );
- }
- }
-
- /* The following fflush() call seems to be indicated as a workaround for a
- * popen() bug on POSIX implementations related to synchronizing input
- * stream positions for the called and the calling process.
- */
- fflush( NULL );
-
- p = popen( command->string, "r" );
- if ( p == NULL )
- return L0;
-
- string_new( &s );
-
- while ( ( ret = fread( buffer, sizeof( char ), sizeof( buffer ) - 1, p ) ) > 0 )
- {
- buffer[ret] = 0;
- if ( !no_output_opt )
- {
- if ( strip_eol_opt )
- rtrim(buffer);
- string_append( &s, buffer );
- }
- }
-
- exit_status = pclose( p );
-
- /* The command output is returned first. */
- result = list_new( L0, newstr( s.value ) );
- string_free( &s );
-
- /* The command exit result next. */
- if ( exit_status_opt )
- {
- if ( WIFEXITED(exit_status) )
- exit_status = WEXITSTATUS(exit_status);
- else
- exit_status = -1;
- sprintf( buffer, "%d", exit_status );
- result = list_new( result, newstr( buffer ) );
- }
-
- return result;
-}
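Illustrative usage (the commands are arbitrary); the optional arguments control whether the exit status is appended, output suppressed, or trailing newlines stripped:

    local user = [ SHELL whoami : strip-eol ] ;
    local out-and-status = [ SHELL "ls /nonexistent" : exit-status ] ;
    # $(out-and-status[1]) is the captured output, $(out-and-status[2]) the exit code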
-
-#else /* #ifdef HAVE_POPEN */
-
-LIST * builtin_shell( PARSE * parse, FRAME * frame )
-{
- return L0;
-}
-
-#endif /* #ifdef HAVE_POPEN */
diff --git a/jam-files/engine/builtins.h b/jam-files/engine/builtins.h
deleted file mode 100644
index 5fed07c9..00000000
--- a/jam-files/engine/builtins.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#ifndef JAM_BUILTINS_H
-# define JAM_BUILTINS_H
-
-# include "frames.h"
-
-/*
- * builtins.h - compile parsed jam statements
- */
-
-void load_builtins();
-void init_set();
-void init_path();
-void init_regex();
-void init_property_set();
-void init_sequence();
-void init_order();
-
-LIST *builtin_calc( PARSE *parse, FRAME *args );
-LIST *builtin_depends( PARSE *parse, FRAME *args );
-LIST *builtin_rebuilds( PARSE *parse, FRAME *args );
-LIST *builtin_echo( PARSE *parse, FRAME *args );
-LIST *builtin_exit( PARSE *parse, FRAME *args );
-LIST *builtin_flags( PARSE *parse, FRAME *args );
-LIST *builtin_glob( PARSE *parse, FRAME *args );
-LIST *builtin_glob_recursive( PARSE *parse, FRAME *frame );
-LIST *builtin_subst( PARSE *parse, FRAME *args );
-LIST *builtin_match( PARSE *parse, FRAME *args );
-LIST *builtin_split_by_characters( PARSE *parse, FRAME *args );
-LIST *builtin_hdrmacro( PARSE *parse, FRAME *args );
-LIST *builtin_rulenames( PARSE *parse, FRAME *args );
-LIST *builtin_varnames( PARSE *parse, FRAME *args );
-LIST *builtin_delete_module( PARSE *parse, FRAME *args );
-LIST *builtin_import( PARSE *parse, FRAME *args );
-LIST *builtin_export( PARSE *parse, FRAME *args );
-LIST *builtin_caller_module( PARSE *parse, FRAME *args );
-LIST *builtin_backtrace( PARSE *parse, FRAME *args );
-LIST *builtin_pwd( PARSE *parse, FRAME *args );
-LIST *builtin_update( PARSE *parse, FRAME *args );
-LIST *builtin_update_now( PARSE *parse, FRAME *args );
-LIST *builtin_search_for_target( PARSE *parse, FRAME *args );
-LIST *builtin_import_module( PARSE *parse, FRAME *args );
-LIST *builtin_imported_modules( PARSE *parse, FRAME *frame );
-LIST *builtin_instance( PARSE *parse, FRAME *frame );
-LIST *builtin_sort( PARSE *parse, FRAME *frame );
-LIST *builtin_normalize_path( PARSE *parse, FRAME *frame );
-LIST *builtin_native_rule( PARSE *parse, FRAME *frame );
-LIST *builtin_has_native_rule( PARSE *parse, FRAME *frame );
-LIST *builtin_user_module( PARSE *parse, FRAME *frame );
-LIST *builtin_nearest_user_location( PARSE *parse, FRAME *frame );
-LIST *builtin_check_if_file( PARSE *parse, FRAME *frame );
-LIST *builtin_python_import_rule( PARSE *parse, FRAME *frame );
-LIST *builtin_shell( PARSE *parse, FRAME *frame );
-LIST *builtin_md5( PARSE *parse, FRAME *frame );
-LIST *builtin_file_open( PARSE *parse, FRAME *frame );
-LIST *builtin_pad( PARSE *parse, FRAME *frame );
-LIST *builtin_precious( PARSE *parse, FRAME *frame );
-LIST *builtin_self_path( PARSE *parse, FRAME *frame );
-LIST *builtin_makedir( PARSE *parse, FRAME *frame );
-
-void backtrace( FRAME *frame );
-extern int last_update_now_status;
-
-#endif
diff --git a/jam-files/engine/bump_version.py b/jam-files/engine/bump_version.py
deleted file mode 100644
index 9423c4c7..00000000
--- a/jam-files/engine/bump_version.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python
-
-# This script is used to bump the version of bjam. It takes a single argument, e.g.
-#
-# ./bump_version.py 3.1.9
-#
-# and updates all necessary files. For the time being, it assumes the presence
-# of a 'perl' executable and the Debian-specific 'dch' executable.
-#
-
-
-import os
-import os.path
-import re
-import string
-import sys
-
-srcdir = os.path.abspath(os.path.dirname(__file__ ))
-docdir = os.path.abspath(os.path.join(srcdir,"..","doc"))
-
-def edit(file,replacements):
- print " '%s'..." %(file)
- text = open(file,'r').read()
- while len(replacements) > 0:
- #~ print " '%s' ==> '%s'" % (replacements[0],replacements[1])
- text = re.compile(replacements[0],re.M).subn(replacements[1],text)[0]
- replacements = replacements[2:]
- #~ print text
- open(file,'w').write(text)
-
-def make_edits(version):
- edit(os.path.join(srcdir,"boost-jam.spec"), [
- '^Version:.*$','Version: %s' % string.join(version, "."),
- ])
-
- edit(os.path.join(srcdir,"build.jam"), [
- '^_VERSION_ = .* ;$','_VERSION_ = %s %s %s ;' % (version[0], version[1], version[2]),
- ])
-
- edit(os.path.join(docdir,"bjam.qbk"), [
- '\[version.*\]','[version: %s]' % string.join(version, '.'),
- '\[def :version:.*\]','[def :version: %s]' % string.join(version, '.'),
- ])
-
- edit(os.path.join(srcdir,"patchlevel.h"), [
- '^#define VERSION_MAJOR .*$',
- '#define VERSION_MAJOR %s' % (version[0]),
- '^#define VERSION_MINOR .*$',
- '#define VERSION_MINOR %s' % (version[1]),
- '^#define VERSION_PATCH .*$',
- '#define VERSION_PATCH %s' % (version[2]),
- '^#define VERSION_MAJOR_SYM .*$',
- '#define VERSION_MAJOR_SYM "0%s"' % (version[0]),
- '^#define VERSION_MINOR_SYM .*$',
- '#define VERSION_MINOR_SYM "%s"' % (version[1]),
- '^#define VERSION_PATCH_SYM .*$',
- '#define VERSION_PATCH_SYM "%s"' % (version[2]),
- '^#define VERSION .*$',
- '#define VERSION "%s"' % string.join(version, '.'),
- '^#define JAMVERSYM .*$',
- '#define JAMVERSYM "JAMVERSION=%s.%s"' % (version[0],version[1]),
- ])
-
-def main():
-
- if len(sys.argv) < 2:
- print "Expect new version as argument"
- sys.exit(1)
-
- version = string.split(sys.argv[1], ".")
- print "Setting version to", version
- make_edits(version)
-
-if __name__ == '__main__':
- main()
-
-#~ Copyright 2006 Rene Rivera.
-#~ Copyright 2005-2006 Vladimir Prus.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
diff --git a/jam-files/engine/class.c b/jam-files/engine/class.c
deleted file mode 100644
index ff4ec568..00000000
--- a/jam-files/engine/class.c
+++ /dev/null
@@ -1,141 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "class.h"
-#include "strings.h"
-#include "variable.h"
-#include "frames.h"
-#include "rules.h"
-#include "newstr.h"
-
-#include "hash.h"
-
-
-static struct hash * classes = 0;
-
-
-static void check_defined( LIST * class_names )
-{
- for ( ; class_names; class_names = class_names->next )
- {
- char * * p = &class_names->string;
- if ( !hashcheck( classes, (HASHDATA * *)&p ) )
- {
- printf( "Class %s is not defined\n", class_names->string );
- abort();
- }
- }
-}
-
-
-static char * class_module_name( char * declared_name )
-{
- string name[ 1 ];
- char * result;
-
- string_new( name );
- string_append( name, "class@" );
- string_append( name, declared_name );
-
- result = newstr( name->value );
- string_free( name );
-
- return result;
-}
-
-
-struct import_base_data
-{
- char * base_name;
- module_t * base_module;
- module_t * class_module;
-};
-
-
-static void import_base_rule( void * r_, void * d_ )
-{
- RULE * r = (RULE *)r_;
- RULE * ir1;
- RULE * ir2;
- struct import_base_data * d = (struct import_base_data *)d_;
- string qualified_name[ 1 ];
-
- string_new ( qualified_name );
- string_append ( qualified_name, d->base_name );
- string_push_back( qualified_name, '.' );
- string_append ( qualified_name, r->name );
-
- ir1 = import_rule( r, d->class_module, r->name );
- ir2 = import_rule( r, d->class_module, qualified_name->value );
-
- /* Copy 'exported' flag. */
- ir1->exported = ir2->exported = r->exported;
-
- /* If we are importing a class method, localize it. */
- if ( ( r->module == d->base_module ) || ( r->module->class_module &&
- ( r->module->class_module == d->base_module ) ) )
- ir1->module = ir2->module = d->class_module;
-
- string_free( qualified_name );
-}
-
-
-/*
- * For each exported rule 'n', declared in class module for base, imports that
- * rule in 'class' as 'n' and as 'base.n'. Imported rules are localized and
- * marked as exported.
- */
-
-static void import_base_rules( module_t * class, char * base )
-{
- module_t * base_module = bindmodule( class_module_name( base ) );
- struct import_base_data d;
- d.base_name = base;
- d.base_module = base_module;
- d.class_module = class;
-
- if ( base_module->rules )
- hashenumerate( base_module->rules, import_base_rule, &d );
-
- import_module( imported_modules( base_module ), class );
-}
-
-
-char * make_class_module( LIST * xname, LIST * bases, FRAME * frame )
-{
- char * name = class_module_name( xname->string );
- char * * pp = &xname->string;
- module_t * class_module = 0;
- module_t * outer_module = frame->module;
-
- if ( !classes )
- classes = hashinit( sizeof( char * ), "classes" );
-
- if ( hashcheck( classes, (HASHDATA * *)&pp ) )
- {
- printf( "Class %s already defined\n", xname->string );
- abort();
- }
- else
- {
- hashenter( classes, (HASHDATA * *)&pp );
- }
- check_defined( bases );
-
- class_module = bindmodule( name );
-
- exit_module( outer_module );
- enter_module( class_module );
-
- var_set( "__name__", xname, VAR_SET );
- var_set( "__bases__", bases, VAR_SET );
-
- exit_module( class_module );
- enter_module( outer_module );
-
- for ( ; bases; bases = bases->next )
- import_base_rules( class_module, bases->string );
-
- return name;
-}
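A hedged sketch of the Jam-level 'class' construct this function implements (class and rule names are hypothetical); the class body becomes a module named class@greeter, and rules of any listed base classes are imported into it both as 'n' and as 'base.n':

    class greeter
    {
        rule greet ( name )
        {
            ECHO hello, $(name) ;
        }
    }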
diff --git a/jam-files/engine/class.h b/jam-files/engine/class.h
deleted file mode 100644
index f7faeff6..00000000
--- a/jam-files/engine/class.h
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#ifndef CLASS_H_VP_2003_08_01
-#define CLASS_H_VP_2003_08_01
-
-#include "lists.h"
-#include "frames.h"
-
-char* make_class_module(LIST* xname, LIST* bases, FRAME* frame);
-
-#endif
diff --git a/jam-files/engine/command.c b/jam-files/engine/command.c
deleted file mode 100644
index d2ea0681..00000000
--- a/jam-files/engine/command.c
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * command.c - maintain lists of commands
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "rules.h"
-
-#include "command.h"
-#include <limits.h>
-#include <string.h>
-
-
-/*
- * cmd_new() - return a new CMD or 0 if too many args
- */
-
-CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell )
-{
- CMD * cmd = (CMD *)BJAM_MALLOC( sizeof( CMD ) );
- /* Lift line-length limitation entirely when JAMSHELL is just "%". */
- int no_limit = ( shell && !strcmp(shell->string,"%") && !list_next(shell) );
- int max_line = MAXLINE;
- int allocated = -1;
-
- cmd->rule = rule;
- cmd->shell = shell;
- cmd->next = 0;
-
- lol_init( &cmd->args );
- lol_add( &cmd->args, targets );
- lol_add( &cmd->args, sources );
- cmd->buf = 0;
-
- do
- {
- BJAM_FREE( cmd->buf ); /* free any buffer from previous iteration */
-
- cmd->buf = (char*)BJAM_MALLOC_ATOMIC( max_line + 1 );
-
- if ( cmd->buf == 0 )
- break;
-
- allocated = var_string( rule->actions->command, cmd->buf, max_line, &cmd->args );
-
- max_line = max_line * 2;
- }
- while ( ( allocated < 0 ) && ( max_line < INT_MAX / 2 ) );
-
- if ( !no_limit )
- {
- /* Bail if the result will not fit in MAXLINE. */
- char * s = cmd->buf;
- while ( *s )
- {
- size_t l = strcspn( s, "\n" );
-
- if ( l > MAXLINE )
- {
- /* We do not free targets/sources/shell if bailing. */
- cmd_free( cmd );
- return 0;
- }
-
- s += l;
- if ( *s )
- ++s;
- }
- }
-
- return cmd;
-}
-
-
-/*
- * cmd_free() - free a CMD
- */
-
-void cmd_free( CMD * cmd )
-{
- lol_free( &cmd->args );
- list_free( cmd->shell );
- BJAM_FREE( cmd->buf );
- BJAM_FREE( (char *)cmd );
-}
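/* Illustrative sketch (not from the original sources): the grow-until-it-fits
 * pattern used by cmd_new() above. format_into() is a hypothetical stand-in
 * for var_string(); a negative return means "buffer too small". */
#include <stdlib.h>
#include <limits.h>

static char * grow_until_fit( int (*format_into)( char * buf, int size ) )
{
    int size = 1024;   /* stand-in for MAXLINE */
    int written = -1;
    char * buf = 0;

    do
    {
        free( buf );                      /* discard the too-small attempt */
        buf = (char *)malloc( size + 1 );
        if ( !buf )
            break;
        written = format_into( buf, size );
        size *= 2;                        /* double and retry on failure */
    }
    while ( ( written < 0 ) && ( size < INT_MAX / 2 ) );

    if ( written < 0 )
    {
        free( buf );
        return 0;
    }
    return buf;
}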
diff --git a/jam-files/engine/command.h b/jam-files/engine/command.h
deleted file mode 100644
index ddd38e68..00000000
--- a/jam-files/engine/command.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 1994 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * command.h - the CMD structure and routines to manipulate them
- *
- * Both ACTION and CMD contain a rule, targets, and sources. An
- * ACTION describes a rule to be applied to the given targets and
- * sources; a CMD is what actually gets executed by the shell. The
- * differences are due to:
- *
- * ACTIONS must be combined if 'actions together' is given.
- * ACTIONS must be split if 'actions piecemeal' is given.
- * ACTIONS must have current sources omitted for 'actions updated'.
- *
- * The CMD datatype holds a single command that is to be executed
- * against a target, and they can chain together to represent the
- * full collection of commands used to update a target.
- *
- * Structures:
- *
- * CMD - an action, ready to be formatted into a buffer and executed.
- *
- * External routines:
- *
- * cmd_new() - return a new CMD or 0 if too many args.
- * cmd_free() - delete CMD and its parts.
- * cmd_next() - walk the CMD chain.
- */
-
-
-/*
- * CMD - an action, ready to be formatted into a buffer and executed.
- */
-
-typedef struct _cmd CMD;
-
-struct _cmd
-{
- CMD * next;
- CMD * tail; /* valid only in the head */
- RULE * rule; /* rule->actions contains shell script */
- LIST * shell; /* $(SHELL) value */
- LOL args; /* LISTs for $(<), $(>) */
- char * buf; /* actual commands */
-};
-
-CMD * cmd_new
-(
- RULE * rule, /* rule (referenced) */
- LIST * targets, /* $(<) (freed) */
- LIST * sources, /* $(>) (freed) */
- LIST * shell /* $(SHELL) (freed) */
-);
-
-void cmd_free( CMD * );
-
-#define cmd_next( c ) ( ( c )->next )
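/* Illustrative sketch (not part of the original header): walking a CMD chain
 * with the cmd_next() macro declared above. The run_one callback is
 * hypothetical; the real engine hands each formatted buffer to its exec layer. */
static void sketch_run_all( CMD * head, void (*run_one)( const char * ) )
{
    CMD * c;
    for ( c = head; c; c = cmd_next( c ) )
        run_one( c->buf );   /* buf holds the fully expanded command text */
}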
diff --git a/jam-files/engine/compile.c b/jam-files/engine/compile.c
deleted file mode 100644
index 2c049aae..00000000
--- a/jam-files/engine/compile.c
+++ /dev/null
@@ -1,1424 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-
-# include "lists.h"
-# include "parse.h"
-# include "compile.h"
-# include "variable.h"
-# include "expand.h"
-# include "rules.h"
-# include "newstr.h"
-# include "make.h"
-# include "search.h"
-# include "hdrmacro.h"
-# include "hash.h"
-# include "modules.h"
-# include "strings.h"
-# include "builtins.h"
-# include "class.h"
-
-# include <assert.h>
-# include <string.h>
-# include <stdarg.h>
-
-/*
- * compile.c - compile parsed jam statements
- *
- * External routines:
- *
- * compile_append() - append list results of two statements
- * compile_eval() - evaluate if to determine which leg to compile
- * compile_foreach() - compile the "for x in y" statement
- * compile_if() - compile 'if' rule
- * compile_while() - compile 'while' rule
- * compile_include() - support for 'include' - call include() on file
- * compile_list() - expand and return a list
- * compile_local() - declare (and set) local variables
- * compile_null() - do nothing -- a stub for parsing
- * compile_on() - run rule under influence of on-target variables
- * compile_rule() - compile a single user defined rule
- * compile_rules() - compile a chain of rules
- * compile_set() - compile the "set variable" statement
- * compile_setcomp() - support for `rule` - save parse tree
- * compile_setexec() - support for `actions` - save execution string
- * compile_settings() - compile the "on =" (set variable on exec) statement
- * compile_switch() - compile 'switch' rule
- *
- * Internal routines:
- *
- * debug_compile() - printf with indent to show rule expansion.
- * evaluate_rule() - execute a rule invocation
- *
- * builtin_depends() - DEPENDS/INCLUDES rule
- * builtin_echo() - ECHO rule
- * builtin_exit() - EXIT rule
- * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
- *
- * 02/03/94 (seiwald) - Changed trace output to read "setting" instead of
- * the awkward sounding "settings".
- * 04/12/94 (seiwald) - Combined build_depends() with build_includes().
- * 04/12/94 (seiwald) - actionlist() now just appends a single action.
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 05/13/94 (seiwald) - include files are now bound as targets, and thus
- * can make use of $(SEARCH)
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 01/22/95 (seiwald) - Exit rule.
- * 02/02/95 (seiwald) - Always rule; LEAVES rule.
- * 02/14/95 (seiwald) - NoUpdate rule.
- * 09/11/00 (seiwald) - new evaluate_rule() for headers().
- * 09/11/00 (seiwald) - compile_xxx() now return LIST *.
- * New compile_append() and compile_list() in
- * support of building lists here, rather than
- * in jamgram.yy.
- * 01/10/00 (seiwald) - built-ins split out to builtin.c.
- */
-
-static void debug_compile( int which, char *s, FRAME* frame );
-int glob( char *s, char *c );
-/* Internal functions from builtins.c */
-void backtrace( FRAME *frame );
-void backtrace_line( FRAME *frame );
-void print_source_line( PARSE* p );
-
-struct frame * frame_before_python_call;
-
-void frame_init( FRAME* frame )
-{
- frame->prev = 0;
- frame->prev_user = 0;
- lol_init(frame->args);
- frame->module = root_module();
- frame->rulename = "module scope";
- frame->procedure = 0;
-}
-
-
-void frame_free( FRAME* frame )
-{
- lol_free( frame->args );
-}
-
-
-/*
- * compile_append() - append list results of two statements
- *
- * parse->left more compile_append() by left-recursion
- * parse->right single rule
- */
-
-LIST * compile_append( PARSE * parse, FRAME * frame )
-{
- /* Append right to left. */
- return list_append(
- parse_evaluate( parse->left, frame ),
- parse_evaluate( parse->right, frame ) );
-}
-
-
-/*
- * compile_eval() - evaluate if to determine which leg to compile
- *
- * Returns:
- * list if expression true - compile 'then' clause
- * L0 if expression false - compile 'else' clause
- */
-
-static int lcmp( LIST * t, LIST * s )
-{
- int status = 0;
-
- while ( !status && ( t || s ) )
- {
- char *st = t ? t->string : "";
- char *ss = s ? s->string : "";
-
- status = strcmp( st, ss );
-
- t = t ? list_next( t ) : t;
- s = s ? list_next( s ) : s;
- }
-
- return status;
-}
-
-LIST * compile_eval( PARSE * parse, FRAME * frame )
-{
- LIST * ll;
- LIST * lr;
- LIST * s;
- LIST * t;
- int status = 0;
-
- /* Short circuit lr eval for &&, ||, and 'in'. */
-
- ll = parse_evaluate( parse->left, frame );
- lr = 0;
-
- switch ( parse->num )
- {
- case EXPR_AND:
- case EXPR_IN : if ( ll ) goto eval; break;
- case EXPR_OR : if ( !ll ) goto eval; break;
- default: eval: lr = parse_evaluate( parse->right, frame );
- }
-
- /* Now eval. */
- switch ( parse->num )
- {
- case EXPR_NOT: if ( !ll ) status = 1; break;
- case EXPR_AND: if ( ll && lr ) status = 1; break;
- case EXPR_OR : if ( ll || lr ) status = 1; break;
-
- case EXPR_IN:
- /* "a in b": make sure each of ll is equal to something in lr. */
- for ( t = ll; t; t = list_next( t ) )
- {
- for ( s = lr; s; s = list_next( s ) )
- if ( !strcmp( t->string, s->string ) )
- break;
- if ( !s ) break;
- }
- /* No more ll? Success. */
- if ( !t ) status = 1;
- break;
-
- case EXPR_EXISTS: if ( lcmp( ll, L0 ) != 0 ) status = 1; break;
- case EXPR_EQUALS: if ( lcmp( ll, lr ) == 0 ) status = 1; break;
- case EXPR_NOTEQ : if ( lcmp( ll, lr ) != 0 ) status = 1; break;
- case EXPR_LESS : if ( lcmp( ll, lr ) < 0 ) status = 1; break;
- case EXPR_LESSEQ: if ( lcmp( ll, lr ) <= 0 ) status = 1; break;
- case EXPR_MORE : if ( lcmp( ll, lr ) > 0 ) status = 1; break;
- case EXPR_MOREEQ: if ( lcmp( ll, lr ) >= 0 ) status = 1; break;
- }
-
- if ( DEBUG_IF )
- {
- debug_compile( 0, "if", frame );
- list_print( ll );
- printf( "(%d) ", status );
- list_print( lr );
- printf( "\n" );
- }
-
- /* Find something to return. */
- /* In odd circumstances (like "" = "") */
- /* we'll have to return a new string. */
-
- if ( !status ) t = 0;
- else if ( ll ) t = ll, ll = 0;
- else if ( lr ) t = lr, lr = 0;
- else t = list_new( L0, newstr( "1" ) );
-
- if ( ll ) list_free( ll );
- if ( lr ) list_free( lr );
- return t;
-}
-
-
-/*
- * compile_foreach() - compile the "for x in y" statement
- *
- * Compile_foreach() resets the given variable name to each specified
- * value, executing the commands enclosed in braces for each iteration.
- *
- * parse->string index variable
- * parse->left variable values
- * parse->right rule to compile
- */
-
-LIST * compile_foreach( PARSE * parse, FRAME * frame )
-{
- LIST * nv = parse_evaluate( parse->left, frame );
- LIST * l;
- SETTINGS * s = 0;
-
- if ( parse->num )
- {
- s = addsettings( s, VAR_SET, parse->string, L0 );
- pushsettings( s );
- }
-
- /* Call var_set to reset $(parse->string) for each val. */
-
- for ( l = nv; l; l = list_next( l ) )
- {
- LIST * val = list_new( L0, copystr( l->string ) );
- var_set( parse->string, val, VAR_SET );
- list_free( parse_evaluate( parse->right, frame ) );
- }
-
- if ( parse->num )
- {
- popsettings( s );
- freesettings( s );
- }
-
- list_free( nv );
-
- return L0;
-}
-
-/*
- * compile_if() - compile 'if' rule
- *
- * parse->left condition tree
- * parse->right then tree
- * parse->third else tree
- */
-
-LIST * compile_if( PARSE * p, FRAME * frame )
-{
- LIST * l = parse_evaluate( p->left, frame );
- if ( l )
- {
- list_free( l );
- return parse_evaluate( p->right, frame );
- }
- return parse_evaluate( p->third, frame );
-}
-
-
-LIST * compile_while( PARSE * p, FRAME * frame )
-{
- LIST * r = 0;
- LIST * l;
- while ( ( l = parse_evaluate( p->left, frame ) ) )
- {
- list_free( l );
- if ( r ) list_free( r );
- r = parse_evaluate( p->right, frame );
- }
- return r;
-}
-
-
-/*
- * compile_include() - support for 'include' - call include() on file
- *
- * parse->left list of files to include (can only do 1)
- */
-
-LIST * compile_include( PARSE * parse, FRAME * frame )
-{
- LIST * nt = parse_evaluate( parse->left, frame );
-
- if ( DEBUG_COMPILE )
- {
- debug_compile( 0, "include", frame);
- list_print( nt );
- printf( "\n" );
- }
-
- if ( nt )
- {
- TARGET * t = bindtarget( nt->string );
-
- /* DWA 2001/10/22 - Perforce Jam cleared the arguments here, which
- * prevents an included file from being treated as part of the body of a
- * rule. I did not see any reason to do that, so I lifted the
- * restriction.
- */
-
- /* Bind the include file under the influence of */
- /* "on-target" variables. Though they are targets, */
- /* include files are not built with make(). */
-
- pushsettings( t->settings );
- /* We do not expect the file being included to be generated by some
- action. Therefore, pass 0 as the third argument.
- If the name resolves to a directory, let it error out. */
- t->boundname = search( t->name, &t->time, 0, 0 );
- popsettings( t->settings );
-
- parse_file( t->boundname, frame );
- }
-
- list_free( nt );
-
- return L0;
-}
-
-static LIST* evaluate_in_module ( char* module_name, PARSE * p, FRAME* frame)
-{
- LIST* result;
-
- module_t* outer_module = frame->module;
- frame->module = module_name ? bindmodule( module_name ) : root_module();
-
- if ( outer_module != frame->module )
- {
- exit_module( outer_module );
- enter_module( frame->module );
- }
-
- result = parse_evaluate( p, frame );
-
- if ( outer_module != frame->module )
- {
- exit_module( frame->module );
- enter_module( outer_module );
- frame->module = outer_module;
- }
-
- return result;
-}
-
-
-LIST * compile_module( PARSE * p, FRAME * frame )
-{
- /* Here we are entering a module declaration block. */
- LIST * module_name = parse_evaluate( p->left, frame );
- LIST * result = evaluate_in_module( module_name ? module_name->string : 0,
- p->right, frame );
- list_free( module_name );
- return result;
-}
-
-
-LIST * compile_class( PARSE * p, FRAME * frame )
-{
- /** Todo: check for empty class name.
- Check for class redeclaration. */
-
- char * class_module = 0;
-
- LIST * name = parse_evaluate( p->left->right, frame );
- LIST * bases = 0;
-
- if ( p->left->left )
- bases = parse_evaluate( p->left->left->right, frame );
-
- class_module = make_class_module( name, bases, frame );
- evaluate_in_module( class_module, p->right, frame );
-
- return L0;
-}
-
-
-/*
- * compile_list() - expand and return a list.
- *
- * parse->string - character string to expand.
- */
-
-LIST * compile_list( PARSE * parse, FRAME * frame )
-{
- /* s is a copyable string */
- char * s = parse->string;
- return var_expand( L0, s, s + strlen( s ), frame->args, 1 );
-}
-
-
-/*
- * compile_local() - declare (and set) local variables.
- *
- * parse->left list of variables
- * parse->right list of values
- * parse->third rules to execute
- */
-
-LIST * compile_local( PARSE * parse, FRAME * frame )
-{
- LIST * l;
- SETTINGS * s = 0;
- LIST * nt = parse_evaluate( parse->left, frame );
- LIST * ns = parse_evaluate( parse->right, frame );
- LIST * result;
-
- if ( DEBUG_COMPILE )
- {
- debug_compile( 0, "local", frame );
- list_print( nt );
- printf( " = " );
- list_print( ns );
- printf( "\n" );
- }
-
- /* Initial value is ns. */
- for ( l = nt; l; l = list_next( l ) )
- s = addsettings( s, VAR_SET, l->string, list_copy( (LIST *)0, ns ) );
-
- list_free( ns );
- list_free( nt );
-
- /* Note that callees of the current context get this "local" variable,
- * making it not so much local as layered.
- */
-
- pushsettings( s );
- result = parse_evaluate( parse->third, frame );
- popsettings( s );
-
- freesettings( s );
-
- return result;
-}
-
-
-/*
- * compile_null() - do nothing -- a stub for parsing.
- */
-
-LIST * compile_null( PARSE * parse, FRAME * frame )
-{
- return L0;
-}
-
-
-/*
- * compile_on() - run rule under influence of on-target variables
- *
- * parse->left list of files to include (can only do 1).
- * parse->right rule to run.
- *
- * EXPERIMENTAL!
- */
-
-LIST * compile_on( PARSE * parse, FRAME * frame )
-{
- LIST * nt = parse_evaluate( parse->left, frame );
- LIST * result = 0;
-
- if ( DEBUG_COMPILE )
- {
- debug_compile( 0, "on", frame );
- list_print( nt );
- printf( "\n" );
- }
-
- if ( nt )
- {
- TARGET * t = bindtarget( nt->string );
- pushsettings( t->settings );
- result = parse_evaluate( parse->right, frame );
- popsettings( t->settings );
- }
-
- list_free( nt );
-
- return result;
-}
-
-
-/*
- * compile_rule() - compile a single user defined rule.
- *
- * parse->string name of user defined rule.
- * parse->left parameters (list of lists) to rule, recursing left.
- *
- * Wrapped around evaluate_rule() so that headers() can share it.
- */
-
-LIST * compile_rule( PARSE * parse, FRAME * frame )
-{
- FRAME inner[ 1 ];
- LIST * result;
- PARSE * p;
-
- /* Build up the list of arg lists. */
- frame_init( inner );
- inner->prev = frame;
- inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
- inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */
- inner->procedure = parse;
- /* Special-case a LOL of length 1 whose first list is totally empty.
- This is created when calling functions with no parameters, due to
- the way the jam grammar is written. This is OK when one Jam function
- calls another, but really not good when a Jam function calls Python. */
- if ( parse->left->left == NULL && parse->left->right->func == compile_null)
- ;
- else
- for ( p = parse->left; p; p = p->left )
- lol_add( inner->args, parse_evaluate( p->right, frame ) );
-
- /* And invoke the rule. */
- result = evaluate_rule( parse->string, inner );
- frame_free( inner );
- return result;
-}
-
-
-static void argument_error( char * message, RULE * rule, FRAME * frame, LIST* arg )
-{
- LOL * actual = frame->args;
- assert( frame->procedure != 0 );
- backtrace_line( frame->prev );
- printf( "*** argument error\n* rule %s ( ", frame->rulename );
- lol_print( rule->arguments->data );
- printf( " )\n* called with: ( " );
- lol_print( actual );
- printf( " )\n* %s %s\n", message, arg ? arg->string : "" );
- print_source_line( rule->procedure );
- printf( "see definition of rule '%s' being called\n", rule->name );
- backtrace( frame->prev );
- exit( 1 );
-}
-
-
-/* Define delimiters for type check elements in argument lists (and return type
- * specifications, eventually).
- */
-# define TYPE_OPEN_DELIM '['
-# define TYPE_CLOSE_DELIM ']'
-
-/*
- * is_type_name() - true iff the given string represents a type check
- * specification.
- */
-
-static int is_type_name( char * s )
-{
- return ( s[ 0 ] == TYPE_OPEN_DELIM ) &&
- ( s[ strlen( s ) - 1 ] == TYPE_CLOSE_DELIM );
-}
-
-
-/*
- * arg_modifier - if the next element of formal is a single character, return
- * that; return 0 otherwise. Used to extract "*+?" modifiers from argument
- * lists.
- */
-
-static char arg_modifier( LIST * formal )
-{
- if ( formal->next )
- {
- char * next = formal->next->string;
- if ( next && ( next[ 0 ] != 0 ) && ( next[ 1 ] == 0 ) )
- return next[ 0 ];
- }
- return 0;
-}
-
-
-/*
- * type_check() - checks that each element of values satisfies the requirements
- * of type_name.
- *
- * caller - the frame of the rule calling the rule whose arguments are
- * being checked
- *
- * called - the rule being called
- *
- * arg_name - a list element containing the name of the argument being
- * checked
- */
-
-static void type_check
-(
- char * type_name,
- LIST * values,
- FRAME * caller,
- RULE * called,
- LIST * arg_name
-)
-{
- static module_t * typecheck = 0;
-
- /* If nothing to check, bail now. */
- if ( !values || !type_name )
- return;
-
- if ( !typecheck )
- typecheck = bindmodule( ".typecheck" );
-
- /* If the checking rule can not be found, also bail. */
- {
- RULE checker_, *checker = &checker_;
-
- checker->name = type_name;
- if ( !typecheck->rules || !hashcheck( typecheck->rules, (HASHDATA * *)&checker ) )
- return;
- }
-
- exit_module( caller->module );
-
- while ( values != 0 )
- {
- LIST *error;
- FRAME frame[1];
- frame_init( frame );
- frame->module = typecheck;
- frame->prev = caller;
- frame->prev_user = caller->module->user_module ? caller : caller->prev_user;
-
- enter_module( typecheck );
- /* Prepare the argument list */
- lol_add( frame->args, list_new( L0, values->string ) );
- error = evaluate_rule( type_name, frame );
-
- exit_module( typecheck );
-
- if ( error )
- argument_error( error->string, called, caller, arg_name );
-
- frame_free( frame );
- values = values->next;
- }
-
- enter_module( caller->module );
-}
-
-/*
- * collect_arguments() - local argument checking and collection
- */
-static SETTINGS *
-collect_arguments( RULE* rule, FRAME* frame )
-{
- SETTINGS *locals = 0;
-
- LOL * all_actual = frame->args;
- LOL * all_formal = rule->arguments ? rule->arguments->data : 0;
- if ( all_formal ) /* Nothing to set; nothing to check */
- {
- int max = all_formal->count > all_actual->count
- ? all_formal->count
- : all_actual->count;
-
- int n;
- for ( n = 0; n < max ; ++n )
- {
- LIST *actual = lol_get( all_actual, n );
- char *type_name = 0;
-
- LIST *formal;
- for ( formal = lol_get( all_formal, n ); formal; formal = formal->next )
- {
- char* name = formal->string;
-
- if ( is_type_name(name) )
- {
- if ( type_name )
- argument_error( "missing argument name before type name:", rule, frame, formal );
-
- if ( !formal->next )
- argument_error( "missing argument name after type name:", rule, frame, formal );
-
- type_name = formal->string;
- }
- else
- {
- LIST* value = 0;
- char modifier;
- LIST* arg_name = formal; /* hold the argument name for type checking */
- int multiple = 0;
-
- /* Stop now if a variable number of arguments are specified */
- if ( name[0] == '*' && name[1] == 0 )
- return locals;
-
- modifier = arg_modifier( formal );
-
- if ( !actual && modifier != '?' && modifier != '*' )
- argument_error( "missing argument", rule, frame, formal );
-
- switch ( modifier )
- {
- case '+':
- case '*':
- value = list_copy( 0, actual );
- multiple = 1;
- actual = 0;
- /* skip an extra element for the modifier */
- formal = formal->next;
- break;
- case '?':
- /* skip an extra element for the modifier */
- formal = formal->next;
- /* fall through */
- default:
- if ( actual ) /* in case actual is missing */
- {
- value = list_new( 0, actual->string );
- actual = actual->next;
- }
- }
-
- locals = addsettings(locals, VAR_SET, name, value);
- locals->multiple = multiple;
- type_check( type_name, value, frame, rule, arg_name );
- type_name = 0;
- }
- }
-
- if ( actual )
- {
- argument_error( "extra argument", rule, frame, actual );
- }
- }
- }
- return locals;
-}
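/* Illustrative example (hypothetical rule, not from the original sources) of
 * how collect_arguments() above binds actual values, given a Jam declaration
 * such as:
 *
 *     rule archive ( target : members + : flags ? )
 *
 *   - "target"    binds a single value; its absence is an argument error;
 *   - "members +" binds all remaining values of its list and must be non-empty;
 *   - "flags ?"   binds one value if present and nothing otherwise;
 *   - a bare "*" as a formal name stops checking entirely (variable arity).
 */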
-
-RULE *
-enter_rule( char *rulename, module_t *target_module );
-
-#ifdef HAVE_PYTHON
-
-static int python_instance_number = 0;
-
-
-/* Given a Python object, return a string to use in Jam
- code instead of said object.
- If the object is a string, use the string value.
- If the object implements the __jam_repr__ method, use that.
- Otherwise return 0.
-
- The result value is newstr-ed. */
-char *python_to_string(PyObject* value)
-{
- if (PyString_Check(value))
- {
- return newstr(PyString_AsString(value));
- }
- else
- {
- /* See if this is an instance that defines special __jam_repr__
- method. */
- if (PyInstance_Check(value)
- && PyObject_HasAttrString(value, "__jam_repr__"))
- {
- PyObject* repr = PyObject_GetAttrString(value, "__jam_repr__");
- if (repr)
- {
- PyObject* arguments2 = PyTuple_New(0);
- PyObject* value2 = PyObject_Call(repr, arguments2, 0);
- Py_DECREF(repr);
- Py_DECREF(arguments2);
- if (PyString_Check(value2))
- {
- return newstr(PyString_AsString(value2));
- }
- Py_DECREF(value2);
- }
- }
- return 0;
- }
-}
-
-static LIST*
-call_python_function(RULE* r, FRAME* frame)
-{
- LIST * result = 0;
- PyObject * arguments = 0;
- PyObject * kw = NULL;
- int i ;
- PyObject * py_result;
-
- if (r->arguments)
- {
- SETTINGS * args;
-
- arguments = PyTuple_New(0);
- kw = PyDict_New();
-
- for (args = collect_arguments(r, frame); args; args = args->next)
- {
- PyObject *key = PyString_FromString(args->symbol);
- PyObject *value = 0;
- if (args->multiple)
- value = list_to_python(args->value);
- else {
- if (args->value)
- value = PyString_FromString(args->value->string);
- }
-
- if (value)
- PyDict_SetItem(kw, key, value);
- Py_DECREF(key);
- Py_XDECREF(value);
- }
- }
- else
- {
- arguments = PyTuple_New( frame->args->count );
- for ( i = 0; i < frame->args->count; ++i )
- {
- PyObject * arg = PyList_New(0);
- LIST* l = lol_get( frame->args, i);
-
- for ( ; l; l = l->next )
- {
- PyObject * v = PyString_FromString(l->string);
- PyList_Append( arg, v );
- Py_DECREF(v);
- }
- /* Steals reference to 'arg' */
- PyTuple_SetItem( arguments, i, arg );
- }
- }
-
- frame_before_python_call = frame;
- py_result = PyObject_Call( r->python_function, arguments, kw );
- Py_DECREF(arguments);
- Py_XDECREF(kw);
- if ( py_result != NULL )
- {
- if ( PyList_Check( py_result ) )
- {
- int size = PyList_Size( py_result );
- int i;
- for ( i = 0; i < size; ++i )
- {
- PyObject * item = PyList_GetItem( py_result, i );
- char *s = python_to_string (item);
- if (!s) {
- fprintf( stderr, "Non-string object returned by Python call.\n" );
- } else {
- result = list_new (result, s);
- }
- }
- }
- else if ( py_result == Py_None )
- {
- result = L0;
- }
- else
- {
- char *s = python_to_string(py_result);
- if (s)
- result = list_new(0, s);
- else
- /* We have tried all we could. Return an empty list. There are
- cases, e.g. the feature.feature function, which should return a
- value for the benefit of Python code and which can also be
- called by Jam code, where no sensible value can be
- returned. We cannot even emit a warning, since there would
- be a pile of them. */
- result = L0;
- }
-
- Py_DECREF( py_result );
- }
- else
- {
- PyErr_Print();
- fprintf(stderr,"Call failed\n");
- }
-
- return result;
-}
-
-
-module_t * python_module()
-{
- static module_t * python = 0;
- if ( !python )
- python = bindmodule("__python__");
- return python;
-}
-
-#endif
-
-
-/*
- * evaluate_rule() - execute a rule invocation.
- */
-
-LIST *
-evaluate_rule(
- char * rulename,
- FRAME * frame )
-{
- LIST * result = L0;
- RULE * rule;
- profile_frame prof[1];
- module_t * prev_module = frame->module;
-
- LIST * l;
- {
- LOL arg_context_, * arg_context = &arg_context_;
- if ( !frame->prev )
- lol_init(arg_context);
- else
- arg_context = frame->prev->args;
- l = var_expand( L0, rulename, rulename+strlen(rulename), arg_context, 0 );
- }
-
- if ( !l )
- {
- backtrace_line( frame->prev );
- printf( "warning: rulename %s expands to empty string\n", rulename );
- backtrace( frame->prev );
- return result;
- }
-
- rulename = l->string;
- rule = bindrule( l->string, frame->module );
-
-#ifdef HAVE_PYTHON
- if ( rule->python_function )
- {
- /* The below messing with modules is due to the way modules are
- * implemented in Jam. Suppose we are in module M1 now. The global
- * variable map actually holds 'M1' variables, and M1->variables hold
- * global variables.
- *
- * If we call Python right away and Python calls back into Jam, which then
- * does 'module M1 { }', Jam will try to swap the current global
- * variables with M1->variables. The result will be that the global
- * variable map will hold global variables, and any variable settings
- * we do will go to the global module, not M1.
- *
- * By restoring the basic state, where the global variable map holds global
- * variables, we make sure any future 'module M1' entry will work OK.
- */
-
- LIST * result;
- module_t * m = python_module();
-
- frame->module = m;
-
- exit_module( prev_module );
- enter_module( m );
-
- result = call_python_function( rule, frame );
-
- exit_module( m );
- enter_module ( prev_module );
-
- return result;
- }
-#endif
-
- /* Drop the rule name. */
- l = list_pop_front( l );
-
- /* Tack the rest of the expansion onto the front of the first argument. */
- frame->args->list[0] = list_append( l, lol_get( frame->args, 0 ) );
-
- if ( DEBUG_COMPILE )
- {
- /* Try hard to indicate in which module the rule is going to execute. */
- if ( rule->module != frame->module
- && rule->procedure != 0 && strcmp( rulename, rule->procedure->rulename ) )
- {
- char buf[256] = "";
- strncat( buf, rule->module->name, sizeof( buf ) - 1 );
- strncat( buf, rule->name, sizeof( buf ) - 1 );
- debug_compile( 1, buf, frame );
- }
- else
- {
- debug_compile( 1, rulename, frame );
- }
-
- lol_print( frame->args );
- printf( "\n" );
- }
-
- if ( rule->procedure && rule->module != prev_module )
- {
- /* Propagate current module to nested rule invocations. */
- frame->module = rule->module;
-
- /* Swap variables. */
- exit_module( prev_module );
- enter_module( rule->module );
- }
-
- /* Record current rule name in frame. */
- if ( rule->procedure )
- {
- frame->rulename = rulename;
- /* And, on entry, record profile info. */
- if ( DEBUG_PROFILE )
- profile_enter( rule->procedure->rulename, prof );
- }
-
- /* Check traditional targets $(<) and sources $(>). */
- if ( !rule->actions && !rule->procedure )
- {
- backtrace_line( frame->prev );
- printf( "rule %s unknown in module %s\n", rule->name, frame->module->name );
- backtrace( frame->prev );
- exit( 1 );
- }
-
- /* If this rule will be executed for updating the targets then construct the
- * action for make().
- */
- if ( rule->actions )
- {
- TARGETS * t;
- ACTION * action;
-
- /* The action is associated with this instance of this rule. */
- action = (ACTION *)BJAM_MALLOC( sizeof( ACTION ) );
- memset( (char *)action, '\0', sizeof( *action ) );
-
- action->rule = rule;
- action->targets = targetlist( (TARGETS *)0, lol_get( frame->args, 0 ) );
- action->sources = targetlist( (TARGETS *)0, lol_get( frame->args, 1 ) );
-
- /* If we have a group of targets all being built using the same action
- * then we must not allow any of them to be used as sources unless they
- * had all already been built in the first place or their joined action
- * has had a chance to finish its work and build all of them anew.
- *
- * Without this it might be possible, in case of a multi-process build,
- * for their action, triggered by building one of the targets, to still
- * be running when another target in the group is reported as done (so as
- * not to trigger the same action again) and so gets used prematurely.
- *
- * As a quick-fix to achieve this effect we make all the targets list
- * each other as 'included targets'. More precisely, we mark the first
- * listed target as including all the other targets in the list and vice
- * versa. This makes anyone depending on any of those targets implicitly
- * depend on all of them, thus making sure none of those targets can be
- * used as sources until all of them have been built. Note that direct
- * dependencies could not have been used due to the 'circular
- * dependency' issue.
- *
- * TODO: Although the current implementation solves the problem of one
- * of the targets getting used before its action completes its work it
- * also forces the action to run whenever any of the targets in the
- * group is not up to date even though some of them might not actually
- * be used by the targets being built. We should see how we can
- * correctly recognize such cases and use that to avoid running the
- * action if possible and not rebuild targets not actually depending on
- * targets that are not up to date.
- *
- * TODO: Using the 'include' feature might have side-effects due to
- * interaction with the actual 'inclusion scanning' system. This should
- * be checked.
- */
- if ( action->targets )
- {
- TARGET * t0 = action->targets->target;
- for ( t = action->targets->next; t; t = t->next )
- {
- target_include( t->target, t0 );
- target_include( t0, t->target );
- }
- }
-
- /* Append this action to the actions of each target. */
- for ( t = action->targets; t; t = t->next )
- t->target->actions = actionlist( t->target->actions, action );
- }
-
- /* Now recursively compile any parse tree associated with this rule.
- * parse_refer()/parse_free() call pair added to ensure rule not freed
- * during use.
- */
- if ( rule->procedure )
- {
- SETTINGS * local_args = collect_arguments( rule, frame );
- PARSE * parse = rule->procedure;
- parse_refer( parse );
-
- pushsettings( local_args );
- result = parse_evaluate( parse, frame );
- popsettings( local_args );
- freesettings( local_args );
-
- parse_free( parse );
- }
-
- if ( frame->module != prev_module )
- {
- exit_module( frame->module );
- enter_module( prev_module );
- }
-
- if ( DEBUG_PROFILE && rule->procedure )
- profile_exit( prof );
-
- if ( DEBUG_COMPILE )
- debug_compile( -1, 0, frame);
-
- return result;
-}
-
-
-/*
- * Call the given rule with the specified parameters. The parameters should be
- * of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule'
- * in that frame for the called rule is prepared inside 'call_rule'.
- *
- * This function is useful when a builtin rule (in C) wants to call another rule
- * which might be implemented in Jam.
- */
-
-LIST * call_rule( char * rulename, FRAME * caller_frame, ... )
-{
- va_list va;
- LIST * result;
-
- FRAME inner[1];
- frame_init( inner );
- inner->prev = caller_frame;
- inner->prev_user = caller_frame->module->user_module ?
- caller_frame : caller_frame->prev_user;
- inner->module = caller_frame->module;
- inner->procedure = 0;
-
- va_start( va, caller_frame );
- for ( ; ; )
- {
- LIST * l = va_arg( va, LIST* );
- if ( !l )
- break;
- lol_add( inner->args, l );
- }
- va_end( va );
-
- result = evaluate_rule( rulename, inner );
-
- frame_free( inner );
-
- return result;
-}
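/* Illustrative sketch (hypothetical rule name and caller): how a builtin
 * written in C might use call_rule() above. Each variadic argument is a
 * LIST *, and the sequence must be terminated with a NULL sentinel. */
static LIST * sketch_invoke( FRAME * caller_frame )
{
    LIST * result = call_rule( "my-jam-rule", caller_frame,
        list_new( L0, newstr( "hello" ) ),   /* first argument list */
        list_new( L0, newstr( "world" ) ),   /* second argument list */
        NULL );
    return result;
}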
-
-
-/*
- * compile_rules() - compile a chain of rules
- *
- * parse->left single rule
- * parse->right more compile_rules() by right-recursion
- */
-
-LIST * compile_rules( PARSE * parse, FRAME * frame )
-{
- /* Ignore result from first statement; return the 2nd. */
- /* Optimize recursion on the right by looping. */
- do list_free( parse_evaluate( parse->left, frame ) );
- while ( ( parse = parse->right )->func == compile_rules );
- return parse_evaluate( parse, frame );
-}
-
-
-/*
- * assign_var_mode() - convert ASSIGN_XXX compilation flag into corresponding
- * VAR_XXX variable set flag.
- */
-
-static int assign_var_mode( int parsenum, char const * * tracetext )
-{
- char const * trace;
- int setflag;
- switch ( parsenum )
- {
- case ASSIGN_SET : setflag = VAR_SET ; trace = "=" ; break;
- case ASSIGN_APPEND : setflag = VAR_APPEND ; trace = "+="; break;
- case ASSIGN_DEFAULT: setflag = VAR_DEFAULT; trace = "?="; break;
- default: setflag = VAR_SET ; trace = "" ; break;
- }
- if ( tracetext )
- *tracetext = trace ;
- return setflag;
-}
-
-/*
- * compile_set() - compile the "set variable" statement
- *
- * parse->left variable names
- * parse->right variable values
- * parse->num ASSIGN_SET/APPEND/DEFAULT
- */
-
-LIST * compile_set( PARSE * parse, FRAME * frame )
-{
- LIST * nt = parse_evaluate( parse->left, frame );
- LIST * ns = parse_evaluate( parse->right, frame );
- LIST * l;
- char const * trace;
- int setflag = assign_var_mode( parse->num, &trace );
-
- if ( DEBUG_COMPILE )
- {
- debug_compile( 0, "set", frame );
- list_print( nt );
- printf( " %s ", trace );
- list_print( ns );
- printf( "\n" );
- }
-
- /* Call var_set to set variable. var_set keeps ns, so need to copy it. */
- for ( l = nt; l; l = list_next( l ) )
- var_set( l->string, list_copy( L0, ns ), setflag );
- list_free( nt );
- return ns;
-}
-
-
-/*
- * compile_setcomp() - support for `rule` - save parse tree.
- *
- * parse->string rule name
- * parse->left rules for rule
- * parse->right optional list-of-lists describing arguments
- */
-
-LIST * compile_setcomp( PARSE * parse, FRAME * frame )
-{
- argument_list * arg_list = 0;
-
- /* Create new LOL describing argument requirements if supplied. */
- if ( parse->right )
- {
- PARSE * p;
- arg_list = args_new();
- for ( p = parse->right; p; p = p->left )
- lol_add( arg_list->data, parse_evaluate( p->right, frame ) );
- }
-
- new_rule_body( frame->module, parse->string, arg_list, parse->left, !parse->num );
- return L0;
-}
-
-
-/*
- * compile_setexec() - support for `actions` - save execution string.
- *
- * parse->string rule name
- * parse->string1 OS command string
- * parse->num flags
- * parse->left `bind` variables
- *
- * Note that the parse flags (as defined in compile.h) are transferred directly
- * to the rule flags (as defined in rules.h).
- */
-
-LIST * compile_setexec( PARSE * parse, FRAME * frame )
-{
- LIST * bindlist = parse_evaluate( parse->left, frame );
- new_rule_actions( frame->module, parse->string, parse->string1, bindlist, parse->num );
- return L0;
-}
-
-
-/*
- * compile_settings() - compile the "on =" (set variable on exec) statement.
- *
- * parse->left variable names
- * parse->right target name
- * parse->third variable value
- * parse->num ASSIGN_SET/APPEND
- */
-
-LIST * compile_settings( PARSE * parse, FRAME * frame )
-{
- LIST * nt = parse_evaluate( parse->left, frame );
- LIST * ns = parse_evaluate( parse->third, frame );
- LIST * targets = parse_evaluate( parse->right, frame );
- LIST * ts;
- char const * trace;
- int setflag = assign_var_mode( parse->num, &trace );
-
- if ( DEBUG_COMPILE )
- {
- debug_compile( 0, "set", frame );
- list_print( nt );
- printf( " on " );
- list_print( targets );
- printf( " %s ", trace );
- list_print( ns );
- printf( "\n" );
- }
-
- /* Call addsettings() to save variable setting. addsettings() keeps ns, so
- * need to copy it. Pass append flag to addsettings().
- */
- for ( ts = targets; ts; ts = list_next( ts ) )
- {
- TARGET * t = bindtarget( ts->string );
- LIST * l;
-
- for ( l = nt; l; l = list_next( l ) )
- t->settings = addsettings( t->settings, setflag, l->string,
- list_copy( (LIST *)0, ns ) );
- }
-
- list_free( nt );
- list_free( targets );
- return ns;
-}
-
-
-/*
- * compile_switch() - compile 'switch' rule.
- *
- * parse->left switch value (only 1st used)
- * parse->right cases
- *
- * cases->left 1st case
- * cases->right next cases
- *
- * case->string argument to match
- * case->left parse tree to execute
- */
-
-LIST * compile_switch( PARSE * parse, FRAME * frame )
-{
- LIST * nt = parse_evaluate( parse->left, frame );
- LIST * result = 0;
-
- if ( DEBUG_COMPILE )
- {
- debug_compile( 0, "switch", frame );
- list_print( nt );
- printf( "\n" );
- }
-
- /* Step through cases. */
- for ( parse = parse->right; parse; parse = parse->right )
- {
- if ( !glob( parse->left->string, nt ? nt->string : "" ) )
- {
- /* Get & exec parse tree for this case. */
- parse = parse->left->left;
- result = parse_evaluate( parse, frame );
- break;
- }
- }
-
- list_free( nt );
- return result;
-}
-
-
-/*
- * debug_compile() - printf with indent to show rule expansion.
- */
-
-static void debug_compile( int which, char * s, FRAME * frame )
-{
- static int level = 0;
- static char indent[36] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|";
-
- if ( which >= 0 )
- {
- int i;
-
- print_source_line( frame->procedure );
-
- i = ( level + 1 ) * 2;
- while ( i > 35 )
- {
- fputs( indent, stdout );
- i -= 35;
- }
-
- printf( "%*.*s ", i, i, indent );
- }
-
- if ( s )
- printf( "%s ", s );
-
- level += which;
-}
diff --git a/jam-files/engine/compile.h b/jam-files/engine/compile.h
deleted file mode 100644
index 7d5191f0..00000000
--- a/jam-files/engine/compile.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#ifndef COMPILE_DWA20011022_H
-# define COMPILE_DWA20011022_H
-
-# include "frames.h"
-# include "parse.h"
-# include "regexp.h"
-
-/*
- * compile.h - compile parsed jam statements
- */
-
-void compile_builtins();
-
-LIST *compile_append( PARSE *parse, FRAME *frame );
-LIST *compile_foreach( PARSE *parse, FRAME *frame );
-LIST *compile_if( PARSE *parse, FRAME *frame );
-LIST *compile_eval( PARSE *parse, FRAME *args );
-LIST *compile_include( PARSE *parse, FRAME *frame );
-LIST *compile_list( PARSE *parse, FRAME *frame );
-LIST *compile_local( PARSE *parse, FRAME *frame );
-LIST *compile_module( PARSE *parse, FRAME *frame );
-LIST *compile_class( PARSE *parse, FRAME *frame );
-LIST *compile_null( PARSE *parse, FRAME *frame );
-LIST *compile_on( PARSE *parse, FRAME *frame );
-LIST *compile_rule( PARSE *parse, FRAME *frame );
-LIST *compile_rules( PARSE *parse, FRAME *frame );
-LIST *compile_set( PARSE *parse, FRAME *frame );
-LIST *compile_setcomp( PARSE *parse, FRAME *frame );
-LIST *compile_setexec( PARSE *parse, FRAME *frame );
-LIST *compile_settings( PARSE *parse, FRAME *frame );
-LIST *compile_switch( PARSE *parse, FRAME *frame );
-LIST *compile_while( PARSE *parse, FRAME *frame );
-
-LIST *evaluate_rule( char *rulename, FRAME *frame );
-LIST *call_rule( char *rulename, FRAME* caller_frame, ...);
-
-regexp* regex_compile( const char* pattern );
-
-/* Flags for compile_set(), etc */
-
-# define ASSIGN_SET 0x00 /* = assign variable */
-# define ASSIGN_APPEND 0x01 /* += append variable */
-# define ASSIGN_DEFAULT 0x02 /* set only if unset */
-
-/* Flags for compile_setexec() */
-
-# define EXEC_UPDATED 0x01 /* executes updated */
-# define EXEC_TOGETHER 0x02 /* executes together */
-# define EXEC_IGNORE 0x04 /* executes ignore */
-# define EXEC_QUIETLY 0x08 /* executes quietly */
-# define EXEC_PIECEMEAL 0x10 /* executes piecemeal */
-# define EXEC_EXISTING 0x20 /* executes existing */
-
-/* Conditions for compile_if() */
-
-# define EXPR_NOT 0 /* ! cond */
-# define EXPR_AND 1 /* cond && cond */
-# define EXPR_OR 2 /* cond || cond */
-
-# define EXPR_EXISTS 3 /* arg */
-# define EXPR_EQUALS 4 /* arg = arg */
-# define EXPR_NOTEQ 5 /* arg != arg */
-# define EXPR_LESS 6 /* arg < arg */
-# define EXPR_LESSEQ 7 /* arg <= arg */
-# define EXPR_MORE 8 /* arg > arg */
-# define EXPR_MOREEQ 9 /* arg >= arg */
-# define EXPR_IN 10 /* arg in arg */
-
-#endif
-
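/* Illustrative note (not part of the original header): the EXEC_* bits above
 * are or-ed together when an `actions` block carries several qualifiers; for
 * example, "actions quietly together" would correspond to passing
 * EXEC_QUIETLY | EXEC_TOGETHER as the flags argument recorded by
 * compile_setexec(). */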
diff --git a/jam-files/engine/debian/changelog b/jam-files/engine/debian/changelog
deleted file mode 100644
index 29084289..00000000
--- a/jam-files/engine/debian/changelog
+++ /dev/null
@@ -1,72 +0,0 @@
-bjam (3.1.12-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Rene Rivera <grafik@redshift-software.com> Sat, 01 Oct 2005 00:00:00 +0000
-
-bjam (3.1.11-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Rene Rivera <grafik@redshift-software.com> Sat, 30 Apr 2005 00:00:00 +0000
-
-bjam (3.1.10-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Rene Rivera <grafik@redshift-software.com> Tue, 1 Jun 2004 05:42:35 +0000
-
-bjam (3.1.9-2) unstable; urgency=low
-
- * Use the default value of BOOST_BUILD_PATH if it is not set in the environment.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Wed, 17 Dec 2003 16:44:35 +0300
-
-bjam (3.1.9-1) unstable; urgency=low
-
- * Implement NATIVE_FILE builtin and several native rules.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Dec 2003 13:15:26 +0300
-
-bjam (3.1.8-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 4 Nov 2003 20:50:43 +0300
-
-bjam (3.1.7-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Sep 2003 10:45:44 +0400
-
-bjam (3.1.6-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 1 Jul 2003 09:12:18 +0400
-
-bjam (3.1.5-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Mon, 19 May 2003 14:05:13 +0400
-
-bjam (3.1.3-2) unstable; urgency=low
-
- * Changed Debian package to be similar to Jam's package.
-
- -- Vladimir Prus <ghost@cs.msu.su> Thu, 10 Oct 2002 18:43:26 +0400
-
-bjam (3.1.3-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Fri, 4 Oct 2002 18:16:54 +0400
-
-bjam (3.1.2-1) unstable; urgency=low
-
- * Initial Release.
-
- -- Vladimir Prus <ghost@cs.msu.su> Wed, 14 Aug 2002 14:08:00 +0400
-
diff --git a/jam-files/engine/debian/control b/jam-files/engine/debian/control
deleted file mode 100644
index c7f15193..00000000
--- a/jam-files/engine/debian/control
+++ /dev/null
@@ -1,16 +0,0 @@
-Source: bjam
-Section: devel
-Priority: optional
-Maintainer: Vladimir Prus <ghost@cs.msu.su>
-Build-Depends: debhelper (>> 3.0.0), docbook-to-man, bison
-Standards-Version: 3.5.2
-
-Package: bjam
-Architecture: any
-Depends: ${shlibs:Depends}
-Description: Build tool
- Boost.Jam is a portable build tool with its own interpreted language, which
- allows one to implement rather complex logic in a readable way and without
- resorting to external programs. It is a descendant of the Jam/MR tool, modified to
- suit the needs of Boost.Build. In particular, modules and rule parameters
- were added, as well as several new builtins.
diff --git a/jam-files/engine/debian/copyright b/jam-files/engine/debian/copyright
deleted file mode 100644
index f72e4e3a..00000000
--- a/jam-files/engine/debian/copyright
+++ /dev/null
@@ -1,25 +0,0 @@
-This package was debianized by Vladimir Prus <ghost@cs.msu.su> on
-Wed, 17 July 2002, 19:27:00 +0400.
-
-Copyright:
-
- /+\
- +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- \+/
-
- This is Release 2.4 of Jam/MR, a make-like program.
-
- License is hereby granted to use this software and distribute it
- freely, as long as this copyright notice is retained and modifications
- are clearly marked.
-
- ALL WARRANTIES ARE HEREBY DISCLAIMED.
-
-Some portions are also:
-
- Copyright 2001-2006 David Abrahams.
- Copyright 2002-2006 Rene Rivera.
- Copyright 2003-2006 Vladimir Prus.
-
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
diff --git a/jam-files/engine/debian/jam.man.sgml b/jam-files/engine/debian/jam.man.sgml
deleted file mode 100644
index ee21d4d8..00000000
--- a/jam-files/engine/debian/jam.man.sgml
+++ /dev/null
@@ -1,236 +0,0 @@
-<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
-
-<!-- Process this file with docbook-to-man to generate an nroff manual
- page: `docbook-to-man manpage.sgml > manpage.1'. You may view
- the manual page with: `docbook-to-man manpage.sgml | nroff -man |
- less'. A typical entry in a Makefile or Makefile.am is:
-
-manpage.1: manpage.sgml
- docbook-to-man $< > $@
- -->
-
- <!ENTITY dhfirstname "<firstname>Yann</firstname>">
- <!ENTITY dhsurname "<surname>Dirson</surname>">
- <!-- Please adjust the date whenever revising the manpage. -->
- <!ENTITY dhdate "<date>mai 23, 2001</date>">
- <!ENTITY dhemail "<email>dirson@debian.org</email>">
- <!ENTITY dhusername "Yann Dirson">
- <!ENTITY dhpackage "jam">
-
- <!ENTITY debian "<productname>Debian GNU/Linux</productname>">
- <!ENTITY gnu "<acronym>GNU</acronym>">
-]>
-
-<refentry>
- <refentryinfo>
- <address>
- &dhemail;
- </address>
- <author>
- &dhfirstname;
- &dhsurname;
- </author>
- <copyright>
- <year>2001</year>
- <holder>&dhusername;</holder>
- </copyright>
- &dhdate;
- </refentryinfo>
-
- <refmeta>
- <refentrytitle>JAM</refentrytitle>
- <manvolnum>1</manvolnum>
- </refmeta>
-
- <refnamediv>
- <refname>Jam/MR</refname>
- <refpurpose>Make(1) Redux</refpurpose>
- </refnamediv>
-
- <refsynopsisdiv>
- <cmdsynopsis>
- <command>jam</command>
-
- <arg><option>-a</option></arg>
- <arg><option>-n</option></arg>
- <arg><option>-v</option></arg>
-
- <arg><option>-d <replaceable/debug/</option></arg>
- <arg><option>-f <replaceable/jambase/</option></arg>
- <arg><option>-j <replaceable/jobs/</option></arg>
- <arg><option>-o <replaceable/actionsfile/</option></arg>
- <arg><option>-s <replaceable/var/=<replaceable/value/</option></arg>
- <arg><option>-t <replaceable/target/</option></arg>
-
- <arg repeat><option><replaceable/target/</option></arg>
- </cmdsynopsis>
- </refsynopsisdiv>
-
- <refsect1>
- <title>DESCRIPTION</title>
-
- <para>Jam is a program construction tool, like make(1).</para>
-
- <para>Jam recursively builds target files from source files, using
- dependency information and updating actions expressed in the
- Jambase file, which is written in jam's own interpreted language.
- The default Jambase is compiled into jam and provides a
- boilerplate for common use, relying on a user-provided file
- "Jamfile" to enumerate actual targets and sources.</para>
- </refsect1>
-
- <refsect1>
- <title>OPTIONS</title>
-
- <variablelist>
- <varlistentry>
- <term><option/-a/</term>
- <listitem>
- <para>Build all targets anyway, even if they are up-to-date.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-d <replaceable/n/</option></term>
- <listitem>
- <para>Enable cumulative debugging levels from 1 to
- <replaceable/n/. Interesting values are:
-
- <glosslist>
- <glossentry><glossterm/1/ <glossdef><simpara/Show
- actions (the default)/</glossdef></glossentry>
-
- <glossentry><glossterm/2/ <glossdef><simpara/Show
- "quiet" actions and display all action
- text/</glossdef></glossentry>
-
- <glossentry><glossterm/3/ <glossdef><simpara>Show
- dependency analysis, and target/source
- timestamps/paths</simpara></glossdef></glossentry>
-
- <glossentry><glossterm/4/ <glossdef><simpara/Show shell
- arguments/</glossdef></glossentry>
-
- <glossentry><glossterm/5/ <glossdef><simpara/Show rule
- invocations and variable
- expansions/</glossdef></glossentry>
-
- <glossentry><glossterm/6/ <glossdef><simpara>Show
- directory/header file/archive
- scans</simpara></glossdef></glossentry>
-
- <glossentry><glossterm/7/ <glossdef><simpara/Show
- variable settings/</glossdef></glossentry>
-
- <glossentry><glossterm/8/ <glossdef><simpara/Show
- variable fetches/</glossdef></glossentry>
-
- <glossentry><glossterm/9/ <glossdef><simpara/Show
- variable manipulation, scanner
- tokens/</glossdef></glossentry>
- </glosslist>
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-d +<replaceable/n/</option></term>
- <listitem>
- <para>Enable debugging level <replaceable/n/.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option/-d 0/</term>
- <listitem>
- <para>Turn off all debugging levels. Only errors are not
- suppressed.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-f <replaceable/jambase/</option></term>
- <listitem>
- <para>Read <replaceable/jambase/ instead of using the
- built-in Jambase. Only one <option/-f/ flag is permitted,
- but the <replaceable/jambase/ may explicitly include other
- files.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-j <replaceable/n/</option></term>
- <listitem>
- <para>Run up to <replaceable/n/ shell commands concurrently
- (UNIX and NT only). The default is 1.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option/-n/</term>
- <listitem>
- <para>Don't actually execute the updating actions, but do
- everything else. This changes the debug level default to
- <option/-d2/.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-o <replaceable/file/</option></term>
- <listitem>
- <para>Write the updating actions to the specified file
- instead of running them (or outputting them, as on the
- Mac).</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-s <replaceable/var/=<replaceable/value/</option></term>
- <listitem>
- <para>Set the variable <replaceable/var/ to
- <replaceable/value/, overriding both internal variables and
- variables imported from the environment. </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-t <replaceable/target/</option></term>
- <listitem>
- <para>Rebuild <replaceable/target/ and everything that
- depends on it, even if it is up-to-date.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option/-v/</term>
- <listitem>
- <para>Print the version of jam and exit.</para>
- </listitem>
- </varlistentry>
-
- </variablelist>
- </refsect1>
-
- <refsect1>
- <title>SEE ALSO</title>
-
- <para>Jam is documented fully in HTML pages available on Debian
- systems from
- <filename>/usr/share/doc/jam/Jam.html</filename>.</para>
- </refsect1>
-
- <refsect1>
- <title>AUTHOR</title>
-
- <para>This manual page was created by &dhusername; &dhemail; from
- the <filename/Jam.html/ documentation, for the &debian; system
- (but may be used by others).</para>
- </refsect1>
-</refentry>
-
-<!-- Keep this comment at the end of the file
-Local variables:
-sgml-omittag:t
-sgml-shorttag:t
-End:
--->
diff --git a/jam-files/engine/debian/rules b/jam-files/engine/debian/rules
deleted file mode 100755
index 756052a3..00000000
--- a/jam-files/engine/debian/rules
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/make -f
-# Sample debian/rules that uses debhelper.
-# GNU copyright 1997 to 1999 by Joey Hess.
-# GNU copyright 2001 by Yann Dirson.
-
-# This is the debian/rules file for packages jam and ftjam
-# It should be usable with both packages without any change
-
-# Uncomment this to turn on verbose mode.
-#export DH_VERBOSE=1
-
-# This is the debhelper compatibility version to use.
-export DH_COMPAT=3
-
-topdir=$(shell pwd)
-
-jam=bjam
-binname=bjam
-
-build: build-stamp
-build-stamp: debian/jam.1
- dh_testdir
-
- ./build.sh
-
- touch build-stamp
-
-%.1: %.man.sgml
- /usr/bin/docbook-to-man $< > $@
-
-clean:
- dh_testdir
- dh_testroot
- rm -f build-stamp
- rm -rf bin.*
- rm -f jam0 debian/jam.1
- dh_clean
-
-install: build
- dh_testdir
- dh_testroot
- dh_clean -k
- dh_installdirs
-
- install -d ${topdir}/debian/${jam}/usr/bin
- install -m755 bin.linuxx86/bjam ${topdir}/debian/${jam}/usr/bin/
- install -d ${topdir}/debian/${jam}/usr/share/man/man1/
- install -m644 debian/jam.1 ${topdir}/debian/${jam}/usr/share/man/man1/${binname}.1
-
-
-# Build architecture-independent files here.
-binary-indep: build install
-# We have nothing to do by default.
-
-# Build architecture-dependent files here.
-binary-arch: build install
- dh_testdir
- dh_testroot
- dh_installdocs README RELNOTES Jambase *.html
-# dh_installemacsen
-# dh_undocumented
- dh_installchangelogs
- dh_strip
- dh_compress
- dh_fixperms
- dh_installdeb
- dh_shlibdeps
- dh_gencontrol
- dh_md5sums
- dh_builddeb
-
-binary: binary-indep binary-arch
-.PHONY: build clean binary-indep binary-arch binary install configure
diff --git a/jam-files/engine/debug.c b/jam-files/engine/debug.c
deleted file mode 100644
index 7290555a..00000000
--- a/jam-files/engine/debug.c
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- Copyright Rene Rivera 2005.
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#include "jam.h"
-
-#include "hash.h"
-
-#include <time.h>
-#include <assert.h>
-
-
-static profile_frame * profile_stack = 0;
-static struct hash * profile_hash = 0;
-static profile_info profile_other = { "[OTHER]", 0, 0, 0, 0, 0 };
-static profile_info profile_total = { "[TOTAL]", 0, 0, 0, 0, 0 };
-
-
-profile_frame * profile_init( char * rulename, profile_frame * frame )
-{
- if ( DEBUG_PROFILE ) profile_enter( rulename, frame );
- return frame;
-}
-
-
-void profile_enter( char * rulename, profile_frame * frame )
-{
- if ( DEBUG_PROFILE )
- {
- clock_t start = clock();
- profile_info info;
- profile_info * p = &info;
-
- if ( !rulename ) p = &profile_other;
-
- if ( !profile_hash && rulename )
- profile_hash = hashinit( sizeof( profile_info ), "profile" );
-
- info.name = rulename;
-
- if ( rulename && hashenter( profile_hash, (HASHDATA * *)&p ) )
- p->cumulative = p->net = p->num_entries = p->stack_count = p->memory = 0;
-
- ++p->num_entries;
- ++p->stack_count;
-
- frame->info = p;
-
- frame->caller = profile_stack;
- profile_stack = frame;
-
- frame->entry_time = clock();
- frame->overhead = 0;
- frame->subrules = 0;
-
- /* caller pays for the time it takes to play with the hash table */
- if ( frame->caller )
- frame->caller->overhead += frame->entry_time - start;
- }
-}
-
-
-void profile_memory( long mem )
-{
- if ( DEBUG_PROFILE )
- if ( profile_stack && profile_stack->info )
- profile_stack->info->memory += mem;
-}
-
-
-void profile_exit( profile_frame * frame )
-{
- if ( DEBUG_PROFILE )
- {
- /* Cumulative time for this call. */
- clock_t t = clock() - frame->entry_time - frame->overhead;
- /* If this rule is already present on the stack, don't add the time for
- * this instance.
- */
- if ( frame->info->stack_count == 1 )
- frame->info->cumulative += t;
-        /* Net time does not depend on presence of the same rule in the call stack.
- */
- frame->info->net += t - frame->subrules;
-
- if ( frame->caller )
- {
- /* Caller's cumulative time must account for this overhead. */
- frame->caller->overhead += frame->overhead;
- frame->caller->subrules += t;
- }
- /* Pop this stack frame. */
- --frame->info->stack_count;
- profile_stack = frame->caller;
- }
-}
-
-
-static void dump_profile_entry( void * p_, void * ignored )
-{
- profile_info * p = (profile_info *)p_;
- unsigned long mem_each = ( p->memory / ( p->num_entries ? p->num_entries : 1 ) );
- double cumulative = p->cumulative;
- double net = p->net;
- double q = p->net;
- q /= ( p->num_entries ? p->num_entries : 1 );
- cumulative /= CLOCKS_PER_SEC;
- net /= CLOCKS_PER_SEC;
- q /= CLOCKS_PER_SEC;
- if ( !ignored )
- {
- profile_total.cumulative += p->net;
- profile_total.memory += p->memory;
- }
- printf( "%10ld %12.6f %12.6f %12.8f %10ld %10ld %s\n", p->num_entries,
- cumulative, net, q, p->memory, mem_each, p->name );
-}
-
-
-void profile_dump()
-{
- if ( profile_hash )
- {
- printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--",
- "--net--", "--each--", "--mem--", "--each--", "--name--" );
- hashenumerate( profile_hash, dump_profile_entry, 0 );
- dump_profile_entry( &profile_other, 0 );
- dump_profile_entry( &profile_total, (void *)1 );
- }
-}
diff --git a/jam-files/engine/debug.h b/jam-files/engine/debug.h
deleted file mode 100644
index 115a8873..00000000
--- a/jam-files/engine/debug.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- Copyright Rene Rivera 2005.
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-#ifndef BJAM_DEBUG_H
-#define BJAM_DEBUG_H
-
-#include "jam.h"
-#include <time.h>
-
-
-struct profile_info
-{
- /* name of rule being called */
- char* name;
- /* cumulative time spent in rule */
- clock_t cumulative;
- /* time spent in rule proper */
- clock_t net;
-    /* number of times the rule was entered */
-    unsigned long num_entries;
-    /* number of times this function is present in the stack */
- unsigned long stack_count;
- /* bytes of memory allocated by the call */
- unsigned long memory;
-};
-typedef struct profile_info profile_info;
-
-struct profile_frame
-{
- /* permanent storage where data accumulates */
- profile_info* info;
- /* overhead for profiling in this call */
- clock_t overhead;
- /* time of last entry to rule */
- clock_t entry_time;
- /* stack frame of caller */
- struct profile_frame* caller;
- /* time spent in subrules */
- clock_t subrules;
-};
-typedef struct profile_frame profile_frame;
-
-profile_frame * profile_init( char * rulename, profile_frame * frame );
-void profile_enter( char* rulename, profile_frame * frame );
-void profile_memory( long mem );
-void profile_exit( profile_frame * frame );
-void profile_dump();
-
-#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( #scope, &PROF_ ## scope )
-#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p )
-
-#endif
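For reference, the PROFILE_ENTER / PROFILE_EXIT macros declared above are how a scope gets instrumented, and profile_dump() prints the accumulated table at the end of a run. A minimal usage sketch in C (the function name is hypothetical, and it assumes jam.h/debug.h are on the include path with DEBUG_PROFILE enabled):

    #include "jam.h"
    #include "debug.h"

    /* Hypothetical rule evaluator instrumented with the macros from debug.h. */
    static void evaluate_rule_example( void )
    {
        /* Declares a local profile_frame and registers it via profile_init();
         * cumulative time for this name starts counting here.
         */
        PROFILE_ENTER( evaluate_rule_example );

        /* ... the work being timed ... */

        /* Pops the frame; net time excludes time spent in nested profiled
         * scopes, which is accumulated through the subrules field.
         */
        PROFILE_EXIT( evaluate_rule_example );
    }

    /* profile_dump() then prints one row per profiled name:
     *   --count--  --gross--  --net--  --each--  --mem--  --each--  --name--
     */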
diff --git a/jam-files/engine/execcmd.h b/jam-files/engine/execcmd.h
deleted file mode 100644
index 67f2b839..00000000
--- a/jam-files/engine/execcmd.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * execcmd.h - execute a shell script.
- *
- * Defines the interface to be implemented in platform specific implementation
- * modules.
- *
- * 05/04/94 (seiwald) - async multiprocess interface
- */
-
-#ifndef EXECCMD_H
-#define EXECCMD_H
-
-#include <time.h>
-
-typedef struct timing_info
-{
- double system;
- double user;
- time_t start;
- time_t end;
-} timing_info;
-
-void exec_cmd
-(
- char * string,
- void (* func)( void * closure, int status, timing_info *, char *, char * ),
- void * closure,
- LIST * shell,
- char * action,
- char * target
-);
-
-int exec_wait();
-
-#define EXEC_CMD_OK 0
-#define EXEC_CMD_FAIL 1
-#define EXEC_CMD_INTR 2
-
-#endif
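Since execcmd.h only declares the interface, a minimal sketch of a conforming completion callback and the usual drive pattern may help; the callback name and command are illustrative only, and the snippet assumes jam.h, lists.h and execcmd.h are available:

    #include <stdio.h>
    #include "jam.h"
    #include "lists.h"
    #include "execcmd.h"

    /* Completion callback matching the function pointer exec_cmd() takes:
     * closure is whatever was passed to exec_cmd(), status is one of
     * EXEC_CMD_OK / EXEC_CMD_FAIL / EXEC_CMD_INTR, and the last two arguments
     * are the invoked command and its (merged) output.
     */
    static void on_cmd_done( void * closure, int status, timing_info * t,
                             char * cmd, char * output )
    {
        if ( status != EXEC_CMD_OK )
            fprintf( stderr, "command failed: %s\n", cmd ? cmd : "(unknown)" );
        if ( output )
            fputs( output, stdout );
        (void)closure; (void)t;
    }

    /* Typical drive pattern (illustrative):
     *     exec_cmd( "echo hello", on_cmd_done, 0, 0, "action", "target" );
     *     while ( exec_wait() ) ;
     */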
diff --git a/jam-files/engine/execmac.c b/jam-files/engine/execmac.c
deleted file mode 100644
index 2ddddedd..00000000
--- a/jam-files/engine/execmac.c
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "execcmd.h"
-#include <errno.h>
-
-#ifdef OS_MAC
-
-/*
- * execunix.c - execute a shell script on UNIX
- *
- * If $(JAMSHELL) is defined, uses that to formulate execvp().
- * The default is:
- *
- * /bin/sh -c %
- *
- * Each word must be an individual element in a jam variable value.
- *
- * In $(JAMSHELL), % expands to the command string and ! expands to
- * the slot number (starting at 1) for multiprocess (-j) invocations.
- * If $(JAMSHELL) doesn't include a %, it is tacked on as the last
- * argument.
- *
- * Don't just set JAMSHELL to /bin/sh - it won't work!
- *
- * External routines:
- * exec_cmd() - launch an async command execution.
- * exec_wait() - wait and drive at most one execution completion.
- *
- * Internal routines:
- * onintr() - bump intr to note command interruption.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 05/04/94 (seiwald) - async multiprocess interface
- * 01/22/95 (seiwald) - $(JAMSHELL) support
- */
-
-
-/*
- * exec_cmd() - launch an async command execution.
- */
-
-void exec_cmd
-(
- char * string,
- void (* func)( void * closure, int status, timing_info *, char *, char * ),
- void * closure,
- LIST * shell
-)
-{
- printf( "%s", string );
- (*func)( closure, EXEC_CMD_OK );
-}
-
-/*
- * exec_wait() - wait and drive at most one execution completion.
- */
-
-int exec_wait()
-{
- return 0;
-}
-
-#endif /* OS_MAC */
diff --git a/jam-files/engine/execnt.c b/jam-files/engine/execnt.c
deleted file mode 100644
index 76420451..00000000
--- a/jam-files/engine/execnt.c
+++ /dev/null
@@ -1,1296 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2007 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "execcmd.h"
-#include "pathsys.h"
-#include "string.h"
-#include "output.h"
-#include <errno.h>
-#include <assert.h>
-#include <ctype.h>
-#include <time.h>
-#include <math.h>
-
-#ifdef USE_EXECNT
-
-#define WIN32_LEAN_AND_MEAN
-#include <windows.h>
-#include <process.h>
-#include <tlhelp32.h>
-
-/*
- * execnt.c - execute a shell command on Windows NT
- *
- * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp().
- * The default is:
- *
- * /bin/sh -c % [ on UNIX/AmigaOS ]
- * cmd.exe /c % [ on Windows NT ]
- *
- * Each word must be an individual element in a jam variable value.
- *
- * In $(JAMSHELL), % expands to the command string and ! expands to
- * the slot number (starting at 1) for multiprocess (-j) invocations.
- * If $(JAMSHELL) doesn't include a %, it is tacked on as the last
- * argument.
- *
- * Don't just set JAMSHELL to /bin/sh or cmd.exe - it won't work!
- *
- * External routines:
- * exec_cmd() - launch an async command execution.
- * exec_wait() - wait and drive at most one execution completion.
- *
- * Internal routines:
- * onintr() - bump intr to note command interruption.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 05/04/94 (seiwald) - async multiprocess interface
- * 01/22/95 (seiwald) - $(JAMSHELL) support
- * 06/02/97 (gsar) - full async multiprocess support for Win32
- */
-
-/* get the maximum command line length according to the OS */
-int maxline();
-
-/* delete an argv list */
-static void free_argv(char**);
-/* Convert a command string into arguments for spawnvp. */
-static char** string_to_args(const char*);
-/* bump intr to note command interruption */
-static void onintr(int);
-/* If the command is suitable for execution via spawnvp */
-long can_spawn(char*);
-/* Add two 64-bit unsigned numbers, h1l1 and h2l2 */
-static FILETIME add_64(
- unsigned long h1, unsigned long l1,
- unsigned long h2, unsigned long l2);
-static FILETIME add_FILETIME(FILETIME t1, FILETIME t2);
-static FILETIME negate_FILETIME(FILETIME t);
-/* Convert a FILETIME to a number of seconds */
-static double filetime_seconds(FILETIME t);
-/* record the timing info for the process */
-static void record_times(HANDLE, timing_info*);
-/* calc the current running time of an *active* process */
-static double running_time(HANDLE);
-/* */
-DWORD get_process_id(HANDLE);
-/* terminate the given process, after terminating all its children */
-static void kill_process_tree(DWORD, HANDLE);
-/* waits for a command to complete or for the given timeout, whichever is first */
-static int try_wait(int timeoutMillis);
-/* reads any pending output for running commands */
-static void read_output();
-/* checks if a command ran out of time, and kills it */
-static int try_kill_one();
-/* */
-static double creation_time(HANDLE);
-/* Recursive check if first process is parent (directly or indirectly) of
-the second one. */
-static int is_parent_child(DWORD, DWORD);
-/* */
-static void close_alert(HANDLE);
-/* close any alerts hanging around */
-static void close_alerts();
-
-/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
-
-static int intr = 0;
-static int cmdsrunning = 0;
-static void (* istat)( int );
-
-
-/* The list of commands we run. */
-static struct
-{
- string action; /* buffer to hold action */
- string target; /* buffer to hold target */
- string command; /* buffer to hold command being invoked */
-
- /* Temporary batch file used to execute the action when needed. */
- char * tempfile_bat;
-
- /* Pipes for communicating with the child process. Parent reads from (0),
- * child writes to (1).
- */
- HANDLE pipe_out[ 2 ];
- HANDLE pipe_err[ 2 ];
-
- string buffer_out; /* buffer to hold stdout, if any */
- string buffer_err; /* buffer to hold stderr, if any */
-
- PROCESS_INFORMATION pi; /* running process information */
- DWORD exit_code; /* executed command's exit code */
- int exit_reason; /* reason why a command completed */
-
- /* Function called when the command completes. */
- void (* func)( void * closure, int status, timing_info *, char *, char * );
-
- /* Opaque data passed back to the 'func' callback called when the command
- * completes.
- */
- void * closure;
-}
-cmdtab[ MAXJOBS ] = { { 0 } };
-
-
-/*
- * Execution unit tests.
- */
-
-void execnt_unit_test()
-{
-#if !defined( NDEBUG )
- /* vc6 preprocessor is broken, so assert with these strings gets confused.
- * Use a table instead.
- */
- typedef struct test { char * command; int result; } test;
- test tests[] = {
- { "x", 0 },
- { "x\n ", 0 },
- { "x\ny", 1 },
- { "x\n\n y", 1 },
- { "echo x > foo.bar", 1 },
- { "echo x < foo.bar", 1 },
- { "echo x \">\" foo.bar", 0 },
- { "echo x \"<\" foo.bar", 0 },
- { "echo x \\\">\\\" foo.bar", 1 },
- { "echo x \\\"<\\\" foo.bar", 1 } };
- int i;
- for ( i = 0; i < sizeof( tests ) / sizeof( *tests ); ++i )
- assert( !can_spawn( tests[ i ].command ) == tests[ i ].result );
-
- {
- char * long_command = BJAM_MALLOC_ATOMIC( MAXLINE + 10 );
- assert( long_command != 0 );
- memset( long_command, 'x', MAXLINE + 9 );
- long_command[ MAXLINE + 9 ] = 0;
- assert( can_spawn( long_command ) == MAXLINE + 9 );
- BJAM_FREE( long_command );
- }
-
- {
- /* Work around vc6 bug; it doesn't like escaped string
- * literals inside assert
- */
- char * * argv = string_to_args(" \"g++\" -c -I\"Foobar\"" );
- char const expected[] = "-c -I\"Foobar\"";
-
- assert( !strcmp( argv[ 0 ], "g++" ) );
- assert( !strcmp( argv[ 1 ], expected ) );
- free_argv( argv );
- }
-#endif
-}
-
-
-/*
- * exec_cmd() - launch an async command execution.
- */
-
-void exec_cmd
-(
- char * command,
- void (* func)( void * closure, int status, timing_info *, char * invoked_command, char * command_output ),
- void * closure,
- LIST * shell,
- char * action,
- char * target
-)
-{
- int slot;
- int raw_cmd = 0 ;
- char * argv_static[ MAXARGC + 1 ]; /* +1 for NULL */
- char * * argv = argv_static;
- char * p;
- char * command_orig = command;
-
- /* Check to see if we need to hack around the line-length limitation. Look
- * for a JAMSHELL setting of "%", indicating that the command should be
- * invoked directly.
- */
- if ( shell && !strcmp( shell->string, "%" ) && !list_next( shell ) )
- {
- raw_cmd = 1;
- shell = 0;
- }
-
- /* Find a slot in the running commands table for this one. */
- for ( slot = 0; slot < MAXJOBS; ++slot )
- if ( !cmdtab[ slot ].pi.hProcess )
- break;
- if ( slot == MAXJOBS )
- {
- printf( "no slots for child!\n" );
- exit( EXITBAD );
- }
-
- /* Compute the name of a temp batch file, for possible use. */
- if ( !cmdtab[ slot ].tempfile_bat )
- {
- char const * tempdir = path_tmpdir();
- DWORD procID = GetCurrentProcessId();
-
- /* SVA - allocate 64 bytes extra just to be safe. */
- cmdtab[ slot ].tempfile_bat = BJAM_MALLOC_ATOMIC( strlen( tempdir ) + 64 );
-
- sprintf( cmdtab[ slot ].tempfile_bat, "%s\\jam%d-%02d.bat",
- tempdir, procID, slot );
- }
-
-    /* Trim leading (but not trailing) white space. */
- while ( *( command + 1 ) && isspace( *command ) )
- ++command;
-
- /* Write to .BAT file unless the line would be too long and it meets the
- * other spawnability criteria.
- */
- if ( raw_cmd && ( can_spawn( command ) >= MAXLINE ) )
- {
- if ( DEBUG_EXECCMD )
- printf("Executing raw command directly\n");
- }
- else
- {
- FILE * f = 0;
- int tries = 0;
- raw_cmd = 0;
-
- /* Write command to bat file. For some reason this open can fail
-         * intermittently. But doing some retries works. Most likely this is due
- * to a previously existing file of the same name that happens to be
- * opened by an active virus scanner. Pointed out and fixed by Bronek
- * Kozicki.
- */
- for ( ; !f && ( tries < 4 ); ++tries )
- {
- f = fopen( cmdtab[ slot ].tempfile_bat, "w" );
- if ( !f && ( tries < 4 ) ) Sleep( 250 );
- }
- if ( !f )
- {
- printf( "failed to write command file!\n" );
- exit( EXITBAD );
- }
- fputs( command, f );
- fclose( f );
-
- command = cmdtab[ slot ].tempfile_bat;
-
- if ( DEBUG_EXECCMD )
- {
- if ( shell )
- printf( "using user-specified shell: %s", shell->string );
- else
- printf( "Executing through .bat file\n" );
- }
- }
-
- /* Formulate argv; If shell was defined, be prepared for % and ! subs.
- * Otherwise, use stock cmd.exe.
- */
- if ( shell )
- {
- int i;
- char jobno[ 4 ];
- int gotpercent = 0;
-
- sprintf( jobno, "%d", slot + 1 );
-
- for ( i = 0; shell && ( i < MAXARGC ); ++i, shell = list_next( shell ) )
- {
- switch ( shell->string[ 0 ] )
- {
- case '%': argv[ i ] = command; ++gotpercent; break;
- case '!': argv[ i ] = jobno; break;
- default : argv[ i ] = shell->string;
- }
- if ( DEBUG_EXECCMD )
- printf( "argv[%d] = '%s'\n", i, argv[ i ] );
- }
-
- if ( !gotpercent )
- argv[ i++ ] = command;
-
- argv[ i ] = 0;
- }
- else if ( raw_cmd )
- {
- argv = string_to_args( command );
- }
- else
- {
- argv[ 0 ] = "cmd.exe";
- argv[ 1 ] = "/Q/C"; /* anything more is non-portable */
- argv[ 2 ] = command;
- argv[ 3 ] = 0;
- }
-
- /* Catch interrupts whenever commands are running. */
- if ( !cmdsrunning++ )
- istat = signal( SIGINT, onintr );
-
- /* Start the command. */
- {
- SECURITY_ATTRIBUTES sa
- = { sizeof( SECURITY_ATTRIBUTES ), 0, 0 };
- SECURITY_DESCRIPTOR sd;
- STARTUPINFO si
- = { sizeof( STARTUPINFO ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
- string cmd;
-
- /* Init the security data. */
- InitializeSecurityDescriptor( &sd, SECURITY_DESCRIPTOR_REVISION );
- SetSecurityDescriptorDacl( &sd, TRUE, NULL, FALSE );
- sa.lpSecurityDescriptor = &sd;
- sa.bInheritHandle = TRUE;
-
- /* Create the stdout, which is also the merged out + err, pipe. */
- if ( !CreatePipe( &cmdtab[ slot ].pipe_out[ 0 ],
- &cmdtab[ slot ].pipe_out[ 1 ], &sa, 0 ) )
- {
- perror( "CreatePipe" );
- exit( EXITBAD );
- }
-
-        /* Create the stderr pipe, used when the action's error output is piped separately. */
- if ( globs.pipe_action == 2 )
- {
- if ( !CreatePipe( &cmdtab[ slot ].pipe_err[ 0 ],
- &cmdtab[ slot ].pipe_err[ 1 ], &sa, 0 ) )
- {
- perror( "CreatePipe" );
- exit( EXITBAD );
- }
- }
-
- /* Set handle inheritance off for the pipe ends the parent reads from. */
- SetHandleInformation( cmdtab[ slot ].pipe_out[ 0 ], HANDLE_FLAG_INHERIT, 0 );
- if ( globs.pipe_action == 2 )
- SetHandleInformation( cmdtab[ slot ].pipe_err[ 0 ], HANDLE_FLAG_INHERIT, 0 );
-
- /* Hide the child window, if any. */
- si.dwFlags |= STARTF_USESHOWWINDOW;
- si.wShowWindow = SW_HIDE;
-
- /* Set the child outputs to the pipes. */
- si.dwFlags |= STARTF_USESTDHANDLES;
- si.hStdOutput = cmdtab[ slot ].pipe_out[ 1 ];
- if ( globs.pipe_action == 2 )
- {
- /* Pipe stderr to the action error output. */
- si.hStdError = cmdtab[ slot ].pipe_err[ 1 ];
- }
- else if ( globs.pipe_action == 1 )
- {
- /* Pipe stderr to the console error output. */
- si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
- }
- else
- {
- /* Pipe stderr to the action merged output. */
- si.hStdError = cmdtab[ slot ].pipe_out[ 1 ];
- }
-
- /* Let the child inherit stdin, as some commands assume it's available. */
- si.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
-
- /* Save the operation for exec_wait() to find. */
- cmdtab[ slot ].func = func;
- cmdtab[ slot ].closure = closure;
- if ( action && target )
- {
- string_copy( &cmdtab[ slot ].action, action );
- string_copy( &cmdtab[ slot ].target, target );
- }
- else
- {
- string_free( &cmdtab[ slot ].action );
- string_new ( &cmdtab[ slot ].action );
- string_free( &cmdtab[ slot ].target );
- string_new ( &cmdtab[ slot ].target );
- }
- string_copy( &cmdtab[ slot ].command, command_orig );
-
- /* Put together the command we run. */
- {
- char * * argp = argv;
- string_new( &cmd );
- string_copy( &cmd, *(argp++) );
- while ( *argp )
- {
- string_push_back( &cmd, ' ' );
- string_append( &cmd, *(argp++) );
- }
- }
-
- /* Create output buffers. */
- string_new( &cmdtab[ slot ].buffer_out );
- string_new( &cmdtab[ slot ].buffer_err );
-
- /* Run the command by creating a sub-process for it. */
- if (
- ! CreateProcess(
- NULL , /* application name */
- cmd.value , /* command line */
- NULL , /* process attributes */
- NULL , /* thread attributes */
- TRUE , /* inherit handles */
- CREATE_NEW_PROCESS_GROUP, /* create flags */
- NULL , /* env vars, null inherits env */
- NULL , /* current dir, null is our */
- /* current dir */
- &si , /* startup info */
- &cmdtab[ slot ].pi /* child process info, if created */
- )
- )
- {
- perror( "CreateProcess" );
- exit( EXITBAD );
- }
-
- /* Clean up temporary stuff. */
- string_free( &cmd );
- }
-
- /* Wait until we are under the limit of concurrent commands. Do not trust
- * globs.jobs alone.
- */
- while ( ( cmdsrunning >= MAXJOBS ) || ( cmdsrunning >= globs.jobs ) )
- if ( !exec_wait() )
- break;
-
- if ( argv != argv_static )
- free_argv( argv );
-}
-
-
-/*
- * exec_wait()
- * * wait and drive at most one execution completion.
- * * waits for one command to complete, while processing the i/o for all
- * ongoing commands.
- *
- * Returns 0 if called when there were no more commands being executed or 1
- * otherwise.
- */
-
-int exec_wait()
-{
- int i = -1;
-
- /* Handle naive make1() which does not know if cmds are running. */
- if ( !cmdsrunning )
- return 0;
-
- /* Wait for a command to complete, while snarfing up any output. */
- do
- {
- /* Check for a complete command, briefly. */
- i = try_wait(500);
- /* Read in the output of all running commands. */
- read_output();
- /* Close out pending debug style dialogs. */
- close_alerts();
- /* Check if a command ran out of time. */
- if ( i < 0 ) i = try_kill_one();
- }
- while ( i < 0 );
-
- /* We have a command... process it. */
- --cmdsrunning;
- {
- timing_info time;
- int rstat;
-
- /* The time data for the command. */
- record_times( cmdtab[ i ].pi.hProcess, &time );
-
- /* Clear the temp file. */
- if ( cmdtab[ i ].tempfile_bat )
- {
- unlink( cmdtab[ i ].tempfile_bat );
- BJAM_FREE( cmdtab[ i ].tempfile_bat );
- cmdtab[ i ].tempfile_bat = NULL;
- }
-
- /* Find out the process exit code. */
- GetExitCodeProcess( cmdtab[ i ].pi.hProcess, &cmdtab[ i ].exit_code );
-
-        /* The disposition of the command. */
- if ( intr )
- rstat = EXEC_CMD_INTR;
- else if ( cmdtab[ i ].exit_code != 0 )
- rstat = EXEC_CMD_FAIL;
- else
- rstat = EXEC_CMD_OK;
-
- /* Output the action block. */
- out_action(
- cmdtab[ i ].action.size > 0 ? cmdtab[ i ].action.value : 0,
- cmdtab[ i ].target.size > 0 ? cmdtab[ i ].target.value : 0,
- cmdtab[ i ].command.size > 0 ? cmdtab[ i ].command.value : 0,
- cmdtab[ i ].buffer_out.size > 0 ? cmdtab[ i ].buffer_out.value : 0,
- cmdtab[ i ].buffer_err.size > 0 ? cmdtab[ i ].buffer_err.value : 0,
- cmdtab[ i ].exit_reason );
-
- /* Call the callback, may call back to jam rule land. Assume -p0 in
- * effect so only pass buffer containing merged output.
- */
- (*cmdtab[ i ].func)(
- cmdtab[ i ].closure,
- rstat,
- &time,
- cmdtab[ i ].command.value,
- cmdtab[ i ].buffer_out.value );
-
- /* Clean up the command data, process, etc. */
- string_free( &cmdtab[ i ].action ); string_new( &cmdtab[ i ].action );
- string_free( &cmdtab[ i ].target ); string_new( &cmdtab[ i ].target );
- string_free( &cmdtab[ i ].command ); string_new( &cmdtab[ i ].command );
- if ( cmdtab[ i ].pi.hProcess ) { CloseHandle( cmdtab[ i ].pi.hProcess ); cmdtab[ i ].pi.hProcess = 0; }
- if ( cmdtab[ i ].pi.hThread ) { CloseHandle( cmdtab[ i ].pi.hThread ); cmdtab[ i ].pi.hThread = 0; }
- if ( cmdtab[ i ].pipe_out[ 0 ] ) { CloseHandle( cmdtab[ i ].pipe_out[ 0 ] ); cmdtab[ i ].pipe_out[ 0 ] = 0; }
- if ( cmdtab[ i ].pipe_out[ 1 ] ) { CloseHandle( cmdtab[ i ].pipe_out[ 1 ] ); cmdtab[ i ].pipe_out[ 1 ] = 0; }
- if ( cmdtab[ i ].pipe_err[ 0 ] ) { CloseHandle( cmdtab[ i ].pipe_err[ 0 ] ); cmdtab[ i ].pipe_err[ 0 ] = 0; }
- if ( cmdtab[ i ].pipe_err[ 1 ] ) { CloseHandle( cmdtab[ i ].pipe_err[ 1 ] ); cmdtab[ i ].pipe_err[ 1 ] = 0; }
- string_free( &cmdtab[ i ].buffer_out ); string_new( &cmdtab[ i ].buffer_out );
- string_free( &cmdtab[ i ].buffer_err ); string_new( &cmdtab[ i ].buffer_err );
- cmdtab[ i ].exit_code = 0;
- cmdtab[ i ].exit_reason = EXIT_OK;
- }
-
- return 1;
-}
-
-
-/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
-
-static void free_argv( char * * args )
-{
- BJAM_FREE( args[ 0 ] );
- BJAM_FREE( args );
-}
-
-
-/*
- * For more details on Windows cmd.exe shell command-line length limitations see
- * the following MSDN article:
- * http://support.microsoft.com/default.aspx?scid=kb;en-us;830473
- */
-
-int maxline()
-{
- OSVERSIONINFO os_info;
- os_info.dwOSVersionInfoSize = sizeof( os_info );
- GetVersionEx( &os_info );
-
- if ( os_info.dwMajorVersion >= 5 ) return 8191; /* XP > */
- if ( os_info.dwMajorVersion == 4 ) return 2047; /* NT 4.x */
- return 996; /* NT 3.5.1 */
-}
-
-
-/*
- * Convert a command string into arguments for spawnvp(). The original code,
- * inherited from ftjam, tried to break up every argument on the command-line,
- * dealing with quotes, but that is really a waste of time on Win32, at least.
- * It turns out that all you need to do is get the raw path to the executable in
- * the first argument to spawnvp(), and you can pass all the rest of the
- * command-line arguments to spawnvp() in one, un-processed string.
- *
- * New strategy: break the string in at most one place.
- */
-
-static char * * string_to_args( char const * string )
-{
- int src_len;
- int in_quote;
- char * line;
- char const * src;
- char * dst;
- char * * argv;
-
- /* Drop leading and trailing whitespace if any. */
- while ( isspace( *string ) )
- ++string;
-
- src_len = strlen( string );
- while ( ( src_len > 0 ) && isspace( string[ src_len - 1 ] ) )
- --src_len;
-
- /* Copy the input string into a buffer we can modify. */
- line = (char *)BJAM_MALLOC_ATOMIC( src_len + 1 );
- if ( !line )
- return 0;
-
- /* Allocate the argv array.
- * element 0: stores the path to the executable
- * element 1: stores the command-line arguments to the executable
- * element 2: NULL terminator
- */
- argv = (char * *)BJAM_MALLOC( 3 * sizeof( char * ) );
- if ( !argv )
- {
- BJAM_FREE( line );
- return 0;
- }
-
- /* Strip quotes from the first command-line argument and find where it ends.
- * Quotes are illegal in Win32 pathnames, so we do not need to worry about
- * preserving escaped quotes here. Spaces can not be escaped in Win32, only
- * enclosed in quotes, so removing backslash escapes is also a non-issue.
- */
- in_quote = 0;
- for ( src = string, dst = line ; *src; ++src )
- {
- if ( *src == '"' )
- in_quote = !in_quote;
- else if ( !in_quote && isspace( *src ) )
- break;
- else
- *dst++ = *src;
- }
- *dst++ = 0;
- argv[ 0 ] = line;
-
- /* Skip whitespace in src. */
- while ( isspace( *src ) )
- ++src;
-
- argv[ 1 ] = dst;
-
- /* Copy the rest of the arguments verbatim. */
- src_len -= src - string;
-
- /* Use strncat() because it appends a trailing nul. */
- *dst = 0;
- strncat( dst, src, src_len );
-
- argv[ 2 ] = 0;
-
- return argv;
-}
-
-
-static void onintr( int disp )
-{
- ++intr;
- printf( "...interrupted\n" );
-}
-
-
-/*
- * can_spawn() - If the command is suitable for execution via spawnvp(), return
- * a number >= the number of characters it would occupy on the command-line.
- * Otherwise, return zero.
- */
-
-long can_spawn( char * command )
-{
- char * p;
- char inquote = 0;
-
- /* Move to the first non-whitespace. */
- command += strspn( command, " \t" );
-
- p = command;
-
- /* Look for newlines and unquoted i/o redirection. */
- do
- {
- p += strcspn( p, "'\n\"<>|" );
-
- switch ( *p )
- {
- case '\n':
- /* Skip over any following spaces. */
- while ( isspace( *p ) )
- ++p;
- /* Must use a .bat file if there is anything significant following
- * the newline.
- */
- if ( *p )
- return 0;
- break;
-
- case '"':
- case '\'':
- if ( ( p > command ) && ( p[ -1 ] != '\\' ) )
- {
- if ( inquote == *p )
- inquote = 0;
- else if ( inquote == 0 )
- inquote = *p;
- }
- ++p;
- break;
-
- case '<':
- case '>':
- case '|':
- if ( !inquote )
- return 0;
- ++p;
- break;
- }
- }
- while ( *p );
-
- /* Return the number of characters the command will occupy. */
- return p - command;
-}
-
-
-/* 64-bit arithmetic helpers. */
-
-/* Compute the carry bit from the addition of two 32-bit unsigned numbers. */
-#define add_carry_bit( a, b ) ( (((a) | (b)) >> 31) & (~((a) + (b)) >> 31) & 0x1 )
-
-/* Compute the high 32 bits of the addition of two 64-bit unsigned numbers, h1l1 and h2l2. */
-#define add_64_hi( h1, l1, h2, l2 ) ((h1) + (h2) + add_carry_bit(l1, l2))
-
-
-/*
- * Add two 64-bit unsigned numbers, h1l1 and h2l2.
- */
-
-static FILETIME add_64
-(
- unsigned long h1, unsigned long l1,
- unsigned long h2, unsigned long l2
-)
-{
- FILETIME result;
- result.dwLowDateTime = l1 + l2;
- result.dwHighDateTime = add_64_hi( h1, l1, h2, l2 );
- return result;
-}
-
-
-static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 )
-{
- return add_64( t1.dwHighDateTime, t1.dwLowDateTime, t2.dwHighDateTime,
- t2.dwLowDateTime );
-}
-
-
-static FILETIME negate_FILETIME( FILETIME t )
-{
- /* 2s complement negation */
- return add_64( ~t.dwHighDateTime, ~t.dwLowDateTime, 0, 1 );
-}
-
-
-/*
- * Convert a FILETIME to a number of seconds.
- */
-
-static double filetime_seconds( FILETIME t )
-{
- return t.dwHighDateTime * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) + t.dwLowDateTime * 1.0e-7;
-}
-
-
-/*
- * What should be a simple conversion turns out to be horribly complicated by
- * the deficiencies of MSVC and the Win32 API.
- */
-
-static time_t filetime_dt( FILETIME t_utc )
-{
- static int calc_time_diff = 1;
- static double time_diff;
- if ( calc_time_diff )
- {
- struct tm t0_;
- FILETIME f0_local;
- FILETIME f0_;
- SYSTEMTIME s0_;
- GetSystemTime( &s0_ );
- t0_.tm_year = s0_.wYear-1900;
- t0_.tm_mon = s0_.wMonth-1;
- t0_.tm_wday = s0_.wDayOfWeek;
- t0_.tm_mday = s0_.wDay;
- t0_.tm_hour = s0_.wHour;
- t0_.tm_min = s0_.wMinute;
- t0_.tm_sec = s0_.wSecond;
- t0_.tm_isdst = 0;
- SystemTimeToFileTime( &s0_, &f0_local );
- LocalFileTimeToFileTime( &f0_local, &f0_ );
- time_diff = filetime_seconds( f0_ ) - (double)mktime( &t0_ );
- calc_time_diff = 0;
- }
- return ceil( filetime_seconds( t_utc ) - time_diff );
-}
-
-
-static void record_times( HANDLE process, timing_info * time )
-{
- FILETIME creation;
- FILETIME exit;
- FILETIME kernel;
- FILETIME user;
- if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
- {
- time->system = filetime_seconds( kernel );
- time->user = filetime_seconds( user );
- time->start = filetime_dt ( creation );
- time->end = filetime_dt ( exit );
- }
-}
-
-
-#define IO_BUFFER_SIZE ( 16 * 1024 )
-
-static char ioBuffer[ IO_BUFFER_SIZE + 1 ];
-
-
-static void read_pipe
-(
- HANDLE in, /* the pipe to read from */
- string * out
-)
-{
- DWORD bytesInBuffer = 0;
- DWORD bytesAvailable = 0;
-
- do
- {
- /* check if we have any data to read */
- if ( !PeekNamedPipe( in, ioBuffer, IO_BUFFER_SIZE, &bytesInBuffer, &bytesAvailable, NULL ) )
- bytesAvailable = 0;
-
- /* read in the available data */
- if ( bytesAvailable > 0 )
- {
- /* we only read in the available bytes, to avoid blocking */
- if ( ReadFile( in, ioBuffer,
- bytesAvailable <= IO_BUFFER_SIZE ? bytesAvailable : IO_BUFFER_SIZE,
- &bytesInBuffer, NULL ) )
- {
- if ( bytesInBuffer > 0 )
- {
- /* Clean up some illegal chars. */
- int i;
- for ( i = 0; i < bytesInBuffer; ++i )
- {
- if ( ( (unsigned char)ioBuffer[ i ] < 1 ) )
- ioBuffer[ i ] = '?';
- }
-                    /* Null-terminate. */
- ioBuffer[ bytesInBuffer ] = '\0';
- /* Append to the output. */
- string_append( out, ioBuffer );
- /* Subtract what we read in. */
- bytesAvailable -= bytesInBuffer;
- }
- else
- {
-                    /* Likely read an error, bail out. */
- bytesAvailable = 0;
- }
- }
- else
- {
-                /* Definitely read an error, bail out. */
- bytesAvailable = 0;
- }
- }
- }
- while ( bytesAvailable > 0 );
-}
-
-
-static void read_output()
-{
- int i;
- for ( i = 0; i < globs.jobs && i < MAXJOBS; ++i )
- {
- /* Read stdout data. */
- if ( cmdtab[ i ].pipe_out[ 0 ] )
- read_pipe( cmdtab[ i ].pipe_out[ 0 ], & cmdtab[ i ].buffer_out );
- /* Read stderr data. */
- if ( cmdtab[ i ].pipe_err[ 0 ] )
- read_pipe( cmdtab[ i ].pipe_err[ 0 ], & cmdtab[ i ].buffer_err );
- }
-}
-
-
-/*
- * Waits for a single child process command to complete, or the timeout,
- * whichever comes first. Returns the index of the completed command in the
- * cmdtab array, or -1.
- */
-
-static int try_wait( int timeoutMillis )
-{
- int i;
- int num_active;
- int wait_api_result;
- HANDLE active_handles[ MAXJOBS ];
- int active_procs[ MAXJOBS ];
-
- /* Prepare a list of all active processes to wait for. */
- for ( num_active = 0, i = 0; i < globs.jobs; ++i )
- {
- if ( cmdtab[ i ].pi.hProcess )
- {
- active_handles[ num_active ] = cmdtab[ i ].pi.hProcess;
- active_procs[ num_active ] = i;
- ++num_active;
- }
- }
-
- /* Wait for a child to complete, or for our timeout window to expire. */
- wait_api_result = WaitForMultipleObjects( num_active, active_handles,
- FALSE, timeoutMillis );
- if ( ( WAIT_OBJECT_0 <= wait_api_result ) &&
- ( wait_api_result < WAIT_OBJECT_0 + num_active ) )
- {
-        /* Terminated process detected - return its index. */
- return active_procs[ wait_api_result - WAIT_OBJECT_0 ];
- }
-
- /* Timeout. */
- return -1;
-}
-
-
-static int try_kill_one()
-{
- /* Only need to check if a timeout was specified with the -l option. */
- if ( globs.timeout > 0 )
- {
- int i;
- for ( i = 0; i < globs.jobs; ++i )
- {
- double t = running_time( cmdtab[ i ].pi.hProcess );
- if ( t > (double)globs.timeout )
- {
- /* The job may have left an alert dialog around, try and get rid
- * of it before killing
- */
- close_alert( cmdtab[ i ].pi.hProcess );
- /* We have a "runaway" job, kill it. */
- kill_process_tree( 0, cmdtab[ i ].pi.hProcess );
- /* And return it marked as a timeout. */
- cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
- return i;
- }
- }
- }
- return -1;
-}
-
-
-static void close_alerts()
-{
- /* We only attempt this every 5 seconds, or so, because it is not a cheap
- * operation, and we will catch the alerts eventually. This check uses
- * floats as some compilers define CLOCKS_PER_SEC as a float or double.
- */
- if ( ( (float)clock() / (float)( CLOCKS_PER_SEC * 5 ) ) < ( 1.0 / 5.0 ) )
- {
- int i;
- for ( i = 0; i < globs.jobs; ++i )
- close_alert( cmdtab[ i ].pi.hProcess );
- }
-}
-
-
-/*
- * Calc the current running time of an *active* process.
- */
-
-static double running_time( HANDLE process )
-{
- FILETIME creation;
- FILETIME exit;
- FILETIME kernel;
- FILETIME user;
- FILETIME current;
- if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
- {
- /* Compute the elapsed time. */
- GetSystemTimeAsFileTime( &current );
- return filetime_seconds( add_FILETIME( current,
- negate_FILETIME( creation ) ) );
- }
- return 0.0;
-}
-
-
-/* It is just stupidly silly that one has to do this. */
-typedef struct PROCESS_BASIC_INFORMATION__
-{
- LONG ExitStatus;
- PVOID PebBaseAddress;
- ULONG AffinityMask;
- LONG BasePriority;
- ULONG UniqueProcessId;
- ULONG InheritedFromUniqueProcessId;
-} PROCESS_BASIC_INFORMATION_;
-typedef LONG (__stdcall * NtQueryInformationProcess__)(
- HANDLE ProcessHandle,
- LONG ProcessInformationClass,
- PVOID ProcessInformation,
- ULONG ProcessInformationLength,
- PULONG ReturnLength);
-static NtQueryInformationProcess__ NtQueryInformationProcess_ = NULL;
-static HMODULE NTDLL_ = NULL;
-DWORD get_process_id( HANDLE process )
-{
- PROCESS_BASIC_INFORMATION_ pinfo;
- if ( !NtQueryInformationProcess_ )
- {
- if ( ! NTDLL_ )
- NTDLL_ = GetModuleHandleA( "ntdll" );
- if ( NTDLL_ )
- NtQueryInformationProcess_
- = (NtQueryInformationProcess__)GetProcAddress( NTDLL_, "NtQueryInformationProcess" );
- }
- if ( NtQueryInformationProcess_ )
- {
- LONG r = (*NtQueryInformationProcess_)( process,
- /* ProcessBasicInformation == */ 0, &pinfo,
- sizeof( PROCESS_BASIC_INFORMATION_ ), NULL );
- return pinfo.UniqueProcessId;
- }
- return 0;
-}
-
-
-/*
- * Not really optimal, or efficient, but it is easier this way, and it is not
- * like we are going to be killing thousands, or even tens of processes.
- */
-
-static void kill_process_tree( DWORD pid, HANDLE process )
-{
- HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
- if ( !pid )
- pid = get_process_id( process );
- process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
-
- if ( INVALID_HANDLE_VALUE != process_snapshot_h )
- {
- BOOL ok = TRUE;
- PROCESSENTRY32 pinfo;
- pinfo.dwSize = sizeof( PROCESSENTRY32 );
- for (
- ok = Process32First( process_snapshot_h, &pinfo );
- ok == TRUE;
- ok = Process32Next( process_snapshot_h, &pinfo ) )
- {
- if ( pinfo.th32ParentProcessID == pid )
- {
- /* Found a child, recurse to kill it and anything else below it.
- */
- HANDLE ph = OpenProcess( PROCESS_ALL_ACCESS, FALSE,
- pinfo.th32ProcessID );
- if ( NULL != ph )
- {
- kill_process_tree( pinfo.th32ProcessID, ph );
- CloseHandle( ph );
- }
- }
- }
- CloseHandle( process_snapshot_h );
- }
- /* Now that the children are all dead, kill the root. */
- TerminateProcess( process, -2 );
-}
-
-
-static double creation_time( HANDLE process )
-{
- FILETIME creation;
- FILETIME exit;
- FILETIME kernel;
- FILETIME user;
- FILETIME current;
- return GetProcessTimes( process, &creation, &exit, &kernel, &user )
- ? filetime_seconds( creation )
- : 0.0;
-}
-
-
-/*
- * Recursive check if first process is parent (directly or indirectly) of the
- * second one. Both processes are passed as process ids, not handles. Special
- * return value 2 means that the second process is smss.exe and its parent
- * process is System (first argument is ignored).
- */
-
-static int is_parent_child( DWORD parent, DWORD child )
-{
- HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
-
- if ( !child )
- return 0;
- if ( parent == child )
- return 1;
-
- process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
- if ( INVALID_HANDLE_VALUE != process_snapshot_h )
- {
- BOOL ok = TRUE;
- PROCESSENTRY32 pinfo;
- pinfo.dwSize = sizeof( PROCESSENTRY32 );
- for (
- ok = Process32First( process_snapshot_h, &pinfo );
- ok == TRUE;
- ok = Process32Next( process_snapshot_h, &pinfo ) )
- {
- if ( pinfo.th32ProcessID == child )
- {
-                /* Unfortunately, process ids are not really unique. There might
-                 * be a spurious "parent and child" relationship match between
-                 * two unrelated processes if the real parent of a given process
-                 * has exited (while the child kept running as an "orphan") and
-                 * the operating system has since reused the parent's process id
-                 * when creating another process.
-                 *
-                 * Thus an additional check is needed - process creation time. This
- * check may fail (i.e. return 0) for system processes due to
- * insufficient privileges, and that is OK.
- */
- double tchild = 0.0;
- double tparent = 0.0;
- HANDLE hchild = OpenProcess( PROCESS_QUERY_INFORMATION, FALSE, pinfo.th32ProcessID );
- CloseHandle( process_snapshot_h );
-
- /* csrss.exe may display message box like following:
- * xyz.exe - Unable To Locate Component
- * This application has failed to start because
- * boost_foo-bar.dll was not found. Re-installing the
- * application may fix the problem
-                 * This actually happens when starting a test process that depends
-                 * on a dynamic library which failed to build. We want to
-                 * automatically close these message boxes even though csrss.exe
-                 * is not our child process. We may depend on the fact that (in
-                 * all current versions of Windows) csrss.exe is a direct child
-                 * of the smss.exe process, which in turn is a direct child of
- * the System process, which always has process id == 4. This
- * check must be performed before comparison of process creation
- * times.
- */
- if ( !stricmp( pinfo.szExeFile, "csrss.exe" ) &&
- ( is_parent_child( parent, pinfo.th32ParentProcessID ) == 2 ) )
- return 1;
- if ( !stricmp( pinfo.szExeFile, "smss.exe" ) &&
- ( pinfo.th32ParentProcessID == 4 ) )
- return 2;
-
- if ( hchild )
- {
- HANDLE hparent = OpenProcess( PROCESS_QUERY_INFORMATION,
- FALSE, pinfo.th32ParentProcessID );
- if ( hparent )
- {
- tchild = creation_time( hchild );
- tparent = creation_time( hparent );
- CloseHandle( hparent );
- }
- CloseHandle( hchild );
- }
-
- /* Return 0 if one of the following is true:
- * 1. we failed to read process creation time
- * 2. child was created before alleged parent
- */
- if ( ( tchild == 0.0 ) || ( tparent == 0.0 ) ||
- ( tchild < tparent ) )
- return 0;
-
- return is_parent_child( parent, pinfo.th32ParentProcessID ) & 1;
- }
- }
-
- CloseHandle( process_snapshot_h );
- }
-
- return 0;
-}
-
-typedef struct PROCESS_HANDLE_ID { HANDLE h; DWORD pid; } PROCESS_HANDLE_ID;
-
-
-/*
- * This function is called by the operating system for each topmost window.
- */
-
-BOOL CALLBACK close_alert_window_enum( HWND hwnd, LPARAM lParam )
-{
- char buf[ 7 ] = { 0 };
- PROCESS_HANDLE_ID p = *( (PROCESS_HANDLE_ID *)lParam );
- DWORD pid = 0;
- DWORD tid = 0;
-
- /* We want to find and close any window that:
- * 1. is visible and
- * 2. is a dialog and
- * 3. is displayed by any of our child processes
- */
- if ( !IsWindowVisible( hwnd ) )
- return TRUE;
-
- if ( !GetClassNameA( hwnd, buf, sizeof( buf ) ) )
- return TRUE; /* Failed to read class name; presume it is not a dialog. */
-
- if ( strcmp( buf, "#32770" ) )
- return TRUE; /* Not a dialog */
-
- /* GetWindowThreadProcessId() returns 0 on error, otherwise thread id of
- * window message pump thread.
- */
- tid = GetWindowThreadProcessId( hwnd, &pid );
-
- if ( tid && is_parent_child( p.pid, pid ) )
- {
- /* Ask really nice. */
- PostMessageA( hwnd, WM_CLOSE, 0, 0 );
- /* Now wait and see if it worked. If not, insist. */
- if ( WaitForSingleObject( p.h, 200 ) == WAIT_TIMEOUT )
- {
- PostThreadMessageA( tid, WM_QUIT, 0, 0 );
- WaitForSingleObject( p.h, 300 );
- }
-
- /* Done, we do not want to check any other window now. */
- return FALSE;
- }
-
- return TRUE;
-}
-
-
-static void close_alert( HANDLE process )
-{
- DWORD pid = get_process_id( process );
- /* If process already exited or we just can not get its process id, do not
- * go any further.
- */
- if ( pid )
- {
- PROCESS_HANDLE_ID p;
- p.h = process;
- p.pid = pid;
- EnumWindows( &close_alert_window_enum, (LPARAM)&p );
- }
-}
-
-#endif /* USE_EXECNT */
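One piece of arithmetic from the file above worth spelling out: filetime_seconds() converts a FILETIME, a 64-bit count of 100-nanosecond ticks split into two 32-bit halves, into seconds. A standalone sketch of the same computation (no Windows headers required):

    #include <stdio.h>

    /* seconds = ( high * 2^32 + low ) * 1e-7; (1UL << 31) * 2.0 is 2^32
     * expressed as a double, exactly as filetime_seconds() writes it.
     */
    static double filetime_seconds_sketch( unsigned long high, unsigned long low )
    {
        return high * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) + low * 1.0e-7;
    }

    int main( void )
    {
        /* high == 1, low == 0 is 4294967296 ticks of 100 ns each,
         * i.e. roughly 429.4967296 seconds.
         */
        printf( "%.7f\n", filetime_seconds_sketch( 1, 0 ) );
        return 0;
    }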
diff --git a/jam-files/engine/execunix.c b/jam-files/engine/execunix.c
deleted file mode 100644
index ef9dba00..00000000
--- a/jam-files/engine/execunix.c
+++ /dev/null
@@ -1,569 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- * Copyright 2007 Noel Belcourt.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "execcmd.h"
-#include "output.h"
-#include <errno.h>
-#include <signal.h>
-#include <stdio.h>
-#include <time.h>
-#include <unistd.h> /* needed for vfork(), _exit() prototypes */
-#include <sys/resource.h>
-#include <sys/times.h>
-#include <sys/wait.h>
-
-#if defined(sun) || defined(__sun) || defined(linux)
- #include <wait.h>
-#endif
-
-#ifdef USE_EXECUNIX
-
-#include <sys/times.h>
-
-#if defined(__APPLE__)
- #define NO_VFORK
-#endif
-
-#ifdef NO_VFORK
- #define vfork() fork()
-#endif
-
-
-/*
- * execunix.c - execute a shell script on UNIX/WinNT/OS2/AmigaOS
- *
- * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp().
- * The default is:
- *
- * /bin/sh -c % [ on UNIX/AmigaOS ]
- * cmd.exe /c % [ on OS2/WinNT ]
- *
- * Each word must be an individual element in a jam variable value.
- *
- * In $(JAMSHELL), % expands to the command string and ! expands to the slot
- * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
- * not include a %, it is tacked on as the last argument.
- *
- * Do not just set JAMSHELL to /bin/sh or cmd.exe - it will not work!
- *
- * External routines:
- * exec_cmd() - launch an async command execution.
- * exec_wait() - wait and drive at most one execution completion.
- *
- * Internal routines:
- * onintr() - bump intr to note command interruption.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 05/04/94 (seiwald) - async multiprocess interface
- * 01/22/95 (seiwald) - $(JAMSHELL) support
- * 06/02/97 (gsar) - full async multiprocess support for Win32
- */
-
-static clock_t tps = 0;
-static struct timeval tv;
-static int select_timeout = 0;
-static int intr = 0;
-static int cmdsrunning = 0;
-static struct tms old_time;
-
-#define OUT 0
-#define ERR 1
-
-static struct
-{
- int pid; /* on win32, a real process handle */
- int fd[2]; /* file descriptors for stdout and stderr */
- FILE *stream[2]; /* child's stdout (0) and stderr (1) file stream */
- clock_t start_time; /* start time of child process */
- int exit_reason; /* termination status */
- int action_length; /* length of action string */
- int target_length; /* length of target string */
- char *action; /* buffer to hold action and target invoked */
- char *target; /* buffer to hold action and target invoked */
- char *command; /* buffer to hold command being invoked */
- char *buffer[2]; /* buffer to hold stdout and stderr, if any */
- void (*func)( void *closure, int status, timing_info*, char *, char * );
- void *closure;
- time_t start_dt; /* start of command timestamp */
-} cmdtab[ MAXJOBS ] = {{0}};
-
-/*
- * onintr() - bump intr to note command interruption
- */
-
-void onintr( int disp )
-{
- ++intr;
- printf( "...interrupted\n" );
-}
-
-
-/*
- * exec_cmd() - launch an async command execution.
- */
-
-void exec_cmd
-(
- char * string,
- void (*func)( void *closure, int status, timing_info*, char *, char * ),
- void * closure,
- LIST * shell,
- char * action,
- char * target
-)
-{
- static int initialized = 0;
- int out[2];
- int err[2];
- int slot;
- int len;
- char * argv[ MAXARGC + 1 ]; /* +1 for NULL */
-
- /* Find a slot in the running commands table for this one. */
- for ( slot = 0; slot < MAXJOBS; ++slot )
- if ( !cmdtab[ slot ].pid )
- break;
-
- if ( slot == MAXJOBS )
- {
- printf( "no slots for child!\n" );
- exit( EXITBAD );
- }
-
-    /* Formulate argv. If shell was defined, be prepared for % and ! subs.
- * Otherwise, use stock /bin/sh on unix or cmd.exe on NT.
- */
- if ( shell )
- {
- int i;
- char jobno[4];
- int gotpercent = 0;
-
- sprintf( jobno, "%d", slot + 1 );
-
- for ( i = 0; shell && i < MAXARGC; ++i, shell = list_next( shell ) )
- {
- switch ( shell->string[0] )
- {
- case '%': argv[ i ] = string; ++gotpercent; break;
- case '!': argv[ i ] = jobno; break;
- default : argv[ i ] = shell->string;
- }
- if ( DEBUG_EXECCMD )
- printf( "argv[%d] = '%s'\n", i, argv[ i ] );
- }
-
- if ( !gotpercent )
- argv[ i++ ] = string;
-
- argv[ i ] = 0;
- }
- else
- {
- argv[ 0 ] = "/bin/sh";
- argv[ 1 ] = "-c";
- argv[ 2 ] = string;
- argv[ 3 ] = 0;
- }
-
- /* Increment jobs running. */
- ++cmdsrunning;
-
- /* Save off actual command string. */
- cmdtab[ slot ].command = BJAM_MALLOC_ATOMIC( strlen( string ) + 1 );
- strcpy( cmdtab[ slot ].command, string );
-
- /* Initialize only once. */
- if ( !initialized )
- {
- times( &old_time );
- initialized = 1;
- }
-
- /* Create pipes from child to parent. */
- {
- if ( pipe( out ) < 0 )
- exit( EXITBAD );
-
- if ( pipe( err ) < 0 )
- exit( EXITBAD );
- }
-
- /* Start the command */
-
- cmdtab[ slot ].start_dt = time(0);
-
- if ( 0 < globs.timeout )
- {
- /*
-         * Handle hung processes by manually tracking elapsed time and
-         * signaling the process when the time limit expires.
- */
- struct tms buf;
- cmdtab[ slot ].start_time = times( &buf );
-
- /* Make a global, only do this once. */
- if ( tps == 0 ) tps = sysconf( _SC_CLK_TCK );
- }
-
- if ( ( cmdtab[ slot ].pid = vfork() ) == 0 )
- {
- int pid = getpid();
-
- close( out[0] );
- close( err[0] );
-
- dup2( out[1], STDOUT_FILENO );
-
- if ( globs.pipe_action == 0 )
- dup2( out[1], STDERR_FILENO );
- else
- dup2( err[1], STDERR_FILENO );
-
- close( out[1] );
- close( err[1] );
-
- /* Make this process a process group leader so that when we kill it, all
- * child processes of this process are terminated as well. We use
- * killpg(pid, SIGKILL) to kill the process group leader and all its
- * children.
- */
- if ( 0 < globs.timeout )
- {
- struct rlimit r_limit;
- r_limit.rlim_cur = globs.timeout;
- r_limit.rlim_max = globs.timeout;
- setrlimit( RLIMIT_CPU, &r_limit );
- }
- setpgid( pid,pid );
- execvp( argv[0], argv );
- perror( "execvp" );
- _exit( 127 );
- }
- else if ( cmdtab[ slot ].pid == -1 )
- {
- perror( "vfork" );
- exit( EXITBAD );
- }
-
- setpgid( cmdtab[ slot ].pid, cmdtab[ slot ].pid );
-
- /* close write end of pipes */
- close( out[1] );
- close( err[1] );
-
- /* set both file descriptors to non-blocking */
- fcntl(out[0], F_SETFL, O_NONBLOCK);
- fcntl(err[0], F_SETFL, O_NONBLOCK);
-
- /* child writes stdout to out[1], parent reads from out[0] */
- cmdtab[ slot ].fd[ OUT ] = out[0];
- cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" );
- if ( cmdtab[ slot ].stream[ OUT ] == NULL )
- {
- perror( "fdopen" );
- exit( EXITBAD );
- }
-
- /* child writes stderr to err[1], parent reads from err[0] */
- if (globs.pipe_action == 0)
- {
- close(err[0]);
- }
- else
- {
- cmdtab[ slot ].fd[ ERR ] = err[0];
- cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" );
- if ( cmdtab[ slot ].stream[ ERR ] == NULL )
- {
- perror( "fdopen" );
- exit( EXITBAD );
- }
- }
-
- /* Ensure enough room for rule and target name. */
- if ( action && target )
- {
- len = strlen( action ) + 1;
- if ( cmdtab[ slot ].action_length < len )
- {
- BJAM_FREE( cmdtab[ slot ].action );
- cmdtab[ slot ].action = BJAM_MALLOC_ATOMIC( len );
- cmdtab[ slot ].action_length = len;
- }
- strcpy( cmdtab[ slot ].action, action );
- len = strlen( target ) + 1;
- if ( cmdtab[ slot ].target_length < len )
- {
- BJAM_FREE( cmdtab[ slot ].target );
- cmdtab[ slot ].target = BJAM_MALLOC_ATOMIC( len );
- cmdtab[ slot ].target_length = len;
- }
- strcpy( cmdtab[ slot ].target, target );
- }
- else
- {
- BJAM_FREE( cmdtab[ slot ].action );
- BJAM_FREE( cmdtab[ slot ].target );
- cmdtab[ slot ].action = 0;
- cmdtab[ slot ].target = 0;
- cmdtab[ slot ].action_length = 0;
- cmdtab[ slot ].target_length = 0;
- }
-
- /* Save the operation for exec_wait() to find. */
- cmdtab[ slot ].func = func;
- cmdtab[ slot ].closure = closure;
-
- /* Wait until we are under the limit of concurrent commands. Do not trust
- * globs.jobs alone.
- */
- while ( ( cmdsrunning >= MAXJOBS ) || ( cmdsrunning >= globs.jobs ) )
- if ( !exec_wait() )
- break;
-}
-
-
-/* Returns 1 if file is closed, 0 if descriptor is still live.
- *
- * i is index into cmdtab
- *
- * s (stream) indexes:
- * - cmdtab[ i ].stream[ s ]
- * - cmdtab[ i ].buffer[ s ]
- * - cmdtab[ i ].fd [ s ]
- */
-
-int read_descriptor( int i, int s )
-{
- int ret;
- int len;
- char buffer[BUFSIZ];
-
- while ( 0 < ( ret = fread( buffer, sizeof(char), BUFSIZ-1, cmdtab[ i ].stream[ s ] ) ) )
- {
- buffer[ret] = 0;
- if ( !cmdtab[ i ].buffer[ s ] )
- {
- /* Never been allocated. */
- cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 );
- memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 );
- }
- else
- {
- /* Previously allocated. */
- char * tmp = cmdtab[ i ].buffer[ s ];
- len = strlen( tmp );
- cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( len + ret + 1 );
- memcpy( cmdtab[ i ].buffer[ s ], tmp, len );
- memcpy( cmdtab[ i ].buffer[ s ] + len, buffer, ret + 1 );
- BJAM_FREE( tmp );
- }
- }
-
- return feof(cmdtab[ i ].stream[ s ]);
-}
-
-
-void close_streams( int i, int s )
-{
- /* Close the stream and pipe descriptor. */
- fclose(cmdtab[ i ].stream[ s ]);
- cmdtab[ i ].stream[ s ] = 0;
-
- close(cmdtab[ i ].fd[ s ]);
- cmdtab[ i ].fd[ s ] = 0;
-}
-
-
-void populate_file_descriptors( int * fmax, fd_set * fds)
-{
- int i, fd_max = 0;
- struct tms buf;
- clock_t current = times( &buf );
- select_timeout = globs.timeout;
-
- /* Compute max read file descriptor for use in select. */
- FD_ZERO(fds);
- for ( i = 0; i < globs.jobs; ++i )
- {
- if ( 0 < cmdtab[ i ].fd[ OUT ] )
- {
- fd_max = fd_max < cmdtab[ i ].fd[ OUT ] ? cmdtab[ i ].fd[ OUT ] : fd_max;
- FD_SET(cmdtab[ i ].fd[ OUT ], fds);
- }
- if ( globs.pipe_action != 0 )
- {
- if (0 < cmdtab[ i ].fd[ ERR ])
- {
- fd_max = fd_max < cmdtab[ i ].fd[ ERR ] ? cmdtab[ i ].fd[ ERR ] : fd_max;
- FD_SET(cmdtab[ i ].fd[ ERR ], fds);
- }
- }
-
- if (globs.timeout && cmdtab[ i ].pid) {
- clock_t consumed = (current - cmdtab[ i ].start_time) / tps;
- clock_t process_timesout = globs.timeout - consumed;
- if (0 < process_timesout && process_timesout < select_timeout) {
- select_timeout = process_timesout;
- }
- if ( globs.timeout <= consumed )
- {
- killpg( cmdtab[ i ].pid, SIGKILL );
- cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
- }
- }
- }
- *fmax = fd_max;
-}
-
-
-/*
- * exec_wait() - wait and drive at most one execution completion.
- */
-
-int exec_wait()
-{
- int i;
- int ret;
- int fd_max;
- int pid;
- int status;
- int finished;
- int rstat;
- timing_info time_info;
- fd_set fds;
- struct tms new_time;
-
- /* Handle naive make1() which does not know if commands are running. */
- if ( !cmdsrunning )
- return 0;
-
- /* Process children that signaled. */
- finished = 0;
- while ( !finished && cmdsrunning )
- {
- /* Compute max read file descriptor for use in select(). */
- populate_file_descriptors( &fd_max, &fds );
-
- if ( 0 < globs.timeout )
- {
- /* Force select() to timeout so we can terminate expired processes.
- */
- tv.tv_sec = select_timeout;
- tv.tv_usec = 0;
-
- /* select() will wait until: i/o on a descriptor, a signal, or we
- * time out.
- */
- ret = select( fd_max + 1, &fds, 0, 0, &tv );
- }
- else
- {
- /* select() will wait until i/o on a descriptor or a signal. */
- ret = select( fd_max + 1, &fds, 0, 0, 0 );
- }
-
- if ( 0 < ret )
- {
- for ( i = 0; i < globs.jobs; ++i )
- {
- int out = 0;
- int err = 0;
- if ( FD_ISSET( cmdtab[ i ].fd[ OUT ], &fds ) )
- out = read_descriptor( i, OUT );
-
- if ( ( globs.pipe_action != 0 ) &&
- ( FD_ISSET( cmdtab[ i ].fd[ ERR ], &fds ) ) )
- err = read_descriptor( i, ERR );
-
- /* If feof on either descriptor, then we are done. */
- if ( out || err )
- {
- /* Close the stream and pipe descriptors. */
- close_streams( i, OUT );
- if ( globs.pipe_action != 0 )
- close_streams( i, ERR );
-
- /* Reap the child and release resources. */
- pid = waitpid( cmdtab[ i ].pid, &status, 0 );
-
- if ( pid == cmdtab[ i ].pid )
- {
- finished = 1;
- pid = 0;
- cmdtab[ i ].pid = 0;
-
- /* Set reason for exit if not timed out. */
- if ( WIFEXITED( status ) )
- {
- cmdtab[ i ].exit_reason = 0 == WEXITSTATUS( status )
- ? EXIT_OK
- : EXIT_FAIL;
- }
-
- /* Print out the rule and target name. */
- out_action( cmdtab[ i ].action, cmdtab[ i ].target,
- cmdtab[ i ].command, cmdtab[ i ].buffer[ OUT ],
- cmdtab[ i ].buffer[ ERR ], cmdtab[ i ].exit_reason
- );
-
- times( &new_time );
-
- time_info.system = (double)( new_time.tms_cstime - old_time.tms_cstime ) / CLOCKS_PER_SEC;
- time_info.user = (double)( new_time.tms_cutime - old_time.tms_cutime ) / CLOCKS_PER_SEC;
- time_info.start = cmdtab[ i ].start_dt;
- time_info.end = time( 0 );
-
- old_time = new_time;
-
- /* Drive the completion. */
- --cmdsrunning;
-
- if ( intr )
- rstat = EXEC_CMD_INTR;
- else if ( status != 0 )
- rstat = EXEC_CMD_FAIL;
- else
- rstat = EXEC_CMD_OK;
-
- /* Assume -p0 in effect so only pass buffer[ 0 ]
- * containing merged output.
- */
- (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat,
- &time_info, cmdtab[ i ].command,
- cmdtab[ i ].buffer[ 0 ] );
-
- BJAM_FREE( cmdtab[ i ].buffer[ OUT ] );
- cmdtab[ i ].buffer[ OUT ] = 0;
-
- BJAM_FREE( cmdtab[ i ].buffer[ ERR ] );
- cmdtab[ i ].buffer[ ERR ] = 0;
-
- BJAM_FREE( cmdtab[ i ].command );
- cmdtab[ i ].command = 0;
-
- cmdtab[ i ].func = 0;
- cmdtab[ i ].closure = 0;
- cmdtab[ i ].start_time = 0;
- }
- else
- {
- printf( "unknown pid %d with errno = %d\n", pid, errno );
- exit( EXITBAD );
- }
- }
- }
- }
- }
-
- return 1;
-}
-
-# endif /* USE_EXECUNIX */
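Both execnt.c and execunix.c perform the same $(JAMSHELL) substitution when building argv: each shell word becomes one element, '%' is replaced by the command string, '!' by the 1-based slot number, and the command is appended if no '%' appeared. A standalone sketch of just that step (the helper name and example shell value are illustrative, not engine code):

    #include <stdio.h>

    /* Builds argv from a JAMSHELL-style word list; returns the argument count. */
    static int build_argv( char * argv[], int max, char * shell_words[],
                           int nwords, char * command, char * jobno )
    {
        int i, gotpercent = 0;
        for ( i = 0; i < nwords && i < max - 1; ++i )
        {
            switch ( shell_words[ i ][ 0 ] )
            {
            case '%': argv[ i ] = command; ++gotpercent; break;
            case '!': argv[ i ] = jobno; break;
            default : argv[ i ] = shell_words[ i ];
            }
        }
        if ( !gotpercent && i < max - 1 )
            argv[ i++ ] = command;    /* no '%' seen: append the command */
        argv[ i ] = 0;                /* NULL terminator for execvp()/spawnvp() */
        return i;
    }

    int main( void )
    {
        char * shell[] = { "/bin/bash", "-c", "%" };   /* e.g. a JAMSHELL value */
        char * argv[ 8 ];
        int i, n = build_argv( argv, 8, shell, 3, "echo hello", "1" );
        for ( i = 0; i < n; ++i )
            printf( "argv[%d] = '%s'\n", i, argv[ i ] );
        return 0;
    }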
diff --git a/jam-files/engine/execvms.c b/jam-files/engine/execvms.c
deleted file mode 100644
index 729917d3..00000000
--- a/jam-files/engine/execvms.c
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "execcmd.h"
-
-#ifdef OS_VMS
-
-#include <stdio.h>
-#include <string.h>
-#include <stdlib.h>
-#include <iodef.h>
-#include <ssdef.h>
-#include <descrip.h>
-#include <dvidef.h>
-#include <clidef.h>
-
-/*
- * execvms.c - execute a shell script, ala VMS.
- *
- * The approach is this:
- *
- * If the command is a single line, and shorter than WRTLEN (what we believe to
- * be the maximum line length), we just system() it.
- *
- * If the command is multi-line, or longer than WRTLEN, we write the command
- * block to a temp file, splitting long lines (using "-" at the end of the line
- * to indicate continuation), and then source that temp file. We use special
- * logic to make sure we do not continue in the middle of a quoted string.
- *
- * 05/04/94 (seiwald) - async multiprocess interface; noop on VMS
- * 12/20/96 (seiwald) - rewritten to handle multi-line commands well
- * 01/14/96 (seiwald) - do not put -'s between "'s
- */
-
-#define WRTLEN 240
-
-#define MIN( a, b ) ((a) < (b) ? (a) : (b))
-
-/* 1 for the @ and 4 for the .com */
-
-char tempnambuf[ L_tmpnam + 1 + 4 ] = { 0 };
-
-
-void exec_cmd
-(
- char * string,
- void (* func)( void * closure, int status, timing_info *, char *, char * ),
- void * closure,
- LIST * shell,
- char * rule_name,
- char * target
-)
-{
- char * s;
- char * e;
-    char * p;
- int rstat = EXEC_CMD_OK;
- int status;
-
- /* See if string is more than one line discounting leading/trailing white
- * space.
- */
- for ( s = string; *s && isspace( *s ); ++s );
-
- e = p = strchr( s, '\n' );
-
- while ( p && isspace( *p ) )
- ++p;
-
- /* If multi line or long, write to com file. Otherwise, exec directly. */
- if ( ( p && *p ) || ( e - s > WRTLEN ) )
- {
- FILE * f;
-
- /* Create temp file invocation "@sys$scratch:tempfile.com". */
- if ( !*tempnambuf )
- {
- tempnambuf[0] = '@';
- (void)tmpnam( tempnambuf + 1 );
- strcat( tempnambuf, ".com" );
- }
-
- /* Open tempfile. */
- if ( !( f = fopen( tempnambuf + 1, "w" ) ) )
- {
- printf( "can't open command file\n" );
- (*func)( closure, EXEC_CMD_FAIL );
- return;
- }
-
- /* For each line of the string. */
- while ( *string )
- {
- char * s = strchr( string, '\n' );
- int len = s ? s + 1 - string : strlen( string );
-
- fputc( '$', f );
-
- /* For each chunk of a line that needs to be split. */
- while ( len > 0 )
- {
- char * q = string;
- char * qe = string + MIN( len, WRTLEN );
- char * qq = q;
- int quote = 0;
-
- /* Look for matching "s. */
- for ( ; q < qe; ++q )
- if ( ( *q == '"' ) && ( quote = !quote ) )
- qq = q;
-
- /* Back up to opening quote, if in one. */
- if ( quote )
- q = qq;
-
- fwrite( string, ( q - string ), 1, f );
-
- len -= ( q - string );
- string = q;
-
- if ( len )
- {
- fputc( '-', f );
- fputc( '\n', f );
- }
- }
- }
-
- fclose( f );
-
- status = system( tempnambuf ) & 0x07;
-
- unlink( tempnambuf + 1 );
- }
- else
- {
- /* Execute single line command. Strip trailing newline before execing.
- */
- if ( e ) *e = 0;
- status = system( s ) & 0x07;
- }
-
- /* Fail for error or fatal error. OK on OK, warning or info exit. */
- if ( ( status == 2 ) || ( status == 4 ) )
- rstat = EXEC_CMD_FAIL;
-
- (*func)( closure, rstat );
-}
-
-
-int exec_wait()
-{
- return 0;
-}
-
-# endif /* VMS */
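The multi-line branch of exec_cmd() above writes the command block to a temporary .com file, keeping each physical line under WRTLEN characters, ending split chunks with "-" (the DCL continuation marker) and backing up so a split never lands inside a double-quoted string. A self-contained sketch of that chunking rule follows; emit_continued is a hypothetical name, and the sketch adds a hard-cut fallback for a quoted run longer than WRTLEN, which the original code does not have.

    #include <stdio.h>

    #define WRTLEN 240
    #define MIN( a, b ) ( (a) < (b) ? (a) : (b) )

    /* Hypothetical sketch: write one logical command line as DCL-style
     * continuation lines, never splitting inside a quoted string. */
    static void emit_continued( char const * line, int len, FILE * f )
    {
        fputc( '$', f );
        while ( len > 0 )
        {
            char const * q = line;
            char const * qe = line + MIN( len, WRTLEN );
            char const * qq = q;     /* last opening quote seen */
            int quote = 0;

            for ( ; q < qe; ++q )
                if ( *q == '"' )
                {
                    quote = !quote;
                    if ( quote )
                        qq = q;
                }

            if ( quote )             /* would split inside quotes: back up */
                q = qq;
            if ( q == line )         /* whole chunk is one quoted run: hard cut */
                q = qe;

            fwrite( line, 1, (size_t)( q - line ), f );
            len -= (int)( q - line );
            line = q;
            if ( len )
                fputs( "-\n", f );   /* '-' continues the DCL line */
        }
        fputc( '\n', f );
    }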
diff --git a/jam-files/engine/expand.c b/jam-files/engine/expand.c
deleted file mode 100644
index d8e58827..00000000
--- a/jam-files/engine/expand.c
+++ /dev/null
@@ -1,733 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "variable.h"
-# include "expand.h"
-# include "pathsys.h"
-# include "newstr.h"
-# include <assert.h>
-# include <stdlib.h>
-# include <limits.h>
-
-# ifdef OS_CYGWIN
-# include <sys/cygwin.h>
-# include <windows.h>
-# endif
-
-/*
- * expand.c - expand a buffer, given variable values
- *
- * External routines:
- *
- * var_expand() - variable-expand input string into list of strings
- *
- * Internal routines:
- *
- * var_edit_parse() - parse : modifiers into PATHNAME structure.
- * var_edit_file() - copy input target name to output, modifying filename.
- * var_edit_shift() - do upshift/downshift mods.
- *
- * 01/25/94 (seiwald) - $(X)$(UNDEF) was expanding like plain $(X)
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 01/11/01 (seiwald) - added support for :E=emptyvalue, :J=joinval
- */
-
-typedef struct
-{
- PATHNAME f; /* :GDBSMR -- pieces */
- char parent; /* :P -- go to parent directory */
- char filemods; /* one of the above applied */
- char downshift; /* :L -- downshift result */
- char upshift; /* :U -- upshift result */
- char to_slashes; /* :T -- convert "\" to "/" */
- char to_windows; /* :W -- convert cygwin to native paths */
- PATHPART empty; /* :E -- default for empties */
- PATHPART join; /* :J -- join list with char */
-} VAR_EDITS ;
-
-static void var_edit_parse( char * mods, VAR_EDITS * edits );
-static void var_edit_file ( char * in, string * out, VAR_EDITS * edits );
-static void var_edit_shift( string * out, VAR_EDITS * edits );
-
-#define MAGIC_COLON '\001'
-#define MAGIC_LEFT '\002'
-#define MAGIC_RIGHT '\003'
-
-
-/*
- * var_expand() - variable-expand input string into list of strings.
- *
- * Would just copy input to output, performing variable expansion, except that
- * since variables can contain multiple values the result of variable expansion
- * may contain multiple values (a list). Properly performs "product" operations
- * that occur in "$(var1)xxx$(var2)" or even "$($(var2))".
- *
- * Returns a newly created list.
- */
-
-LIST * var_expand( LIST * l, char * in, char * end, LOL * lol, int cancopyin )
-{
- char out_buf[ MAXSYM ];
- string buf[ 1 ];
- string out1[ 1 ]; /* temporary buffer */
- size_t prefix_length;
- char * out;
- char * inp = in;
- char * ov; /* for temp copy of variable in outbuf */
- int depth;
-
- if ( DEBUG_VAREXP )
-        printf( "expand '%.*s'\n", (int)( end - in ), in );
-
- /* This gets a lot of cases: $(<) and $(>). */
- if
- (
- ( in[ 0 ] == '$' ) &&
- ( in[ 1 ] == '(' ) &&
- ( in[ 3 ] == ')' ) &&
- ( in[ 4 ] == '\0' )
- )
- {
- switch ( in[ 2 ] )
- {
- case '<': return list_copy( l, lol_get( lol, 0 ) );
- case '>': return list_copy( l, lol_get( lol, 1 ) );
-
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- return list_copy( l, lol_get( lol, in[ 2 ] - '1' ) );
- }
- }
- else if ( in[0] == '$' && in[1] == '(' && in[2] == '1' && in[4] == ')' &&
- in[5] == '\0') {
-
- switch( in[3] )
- {
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- return list_copy( l, lol_get( lol, in[3]-'0'+10-1 ) );
- }
- }
-
- /* Expand @() files, to single item plus accompanying file. */
- if ( ( in[ 0 ] == '@' ) && ( in[ 1 ] == '(' ) && ( *( end - 1 ) == ')' ) )
- {
-        /* We try the expansion until it fits within the prospective output
- * buffer.
- */
- char * at_buf = 0;
- int at_size = MAXJPATH;
- int at_len = 0;
- do
- {
- BJAM_FREE( at_buf );
- at_buf = (char *)BJAM_MALLOC_ATOMIC( at_size + 1 );
- at_len = var_string( in, at_buf, at_size, lol );
- at_size *= 2;
- }
- while ( ( at_len < 0 ) && ( at_size < INT_MAX / 2 ) );
- /* Return the result as a single item list. */
- if ( at_len > 0 )
- {
- LIST * r;
- string_copy( buf, at_buf );
- r = list_new( l, newstr( buf->value ) );
- string_free( buf );
- BJAM_FREE( at_buf );
- return r;
- }
- BJAM_FREE( at_buf );
- }
-
- /* Just try simple copy of in to out. */
- while ( in < end )
- if ( ( *in++ == '$' ) && ( *in == '(' ) )
- goto expand;
-
- /* No variables expanded - just add copy of input string to list. */
-
- /* 'cancopyin' is an optimization: if the input was already a list item, we
- * can use copystr() to put it on the new list. Otherwise, we use the slower
- * newstr().
- */
- if ( cancopyin )
- return list_new( l, copystr( inp ) );
-
- {
- LIST * r;
- string_new( buf );
- string_append_range( buf, inp, end );
- r = list_new( l, newstr( buf->value ) );
- string_free( buf );
- return r;
- }
-
-expand:
- string_new( buf );
- string_append_range( buf, inp, in - 1 ); /* Copy the part before '$'. */
- /*
- * Input so far (ignore blanks):
- *
- * stuff-in-outbuf $(variable) remainder
- * ^ ^
- * in end
- * Output so far:
- *
- * stuff-in-outbuf $
- * ^ ^
- * out_buf out
- *
- *
- * We just copied the $ of $(...), so back up one on the output. We now find
- * the matching close paren, copying the variable and modifiers between the
- * $( and ) temporarily into out_buf, so that we can replace :'s with
- * MAGIC_COLON. This is necessary to avoid being confused by modifier values
- * that are variables containing :'s. Ugly.
- */
-
- depth = 1;
- inp = ++in; /* Skip over the '('. */
-
- while ( ( in < end ) && depth )
- {
- switch ( *in++ )
- {
- case '(': ++depth; break;
- case ')': --depth; break;
- }
- }
-
- /*
- * Input so far (ignore blanks):
- *
- * stuff-in-outbuf $(variable) remainder
- * ^ ^ ^
- * inp in end
- */
- prefix_length = buf->size;
- string_append_range( buf, inp, in - 1 );
-
- out = buf->value + prefix_length;
- for ( ov = out; ov < buf->value + buf->size; ++ov )
- {
- switch ( *ov )
- {
- case ':': *ov = MAGIC_COLON; break;
- case '[': *ov = MAGIC_LEFT ; break;
- case ']': *ov = MAGIC_RIGHT; break;
- }
- }
-
- /*
- * Input so far (ignore blanks):
- *
- * stuff-in-outbuf $(variable) remainder
- * ^ ^
- * in end
- * Output so far:
- *
- * stuff-in-outbuf variable
- * ^ ^ ^
- * out_buf out ov
- *
- * Later we will overwrite 'variable' in out_buf, but we will be done with
- * it by then. 'variable' may be a multi-element list, so may each value for
- * '$(variable element)', and so may 'remainder'. Thus we produce a product
- * of three lists.
- */
- {
- LIST * variables = 0;
- LIST * remainder = 0;
- LIST * vars;
-
- /* Recursively expand variable name & rest of input. */
- if ( out < ov ) variables = var_expand( L0, out, ov, lol, 0 );
- if ( in < end ) remainder = var_expand( L0, in, end, lol, 0 );
-
- /* Now produce the result chain. */
-
- /* For each variable name. */
- for ( vars = variables; vars; vars = list_next( vars ) )
- {
- LIST * value = 0;
- LIST * evalue = 0;
- char * colon;
- char * bracket;
- string variable[1];
- char * varname;
- int sub1 = 0;
- int sub2 = -1;
- VAR_EDITS edits;
-
- /* Look for a : modifier in the variable name. Must copy into
- * varname so we can modify it.
- */
- string_copy( variable, vars->string );
- varname = variable->value;
-
- if ( ( colon = strchr( varname, MAGIC_COLON ) ) )
- {
- string_truncate( variable, colon - varname );
- var_edit_parse( colon + 1, &edits );
- }
-
- /* Look for [x-y] subscripting. sub1 and sub2 are x and y. */
- if ( ( bracket = strchr( varname, MAGIC_LEFT ) ) )
- {
- /* Make all syntax errors in [] subscripting result in the same
-                 * behavior: silently return an empty expansion (by setting sub2
-                 * = 0). Brute-force parsing; may get moved into yacc someday.
- */
-
- char * s = bracket + 1;
-
- string_truncate( variable, bracket - varname );
-
- do /* so we can use "break" */
- {
- /* Allow negative indexes. */
- if ( !isdigit( *s ) && ( *s != '-' ) )
- {
- sub2 = 0;
- break;
- }
- sub1 = atoi( s );
-
- /* Skip over the first symbol, which is either a digit or dash. */
- ++s;
- while ( isdigit( *s ) ) ++s;
-
- if ( *s == MAGIC_RIGHT )
- {
- sub2 = sub1;
- break;
- }
-
- if ( *s != '-' )
- {
- sub2 = 0;
- break;
- }
-
- ++s;
-
- if ( *s == MAGIC_RIGHT )
- {
- sub2 = -1;
- break;
- }
-
- if ( !isdigit( *s ) && ( *s != '-' ) )
- {
- sub2 = 0;
- break;
- }
-
- /* First, compute the index of the last element. */
- sub2 = atoi( s );
- while ( isdigit( *++s ) );
-
- if ( *s != MAGIC_RIGHT )
- sub2 = 0;
-
- } while ( 0 );
-
- /* Anything but the end of the string, or the colon introducing
- * a modifier is a syntax error.
- */
- ++s;
- if ( *s && ( *s != MAGIC_COLON ) )
- sub2 = 0;
-
- *bracket = '\0';
- }
-
- /* Get variable value, with special handling for $(<), $(>), $(n).
- */
- if ( !varname[1] )
- {
- if ( varname[0] == '<' )
- value = lol_get( lol, 0 );
- else if ( varname[0] == '>' )
- value = lol_get( lol, 1 );
- else if ( ( varname[0] >= '1' ) && ( varname[0] <= '9' ) )
- value = lol_get( lol, varname[0] - '1' );
-            }
-            else if ( varname[0] == '1' && varname[1] >= '0' &&
-                      varname[1] <= '9' && !varname[2] )
-                value = lol_get( lol, varname[1] - '0' + 10 - 1 );
-
- if ( !value )
- value = var_get( varname );
-
-            /* Handle negative indexes: part two. */
- {
- int length = list_length( value );
-
- if ( sub1 < 0 )
- sub1 = length + sub1;
- else
- sub1 -= 1;
-
- if ( sub2 < 0 )
- sub2 = length + 1 + sub2 - sub1;
- else
- sub2 -= sub1;
- /* The "sub2 < 0" test handles the semantic error of sub2 <
- * sub1.
- */
- if ( sub2 < 0 )
- sub2 = 0;
- }
-
- /* The fast path: $(x) - just copy the variable value. This is only
- * an optimization.
- */
- if ( ( out == out_buf ) && !bracket && !colon && ( in == end ) )
- {
- string_free( variable );
- l = list_copy( l, value );
- continue;
- }
-
- /* Handle start subscript. */
- while ( ( sub1 > 0 ) && value )
- --sub1, value = list_next( value );
-
- /* Empty w/ :E=default?. */
- if ( !value && colon && edits.empty.ptr )
- evalue = value = list_new( L0, newstr( edits.empty.ptr ) );
-
- /* For each variable value. */
- string_new( out1 );
- for ( ; value; value = list_next( value ) )
- {
- LIST * rem;
- size_t postfix_start;
-
- /* Handle end subscript (length actually). */
-
- if ( sub2 >= 0 && --sub2 < 0 )
- break;
-
- string_truncate( buf, prefix_length );
-
- /* Apply : mods, if present */
-
- if ( colon && edits.filemods )
- var_edit_file( value->string, out1, &edits );
- else
- string_append( out1, value->string );
-
- if ( colon && ( edits.upshift || edits.downshift || edits.to_slashes || edits.to_windows ) )
- var_edit_shift( out1, &edits );
-
- /* Handle :J=joinval */
- /* If we have more values for this var, just keep appending them
- * (using the join value) rather than creating separate LIST
- * elements.
- */
- if ( colon && edits.join.ptr &&
- ( list_next( value ) || list_next( vars ) ) )
- {
- string_append( out1, edits.join.ptr );
- continue;
- }
-
- string_append( buf, out1->value );
- string_free( out1 );
- string_new( out1 );
-
- /* If no remainder, append result to output chain. */
- if ( in == end )
- {
- l = list_new( l, newstr( buf->value ) );
- continue;
- }
-
- /* For each remainder, append the complete string to the output
- * chain. Remember the end of the variable expansion so we can
- * just tack on each instance of 'remainder'.
- */
- postfix_start = buf->size;
- for ( rem = remainder; rem; rem = list_next( rem ) )
- {
- string_truncate( buf, postfix_start );
- string_append( buf, rem->string );
- l = list_new( l, newstr( buf->value ) );
- }
- }
- string_free( out1 );
-
- /* Toss used empty. */
- if ( evalue )
- list_free( evalue );
-
- string_free( variable );
- }
-
- /* variables & remainder were gifts from var_expand and must be freed. */
- if ( variables ) list_free( variables );
- if ( remainder ) list_free( remainder );
-
- if ( DEBUG_VAREXP )
- {
- printf( "expanded to " );
- list_print( l );
- printf( "\n" );
- }
-
- string_free( buf );
- return l;
- }
-}
-
-
-/*
- * var_edit_parse() - parse : modifiers into PATHNAME structure
- *
- * The : modifiers in a $(varname:modifier) currently support replacing or
- * omitting elements of a filename, and so they are parsed into a PATHNAME
- * structure (which contains pointers into the original string).
- *
- * Modifiers of the form "X=value" replace the component X with the given value.
- * Modifiers without the "=value" cause everything but the component X to be
- * omitted. X is one of:
- *
- * G <grist>
- * D directory name
- * B base name
- * S .suffix
- * M (member)
- * R root directory - prepended to whole path
- *
- * This routine sets:
- *
- * f->f_xxx.ptr = 0
- * f->f_xxx.len = 0
- * -> leave the original component xxx
- *
- * f->f_xxx.ptr = string
- * f->f_xxx.len = strlen( string )
- * -> replace component xxx with string
- *
- * f->f_xxx.ptr = ""
- * f->f_xxx.len = 0
- * -> omit component xxx
- *
- * var_edit_file() below and path_build() obligingly follow this convention.
- */
-
-static void var_edit_parse( char * mods, VAR_EDITS * edits )
-{
- int havezeroed = 0;
- memset( (char *)edits, 0, sizeof( *edits ) );
-
- while ( *mods )
- {
- char * p;
- PATHPART * fp;
-
- switch ( *mods++ )
- {
- case 'L': edits->downshift = 1; continue;
- case 'U': edits->upshift = 1; continue;
- case 'P': edits->parent = edits->filemods = 1; continue;
- case 'E': fp = &edits->empty; goto strval;
- case 'J': fp = &edits->join; goto strval;
- case 'G': fp = &edits->f.f_grist; goto fileval;
- case 'R': fp = &edits->f.f_root; goto fileval;
- case 'D': fp = &edits->f.f_dir; goto fileval;
- case 'B': fp = &edits->f.f_base; goto fileval;
- case 'S': fp = &edits->f.f_suffix; goto fileval;
- case 'M': fp = &edits->f.f_member; goto fileval;
- case 'T': edits->to_slashes = 1; continue;
- case 'W': edits->to_windows = 1; continue;
- default:
- return; /* Should complain, but so what... */
- }
-
- fileval:
- /* Handle :CHARS, where each char (without a following =) selects a
- * particular file path element. On the first such char, we deselect all
- * others (by setting ptr = "", len = 0) and for each char we select
- * that element (by setting ptr = 0).
- */
- edits->filemods = 1;
-
- if ( *mods != '=' )
- {
- if ( !havezeroed++ )
- {
- int i;
- for ( i = 0; i < 6; ++i )
- {
- edits->f.part[ i ].len = 0;
- edits->f.part[ i ].ptr = "";
- }
- }
-
- fp->ptr = 0;
- continue;
- }
-
- strval:
- /* Handle :X=value, or :X */
- if ( *mods != '=' )
- {
- fp->ptr = "";
- fp->len = 0;
- }
- else if ( ( p = strchr( mods, MAGIC_COLON ) ) )
- {
- *p = 0;
- fp->ptr = ++mods;
- fp->len = p - mods;
- mods = p + 1;
- }
- else
- {
- fp->ptr = ++mods;
- fp->len = strlen( mods );
- mods += fp->len;
- }
- }
-}
-
-
-/*
- * var_edit_file() - copy input target name to output, modifying filename.
- */
-
-static void var_edit_file( char * in, string * out, VAR_EDITS * edits )
-{
- PATHNAME pathname;
-
- /* Parse apart original filename, putting parts into "pathname". */
- path_parse( in, &pathname );
-
- /* Replace any pathname with edits->f */
- if ( edits->f.f_grist .ptr ) pathname.f_grist = edits->f.f_grist;
- if ( edits->f.f_root .ptr ) pathname.f_root = edits->f.f_root;
- if ( edits->f.f_dir .ptr ) pathname.f_dir = edits->f.f_dir;
- if ( edits->f.f_base .ptr ) pathname.f_base = edits->f.f_base;
- if ( edits->f.f_suffix.ptr ) pathname.f_suffix = edits->f.f_suffix;
- if ( edits->f.f_member.ptr ) pathname.f_member = edits->f.f_member;
-
- /* If requested, modify pathname to point to parent. */
- if ( edits->parent )
- path_parent( &pathname );
-
- /* Put filename back together. */
- path_build( &pathname, out, 0 );
-}
-
-
-/*
- * var_edit_shift() - do upshift/downshift mods.
- */
-
-static void var_edit_shift( string * out, VAR_EDITS * edits )
-{
- /* Handle upshifting, downshifting and slash translation now. */
- char * p;
- for ( p = out->value; *p; ++p)
- {
- if ( edits->upshift )
- *p = toupper( *p );
- else if ( edits->downshift )
- *p = tolower( *p );
- if ( edits->to_slashes && ( *p == '\\' ) )
- *p = '/';
-# ifdef OS_CYGWIN
- if ( edits->to_windows )
- {
- char result[ MAX_PATH + 1 ];
- cygwin_conv_to_win32_path( out->value, result );
- assert( strlen( result ) <= MAX_PATH );
- string_free( out );
- string_copy( out, result );
- }
-# endif
- }
- out->size = p - out->value;
-}
-
-
-#ifndef NDEBUG
-void var_expand_unit_test()
-{
- LOL lol[ 1 ];
- LIST * l;
- LIST * l2;
- LIST * expected = list_new( list_new( L0, newstr( "axb" ) ), newstr( "ayb" ) );
- LIST * e2;
- char axyb[] = "a$(xy)b";
- char azb[] = "a$($(z))b";
- char path[] = "$(p:W)";
-
-# ifdef OS_CYGWIN
- char cygpath[ 256 ];
- cygwin_conv_to_posix_path( "c:\\foo\\bar", cygpath );
-# else
- char cygpath[] = "/cygdrive/c/foo/bar";
-# endif
-
- lol_init(lol);
- var_set( "xy", list_new( list_new( L0, newstr( "x" ) ), newstr( "y" ) ), VAR_SET );
- var_set( "z", list_new( L0, newstr( "xy" ) ), VAR_SET );
- var_set( "p", list_new( L0, newstr( cygpath ) ), VAR_SET );
-
- l = var_expand( 0, axyb, axyb + sizeof( axyb ) - 1, lol, 0 );
- for ( l2 = l, e2 = expected; l2 && e2; l2 = list_next( l2 ), e2 = list_next( e2 ) )
- assert( !strcmp( e2->string, l2->string ) );
- assert( l2 == 0 );
- assert( e2 == 0 );
- list_free( l );
-
- l = var_expand( 0, azb, azb + sizeof( azb ) - 1, lol, 0 );
- for ( l2 = l, e2 = expected; l2 && e2; l2 = list_next( l2 ), e2 = list_next( e2 ) )
- assert( !strcmp( e2->string, l2->string ) );
- assert( l2 == 0 );
- assert( e2 == 0 );
- list_free( l );
-
- l = var_expand( 0, path, path + sizeof( path ) - 1, lol, 0 );
- assert( l != 0 );
- assert( list_next( l ) == 0 );
-# ifdef OS_CYGWIN
- /* On some installations of cygwin the drive letter is expanded to other
- * case. This has been reported to be the case if cygwin has been installed
- * to C:\ as opposed to C:\cygwin. Since case of the drive letter will not
- * matter, we allow for both.
- */
- assert( !strcmp( l->string, "c:\\foo\\bar" ) ||
- !strcmp( l->string, "C:\\foo\\bar" ) );
-# else
- assert( !strcmp( l->string, cygpath ) );
-# endif
- list_free( l );
- list_free( expected );
- lol_free( lol );
-}
-#endif
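var_expand() above produces the product of the expanded variable values and the expanded remainder of the input, which is how "$(X)-$(Y)" with X = a b and Y = 1 2 yields a-1 a-2 b-1 b-2. A minimal stand-alone illustration of that product rule on plain string arrays (hypothetical, without the engine's LIST type):

    #include <stdio.h>

    /* Hypothetical illustration of the product rule in var_expand():
     * every value of the variable is combined with every expansion of
     * the remainder of the input string. */
    int main( void )
    {
        char const * x[] = { "a", "b" };    /* values of $(X) */
        char const * y[] = { "1", "2" };    /* values of $(Y) */
        size_t i, j;

        for ( i = 0; i < sizeof( x ) / sizeof( *x ); ++i )
            for ( j = 0; j < sizeof( y ) / sizeof( *y ); ++j )
                printf( "%s-%s\n", x[ i ], y[ j ] );    /* "$(X)-$(Y)" */
        return 0;
    }

The engine builds the same product with nested loops over its LIST chains, appending each remainder onto the prefix already accumulated in buf.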
diff --git a/jam-files/engine/expand.h b/jam-files/engine/expand.h
deleted file mode 100644
index cc25d190..00000000
--- a/jam-files/engine/expand.h
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * expand.h - expand a buffer, given variable values
- */
-
-#include "lists.h"
-
-LIST *var_expand( LIST *l, char *in, char *end, LOL *lol, int cancopyin );
-void var_expand_unit_test();
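The [n-m] subscript handling in var_expand() (the "part two" block in expand.c above) normalizes 1-based, possibly negative indices into a 0-based start offset plus an element count. The hypothetical helper below restates just that arithmetic for a list of the given length.

    /* Hypothetical restatement of the subscript normalization in
     * var_expand(): indices are 1-based, negative values count from the
     * end of the list, and the result is a start offset and a count. */
    static void normalize_subscript( int length, int sub1, int sub2,
                                     int * start, int * count )
    {
        if ( sub1 < 0 ) sub1 = length + sub1;   /* -1 is the last element */
        else            sub1 -= 1;              /* make it 0-based */

        if ( sub2 < 0 ) sub2 = length + 1 + sub2 - sub1;
        else            sub2 -= sub1;           /* end index becomes a count */

        if ( sub2 < 0 ) sub2 = 0;               /* also covers sub2 < sub1 */

        *start = sub1;
        *count = sub2;
    }

For a five-element list, [2-4] yields start 1 and count 3, and [-2--1] yields start 3 and count 2, matching how the expansion loop then skips the start elements and stops after the count runs out.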
diff --git a/jam-files/engine/filemac.c b/jam-files/engine/filemac.c
deleted file mode 100644
index e69aa648..00000000
--- a/jam-files/engine/filemac.c
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "filesys.h"
-# include "pathsys.h"
-
-# ifdef OS_MAC
-
-#include <Files.h>
-#include <Folders.h>
-
-# include <:sys:stat.h>
-
-/*
- * filemac.c - manipulate file names and scan directories on macintosh
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * File_dirscan() and file_archscan() call back a caller provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 12/19/94 (mikem) - solaris string table insanity support
- * 02/14/95 (seiwald) - parse and build /xxx properly
- * 05/03/96 (seiwald) - split into pathunix.c
- * 11/21/96 (peterk) - BEOS does not have Unix-style archives
- */
-
-
-void CopyC2PStr( char const * cstr, StringPtr pstr )
-{
- int len;
- for ( len = 0; *cstr && ( len < 255 ); pstr[ ++len ] = *cstr++ );
- pstr[ 0 ] = len;
-}
-
-
-/*
- * file_dirscan() - scan a directory for files.
- */
-
-void file_dirscan( char * dir, scanback func, void * closure )
-{
- PATHNAME f;
- string filename[ 1 ];
- unsigned char fullPath[ 512 ];
-
- FSSpec spec;
- WDPBRec vol;
- Str63 volName;
- CInfoPBRec lastInfo;
- int index = 1;
-
- /* First enter directory itself. */
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_dir.ptr = dir;
- f.f_dir.len = strlen(dir);
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", dir );
-
- /* Special case ":" - enter it */
-
- if ( ( f.f_dir.len == 1 ) && ( f.f_dir.ptr[0] == ':' ) )
- (*func)( closure, dir, 0 /* not stat()'ed */, (time_t)0 );
-
- /* Now enter contents of directory */
-
- vol.ioNamePtr = volName;
-
- if ( PBHGetVolSync( &vol ) )
- return;
-
- CopyC2PStr( dir, fullPath );
-
- if ( FSMakeFSSpec( vol.ioWDVRefNum, vol.ioWDDirID, fullPath, &spec ) )
- return;
-
- lastInfo.dirInfo.ioVRefNum = spec.vRefNum;
- lastInfo.dirInfo.ioDrDirID = spec.parID;
- lastInfo.dirInfo.ioNamePtr = spec.name;
- lastInfo.dirInfo.ioFDirIndex = 0;
- lastInfo.dirInfo.ioACUser = 0;
-
- if ( PBGetCatInfoSync( &lastInfo ) )
- return;
-
- if ( !( lastInfo.dirInfo.ioFlAttrib & 0x10 ) )
- return;
-
- /* ioDrDirID must be reset each time. */
- spec.parID = lastInfo.dirInfo.ioDrDirID;
-
- string_new( filename );
- for ( ; ; )
- {
- lastInfo.dirInfo.ioVRefNum = spec.vRefNum;
- lastInfo.dirInfo.ioDrDirID = spec.parID;
- lastInfo.dirInfo.ioNamePtr = fullPath;
- lastInfo.dirInfo.ioFDirIndex = index++;
-
- if ( PBGetCatInfoSync( &lastInfo ) )
- return;
-
- f.f_base.ptr = (char *)fullPath + 1;
- f.f_base.len = *fullPath;
-
- string_truncate( filename, 0 );
- path_build( &f, filename, 0 );
- (*func)( closure, filename->value, 0 /* not stat()'ed */, (time_t)0 );
- }
- string_free( filename );
-}
-
-
-/*
- * file_time() - get timestamp of file, if not done by file_dirscan().
- */
-
-int file_time( char * filename, time_t * time )
-{
- struct stat statbuf;
-
- if ( stat( filename, &statbuf ) < 0 )
- return -1;
-
- *time = statbuf.st_mtime;
-
- return 0;
-}
-
-
-int file_is_file( char * filename )
-{
- struct stat statbuf;
- if ( stat( filename, &statbuf ) < 0 )
- return -1;
- return S_ISREG( statbuf.st_mode ) ? 1 : 0;
-}
-
-int file_mkdir(char *pathname)
-{
- return mkdir(pathname, 0766);
-}
-
-
-/*
- * file_archscan() - scan an archive for files.
- */
-
-void file_archscan( char * archive, scanback func, void * closure )
-{
-}
-
-
-# endif /* macintosh */
diff --git a/jam-files/engine/filent.c b/jam-files/engine/filent.c
deleted file mode 100644
index ab189576..00000000
--- a/jam-files/engine/filent.c
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-
-# include "filesys.h"
-# include "pathsys.h"
-# include "strings.h"
-# include "newstr.h"
-
-# ifdef OS_NT
-
-# ifdef __BORLANDC__
-# if __BORLANDC__ < 0x550
-# include <dir.h>
-# include <dos.h>
-# endif
-# undef FILENAME /* cpp namespace collision */
-# define _finddata_t ffblk
-# endif
-
-# include <io.h>
-# include <sys/stat.h>
-# include <ctype.h>
-# include <direct.h>
-
-/*
- * filent.c - scan directories and archives on NT
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * File_dirscan() and file_archscan() call back a caller provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 07/10/95 (taylor) Findfirst() returns the first file on NT.
- * 05/03/96 (seiwald) split apart into pathnt.c
- */
-
-/*
- * file_dirscan() - scan a directory for files
- */
-
-void file_dirscan( char * dir, scanback func, void * closure )
-{
- PROFILE_ENTER( FILE_DIRSCAN );
-
- file_info_t * d = 0;
-
- dir = short_path_to_long_path( dir );
-
- /* First enter directory itself */
-
- d = file_query( dir );
-
- if ( !d || !d->is_dir )
- {
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- if ( !d->files )
- {
- PATHNAME f;
- string filespec[ 1 ];
- string filename[ 1 ];
- long handle;
- int ret;
- struct _finddata_t finfo[ 1 ];
- LIST * files = L0;
- int d_length = strlen( d->name );
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_dir.ptr = d->name;
- f.f_dir.len = d_length;
-
- /* Now enter contents of directory */
-
- /* Prepare file search specification for the findfirst() API. */
- if ( d_length == 0 )
- string_copy( filespec, ".\\*" );
- else
- {
- /*
-             * We cannot simply assume the given folder name never ends with a
-             * path separator; if we did, we would not support the Windows root
-             * folder specified without its drive letter, i.e. '\'.
- */
- char trailingChar = d->name[ d_length - 1 ] ;
- string_copy( filespec, d->name );
- if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) )
- string_append( filespec, "\\" );
- string_append( filespec, "*" );
- }
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", dir );
-
- #if defined(__BORLANDC__) && __BORLANDC__ < 0x550
- if ( ret = findfirst( filespec->value, finfo, FA_NORMAL | FA_DIREC ) )
- {
- string_free( filespec );
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- string_new ( filename );
- while ( !ret )
- {
- file_info_t * ff = 0;
-
- f.f_base.ptr = finfo->ff_name;
- f.f_base.len = strlen( finfo->ff_name );
-
- string_truncate( filename, 0 );
-        path_build( &f, filename, 0 );
-
- files = list_new( files, newstr(filename->value) );
- ff = file_info( filename->value );
- ff->is_file = finfo->ff_attrib & FA_DIREC ? 0 : 1;
- ff->is_dir = finfo->ff_attrib & FA_DIREC ? 1 : 0;
- ff->size = finfo->ff_fsize;
-        ff->time = (finfo->ff_fdate << 16) | finfo->ff_ftime;
-
- ret = findnext( finfo );
- }
- # else
- handle = _findfirst( filespec->value, finfo );
-
- if ( ret = ( handle < 0L ) )
- {
- string_free( filespec );
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- string_new( filename );
- while ( !ret )
- {
- file_info_t * ff = 0;
-
- f.f_base.ptr = finfo->name;
- f.f_base.len = strlen( finfo->name );
-
- string_truncate( filename, 0 );
- path_build( &f, filename, 0 );
-
- files = list_new( files, newstr( filename->value ) );
- ff = file_info( filename->value );
- ff->is_file = finfo->attrib & _A_SUBDIR ? 0 : 1;
- ff->is_dir = finfo->attrib & _A_SUBDIR ? 1 : 0;
- ff->size = finfo->size;
- ff->time = finfo->time_write;
-
- ret = _findnext( handle, finfo );
- }
-
- _findclose( handle );
- # endif
- string_free( filename );
- string_free( filespec );
-
- d->files = files;
- }
-
- /* Special case \ or d:\ : enter it */
- {
- unsigned long len = strlen(d->name);
- if ( len == 1 && d->name[0] == '\\' )
- (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
- else if ( len == 3 && d->name[1] == ':' ) {
- (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
- /* We've just entered 3-letter drive name spelling (with trailing
- slash), into the hash table. Now enter two-letter variant,
- without trailing slash, so that if we try to check whether
- "c:" exists, we hit it.
-
- Jam core has workarounds for that. Given:
- x = c:\whatever\foo ;
- p = $(x:D) ;
- p2 = $(p:D) ;
- There will be no trailing slash in $(p), but there will be one
- in $(p2). But, that seems rather fragile.
- */
- d->name[2] = 0;
- (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
- }
- }
-
- /* Now enter contents of directory */
- if ( d->files )
- {
- LIST * files = d->files;
- while ( files )
- {
- file_info_t * ff = file_info( files->string );
- (*func)( closure, ff->name, 1 /* stat()'ed */, ff->time );
- files = list_next( files );
- }
- }
-
- PROFILE_EXIT( FILE_DIRSCAN );
-}
-
-file_info_t * file_query( char * filename )
-{
- file_info_t * ff = file_info( filename );
- if ( ! ff->time )
- {
- struct stat statbuf;
-
- if ( stat( *filename ? filename : ".", &statbuf ) < 0 )
- return 0;
-
- ff->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
- ff->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
- ff->size = statbuf.st_size;
- ff->time = statbuf.st_mtime ? statbuf.st_mtime : 1;
- }
- return ff;
-}
-
-/*
- * file_time() - get timestamp of file, if not done by file_dirscan()
- */
-
-int
-file_time(
- char *filename,
- time_t *time )
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- *time = ff->time;
- return 0;
-}
-
-int file_is_file(char* filename)
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- return ff->is_file;
-}
-
-int file_mkdir(char *pathname)
-{
- return _mkdir(pathname);
-}
-
-/*
- * file_archscan() - scan an archive for files
- */
-
-/* Straight from SunOS */
-
-#define ARMAG "!<arch>\n"
-#define SARMAG 8
-
-#define ARFMAG "`\n"
-
-struct ar_hdr {
- char ar_name[16];
- char ar_date[12];
- char ar_uid[6];
- char ar_gid[6];
- char ar_mode[8];
- char ar_size[10];
- char ar_fmag[2];
-};
-
-# define SARFMAG 2
-# define SARHDR sizeof( struct ar_hdr )
-
-void
-file_archscan(
- char *archive,
- scanback func,
- void *closure )
-{
- struct ar_hdr ar_hdr;
- char *string_table = 0;
- char buf[ MAXJPATH ];
- long offset;
- int fd;
-
- if ( ( fd = open( archive, O_RDONLY | O_BINARY, 0 ) ) < 0 )
- return;
-
- if ( read( fd, buf, SARMAG ) != SARMAG ||
- strncmp( ARMAG, buf, SARMAG ) )
- {
- close( fd );
- return;
- }
-
- offset = SARMAG;
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
- !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) )
- {
- long lar_date;
- long lar_size;
- char *name = 0;
- char *endname;
- char *c;
-
- sscanf( ar_hdr.ar_date, "%ld", &lar_date );
- sscanf( ar_hdr.ar_size, "%ld", &lar_size );
-
- lar_size = ( lar_size + 1 ) & ~1;
-
- if (ar_hdr.ar_name[0] == '/' && ar_hdr.ar_name[1] == '/' )
- {
-            /* This is the "string table" entry of the symbol table,
-            ** which holds the names of files that are longer than
-            ** 15 characters (i.e. do not fit into ar_name).
- */
-
- string_table = BJAM_MALLOC_ATOMIC(lar_size+1);
- if (read(fd, string_table, lar_size) != lar_size)
- printf("error reading string table\n");
- string_table[lar_size] = '\0';
- offset += SARHDR + lar_size;
- continue;
- }
- else if (ar_hdr.ar_name[0] == '/' && ar_hdr.ar_name[1] != ' ')
- {
- /* Long filenames are recognized by "/nnnn" where nnnn is
- ** the offset of the string in the string table represented
- ** in ASCII decimals.
- */
-
- name = string_table + atoi( ar_hdr.ar_name + 1 );
- for ( endname = name; *endname && *endname != '\n'; ++endname) {}
- }
- else
- {
- /* normal name */
- name = ar_hdr.ar_name;
- endname = name + sizeof( ar_hdr.ar_name );
- }
-
- /* strip trailing white-space, slashes, and backslashes */
-
- while ( endname-- > name )
- if ( !isspace(*endname) && ( *endname != '\\' ) && ( *endname != '/' ) )
- break;
- *++endname = 0;
-
- /* strip leading directory names, an NT specialty */
-
- if ( c = strrchr( name, '/' ) )
- name = c + 1;
- if ( c = strrchr( name, '\\' ) )
- name = c + 1;
-
-        sprintf( buf, "%s(%.*s)", archive, (int)( endname - name ), name );
- (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
-
- offset += SARHDR + lar_size;
- lseek( fd, offset, 0 );
- }
-
- close( fd );
-}
-
-# endif /* NT */
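file_archscan() above follows the SysV/GNU ar convention: member names longer than 15 characters live in a special "//" string-table member, and such members carry "/nnnn" in ar_name, the decimal offset of the real name inside that table. A compact sketch of the lookup (resolve_ar_name is a hypothetical helper; it assumes the string table was already read in and NUL-terminated, and it does not trim trailing blanks the way the engine does):

    #include <stdlib.h>

    /* Hypothetical sketch: map an ar_name field to the real member name.
     * "/nnnn" is an offset into the "//" string table; names there end at
     * '\n' (or '/' in some variants).  Short names sit in the header. */
    static void resolve_ar_name( char const * ar_name,
                                 char const * string_table,
                                 char * out, size_t out_size )
    {
        char const * name;
        size_t len;

        if ( ar_name[ 0 ] == '/' && ar_name[ 1 ] != '/' && ar_name[ 1 ] != ' ' )
            name = string_table + atoi( ar_name + 1 );   /* long name */
        else
            name = ar_name;                              /* in-header name */

        for ( len = 0;
              name[ len ] && name[ len ] != '\n' && name[ len ] != '/' &&
              name[ len ] != ' ' && len + 1 < out_size;
              ++len )
            out[ len ] = name[ len ];
        out[ len ] = '\0';
    }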
diff --git a/jam-files/engine/fileos2.c b/jam-files/engine/fileos2.c
deleted file mode 100644
index af2373ea..00000000
--- a/jam-files/engine/fileos2.c
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "filesys.h"
-# include "pathsys.h"
-
-/* note that we use "fileunix.c" when compiling with EMX on OS/2 */
-# if defined(OS_OS2) && !defined(__EMX__)
-
-# include <io.h>
-# include <dos.h>
-
-/*
- * fileos2.c - scan directories and archives on OS/2
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * File_dirscan() and file_archscan() call back a caller provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 07/10/95 (taylor) Findfirst() returns the first file on NT.
- * 05/03/96 (seiwald) split apart into pathnt.c
- * 09/22/00 (seiwald) handle \ and c:\ specially: don't add extra /
- */
-
-/*
- * file_dirscan() - scan a directory for files
- */
-
-void
-file_dirscan(
- char *dir,
- scanback func,
- void *closure )
-{
- PATHNAME f;
- string filespec[1];
- long handle;
- int ret;
- struct _find_t finfo[1];
-
- /* First enter directory itself */
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_dir.ptr = dir;
- f.f_dir.len = strlen(dir);
-
- dir = *dir ? dir : ".";
-
- /* Special case \ or d:\ : enter it */
- string_copy( filespec, dir );
-
- if ( f.f_dir.len == 1 && f.f_dir.ptr[0] == '\\' )
- (*func)( closure, dir, 0 /* not stat()'ed */, (time_t)0 );
- else if ( f.f_dir.len == 3 && f.f_dir.ptr[1] == ':' )
- (*func)( closure, dir, 0 /* not stat()'ed */, (time_t)0 );
- else
- string_push_back( filespec, '/' );
-
- string_push_back( filespec, '*' );
-
- /* Now enter contents of directory */
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", filespec->value );
-
- /* Time info in dos find_t is not very useful. It consists */
- /* of a separate date and time, and putting them together is */
- /* not easy. So we leave that to a later stat() call. */
-
- if ( !_dos_findfirst( filespec->value, _A_NORMAL|_A_RDONLY|_A_SUBDIR, finfo ) )
- {
- string filename[1];
- string_new( filename );
- do
- {
- f.f_base.ptr = finfo->name;
- f.f_base.len = strlen( finfo->name );
-
- string_truncate( filename, 0 );
- path_build( &f, filename, 0 );
- (*func)( closure, filename->value, 0 /* not stat()'ed */, (time_t)0 );
- }
- while ( !_dos_findnext( finfo ) );
- string_free( filename );
- }
-}
-
-/*
- * file_time() - get timestamp of file, if not done by file_dirscan()
- */
-
-int
-file_time(
- char *filename,
- time_t *time )
-{
- /* This is called on OS2, not NT. */
- /* NT fills in the time in the dirscan. */
-
- struct stat statbuf;
-
- if ( stat( filename, &statbuf ) < 0 )
- return -1;
-
- *time = statbuf.st_mtime;
-
- return 0;
-}
-
-void
-file_archscan(
- char *archive,
- scanback func,
- void *closure )
-{
-}
-
-# endif /* OS2 && !__EMX__ */
-
diff --git a/jam-files/engine/filesys.c b/jam-files/engine/filesys.c
deleted file mode 100644
index eb62ed40..00000000
--- a/jam-files/engine/filesys.c
+++ /dev/null
@@ -1,83 +0,0 @@
-# include "jam.h"
-# include "pathsys.h"
-# include "strings.h"
-# include "newstr.h"
-# include "filesys.h"
-# include "lists.h"
-
-void file_build1( PATHNAME * f, string * file )
-{
- if ( DEBUG_SEARCH )
- {
- printf("build file: ");
- if ( f->f_root.len )
- printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr );
- if ( f->f_dir.len )
- printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr );
- if ( f->f_base.len )
- printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr );
- printf( "\n" );
- }
-
- /* Start with the grist. If the current grist isn't */
- /* surrounded by <>'s, add them. */
-
- if ( f->f_grist.len )
- {
- if ( f->f_grist.ptr[0] != '<' )
- string_push_back( file, '<' );
- string_append_range(
- file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len );
- if ( file->value[file->size - 1] != '>' )
- string_push_back( file, '>' );
- }
-}
-
-static struct hash * filecache_hash = 0;
-static file_info_t filecache_finfo;
-
-file_info_t * file_info(char * filename)
-{
- file_info_t *finfo = &filecache_finfo;
-
- if ( !filecache_hash )
- filecache_hash = hashinit( sizeof( file_info_t ), "file_info" );
-
- finfo->name = filename;
- finfo->is_file = 0;
- finfo->is_dir = 0;
- finfo->size = 0;
- finfo->time = 0;
- finfo->files = 0;
- if ( hashenter( filecache_hash, (HASHDATA**)&finfo ) )
- {
- /* printf( "file_info: %s\n", filename ); */
- finfo->name = newstr( finfo->name );
- }
-
- return finfo;
-}
-
-static LIST * files_to_remove = L0;
-
-static void remove_files_atexit(void)
-{
- /* we do pop front in case this exit function is called
- more than once */
- while ( files_to_remove )
- {
- remove( files_to_remove->string );
- files_to_remove = list_pop_front( files_to_remove );
- }
-}
-
-void file_done()
-{
- remove_files_atexit();
- hashdone( filecache_hash );
-}
-
-void file_remove_atexit( const char * path )
-{
- files_to_remove = list_new( files_to_remove, newstr((char*)path) );
-}
diff --git a/jam-files/engine/filesys.h b/jam-files/engine/filesys.h
deleted file mode 100644
index efc081d1..00000000
--- a/jam-files/engine/filesys.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * filesys.h - OS specific file routines
- */
-
-#ifndef FILESYS_DWA20011025_H
-# define FILESYS_DWA20011025_H
-
-# include "pathsys.h"
-#include "hash.h"
-#include "lists.h"
-
-typedef void (*scanback)( void *closure, char *file, int found, time_t t );
-
-void file_dirscan( char *dir, scanback func, void *closure );
-void file_archscan( char *arch, scanback func, void *closure );
-
-int file_time( char *filename, time_t *time );
-
-void file_build1(PATHNAME *f, string* file) ;
-int file_is_file(char* filename);
-int file_mkdir(char *pathname);
-
-typedef struct file_info_t file_info_t ;
-struct file_info_t
-{
- char * name;
- short is_file;
- short is_dir;
- unsigned long size;
- time_t time;
- LIST * files;
-};
-
-
-/* Returns a pointer to information about file 'filename', creating the
- * structure as necessary. If created, the structure will be default initialized.
- */
-file_info_t * file_info( char * filename );
-
-/* Returns information about a file, queries the OS if needed. */
-file_info_t * file_query( char * filename );
-
-void file_done();
-
-/* Marks a path/file to be removed when jam exits. */
-void file_remove_atexit( const char * path );
-
-#endif
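The scanback typedef above is the contract every per-platform file_dirscan()/file_archscan() in this directory implements: the callback gets the caller's closure, the entry's path, a flag saying whether a timestamp is being supplied, and that timestamp. A minimal caller sketch (print_entry is a hypothetical callback; it assumes the header above):

    #include <stdio.h>
    #include <time.h>
    #include "filesys.h"    /* the header removed above */

    /* Hypothetical scanback: print each entry and note whether the scanner
     * supplied a timestamp or a later file_time() call would be needed. */
    static void print_entry( void * closure, char * file, int found, time_t t )
    {
        int * count = (int *)closure;
        ++*count;
        if ( found )
            printf( "%s (mtime %ld)\n", file, (long)t );
        else
            printf( "%s (timestamp not provided)\n", file );
    }

    /* Example use:
     *     int n = 0;
     *     file_dirscan( ".", print_entry, &n );
     */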
diff --git a/jam-files/engine/fileunix.c b/jam-files/engine/fileunix.c
deleted file mode 100644
index 680c3f53..00000000
--- a/jam-files/engine/fileunix.c
+++ /dev/null
@@ -1,501 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "filesys.h"
-# include "strings.h"
-# include "pathsys.h"
-# include "newstr.h"
-# include <stdio.h>
-# include <sys/stat.h>
-
-#if defined(sun) || defined(__sun) || defined(linux)
-# include <unistd.h> /* needed for read and close prototype */
-#endif
-
-# ifdef USE_FILEUNIX
-
-#if defined(sun) || defined(__sun)
-# include <unistd.h> /* needed for read and close prototype */
-#endif
-
-# if defined( OS_SEQUENT ) || \
- defined( OS_DGUX ) || \
- defined( OS_SCO ) || \
- defined( OS_ISC )
-# define PORTAR 1
-# endif
-
-# ifdef __EMX__
-# include <sys/types.h>
-# include <sys/stat.h>
-# endif
-
-# if defined( OS_RHAPSODY ) || \
- defined( OS_MACOSX ) || \
- defined( OS_NEXT )
-/* need unistd for rhapsody's proper lseek */
-# include <sys/dir.h>
-# include <unistd.h>
-# define STRUCT_DIRENT struct direct
-# else
-# include <dirent.h>
-# define STRUCT_DIRENT struct dirent
-# endif
-
-# ifdef OS_COHERENT
-# include <arcoff.h>
-# define HAVE_AR
-# endif
-
-# if defined( OS_MVS ) || \
- defined( OS_INTERIX )
-
-#define ARMAG "!<arch>\n"
-#define SARMAG 8
-#define ARFMAG "`\n"
-
-struct ar_hdr /* archive file member header - printable ascii */
-{
- char ar_name[16]; /* file member name - `/' terminated */
- char ar_date[12]; /* file member date - decimal */
- char ar_uid[6]; /* file member user id - decimal */
- char ar_gid[6]; /* file member group id - decimal */
- char ar_mode[8]; /* file member mode - octal */
- char ar_size[10]; /* file member size - decimal */
- char ar_fmag[2]; /* ARFMAG - string to end header */
-};
-
-# define HAVE_AR
-# endif
-
-# if defined( OS_QNX ) || \
- defined( OS_BEOS ) || \
- defined( OS_MPEIX )
-# define NO_AR
-# define HAVE_AR
-# endif
-
-# ifndef HAVE_AR
-
-# ifdef OS_AIX
-/* Define those for AIX to get the definitions for both the small and the
- * big variant of the archive file format. */
-# define __AR_SMALL__
-# define __AR_BIG__
-# endif
-
-# include <ar.h>
-# endif
-
-/*
- * fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * File_dirscan() and file_archscan() call back a caller provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 12/19/94 (mikem) - solaris string table insanity support
- * 02/14/95 (seiwald) - parse and build /xxx properly
- * 05/03/96 (seiwald) - split into pathunix.c
- * 11/21/96 (peterk) - BEOS does not have Unix-style archives
- */
-
-
-/*
- * file_dirscan() - scan a directory for files.
- */
-
-void file_dirscan( char * dir, scanback func, void * closure )
-{
- PROFILE_ENTER( FILE_DIRSCAN );
-
- file_info_t * d = 0;
-
- d = file_query( dir );
-
- if ( !d || !d->is_dir )
- {
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- if ( ! d->files )
- {
- LIST* files = L0;
- PATHNAME f;
- DIR *dd;
- STRUCT_DIRENT *dirent;
- string filename[1];
-
- /* First enter directory itself */
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_dir.ptr = dir;
- f.f_dir.len = strlen(dir);
-
- dir = *dir ? dir : ".";
-
- /* Now enter contents of directory. */
-
- if ( !( dd = opendir( dir ) ) )
- {
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", dir );
-
- string_new( filename );
- while ( ( dirent = readdir( dd ) ) )
- {
- # ifdef old_sinix
- /* Broken structure definition on sinix. */
- f.f_base.ptr = dirent->d_name - 2;
- # else
- f.f_base.ptr = dirent->d_name;
- # endif
- f.f_base.len = strlen( f.f_base.ptr );
-
- string_truncate( filename, 0 );
- path_build( &f, filename, 0 );
-
- files = list_new( files, newstr(filename->value) );
- file_query( filename->value );
- }
- string_free( filename );
-
- closedir( dd );
-
- d->files = files;
- }
-
- /* Special case / : enter it */
- {
- unsigned long len = strlen(d->name);
- if ( ( len == 1 ) && ( d->name[0] == '/' ) )
- (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
- }
-
- /* Now enter contents of directory */
- if ( d->files )
- {
- LIST * files = d->files;
- while ( files )
- {
- file_info_t * ff = file_info( files->string );
- (*func)( closure, ff->name, 1 /* stat()'ed */, ff->time );
- files = list_next( files );
- }
- }
-
- PROFILE_EXIT( FILE_DIRSCAN );
-}
-
-
-file_info_t * file_query( char * filename )
-{
- file_info_t * ff = file_info( filename );
- if ( ! ff->time )
- {
- struct stat statbuf;
-
- if ( stat( *filename ? filename : ".", &statbuf ) < 0 )
- return 0;
-
- ff->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
- ff->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
- ff->size = statbuf.st_size;
- ff->time = statbuf.st_mtime ? statbuf.st_mtime : 1;
- }
- return ff;
-}
-
-/*
- * file_time() - get timestamp of file, if not done by file_dirscan()
- */
-
-int
-file_time(
- char *filename,
- time_t *time )
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- *time = ff->time;
- return 0;
-}
-
-int file_is_file(char* filename)
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- return ff->is_file;
-}
-
-int file_mkdir(char* pathname)
-{
- return mkdir(pathname, 0766);
-}
-
-/*
- * file_archscan() - scan an archive for files
- */
-
-# ifndef AIAMAG /* God-fearing UNIX */
-
-# define SARFMAG 2
-# define SARHDR sizeof( struct ar_hdr )
-
-void
-file_archscan(
- char *archive,
- scanback func,
- void *closure )
-{
-# ifndef NO_AR
- struct ar_hdr ar_hdr;
- char buf[ MAXJPATH ];
- long offset;
- char *string_table = 0;
- int fd;
-
- if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 )
- return;
-
- if ( read( fd, buf, SARMAG ) != SARMAG ||
- strncmp( ARMAG, buf, SARMAG ) )
- {
- close( fd );
- return;
- }
-
- offset = SARMAG;
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR )
- && !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG )
-#ifdef ARFZMAG
- /* OSF also has a compressed format */
- && memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG )
-#endif
- ) )
- {
- char lar_name_[257];
- char * lar_name = lar_name_ + 1;
- long lar_date;
- long lar_size;
- long lar_offset;
- char * c;
- char * src;
- char * dest;
-
- strncpy( lar_name, ar_hdr.ar_name, sizeof(ar_hdr.ar_name) );
-
- sscanf( ar_hdr.ar_date, "%ld", &lar_date );
- sscanf( ar_hdr.ar_size, "%ld", &lar_size );
-
- if (ar_hdr.ar_name[0] == '/')
- {
- if (ar_hdr.ar_name[1] == '/')
- {
-                /* This is the "string table" entry of the symbol table,
-                ** which holds the names of files that are longer than
-                ** 15 characters (i.e. do not fit into ar_name).
- */
-
- string_table = (char *)BJAM_MALLOC_ATOMIC(lar_size);
- lseek(fd, offset + SARHDR, 0);
- if (read(fd, string_table, lar_size) != lar_size)
- printf("error reading string table\n");
- }
- else if (string_table && ar_hdr.ar_name[1] != ' ')
- {
- /* Long filenames are recognized by "/nnnn" where nnnn is
- ** the offset of the string in the string table represented
- ** in ASCII decimals.
- */
- dest = lar_name;
- lar_offset = atoi(lar_name + 1);
- src = &string_table[lar_offset];
- while (*src != '/')
- *dest++ = *src++;
- *dest = '/';
- }
- }
-
- c = lar_name - 1;
- while ( ( *++c != ' ' ) && ( *c != '/' ) ) ;
- *c = '\0';
-
- if ( DEBUG_BINDSCAN )
- printf( "archive name %s found\n", lar_name );
-
- sprintf( buf, "%s(%s)", archive, lar_name );
-
- (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
-
- offset += SARHDR + ( ( lar_size + 1 ) & ~1 );
- lseek( fd, offset, 0 );
- }
-
- if (string_table)
- BJAM_FREE(string_table);
-
- close( fd );
-
-# endif /* NO_AR */
-
-}
-
-# else /* AIAMAG - RS6000 AIX */
-
-static void file_archscan_small(
- int fd, char const *archive, scanback func, void *closure)
-{
- struct fl_hdr fl_hdr;
-
- struct {
- struct ar_hdr hdr;
- char pad[ 256 ];
- } ar_hdr ;
-
- char buf[ MAXJPATH ];
- long offset;
-
- if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ)
- return;
-
- sscanf( fl_hdr.fl_fstmoff, "%ld", &offset );
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( offset > 0 )
- && ( lseek( fd, offset, 0 ) >= 0 )
- && ( read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) )
- {
- long lar_date;
- int lar_namlen;
-
- sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
- sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date );
- sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset );
-
- if ( !lar_namlen )
- continue;
-
- ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
-
- sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
-
- (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
- }
-}
-
-/* Check for OS version which supports the big variant. */
-#ifdef AR_HSZ_BIG
-
-static void file_archscan_big(
- int fd, char const *archive, scanback func, void *closure)
-{
- struct fl_hdr_big fl_hdr;
-
- struct {
- struct ar_hdr_big hdr;
- char pad[ 256 ];
- } ar_hdr ;
-
- char buf[ MAXJPATH ];
- long long offset;
-
- if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG) != FL_HSZ_BIG)
- return;
-
- sscanf( fl_hdr.fl_fstmoff, "%lld", &offset );
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( offset > 0 )
- && ( lseek( fd, offset, 0 ) >= 0 )
- && ( read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) )
- {
- long lar_date;
- int lar_namlen;
-
- sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
- sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date );
- sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset );
-
- if ( !lar_namlen )
- continue;
-
- ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
-
- sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
-
- (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
- }
-
-}
-
-#endif /* AR_HSZ_BIG */
-
-void file_archscan(char *archive, scanback func, void *closure)
-{
- int fd;
- char fl_magic[SAIAMAG];
-
- if (( fd = open(archive, O_RDONLY, 0)) < 0)
- return;
-
- if (read( fd, fl_magic, SAIAMAG) != SAIAMAG
- || lseek(fd, 0, SEEK_SET) == -1)
- {
- close(fd);
- return;
- }
-
- if (strncmp(AIAMAG, fl_magic, SAIAMAG) == 0)
- {
- /* read small variant */
- file_archscan_small(fd, archive, func, closure);
- }
-#ifdef AR_HSZ_BIG
- else if (strncmp(AIAMAGBIG, fl_magic, SAIAMAG) == 0)
- {
- /* read big variant */
- file_archscan_big(fd, archive, func, closure);
- }
-#endif
-
- close( fd );
-}
-
-# endif /* AIAMAG - RS6000 AIX */
-
-# endif /* USE_FILEUNIX */
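One detail worth calling out in the archive scanners above: ar header fields such as ar_date and ar_size are fixed-width, space-padded ASCII decimal with no NUL terminator, which is why the engine sscanfs the raw field and relies on the padding. A slightly more defensive stand-alone version of that parse (parse_ar_field is a hypothetical helper) copies the field into a terminated buffer first:

    #include <stdio.h>
    #include <string.h>

    /* Hypothetical helper: parse a fixed-width, space-padded ASCII decimal
     * field (as found in ar headers) without relying on NUL termination. */
    static long parse_ar_field( char const * field, size_t width )
    {
        char buf[ 32 ];
        long value = 0;

        if ( width >= sizeof( buf ) )
            width = sizeof( buf ) - 1;
        memcpy( buf, field, width );
        buf[ width ] = '\0';                 /* now safe to scan */
        sscanf( buf, "%ld", &value );
        return value;
    }

    /* Example: long date = parse_ar_field( ar_hdr.ar_date, sizeof( ar_hdr.ar_date ) ); */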
diff --git a/jam-files/engine/filevms.c b/jam-files/engine/filevms.c
deleted file mode 100644
index d2ab2047..00000000
--- a/jam-files/engine/filevms.c
+++ /dev/null
@@ -1,327 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "filesys.h"
-# include "pathsys.h"
-
-# ifdef OS_VMS
-
-/*
- * filevms.c - scan directories and libraries on VMS
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * File_dirscan() and file_archscan() call back a caller provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 02/09/95 (seiwald) - bungled R=[xxx] - was using directory length!
- * 05/03/96 (seiwald) - split into pathvms.c
- */
-
-# include <rms.h>
-# include <iodef.h>
-# include <ssdef.h>
-# include <string.h>
-# include <stdlib.h>
-# include <stdio.h>
-# include <descrip.h>
-
-#include <lbrdef.h>
-#include <credef.h>
-#include <mhddef.h>
-#include <lhidef.h>
-#include <lib$routines.h>
-#include <starlet.h>
-
-/* Supply missing prototypes for lbr$-routines*/
-
-#ifdef __cplusplus
-extern "C" {
-#endif /* __cplusplus */
-
-int lbr$set_module(
- void **,
- unsigned long *,
- struct dsc$descriptor_s *,
- unsigned short *,
- void * );
-
-int lbr$open( void **,
- struct dsc$descriptor_s *,
- void *,
- void *,
- void *,
- void *,
- void * );
-
-int lbr$ini_control(
- void **,
- unsigned long *,
- unsigned long *,
- void * );
-
-int lbr$get_index(
- void **,
- unsigned long *,
- int (*func)( struct dsc$descriptor_s *, unsigned long *),
- void * );
-
-int lbr$close(
- void ** );
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-static void
-file_cvttime(
- unsigned int *curtime,
- time_t *unixtime )
-{
- static const size_t divisor = 10000000;
- static unsigned int bastim[2] = { 0x4BEB4000, 0x007C9567 }; /* 1/1/1970 */
- int delta[2], remainder;
-
- lib$subx( curtime, bastim, delta );
- lib$ediv( &divisor, delta, unixtime, &remainder );
-}
-
-# define DEFAULT_FILE_SPECIFICATION "[]*.*;0"
-
-# define min( a,b ) ((a)<(b)?(a):(b))
-
-void
-file_dirscan(
- char *dir,
- scanback func,
- void *closure )
-{
-
- struct FAB xfab;
- struct NAM xnam;
- struct XABDAT xab;
- char esa[256];
- char filename[256];
- string filename2[1];
- char dirname[256];
- register int status;
- PATHNAME f;
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_root.ptr = dir;
- f.f_root.len = strlen( dir );
-
- /* get the input file specification
- */
- xnam = cc$rms_nam;
- xnam.nam$l_esa = esa;
- xnam.nam$b_ess = sizeof( esa ) - 1;
- xnam.nam$l_rsa = filename;
- xnam.nam$b_rss = min( sizeof( filename ) - 1, NAM$C_MAXRSS );
-
- xab = cc$rms_xabdat; /* initialize extended attributes */
- xab.xab$b_cod = XAB$C_DAT; /* ask for date */
- xab.xab$l_nxt = NULL; /* terminate XAB chain */
-
- xfab = cc$rms_fab;
- xfab.fab$l_dna = DEFAULT_FILE_SPECIFICATION;
- xfab.fab$b_dns = sizeof( DEFAULT_FILE_SPECIFICATION ) - 1;
- xfab.fab$l_fop = FAB$M_NAM;
- xfab.fab$l_fna = dir; /* address of file name */
- xfab.fab$b_fns = strlen( dir ); /* length of file name */
-    xfab.fab$l_nam = &xnam;              /* address of NAM block */
- xfab.fab$l_xab = (char *)&xab; /* address of XAB block */
-
-
- status = sys$parse( &xfab );
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", dir );
-
- if ( !( status & 1 ) )
- return;
-
-
-
- /* Add bogus directory for [000000] */
-
- if ( !strcmp( dir, "[000000]" ) )
- {
- (*func)( closure, "[000000]", 1 /* time valid */, 1 /* old but true */ );
- }
-
- /* Add bogus directory for [] */
-
- if ( !strcmp( dir, "[]" ) )
- {
- (*func)( closure, "[]", 1 /* time valid */, 1 /* old but true */ );
- (*func)( closure, "[-]", 1 /* time valid */, 1 /* old but true */ );
- }
-
- string_new( filename2 );
- while ( (status = sys$search( &xfab )) & 1 )
- {
- char *s;
- time_t time;
-
- /* "I think that might work" - eml */
-
- sys$open( &xfab );
- sys$close( &xfab );
-
- file_cvttime( (unsigned int *)&xab.xab$q_rdt, &time );
-
- filename[xnam.nam$b_rsl] = '\0';
-
- /* What we do with the name depends on the suffix: */
- /* .dir is a directory */
- /* .xxx is a file with a suffix */
- /* . is no suffix at all */
-
- if ( xnam.nam$b_type == 4 && !strncmp( xnam.nam$l_type, ".DIR", 4 ) )
- {
- /* directory */
- sprintf( dirname, "[.%.*s]", xnam.nam$b_name, xnam.nam$l_name );
- f.f_dir.ptr = dirname;
- f.f_dir.len = strlen( dirname );
- f.f_base.ptr = 0;
- f.f_base.len = 0;
- f.f_suffix.ptr = 0;
- f.f_suffix.len = 0;
- }
- else
- {
- /* normal file with a suffix */
- f.f_dir.ptr = 0;
- f.f_dir.len = 0;
- f.f_base.ptr = xnam.nam$l_name;
- f.f_base.len = xnam.nam$b_name;
- f.f_suffix.ptr = xnam.nam$l_type;
- f.f_suffix.len = xnam.nam$b_type;
- }
-
- string_truncate( filename2, 0 );
- path_build( &f, filename2, 0 );
-
- /*
- if ( DEBUG_SEARCH )
- printf("root '%s' base %.*s suf %.*s = %s\n",
- dir,
- xnam.nam$b_name, xnam.nam$l_name,
- xnam.nam$b_type, xnam.nam$l_type,
- filename2 );
- */
-
- (*func)( closure, filename2->value, 1 /* time valid */, time );
- }
- string_free( filename2 );
-}
-
-int
-file_time(
- char *filename,
- time_t *time )
-{
- /* This should never be called, as all files are */
- /* timestamped in file_dirscan() and file_archscan() */
- return -1;
-}
-
-static char *VMS_archive = 0;
-static scanback VMS_func;
-static void *VMS_closure;
-static void *context;
-
-static int
-file_archmember(
- struct dsc$descriptor_s *module,
- unsigned long *rfa )
-{
- static struct dsc$descriptor_s bufdsc =
- {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
-
- struct mhddef *mhd;
- char filename[128];
- char buf[ MAXJPATH ];
-
- int status;
- time_t library_date;
-
- register int i;
- register char *p;
-
- bufdsc.dsc$a_pointer = filename;
- bufdsc.dsc$w_length = sizeof( filename );
- status = lbr$set_module( &context, rfa, &bufdsc,
- &bufdsc.dsc$w_length, NULL );
-
- if ( !(status & 1) )
- return ( 1 );
-
- mhd = (struct mhddef *)filename;
-
- file_cvttime( &mhd->mhd$l_datim, &library_date );
-
- for ( i = 0, p = module->dsc$a_pointer; i < module->dsc$w_length; ++i, ++p )
- filename[ i ] = *p;
-
- filename[ i ] = '\0';
-
- sprintf( buf, "%s(%s.obj)", VMS_archive, filename );
-
- (*VMS_func)( VMS_closure, buf, 1 /* time valid */, (time_t)library_date );
-
- return ( 1 );
-}
-
-
-void file_archscan( char * archive, scanback func, void * closure )
-{
- static struct dsc$descriptor_s library =
- {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
-
- unsigned long lfunc = LBR$C_READ;
- unsigned long typ = LBR$C_TYP_UNK;
- unsigned long index = 1;
-
- register int status;
-
- VMS_archive = archive;
- VMS_func = func;
- VMS_closure = closure;
-
- status = lbr$ini_control( &context, &lfunc, &typ, NULL );
- if ( !( status & 1 ) )
- return;
-
- library.dsc$a_pointer = archive;
- library.dsc$w_length = strlen( archive );
-
- status = lbr$open( &context, &library, NULL, NULL, NULL, NULL, NULL );
- if ( !( status & 1 ) )
- return;
-
- (void) lbr$get_index( &context, &index, file_archmember, NULL );
-
- (void) lbr$close( &context );
-}
-
-# endif /* VMS */
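
file_cvttime() above converts a VMS quadword timestamp (100-nanosecond ticks counted from the VMS base date) to Unix time by subtracting a precomputed quadword for 1970-01-01 and dividing by 10^7 with lib$subx/lib$ediv. A self-contained sketch of the same arithmetic in portable C, assuming the quadword is available as a 64-bit integer (the constant below is the same pair of 32-bit words as bastim; vms_to_unix is a hypothetical helper, not part of the engine):

    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    /* 100 ns ticks between the VMS base date and 1970-01-01; the high and low
     * words are the bastim values used by file_cvttime() above. */
    static const uint64_t VMS_UNIX_EPOCH =
        ( (uint64_t)0x007C9567 << 32 ) | 0x4BEB4000;

    static time_t vms_to_unix( uint64_t vmstime )
    {
        /* subtract the 1970 base, then convert 100 ns ticks to seconds */
        return (time_t)( ( vmstime - VMS_UNIX_EPOCH ) / 10000000u );
    }

    int main( void )
    {
        /* one day past the Unix epoch, expressed as a VMS quadword */
        uint64_t t = VMS_UNIX_EPOCH + (uint64_t)86400 * 10000000u;
        printf( "%ld\n", (long)vms_to_unix( t ) );   /* prints 86400 */
        return 0;
    }
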
diff --git a/jam-files/engine/frames.c b/jam-files/engine/frames.c
deleted file mode 100644
index 84889f09..00000000
--- a/jam-files/engine/frames.c
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "frames.h"
-# include "lists.h"
-
-void frame_init( FRAME* frame )
-{
- frame->prev = 0;
- lol_init(frame->args);
- frame->module = root_module();
- frame->rulename = "module scope";
- frame->procedure = 0;
-}
-
-void frame_free( FRAME* frame )
-{
- lol_free( frame->args );
-}
diff --git a/jam-files/engine/frames.h b/jam-files/engine/frames.h
deleted file mode 100644
index 693d77fa..00000000
--- a/jam-files/engine/frames.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-#ifndef FRAMES_DWA20011021_H
-#define FRAMES_DWA20011021_H
-
-#include "lists.h"
-#include "modules.h"
-
-typedef struct _PARSE PARSE;
-typedef struct frame FRAME;
-
-struct frame
-{
- FRAME * prev;
- /* The nearest enclosing frame for which module->user_module is true. */
- FRAME * prev_user;
- LOL args[ 1 ];
- module_t * module;
- PARSE * procedure;
- char * rulename;
-};
-
-
-/* When a call into Python is in progress, this variable points to the bjam frame
- * that was current at the moment of the call. When the call completes, the variable
- * is not defined. Further, if Jam calls Python which calls Jam and so on, this
- * variable only keeps the most recent Jam frame.
- */
-extern struct frame * frame_before_python_call;
-
-void frame_init( FRAME * ); /* implemented in frames.c */
-void frame_free( FRAME * ); /* implemented in frames.c */
-
-#endif
diff --git a/jam-files/engine/glob.c b/jam-files/engine/glob.c
deleted file mode 100644
index 527d6c80..00000000
--- a/jam-files/engine/glob.c
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright 1994 Christopher Seiwald. All rights reserved.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * glob.c - match a string against a simple pattern
- *
- * Understands the following patterns:
- *
- * * any number of characters
- * ? any single character
- * [a-z] any single character in the range a-z
- * [^a-z] any single character not in the range a-z
- * \x match x
- *
- * External functions:
- *
- * glob() - match a string against a simple pattern
- *
- * Internal functions:
- *
- * globchars() - build a bitlist to check for character group match
- */
-
-# include "jam.h"
-
-# define CHECK_BIT( tab, bit ) ( tab[ (bit)/8 ] & (1<<( (bit)%8 )) )
-# define BITLISTSIZE 16 /* bytes used for [chars] in compiled expr */
-
-static void globchars( char * s, char * e, char * b );
-
-
-/*
- * glob() - match a string against a simple pattern.
- */
-
-int glob( char * c, char * s )
-{
- char bitlist[ BITLISTSIZE ];
- char * here;
-
- for ( ; ; )
- switch ( *c++ )
- {
- case '\0':
- return *s ? -1 : 0;
-
- case '?':
- if ( !*s++ )
- return 1;
- break;
-
- case '[':
- /* Scan for matching ]. */
-
- here = c;
- do if ( !*c++ ) return 1;
- while ( ( here == c ) || ( *c != ']' ) );
- ++c;
-
- /* Build character class bitlist. */
-
- globchars( here, c, bitlist );
-
- if ( !CHECK_BIT( bitlist, *(unsigned char *)s ) )
- return 1;
- ++s;
- break;
-
- case '*':
- here = s;
-
- while ( *s )
- ++s;
-
- /* Try to match the rest of the pattern in a recursive */
- /* call. If the match fails we'll back up chars, retrying. */
-
- while ( s != here )
- {
- int r;
-
- /* A fast path for the last token in a pattern. */
- r = *c ? glob( c, s ) : *s ? -1 : 0;
-
- if ( !r )
- return 0;
- if ( r < 0 )
- return 1;
- --s;
- }
- break;
-
- case '\\':
- /* Force literal match of next char. */
- if ( !*c || ( *s++ != *c++ ) )
- return 1;
- break;
-
- default:
- if ( *s++ != c[ -1 ] )
- return 1;
- break;
- }
-}
-
-
-/*
- * globchars() - build a bitlist to check for character group match.
- */
-
-static void globchars( char * s, char * e, char * b )
-{
- int neg = 0;
-
- memset( b, '\0', BITLISTSIZE );
-
- if ( *s == '^' )
- {
- ++neg;
- ++s;
- }
-
- while ( s < e )
- {
- int c;
-
- if ( ( s + 2 < e ) && ( s[1] == '-' ) )
- {
- for ( c = s[0]; c <= s[2]; ++c )
- b[ c/8 ] |= ( 1 << ( c % 8 ) );
- s += 3;
- }
- else
- {
- c = *s++;
- b[ c/8 ] |= ( 1 << ( c % 8 ) );
- }
- }
-
- if ( neg )
- {
- int i;
- for ( i = 0; i < BITLISTSIZE; ++i )
- b[ i ] ^= 0377;
- }
-
- /* Do not include \0 in either $[chars] or $[^chars]. */
- b[0] &= 0376;
-}
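
The pattern language above maps almost one-to-one onto POSIX fnmatch(), which also returns 0 on a match; the main difference is that the negated class is spelled [!a-z] there rather than [^a-z]. A minimal sketch for experimenting with the same patterns outside the engine:

    #include <stdio.h>
    #include <fnmatch.h>

    int main( void )
    {
        /* 0 means "matched", just like glob() above. */
        printf( "%d\n", fnmatch( "*.c",      "glob.c", 0 ) );  /* 0: match      */
        printf( "%d\n", fnmatch( "ja?.[ch]", "jam.h",  0 ) );  /* 0: match      */
        printf( "%d\n", fnmatch( "[a-f]*",   "zebra",  0 ) );  /* !=0: no match */
        return 0;
    }
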
diff --git a/jam-files/engine/hash.c b/jam-files/engine/hash.c
deleted file mode 100644
index fbd1a899..00000000
--- a/jam-files/engine/hash.c
+++ /dev/null
@@ -1,459 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "hash.h"
-# include "compile.h"
-# include <assert.h>
-
-/*
- * hash.c - simple in-memory hashing routines
- *
- * External routines:
- *
- * hashinit() - initialize a hash table, returning a handle
- * hashitem() - find a record in the table, and optionally enter a new one
- * hashdone() - free a hash table, given its handle
- *
- * Internal routines:
- *
- * hashrehash() - resize and rebuild hp->tab, the hash table
- *
- * 4/29/93 - ensure ITEM's are aligned
- */
-
-/* */
-#define HASH_DEBUG_PROFILE 1
-/* */
-
-char *hashsccssid="@(#)hash.c 1.14 () 6/20/88";
-
-/* Header attached to all data items entered into a hash table. */
-
-struct hashhdr
-{
- struct item * next;
- unsigned int keyval; /* for quick comparisons */
-};
-
-/* This structure overlays the one handed to hashenter(). Its actual size is
- * given to hashinit().
- */
-
-struct hashdata
-{
- char * key;
- /* rest of user data */
-};
-
-typedef struct item
-{
- struct hashhdr hdr;
- struct hashdata data;
-} ITEM ;
-
-# define MAX_LISTS 32
-
-struct hash
-{
- /*
- * the hash table, just an array of item pointers
- */
- struct {
- int nel;
- ITEM **base;
- } tab;
-
- int bloat; /* tab.nel / items.nel */
- int inel; /* initial number of elements */
-
- /*
- * the array of records, maintained by these routines
- * essentially a microallocator
- */
- struct {
- int more; /* how many more ITEMs fit in lists[ list ] */
- ITEM *free; /* free list of items */
- char *next; /* where to put more ITEMs in lists[ list ] */
- int datalen; /* length of records in this hash table */
- int size; /* sizeof( ITEM ) + aligned datalen */
- int nel; /* total ITEMs held by all lists[] */
- int list; /* index into lists[] */
-
- struct {
- int nel; /* total ITEMs held by this list */
- char *base; /* base of ITEMs array */
- } lists[ MAX_LISTS ];
- } items;
-
- char * name; /* just for hashstats() */
-};
-
-static void hashrehash( struct hash *hp );
-static void hashstat( struct hash *hp );
-static void * hash_mem_alloc(size_t datalen, size_t size);
-static void hash_mem_free(size_t datalen, void * data);
-#ifdef OPT_BOEHM_GC
-static void hash_mem_finalizer(char * key, struct hash * hp);
-#endif
-
-static unsigned int jenkins_one_at_a_time_hash(const unsigned char *key)
-{
- unsigned int hash = 0;
-
- while ( *key )
- {
- hash += *key++;
- hash += (hash << 10);
- hash ^= (hash >> 6);
- }
- hash += (hash << 3);
- hash ^= (hash >> 11);
- hash += (hash << 15);
-
- return hash;
-}
-
-/*
-static unsigned int knuth_hash(const unsigned char *key)
-{
- unsigned int keyval = *key;
- while ( *key )
- keyval = keyval * 2147059363 + *key++;
- return keyval;
-}
-*/
-
-static unsigned int hash_keyval( const char * key_ )
-{
- /*
- return knuth_hash((const unsigned char *)key_);
- */
- return jenkins_one_at_a_time_hash((const unsigned char *)key_);
-}
-
-#define hash_bucket(hp,keyval) ((hp)->tab.base + ( (keyval) % (hp)->tab.nel ))
-
-/* Find the hash item for the given data. Returns pointer to the
- item and if given a pointer to the item before the found item.
- If it's the first item in a bucket, there is no previous item,
- and zero is returned for the previous item instead.
-*/
-static ITEM * hash_search(
- struct hash *hp,
- unsigned int keyval,
- const char * keydata,
- ITEM * * previous )
-{
- ITEM * i = *hash_bucket(hp,keyval);
- ITEM * p = 0;
-
- for ( ; i; i = i->hdr.next )
- {
- if ( ( keyval == i->hdr.keyval ) &&
- !strcmp( i->data.key, keydata ) )
- {
- if (previous)
- {
- *previous = p;
- }
- return i;
- }
- p = i;
- }
-
- return 0;
-}
-
-/*
- * hash_free() - remove the given item from the table if it's there.
- * Returns 1 if found, 0 otherwise.
- *
- * NOTE: 2nd argument is HASHDATA*, not HASHDATA** as elsewhere.
- */
-int
-hash_free(
- register struct hash *hp,
- HASHDATA *data)
-{
- ITEM * i = 0;
- ITEM * prev = 0;
- unsigned int keyval = hash_keyval(data->key);
-
- i = hash_search( hp, keyval, data->key, &prev );
- if (i)
- {
- /* mark it free so we skip it during enumeration */
- i->data.key = 0;
- /* unlink the record from the hash chain */
- if (prev) prev->hdr.next = i->hdr.next;
- else *hash_bucket(hp,keyval) = i->hdr.next;
- /* link it into the freelist */
- i->hdr.next = hp->items.free;
- hp->items.free = i;
- /* we have another item */
- hp->items.more++;
-
- return 1;
- }
- return 0;
-}
-
-/*
- * hashitem() - find a record in the table, and optionally enter a new one
- */
-
-int
-hashitem(
- register struct hash *hp,
- HASHDATA **data,
- int enter )
-{
- register ITEM *i;
- char *b = (*data)->key;
- unsigned int keyval = hash_keyval(b);
-
- #ifdef HASH_DEBUG_PROFILE
- profile_frame prof[1];
- if ( DEBUG_PROFILE )
- profile_enter( 0, prof );
- #endif
-
- if ( enter && !hp->items.more )
- hashrehash( hp );
-
- if ( !enter && !hp->items.nel )
- {
- #ifdef HASH_DEBUG_PROFILE
- if ( DEBUG_PROFILE )
- profile_exit( prof );
- #endif
- return 0;
- }
-
- i = hash_search( hp, keyval, (*data)->key, 0 );
- if (i)
- {
- *data = &i->data;
- #ifdef HASH_DEBUG_PROFILE
- if ( DEBUG_PROFILE ) profile_exit( prof );
- #endif
- return !0;
- }
-
- if ( enter )
- {
- ITEM * * base = hash_bucket(hp,keyval);
-
- /* try to grab one from the free list */
- if ( hp->items.free )
- {
- i = hp->items.free;
- hp->items.free = i->hdr.next;
- assert( i->data.key == 0 );
- }
- else
- {
- i = (ITEM *)hp->items.next;
- hp->items.next += hp->items.size;
- }
- hp->items.more--;
- memcpy( (char *)&i->data, (char *)*data, hp->items.datalen );
- i->hdr.keyval = keyval;
- i->hdr.next = *base;
- *base = i;
- *data = &i->data;
- #ifdef OPT_BOEHM_GC
- if (sizeof(HASHDATA) == hp->items.datalen)
- {
- GC_REGISTER_FINALIZER(i->data.key,&hash_mem_finalizer,hp,0,0);
- }
- #endif
- }
-
- #ifdef HASH_DEBUG_PROFILE
- if ( DEBUG_PROFILE )
- profile_exit( prof );
- #endif
- return 0;
-}
-
-/*
- * hashrehash() - resize and rebuild hp->tab, the hash table
- */
-
-static void hashrehash( register struct hash *hp )
-{
- int i = ++hp->items.list;
- hp->items.more = i ? 2 * hp->items.nel : hp->inel;
- hp->items.next = (char *)hash_mem_alloc( hp->items.datalen, hp->items.more * hp->items.size );
- hp->items.free = 0;
-
- hp->items.lists[i].nel = hp->items.more;
- hp->items.lists[i].base = hp->items.next;
- hp->items.nel += hp->items.more;
-
- if ( hp->tab.base )
- hash_mem_free( hp->items.datalen, (char *)hp->tab.base );
-
- hp->tab.nel = hp->items.nel * hp->bloat;
- hp->tab.base = (ITEM **)hash_mem_alloc( hp->items.datalen, hp->tab.nel * sizeof(ITEM **) );
-
- memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) );
-
- for ( i = 0; i < hp->items.list; ++i )
- {
- int nel = hp->items.lists[i].nel;
- char *next = hp->items.lists[i].base;
-
- for ( ; nel--; next += hp->items.size )
- {
- register ITEM *i = (ITEM *)next;
- ITEM **ip = hp->tab.base + i->hdr.keyval % hp->tab.nel;
- /* code currently assumes rehashing only when there are no free items */
- assert( i->data.key != 0 );
-
- i->hdr.next = *ip;
- *ip = i;
- }
- }
-}
-
-void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data )
-{
- int i;
- for ( i = 0; i <= hp->items.list; ++i )
- {
- char * next = hp->items.lists[i].base;
- int nel = hp->items.lists[i].nel;
- if ( i == hp->items.list )
- nel -= hp->items.more;
-
- for ( ; nel--; next += hp->items.size )
- {
- ITEM * i = (ITEM *)next;
- if ( i->data.key != 0 ) /* Do not enumerate freed items. */
- f( &i->data, data );
- }
- }
-}
-
-/* --- */
-
-# define ALIGNED(x) ( ( x + sizeof( ITEM ) - 1 ) & ~( sizeof( ITEM ) - 1 ) )
-
-/*
- * hashinit() - initialize a hash table, returning a handle
- */
-
-struct hash *
-hashinit(
- int datalen,
- char *name )
-{
- struct hash *hp = (struct hash *)hash_mem_alloc( datalen, sizeof( *hp ) );
-
- hp->bloat = 3;
- hp->tab.nel = 0;
- hp->tab.base = (ITEM **)0;
- hp->items.more = 0;
- hp->items.free = 0;
- hp->items.datalen = datalen;
- hp->items.size = sizeof( struct hashhdr ) + ALIGNED( datalen );
- hp->items.list = -1;
- hp->items.nel = 0;
- hp->inel = 11 /* 47 */;
- hp->name = name;
-
- return hp;
-}
-
-/*
- * hashdone() - free a hash table, given its handle
- */
-
-void
-hashdone( struct hash *hp )
-{
- int i;
-
- if ( !hp )
- return;
-
- if ( DEBUG_MEM || DEBUG_PROFILE )
- hashstat( hp );
-
- if ( hp->tab.base )
- hash_mem_free( hp->items.datalen, (char *)hp->tab.base );
- for ( i = 0; i <= hp->items.list; ++i )
- hash_mem_free( hp->items.datalen, hp->items.lists[i].base );
- hash_mem_free( hp->items.datalen, (char *)hp );
-}
-
-static void * hash_mem_alloc(size_t datalen, size_t size)
-{
- if (sizeof(HASHDATA) == datalen)
- {
- return BJAM_MALLOC_RAW(size);
- }
- else
- {
- return BJAM_MALLOC(size);
- }
-}
-
-static void hash_mem_free(size_t datalen, void * data)
-{
- if (sizeof(HASHDATA) == datalen)
- {
- BJAM_FREE_RAW(data);
- }
- else
- {
- BJAM_FREE(data);
- }
-}
-
-#ifdef OPT_BOEHM_GC
-static void hash_mem_finalizer(char * key, struct hash * hp)
-{
- HASHDATA d;
- d.key = key;
- hash_free(hp,&d);
-}
-#endif
-
-
-/* ---- */
-
-static void hashstat( struct hash * hp )
-{
- ITEM * * tab = hp->tab.base;
- int nel = hp->tab.nel;
- int count = 0;
- int sets = 0;
- int run = ( tab[ nel - 1 ] != (ITEM *)0 );
- int i;
- int here;
-
- for ( i = nel; i > 0; --i )
- {
- if ( ( here = ( *tab++ != (ITEM *)0 ) ) )
- count++;
- if ( here && !run )
- sets++;
- run = here;
- }
-
- printf( "%s table: %d+%d+%d (%dK+%luK) items+table+hash, %f density\n",
- hp->name,
- count,
- hp->items.nel,
- hp->tab.nel,
- hp->items.nel * hp->items.size / 1024,
- (long unsigned)hp->tab.nel * sizeof( ITEM ** ) / 1024,
- (float)count / (float)sets );
-}
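
hash_keyval() above is the Jenkins one-at-a-time hash, and hash_bucket() reduces it modulo tab.nel to pick a chain. A self-contained sketch of that keying step (the bucket count of 11 is just an illustrative small table size; it is not how tab.nel is actually sized after rehashing):

    #include <stdio.h>

    /* Jenkins one-at-a-time hash, as in hash_keyval() above. */
    static unsigned int one_at_a_time( const unsigned char * key )
    {
        unsigned int hash = 0;
        while ( *key )
        {
            hash += *key++;
            hash += hash << 10;
            hash ^= hash >> 6;
        }
        hash += hash << 3;
        hash ^= hash >> 11;
        hash += hash << 15;
        return hash;
    }

    int main( void )
    {
        const char * keys[] = { "HDRSCAN", "HDRRULE", "SEARCH", "LOCATE" };
        unsigned int nel = 11;   /* illustrative bucket count */
        int i;
        for ( i = 0; i < 4; ++i )
            printf( "%-8s -> bucket %u\n", keys[ i ],
                    one_at_a_time( (const unsigned char *)keys[ i ] ) % nel );
        return 0;
    }
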
diff --git a/jam-files/engine/hash.h b/jam-files/engine/hash.h
deleted file mode 100644
index 7195b414..00000000
--- a/jam-files/engine/hash.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * hash.h - simple in-memory hashing routines
- */
-
-#ifndef BOOST_JAM_HASH_H
-#define BOOST_JAM_HASH_H
-
-typedef struct hashdata HASHDATA;
-
-struct hash * hashinit ( int datalen, char * name );
-int hashitem ( struct hash * hp, HASHDATA * * data, int enter );
-void hashdone ( struct hash * hp );
-void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data );
-int hash_free ( struct hash * hp, HASHDATA * data);
-
-#define hashenter( hp, data ) ( !hashitem( hp, data, !0 ) )
-#define hashcheck( hp, data ) hashitem( hp, data, 0 )
-
-#endif
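
The header above is the whole public surface: records handed to hashitem()/hashenter() must begin with a char * key so that struct hashdata in hash.c can overlay them, and the double pointer is updated to point at the stored copy. A usage sketch under that convention (hypothetical COUNTER record and count_word() helper; assumes it is compiled into the engine and linked with hash.c and newstr.c):

    #include "hash.h"
    #include "newstr.h"

    typedef struct counter
    {
        char * key;    /* must be the first member, as struct hashdata expects */
        int    count;
    } COUNTER;

    static struct hash * counters;

    static void count_word( char * word )
    {
        COUNTER rec, * r = &rec;

        if ( !counters )
            counters = hashinit( sizeof( COUNTER ), "counters" );

        rec.key = word;
        rec.count = 0;

        /* hashenter() copies the record in and is true only on first insertion;
         * afterwards r points at the stored copy, so updates persist. */
        if ( hashenter( counters, (HASHDATA * *)&r ) )
            r->key = newstr( word );   /* keep a stable key, as hdrmacro.c does */

        ++r->count;
    }
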
diff --git a/jam-files/engine/hcache.c b/jam-files/engine/hcache.c
deleted file mode 100644
index 70bb798c..00000000
--- a/jam-files/engine/hcache.c
+++ /dev/null
@@ -1,434 +0,0 @@
-/*
- * This file has been donated to Jam.
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "rules.h"
-# include "regexp.h"
-# include "headers.h"
-# include "newstr.h"
-# include "hash.h"
-# include "hcache.h"
-# include "variable.h"
-# include "search.h"
-
-#ifdef OPT_HEADER_CACHE_EXT
-
-/*
- * Craig W. McPheeters, Alias|Wavefront.
- *
- * hcache.c hcache.h - handle caching of #includes in source files.
- *
- * Create a cache of files scanned for headers. When starting jam, look for the
- * cache file and load it if present. When finished the binding phase, create a
- * new header cache. The cache contains files, their timestamps and the header
- * files found in their scan. During the binding phase of jam, look in the
- * header cache first for the headers contained in a file. If the cache is
- * present and valid, use its contents. This results in dramatic speedups with
- * large projects (eg. 3min -> 1min startup for one project.)
- *
- * External routines:
- * hcache_init() - read and parse the local .jamdeps file.
- * hcache_done() - write a new .jamdeps file.
- * hcache() - return list of headers on target. Use cache or do a scan.
- *
- * The dependency file format is an ASCII file with 1 line per target. Each line
- * has the following fields:
- * @boundname@ timestamp @file@ @file@ @file@ ... \n
- */
-
-typedef struct hcachedata HCACHEDATA ;
-
-struct hcachedata
-{
- char * boundname;
- time_t time;
- LIST * includes;
- LIST * hdrscan; /* the HDRSCAN value for this target */
- int age; /* if too old, we'll remove it from cache */
- HCACHEDATA * next;
-};
-
-
-static struct hash * hcachehash = 0;
-static HCACHEDATA * hcachelist = 0;
-
-static int queries = 0;
-static int hits = 0;
-
-#define CACHE_FILE_VERSION "version 4"
-#define CACHE_RECORD_HEADER "header"
-#define CACHE_RECORD_END "end"
-
-
-/*
- * Return the name of the header cache file. May return NULL.
- *
- * The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache
- * the result so the user can not change the cache file during header scanning.
- */
-
-static char * cache_name( void )
-{
- static char * name = 0;
- if ( !name )
- {
- LIST * hcachevar = var_get( "HCACHEFILE" );
-
- if ( hcachevar )
- {
- TARGET * t = bindtarget( hcachevar->string );
-
- pushsettings( t->settings );
- /* Do not expect the cache file to be generated, so pass 0 as the
- * third argument to search. Expect the location to be specified via
- * LOCATE, so pass 0 as the fourth argument.
- */
- t->boundname = search( t->name, &t->time, 0, 0 );
- popsettings( t->settings );
-
- if ( hcachevar )
- name = copystr( t->boundname );
- }
- }
- return name;
-}
-
-
-/*
- * Return the maximum age a cache entry can have before it is purged from the
- * cache.
- */
-
-static int cache_maxage( void )
-{
- int age = 100;
- LIST * var = var_get( "HCACHEMAXAGE" );
- if ( var )
- {
- age = atoi( var->string );
- if ( age < 0 )
- age = 0;
- }
- return age;
-}
-
-
-/*
- * Read a netstring. The caveat is that the string can not contain ASCII 0. The
- * returned value is as returned by newstr(), so it need not be freed.
- */
-
-char * read_netstring( FILE * f )
-{
- unsigned long len;
- static char * buf = NULL;
- static unsigned long buf_len = 0;
-
- if ( fscanf( f, " %9lu", &len ) != 1 )
- return NULL;
- if ( fgetc( f ) != (int)'\t' )
- return NULL;
-
- if ( len > 1024 * 64 )
- return NULL; /* sanity check */
-
- if ( len > buf_len )
- {
- unsigned long new_len = buf_len * 2;
- if ( new_len < len )
- new_len = len;
- buf = (char *)BJAM_REALLOC( buf, new_len + 1 );
- if ( buf )
- buf_len = new_len;
- }
-
- if ( !buf )
- return NULL;
-
- if ( fread( buf, 1, len, f ) != len )
- return NULL;
- if ( fgetc( f ) != (int)'\n' )
- return NULL;
-
- buf[ len ] = 0;
- return newstr( buf );
-}
-
-
-/*
- * Write a netstring.
- */
-
-void write_netstring( FILE * f, char const * s )
-{
- if ( !s )
- s = "";
- fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
-}
-
-
-void hcache_init()
-{
- HCACHEDATA cachedata;
- HCACHEDATA * c;
- FILE * f;
- char * version;
- int header_count = 0;
- char * hcachename;
-
- hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" );
-
- if ( !( hcachename = cache_name() ) )
- return;
-
- if ( !( f = fopen( hcachename, "rb" ) ) )
- return;
-
- version = read_netstring( f );
- if ( !version || strcmp( version, CACHE_FILE_VERSION ) )
- {
- fclose( f );
- return;
- }
-
- while ( 1 )
- {
- char * record_type;
- char * time_str;
- char * age_str;
- char * includes_count_str;
- char * hdrscan_count_str;
- int i;
- int count;
- LIST * l;
-
- record_type = read_netstring( f );
- if ( !record_type )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto bail;
- }
- if ( !strcmp( record_type, CACHE_RECORD_END ) )
- break;
- if ( strcmp( record_type, CACHE_RECORD_HEADER ) )
- {
- fprintf( stderr, "invalid %s with record separator <%s>\n",
- hcachename, record_type ? record_type : "<null>" );
- goto bail;
- }
-
- c = &cachedata;
-
- c->boundname = read_netstring( f );
- time_str = read_netstring( f );
- age_str = read_netstring( f );
- includes_count_str = read_netstring( f );
-
- if ( !c->boundname || !time_str || !age_str || !includes_count_str )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto bail;
- }
-
- c->time = atoi( time_str );
- c->age = atoi( age_str ) + 1;
-
- count = atoi( includes_count_str );
- for ( l = 0, i = 0; i < count; ++i )
- {
- char * s = read_netstring( f );
- if ( !s )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto bail;
- }
- l = list_new( l, s );
- }
- c->includes = l;
-
- hdrscan_count_str = read_netstring( f );
- if ( !hdrscan_count_str )
- {
- list_free( c->includes );
- fprintf( stderr, "invalid %s\n", hcachename );
- goto bail;
- }
-
- count = atoi( hdrscan_count_str );
- for ( l = 0, i = 0; i < count; ++i )
- {
- char * s = read_netstring( f );
- if ( !s )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto bail;
- }
- l = list_new( l, s );
- }
- c->hdrscan = l;
-
- if ( !hashenter( hcachehash, (HASHDATA * *)&c ) )
- {
- fprintf( stderr, "can't insert header cache item, bailing on %s\n",
- hcachename );
- goto bail;
- }
-
- c->next = hcachelist;
- hcachelist = c;
-
- ++header_count;
- }
-
- if ( DEBUG_HEADER )
- printf( "hcache read from file %s\n", hcachename );
-
- bail:
- fclose( f );
-}
-
-
-void hcache_done()
-{
- FILE * f;
- HCACHEDATA * c;
- int header_count = 0;
- char * hcachename;
- int maxage;
-
- if ( !hcachehash )
- return;
-
- if ( !( hcachename = cache_name() ) )
- return;
-
- if ( !( f = fopen( hcachename, "wb" ) ) )
- return;
-
- maxage = cache_maxage();
-
- /* Print out the version. */
- write_netstring( f, CACHE_FILE_VERSION );
-
- c = hcachelist;
- for ( c = hcachelist; c; c = c->next )
- {
- LIST * l;
- char time_str[ 30 ];
- char age_str[ 30 ];
- char includes_count_str[ 30 ];
- char hdrscan_count_str[ 30 ];
-
- if ( maxage == 0 )
- c->age = 0;
- else if ( c->age > maxage )
- continue;
-
- sprintf( includes_count_str, "%lu", (long unsigned) list_length( c->includes ) );
- sprintf( hdrscan_count_str, "%lu", (long unsigned) list_length( c->hdrscan ) );
- sprintf( time_str, "%lu", (long unsigned) c->time );
- sprintf( age_str, "%lu", (long unsigned) c->age );
-
- write_netstring( f, CACHE_RECORD_HEADER );
- write_netstring( f, c->boundname );
- write_netstring( f, time_str );
- write_netstring( f, age_str );
- write_netstring( f, includes_count_str );
- for ( l = c->includes; l; l = list_next( l ) )
- write_netstring( f, l->string );
- write_netstring( f, hdrscan_count_str );
- for ( l = c->hdrscan; l; l = list_next( l ) )
- write_netstring( f, l->string );
- fputs( "\n", f );
- ++header_count;
- }
- write_netstring( f, CACHE_RECORD_END );
-
- if ( DEBUG_HEADER )
- printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n",
- hcachename, header_count, queries ? 100.0 * hits / queries : 0 );
-
- fclose ( f );
-}
-
-
-LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan )
-{
- HCACHEDATA cachedata;
- HCACHEDATA * c = &cachedata;
-
- LIST * l = 0;
-
- ++queries;
-
- c->boundname = t->boundname;
-
- if (hashcheck (hcachehash, (HASHDATA **) &c))
- {
- if (c->time == t->time)
- {
- LIST *l1 = hdrscan, *l2 = c->hdrscan;
- while (l1 && l2) {
- if (l1->string != l2->string) {
- l1 = NULL;
- } else {
- l1 = list_next(l1);
- l2 = list_next(l2);
- }
- }
- if (l1 || l2) {
- if (DEBUG_HEADER)
- printf("HDRSCAN out of date in cache for %s\n",
- t->boundname);
-
- printf("HDRSCAN out of date for %s\n", t->boundname);
- printf(" real : ");
- list_print(hdrscan);
- printf("\n cached: ");
- list_print(c->hdrscan);
- printf("\n");
-
- list_free(c->includes);
- list_free(c->hdrscan);
- c->includes = 0;
- c->hdrscan = 0;
- } else {
- if (DEBUG_HEADER)
- printf ("using header cache for %s\n", t->boundname);
- c->age = 0;
- ++hits;
- l = list_copy (0, c->includes);
- return l;
- }
- } else {
- if (DEBUG_HEADER)
- printf ("header cache out of date for %s\n", t->boundname);
- list_free (c->includes);
- list_free(c->hdrscan);
- c->includes = 0;
- c->hdrscan = 0;
- }
- } else {
- if (hashenter (hcachehash, (HASHDATA **)&c)) {
- c->boundname = newstr (c->boundname);
- c->next = hcachelist;
- hcachelist = c;
- }
- }
-
- /* 'c' points at the cache entry. It's out of date. */
-
- l = headers1 (0, t->boundname, rec, re);
-
- c->time = t->time;
- c->age = 0;
- c->includes = list_copy (0, l);
- c->hdrscan = list_copy(0, hdrscan);
-
- return l;
-}
-
-#endif
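
The cache file is a flat sequence of netstrings: a decimal length, a tab, the raw bytes, and a newline, with "version 4", per-target "header" records, and a final "end" marker layered on top. A self-contained round-trip sketch of that framing (plain malloc() in place of the engine's newstr() interning; put_netstring/get_netstring are hypothetical names):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Write one <length>\t<bytes>\n record, like write_netstring() above. */
    static void put_netstring( FILE * f, const char * s )
    {
        fprintf( f, "%lu\t%s\n", (unsigned long)strlen( s ), s );
    }

    /* Read one record back; returns a malloc'd string, or NULL on bad input. */
    static char * get_netstring( FILE * f )
    {
        unsigned long len;
        char * buf;
        if ( fscanf( f, " %9lu", &len ) != 1 ) return NULL;
        if ( fgetc( f ) != '\t' ) return NULL;
        if ( len > 1024 * 64 ) return NULL;                 /* same sanity cap */
        if ( !( buf = malloc( len + 1 ) ) ) return NULL;
        if ( fread( buf, 1, len, f ) != len ) { free( buf ); return NULL; }
        if ( fgetc( f ) != '\n' ) { free( buf ); return NULL; }
        buf[ len ] = '\0';
        return buf;
    }

    int main( void )
    {
        FILE * f = tmpfile();
        char * s;
        if ( !f ) return 1;
        put_netstring( f, "version 4" );
        put_netstring( f, "header" );
        put_netstring( f, "end" );
        rewind( f );
        while ( ( s = get_netstring( f ) ) )
        {
            printf( "read: %s\n", s );
            free( s );
        }
        fclose( f );
        return 0;
    }
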
diff --git a/jam-files/engine/hcache.h b/jam-files/engine/hcache.h
deleted file mode 100644
index c316e3bc..00000000
--- a/jam-files/engine/hcache.h
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * This file is not part of Jam
- */
-
-/*
- * hcache.h - handle #includes in source files
- */
-#ifndef HCACHE_H
-# define HCACHE_H
-
-# include "regexp.h"
-# include "lists.h"
-
-void hcache_init(void);
-void hcache_done(void);
-LIST *hcache(TARGET *t, int rec, regexp *re[], LIST *hdrscan);
-
-#endif
diff --git a/jam-files/engine/hdrmacro.c b/jam-files/engine/hdrmacro.c
deleted file mode 100644
index 43031d48..00000000
--- a/jam-files/engine/hdrmacro.c
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "compile.h"
-# include "rules.h"
-# include "variable.h"
-# include "regexp.h"
-# include "hdrmacro.h"
-# include "hash.h"
-# include "newstr.h"
-# include "strings.h"
-
-/*
- * hdrmacro.c - handle header files that define macros used in
- * #include statements.
- *
- * we look for lines like "#define MACRO <....>" or '#define MACRO "...."'
- * in the target file. When one is found, we phony up a rule invocation
- * like:
- *
- * $(HDRRULE) <target> : <resolved included files> ;
- *
- * External routines:
- * headers1() - scan a target for "#include MACRO" lines and try
- * to resolve them when needed
- *
- * Internal routines:
- * headers1() - using regexp, scan a file and build include LIST
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 09/10/00 (seiwald) - replaced call to compile_rule with evaluate_rule,
- * so that headers() doesn't have to mock up a parse structure
- * just to invoke a rule.
- */
-
-/* this type is used to store a dictionary of file header macros */
-typedef struct header_macro
-{
- char * symbol;
- char * filename; /* we could maybe use a LIST here ?? */
-} HEADER_MACRO;
-
-static struct hash * header_macros_hash = 0;
-
-
-/*
- * headers() - scan a target for include files and call HDRRULE
- */
-
-# define MAXINC 10
-
-void
-macro_headers( TARGET *t )
-{
- static regexp *re = 0;
- FILE *f;
- char buf[ 1024 ];
-
- if ( DEBUG_HEADER )
- printf( "macro header scan for %s\n", t->name );
-
- /* this regexp is used to detect lines of the form */
- /* "#define MACRO <....>" or "#define MACRO "....." */
- /* in the header macro files.. */
- if ( re == 0 )
- {
- re = regex_compile(
- "^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*"
- "[<\"]([^\">]*)[\">].*$" );
- }
-
- if ( !( f = fopen( t->boundname, "r" ) ) )
- return;
-
- while ( fgets( buf, sizeof( buf ), f ) )
- {
- HEADER_MACRO var;
- HEADER_MACRO *v = &var;
-
- if ( regexec( re, buf ) && re->startp[1] )
- {
- /* we detected a line that looks like "#define MACRO filename" */
- re->endp[1][0] = '\0';
- re->endp[2][0] = '\0';
-
- if ( DEBUG_HEADER )
- printf( "macro '%s' used to define filename '%s' in '%s'\n",
- re->startp[1], re->startp[2], t->boundname );
-
- /* add macro definition to hash table */
- if ( !header_macros_hash )
- header_macros_hash = hashinit( sizeof( HEADER_MACRO ), "hdrmacros" );
-
- v->symbol = re->startp[1];
- v->filename = 0;
- if ( hashenter( header_macros_hash, (HASHDATA **)&v ) )
- {
- v->symbol = newstr( re->startp[1] ); /* never freed */
- v->filename = newstr( re->startp[2] ); /* never freed */
- }
- /* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */
- /* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */
- }
- }
-
- fclose( f );
-}
-
-
-char * macro_header_get( const char * macro_name )
-{
- HEADER_MACRO var;
- HEADER_MACRO * v = &var;
-
- v->symbol = (char* )macro_name;
-
- if ( header_macros_hash && hashcheck( header_macros_hash, (HASHDATA **)&v ) )
- {
- if ( DEBUG_HEADER )
- printf( "### macro '%s' evaluated to '%s'\n", macro_name, v->filename );
- return v->filename;
- }
- return 0;
-}
diff --git a/jam-files/engine/hdrmacro.h b/jam-files/engine/hdrmacro.h
deleted file mode 100644
index 08cc1116..00000000
--- a/jam-files/engine/hdrmacro.h
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * hdrmacro.h - parses header files for #define MACRO <filename> or
- * #define MACRO "filename" definitions
- */
-
-void macro_headers( TARGET *t );
-
-char* macro_header_get( const char* macro_name );
diff --git a/jam-files/engine/headers.c b/jam-files/engine/headers.c
deleted file mode 100644
index b9d8f637..00000000
--- a/jam-files/engine/headers.c
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "compile.h"
-# include "rules.h"
-# include "variable.h"
-# include "regexp.h"
-# include "headers.h"
-# include "hdrmacro.h"
-# include "newstr.h"
-
-#ifdef OPT_HEADER_CACHE_EXT
-# include "hcache.h"
-#endif
-
-/*
- * headers.c - handle #includes in source files
- *
- * Using regular expressions provided as the variable $(HDRSCAN),
- * headers() searches a file for #include files and phonies up a
- * rule invocation:
- *
- * $(HDRRULE) <target> : <include files> ;
- *
- * External routines:
- * headers() - scan a target for include files and call HDRRULE
- *
- * Internal routines:
- * headers1() - using regexp, scan a file and build include LIST
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 09/10/00 (seiwald) - replaced call to compile_rule with evaluate_rule,
- * so that headers() doesn't have to mock up a parse structure
- * just to invoke a rule.
- */
-
-#ifndef OPT_HEADER_CACHE_EXT
-static LIST *headers1( LIST *l, char *file, int rec, regexp *re[]);
-#endif
-
-/*
- * headers() - scan a target for include files and call HDRRULE
- */
-
-# define MAXINC 10
-
-void
-headers( TARGET *t )
-{
- LIST * hdrscan;
- LIST * hdrrule;
- #ifndef OPT_HEADER_CACHE_EXT
- LIST * headlist = 0;
- #endif
- regexp * re[ MAXINC ];
- int rec = 0;
-
- if ( !( hdrscan = var_get( "HDRSCAN" ) ) ||
- !( hdrrule = var_get( "HDRRULE" ) ) )
- return;
-
- if ( DEBUG_HEADER )
- printf( "header scan %s\n", t->name );
-
- /* Compile all regular expressions in HDRSCAN */
- while ( ( rec < MAXINC ) && hdrscan )
- {
- re[ rec++ ] = regex_compile( hdrscan->string );
- hdrscan = list_next( hdrscan );
- }
-
- /* Doctor up call to HDRRULE rule */
- /* Call headers1() to get LIST of included files. */
- {
- FRAME frame[1];
- frame_init( frame );
- lol_add( frame->args, list_new( L0, t->name ) );
-#ifdef OPT_HEADER_CACHE_EXT
- lol_add( frame->args, hcache( t, rec, re, hdrscan ) );
-#else
- lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) );
-#endif
-
- if ( lol_get( frame->args, 1 ) )
- {
- /* The third argument to HDRRULE is the bound name of
- * $(<) */
- lol_add( frame->args, list_new( L0, t->boundname ) );
-
- list_free( evaluate_rule( hdrrule->string, frame ) );
- }
-
- /* Clean up. */
- frame_free( frame );
- }
-}
-
-
-/*
- * headers1() - using regexp, scan a file and build include LIST.
- */
-
-#ifdef OPT_HEADER_CACHE_EXT
-LIST *
-#else
-static LIST *
-#endif
-headers1(
- LIST * l,
- char * file,
- int rec,
- regexp * re[] )
-{
- FILE * f;
- char buf[ 1024 ];
- int i;
- static regexp * re_macros = 0;
-
-#ifdef OPT_IMPROVED_PATIENCE_EXT
- static int count = 0;
- ++count;
- if ( ((count == 100) || !( count % 1000 )) && DEBUG_MAKE )
- printf("...patience...\n");
-#endif
-
- /* the following regexp is used to detect cases where a */
- /* file is included through a line like "#include MACRO" */
- if ( re_macros == 0 )
- re_macros = regex_compile(
- "^[ ]*#[ ]*include[ ]*([A-Za-z][A-Za-z0-9_]*).*$" );
-
- if ( !( f = fopen( file, "r" ) ) )
- return l;
-
- while ( fgets( buf, sizeof( buf ), f ) )
- {
- int size = strlen( buf );
- /* Remove trailing \r and \n, if any. */
- while ( ( size > 0 ) &&
- ( ( buf[ size - 1 ] == '\n' ) ||
- ( buf[ size - 1 ] == '\r' ) ) )
- {
- buf[ size - 1 ] = '\0';
- --size;
- }
-
- for ( i = 0; i < rec; ++i )
- if ( regexec( re[i], buf ) && re[i]->startp[1] )
- {
- re[i]->endp[1][0] = '\0';
-
- if ( DEBUG_HEADER )
- printf( "header found: %s\n", re[i]->startp[1] );
-
- l = list_new( l, newstr( re[i]->startp[1] ) );
- }
-
- /* special treatment for #include MACRO */
- if ( regexec( re_macros, buf ) && re_macros->startp[1] )
- {
- char* header_filename;
-
- re_macros->endp[1][0] = '\0';
-
- if ( DEBUG_HEADER )
- printf( "macro header found: %s", re_macros->startp[1] );
-
- header_filename = macro_header_get( re_macros->startp[1] );
- if ( header_filename )
- {
- if ( DEBUG_HEADER )
- printf( " resolved to '%s'\n", header_filename );
- l = list_new( l, newstr( header_filename ) );
- }
- else
- {
- if ( DEBUG_HEADER )
- printf( " ignored !!\n" );
- }
- }
- }
-
- fclose( f );
-
- return l;
-}
-
-
-void regerror( char * s )
-{
- printf( "re error %s\n", s );
-}
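
headers1() above applies each compiled $(HDRSCAN) expression to every line and takes the first capture group as the header name. The same extraction can be sketched with POSIX <regex.h>; the pattern below is an illustrative stand-in for what $(HDRSCAN) typically carries, not a value taken from Jambase:

    #include <stdio.h>
    #include <regex.h>

    int main( void )
    {
        const char * pat = "^[ \t]*#[ \t]*include[ \t]*[<\"]([^\">]*)[\">]";
        const char * lines[] = {
            "#include \"hash.h\"",
            "# include <stdio.h>",
            "int x; /* no include here */"
        };
        regex_t re;
        regmatch_t m[ 2 ];
        int i;

        if ( regcomp( &re, pat, REG_EXTENDED ) )
            return 1;

        for ( i = 0; i < 3; ++i )
            if ( !regexec( &re, lines[ i ], 2, m, 0 ) && m[ 1 ].rm_so >= 0 )
                printf( "header found: %.*s\n",
                        (int)( m[ 1 ].rm_eo - m[ 1 ].rm_so ),
                        lines[ i ] + m[ 1 ].rm_so );

        regfree( &re );
        return 0;
    }
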
diff --git a/jam-files/engine/headers.h b/jam-files/engine/headers.h
deleted file mode 100644
index 624475fe..00000000
--- a/jam-files/engine/headers.h
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * headers.h - handle #includes in source files
- */
-
-void headers( TARGET *t );
-
-#ifdef OPT_HEADER_CACHE_EXT
-struct regexp;
-LIST *headers1( LIST *l, char *file, int rec, struct regexp *re[] );
-#endif
diff --git a/jam-files/engine/jam.c b/jam-files/engine/jam.c
deleted file mode 100644
index e11d082b..00000000
--- a/jam-files/engine/jam.c
+++ /dev/null
@@ -1,632 +0,0 @@
-/*
- * /+\
- * +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- * \+/
- *
- * This file is part of jam.
- *
- * License is hereby granted to use this software and distribute it
- * freely, as long as this copyright notice is retained and modifications
- * are clearly marked.
- *
- * ALL WARRANTIES ARE HEREBY DISCLAIMED.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * jam.c - make redux
- *
- * See Jam.html for usage information.
- *
- * These comments document the code.
- *
- * The top half of the code is structured such:
- *
- * jam
- * / | \
- * +---+ | \
- * / | \
- * jamgram option \
- * / | \ \
- * / | \ \
- * / | \ |
- * scan | compile make
- * | | / | \ / | \
- * | | / | \ / | \
- * | | / | \ / | \
- * jambase parse | rules search make1
- * | | | \
- * | | | \
- * | | | \
- * builtins timestamp command execute
- * |
- * |
- * |
- * filesys
- *
- *
- * The support routines are called by all of the above, but themselves
- * are layered thus:
- *
- * variable|expand
- * / | | |
- * / | | |
- * / | | |
- * lists | | pathsys
- * \ | |
- * \ | |
- * \ | |
- * newstr |
- * \ |
- * \ |
- * \ |
- * hash
- *
- * Roughly, the modules are:
- *
- * builtins.c - jam's built-in rules
- * command.c - maintain lists of commands
- * compile.c - compile parsed jam statements
- * execunix.c - execute a shell script on UNIX
- * execvms.c - execute a shell script, ala VMS
- * expand.c - expand a buffer, given variable values
- * file*.c - scan directories and archives on *
- * hash.c - simple in-memory hashing routines
- * hdrmacro.c - handle header file parsing for filename macro definitions
- * headers.c - handle #includes in source files
- * jambase.c - compilable copy of Jambase
- * jamgram.y - jam grammar
- * lists.c - maintain lists of strings
- * make.c - bring a target up to date, once rules are in place
- * make1.c - execute command to bring targets up to date
- * newstr.c - string manipulation routines
- * option.c - command line option processing
- * parse.c - make and destroy parse trees as driven by the parser
- * path*.c - manipulate file names on *
- * hash.c - simple in-memory hashing routines
- * regexp.c - Henry Spencer's regexp
- * rules.c - access to RULEs, TARGETs, and ACTIONs
- * scan.c - the jam yacc scanner
- * search.c - find a target along $(SEARCH) or $(LOCATE)
- * timestamp.c - get the timestamp of a file or archive member
- * variable.c - handle jam multi-element variables
- *
- * 05/04/94 (seiwald) - async multiprocess (-j) support
- * 02/08/95 (seiwald) - -n implies -d2.
- * 02/22/95 (seiwald) - -v for version info.
- * 09/11/00 (seiwald) - PATCHLEVEL folded into VERSION.
- * 01/10/01 (seiwald) - pathsys.h split from filesys.h
- */
-
-
-#include "jam.h"
-#include "option.h"
-#include "patchlevel.h"
-
-/* These get various function declarations. */
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "compile.h"
-#include "builtins.h"
-#include "rules.h"
-#include "newstr.h"
-#include "scan.h"
-#include "timestamp.h"
-#include "make.h"
-#include "strings.h"
-#include "expand.h"
-#include "filesys.h"
-#include "output.h"
-
-/* Macintosh is "special" */
-#ifdef OS_MAC
- #include <QuickDraw.h>
-#endif
-
-/* And UNIX for this. */
-#ifdef unix
- #include <sys/utsname.h>
- #include <signal.h>
-#endif
-
-struct globs globs =
-{
- 0, /* noexec */
- 1, /* jobs */
- 0, /* quitquick */
- 0, /* newestfirst */
- 0, /* pipes action stdout and stderr merged to action output */
-#ifdef OS_MAC
- { 0, 0 }, /* debug - suppress tracing output */
-#else
- { 0, 1 }, /* debug ... */
-#endif
- 0, /* output commands, not run them */
- 0 /* action timeout */
-};
-
-/* Symbols to be defined as true for use in Jambase. */
-static char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 };
-
-
-/* Known for sure:
- * mac needs arg_enviro
- * OS2 needs extern environ
- */
-
-#ifdef OS_MAC
- #define use_environ arg_environ
- #ifdef MPW
- QDGlobals qd;
- #endif
-#endif
-
-/* on Win32-LCC */
-#if defined( OS_NT ) && defined( __LCC__ )
- #define use_environ _environ
-#endif
-
-# if defined( __MWERKS__)
- #define use_environ _environ
- extern char * * _environ;
-#endif
-
-#ifndef use_environ
- #define use_environ environ
- #if !defined( __WATCOM__ ) && !defined( OS_OS2 ) && !defined( OS_NT )
- extern char **environ;
- #endif
-#endif
-
-#if YYDEBUG != 0
- extern int yydebug;
-#endif
-
-#ifndef NDEBUG
-static void run_unit_tests()
-{
-#if defined( USE_EXECNT )
- extern void execnt_unit_test();
- execnt_unit_test();
-#endif
- string_unit_test();
- var_expand_unit_test();
-}
-#endif
-
-int anyhow = 0;
-
-#ifdef HAVE_PYTHON
- extern PyObject * bjam_call ( PyObject * self, PyObject * args );
- extern PyObject * bjam_import_rule ( PyObject * self, PyObject * args );
- extern PyObject * bjam_define_action( PyObject * self, PyObject * args );
- extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
- extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
- extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
-#endif
-
-char *saved_argv0;
-
-int main( int argc, char * * argv, char * * arg_environ )
-{
- int n;
- char * s;
- struct bjam_option optv[N_OPTS];
- char const * all = "all";
- int status;
- int arg_c = argc;
- char * * arg_v = argv;
- char const * progname = argv[0];
-
- saved_argv0 = argv[0];
-
- BJAM_MEM_INIT();
-
-# ifdef OS_MAC
- InitGraf(&qd.thePort);
-# endif
-
- --argc;
- ++argv;
-
- if ( getoptions( argc, argv, "-:l:d:j:p:f:gs:t:ano:qv", optv ) < 0 )
- {
- printf( "\nusage: %s [ options ] targets...\n\n", progname );
-
- printf( "-a Build all targets, even if they are current.\n" );
- printf( "-dx Set the debug level to x (0-9).\n" );
- printf( "-fx Read x instead of Jambase.\n" );
- /* printf( "-g Build from newest sources first.\n" ); */
- printf( "-jx Run up to x shell commands concurrently.\n" );
- printf( "-lx Limit actions to x number of seconds after which they are stopped.\n" );
- printf( "-n Don't actually execute the updating actions.\n" );
- printf( "-ox Write the updating actions to file x.\n" );
- printf( "-px x=0, pipes action stdout and stderr merged into action output.\n" );
- printf( "-q Quit quickly as soon as a target fails.\n" );
- printf( "-sx=y Set variable x=y, overriding environment.\n" );
- printf( "-tx Rebuild x, even if it is up-to-date.\n" );
- printf( "-v Print the version of jam and exit.\n" );
- printf( "--x Option is ignored.\n\n" );
-
- exit( EXITBAD );
- }
-
- /* Version info. */
- if ( ( s = getoptval( optv, 'v', 0 ) ) )
- {
- printf( "Boost.Jam " );
- printf( "Version %s. %s.\n", VERSION, OSMINOR );
- printf( " Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. \n" );
- printf( " Copyright 2001 David Turner.\n" );
- printf( " Copyright 2001-2004 David Abrahams.\n" );
- printf( " Copyright 2002-2008 Rene Rivera.\n" );
- printf( " Copyright 2003-2008 Vladimir Prus.\n" );
-
- return EXITOK;
- }
-
- /* Pick up interesting options. */
- if ( ( s = getoptval( optv, 'n', 0 ) ) )
- globs.noexec++, globs.debug[2] = 1;
-
- if ( ( s = getoptval( optv, 'p', 0 ) ) )
- {
- /* Undocumented -p3 (acts like both -p1 -p2) means separate pipe action
- * stdout and stderr.
- */
- globs.pipe_action = atoi( s );
- if ( ( 3 < globs.pipe_action ) || ( globs.pipe_action < 0 ) )
- {
- printf(
- "Invalid pipe descriptor '%d', valid values are -p[0..3].\n",
- globs.pipe_action );
- exit( EXITBAD );
- }
- }
-
- if ( ( s = getoptval( optv, 'q', 0 ) ) )
- globs.quitquick = 1;
-
- if ( ( s = getoptval( optv, 'a', 0 ) ) )
- anyhow++;
-
- if ( ( s = getoptval( optv, 'j', 0 ) ) )
- {
- globs.jobs = atoi( s );
- if (globs.jobs == 0)
- {
- printf("Invalid value for the '-j' option.\n");
- exit(EXITBAD);
- }
- }
-
- if ( ( s = getoptval( optv, 'g', 0 ) ) )
- globs.newestfirst = 1;
-
- if ( ( s = getoptval( optv, 'l', 0 ) ) )
- globs.timeout = atoi( s );
-
- /* Turn on/off debugging */
- for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n )
- {
- int i;
-
- /* First -d, turn off defaults. */
- if ( !n )
- for ( i = 0; i < DEBUG_MAX; ++i )
- globs.debug[i] = 0;
-
- i = atoi( s );
-
- if ( ( i < 0 ) || ( i >= DEBUG_MAX ) )
- {
- printf( "Invalid debug level '%s'.\n", s );
- continue;
- }
-
- /* n turns on levels 1-n. */
- /* +n turns on level n. */
- if ( *s == '+' )
- globs.debug[i] = 1;
- else while ( i )
- globs.debug[i--] = 1;
- }
-
- {
- PROFILE_ENTER( MAIN );
-
-#ifdef HAVE_PYTHON
- {
- PROFILE_ENTER( MAIN_PYTHON );
- Py_Initialize();
- {
- static PyMethodDef BjamMethods[] = {
- {"call", bjam_call, METH_VARARGS,
- "Call the specified bjam rule."},
- {"import_rule", bjam_import_rule, METH_VARARGS,
- "Imports Python callable to bjam."},
- {"define_action", bjam_define_action, METH_VARARGS,
- "Defines a command line action."},
- {"variable", bjam_variable, METH_VARARGS,
- "Obtains a variable from bjam's global module."},
- {"backtrace", bjam_backtrace, METH_VARARGS,
- "Returns bjam backtrace from the last call into Python."},
- {"caller", bjam_caller, METH_VARARGS,
- "Returns the module from which the last call into Python is made."},
- {NULL, NULL, 0, NULL}
- };
-
- Py_InitModule( "bjam", BjamMethods );
- }
- PROFILE_EXIT( MAIN_PYTHON );
- }
-#endif
-
-#ifndef NDEBUG
- run_unit_tests();
-#endif
-#if YYDEBUG != 0
- if ( DEBUG_PARSE )
- yydebug = 1;
-#endif
-
- /* Set JAMDATE. */
- var_set( "JAMDATE", list_new( L0, outf_time(time(0)) ), VAR_SET );
-
- /* Set JAM_VERSION. */
- var_set( "JAM_VERSION",
- list_new( list_new( list_new( L0,
- newstr( VERSION_MAJOR_SYM ) ),
- newstr( VERSION_MINOR_SYM ) ),
- newstr( VERSION_PATCH_SYM ) ),
- VAR_SET );
-
- /* Set JAMUNAME. */
-#ifdef unix
- {
- struct utsname u;
-
- if ( uname( &u ) >= 0 )
- {
- var_set( "JAMUNAME",
- list_new(
- list_new(
- list_new(
- list_new(
- list_new( L0,
- newstr( u.sysname ) ),
- newstr( u.nodename ) ),
- newstr( u.release ) ),
- newstr( u.version ) ),
- newstr( u.machine ) ), VAR_SET );
- }
- }
-#endif /* unix */
-
- /* Load up environment variables. */
-
- /* First into the global module, with splitting, for backward
- * compatibility.
- */
- var_defines( use_environ, 1 );
-
- /* Then into .ENVIRON, without splitting. */
- enter_module( bindmodule(".ENVIRON") );
- var_defines( use_environ, 0 );
- exit_module( bindmodule(".ENVIRON") );
-
- /*
- * Jam defined variables OS & OSPLAT. We load them after environment, so
- * that setting OS in environment does not change Jam's notion of the
- * current platform.
- */
- var_defines( othersyms, 1 );
-
- /* Load up variables set on command line. */
- for ( n = 0; ( s = getoptval( optv, 's', n ) ); ++n )
- {
- char *symv[2];
- symv[ 0 ] = s;
- symv[ 1 ] = 0;
- var_defines( symv, 1 );
- enter_module( bindmodule(".ENVIRON") );
- var_defines( symv, 0 );
- exit_module( bindmodule(".ENVIRON") );
- }
-
- /* Set the ARGV to reflect the complete list of arguments of invocation.
- */
- for ( n = 0; n < arg_c; ++n )
- var_set( "ARGV", list_new( L0, newstr( arg_v[n] ) ), VAR_APPEND );
-
- /* Initialize built-in rules. */
- load_builtins();
-
- /* Add the targets in the command line to the update list. */
- for ( n = 1; n < arg_c; ++n )
- {
- if ( arg_v[ n ][ 0 ] == '-' )
- {
- char * f = "-:l:d:j:f:gs:t:ano:qv";
- for ( ; *f; ++f ) if ( *f == arg_v[ n ][ 1 ] ) break;
- if ( ( f[ 1 ] == ':' ) && ( arg_v[ n ][ 2 ] == '\0' ) ) ++n;
- }
- else
- {
- mark_target_for_updating( arg_v[ n ] );
- }
- }
-
- if (!targets_to_update())
- mark_target_for_updating("all");
-
- /* Parse ruleset. */
- {
- FRAME frame[ 1 ];
- frame_init( frame );
- for ( n = 0; ( s = getoptval( optv, 'f', n ) ); ++n )
- parse_file( s, frame );
-
- if ( !n )
- parse_file( "+", frame );
- }
-
- status = yyanyerrors();
-
- /* Manually touch -t targets. */
- for ( n = 0; ( s = getoptval( optv, 't', n ) ); ++n )
- touch_target( s );
-
- /* If an output file is specified, set globs.cmdout to that. */
- if ( ( s = getoptval( optv, 'o', 0 ) ) )
- {
- if ( !( globs.cmdout = fopen( s, "w" ) ) )
- {
- printf( "Failed to write to '%s'\n", s );
- exit( EXITBAD );
- }
- ++globs.noexec;
- }
-
- /* The build system may set the PARALLELISM variable to override -j
- options. */
- {
- LIST *p = L0;
- p = var_get ("PARALLELISM");
- if (p)
- {
- int j = atoi (p->string);
- if (j == -1)
- {
- printf( "Invalid value of PARALLELISM: %s\n", p->string);
- }
- else
- {
- globs.jobs = j;
- }
- }
- }
-
- /* KEEP_GOING overrides -q option. */
- {
- LIST *p = L0;
- p = var_get ("KEEP_GOING");
- if (p)
- {
- int v = atoi (p->string);
- if (v == 0)
- globs.quitquick = 1;
- else
- globs.quitquick = 0;
- }
- }
-
- /* Now make target. */
- {
- PROFILE_ENTER( MAIN_MAKE );
-
- LIST * targets = targets_to_update();
- if (targets)
- {
- int targets_count = list_length( targets );
- const char * * targets2 = (const char * *)
- BJAM_MALLOC( targets_count * sizeof( char * ) );
- int n = 0;
- for ( ; targets; targets = list_next( targets ) )
- targets2[ n++ ] = targets->string;
- status |= make( targets_count, targets2, anyhow );
- free( targets2 );
- }
- else
- {
- status = last_update_now_status;
- }
-
- PROFILE_EXIT( MAIN_MAKE );
- }
-
- PROFILE_EXIT( MAIN );
- }
-
- if ( DEBUG_PROFILE )
- profile_dump();
-
- /* Widely scattered cleanup. */
- var_done();
- file_done();
- rules_done();
- stamps_done();
- str_done();
-
- /* Close cmdout. */
- if ( globs.cmdout )
- fclose( globs.cmdout );
-
-#ifdef HAVE_PYTHON
- Py_Finalize();
-#endif
-
- BJAM_MEM_CLOSE();
-
- return status ? EXITBAD : EXITOK;
-}
-
-#if defined(_WIN32)
-#include <windows.h>
-char *executable_path(char *argv0) {
- char buf[1024];
- DWORD ret = GetModuleFileName(NULL, buf, sizeof(buf));
- if (ret == 0 || ret == sizeof(buf)) return NULL;
- return strdup (buf);
-}
-#elif defined(__APPLE__) /* Not tested */
-#include <mach-o/dyld.h>
-char *executable_path(char *argv0) {
- char buf[1024];
- uint32_t size = sizeof(buf);
- int ret = _NSGetExecutablePath(buf, &size);
- if (ret != 0) return NULL;
- return strdup(buf);
-}
-#elif defined(sun) || defined(__sun) /* Not tested */
-#include <stdlib.h>
-
-char *executable_path(char *argv0) {
- return strdup(getexecname());
-}
-#elif defined(__FreeBSD__)
-#include <sys/sysctl.h>
-char *executable_path(char *argv0) {
- int mib[4];
- mib[0] = CTL_KERN;
- mib[1] = KERN_PROC;
- mib[2] = KERN_PROC_PATHNAME;
- mib[3] = -1;
- char buf[1024];
- size_t size = sizeof(buf);
- sysctl(mib, 4, buf, &size, NULL, 0);
- if (size == 0 || size == sizeof(buf)) return NULL;
- return strndup(buf, size);
-}
-#elif defined(__linux__)
-#include <unistd.h>
-char *executable_path(char *argv0) {
- char buf[1024];
- ssize_t ret = readlink("/proc/self/exe", buf, sizeof(buf));
- if (ret == 0 || ret == sizeof(buf)) return NULL;
- return strndup(buf, ret);
-}
-#else
-char *executable_path(char *argv0) {
- /* If argv0 is an absolute path, assume it is the right absolute path. */
- if (argv0[0] == '/')
- return strdup(argv0);
- return NULL;
-}
-#endif
diff --git a/jam-files/engine/jam.h b/jam-files/engine/jam.h
deleted file mode 100644
index 73a7a04c..00000000
--- a/jam-files/engine/jam.h
+++ /dev/null
@@ -1,579 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * jam.h - includes and globals for jam
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 04/21/94 (seiwald) - DGUX is __DGUX__, not just __DGUX.
- * 05/04/94 (seiwald) - new globs.jobs (-j jobs)
- * 11/01/94 (wingerd) - let us define path of Jambase at compile time.
- * 12/30/94 (wingerd) - changed command buffer size for NT (MS-DOS shell).
- * 02/22/95 (seiwald) - Jambase now in /usr/local/lib.
- * 04/30/95 (seiwald) - FreeBSD added. Live Free or Die.
- * 05/10/95 (seiwald) - SPLITPATH character set up here.
- * 08/20/95 (seiwald) - added LINUX.
- * 08/21/95 (seiwald) - added NCR.
- * 10/23/95 (seiwald) - added SCO.
- * 01/03/96 (seiwald) - SINIX (nixdorf) added.
- * 03/13/96 (seiwald) - Jambase now compiled in; remove JAMBASE variable.
- * 04/29/96 (seiwald) - AIX now has 31 and 42 OSVERs.
- * 11/21/96 (peterk) - added BeOS with MW CW mwcc
- * 12/21/96 (seiwald) - OSPLAT now defined for NT.
- * 07/19/99 (sickel) - Mac OS X Server and Client support added
- * 02/18/00 (belmonte)- Support for Cygwin.
- * 09/12/00 (seiwald) - OSSYMS split to OSMAJOR/OSMINOR/OSPLAT
- * 12/29/00 (seiwald) - OSVER dropped.
- */
-
-#ifndef JAM_H_VP_2003_08_01
-#define JAM_H_VP_2003_08_01
-
-#ifdef HAVE_PYTHON
-#include <Python.h>
-#endif
-
-/* Assume popen support is available unless known otherwise. */
-#define HAVE_POPEN 1
-
-/*
- * VMS, OPENVMS
- */
-
-#ifdef VMS
-
-#include <types.h>
-#include <file.h>
-#include <stat.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <stdlib.h>
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-#include <unistd.h>
-#include <unixlib.h>
-
-#define OSMINOR "OS=VMS"
-#define OSMAJOR "VMS=true"
-#define OS_VMS
-#define MAXLINE 1024 /* longest 'together' actions */
-#define SPLITPATH ','
-#define EXITOK 1
-#define EXITBAD 0
-#define DOWNSHIFT_PATHS
-
-/* This may be inaccurate. */
-#ifndef __DECC
-#define OSPLAT "OSPLAT=VAX"
-#endif
-
-#endif
-
-/*
- * Windows NT
- */
-
-#ifdef NT
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <malloc.h>
-#ifndef __MWERKS__
- #include <memory.h>
-#endif
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-
-#define OSMAJOR "NT=true"
-#define OSMINOR "OS=NT"
-#define OS_NT
-#define SPLITPATH ';'
-/* Windows NT 3.51 only allows 996 chars per line, but we deal with the problem
- * in "execnt.c".
- */
-#define MAXLINE (maxline()) /* longest 'together' actions */
-#define USE_EXECNT
-#define USE_PATHUNIX
-#define PATH_DELIM '\\'
-#define DOWNSHIFT_PATHS
-
-/* AS400 cross-compile from NT. */
-
-#ifdef AS400
- #undef OSMINOR
- #undef OSMAJOR
- #define OSMAJOR "AS400=true"
- #define OSMINOR "OS=AS400"
- #define OS_AS400
-#endif
-
-/* Metrowerks Standard Library on Windows. */
-
-#ifdef __MSL__
- #undef HAVE_POPEN
-#endif
-
-# endif
-
-/*
- * Windows MingW32
- */
-
-#ifdef MINGW
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <malloc.h>
-#include <memory.h>
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-
-#define OSMAJOR "MINGW=true"
-#define OSMINOR "OS=MINGW"
-#define OS_NT
-#define SPLITPATH ';'
-#define MAXLINE 996 /* longest 'together' actions */
-#define USE_EXECUNIX
-#define USE_PATHUNIX
-#define PATH_DELIM '\\'
-#define DOWNSHIFT_PATHS
-
-#endif
-
-/*
- * OS2
- */
-
-#ifdef __OS2__
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <malloc.h>
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-
-#define OSMAJOR "OS2=true"
-#define OSMINOR "OS=OS2"
-#define OS_OS2
-#define SPLITPATH ';'
-#define MAXLINE 996 /* longest 'together' actions */
-#define USE_EXECUNIX
-#define USE_PATHUNIX
-#define PATH_DELIM '\\'
-#define DOWNSHIFT_PATHS
-
-#ifdef __EMX__
- #define USE_FILEUNIX
-#endif
-
-#endif
-
-/*
- * Macintosh MPW
- */
-
-#ifdef macintosh
-
-#include <time.h>
-#include <stdlib.h>
-#include <string.h>
-#include <stdio.h>
-
-#define OSMAJOR "MAC=true"
-#define OSMINOR "OS=MAC"
-#define OS_MAC
-#define SPLITPATH ','
-
-#endif
-
-/*
- * God fearing UNIX.
- */
-
-#ifndef OSMINOR
-
-#define OSMAJOR "UNIX=true"
-#define USE_EXECUNIX
-#define USE_FILEUNIX
-#define USE_PATHUNIX
-#define PATH_DELIM '/'
-
-#ifdef _AIX
- #define unix
- #define MAXLINE 23552 /* 24k - 1k, longest 'together' actions */
- #define OSMINOR "OS=AIX"
- #define OS_AIX
- #define NO_VFORK
-#endif
-#ifdef AMIGA
- #define OSMINOR "OS=AMIGA"
- #define OS_AMIGA
-#endif
-#ifdef __BEOS__
- #define unix
- #define OSMINOR "OS=BEOS"
- #define OS_BEOS
- #define NO_VFORK
-#endif
-#ifdef __bsdi__
- #define OSMINOR "OS=BSDI"
- #define OS_BSDI
-#endif
-#if defined (COHERENT) && defined (_I386)
- #define OSMINOR "OS=COHERENT"
- #define OS_COHERENT
- #define NO_VFORK
-#endif
-#if defined(__cygwin__) || defined(__CYGWIN__)
- #define OSMINOR "OS=CYGWIN"
- #define OS_CYGWIN
-#endif
-#if defined(__FreeBSD__) && !defined(__DragonFly__)
- #define OSMINOR "OS=FREEBSD"
- #define OS_FREEBSD
-#endif
-#ifdef __DragonFly__
- #define OSMINOR "OS=DRAGONFLYBSD"
- #define OS_DRAGONFLYBSD
-#endif
-#ifdef __DGUX__
- #define OSMINOR "OS=DGUX"
- #define OS_DGUX
-#endif
-#ifdef __hpux
- #define OSMINOR "OS=HPUX"
- #define OS_HPUX
-#endif
-#ifdef __OPENNT
- #define unix
- #define OSMINOR "OS=INTERIX"
- #define OS_INTERIX
- #define NO_VFORK
-#endif
-#ifdef __sgi
- #define OSMINOR "OS=IRIX"
- #define OS_IRIX
- #define NO_VFORK
-#endif
-#ifdef __ISC
- #define OSMINOR "OS=ISC"
- #define OS_ISC
- #define NO_VFORK
-#endif
-#ifdef linux
- #define OSMINOR "OS=LINUX"
- #define OS_LINUX
-#endif
-#ifdef __Lynx__
- #define OSMINOR "OS=LYNX"
- #define OS_LYNX
- #define NO_VFORK
- #define unix
-#endif
-#ifdef __MACHTEN__
- #define OSMINOR "OS=MACHTEN"
- #define OS_MACHTEN
-#endif
-#ifdef mpeix
- #define unix
- #define OSMINOR "OS=MPEIX"
- #define OS_MPEIX
- #define NO_VFORK
-#endif
-#ifdef __MVS__
- #define unix
- #define OSMINOR "OS=MVS"
- #define OS_MVS
-#endif
-#ifdef _ATT4
- #define OSMINOR "OS=NCR"
- #define OS_NCR
-#endif
-#ifdef __NetBSD__
- #define unix
- #define OSMINOR "OS=NETBSD"
- #define OS_NETBSD
- #define NO_VFORK
-#endif
-#ifdef __QNX__
- #define unix
- #ifdef __QNXNTO__
- #define OSMINOR "OS=QNXNTO"
- #define OS_QNXNTO
- #else
- #define OSMINOR "OS=QNX"
- #define OS_QNX
- #define NO_VFORK
- #define MAXLINE 996
- #endif
-#endif
-#ifdef NeXT
- #ifdef __APPLE__
- #define OSMINOR "OS=RHAPSODY"
- #define OS_RHAPSODY
- #else
- #define OSMINOR "OS=NEXT"
- #define OS_NEXT
- #endif
-#endif
-#ifdef __APPLE__
- #define unix
- #define OSMINOR "OS=MACOSX"
- #define OS_MACOSX
-#endif
-#ifdef __osf__
- #ifndef unix
- #define unix
- #endif
- #define OSMINOR "OS=OSF"
- #define OS_OSF
-#endif
-#ifdef _SEQUENT_
- #define OSMINOR "OS=PTX"
- #define OS_PTX
-#endif
-#ifdef M_XENIX
- #define OSMINOR "OS=SCO"
- #define OS_SCO
- #define NO_VFORK
-#endif
-#ifdef sinix
- #define unix
- #define OSMINOR "OS=SINIX"
- #define OS_SINIX
-#endif
-#ifdef sun
- #if defined(__svr4__) || defined(__SVR4)
- #define OSMINOR "OS=SOLARIS"
- #define OS_SOLARIS
- #else
- #define OSMINOR "OS=SUNOS"
- #define OS_SUNOS
- #endif
-#endif
-#ifdef ultrix
- #define OSMINOR "OS=ULTRIX"
- #define OS_ULTRIX
-#endif
-#ifdef _UNICOS
- #define OSMINOR "OS=UNICOS"
- #define OS_UNICOS
-#endif
-#if defined(__USLC__) && !defined(M_XENIX)
- #define OSMINOR "OS=UNIXWARE"
- #define OS_UNIXWARE
-#endif
-#ifdef __OpenBSD__
- #define OSMINOR "OS=OPENBSD"
- #define OS_OPENBSD
- #define unix
-#endif
-#if defined (__FreeBSD_kernel__) && !defined(__FreeBSD__)
- #define OSMINOR "OS=KFREEBSD"
- #define OS_KFREEBSD
-#endif
-#ifndef OSMINOR
- #define OSMINOR "OS=UNKNOWN"
-#endif
-
-/* All the UNIX includes */
-
-#include <sys/types.h>
-#include <sys/stat.h>
-
-#ifndef OS_MPEIX
- #include <sys/file.h>
-#endif
-
-#include <fcntl.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-#include <unistd.h>
-
-#ifndef OS_QNX
- #include <memory.h>
-#endif
-
-#ifndef OS_ULTRIX
- #include <stdlib.h>
-#endif
-
-#if !defined( OS_BSDI ) && \
- !defined( OS_FREEBSD ) && \
- !defined( OS_DRAGONFLYBSD ) && \
- !defined( OS_NEXT ) && \
- !defined( OS_MACHTEN ) && \
- !defined( OS_MACOSX ) && \
- !defined( OS_RHAPSODY ) && \
- !defined( OS_MVS ) && \
- !defined( OS_OPENBSD )
- #include <malloc.h>
-#endif
-
-#endif
-
-/*
- * OSPLAT definitions - suppressed when it is a one-of-a-kind.
- */
-
-#if defined( _M_PPC ) || \
- defined( PPC ) || \
- defined( ppc ) || \
- defined( __powerpc__ ) || \
- defined( __ppc__ )
- #define OSPLAT "OSPLAT=PPC"
-#endif
-
-#if defined( _ALPHA_ ) || \
- defined( __alpha__ )
- #define OSPLAT "OSPLAT=AXP"
-#endif
-
-#if defined( _i386_ ) || \
- defined( __i386__ ) || \
- defined( __i386 ) || \
- defined( _M_IX86 )
- #define OSPLAT "OSPLAT=X86"
-#endif
-
-#if defined( __ia64__ ) || \
- defined( __IA64__ ) || \
- defined( __ia64 )
- #define OSPLAT "OSPLAT=IA64"
-#endif
-
-#if defined( __x86_64__ ) || \
- defined( __amd64__ ) || \
- defined( _M_AMD64 )
- #define OSPLAT "OSPLAT=X86_64"
-#endif
-
-
-#if defined( __sparc__ ) || \
- defined( __sparc )
- #define OSPLAT "OSPLAT=SPARC"
-#endif
-
-#ifdef __mips__
- #define OSPLAT "OSPLAT=MIPS"
-#endif
-
-#ifdef __arm__
- #define OSPLAT "OSPLAT=ARM"
-#endif
-
-#ifdef __s390__
- #define OSPLAT "OSPLAT=390"
-#endif
-
-#ifdef __hppa
- #define OSPLAT "OSPLAT=PARISC"
-#endif
-
-#ifndef OSPLAT
- #define OSPLAT ""
-#endif
-
-/*
- * Jam implementation misc.
- */
-
-#ifndef MAXLINE
- #define MAXLINE 102400 /* longest 'together' actions' */
-#endif
-
-#ifndef EXITOK
- #define EXITOK 0
- #define EXITBAD 1
-#endif
-
-#ifndef SPLITPATH
- #define SPLITPATH ':'
-#endif
-
-/* You probably do not need to muck with these. */
-
-#define MAXSYM 1024 /* longest symbol in the environment */
-#define MAXJPATH 1024 /* longest filename */
-
-#define MAXJOBS 64 /* silently enforced -j limit */
-#define MAXARGC 32 /* words in $(JAMSHELL) */
-
-/* Jam private definitions below. */
-
-#define DEBUG_MAX 14
-
-
-struct globs
-{
- int noexec;
- int jobs;
- int quitquick;
- int newestfirst; /* build newest sources first */
- int pipe_action;
- char debug[ DEBUG_MAX ];
- FILE * cmdout; /* print cmds, not run them */
- long timeout; /* number of seconds to limit actions to,
- * default 0 for no limit.
- */
- int dart; /* output build and test results formatted for Dart */
-};
-
-extern struct globs globs;
-
-#define DEBUG_MAKE ( globs.debug[ 1 ] ) /* show actions when executed */
-#define DEBUG_MAKEQ ( globs.debug[ 2 ] ) /* show even quiet actions */
-#define DEBUG_EXEC ( globs.debug[ 2 ] ) /* show text of actons */
-#define DEBUG_MAKEPROG ( globs.debug[ 3 ] ) /* show progress of make0 */
-#define DEBUG_BIND ( globs.debug[ 3 ] ) /* show when files bound */
-
-#define DEBUG_EXECCMD ( globs.debug[ 4 ] ) /* show execcmds()'s work */
-
-#define DEBUG_COMPILE ( globs.debug[ 5 ] ) /* show rule invocations */
-
-#define DEBUG_HEADER ( globs.debug[ 6 ] ) /* show result of header scan */
-#define DEBUG_BINDSCAN ( globs.debug[ 6 ] ) /* show result of dir scan */
-#define DEBUG_SEARCH ( globs.debug[ 6 ] ) /* show attempts at binding */
-
-#define DEBUG_VARSET ( globs.debug[ 7 ] ) /* show variable settings */
-#define DEBUG_VARGET ( globs.debug[ 8 ] ) /* show variable fetches */
-#define DEBUG_VAREXP ( globs.debug[ 8 ] ) /* show variable expansions */
-#define DEBUG_IF ( globs.debug[ 8 ] ) /* show 'if' calculations */
-#define DEBUG_LISTS ( globs.debug[ 9 ] ) /* show list manipulation */
-#define DEBUG_SCAN ( globs.debug[ 9 ] ) /* show scanner tokens */
-#define DEBUG_MEM ( globs.debug[ 9 ] ) /* show memory use */
-
-#define DEBUG_PROFILE ( globs.debug[ 10 ] ) /* dump rule execution times */
-#define DEBUG_PARSE ( globs.debug[ 11 ] ) /* debug parsing */
-#define DEBUG_GRAPH ( globs.debug[ 12 ] ) /* debug dependencies */
-#define DEBUG_FATE ( globs.debug[ 13 ] ) /* show changes to fate in make0() */
-
-/* Everyone gets the memory definitions. */
-#include "mem.h"
-
-/* They also get the profile functions. */
-#include "debug.h"
-
-#endif
diff --git a/jam-files/engine/jambase.c b/jam-files/engine/jambase.c
deleted file mode 100644
index b15282bc..00000000
--- a/jam-files/engine/jambase.c
+++ /dev/null
@@ -1,1691 +0,0 @@
-/* Generated by mkjambase from Jambase */
-char *jambase[] = {
-/* Jambase */
-"if $(NT)\n",
-"{\n",
-"SLASH ?= \\\\ ;\n",
-"}\n",
-"SLASH ?= / ;\n",
-"rule find-to-root ( dir : patterns + )\n",
-"{\n",
-"local globs = [ GLOB $(dir) : $(patterns) ] ;\n",
-"while ! $(globs) && $(dir:P) != $(dir)\n",
-"{\n",
-"dir = $(dir:P) ;\n",
-"globs = [ GLOB $(dir) : $(patterns) ] ;\n",
-"}\n",
-"return $(globs) ;\n",
-"}\n",
-".boost-build-file = ;\n",
-".bootstrap-file = ;\n",
-"BOOST_BUILD_PATH.user-value = $(BOOST_BUILD_PATH) ;\n",
-"if ! $(BOOST_BUILD_PATH) && $(UNIX)\n",
-"{\n",
-"BOOST_BUILD_PATH = /usr/share/boost-build ;\n",
-"}\n",
-"rule _poke ( module-name ? : variables + : value * )\n",
-"{\n",
-"module $(<)\n",
-"{\n",
-"$(>) = $(3) ;\n",
-"}\n",
-"}\n",
-"rule boost-build ( dir ? )\n",
-"{\n",
-"if $(.bootstrap-file)\n",
-"{\n",
-"ECHO \"Error: Illegal attempt to re-bootstrap the build system by invoking\" ;\n",
-"ECHO ;\n",
-"ECHO \" 'boost-build\" $(dir) \";'\" ;\n",
-"ECHO ;\n",
-"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
-"}\n",
-"BOOST_BUILD_PATH = $(dir:R=$(.boost-build-file:D)) $(BOOST_BUILD_PATH) ;\n",
-"_poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;\n",
-"local bootstrap-file = [ GLOB $(BOOST_BUILD_PATH) : bootstrap.jam ] ;\n",
-".bootstrap-file = $(bootstrap-file[1]) ;\n",
-"if ! $(.bootstrap-file)\n",
-"{\n",
-"ECHO \"Unable to load Boost.Build: could not find build system.\" ;\n",
-"ECHO --------------------------------------------------------- ;\n",
-"ECHO \"$(.boost-build-file) attempted to load the build system by invoking\" ;\n",
-"ECHO ;\n",
-"ECHO \" 'boost-build\" $(dir) \";'\" ;\n",
-"ECHO ;\n",
-"ECHO \"but we were unable to find \\\"bootstrap.jam\\\" in the specified directory\" ;\n",
-"ECHO \"or in BOOST_BUILD_PATH (searching \"$(BOOST_BUILD_PATH:J=\", \")\").\" ;\n",
-"ECHO ;\n",
-"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
-"}\n",
-"if [ MATCH .*(--debug-configuration).* : $(ARGV) ]\n",
-"{\n",
-"ECHO \"notice: loading Boost.Build from\"\n",
-"[ NORMALIZE_PATH $(.bootstrap-file:D) ] ;\n",
-"}\n",
-"include $(.bootstrap-file) ;\n",
-"}\n",
-"if [ MATCH .*(b2).* : $(ARGV[1]:BL) ] \n",
-"|| [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]\n",
-"|| $(BOOST_ROOT) # A temporary measure so Jam works with Boost.Build v1.\n",
-"{\n",
-"local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;\n",
-"local self = [ SELF_PATH ] ;\n",
-"local boost-build-relative = ../../share/boost-build ;\n",
-"local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;\n",
-"local boost-build-files =\n",
-"[ find-to-root [ PWD ] : boost-build.jam ]\n",
-"[ GLOB $(self-based-path) : boost-build.jam ]\n",
-"[ GLOB $(search-path) : boost-build.jam ] ;\n",
-".boost-build-file = $(boost-build-files[1]) ;\n",
-"if ! $(.boost-build-file)\n",
-"{\n",
-"ECHO \"Unable to load Boost.Build: could not find \\\"boost-build.jam\\\"\" ;\n",
-"ECHO --------------------------------------------------------------- ;\n",
-"if ! [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]\n",
-"{\n",
-"ECHO \"BOOST_ROOT must be set, either in the environment, or \" ;\n",
-"ECHO \"on the command-line with -sBOOST_ROOT=..., to the root\" ;\n",
-"ECHO \"of the boost installation.\" ;\n",
-"ECHO ;\n",
-"}\n",
-"ECHO \"Attempted search from\" [ PWD ] \"up to the root\" ;\n",
-"ECHO \"at\" $(self-based-path) ;\n",
-"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
-"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
-"}\n",
-"if [ MATCH .*(--debug-configuration).* : $(ARGV) ]\n",
-"{\n",
-"ECHO \"notice: found boost-build.jam at\"\n",
-"[ NORMALIZE_PATH $(.boost-build-file) ] ;\n",
-"}\n",
-"include $(.boost-build-file) ;\n",
-"if ! $(.bootstrap-file)\n",
-"{\n",
-"ECHO \"Unable to load Boost.Build\" ;\n",
-"ECHO -------------------------- ;\n",
-"ECHO \"\\\"$(.boost-build-file)\\\" was found by searching from\" [ PWD ] \"up to the root\" ;\n",
-"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
-"ECHO ;\n",
-"ECHO \"However, it failed to call the \\\"boost-build\\\" rule to indicate\" ;\n",
-"ECHO \"the location of the build system.\" ;\n",
-"ECHO ;\n",
-"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
-"}\n",
-"}\n",
-"else\n",
-"{\n",
-"if $(NT)\n",
-"{\n",
-"local SUPPORTED_TOOLSETS = \"BORLANDC\" \"VC7\" \"VISUALC\" \"VISUALC16\" \"INTELC\" \"WATCOM\"\n",
-"\"MINGW\" \"LCC\" ;\n",
-"TOOLSET = \"\" ;\n",
-"if $(JAM_TOOLSET)\n",
-"{\n",
-"local t ;\n",
-"for t in $(SUPPORTED_TOOLSETS)\n",
-"{\n",
-"$(t) = $($(t):J=\" \") ; # reconstitute paths with spaces in them\n",
-"if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }\n",
-"}\n",
-"if ! $(TOOLSET)\n",
-"{\n",
-"ECHO \"The JAM_TOOLSET environment variable is defined but its value\" ;\n",
-"ECHO \"is invalid, please use one of the following:\" ;\n",
-"ECHO ;\n",
-"for t in $(SUPPORTED_TOOLSETS) { ECHO \" \" $(t) ; }\n",
-"EXIT ;\n",
-"}\n",
-"}\n",
-"if ! $(TOOLSET)\n",
-"{\n",
-"if $(BCCROOT)\n",
-"{\n",
-"TOOLSET = BORLANDC ;\n",
-"BORLANDC = $(BCCROOT:J=\" \") ;\n",
-"}\n",
-"else if $(MSVC)\n",
-"{\n",
-"TOOLSET = VISUALC16 ;\n",
-"VISUALC16 = $(MSVC:J=\" \") ;\n",
-"}\n",
-"else if $(MSVCNT)\n",
-"{\n",
-"TOOLSET = VISUALC ;\n",
-"VISUALC = $(MSVCNT:J=\" \") ;\n",
-"}\n",
-"else if $(MSVCDir)\n",
-"{\n",
-"TOOLSET = VISUALC ;\n",
-"VISUALC = $(MSVCDir:J=\" \") ;\n",
-"}\n",
-"else if $(MINGW)\n",
-"{\n",
-"TOOLSET = MINGW ;\n",
-"}\n",
-"else\n",
-"{\n",
-"ECHO \"Jam cannot be run because, either:\" ;\n",
-"ECHO \" a. You didn't set BOOST_ROOT to indicate the root of your\" ;\n",
-"ECHO \" Boost installation.\" ;\n",
-"ECHO \" b. You are trying to use stock Jam but didn't indicate which\" ;\n",
-"ECHO \" compilation toolset to use. To do so, follow these simple\" ;\n",
-"ECHO \" instructions:\" ;\n",
-"ECHO ;\n",
-"ECHO \" - define one of the following environment variable, with the\" ;\n",
-"ECHO \" appropriate value according to this list:\" ;\n",
-"ECHO ;\n",
-"ECHO \" Variable Toolset Description\" ;\n",
-"ECHO ;\n",
-"ECHO \" BORLANDC Borland C++ BC++ install path\" ;\n",
-"ECHO \" VISUALC Microsoft Visual C++ VC++ install path\" ;\n",
-"ECHO \" VISUALC16 Microsoft Visual C++ 16 bit VC++ 16 bit install\" ;\n",
-"ECHO \" INTELC Intel C/C++ IC++ install path\" ;\n",
-"ECHO \" WATCOM Watcom C/C++ Watcom install path\" ;\n",
-"ECHO \" MINGW MinGW (gcc) MinGW install path\" ;\n",
-"ECHO \" LCC Win32-LCC LCC-Win32 install path\" ;\n",
-"ECHO ;\n",
-"ECHO \" - define the JAM_TOOLSET environment variable with the *name*\" ;\n",
-"ECHO \" of the toolset variable you want to use.\" ;\n",
-"ECHO ;\n",
-"ECHO \" e.g.: set VISUALC=C:\\\\Visual6\" ;\n",
-"ECHO \" set JAM_TOOLSET=VISUALC\" ;\n",
-"EXIT ;\n",
-"}\n",
-"}\n",
-"CP ?= copy ;\n",
-"RM ?= del /f/q ;\n",
-"SLASH ?= \\\\ ;\n",
-"SUFLIB ?= .lib ;\n",
-"SUFOBJ ?= .obj ;\n",
-"SUFEXE ?= .exe ;\n",
-"if $(TOOLSET) = BORLANDC\n",
-"{\n",
-"ECHO \"Compiler is Borland C++\" ;\n",
-"AR ?= tlib /C /P64 ;\n",
-"CC ?= bcc32 ;\n",
-"CCFLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus ;\n",
-"C++ ?= bcc32 ;\n",
-"C++FLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus -P ;\n",
-"LINK ?= $(CC) ;\n",
-"LINKFLAGS ?= $(CCFLAGS) ;\n",
-"STDLIBPATH ?= $(BORLANDC)\\\\lib ;\n",
-"STDHDRS ?= $(BORLANDC)\\\\include ;\n",
-"NOARSCAN ?= true ;\n",
-"}\n",
-"else if $(TOOLSET) = VISUALC16\n",
-"{\n",
-"ECHO \"Compiler is Microsoft Visual C++ 16 bit\" ;\n",
-"AR ?= lib /nologo ;\n",
-"CC ?= cl /nologo ;\n",
-"CCFLAGS ?= /D \\\"WIN\\\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= $(CC) ;\n",
-"LINKFLAGS ?= $(CCFLAGS) ;\n",
-"LINKLIBS ?=\n",
-"\\\"$(VISUALC16)\\\\lib\\\\mlibce.lib\\\"\n",
-"\\\"$(VISUALC16)\\\\lib\\\\oldnames.lib\\\"\n",
-";\n",
-"LINKLIBS ?= ;\n",
-"NOARSCAN ?= true ;\n",
-"OPTIM ?= \"\" ;\n",
-"STDHDRS ?= $(VISUALC16)\\\\include ;\n",
-"UNDEFFLAG ?= \"/u _\" ;\n",
-"}\n",
-"else if $(TOOLSET) = VISUALC\n",
-"{\n",
-"ECHO \"Compiler is Microsoft Visual C++\" ;\n",
-"AR ?= lib ;\n",
-"AS ?= masm386 ;\n",
-"CC ?= cl /nologo ;\n",
-"CCFLAGS ?= \"\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= link /nologo ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= \\\"$(VISUALC)\\\\lib\\\\advapi32.lib\\\"\n",
-"\\\"$(VISUALC)\\\\lib\\\\gdi32.lib\\\"\n",
-"\\\"$(VISUALC)\\\\lib\\\\user32.lib\\\"\n",
-"\\\"$(VISUALC)\\\\lib\\\\kernel32.lib\\\" ;\n",
-"OPTIM ?= \"\" ;\n",
-"STDHDRS ?= $(VISUALC)\\\\include ;\n",
-"UNDEFFLAG ?= \"/u _\" ;\n",
-"}\n",
-"else if $(TOOLSET) = VC7\n",
-"{\n",
-"ECHO \"Compiler is Microsoft Visual C++ .NET\" ;\n",
-"AR ?= lib ;\n",
-"AS ?= masm386 ;\n",
-"CC ?= cl /nologo ;\n",
-"CCFLAGS ?= \"\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= link /nologo ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= \\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\advapi32.lib\\\"\n",
-"\\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\gdi32.lib\\\"\n",
-"\\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\user32.lib\\\"\n",
-"\\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\kernel32.lib\\\" ;\n",
-"OPTIM ?= \"\" ;\n",
-"STDHDRS ?= \\\"$(VISUALC)\\\\include\\\"\n",
-"\\\"$(VISUALC)\\\\PlatformSDK\\\\include\\\" ;\n",
-"UNDEFFLAG ?= \"/u _\" ;\n",
-"}\n",
-"else if $(TOOLSET) = INTELC\n",
-"{\n",
-"ECHO \"Compiler is Intel C/C++\" ;\n",
-"if ! $(VISUALC)\n",
-"{\n",
-"ECHO \"As a special exception, when using the Intel C++ compiler, you need\" ;\n",
-"ECHO \"to define the VISUALC environment variable to indicate the location\" ;\n",
-"ECHO \"of your Visual C++ installation. Aborting..\" ;\n",
-"EXIT ;\n",
-"}\n",
-"AR ?= lib ;\n",
-"AS ?= masm386 ;\n",
-"CC ?= icl /nologo ;\n",
-"CCFLAGS ?= \"\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= link /nologo ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= $(VISUALC)\\\\lib\\\\advapi32.lib\n",
-"$(VISUALC)\\\\lib\\\\kernel32.lib\n",
-";\n",
-"OPTIM ?= \"\" ;\n",
-"STDHDRS ?= $(INTELC)\\include $(VISUALC)\\\\include ;\n",
-"UNDEFFLAG ?= \"/u _\" ;\n",
-"}\n",
-"else if $(TOOLSET) = WATCOM\n",
-"{\n",
-"ECHO \"Compiler is Watcom C/C++\" ;\n",
-"AR ?= wlib ;\n",
-"CC ?= wcc386 ;\n",
-"CCFLAGS ?= /zq /DWIN32 /I$(WATCOM)\\\\h ; # zq=quiet\n",
-"C++ ?= wpp386 ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"CP ?= copy ;\n",
-"DOT ?= . ;\n",
-"DOTDOT ?= .. ;\n",
-"LINK ?= wcl386 ;\n",
-"LINKFLAGS ?= /zq ; # zq=quiet\n",
-"LINKLIBS ?= ;\n",
-"MV ?= move ;\n",
-"NOARSCAN ?= true ;\n",
-"OPTIM ?= ;\n",
-"RM ?= del /f ;\n",
-"SLASH ?= \\\\ ;\n",
-"STDHDRS ?= $(WATCOM)\\\\h $(WATCOM)\\\\h\\\\nt ;\n",
-"SUFEXE ?= .exe ;\n",
-"SUFLIB ?= .lib ;\n",
-"SUFOBJ ?= .obj ;\n",
-"UNDEFFLAG ?= \"/u _\" ;\n",
-"}\n",
-"else if $(TOOLSET) = MINGW\n",
-"{\n",
-"ECHO \"Compiler is GCC with Mingw\" ;\n",
-"AR ?= ar -ru ;\n",
-"CC ?= gcc ;\n",
-"CCFLAGS ?= \"\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= $(CC) ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= \"\" ;\n",
-"OPTIM ?= ;\n",
-"SUFOBJ = .o ;\n",
-"SUFLIB = .a ;\n",
-"SLASH = / ;\n",
-"}\n",
-"else if $(TOOLSET) = LCC\n",
-"{\n",
-"ECHO \"Compiler is Win32-LCC\" ;\n",
-"AR ?= lcclib ;\n",
-"CC ?= lcc ;\n",
-"CCFLAGS ?= \"\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= lcclnk ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= \"\" ;\n",
-"OPTIM ?= ;\n",
-"NOARSCAN = true ;\n",
-"}\n",
-"else\n",
-"{\n",
-"EXIT On NT, set BCCROOT, MSVCNT, MINGW or MSVC to the root of the\n",
-"Borland or Microsoft directories. ;\n",
-"}\n",
-"}\n",
-"else if $(OS2)\n",
-"{\n",
-"local SUPPORTED_TOOLSETS = \"EMX\" \"WATCOM\" ;\n",
-"TOOLSET = \"\" ;\n",
-"if $(JAM_TOOLSET)\n",
-"{\n",
-"local t ;\n",
-"for t in $(SUPPORTED_TOOLSETS)\n",
-"{\n",
-"$(t) = $($(t):J=\" \") ; # reconstitute paths with spaces in them\n",
-"if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }\n",
-"}\n",
-"if ! $(TOOLSET)\n",
-"{\n",
-"ECHO \"The JAM_TOOLSET environment variable is defined but its value\" ;\n",
-"ECHO \"is invalid, please use one of the following:\" ;\n",
-"ECHO ;\n",
-"for t in $(SUPPORTED_TOOLSETS) { ECHO \" \" $(t) ; }\n",
-"EXIT ;\n",
-"}\n",
-"}\n",
-"if ! $(TOOLSET)\n",
-"{\n",
-"if $(watcom)\n",
-"{\n",
-"WATCOM = $(watcom:J=\" \") ;\n",
-"TOOLSET = WATCOM ;\n",
-"}\n",
-"else\n",
-"{\n",
-"ECHO \"Jam cannot be run because you didn't indicate which compilation toolset\" ;\n",
-"ECHO \"to use. To do so, follow these simple instructions:\" ;\n",
-"ECHO ;\n",
-"ECHO \" - define one of the following environment variable, with the\" ;\n",
-"ECHO \" appropriate value according to this list:\" ;\n",
-"ECHO ;\n",
-"ECHO \" Variable Toolset Description\" ;\n",
-"ECHO ;\n",
-"ECHO \" WATCOM Watcom C/C++ Watcom install path\" ;\n",
-"ECHO \" EMX EMX (gcc) EMX install path\" ;\n",
-"ECHO \" VISUALAGE IBM Visual Age C/C++ VisualAge install path\" ;\n",
-"ECHO ;\n",
-"ECHO \" - define the JAM_TOOLSET environment variable with the *name*\" ;\n",
-"ECHO \" of the toolset variable you want to use.\" ;\n",
-"ECHO ;\n",
-"ECHO \" e.g.: set WATCOM=C:\\WATCOM\" ;\n",
-"ECHO \" set JAM_TOOLSET=WATCOM\" ;\n",
-"ECHO ;\n",
-"EXIT ;\n",
-"}\n",
-"}\n",
-"RM = del /f ;\n",
-"CP = copy ;\n",
-"MV ?= move ;\n",
-"DOT ?= . ;\n",
-"DOTDOT ?= .. ;\n",
-"SUFLIB ?= .lib ;\n",
-"SUFOBJ ?= .obj ;\n",
-"SUFEXE ?= .exe ;\n",
-"if $(TOOLSET) = WATCOM\n",
-"{\n",
-"AR ?= wlib ;\n",
-"BINDIR ?= \\\\os2\\\\apps ;\n",
-"CC ?= wcc386 ;\n",
-"CCFLAGS ?= /zq /DOS2 /I$(WATCOM)\\\\h ; # zq=quiet\n",
-"C++ ?= wpp386 ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= wcl386 ;\n",
-"LINKFLAGS ?= /zq ; # zq=quiet\n",
-"LINKLIBS ?= ;\n",
-"NOARSCAN ?= true ;\n",
-"OPTIM ?= ;\n",
-"SLASH ?= \\\\ ;\n",
-"STDHDRS ?= $(WATCOM)\\\\h ;\n",
-"UNDEFFLAG ?= \"/u _\" ;\n",
-"}\n",
-"else if $(TOOLSET) = EMX\n",
-"{\n",
-"ECHO \"Compiler is GCC-EMX\" ;\n",
-"AR ?= ar -ru ;\n",
-"CC ?= gcc ;\n",
-"CCFLAGS ?= \"\" ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"LINK ?= $(CC) ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= \"\" ;\n",
-"OPTIM ?= ;\n",
-"SUFOBJ = .o ;\n",
-"SUFLIB = .a ;\n",
-"UNDEFFLAG ?= \"-U\" ;\n",
-"SLASH = / ;\n",
-"}\n",
-"else\n",
-"{\n",
-"EXIT \"Sorry, but the $(JAM_TOOLSET) toolset isn't supported for now\" ;\n",
-"}\n",
-"}\n",
-"else if $(VMS)\n",
-"{\n",
-"C++ ?= cxx ;\n",
-"C++FLAGS ?= ;\n",
-"CC ?= cc ;\n",
-"CCFLAGS ?= ;\n",
-"CHMOD ?= set file/prot= ;\n",
-"CP ?= copy/replace ;\n",
-"CRELIB ?= true ;\n",
-"DOT ?= [] ;\n",
-"DOTDOT ?= [-] ;\n",
-"EXEMODE ?= (w:e) ;\n",
-"FILEMODE ?= (w:r) ;\n",
-"HDRS ?= ;\n",
-"LINK ?= link ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"LINKLIBS ?= ;\n",
-"MKDIR ?= create/dir ;\n",
-"MV ?= rename ;\n",
-"OPTIM ?= \"\" ;\n",
-"RM ?= delete ;\n",
-"RUNVMS ?= mcr ;\n",
-"SHELLMODE ?= (w:er) ;\n",
-"SLASH ?= . ;\n",
-"STDHDRS ?= decc$library_include ;\n",
-"SUFEXE ?= .exe ;\n",
-"SUFLIB ?= .olb ;\n",
-"SUFOBJ ?= .obj ;\n",
-"switch $(OS)\n",
-"{\n",
-"case OPENVMS : CCFLAGS ?= /stand=vaxc ;\n",
-"case VMS : LINKLIBS ?= sys$library:vaxcrtl.olb/lib ;\n",
-"}\n",
-"}\n",
-"else if $(MAC)\n",
-"{\n",
-"local OPT ;\n",
-"CW ?= \"{CW}\" ;\n",
-"MACHDRS ?=\n",
-"\"$(UMACHDRS):Universal:Interfaces:CIncludes\"\n",
-"\"$(CW):MSL:MSL_C:MSL_Common:Include\"\n",
-"\"$(CW):MSL:MSL_C:MSL_MacOS:Include\" ;\n",
-"MACLIBS ?=\n",
-"\"$(CW):MacOS Support:Universal:Libraries:StubLibraries:Interfacelib\"\n",
-"\"$(CW):MacOS Support:Universal:Libraries:StubLibraries:Mathlib\" ;\n",
-"MPWLIBS ?=\n",
-"\"$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib\"\n",
-"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW.Lib\" ;\n",
-"MPWNLLIBS ?=\n",
-"\"$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib\"\n",
-"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW(NL).Lib\" ;\n",
-"SIOUXHDRS ?= ;\n",
-"SIOUXLIBS ?=\n",
-"\"$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL RuntimePPC.lib\"\n",
-"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL SIOUX.PPC.Lib\"\n",
-"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC.Lib\" ;\n",
-"C++ ?= mwcppc ;\n",
-"C++FLAGS ?= -w off -nomapcr ;\n",
-"CC ?= mwcppc ;\n",
-"CCFLAGS ?= -w off -nomapcr ;\n",
-"CP ?= duplicate -y ;\n",
-"DOT ?= \":\" ;\n",
-"DOTDOT ?= \"::\" ;\n",
-"HDRS ?= $(MACHDRS) $(MPWHDRS) ;\n",
-"LINK ?= mwlinkppc ;\n",
-"LINKFLAGS ?= -mpwtool -warn ;\n",
-"LINKLIBS ?= $(MACLIBS) $(MPWLIBS) ;\n",
-"MKDIR ?= newfolder ;\n",
-"MV ?= rename -y ;\n",
-"NOARSCAN ?= true ;\n",
-"OPTIM ?= ;\n",
-"RM ?= delete -y ;\n",
-"SLASH ?= \":\" ;\n",
-"STDHDRS ?= ;\n",
-"SUFLIB ?= .lib ;\n",
-"SUFOBJ ?= .o ;\n",
-"}\n",
-"else if $(OS) = BEOS && $(METROWERKS)\n",
-"{\n",
-"AR ?= mwld -xml -o ;\n",
-"BINDIR ?= /boot/apps ;\n",
-"CC ?= mwcc ;\n",
-"CCFLAGS ?= -nosyspath ;\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= -nosyspath ;\n",
-"FORTRAN ?= \"\" ;\n",
-"LIBDIR ?= /boot/develop/libraries ;\n",
-"LINK ?= mwld ;\n",
-"LINKFLAGS ?= \"\" ;\n",
-"MANDIR ?= /boot/documentation/\"Shell Tools\"/HTML ;\n",
-"NOARSCAN ?= true ;\n",
-"STDHDRS ?= /boot/develop/headers/posix ;\n",
-"}\n",
-"else if $(OS) = BEOS\n",
-"{\n",
-"BINDIR ?= /boot/apps ;\n",
-"CC ?= gcc ;\n",
-"C++ ?= $(CC) ;\n",
-"FORTRAN ?= \"\" ;\n",
-"LIBDIR ?= /boot/develop/libraries ;\n",
-"LINK ?= gcc ;\n",
-"LINKLIBS ?= -lnet ;\n",
-"NOARSCAN ?= true ;\n",
-"STDHDRS ?= /boot/develop/headers/posix ;\n",
-"}\n",
-"else if $(UNIX)\n",
-"{\n",
-"switch $(OS)\n",
-"{\n",
-"case AIX :\n",
-"LINKLIBS ?= -lbsd ;\n",
-"case AMIGA :\n",
-"CC ?= gcc ;\n",
-"YACC ?= \"bison -y\" ;\n",
-"case CYGWIN :\n",
-"CC ?= gcc ;\n",
-"CCFLAGS += -D__cygwin__ ;\n",
-"LEX ?= flex ;\n",
-"RANLIB ?= \"\" ;\n",
-"SUFEXE ?= .exe ;\n",
-"YACC ?= \"bison -y\" ;\n",
-"case DGUX :\n",
-"RANLIB ?= \"\" ;\n",
-"RELOCATE ?= true ;\n",
-"case HPUX :\n",
-"YACC = ;\n",
-"CFLAGS += -Ae ;\n",
-"CCFLAGS += -Ae ;\n",
-"RANLIB ?= \"\" ;\n",
-"case INTERIX :\n",
-"CC ?= gcc ;\n",
-"RANLIB ?= \"\" ;\n",
-"case IRIX :\n",
-"RANLIB ?= \"\" ;\n",
-"case MPEIX :\n",
-"CC ?= gcc ;\n",
-"C++ ?= gcc ;\n",
-"CCFLAGS += -D_POSIX_SOURCE ;\n",
-"HDRS += /usr/include ;\n",
-"RANLIB ?= \"\" ;\n",
-"NOARSCAN ?= true ;\n",
-"NOARUPDATE ?= true ;\n",
-"case MVS :\n",
-"RANLIB ?= \"\" ;\n",
-"case NEXT :\n",
-"AR ?= libtool -o ;\n",
-"RANLIB ?= \"\" ;\n",
-"case MACOSX :\n",
-"AR ?= libtool -o ;\n",
-"C++ ?= c++ ;\n",
-"MANDIR ?= /usr/local/share/man ;\n",
-"RANLIB ?= \"\" ;\n",
-"case NCR :\n",
-"RANLIB ?= \"\" ;\n",
-"case PTX :\n",
-"RANLIB ?= \"\" ;\n",
-"case QNX :\n",
-"AR ?= wlib ;\n",
-"CC ?= cc ;\n",
-"CCFLAGS ?= -Q ; # quiet\n",
-"C++ ?= $(CC) ;\n",
-"C++FLAGS ?= -Q ; # quiet\n",
-"LINK ?= $(CC) ;\n",
-"LINKFLAGS ?= -Q ; # quiet\n",
-"NOARSCAN ?= true ;\n",
-"RANLIB ?= \"\" ;\n",
-"case SCO :\n",
-"RANLIB ?= \"\" ;\n",
-"RELOCATE ?= true ;\n",
-"case SINIX :\n",
-"RANLIB ?= \"\" ;\n",
-"case SOLARIS :\n",
-"RANLIB ?= \"\" ;\n",
-"AR ?= \"/usr/ccs/bin/ar ru\" ;\n",
-"case UNICOS :\n",
-"NOARSCAN ?= true ;\n",
-"OPTIM ?= -O0 ;\n",
-"case UNIXWARE :\n",
-"RANLIB ?= \"\" ;\n",
-"RELOCATE ?= true ;\n",
-"}\n",
-"CCFLAGS ?= ;\n",
-"C++FLAGS ?= $(CCFLAGS) ;\n",
-"CHMOD ?= chmod ;\n",
-"CHGRP ?= chgrp ;\n",
-"CHOWN ?= chown ;\n",
-"LEX ?= lex ;\n",
-"LINKFLAGS ?= $(CCFLAGS) ;\n",
-"LINKLIBS ?= ;\n",
-"OPTIM ?= -O ;\n",
-"RANLIB ?= ranlib ;\n",
-"YACC ?= yacc ;\n",
-"YACCFILES ?= y.tab ;\n",
-"YACCFLAGS ?= -d ;\n",
-"}\n",
-"AR ?= ar ru ;\n",
-"AS ?= as ;\n",
-"ASFLAGS ?= ;\n",
-"AWK ?= awk ;\n",
-"BINDIR ?= /usr/local/bin ;\n",
-"C++ ?= cc ;\n",
-"C++FLAGS ?= ;\n",
-"CC ?= cc ;\n",
-"CCFLAGS ?= ;\n",
-"CP ?= cp -f ;\n",
-"CRELIB ?= ;\n",
-"DOT ?= . ;\n",
-"DOTDOT ?= .. ;\n",
-"EXEMODE ?= 711 ;\n",
-"FILEMODE ?= 644 ;\n",
-"FORTRAN ?= f77 ;\n",
-"FORTRANFLAGS ?= ;\n",
-"HDRS ?= ;\n",
-"INSTALLGRIST ?= installed ;\n",
-"JAMFILE ?= Jamfile ;\n",
-"JAMRULES ?= Jamrules ;\n",
-"LEX ?= ;\n",
-"LIBDIR ?= /usr/local/lib ;\n",
-"LINK ?= $(CC) ;\n",
-"LINKFLAGS ?= ;\n",
-"LINKLIBS ?= ;\n",
-"LN ?= ln ;\n",
-"MANDIR ?= /usr/local/man ;\n",
-"MKDIR ?= mkdir ;\n",
-"MV ?= mv -f ;\n",
-"OPTIM ?= ;\n",
-"RCP ?= rcp ;\n",
-"RM ?= rm -f ;\n",
-"RSH ?= rsh ;\n",
-"SED ?= sed ;\n",
-"SHELLHEADER ?= \"#!/bin/sh\" ;\n",
-"SHELLMODE ?= 755 ;\n",
-"SLASH ?= / ;\n",
-"STDHDRS ?= /usr/include ;\n",
-"SUFEXE ?= \"\" ;\n",
-"SUFLIB ?= .a ;\n",
-"SUFOBJ ?= .o ;\n",
-"UNDEFFLAG ?= \"-u _\" ;\n",
-"YACC ?= ;\n",
-"YACCFILES ?= ;\n",
-"YACCFLAGS ?= ;\n",
-"HDRPATTERN =\n",
-"\"^[ ]*#[ ]*include[ ]*[<\\\"]([^\\\">]*)[\\\">].*$\" ;\n",
-"OSFULL = $(OS)$(OSVER)$(OSPLAT) $(OS)$(OSPLAT) $(OS)$(OSVER) $(OS) ;\n",
-"DEPENDS all : shell files lib exe obj ;\n",
-"DEPENDS all shell files lib exe obj : first ;\n",
-"NOTFILE all first shell files lib exe obj dirs clean uninstall ;\n",
-"ALWAYS clean uninstall ;\n",
-"rule As\n",
-"{\n",
-"DEPENDS $(<) : $(>) ;\n",
-"ASFLAGS on $(<) += $(ASFLAGS) $(SUBDIRASFLAGS) ;\n",
-"}\n",
-"rule Bulk\n",
-"{\n",
-"local i ;\n",
-"for i in $(>)\n",
-"{\n",
-"File $(i:D=$(<)) : $(i) ;\n",
-"}\n",
-"}\n",
-"rule Cc\n",
-"{\n",
-"local _h ;\n",
-"DEPENDS $(<) : $(>) ;\n",
-"CCFLAGS on $(<) += $(CCFLAGS) $(SUBDIRCCFLAGS) ;\n",
-"if $(RELOCATE)\n",
-"{\n",
-"CcMv $(<) : $(>) ;\n",
-"}\n",
-"_h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n",
-"if $(VMS) && $(_h)\n",
-"{\n",
-"SLASHINC on $(<) = \"/inc=(\" $(_h[1]) ,$(_h[2-]) \")\" ;\n",
-"}\n",
-"else if $(MAC) && $(_h)\n",
-"{\n",
-"local _i _j ;\n",
-"_j = $(_h[1]) ;\n",
-"for _i in $(_h[2-])\n",
-"{\n",
-"_j = $(_j),$(_i) ;\n",
-"}\n",
-"MACINC on $(<) = \\\"$(_j)\\\" ;\n",
-"}\n",
-"}\n",
-"rule C++\n",
-"{\n",
-"local _h ;\n",
-"DEPENDS $(<) : $(>) ;\n",
-"C++FLAGS on $(<) += $(C++FLAGS) $(SUBDIRC++FLAGS) ;\n",
-"if $(RELOCATE)\n",
-"{\n",
-"CcMv $(<) : $(>) ;\n",
-"}\n",
-"_h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n",
-"if $(VMS) && $(_h)\n",
-"{\n",
-"SLASHINC on $(<) = \"/inc=(\" $(_h[1]) ,$(_h[2-]) \")\" ;\n",
-"}\n",
-"else if $(MAC) && $(_h)\n",
-"{\n",
-"local _i _j ;\n",
-"_j = $(_h[1]) ;\n",
-"for _i in $(_h[2-])\n",
-"{\n",
-"_j = $(_j),$(_i) ;\n",
-"}\n",
-"MACINC on $(<) = \\\"$(_j)\\\" ;\n",
-"}\n",
-"}\n",
-"rule Chmod\n",
-"{\n",
-"if $(CHMOD) { Chmod1 $(<) ; }\n",
-"}\n",
-"rule File\n",
-"{\n",
-"DEPENDS files : $(<) ;\n",
-"DEPENDS $(<) : $(>) ;\n",
-"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
-"MODE on $(<) = $(FILEMODE) ;\n",
-"Chmod $(<) ;\n",
-"}\n",
-"rule Fortran\n",
-"{\n",
-"DEPENDS $(<) : $(>) ;\n",
-"}\n",
-"rule GenFile\n",
-"{\n",
-"local _t = [ FGristSourceFiles $(<) ] ;\n",
-"local _s = [ FAppendSuffix $(>[1]) : $(SUFEXE) ] ;\n",
-"Depends $(_t) : $(_s) $(>[2-]) ;\n",
-"GenFile1 $(_t) : $(_s) $(>[2-]) ;\n",
-"Clean clean : $(_t) ;\n",
-"}\n",
-"rule GenFile1\n",
-"{\n",
-"MakeLocate $(<) : $(LOCATE_SOURCE) ;\n",
-"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
-"}\n",
-"rule HardLink\n",
-"{\n",
-"DEPENDS files : $(<) ;\n",
-"DEPENDS $(<) : $(>) ;\n",
-"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
-"}\n",
-"rule HdrMacroFile\n",
-"{\n",
-"HDRMACRO $(<) ;\n",
-"}\n",
-"rule HdrRule\n",
-"{\n",
-"local s ;\n",
-"if $(HDRGRIST)\n",
-"{\n",
-"s = $(>:G=$(HDRGRIST)) ;\n",
-"} else {\n",
-"s = $(>) ;\n",
-"}\n",
-"INCLUDES $(<) : $(s) ;\n",
-"SEARCH on $(s) = $(HDRSEARCH) ;\n",
-"NOCARE $(s) ;\n",
-"HDRSEARCH on $(s) = $(HDRSEARCH) ;\n",
-"HDRSCAN on $(s) = $(HDRSCAN) ;\n",
-"HDRRULE on $(s) = $(HDRRULE) ;\n",
-"HDRGRIST on $(s) = $(HDRGRIST) ;\n",
-"}\n",
-"rule InstallInto\n",
-"{\n",
-"local i t ;\n",
-"t = $(>:G=$(INSTALLGRIST)) ;\n",
-"Depends install : $(t) ;\n",
-"Clean uninstall : $(t) ;\n",
-"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
-"MakeLocate $(t) : $(<) ;\n",
-"for i in $(>)\n",
-"{\n",
-"local tt = $(i:G=$(INSTALLGRIST)) ;\n",
-"Depends $(tt) : $(i) ;\n",
-"Install $(tt) : $(i) ;\n",
-"Chmod $(tt) ;\n",
-"if $(OWNER) && $(CHOWN)\n",
-"{\n",
-"Chown $(tt) ;\n",
-"OWNER on $(tt) = $(OWNER) ;\n",
-"}\n",
-"if $(GROUP) && $(CHGRP)\n",
-"{\n",
-"Chgrp $(tt) ;\n",
-"GROUP on $(tt) = $(GROUP) ;\n",
-"}\n",
-"}\n",
-"}\n",
-"rule InstallBin\n",
-"{\n",
-"local _t = [ FAppendSuffix $(>) : $(SUFEXE) ] ;\n",
-"InstallInto $(<) : $(_t) ;\n",
-"MODE on $(_t:G=installed) = $(EXEMODE) ;\n",
-"}\n",
-"rule InstallFile\n",
-"{\n",
-"InstallInto $(<) : $(>) ;\n",
-"MODE on $(>:G=installed) = $(FILEMODE) ;\n",
-"}\n",
-"rule InstallLib\n",
-"{\n",
-"InstallInto $(<) : $(>) ;\n",
-"MODE on $(>:G=installed) = $(FILEMODE) ;\n",
-"}\n",
-"rule InstallMan\n",
-"{\n",
-"local i s d ;\n",
-"for i in $(>)\n",
-"{\n",
-"switch $(i:S)\n",
-"{\n",
-"case .1 : s = 1 ; case .2 : s = 2 ; case .3 : s = 3 ;\n",
-"case .4 : s = 4 ; case .5 : s = 5 ; case .6 : s = 6 ;\n",
-"case .7 : s = 7 ; case .8 : s = 8 ; case .l : s = l ;\n",
-"case .n : s = n ; case .man : s = 1 ;\n",
-"}\n",
-"d = man$(s) ;\n",
-"InstallInto $(d:R=$(<)) : $(i) ;\n",
-"}\n",
-"MODE on $(>:G=installed) = $(FILEMODE) ;\n",
-"}\n",
-"rule InstallShell\n",
-"{\n",
-"InstallInto $(<) : $(>) ;\n",
-"MODE on $(>:G=installed) = $(SHELLMODE) ;\n",
-"}\n",
-"rule Lex\n",
-"{\n",
-"LexMv $(<) : $(>) ;\n",
-"DEPENDS $(<) : $(>) ;\n",
-"MakeLocate $(<) : $(LOCATE_SOURCE) ;\n",
-"Clean clean : $(<) ;\n",
-"}\n",
-"rule Library\n",
-"{\n",
-"LibraryFromObjects $(<) : $(>:S=$(SUFOBJ)) ;\n",
-"Objects $(>) ;\n",
-"}\n",
-"rule LibraryFromObjects\n",
-"{\n",
-"local _i _l _s ;\n",
-"_s = [ FGristFiles $(>) ] ;\n",
-"_l = $(<:S=$(SUFLIB)) ;\n",
-"if $(KEEPOBJS)\n",
-"{\n",
-"DEPENDS obj : $(_s) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"DEPENDS lib : $(_l) ;\n",
-"}\n",
-"if ! $(_l:D)\n",
-"{\n",
-"MakeLocate $(_l) $(_l)($(_s:BS)) : $(LOCATE_TARGET) ;\n",
-"}\n",
-"if $(NOARSCAN)\n",
-"{\n",
-"DEPENDS $(_l) : $(_s) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"DEPENDS $(_l) : $(_l)($(_s:BS)) ;\n",
-"for _i in $(_s)\n",
-"{\n",
-"DEPENDS $(_l)($(_i:BS)) : $(_i) ;\n",
-"}\n",
-"}\n",
-"Clean clean : $(_l) ;\n",
-"if $(CRELIB) { CreLib $(_l) : $(_s[1]) ; }\n",
-"Archive $(_l) : $(_s) ;\n",
-"if $(RANLIB) { Ranlib $(_l) ; }\n",
-"if ! ( $(NOARSCAN) || $(KEEPOBJS) ) { RmTemps $(_l) : $(_s) ; }\n",
-"}\n",
-"rule Link\n",
-"{\n",
-"MODE on $(<) = $(EXEMODE) ;\n",
-"Chmod $(<) ;\n",
-"}\n",
-"rule LinkLibraries\n",
-"{\n",
-"local _t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;\n",
-"DEPENDS $(_t) : $(>:S=$(SUFLIB)) ;\n",
-"NEEDLIBS on $(_t) += $(>:S=$(SUFLIB)) ;\n",
-"}\n",
-"rule Main\n",
-"{\n",
-"MainFromObjects $(<) : $(>:S=$(SUFOBJ)) ;\n",
-"Objects $(>) ;\n",
-"}\n",
-"rule MainFromObjects\n",
-"{\n",
-"local _s _t ;\n",
-"_s = [ FGristFiles $(>) ] ;\n",
-"_t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;\n",
-"if $(_t) != $(<)\n",
-"{\n",
-"DEPENDS $(<) : $(_t) ;\n",
-"NOTFILE $(<) ;\n",
-"}\n",
-"DEPENDS exe : $(_t) ;\n",
-"DEPENDS $(_t) : $(_s) ;\n",
-"MakeLocate $(_t) : $(LOCATE_TARGET) ;\n",
-"Clean clean : $(_t) ;\n",
-"Link $(_t) : $(_s) ;\n",
-"}\n",
-"rule MakeLocate\n",
-"{\n",
-"if $(>)\n",
-"{\n",
-"LOCATE on $(<) = $(>) ;\n",
-"Depends $(<) : $(>[1]) ;\n",
-"MkDir $(>[1]) ;\n",
-"}\n",
-"}\n",
-"rule MkDir\n",
-"{\n",
-"NOUPDATE $(<) ;\n",
-"if $(<) != $(DOT) && ! $($(<)-mkdir)\n",
-"{\n",
-"local s ;\n",
-"$(<)-mkdir = true ;\n",
-"MkDir1 $(<) ;\n",
-"Depends dirs : $(<) ;\n",
-"s = $(<:P) ;\n",
-"if $(NT)\n",
-"{\n",
-"switch $(s)\n",
-"{\n",
-"case *: : s = ;\n",
-"case *:\\\\ : s = ;\n",
-"}\n",
-"}\n",
-"if $(s) && $(s) != $(<)\n",
-"{\n",
-"Depends $(<) : $(s) ;\n",
-"MkDir $(s) ;\n",
-"}\n",
-"else if $(s)\n",
-"{\n",
-"NOTFILE $(s) ;\n",
-"}\n",
-"}\n",
-"}\n",
-"rule Object\n",
-"{\n",
-"local h ;\n",
-"Clean clean : $(<) ;\n",
-"MakeLocate $(<) : $(LOCATE_TARGET) ;\n",
-"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
-"HDRS on $(<) = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n",
-"if $(SEARCH_SOURCE)\n",
-"{\n",
-"h = $(SEARCH_SOURCE) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"h = \"\" ;\n",
-"}\n",
-"HDRRULE on $(>) = HdrRule ;\n",
-"HDRSCAN on $(>) = $(HDRPATTERN) ;\n",
-"HDRSEARCH on $(>) = $(HDRS) $(SUBDIRHDRS) $(h) $(STDHDRS) ;\n",
-"HDRGRIST on $(>) = $(HDRGRIST) ;\n",
-"switch $(>:S)\n",
-"{\n",
-"case .asm : As $(<) : $(>) ;\n",
-"case .c : Cc $(<) : $(>) ;\n",
-"case .C : C++ $(<) : $(>) ;\n",
-"case .cc : C++ $(<) : $(>) ;\n",
-"case .cpp : C++ $(<) : $(>) ;\n",
-"case .f : Fortran $(<) : $(>) ;\n",
-"case .l : Cc $(<) : $(<:S=.c) ;\n",
-"Lex $(<:S=.c) : $(>) ;\n",
-"case .s : As $(<) : $(>) ;\n",
-"case .y : Cc $(<) : $(<:S=.c) ;\n",
-"Yacc $(<:S=.c) : $(>) ;\n",
-"case * : UserObject $(<) : $(>) ;\n",
-"}\n",
-"}\n",
-"rule ObjectCcFlags\n",
-"{\n",
-"CCFLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;\n",
-"}\n",
-"rule ObjectC++Flags\n",
-"{\n",
-"C++FLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;\n",
-"}\n",
-"rule ObjectHdrs\n",
-"{\n",
-"HDRS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;\n",
-"}\n",
-"rule Objects\n",
-"{\n",
-"local _i ;\n",
-"for _i in [ FGristFiles $(<) ]\n",
-"{\n",
-"Object $(_i:S=$(SUFOBJ)) : $(_i) ;\n",
-"DEPENDS obj : $(_i:S=$(SUFOBJ)) ;\n",
-"}\n",
-"}\n",
-"rule RmTemps\n",
-"{\n",
-"TEMPORARY $(>) ;\n",
-"}\n",
-"rule Setuid\n",
-"{\n",
-"MODE on [ FAppendSuffix $(<) : $(SUFEXE) ] = 4711 ;\n",
-"}\n",
-"rule Shell\n",
-"{\n",
-"DEPENDS shell : $(<) ;\n",
-"DEPENDS $(<) : $(>) ;\n",
-"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
-"MODE on $(<) = $(SHELLMODE) ;\n",
-"Clean clean : $(<) ;\n",
-"Chmod $(<) ;\n",
-"}\n",
-"rule SubDir\n",
-"{\n",
-"local _r _s ;\n",
-"if ! $($(<[1]))\n",
-"{\n",
-"if ! $(<[1])\n",
-"{\n",
-"EXIT SubDir syntax error ;\n",
-"}\n",
-"$(<[1]) = [ FSubDir $(<[2-]) ] ;\n",
-"}\n",
-"if ! $($(<[1])-included)\n",
-"{\n",
-"$(<[1])-included = TRUE ;\n",
-"_r = $($(<[1])RULES) ;\n",
-"if ! $(_r)\n",
-"{\n",
-"_r = $(JAMRULES:R=$($(<[1]))) ;\n",
-"}\n",
-"include $(_r) ;\n",
-"}\n",
-"_s = [ FDirName $(<[2-]) ] ;\n",
-"SUBDIR = $(_s:R=$($(<[1]))) ;\n",
-"SUBDIR_TOKENS = $(<[2-]) ;\n",
-"SEARCH_SOURCE = $(SUBDIR) ;\n",
-"LOCATE_SOURCE = $(ALL_LOCATE_TARGET) $(SUBDIR) ;\n",
-"LOCATE_TARGET = $(ALL_LOCATE_TARGET) $(SUBDIR) ;\n",
-"SOURCE_GRIST = [ FGrist $(<[2-]) ] ;\n",
-"SUBDIRCCFLAGS = ;\n",
-"SUBDIRC++FLAGS = ;\n",
-"SUBDIRHDRS = ;\n",
-"}\n",
-"rule SubDirCcFlags\n",
-"{\n",
-"SUBDIRCCFLAGS += $(<) ;\n",
-"}\n",
-"rule SubDirC++Flags\n",
-"{\n",
-"SUBDIRC++FLAGS += $(<) ;\n",
-"}\n",
-"rule SubDirHdrs\n",
-"{\n",
-"SUBDIRHDRS += $(<) ;\n",
-"}\n",
-"rule SubInclude\n",
-"{\n",
-"local _s ;\n",
-"if ! $($(<[1]))\n",
-"{\n",
-"EXIT Top level of source tree has not been set with $(<[1]) ;\n",
-"}\n",
-"_s = [ FDirName $(<[2-]) ] ;\n",
-"include $(JAMFILE:D=$(_s):R=$($(<[1]))) ;\n",
-"}\n",
-"rule Undefines\n",
-"{\n",
-"UNDEFS on [ FAppendSuffix $(<) : $(SUFEXE) ] += $(UNDEFFLAG)$(>) ;\n",
-"}\n",
-"rule UserObject\n",
-"{\n",
-"EXIT \"Unknown suffix on\" $(>) \"- see UserObject rule in Jamfile(5).\" ;\n",
-"}\n",
-"rule Yacc\n",
-"{\n",
-"local _h ;\n",
-"_h = $(<:BS=.h) ;\n",
-"MakeLocate $(<) $(_h) : $(LOCATE_SOURCE) ;\n",
-"if $(YACC)\n",
-"{\n",
-"DEPENDS $(<) $(_h) : $(>) ;\n",
-"Yacc1 $(<) $(_h) : $(>) ;\n",
-"YaccMv $(<) $(_h) : $(>) ;\n",
-"Clean clean : $(<) $(_h) ;\n",
-"}\n",
-"INCLUDES $(<) : $(_h) ;\n",
-"}\n",
-"rule FGrist\n",
-"{\n",
-"local _g _i ;\n",
-"_g = $(<[1]) ;\n",
-"for _i in $(<[2-])\n",
-"{\n",
-"_g = $(_g)!$(_i) ;\n",
-"}\n",
-"return $(_g) ;\n",
-"}\n",
-"rule FGristFiles\n",
-"{\n",
-"if ! $(SOURCE_GRIST)\n",
-"{\n",
-"return $(<) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"return $(<:G=$(SOURCE_GRIST)) ;\n",
-"}\n",
-"}\n",
-"rule FGristSourceFiles\n",
-"{\n",
-"if ! $(SOURCE_GRIST)\n",
-"{\n",
-"return $(<) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"local _i _o ;\n",
-"for _i in $(<)\n",
-"{\n",
-"switch $(_i)\n",
-"{\n",
-"case *.h : _o += $(_i) ;\n",
-"case * : _o += $(_i:G=$(SOURCE_GRIST)) ;\n",
-"}\n",
-"}\n",
-"return $(_o) ;\n",
-"}\n",
-"}\n",
-"rule FConcat\n",
-"{\n",
-"local _t _r ;\n",
-"$(_r) = $(<[1]) ;\n",
-"for _t in $(<[2-])\n",
-"{\n",
-"$(_r) = $(_r)$(_t) ;\n",
-"}\n",
-"return $(_r) ;\n",
-"}\n",
-"rule FSubDir\n",
-"{\n",
-"local _i _d ;\n",
-"if ! $(<[1])\n",
-"{\n",
-"_d = $(DOT) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"_d = $(DOTDOT) ;\n",
-"for _i in $(<[2-])\n",
-"{\n",
-"_d = $(_d:R=$(DOTDOT)) ;\n",
-"}\n",
-"}\n",
-"return $(_d) ;\n",
-"}\n",
-"rule FDirName\n",
-"{\n",
-"local _s _i ;\n",
-"if ! $(<)\n",
-"{\n",
-"_s = $(DOT) ;\n",
-"}\n",
-"else if $(VMS)\n",
-"{\n",
-"switch $(<[1])\n",
-"{\n",
-"case *:* : _s = $(<[1]) ;\n",
-"case \\\\[*\\\\] : _s = $(<[1]) ;\n",
-"case * : _s = [.$(<[1])] ;\n",
-"}\n",
-"for _i in [.$(<[2-])]\n",
-"{\n",
-"_s = $(_i:R=$(_s)) ;\n",
-"}\n",
-"}\n",
-"else if $(MAC)\n",
-"{\n",
-"_s = $(DOT) ;\n",
-"for _i in $(<)\n",
-"{\n",
-"_s = $(_i:R=$(_s)) ;\n",
-"}\n",
-"}\n",
-"else\n",
-"{\n",
-"_s = $(<[1]) ;\n",
-"for _i in $(<[2-])\n",
-"{\n",
-"_s = $(_i:R=$(_s)) ;\n",
-"}\n",
-"}\n",
-"return $(_s) ;\n",
-"}\n",
-"rule _makeCommon\n",
-"{\n",
-"if $($(<)[1]) && $($(<)[1]) = $($(>)[1])\n",
-"{\n",
-"$(<) = $($(<)[2-]) ;\n",
-"$(>) = $($(>)[2-]) ;\n",
-"_makeCommon $(<) : $(>) ;\n",
-"}\n",
-"}\n",
-"rule FRelPath\n",
-"{\n",
-"local _l _r ;\n",
-"_l = $(<) ;\n",
-"_r = $(>) ;\n",
-"_makeCommon _l : _r ;\n",
-"_l = [ FSubDir $(_l) ] ;\n",
-"_r = [ FDirName $(_r) ] ;\n",
-"if $(_r) = $(DOT) {\n",
-"return $(_l) ;\n",
-"} else {\n",
-"return $(_r:R=$(_l)) ;\n",
-"}\n",
-"}\n",
-"rule FAppendSuffix\n",
-"{\n",
-"if $(>)\n",
-"{\n",
-"local _i _o ;\n",
-"for _i in $(<)\n",
-"{\n",
-"if $(_i:S)\n",
-"{\n",
-"_o += $(_i) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"_o += $(_i:S=$(>)) ;\n",
-"}\n",
-"}\n",
-"return $(_o) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"return $(<) ;\n",
-"}\n",
-"}\n",
-"rule unmakeDir\n",
-"{\n",
-"if $(>[1]:D) && $(>[1]:D) != $(>[1]) && $(>[1]:D) != \\\\\\\\\n",
-"{\n",
-"unmakeDir $(<) : $(>[1]:D) $(>[1]:BS) $(>[2-]) ;\n",
-"}\n",
-"else\n",
-"{\n",
-"$(<) = $(>) ;\n",
-"}\n",
-"}\n",
-"rule FConvertToSlashes\n",
-"{\n",
-"local _d, _s, _i ;\n",
-"unmakeDir _d : $(<) ;\n",
-"_s = $(_d[1]) ;\n",
-"for _i in $(_d[2-])\n",
-"{\n",
-"_s = $(_s)/$(_i) ;\n",
-"}\n",
-"return $(_s) ;\n",
-"}\n",
-"actions updated together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) $(>)\n",
-"}\n",
-"actions As\n",
-"{\n",
-"$(AS) $(ASFLAGS) -I$(HDRS) -o $(<) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)\n",
-"}\n",
-"actions Chgrp\n",
-"{\n",
-"$(CHGRP) $(GROUP) $(<)\n",
-"}\n",
-"actions Chmod1\n",
-"{\n",
-"$(CHMOD) $(MODE) $(<)\n",
-"}\n",
-"actions Chown\n",
-"{\n",
-"$(CHOWN) $(OWNER) $(<)\n",
-"}\n",
-"actions piecemeal together existing Clean\n",
-"{\n",
-"$(RM) $(>)\n",
-"}\n",
-"actions File\n",
-"{\n",
-"$(CP) $(>) $(<)\n",
-"}\n",
-"actions GenFile1\n",
-"{\n",
-"$(>[1]) $(<) $(>[2-])\n",
-"}\n",
-"actions Fortran\n",
-"{\n",
-"$(FORTRAN) $(FORTRANFLAGS) -o $(<) $(>)\n",
-"}\n",
-"actions HardLink\n",
-"{\n",
-"$(RM) $(<) && $(LN) $(>) $(<)\n",
-"}\n",
-"actions Install\n",
-"{\n",
-"$(CP) $(>) $(<)\n",
-"}\n",
-"actions Lex\n",
-"{\n",
-"$(LEX) $(>)\n",
-"}\n",
-"actions LexMv\n",
-"{\n",
-"$(MV) lex.yy.c $(<)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
-"}\n",
-"actions MkDir1\n",
-"{\n",
-"$(MKDIR) $(<)\n",
-"}\n",
-"actions together Ranlib\n",
-"{\n",
-"$(RANLIB) $(<)\n",
-"}\n",
-"actions quietly updated piecemeal together RmTemps\n",
-"{\n",
-"$(RM) $(>)\n",
-"}\n",
-"actions Shell\n",
-"{\n",
-"$(AWK) '\n",
-"NR == 1 { print \"$(SHELLHEADER)\" }\n",
-"NR == 1 && /^[#:]/ { next }\n",
-"/^##/ { next }\n",
-"{ print }\n",
-"' < $(>) > $(<)\n",
-"}\n",
-"actions Yacc1\n",
-"{\n",
-"$(YACC) $(YACCFLAGS) $(>)\n",
-"}\n",
-"actions YaccMv\n",
-"{\n",
-"$(MV) $(YACCFILES).c $(<[1])\n",
-"$(MV) $(YACCFILES).h $(<[2])\n",
-"}\n",
-"if $(RELOCATE)\n",
-"{\n",
-"actions C++\n",
-"{\n",
-"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) $(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) $(>)\n",
-"}\n",
-"actions ignore CcMv\n",
-"{\n",
-"[ $(<) != $(>:BS=$(SUFOBJ)) ] && $(MV) $(>:BS=$(SUFOBJ)) $(<)\n",
-"}\n",
-"}\n",
-"if $(NOARUPDATE)\n",
-"{\n",
-"actions Archive\n",
-"{\n",
-"$(AR) $(<) $(>)\n",
-"}\n",
-"}\n",
-"if $(NT)\n",
-"{\n",
-"if $(TOOLSET) = VISUALC || $(TOOLSET) = VC7 || $(TOOLSET) = INTELC\n",
-"{\n",
-"actions updated together piecemeal Archive\n",
-"{\n",
-"if exist $(<) set _$(<:B)_=$(<)\n",
-"$(AR) /out:$(<) %_$(<:B)_% $(>)\n",
-"}\n",
-"actions As\n",
-"{\n",
-"$(AS) /Ml /p /v /w2 $(>) $(<) ,nul,nul;\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) /Tp$(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
-"}\n",
-"}\n",
-"else if $(TOOLSET) = VISUALC16\n",
-"{\n",
-"actions updated together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) -+$(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /Tp$(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
-"}\n",
-"}\n",
-"else if $(TOOLSET) = BORLANDC\n",
-"{\n",
-"actions updated together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) -+$(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) -e$(<) $(LINKFLAGS) $(UNDEFS) -L$(LINKLIBS) $(NEEDLIBS) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
-"}\n",
-"}\n",
-"else if $(TOOLSET) = MINGW\n",
-"{\n",
-"actions together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) $(>:T)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
-"}\n",
-"}\n",
-"else if $(TOOLSET) = WATCOM\n",
-"{\n",
-"actions together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) +-$(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
-"}\n",
-"actions Shell\n",
-"{\n",
-"$(CP) $(>) $(<)\n",
-"}\n",
-"}\n",
-"else if $(TOOLSET) = LCC\n",
-"{\n",
-"actions together piecemeal Archive\n",
-"{\n",
-"$(AR) /out:$(<) $(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) $(CCFLAGS) $(OPTIM) -Fo$(<) -I$(HDRS) $(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
-"}\n",
-"actions Shell\n",
-"{\n",
-"$(CP) $(>) $(<)\n",
-"}\n",
-"}\n",
-"}\n",
-"else if $(OS2)\n",
-"{\n",
-"if $(TOOLSET) = WATCOM\n",
-"{\n",
-"actions together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) +-$(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
-"}\n",
-"actions Shell\n",
-"{\n",
-"$(CP) $(>) $(<)\n",
-"}\n",
-"}\n",
-"else if $(TOOLSET) = EMX\n",
-"{\n",
-"actions together piecemeal Archive\n",
-"{\n",
-"$(AR) $(<) $(>:T)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
-"}\n",
-"}\n",
-"}\n",
-"else if $(VMS)\n",
-"{\n",
-"actions updated together piecemeal Archive\n",
-"{\n",
-"lib/replace $(<) $(>[1]) ,$(>[2-])\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"$(CC)/obj=$(<) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"$(C++)/obj=$(<) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>)\n",
-"}\n",
-"actions piecemeal together existing Clean\n",
-"{\n",
-"$(RM) $(>[1]);* ,$(>[2-]);*\n",
-"}\n",
-"actions together quietly CreLib\n",
-"{\n",
-"if f$search(\"$(<)\") .eqs. \"\" then lib/create $(<)\n",
-"}\n",
-"actions GenFile1\n",
-"{\n",
-"mcr $(>[1]) $(<) $(>[2-])\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK)/exe=$(<) $(LINKFLAGS) $(>[1]) ,$(>[2-]) ,$(NEEDLIBS)/lib ,$(LINKLIBS)\n",
-"}\n",
-"actions quietly updated piecemeal together RmTemps\n",
-"{\n",
-"$(RM) $(>[1]);* ,$(>[2-]);*\n",
-"}\n",
-"actions Shell\n",
-"{\n",
-"$(CP) $(>) $(<)\n",
-"}\n",
-"}\n",
-"else if $(MAC)\n",
-"{\n",
-"actions together Archive\n",
-"{\n",
-"$(LINK) -library -o $(<) $(>)\n",
-"}\n",
-"actions Cc\n",
-"{\n",
-"set -e MWCincludes $(MACINC)\n",
-"$(CC) -o $(<) $(CCFLAGS) $(OPTIM) $(>)\n",
-"}\n",
-"actions C++\n",
-"{\n",
-"set -e MWCincludes $(MACINC)\n",
-"$(CC) -o $(<) $(C++FLAGS) $(OPTIM) $(>)\n",
-"}\n",
-"actions Link bind NEEDLIBS\n",
-"{\n",
-"$(LINK) -o $(<) $(LINKFLAGS) $(>) $(NEEDLIBS) \"$(LINKLIBS)\"\n",
-"}\n",
-"}\n",
-"rule BULK { Bulk $(<) : $(>) ; }\n",
-"rule FILE { File $(<) : $(>) ; }\n",
-"rule HDRRULE { HdrRule $(<) : $(>) ; }\n",
-"rule INSTALL { Install $(<) : $(>) ; }\n",
-"rule LIBRARY { Library $(<) : $(>) ; }\n",
-"rule LIBS { LinkLibraries $(<) : $(>) ; }\n",
-"rule LINK { Link $(<) : $(>) ; }\n",
-"rule MAIN { Main $(<) : $(>) ; }\n",
-"rule SETUID { Setuid $(<) ; }\n",
-"rule SHELL { Shell $(<) : $(>) ; }\n",
-"rule UNDEFINES { Undefines $(<) : $(>) ; }\n",
-"rule INSTALLBIN { InstallBin $(BINDIR) : $(<) ; }\n",
-"rule INSTALLLIB { InstallLib $(LIBDIR) : $(<) ; }\n",
-"rule INSTALLMAN { InstallMan $(MANDIR) : $(<) ; }\n",
-"rule addDirName { $(<) += [ FDirName $(>) ] ; }\n",
-"rule makeDirName { $(<) = [ FDirName $(>) ] ; }\n",
-"rule makeGristedName { $(<) = [ FGristSourceFiles $(>) ] ; }\n",
-"rule makeRelPath { $(<[1]) = [ FRelPath $(<[2-]) : $(>) ] ; }\n",
-"rule makeSuffixed { $(<[1]) = [ FAppendSuffix $(>) : $(<[2]) ] ; }\n",
-"{\n",
-"if $(JAMFILE) { include $(JAMFILE) ; }\n",
-"}\n",
-"}\n",
-0 };
diff --git a/jam-files/engine/jambase.h b/jam-files/engine/jambase.h
deleted file mode 100644
index c05ec792..00000000
--- a/jam-files/engine/jambase.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * jambase.h - declaration for the internal jambase
- *
- * The file Jambase is turned into a C array of strings in jambase.c
- * so that it can be built in to the executable. This is the
- * declaration for that array.
- */
-
-extern char *jambase[];
diff --git a/jam-files/engine/jamgram.c b/jam-files/engine/jamgram.c
deleted file mode 100644
index b1fa0835..00000000
--- a/jam-files/engine/jamgram.c
+++ /dev/null
@@ -1,1830 +0,0 @@
-/* A Bison parser, made by GNU Bison 1.875. */
-
-/* Skeleton parser for Yacc-like parsing with Bison,
- Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc.
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2, or (at your option)
- any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 59 Temple Place - Suite 330,
- Boston, MA 02111-1307, USA. */
-
-/* As a special exception, when this file is copied by Bison into a
- Bison output file, you may use that output file without restriction.
- This special exception was added by the Free Software Foundation
- in version 1.24 of Bison. */
-
-/* Written by Richard Stallman by simplifying the original so called
- ``semantic'' parser. */
-
-/* All symbols defined below should begin with yy or YY, to avoid
- infringing on user name space. This should be done even for local
- variables, as they might otherwise be expanded by user macros.
- There are some unavoidable exceptions within include files to
- define necessary library symbols; they are noted "INFRINGES ON
- USER NAME SPACE" below. */
-
-/* Identify Bison output. */
-#define YYBISON 1
-
-/* Skeleton name. */
-#define YYSKELETON_NAME "yacc.c"
-
-/* Pure parsers. */
-#define YYPURE 0
-
-/* Using locations. */
-#define YYLSP_NEEDED 0
-
-
-
-/* Tokens. */
-#ifndef YYTOKENTYPE
-# define YYTOKENTYPE
- /* Put the tokens into the symbol table, so that GDB and other debuggers
- know about them. */
- enum yytokentype {
- _BANG_t = 258,
- _BANG_EQUALS_t = 259,
- _AMPER_t = 260,
- _AMPERAMPER_t = 261,
- _LPAREN_t = 262,
- _RPAREN_t = 263,
- _PLUS_EQUALS_t = 264,
- _COLON_t = 265,
- _SEMIC_t = 266,
- _LANGLE_t = 267,
- _LANGLE_EQUALS_t = 268,
- _EQUALS_t = 269,
- _RANGLE_t = 270,
- _RANGLE_EQUALS_t = 271,
- _QUESTION_EQUALS_t = 272,
- _LBRACKET_t = 273,
- _RBRACKET_t = 274,
- ACTIONS_t = 275,
- BIND_t = 276,
- CASE_t = 277,
- CLASS_t = 278,
- DEFAULT_t = 279,
- ELSE_t = 280,
- EXISTING_t = 281,
- FOR_t = 282,
- IF_t = 283,
- IGNORE_t = 284,
- IN_t = 285,
- INCLUDE_t = 286,
- LOCAL_t = 287,
- MODULE_t = 288,
- ON_t = 289,
- PIECEMEAL_t = 290,
- QUIETLY_t = 291,
- RETURN_t = 292,
- RULE_t = 293,
- SWITCH_t = 294,
- TOGETHER_t = 295,
- UPDATED_t = 296,
- WHILE_t = 297,
- _LBRACE_t = 298,
- _BAR_t = 299,
- _BARBAR_t = 300,
- _RBRACE_t = 301,
- ARG = 302,
- STRING = 303
- };
-#endif
-#define _BANG_t 258
-#define _BANG_EQUALS_t 259
-#define _AMPER_t 260
-#define _AMPERAMPER_t 261
-#define _LPAREN_t 262
-#define _RPAREN_t 263
-#define _PLUS_EQUALS_t 264
-#define _COLON_t 265
-#define _SEMIC_t 266
-#define _LANGLE_t 267
-#define _LANGLE_EQUALS_t 268
-#define _EQUALS_t 269
-#define _RANGLE_t 270
-#define _RANGLE_EQUALS_t 271
-#define _QUESTION_EQUALS_t 272
-#define _LBRACKET_t 273
-#define _RBRACKET_t 274
-#define ACTIONS_t 275
-#define BIND_t 276
-#define CASE_t 277
-#define CLASS_t 278
-#define DEFAULT_t 279
-#define ELSE_t 280
-#define EXISTING_t 281
-#define FOR_t 282
-#define IF_t 283
-#define IGNORE_t 284
-#define IN_t 285
-#define INCLUDE_t 286
-#define LOCAL_t 287
-#define MODULE_t 288
-#define ON_t 289
-#define PIECEMEAL_t 290
-#define QUIETLY_t 291
-#define RETURN_t 292
-#define RULE_t 293
-#define SWITCH_t 294
-#define TOGETHER_t 295
-#define UPDATED_t 296
-#define WHILE_t 297
-#define _LBRACE_t 298
-#define _BAR_t 299
-#define _BARBAR_t 300
-#define _RBRACE_t 301
-#define ARG 302
-#define STRING 303
-
-
-
-
-/* Copy the first part of user declarations. */
-#line 96 "jamgram.y"
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "compile.h"
-#include "newstr.h"
-#include "rules.h"
-
-# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
-
-# define F0 (LIST *(*)(PARSE *, FRAME *))0
-# define P0 (PARSE *)0
-# define S0 (char *)0
-
-# define pappend( l,r ) parse_make( compile_append,l,r,P0,S0,S0,0 )
-# define peval( c,l,r ) parse_make( compile_eval,l,r,P0,S0,S0,c )
-# define pfor( s,l,r,x ) parse_make( compile_foreach,l,r,P0,s,S0,x )
-# define pif( l,r,t ) parse_make( compile_if,l,r,t,S0,S0,0 )
-# define pincl( l ) parse_make( compile_include,l,P0,P0,S0,S0,0 )
-# define plist( s ) parse_make( compile_list,P0,P0,P0,s,S0,0 )
-# define plocal( l,r,t ) parse_make( compile_local,l,r,t,S0,S0,0 )
-# define pmodule( l,r ) parse_make( compile_module,l,r,P0,S0,S0,0 )
-# define pclass( l,r ) parse_make( compile_class,l,r,P0,S0,S0,0 )
-# define pnull() parse_make( compile_null,P0,P0,P0,S0,S0,0 )
-# define pon( l,r ) parse_make( compile_on,l,r,P0,S0,S0,0 )
-# define prule( s,p ) parse_make( compile_rule,p,P0,P0,s,S0,0 )
-# define prules( l,r ) parse_make( compile_rules,l,r,P0,S0,S0,0 )
-# define pset( l,r,a ) parse_make( compile_set,l,r,P0,S0,S0,a )
-# define pset1( l,r,t,a ) parse_make( compile_settings,l,r,t,S0,S0,a )
-# define psetc( s,p,a,l ) parse_make( compile_setcomp,p,a,P0,s,S0,l )
-# define psete( s,l,s1,f ) parse_make( compile_setexec,l,P0,P0,s,s1,f )
-# define pswitch( l,r ) parse_make( compile_switch,l,r,P0,S0,S0,0 )
-# define pwhile( l,r ) parse_make( compile_while,l,r,P0,S0,S0,0 )
-
-# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
-# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
-
-
-
-/* Enabling traces. */
-#ifndef YYDEBUG
-# define YYDEBUG 0
-#endif
-
-/* Enabling verbose error messages. */
-#ifdef YYERROR_VERBOSE
-# undef YYERROR_VERBOSE
-# define YYERROR_VERBOSE 1
-#else
-# define YYERROR_VERBOSE 0
-#endif
-
-#if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED)
-typedef int YYSTYPE;
-# define yystype YYSTYPE /* obsolescent; will be withdrawn */
-# define YYSTYPE_IS_DECLARED 1
-# define YYSTYPE_IS_TRIVIAL 1
-#endif
-
-
-
-/* Copy the second part of user declarations. */
-
-
-/* Line 214 of yacc.c. */
-#line 223 "y.tab.c"
-
-#if ! defined (yyoverflow) || YYERROR_VERBOSE
-
-/* The parser invokes alloca or malloc; define the necessary symbols. */
-
-# if YYSTACK_USE_ALLOCA
-# define YYSTACK_ALLOC alloca
-# else
-# ifndef YYSTACK_USE_ALLOCA
-# if defined (alloca) || defined (_ALLOCA_H)
-# define YYSTACK_ALLOC alloca
-# else
-# ifdef __GNUC__
-# define YYSTACK_ALLOC __builtin_alloca
-# endif
-# endif
-# endif
-# endif
-
-# ifdef YYSTACK_ALLOC
- /* Pacify GCC's `empty if-body' warning. */
-# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0)
-# else
-# if defined (__STDC__) || defined (__cplusplus)
-# include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
-# define YYSIZE_T size_t
-# endif
-# define YYSTACK_ALLOC malloc
-# define YYSTACK_FREE free
-# endif
-#endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */
-
-
-#if (! defined (yyoverflow) \
- && (! defined (__cplusplus) \
- || (YYSTYPE_IS_TRIVIAL)))
-
-/* A type that is properly aligned for any stack member. */
-union yyalloc
-{
- short yyss;
- YYSTYPE yyvs;
- };
-
-/* The size of the maximum gap between one aligned stack and the next. */
-# define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1)
-
-/* The size of an array large enough to hold all stacks, each with
- N elements. */
-# define YYSTACK_BYTES(N) \
- ((N) * (sizeof (short) + sizeof (YYSTYPE)) \
- + YYSTACK_GAP_MAXIMUM)
-
-/* Copy COUNT objects from FROM to TO. The source and destination do
- not overlap. */
-# ifndef YYCOPY
-# if 1 < __GNUC__
-# define YYCOPY(To, From, Count) \
- __builtin_memcpy (To, From, (Count) * sizeof (*(From)))
-# else
-# define YYCOPY(To, From, Count) \
- do \
- { \
- register YYSIZE_T yyi; \
- for (yyi = 0; yyi < (Count); yyi++) \
- (To)[yyi] = (From)[yyi]; \
- } \
- while (0)
-# endif
-# endif
-
-/* Relocate STACK from its old location to the new one. The
- local variables YYSIZE and YYSTACKSIZE give the old and new number of
- elements in the stack, and YYPTR gives the new location of the
- stack. Advance YYPTR to a properly aligned location for the next
- stack. */
-# define YYSTACK_RELOCATE(Stack) \
- do \
- { \
- YYSIZE_T yynewbytes; \
- YYCOPY (&yyptr->Stack, Stack, yysize); \
- Stack = &yyptr->Stack; \
- yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \
- yyptr += yynewbytes / sizeof (*yyptr); \
- } \
- while (0)
-
-#endif
-
-#if defined (__STDC__) || defined (__cplusplus)
- typedef signed char yysigned_char;
-#else
- typedef short yysigned_char;
-#endif
-
-/* YYFINAL -- State number of the termination state. */
-#define YYFINAL 43
-/* YYLAST -- Last index in YYTABLE. */
-#define YYLAST 261
-
-/* YYNTOKENS -- Number of terminals. */
-#define YYNTOKENS 49
-/* YYNNTS -- Number of nonterminals. */
-#define YYNNTS 24
-/* YYNRULES -- Number of rules. */
-#define YYNRULES 75
-/* YYNRULES -- Number of states. */
-#define YYNSTATES 159
-
-/* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */
-#define YYUNDEFTOK 2
-#define YYMAXUTOK 303
-
-#define YYTRANSLATE(YYX) \
- ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK)
-
-/* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */
-static const unsigned char yytranslate[] =
-{
- 0, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 1, 2, 3, 4,
- 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48
-};
-
-#if YYDEBUG
-/* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in
- YYRHS. */
-static const unsigned char yyprhs[] =
-{
- 0, 0, 3, 4, 6, 8, 10, 12, 15, 21,
- 22, 25, 27, 31, 32, 34, 35, 39, 43, 47,
- 52, 59, 63, 72, 78, 84, 90, 96, 102, 110,
- 116, 120, 121, 122, 132, 134, 136, 138, 141, 143,
- 147, 151, 155, 159, 163, 167, 171, 175, 179, 183,
- 187, 190, 194, 195, 198, 203, 205, 209, 211, 212,
- 215, 217, 218, 223, 226, 231, 236, 237, 240, 242,
- 244, 246, 248, 250, 252, 253
-};
-
-/* YYRHS -- A `-1'-separated list of the rules' RHS. */
-static const yysigned_char yyrhs[] =
-{
- 50, 0, -1, -1, 52, -1, 53, -1, 52, -1,
- 57, -1, 57, 52, -1, 32, 65, 54, 11, 51,
- -1, -1, 14, 65, -1, 53, -1, 7, 64, 8,
- -1, -1, 32, -1, -1, 43, 51, 46, -1, 31,
- 65, 11, -1, 47, 64, 11, -1, 67, 60, 65,
- 11, -1, 67, 34, 65, 60, 65, 11, -1, 37,
- 65, 11, -1, 27, 56, 47, 30, 65, 43, 51,
- 46, -1, 39, 65, 43, 62, 46, -1, 28, 61,
- 43, 51, 46, -1, 33, 65, 43, 51, 46, -1,
- 23, 64, 43, 51, 46, -1, 42, 61, 43, 51,
- 46, -1, 28, 61, 43, 51, 46, 25, 57, -1,
- 56, 38, 47, 55, 57, -1, 34, 67, 57, -1,
- -1, -1, 20, 70, 47, 72, 43, 58, 48, 59,
- 46, -1, 14, -1, 9, -1, 17, -1, 24, 14,
- -1, 67, -1, 61, 14, 61, -1, 61, 4, 61,
- -1, 61, 12, 61, -1, 61, 13, 61, -1, 61,
- 15, 61, -1, 61, 16, 61, -1, 61, 5, 61,
- -1, 61, 6, 61, -1, 61, 44, 61, -1, 61,
- 45, 61, -1, 67, 30, 65, -1, 3, 61, -1,
- 7, 61, 8, -1, -1, 63, 62, -1, 22, 47,
- 10, 51, -1, 65, -1, 65, 10, 64, -1, 66,
- -1, -1, 66, 67, -1, 47, -1, -1, 18, 68,
- 69, 19, -1, 67, 64, -1, 34, 67, 67, 64,
- -1, 34, 67, 37, 65, -1, -1, 70, 71, -1,
- 41, -1, 40, -1, 29, -1, 36, -1, 35, -1,
- 26, -1, -1, 21, 65, -1
-};
-
-/* YYRLINE[YYN] -- source line where rule number YYN was defined. */
-static const unsigned short yyrline[] =
-{
- 0, 139, 139, 141, 152, 154, 158, 160, 162, 167,
- 170, 172, 176, 179, 182, 185, 188, 190, 192, 194,
- 196, 198, 200, 202, 204, 206, 208, 210, 212, 214,
- 216, 219, 221, 218, 230, 232, 234, 236, 243, 245,
- 247, 249, 251, 253, 255, 257, 259, 261, 263, 265,
- 267, 269, 281, 282, 286, 295, 297, 307, 312, 313,
- 317, 319, 319, 328, 330, 332, 343, 344, 348, 350,
- 352, 354, 356, 358, 368, 369
-};
-#endif
-
-#if YYDEBUG || YYERROR_VERBOSE
-/* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM.
- First, the terminals, then, starting at YYNTOKENS, nonterminals. */
-static const char *const yytname[] =
-{
- "$end", "error", "$undefined", "_BANG_t", "_BANG_EQUALS_t", "_AMPER_t",
- "_AMPERAMPER_t", "_LPAREN_t", "_RPAREN_t", "_PLUS_EQUALS_t", "_COLON_t",
- "_SEMIC_t", "_LANGLE_t", "_LANGLE_EQUALS_t", "_EQUALS_t", "_RANGLE_t",
- "_RANGLE_EQUALS_t", "_QUESTION_EQUALS_t", "_LBRACKET_t", "_RBRACKET_t",
- "ACTIONS_t", "BIND_t", "CASE_t", "CLASS_t", "DEFAULT_t", "ELSE_t",
- "EXISTING_t", "FOR_t", "IF_t", "IGNORE_t", "IN_t", "INCLUDE_t",
- "LOCAL_t", "MODULE_t", "ON_t", "PIECEMEAL_t", "QUIETLY_t", "RETURN_t",
- "RULE_t", "SWITCH_t", "TOGETHER_t", "UPDATED_t", "WHILE_t", "_LBRACE_t",
- "_BAR_t", "_BARBAR_t", "_RBRACE_t", "ARG", "STRING", "$accept", "run",
- "block", "rules", "null", "assign_list_opt", "arglist_opt", "local_opt",
- "rule", "@1", "@2", "assign", "expr", "cases", "case", "lol", "list",
- "listp", "arg", "@3", "func", "eflags", "eflag", "bindlist", 0
-};
-#endif
-
-# ifdef YYPRINT
-/* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to
- token YYLEX-NUM. */
-static const unsigned short yytoknum[] =
-{
- 0, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303
-};
-# endif
-
-/* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */
-static const unsigned char yyr1[] =
-{
- 0, 49, 50, 50, 51, 51, 52, 52, 52, 53,
- 54, 54, 55, 55, 56, 56, 57, 57, 57, 57,
- 57, 57, 57, 57, 57, 57, 57, 57, 57, 57,
- 57, 58, 59, 57, 60, 60, 60, 60, 61, 61,
- 61, 61, 61, 61, 61, 61, 61, 61, 61, 61,
- 61, 61, 62, 62, 63, 64, 64, 65, 66, 66,
- 67, 68, 67, 69, 69, 69, 70, 70, 71, 71,
- 71, 71, 71, 71, 72, 72
-};
-
-/* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */
-static const unsigned char yyr2[] =
-{
- 0, 2, 0, 1, 1, 1, 1, 2, 5, 0,
- 2, 1, 3, 0, 1, 0, 3, 3, 3, 4,
- 6, 3, 8, 5, 5, 5, 5, 5, 7, 5,
- 3, 0, 0, 9, 1, 1, 1, 2, 1, 3,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 2, 3, 0, 2, 4, 1, 3, 1, 0, 2,
- 1, 0, 4, 2, 4, 4, 0, 2, 1, 1,
- 1, 1, 1, 1, 0, 2
-};
-
-/* YYDEFACT[STATE-NUM] -- Default rule to reduce with in state
- STATE-NUM when YYTABLE doesn't specify something else to do. Zero
- means the default is an error. */
-static const unsigned char yydefact[] =
-{
- 2, 61, 66, 58, 15, 0, 58, 58, 58, 0,
- 58, 58, 0, 9, 60, 0, 3, 0, 6, 0,
- 0, 0, 0, 55, 57, 14, 0, 0, 0, 60,
- 0, 38, 0, 9, 0, 15, 0, 0, 0, 0,
- 5, 4, 0, 1, 0, 7, 35, 34, 36, 0,
- 58, 58, 0, 58, 0, 73, 70, 72, 71, 69,
- 68, 74, 67, 9, 58, 59, 0, 50, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 9, 0, 0,
- 58, 17, 58, 11, 0, 9, 30, 21, 52, 9,
- 16, 18, 13, 37, 0, 0, 0, 63, 62, 58,
- 0, 0, 56, 58, 51, 40, 45, 46, 41, 42,
- 39, 43, 44, 0, 47, 48, 49, 10, 9, 0,
- 0, 0, 52, 0, 58, 15, 58, 19, 58, 58,
- 75, 31, 26, 0, 24, 8, 25, 0, 23, 53,
- 27, 0, 29, 0, 65, 64, 0, 9, 15, 9,
- 12, 20, 32, 0, 28, 54, 0, 22, 33
-};
-
-/* YYDEFGOTO[NTERM-NUM]. */
-static const short yydefgoto[] =
-{
- -1, 15, 39, 40, 41, 84, 125, 17, 18, 146,
- 156, 51, 30, 121, 122, 22, 23, 24, 31, 20,
- 54, 21, 62, 100
-};
-
-/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing
- STATE-NUM. */
-#define YYPACT_NINF -48
-static const short yypact[] =
-{
- 179, -48, -48, -48, -15, 7, -48, -16, -48, 3,
- -48, -48, 7, 179, 1, 27, -48, -9, 179, 19,
- -3, 33, -11, 24, 3, -48, -10, 7, 7, -48,
- 138, 9, 30, 35, 13, 205, 53, 22, 151, 20,
- -48, -48, 56, -48, 23, -48, -48, -48, -48, 61,
- -48, -48, 3, -48, 62, -48, -48, -48, -48, -48,
- -48, 58, -48, 179, -48, -48, 52, -48, 164, 7,
- 7, 7, 7, 7, 7, 7, 7, 179, 7, 7,
- -48, -48, -48, -48, 72, 179, -48, -48, 68, 179,
- -48, -48, 85, -48, 77, 73, 8, -48, -48, -48,
- 50, 57, -48, -48, -48, 45, 93, 93, -48, -48,
- 45, -48, -48, 64, 245, 245, -48, -48, 179, 66,
- 67, 69, 68, 71, -48, 205, -48, -48, -48, -48,
- -48, -48, -48, 70, 79, -48, -48, 109, -48, -48,
- -48, 112, -48, 115, -48, -48, 75, 179, 205, 179,
- -48, -48, -48, 81, -48, -48, 82, -48, -48
-};
-
-/* YYPGOTO[NTERM-NUM]. */
-static const short yypgoto[] =
-{
- -48, -48, -47, 5, 104, -48, -48, 136, -27, -48,
- -48, 47, 60, 36, -48, -13, -4, -48, 0, -48,
- -48, -48, -48, -48
-};
-
-/* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If
-   positive, shift that token.  If negative, reduce the rule whose
- number is the opposite. If zero, do what YYDEFACT says.
- If YYTABLE_NINF, syntax error. */
-#define YYTABLE_NINF -59
-static const short yytable[] =
-{
- 19, 42, 32, 33, 34, 16, 36, 37, 86, 35,
- 27, -58, -58, 19, 28, 1, 101, 25, 19, -58,
- 53, 1, -14, 45, 65, 1, 1, 43, 46, 44,
- 113, 52, 63, 47, 64, 19, 48, 66, 119, 80,
- 97, 81, 123, 49, 29, 128, 94, 95, -58, 82,
- 29, 102, 96, 50, 29, 29, 85, 72, 73, 55,
- 75, 76, 56, 19, 87, 88, 90, 91, 57, 58,
- 92, 135, 38, 59, 60, 93, 116, 19, 117, 99,
- 61, 98, 103, 118, 127, 19, 46, 67, 68, 19,
- 120, 47, 124, 131, 48, 130, 129, 69, 142, 133,
- 153, 49, 155, 132, 148, 72, 73, 74, 75, 76,
- 134, 141, 136, 147, 137, 138, 145, 140, 19, 149,
- 150, 154, 143, 152, 144, 19, 151, 157, 158, 105,
- 106, 107, 108, 109, 110, 111, 112, 83, 114, 115,
- 26, 126, 69, 70, 71, 0, 0, 19, 19, 19,
- 72, 73, 74, 75, 76, 69, 70, 71, 139, 0,
- 0, 0, 0, 72, 73, 74, 75, 76, 69, 70,
- 71, 0, 104, 0, 0, 0, 72, 73, 74, 75,
- 76, 77, 78, 79, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 89, 78, 79, 1, 0, 2,
- 0, 0, 3, 0, 0, 0, 4, 5, 78, 79,
- 6, 7, 8, 9, 0, 0, 10, -15, 11, 0,
- 0, 12, 13, 1, 0, 2, 14, 0, 3, 0,
- 0, 0, 4, 5, 0, 0, 6, 25, 8, 9,
- 0, 0, 10, 0, 11, 0, 0, 12, 13, 69,
- 70, 71, 14, 0, 0, 0, 0, 72, 73, 74,
- 75, 76
-};
-
-static const short yycheck[] =
-{
- 0, 14, 6, 7, 8, 0, 10, 11, 35, 9,
- 3, 10, 11, 13, 7, 18, 63, 32, 18, 18,
- 20, 18, 38, 18, 24, 18, 18, 0, 9, 38,
- 77, 34, 43, 14, 10, 35, 17, 47, 85, 30,
- 53, 11, 89, 24, 47, 37, 50, 51, 47, 14,
- 47, 64, 52, 34, 47, 47, 43, 12, 13, 26,
- 15, 16, 29, 63, 11, 43, 46, 11, 35, 36,
- 47, 118, 12, 40, 41, 14, 80, 77, 82, 21,
- 47, 19, 30, 11, 11, 85, 9, 27, 28, 89,
- 22, 14, 7, 43, 17, 99, 96, 4, 125, 103,
- 147, 24, 149, 46, 25, 12, 13, 14, 15, 16,
- 46, 124, 46, 43, 47, 46, 129, 46, 118, 10,
- 8, 148, 126, 48, 128, 125, 11, 46, 46, 69,
- 70, 71, 72, 73, 74, 75, 76, 33, 78, 79,
- 4, 94, 4, 5, 6, -1, -1, 147, 148, 149,
- 12, 13, 14, 15, 16, 4, 5, 6, 122, -1,
- -1, -1, -1, 12, 13, 14, 15, 16, 4, 5,
- 6, -1, 8, -1, -1, -1, 12, 13, 14, 15,
- 16, 43, 44, 45, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 43, 44, 45, 18, -1, 20,
- -1, -1, 23, -1, -1, -1, 27, 28, 44, 45,
- 31, 32, 33, 34, -1, -1, 37, 38, 39, -1,
- -1, 42, 43, 18, -1, 20, 47, -1, 23, -1,
- -1, -1, 27, 28, -1, -1, 31, 32, 33, 34,
- -1, -1, 37, -1, 39, -1, -1, 42, 43, 4,
- 5, 6, 47, -1, -1, -1, -1, 12, 13, 14,
- 15, 16
-};
-
-/* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
- symbol of state STATE-NUM. */
-static const unsigned char yystos[] =
-{
- 0, 18, 20, 23, 27, 28, 31, 32, 33, 34,
- 37, 39, 42, 43, 47, 50, 52, 56, 57, 67,
- 68, 70, 64, 65, 66, 32, 56, 3, 7, 47,
- 61, 67, 65, 65, 65, 67, 65, 65, 61, 51,
- 52, 53, 64, 0, 38, 52, 9, 14, 17, 24,
- 34, 60, 34, 67, 69, 26, 29, 35, 36, 40,
- 41, 47, 71, 43, 10, 67, 47, 61, 61, 4,
- 5, 6, 12, 13, 14, 15, 16, 43, 44, 45,
- 30, 11, 14, 53, 54, 43, 57, 11, 43, 43,
- 46, 11, 47, 14, 65, 65, 67, 64, 19, 21,
- 72, 51, 64, 30, 8, 61, 61, 61, 61, 61,
- 61, 61, 61, 51, 61, 61, 65, 65, 11, 51,
- 22, 62, 63, 51, 7, 55, 60, 11, 37, 67,
- 65, 43, 46, 65, 46, 51, 46, 47, 46, 62,
- 46, 64, 57, 65, 65, 64, 58, 43, 25, 10,
- 8, 11, 48, 51, 57, 51, 59, 46, 46
-};
-
-#if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__)
-# define YYSIZE_T __SIZE_TYPE__
-#endif
-#if ! defined (YYSIZE_T) && defined (size_t)
-# define YYSIZE_T size_t
-#endif
-#if ! defined (YYSIZE_T)
-# if defined (__STDC__) || defined (__cplusplus)
-# include <stddef.h> /* INFRINGES ON USER NAME SPACE */
-# define YYSIZE_T size_t
-# endif
-#endif
-#if ! defined (YYSIZE_T)
-# define YYSIZE_T unsigned int
-#endif
-
-#define yyerrok (yyerrstatus = 0)
-#define yyclearin (yychar = YYEMPTY)
-#define YYEMPTY (-2)
-#define YYEOF 0
-
-#define YYACCEPT goto yyacceptlab
-#define YYABORT goto yyabortlab
-#define YYERROR goto yyerrlab1
-
-/* Like YYERROR except do call yyerror. This remains here temporarily
- to ease the transition to the new meaning of YYERROR, for GCC.
- Once GCC version 2 has supplanted version 1, this can go. */
-
-#define YYFAIL goto yyerrlab
-
-#define YYRECOVERING() (!!yyerrstatus)
-
-#define YYBACKUP(Token, Value) \
-do \
- if (yychar == YYEMPTY && yylen == 1) \
- { \
- yychar = (Token); \
- yylval = (Value); \
- yytoken = YYTRANSLATE (yychar); \
- YYPOPSTACK; \
- goto yybackup; \
- } \
- else \
- { \
- yyerror ("syntax error: cannot back up");\
- YYERROR; \
- } \
-while (0)
-
-#define YYTERROR 1
-#define YYERRCODE 256
-
-/* YYLLOC_DEFAULT -- Compute the default location (before the actions
- are run). */
-
-#ifndef YYLLOC_DEFAULT
-# define YYLLOC_DEFAULT(Current, Rhs, N) \
- Current.first_line = Rhs[1].first_line; \
- Current.first_column = Rhs[1].first_column; \
- Current.last_line = Rhs[N].last_line; \
- Current.last_column = Rhs[N].last_column;
-#endif
-
-/* YYLEX -- calling `yylex' with the right arguments. */
-
-#ifdef YYLEX_PARAM
-# define YYLEX yylex (YYLEX_PARAM)
-#else
-# define YYLEX yylex ()
-#endif
-
-/* Enable debugging if requested. */
-#if YYDEBUG
-
-# ifndef YYFPRINTF
-# include <stdio.h> /* INFRINGES ON USER NAME SPACE */
-# define YYFPRINTF fprintf
-# endif
-
-# define YYDPRINTF(Args) \
-do { \
- if (yydebug) \
- YYFPRINTF Args; \
-} while (0)
-
-# define YYDSYMPRINT(Args) \
-do { \
- if (yydebug) \
- yysymprint Args; \
-} while (0)
-
-# define YYDSYMPRINTF(Title, Token, Value, Location) \
-do { \
- if (yydebug) \
- { \
- YYFPRINTF (stderr, "%s ", Title); \
- yysymprint (stderr, \
- Token, Value); \
- YYFPRINTF (stderr, "\n"); \
- } \
-} while (0)
-
-/*------------------------------------------------------------------.
-| yy_stack_print -- Print the state stack from its BOTTOM up to its |
-| TOP (included).                                                   |
-`------------------------------------------------------------------*/
-
-#if defined (__STDC__) || defined (__cplusplus)
-static void
-yy_stack_print (short *bottom, short *top)
-#else
-static void
-yy_stack_print (bottom, top)
- short *bottom;
- short *top;
-#endif
-{
- YYFPRINTF (stderr, "Stack now");
- for (/* Nothing. */; bottom <= top; ++bottom)
- YYFPRINTF (stderr, " %d", *bottom);
- YYFPRINTF (stderr, "\n");
-}
-
-# define YY_STACK_PRINT(Bottom, Top) \
-do { \
- if (yydebug) \
- yy_stack_print ((Bottom), (Top)); \
-} while (0)
-
-
-/*------------------------------------------------.
-| Report that the YYRULE is going to be reduced. |
-`------------------------------------------------*/
-
-#if defined (__STDC__) || defined (__cplusplus)
-static void
-yy_reduce_print (int yyrule)
-#else
-static void
-yy_reduce_print (yyrule)
- int yyrule;
-#endif
-{
- int yyi;
- unsigned int yylineno = yyrline[yyrule];
- YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ",
- yyrule - 1, yylineno);
- /* Print the symbols being reduced, and their result. */
- for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++)
- YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]);
- YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]);
-}
-
-# define YY_REDUCE_PRINT(Rule) \
-do { \
- if (yydebug) \
- yy_reduce_print (Rule); \
-} while (0)
-
-/* Nonzero means print parse trace. It is left uninitialized so that
- multiple parsers can coexist. */
-int yydebug;
-#else /* !YYDEBUG */
-# define YYDPRINTF(Args)
-# define YYDSYMPRINT(Args)
-# define YYDSYMPRINTF(Title, Token, Value, Location)
-# define YY_STACK_PRINT(Bottom, Top)
-# define YY_REDUCE_PRINT(Rule)
-#endif /* !YYDEBUG */
-
-
-/* YYINITDEPTH -- initial size of the parser's stacks. */
-#ifndef YYINITDEPTH
-# define YYINITDEPTH 200
-#endif
-
-/* YYMAXDEPTH -- maximum size the stacks can grow to (effective only
- if the built-in stack extension method is used).
-
- Do not make this value too large; the results are undefined if
- SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH)
- evaluated with infinite-precision integer arithmetic. */
-
-#if YYMAXDEPTH == 0
-# undef YYMAXDEPTH
-#endif
-
-#ifndef YYMAXDEPTH
-# define YYMAXDEPTH 10000
-#endif
-
-
-
-#if YYERROR_VERBOSE
-
-# ifndef yystrlen
-# if defined (__GLIBC__) && defined (_STRING_H)
-# define yystrlen strlen
-# else
-/* Return the length of YYSTR. */
-static YYSIZE_T
-# if defined (__STDC__) || defined (__cplusplus)
-yystrlen (const char *yystr)
-# else
-yystrlen (yystr)
- const char *yystr;
-# endif
-{
- register const char *yys = yystr;
-
- while (*yys++ != '\0')
- continue;
-
- return yys - yystr - 1;
-}
-# endif
-# endif
-
-# ifndef yystpcpy
-# if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE)
-# define yystpcpy stpcpy
-# else
-/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in
- YYDEST. */
-static char *
-# if defined (__STDC__) || defined (__cplusplus)
-yystpcpy (char *yydest, const char *yysrc)
-# else
-yystpcpy (yydest, yysrc)
- char *yydest;
- const char *yysrc;
-# endif
-{
- register char *yyd = yydest;
- register const char *yys = yysrc;
-
- while ((*yyd++ = *yys++) != '\0')
- continue;
-
- return yyd - 1;
-}
-# endif
-# endif
-
-#endif /* !YYERROR_VERBOSE */
-
-
-
-#if YYDEBUG
-/*--------------------------------.
-| Print this symbol on YYOUTPUT. |
-`--------------------------------*/
-
-#if defined (__STDC__) || defined (__cplusplus)
-static void
-yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep)
-#else
-static void
-yysymprint (yyoutput, yytype, yyvaluep)
- FILE *yyoutput;
- int yytype;
- YYSTYPE *yyvaluep;
-#endif
-{
- /* Pacify ``unused variable'' warnings. */
- (void) yyvaluep;
-
- if (yytype < YYNTOKENS)
- {
- YYFPRINTF (yyoutput, "token %s (", yytname[yytype]);
-# ifdef YYPRINT
- YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep);
-# endif
- }
- else
- YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]);
-
- switch (yytype)
- {
- default:
- break;
- }
- YYFPRINTF (yyoutput, ")");
-}
-
-#endif /* ! YYDEBUG */
-/*-----------------------------------------------.
-| Release the memory associated to this symbol. |
-`-----------------------------------------------*/
-
-#if defined (__STDC__) || defined (__cplusplus)
-static void
-yydestruct (int yytype, YYSTYPE *yyvaluep)
-#else
-static void
-yydestruct (yytype, yyvaluep)
- int yytype;
- YYSTYPE *yyvaluep;
-#endif
-{
- /* Pacify ``unused variable'' warnings. */
- (void) yyvaluep;
-
- switch (yytype)
- {
-
- default:
- break;
- }
-}
-
-
-/* Prevent warnings from -Wmissing-prototypes. */
-
-#ifdef YYPARSE_PARAM
-# if defined (__STDC__) || defined (__cplusplus)
-int yyparse (void *YYPARSE_PARAM);
-# else
-int yyparse ();
-# endif
-#else /* ! YYPARSE_PARAM */
-#if defined (__STDC__) || defined (__cplusplus)
-int yyparse (void);
-#else
-int yyparse ();
-#endif
-#endif /* ! YYPARSE_PARAM */
-
-
-
-/* The lookahead symbol. */
-int yychar;
-
-/* The semantic value of the lookahead symbol. */
-YYSTYPE yylval;
-
-/* Number of syntax errors so far. */
-int yynerrs;
-
-
-
-/*----------.
-| yyparse. |
-`----------*/
-
-#ifdef YYPARSE_PARAM
-# if defined (__STDC__) || defined (__cplusplus)
-int yyparse (void *YYPARSE_PARAM)
-# else
-int yyparse (YYPARSE_PARAM)
- void *YYPARSE_PARAM;
-# endif
-#else /* ! YYPARSE_PARAM */
-#if defined (__STDC__) || defined (__cplusplus)
-int
-yyparse (void)
-#else
-int
-yyparse ()
-
-#endif
-#endif
-{
-
- register int yystate;
- register int yyn;
- int yyresult;
- /* Number of tokens to shift before error messages enabled. */
- int yyerrstatus;
- /* Lookahead token as an internal (translated) token number. */
- int yytoken = 0;
-
- /* Three stacks and their tools:
- `yyss': related to states,
- `yyvs': related to semantic values,
- `yyls': related to locations.
-
- Refer to the stacks thru separate pointers, to allow yyoverflow
- to reallocate them elsewhere. */
-
- /* The state stack. */
- short yyssa[YYINITDEPTH];
- short *yyss = yyssa;
- register short *yyssp;
-
- /* The semantic value stack. */
- YYSTYPE yyvsa[YYINITDEPTH];
- YYSTYPE *yyvs = yyvsa;
- register YYSTYPE *yyvsp;
-
-
-
-#define YYPOPSTACK (yyvsp--, yyssp--)
-
- YYSIZE_T yystacksize = YYINITDEPTH;
-
- /* The variables used to return semantic value and location from the
- action routines. */
- YYSTYPE yyval;
-
-
- /* When reducing, the number of symbols on the RHS of the reduced
- rule. */
- int yylen;
-
- YYDPRINTF ((stderr, "Starting parse\n"));
-
- yystate = 0;
- yyerrstatus = 0;
- yynerrs = 0;
- yychar = YYEMPTY; /* Cause a token to be read. */
-
- /* Initialize stack pointers.
- Waste one element of value and location stack
- so that they stay on the same level as the state stack.
- The wasted elements are never initialized. */
-
- yyssp = yyss;
- yyvsp = yyvs;
-
- goto yysetstate;
-
-/*------------------------------------------------------------.
-| yynewstate -- Push a new state, which is found in yystate. |
-`------------------------------------------------------------*/
- yynewstate:
- /* In all cases, when you get here, the value and location stacks
-     have just been pushed, so pushing a state here evens the stacks.
- */
- yyssp++;
-
- yysetstate:
- *yyssp = yystate;
-
- if (yyss + yystacksize - 1 <= yyssp)
- {
- /* Get the current used size of the three stacks, in elements. */
- YYSIZE_T yysize = yyssp - yyss + 1;
-
-#ifdef yyoverflow
- {
- /* Give user a chance to reallocate the stack. Use copies of
- these so that the &'s don't force the real ones into
- memory. */
- YYSTYPE *yyvs1 = yyvs;
- short *yyss1 = yyss;
-
-
- /* Each stack pointer address is followed by the size of the
- data in use in that stack, in bytes. This used to be a
- conditional around just the two extra args, but that might
- be undefined if yyoverflow is a macro. */
- yyoverflow ("parser stack overflow",
- &yyss1, yysize * sizeof (*yyssp),
- &yyvs1, yysize * sizeof (*yyvsp),
-
- &yystacksize);
-
- yyss = yyss1;
- yyvs = yyvs1;
- }
-#else /* no yyoverflow */
-# ifndef YYSTACK_RELOCATE
- goto yyoverflowlab;
-# else
- /* Extend the stack our own way. */
- if (YYMAXDEPTH <= yystacksize)
- goto yyoverflowlab;
- yystacksize *= 2;
- if (YYMAXDEPTH < yystacksize)
- yystacksize = YYMAXDEPTH;
-
- {
- short *yyss1 = yyss;
- union yyalloc *yyptr =
- (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize));
- if (! yyptr)
- goto yyoverflowlab;
- YYSTACK_RELOCATE (yyss);
- YYSTACK_RELOCATE (yyvs);
-
-# undef YYSTACK_RELOCATE
- if (yyss1 != yyssa)
- YYSTACK_FREE (yyss1);
- }
-# endif
-#endif /* no yyoverflow */
-
- yyssp = yyss + yysize - 1;
- yyvsp = yyvs + yysize - 1;
-
-
- YYDPRINTF ((stderr, "Stack size increased to %lu\n",
- (unsigned long int) yystacksize));
-
- if (yyss + yystacksize - 1 <= yyssp)
- YYABORT;
- }
-
- YYDPRINTF ((stderr, "Entering state %d\n", yystate));
-
- goto yybackup;
-
-/*-----------.
-| yybackup. |
-`-----------*/
-yybackup:
-
-/* Do appropriate processing given the current state. */
-/* Read a lookahead token if we need one and don't already have one. */
-/* yyresume: */
-
- /* First try to decide what to do without reference to lookahead token. */
-
- yyn = yypact[yystate];
- if (yyn == YYPACT_NINF)
- goto yydefault;
-
-  /* Not known => get a lookahead token if we don't already have one.  */
-
- /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */
- if (yychar == YYEMPTY)
- {
- YYDPRINTF ((stderr, "Reading a token: "));
- yychar = YYLEX;
- }
-
- if (yychar <= YYEOF)
- {
- yychar = yytoken = YYEOF;
- YYDPRINTF ((stderr, "Now at end of input.\n"));
- }
- else
- {
- yytoken = YYTRANSLATE (yychar);
- YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc);
- }
-
- /* If the proper action on seeing token YYTOKEN is to reduce or to
- detect an error, take that action. */
- yyn += yytoken;
- if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken)
- goto yydefault;
- yyn = yytable[yyn];
- if (yyn <= 0)
- {
- if (yyn == 0 || yyn == YYTABLE_NINF)
- goto yyerrlab;
- yyn = -yyn;
- goto yyreduce;
- }
-
- if (yyn == YYFINAL)
- YYACCEPT;
-
- /* Shift the lookahead token. */
- YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken]));
-
- /* Discard the token being shifted unless it is eof. */
- if (yychar != YYEOF)
- yychar = YYEMPTY;
-
- *++yyvsp = yylval;
-
-
- /* Count tokens shifted since error; after three, turn off error
- status. */
- if (yyerrstatus)
- yyerrstatus--;
-
- yystate = yyn;
- goto yynewstate;
-
-
-/*-----------------------------------------------------------.
-| yydefault -- do the default action for the current state. |
-`-----------------------------------------------------------*/
-yydefault:
- yyn = yydefact[yystate];
- if (yyn == 0)
- goto yyerrlab;
- goto yyreduce;
-
-
-/*-----------------------------.
-| yyreduce -- Do a reduction. |
-`-----------------------------*/
-yyreduce:
- /* yyn is the number of a rule to reduce with. */
- yylen = yyr2[yyn];
-
- /* If YYLEN is nonzero, implement the default value of the action:
- `$$ = $1'.
-
- Otherwise, the following line sets YYVAL to garbage.
- This behavior is undocumented and Bison
- users should not rely upon it. Assigning to YYVAL
- unconditionally makes the parser a bit smaller, and it avoids a
- GCC warning that YYVAL may be used uninitialized. */
- yyval = yyvsp[1-yylen];
-
-
- YY_REDUCE_PRINT (yyn);
- switch (yyn)
- {
- case 3:
-#line 142 "jamgram.y"
- { parse_save( yyvsp[0].parse ); }
- break;
-
- case 4:
-#line 153 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; }
- break;
-
- case 5:
-#line 155 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; }
- break;
-
- case 6:
-#line 159 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; }
- break;
-
- case 7:
-#line 161 "jamgram.y"
- { yyval.parse = prules( yyvsp[-1].parse, yyvsp[0].parse ); }
- break;
-
- case 8:
-#line 163 "jamgram.y"
- { yyval.parse = plocal( yyvsp[-3].parse, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 9:
-#line 167 "jamgram.y"
- { yyval.parse = pnull(); }
- break;
-
- case 10:
-#line 171 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_SET; }
- break;
-
- case 11:
-#line 173 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_APPEND; }
- break;
-
- case 12:
-#line 177 "jamgram.y"
- { yyval.parse = yyvsp[-1].parse; }
- break;
-
- case 13:
-#line 179 "jamgram.y"
- { yyval.parse = P0; }
- break;
-
- case 14:
-#line 183 "jamgram.y"
- { yyval.number = 1; }
- break;
-
- case 15:
-#line 185 "jamgram.y"
- { yyval.number = 0; }
- break;
-
- case 16:
-#line 189 "jamgram.y"
- { yyval.parse = yyvsp[-1].parse; }
- break;
-
- case 17:
-#line 191 "jamgram.y"
- { yyval.parse = pincl( yyvsp[-1].parse ); }
- break;
-
- case 18:
-#line 193 "jamgram.y"
- { yyval.parse = prule( yyvsp[-2].string, yyvsp[-1].parse ); }
- break;
-
- case 19:
-#line 195 "jamgram.y"
- { yyval.parse = pset( yyvsp[-3].parse, yyvsp[-1].parse, yyvsp[-2].number ); }
- break;
-
- case 20:
-#line 197 "jamgram.y"
- { yyval.parse = pset1( yyvsp[-5].parse, yyvsp[-3].parse, yyvsp[-1].parse, yyvsp[-2].number ); }
- break;
-
- case 21:
-#line 199 "jamgram.y"
- { yyval.parse = yyvsp[-1].parse; }
- break;
-
- case 22:
-#line 201 "jamgram.y"
- { yyval.parse = pfor( yyvsp[-5].string, yyvsp[-3].parse, yyvsp[-1].parse, yyvsp[-6].number ); }
- break;
-
- case 23:
-#line 203 "jamgram.y"
- { yyval.parse = pswitch( yyvsp[-3].parse, yyvsp[-1].parse ); }
- break;
-
- case 24:
-#line 205 "jamgram.y"
- { yyval.parse = pif( yyvsp[-3].parse, yyvsp[-1].parse, pnull() ); }
- break;
-
- case 25:
-#line 207 "jamgram.y"
- { yyval.parse = pmodule( yyvsp[-3].parse, yyvsp[-1].parse ); }
- break;
-
- case 26:
-#line 209 "jamgram.y"
- { yyval.parse = pclass( yyvsp[-3].parse, yyvsp[-1].parse ); }
- break;
-
- case 27:
-#line 211 "jamgram.y"
- { yyval.parse = pwhile( yyvsp[-3].parse, yyvsp[-1].parse ); }
- break;
-
- case 28:
-#line 213 "jamgram.y"
- { yyval.parse = pif( yyvsp[-5].parse, yyvsp[-3].parse, yyvsp[0].parse ); }
- break;
-
- case 29:
-#line 215 "jamgram.y"
- { yyval.parse = psetc( yyvsp[-2].string, yyvsp[0].parse, yyvsp[-1].parse, yyvsp[-4].number ); }
- break;
-
- case 30:
-#line 217 "jamgram.y"
- { yyval.parse = pon( yyvsp[-1].parse, yyvsp[0].parse ); }
- break;
-
- case 31:
-#line 219 "jamgram.y"
- { yymode( SCAN_STRING ); }
- break;
-
- case 32:
-#line 221 "jamgram.y"
- { yymode( SCAN_NORMAL ); }
- break;
-
- case 33:
-#line 223 "jamgram.y"
- { yyval.parse = psete( yyvsp[-6].string,yyvsp[-5].parse,yyvsp[-2].string,yyvsp[-7].number ); }
- break;
-
- case 34:
-#line 231 "jamgram.y"
- { yyval.number = ASSIGN_SET; }
- break;
-
- case 35:
-#line 233 "jamgram.y"
- { yyval.number = ASSIGN_APPEND; }
- break;
-
- case 36:
-#line 235 "jamgram.y"
- { yyval.number = ASSIGN_DEFAULT; }
- break;
-
- case 37:
-#line 237 "jamgram.y"
- { yyval.number = ASSIGN_DEFAULT; }
- break;
-
- case 38:
-#line 244 "jamgram.y"
- { yyval.parse = peval( EXPR_EXISTS, yyvsp[0].parse, pnull() ); }
- break;
-
- case 39:
-#line 246 "jamgram.y"
- { yyval.parse = peval( EXPR_EQUALS, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 40:
-#line 248 "jamgram.y"
- { yyval.parse = peval( EXPR_NOTEQ, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 41:
-#line 250 "jamgram.y"
- { yyval.parse = peval( EXPR_LESS, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 42:
-#line 252 "jamgram.y"
- { yyval.parse = peval( EXPR_LESSEQ, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 43:
-#line 254 "jamgram.y"
- { yyval.parse = peval( EXPR_MORE, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 44:
-#line 256 "jamgram.y"
- { yyval.parse = peval( EXPR_MOREEQ, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 45:
-#line 258 "jamgram.y"
- { yyval.parse = peval( EXPR_AND, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 46:
-#line 260 "jamgram.y"
- { yyval.parse = peval( EXPR_AND, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 47:
-#line 262 "jamgram.y"
- { yyval.parse = peval( EXPR_OR, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 48:
-#line 264 "jamgram.y"
- { yyval.parse = peval( EXPR_OR, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 49:
-#line 266 "jamgram.y"
- { yyval.parse = peval( EXPR_IN, yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 50:
-#line 268 "jamgram.y"
- { yyval.parse = peval( EXPR_NOT, yyvsp[0].parse, pnull() ); }
- break;
-
- case 51:
-#line 270 "jamgram.y"
- { yyval.parse = yyvsp[-1].parse; }
- break;
-
- case 52:
-#line 281 "jamgram.y"
- { yyval.parse = P0; }
- break;
-
- case 53:
-#line 283 "jamgram.y"
- { yyval.parse = pnode( yyvsp[-1].parse, yyvsp[0].parse ); }
- break;
-
- case 54:
-#line 287 "jamgram.y"
- { yyval.parse = psnode( yyvsp[-2].string, yyvsp[0].parse ); }
- break;
-
- case 55:
-#line 296 "jamgram.y"
- { yyval.parse = pnode( P0, yyvsp[0].parse ); }
- break;
-
- case 56:
-#line 298 "jamgram.y"
- { yyval.parse = pnode( yyvsp[0].parse, yyvsp[-2].parse ); }
- break;
-
- case 57:
-#line 308 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; yymode( SCAN_NORMAL ); }
- break;
-
- case 58:
-#line 312 "jamgram.y"
- { yyval.parse = pnull(); yymode( SCAN_PUNCT ); }
- break;
-
- case 59:
-#line 314 "jamgram.y"
- { yyval.parse = pappend( yyvsp[-1].parse, yyvsp[0].parse ); }
- break;
-
- case 60:
-#line 318 "jamgram.y"
- { yyval.parse = plist( yyvsp[0].string ); }
- break;
-
- case 61:
-#line 319 "jamgram.y"
- { yymode( SCAN_NORMAL ); }
- break;
-
- case 62:
-#line 320 "jamgram.y"
- { yyval.parse = yyvsp[-1].parse; }
- break;
-
- case 63:
-#line 329 "jamgram.y"
- { yyval.parse = prule( yyvsp[-1].string, yyvsp[0].parse ); }
- break;
-
- case 64:
-#line 331 "jamgram.y"
- { yyval.parse = pon( yyvsp[-2].parse, prule( yyvsp[-1].string, yyvsp[0].parse ) ); }
- break;
-
- case 65:
-#line 333 "jamgram.y"
- { yyval.parse = pon( yyvsp[-2].parse, yyvsp[0].parse ); }
- break;
-
- case 66:
-#line 343 "jamgram.y"
- { yyval.number = 0; }
- break;
-
- case 67:
-#line 345 "jamgram.y"
- { yyval.number = yyvsp[-1].number | yyvsp[0].number; }
- break;
-
- case 68:
-#line 349 "jamgram.y"
- { yyval.number = EXEC_UPDATED; }
- break;
-
- case 69:
-#line 351 "jamgram.y"
- { yyval.number = EXEC_TOGETHER; }
- break;
-
- case 70:
-#line 353 "jamgram.y"
- { yyval.number = EXEC_IGNORE; }
- break;
-
- case 71:
-#line 355 "jamgram.y"
- { yyval.number = EXEC_QUIETLY; }
- break;
-
- case 72:
-#line 357 "jamgram.y"
- { yyval.number = EXEC_PIECEMEAL; }
- break;
-
- case 73:
-#line 359 "jamgram.y"
- { yyval.number = EXEC_EXISTING; }
- break;
-
- case 74:
-#line 368 "jamgram.y"
- { yyval.parse = pnull(); }
- break;
-
- case 75:
-#line 370 "jamgram.y"
- { yyval.parse = yyvsp[0].parse; }
- break;
-
-
- }
-
-/* Line 991 of yacc.c. */
-#line 1621 "y.tab.c"
-
- yyvsp -= yylen;
- yyssp -= yylen;
-
-
- YY_STACK_PRINT (yyss, yyssp);
-
- *++yyvsp = yyval;
-
-
- /* Now `shift' the result of the reduction. Determine what state
- that goes to, based on the state we popped back to and the rule
- number reduced by. */
-
- yyn = yyr1[yyn];
-
- yystate = yypgoto[yyn - YYNTOKENS] + *yyssp;
- if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp)
- yystate = yytable[yystate];
- else
- yystate = yydefgoto[yyn - YYNTOKENS];
-
- goto yynewstate;
-
-
-/*------------------------------------.
-| yyerrlab -- here on detecting error |
-`------------------------------------*/
-yyerrlab:
- /* If not already recovering from an error, report this error. */
- if (!yyerrstatus)
- {
- ++yynerrs;
-#if YYERROR_VERBOSE
- yyn = yypact[yystate];
-
- if (YYPACT_NINF < yyn && yyn < YYLAST)
- {
- YYSIZE_T yysize = 0;
- int yytype = YYTRANSLATE (yychar);
- char *yymsg;
- int yyx, yycount;
-
- yycount = 0;
- /* Start YYX at -YYN if negative to avoid negative indexes in
- YYCHECK. */
- for (yyx = yyn < 0 ? -yyn : 0;
- yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++)
- if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR)
- yysize += yystrlen (yytname[yyx]) + 15, yycount++;
- yysize += yystrlen ("syntax error, unexpected ") + 1;
- yysize += yystrlen (yytname[yytype]);
- yymsg = (char *) YYSTACK_ALLOC (yysize);
- if (yymsg != 0)
- {
- char *yyp = yystpcpy (yymsg, "syntax error, unexpected ");
- yyp = yystpcpy (yyp, yytname[yytype]);
-
- if (yycount < 5)
- {
- yycount = 0;
- for (yyx = yyn < 0 ? -yyn : 0;
- yyx < (int) (sizeof (yytname) / sizeof (char *));
- yyx++)
- if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR)
- {
- const char *yyq = ! yycount ? ", expecting " : " or ";
- yyp = yystpcpy (yyp, yyq);
- yyp = yystpcpy (yyp, yytname[yyx]);
- yycount++;
- }
- }
- yyerror (yymsg);
- YYSTACK_FREE (yymsg);
- }
- else
- yyerror ("syntax error; also virtual memory exhausted");
- }
- else
-#endif /* YYERROR_VERBOSE */
- yyerror ("syntax error");
- }
-
-
-
- if (yyerrstatus == 3)
- {
- /* If just tried and failed to reuse lookahead token after an
- error, discard it. */
-
- /* Return failure if at end of input. */
- if (yychar == YYEOF)
- {
- /* Pop the error token. */
- YYPOPSTACK;
- /* Pop the rest of the stack. */
- while (yyss < yyssp)
- {
- YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp);
- yydestruct (yystos[*yyssp], yyvsp);
- YYPOPSTACK;
- }
- YYABORT;
- }
-
- YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc);
- yydestruct (yytoken, &yylval);
- yychar = YYEMPTY;
-
- }
-
- /* Else will try to reuse lookahead token after shifting the error
- token. */
- goto yyerrlab2;
-
-
-/*----------------------------------------------------.
-| yyerrlab1 -- error raised explicitly by an action. |
-`----------------------------------------------------*/
-yyerrlab1:
-
- /* Suppress GCC warning that yyerrlab1 is unused when no action
- invokes YYERROR. */
-#if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__)
- __attribute__ ((__unused__))
-#endif
-
-
- goto yyerrlab2;
-
-
-/*---------------------------------------------------------------.
-| yyerrlab2 -- pop states until the error token can be shifted. |
-`---------------------------------------------------------------*/
-yyerrlab2:
- yyerrstatus = 3; /* Each real token shifted decrements this. */
-
- for (;;)
- {
- yyn = yypact[yystate];
- if (yyn != YYPACT_NINF)
- {
- yyn += YYTERROR;
- if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR)
- {
- yyn = yytable[yyn];
- if (0 < yyn)
- break;
- }
- }
-
- /* Pop the current state because it cannot handle the error token. */
- if (yyssp == yyss)
- YYABORT;
-
- YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp);
- yydestruct (yystos[yystate], yyvsp);
- yyvsp--;
- yystate = *--yyssp;
-
- YY_STACK_PRINT (yyss, yyssp);
- }
-
- if (yyn == YYFINAL)
- YYACCEPT;
-
- YYDPRINTF ((stderr, "Shifting error token, "));
-
- *++yyvsp = yylval;
-
-
- yystate = yyn;
- goto yynewstate;
-
-
-/*-------------------------------------.
-| yyacceptlab -- YYACCEPT comes here. |
-`-------------------------------------*/
-yyacceptlab:
- yyresult = 0;
- goto yyreturn;
-
-/*-----------------------------------.
-| yyabortlab -- YYABORT comes here. |
-`-----------------------------------*/
-yyabortlab:
- yyresult = 1;
- goto yyreturn;
-
-#ifndef yyoverflow
-/*----------------------------------------------.
-| yyoverflowlab -- parser overflow comes here. |
-`----------------------------------------------*/
-yyoverflowlab:
- yyerror ("parser stack overflow");
- yyresult = 2;
- /* Fall through. */
-#endif
-
-yyreturn:
-#ifndef yyoverflow
- if (yyss != yyssa)
- YYSTACK_FREE (yyss);
-#endif
- return yyresult;
-}
-
-
-
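
The file deleted above is the Bison-generated engine for jam's grammar: yyparse() pulls token codes from yylex(), assembles a PARSE tree through the p*() wrapper macros in the user-declaration section, and hands the finished tree to parse_save() in the top-level 'run' action. A minimal sketch of the calling side, in C, is shown below; the helper name parse_one_file is hypothetical and jam's real driver is not part of this diff.

    /* Hypothetical driver sketch.  yyparse() is the entry point generated
       above: it consumes the token stream from yylex(), stores the finished
       PARSE tree via parse_save(), and returns 0 on success, 1 on a syntax
       error, or 2 on parser stack overflow (see yyacceptlab, yyabortlab and
       yyoverflowlab above). */
    int yyparse (void);                 /* provided by jamgram.c */

    static int parse_one_file (void)
    {
        return yyparse ();
    }
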
diff --git a/jam-files/engine/jamgram.h b/jam-files/engine/jamgram.h
deleted file mode 100644
index 3cb76564..00000000
--- a/jam-files/engine/jamgram.h
+++ /dev/null
@@ -1,140 +0,0 @@
-/* A Bison parser, made by GNU Bison 1.875. */
-
-/* Skeleton parser for Yacc-like parsing with Bison,
- Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc.
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2, or (at your option)
- any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 59 Temple Place - Suite 330,
- Boston, MA 02111-1307, USA. */
-
-/* As a special exception, when this file is copied by Bison into a
- Bison output file, you may use that output file without restriction.
- This special exception was added by the Free Software Foundation
- in version 1.24 of Bison. */
-
-/* Tokens. */
-#ifndef YYTOKENTYPE
-# define YYTOKENTYPE
- /* Put the tokens into the symbol table, so that GDB and other debuggers
- know about them. */
- enum yytokentype {
- _BANG_t = 258,
- _BANG_EQUALS_t = 259,
- _AMPER_t = 260,
- _AMPERAMPER_t = 261,
- _LPAREN_t = 262,
- _RPAREN_t = 263,
- _PLUS_EQUALS_t = 264,
- _COLON_t = 265,
- _SEMIC_t = 266,
- _LANGLE_t = 267,
- _LANGLE_EQUALS_t = 268,
- _EQUALS_t = 269,
- _RANGLE_t = 270,
- _RANGLE_EQUALS_t = 271,
- _QUESTION_EQUALS_t = 272,
- _LBRACKET_t = 273,
- _RBRACKET_t = 274,
- ACTIONS_t = 275,
- BIND_t = 276,
- CASE_t = 277,
- CLASS_t = 278,
- DEFAULT_t = 279,
- ELSE_t = 280,
- EXISTING_t = 281,
- FOR_t = 282,
- IF_t = 283,
- IGNORE_t = 284,
- IN_t = 285,
- INCLUDE_t = 286,
- LOCAL_t = 287,
- MODULE_t = 288,
- ON_t = 289,
- PIECEMEAL_t = 290,
- QUIETLY_t = 291,
- RETURN_t = 292,
- RULE_t = 293,
- SWITCH_t = 294,
- TOGETHER_t = 295,
- UPDATED_t = 296,
- WHILE_t = 297,
- _LBRACE_t = 298,
- _BAR_t = 299,
- _BARBAR_t = 300,
- _RBRACE_t = 301,
- ARG = 302,
- STRING = 303
- };
-#endif
-#define _BANG_t 258
-#define _BANG_EQUALS_t 259
-#define _AMPER_t 260
-#define _AMPERAMPER_t 261
-#define _LPAREN_t 262
-#define _RPAREN_t 263
-#define _PLUS_EQUALS_t 264
-#define _COLON_t 265
-#define _SEMIC_t 266
-#define _LANGLE_t 267
-#define _LANGLE_EQUALS_t 268
-#define _EQUALS_t 269
-#define _RANGLE_t 270
-#define _RANGLE_EQUALS_t 271
-#define _QUESTION_EQUALS_t 272
-#define _LBRACKET_t 273
-#define _RBRACKET_t 274
-#define ACTIONS_t 275
-#define BIND_t 276
-#define CASE_t 277
-#define CLASS_t 278
-#define DEFAULT_t 279
-#define ELSE_t 280
-#define EXISTING_t 281
-#define FOR_t 282
-#define IF_t 283
-#define IGNORE_t 284
-#define IN_t 285
-#define INCLUDE_t 286
-#define LOCAL_t 287
-#define MODULE_t 288
-#define ON_t 289
-#define PIECEMEAL_t 290
-#define QUIETLY_t 291
-#define RETURN_t 292
-#define RULE_t 293
-#define SWITCH_t 294
-#define TOGETHER_t 295
-#define UPDATED_t 296
-#define WHILE_t 297
-#define _LBRACE_t 298
-#define _BAR_t 299
-#define _BARBAR_t 300
-#define _RBRACE_t 301
-#define ARG 302
-#define STRING 303
-
-
-
-
-#if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED)
-typedef int YYSTYPE;
-# define yystype YYSTYPE /* obsolescent; will be withdrawn */
-# define YYSTYPE_IS_DECLARED 1
-# define YYSTYPE_IS_TRIVIAL 1
-#endif
-
-extern YYSTYPE yylval;
-
-
-
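
The header deleted above is the scanner's half of that interface: the yytokentype codes and the shared yylval carrying each token's semantic value. Note that the semantic actions in jamgram.c access members named .string, .parse and .number, so in jam's actual build YYSTYPE is a struct with those fields; the "typedef int YYSTYPE" above is only the fallback used when nothing else defines it. A toy scanner obeying this contract might look like the sketch below. It is purely illustrative: jam's real scanner handles the scan modes (yymode) and newstr() interning, and the fixed keyword list and strdup() here are stand-ins.

    #include <stdio.h>
    #include <string.h>
    #include "jamgram.h"                    /* token codes, YYSTYPE, yylval */

    /* Toy stand-in for jam's scanner: one whitespace-separated word per
       token, with its text handed to the parser through yylval.string.    */
    int yylex (void)
    {
        static char word[ 256 ];
        if ( scanf( "%255s", word ) != 1 )
            return 0;                       /* 0 signals end of input       */
        if ( !strcmp( word, "if" ) ) return IF_t;
        if ( !strcmp( word, ";"  ) ) return _SEMIC_t;
        if ( !strcmp( word, "{"  ) ) return _LBRACE_t;
        if ( !strcmp( word, "}"  ) ) return _RBRACE_t;
        yylval.string = strdup( word );     /* real jam uses newstr()       */
        return ARG;
    }
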
diff --git a/jam-files/engine/jamgram.y b/jam-files/engine/jamgram.y
deleted file mode 100644
index c26b1e1b..00000000
--- a/jam-files/engine/jamgram.y
+++ /dev/null
@@ -1,371 +0,0 @@
-%token _BANG_t
-%token _BANG_EQUALS_t
-%token _AMPER_t
-%token _AMPERAMPER_t
-%token _LPAREN_t
-%token _RPAREN_t
-%token _PLUS_EQUALS_t
-%token _COLON_t
-%token _SEMIC_t
-%token _LANGLE_t
-%token _LANGLE_EQUALS_t
-%token _EQUALS_t
-%token _RANGLE_t
-%token _RANGLE_EQUALS_t
-%token _QUESTION_EQUALS_t
-%token _LBRACKET_t
-%token _RBRACKET_t
-%token ACTIONS_t
-%token BIND_t
-%token CASE_t
-%token CLASS_t
-%token DEFAULT_t
-%token ELSE_t
-%token EXISTING_t
-%token FOR_t
-%token IF_t
-%token IGNORE_t
-%token IN_t
-%token INCLUDE_t
-%token LOCAL_t
-%token MODULE_t
-%token ON_t
-%token PIECEMEAL_t
-%token QUIETLY_t
-%token RETURN_t
-%token RULE_t
-%token SWITCH_t
-%token TOGETHER_t
-%token UPDATED_t
-%token WHILE_t
-%token _LBRACE_t
-%token _BAR_t
-%token _BARBAR_t
-%token _RBRACE_t
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * jamgram.yy - jam grammar
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- * 08/31/94 (seiwald) - Allow ?= as alias for "default =".
- * 09/15/94 (seiwald) - if conditionals take only single arguments, so
- * that 'if foo == bar' gives syntax error (use =).
- * 02/11/95 (seiwald) - when scanning arguments to rules, only treat
- * punctuation keywords as keywords. All arg lists
- * are terminated with punctuation keywords.
- *
- * 09/11/00 (seiwald) - Support for function calls:
- *
- * Rules now return lists (LIST *), rather than void.
- *
- * New "[ rule ]" syntax evals rule into a LIST.
- *
- * Lists are now generated by compile_list() and
- * compile_append(), and any other rule that indirectly
- * makes a list, rather than being built directly here,
- *		so that list values can contain rule evaluations.
- *
- * New 'return' rule sets the return value, though
- * other statements also may have return values.
- *
- * 'run' production split from 'block' production so
- * that empty blocks can be handled separately.
- */
-
-%token ARG STRING
-
-%left _BARBAR_t _BAR_t
-%left _AMPERAMPER_t _AMPER_t
-%left _EQUALS_t _BANG_EQUALS_t IN_t
-%left _LANGLE_t _LANGLE_EQUALS_t _RANGLE_t _RANGLE_EQUALS_t
-%left _BANG_t
-
-%{
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "compile.h"
-#include "newstr.h"
-#include "rules.h"
-
-# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
-
-# define F0 (LIST *(*)(PARSE *, FRAME *))0
-# define P0 (PARSE *)0
-# define S0 (char *)0
-
-# define pappend( l,r ) parse_make( compile_append,l,r,P0,S0,S0,0 )
-# define peval( c,l,r ) parse_make( compile_eval,l,r,P0,S0,S0,c )
-# define pfor( s,l,r,x ) parse_make( compile_foreach,l,r,P0,s,S0,x )
-# define pif( l,r,t ) parse_make( compile_if,l,r,t,S0,S0,0 )
-# define pincl( l ) parse_make( compile_include,l,P0,P0,S0,S0,0 )
-# define plist( s ) parse_make( compile_list,P0,P0,P0,s,S0,0 )
-# define plocal( l,r,t ) parse_make( compile_local,l,r,t,S0,S0,0 )
-# define pmodule( l,r ) parse_make( compile_module,l,r,P0,S0,S0,0 )
-# define pclass( l,r ) parse_make( compile_class,l,r,P0,S0,S0,0 )
-# define pnull() parse_make( compile_null,P0,P0,P0,S0,S0,0 )
-# define pon( l,r ) parse_make( compile_on,l,r,P0,S0,S0,0 )
-# define prule( s,p ) parse_make( compile_rule,p,P0,P0,s,S0,0 )
-# define prules( l,r ) parse_make( compile_rules,l,r,P0,S0,S0,0 )
-# define pset( l,r,a ) parse_make( compile_set,l,r,P0,S0,S0,a )
-# define pset1( l,r,t,a ) parse_make( compile_settings,l,r,t,S0,S0,a )
-# define psetc( s,p,a,l ) parse_make( compile_setcomp,p,a,P0,s,S0,l )
-# define psete( s,l,s1,f ) parse_make( compile_setexec,l,P0,P0,s,s1,f )
-# define pswitch( l,r ) parse_make( compile_switch,l,r,P0,S0,S0,0 )
-# define pwhile( l,r ) parse_make( compile_while,l,r,P0,S0,S0,0 )
-
-# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
-# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
-
-%}
-
-%%
-
-run : /* empty */
- /* do nothing */
- | rules
- { parse_save( $1.parse ); }
- ;
-
-/*
- * block - zero or more rules
- * rules - one or more rules
- * rule - any one of jam's rules
- * right-recursive so rules execute in order.
- */
-
-block : null
- { $$.parse = $1.parse; }
- | rules
- { $$.parse = $1.parse; }
- ;
-
-rules : rule
- { $$.parse = $1.parse; }
- | rule rules
- { $$.parse = prules( $1.parse, $2.parse ); }
- | LOCAL_t list assign_list_opt _SEMIC_t block
- { $$.parse = plocal( $2.parse, $3.parse, $5.parse ); }
- ;
-
-null : /* empty */
- { $$.parse = pnull(); }
- ;
-
-assign_list_opt : _EQUALS_t list
- { $$.parse = $2.parse; $$.number = ASSIGN_SET; }
- | null
- { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; }
- ;
-
-arglist_opt : _LPAREN_t lol _RPAREN_t
- { $$.parse = $2.parse; }
- |
- { $$.parse = P0; }
- ;
-
-local_opt : LOCAL_t
- { $$.number = 1; }
- | /* empty */
- { $$.number = 0; }
- ;
-
-rule : _LBRACE_t block _RBRACE_t
- { $$.parse = $2.parse; }
- | INCLUDE_t list _SEMIC_t
- { $$.parse = pincl( $2.parse ); }
- | ARG lol _SEMIC_t
- { $$.parse = prule( $1.string, $2.parse ); }
- | arg assign list _SEMIC_t
- { $$.parse = pset( $1.parse, $3.parse, $2.number ); }
- | arg ON_t list assign list _SEMIC_t
- { $$.parse = pset1( $1.parse, $3.parse, $5.parse, $4.number ); }
- | RETURN_t list _SEMIC_t
- { $$.parse = $2.parse; }
- | FOR_t local_opt ARG IN_t list _LBRACE_t block _RBRACE_t
- { $$.parse = pfor( $3.string, $5.parse, $7.parse, $2.number ); }
- | SWITCH_t list _LBRACE_t cases _RBRACE_t
- { $$.parse = pswitch( $2.parse, $4.parse ); }
- | IF_t expr _LBRACE_t block _RBRACE_t
- { $$.parse = pif( $2.parse, $4.parse, pnull() ); }
- | MODULE_t list _LBRACE_t block _RBRACE_t
- { $$.parse = pmodule( $2.parse, $4.parse ); }
- | CLASS_t lol _LBRACE_t block _RBRACE_t
- { $$.parse = pclass( $2.parse, $4.parse ); }
- | WHILE_t expr _LBRACE_t block _RBRACE_t
- { $$.parse = pwhile( $2.parse, $4.parse ); }
- | IF_t expr _LBRACE_t block _RBRACE_t ELSE_t rule
- { $$.parse = pif( $2.parse, $4.parse, $7.parse ); }
- | local_opt RULE_t ARG arglist_opt rule
- { $$.parse = psetc( $3.string, $5.parse, $4.parse, $1.number ); }
- | ON_t arg rule
- { $$.parse = pon( $2.parse, $3.parse ); }
- | ACTIONS_t eflags ARG bindlist _LBRACE_t
- { yymode( SCAN_STRING ); }
- STRING
- { yymode( SCAN_NORMAL ); }
- _RBRACE_t
- { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); }
- ;
-
-/*
- * assign - = or +=
- */
-
-assign : _EQUALS_t
- { $$.number = ASSIGN_SET; }
- | _PLUS_EQUALS_t
- { $$.number = ASSIGN_APPEND; }
- | _QUESTION_EQUALS_t
- { $$.number = ASSIGN_DEFAULT; }
- | DEFAULT_t _EQUALS_t
- { $$.number = ASSIGN_DEFAULT; }
- ;
-
-/*
- * expr - an expression for if
- */
-expr : arg
- { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); }
- | expr _EQUALS_t expr
- { $$.parse = peval( EXPR_EQUALS, $1.parse, $3.parse ); }
- | expr _BANG_EQUALS_t expr
- { $$.parse = peval( EXPR_NOTEQ, $1.parse, $3.parse ); }
- | expr _LANGLE_t expr
- { $$.parse = peval( EXPR_LESS, $1.parse, $3.parse ); }
- | expr _LANGLE_EQUALS_t expr
- { $$.parse = peval( EXPR_LESSEQ, $1.parse, $3.parse ); }
- | expr _RANGLE_t expr
- { $$.parse = peval( EXPR_MORE, $1.parse, $3.parse ); }
- | expr _RANGLE_EQUALS_t expr
- { $$.parse = peval( EXPR_MOREEQ, $1.parse, $3.parse ); }
- | expr _AMPER_t expr
- { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
- | expr _AMPERAMPER_t expr
- { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
- | expr _BAR_t expr
- { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
- | expr _BARBAR_t expr
- { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
- | arg IN_t list
- { $$.parse = peval( EXPR_IN, $1.parse, $3.parse ); }
- | _BANG_t expr
- { $$.parse = peval( EXPR_NOT, $2.parse, pnull() ); }
- | _LPAREN_t expr _RPAREN_t
- { $$.parse = $2.parse; }
- ;
-
-
-/*
- * cases - action elements inside a 'switch'
- * case - a single action element inside a 'switch'
- * right-recursive rule so cases can be examined in order.
- */
-
-cases : /* empty */
- { $$.parse = P0; }
- | case cases
- { $$.parse = pnode( $1.parse, $2.parse ); }
- ;
-
-case : CASE_t ARG _COLON_t block
- { $$.parse = psnode( $2.string, $4.parse ); }
- ;
-
-/*
- * lol - list of lists
- * right-recursive rule so that lists can be added in order.
- */
-
-lol : list
- { $$.parse = pnode( P0, $1.parse ); }
- | list _COLON_t lol
- { $$.parse = pnode( $3.parse, $1.parse ); }
- ;
-
-/*
- * list - zero or more args in a LIST
- * listp - list (in punctuation only mode)
- * arg - one ARG or function call
- */
-
-list : listp
- { $$.parse = $1.parse; yymode( SCAN_NORMAL ); }
- ;
-
-listp : /* empty */
- { $$.parse = pnull(); yymode( SCAN_PUNCT ); }
- | listp arg
- { $$.parse = pappend( $1.parse, $2.parse ); }
- ;
-
-arg : ARG
- { $$.parse = plist( $1.string ); }
- | _LBRACKET_t { yymode( SCAN_NORMAL ); } func _RBRACKET_t
- { $$.parse = $3.parse; }
- ;
-
-/*
- * func - a function call (inside [])
- * This needs to be split cleanly out of 'rule'
- */
-
-func : arg lol
- { $$.parse = prule( $1.string, $2.parse ); }
- | ON_t arg arg lol
- { $$.parse = pon( $2.parse, prule( $3.string, $4.parse ) ); }
- | ON_t arg RETURN_t list
- { $$.parse = pon( $2.parse, $4.parse ); }
- ;
-
-
-/*
- * eflags - zero or more modifiers to 'executes'
- * eflag - a single modifier to 'executes'
- */
-
-eflags : /* empty */
- { $$.number = 0; }
- | eflags eflag
- { $$.number = $1.number | $2.number; }
- ;
-
-eflag : UPDATED_t
- { $$.number = EXEC_UPDATED; }
- | TOGETHER_t
- { $$.number = EXEC_TOGETHER; }
- | IGNORE_t
- { $$.number = EXEC_IGNORE; }
- | QUIETLY_t
- { $$.number = EXEC_QUIETLY; }
- | PIECEMEAL_t
- { $$.number = EXEC_PIECEMEAL; }
- | EXISTING_t
- { $$.number = EXEC_EXISTING; }
- ;
-
-
-/*
- * bindlist - list of variables to bind for an action
- */
-
-bindlist : /* empty */
- { $$.parse = pnull(); }
- | BIND_t list
- { $$.parse = $2.parse; }
- ;
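
The grammar deleted above (and the jamgram.yy source that follows, which carries the same grammar with literal backquoted tokens) builds every statement through the parse_make() wrappers defined in the %{ %} prologue. As a worked illustration of what those actions produce, the trees for two simple Jam statements can be spelled out by hand with the same macros. This is a sketch only: the p*() macros are private to the grammar prologue, and the string literals stand in for ARG tokens that the real scanner would intern with newstr().

    /* Hand-built equivalents of what the actions above construct, assuming
       the p*() macros and jam's PARSE type are in scope.                   */
    static PARSE * example_trees( void )
    {
        /* include foo.jam ;
             list -> listp arg          : pappend( pnull(), plist( "foo.jam" ) )
             rule -> INCLUDE_t list ';' : pincl( list )                         */
        PARSE * inc = pincl( pappend( pnull(), plist( "foo.jam" ) ) );

        /* Echo hello ;
             lol  -> list               : pnode( P0, list )
             rule -> ARG lol ';'        : prule( "Echo", lol )                  */
        PARSE * call = prule( "Echo",
                              pnode( P0, pappend( pnull(), plist( "hello" ) ) ) );

        /* rule rules -> prules( first, rest ), preserving source order         */
        return prules( inc, call );
    }
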
diff --git a/jam-files/engine/jamgram.yy b/jam-files/engine/jamgram.yy
deleted file mode 100644
index 15243487..00000000
--- a/jam-files/engine/jamgram.yy
+++ /dev/null
@@ -1,329 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * jamgram.yy - jam grammar
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- * 08/31/94 (seiwald) - Allow ?= as alias for "default =".
- * 09/15/94 (seiwald) - if conditionals take only single arguments, so
- * that 'if foo == bar' gives syntax error (use =).
- * 02/11/95 (seiwald) - when scanning arguments to rules, only treat
- * punctuation keywords as keywords. All arg lists
- * are terminated with punctuation keywords.
- *
- * 09/11/00 (seiwald) - Support for function calls:
- *
- * Rules now return lists (LIST *), rather than void.
- *
- * New "[ rule ]" syntax evals rule into a LIST.
- *
- * Lists are now generated by compile_list() and
- * compile_append(), and any other rule that indirectly
- * makes a list, rather than being built directly here,
- *		so that list values can contain rule evaluations.
- *
- * New 'return' rule sets the return value, though
- * other statements also may have return values.
- *
- * 'run' production split from 'block' production so
- * that empty blocks can be handled separately.
- */
-
-%token ARG STRING
-
-%left `||` `|`
-%left `&&` `&`
-%left `=` `!=` `in`
-%left `<` `<=` `>` `>=`
-%left `!`
-
-%{
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "compile.h"
-#include "newstr.h"
-#include "rules.h"
-
-# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
-
-# define F0 (LIST *(*)(PARSE *, FRAME *))0
-# define P0 (PARSE *)0
-# define S0 (char *)0
-
-# define pappend( l,r ) parse_make( compile_append,l,r,P0,S0,S0,0 )
-# define peval( c,l,r ) parse_make( compile_eval,l,r,P0,S0,S0,c )
-# define pfor( s,l,r,x ) parse_make( compile_foreach,l,r,P0,s,S0,x )
-# define pif( l,r,t ) parse_make( compile_if,l,r,t,S0,S0,0 )
-# define pincl( l ) parse_make( compile_include,l,P0,P0,S0,S0,0 )
-# define plist( s ) parse_make( compile_list,P0,P0,P0,s,S0,0 )
-# define plocal( l,r,t ) parse_make( compile_local,l,r,t,S0,S0,0 )
-# define pmodule( l,r ) parse_make( compile_module,l,r,P0,S0,S0,0 )
-# define pclass( l,r ) parse_make( compile_class,l,r,P0,S0,S0,0 )
-# define pnull() parse_make( compile_null,P0,P0,P0,S0,S0,0 )
-# define pon( l,r ) parse_make( compile_on,l,r,P0,S0,S0,0 )
-# define prule( s,p ) parse_make( compile_rule,p,P0,P0,s,S0,0 )
-# define prules( l,r ) parse_make( compile_rules,l,r,P0,S0,S0,0 )
-# define pset( l,r,a ) parse_make( compile_set,l,r,P0,S0,S0,a )
-# define pset1( l,r,t,a ) parse_make( compile_settings,l,r,t,S0,S0,a )
-# define psetc( s,p,a,l ) parse_make( compile_setcomp,p,a,P0,s,S0,l )
-# define psete( s,l,s1,f ) parse_make( compile_setexec,l,P0,P0,s,s1,f )
-# define pswitch( l,r ) parse_make( compile_switch,l,r,P0,S0,S0,0 )
-# define pwhile( l,r ) parse_make( compile_while,l,r,P0,S0,S0,0 )
-
-# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
-# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
-
-%}
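Each grammar action below builds a PARSE node through one of the thin wrappers above around parse_make(). As a sketch (not original source) of what the preprocessor produces, the action for the `ARG lol ';'` production, `prule( $1.string, $2.parse )`, expands to an ordinary call like the one in this hypothetical helper; parse_make() is assumed to be declared in parse.h, as the prologue's includes suggest.

#include "parse.h"     /* PARSE, parse_make() */
#include "compile.h"   /* compile_rule */

/* Equivalent of `$$.parse = prule( name, args );` after macro expansion:
 * a PARSE node whose function is compile_rule, whose left child is the
 * argument list-of-lists, and whose string slot carries the rule name.
 */
static PARSE * expanded_prule( char * name, PARSE * args )
{
    return parse_make( compile_rule, args, (PARSE *)0, (PARSE *)0,
                       name, (char *)0, 0 );
}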
-
-%%
-
-run : /* empty */
- /* do nothing */
- | rules
- { parse_save( $1.parse ); }
- ;
-
-/*
- * block - zero or more rules
- * rules - one or more rules
- * rule - any one of jam's rules
- * right-recursive so rules execute in order.
- */
-
-block : null
- { $$.parse = $1.parse; }
- | rules
- { $$.parse = $1.parse; }
- ;
-
-rules : rule
- { $$.parse = $1.parse; }
- | rule rules
- { $$.parse = prules( $1.parse, $2.parse ); }
- | `local` list assign_list_opt `;` block
- { $$.parse = plocal( $2.parse, $3.parse, $5.parse ); }
- ;
-
-null : /* empty */
- { $$.parse = pnull(); }
- ;
-
-assign_list_opt : `=` list
- { $$.parse = $2.parse; $$.number = ASSIGN_SET; }
- | null
- { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; }
- ;
-
-arglist_opt : `(` lol `)`
- { $$.parse = $2.parse; }
- |
- { $$.parse = P0; }
- ;
-
-local_opt : `local`
- { $$.number = 1; }
- | /* empty */
- { $$.number = 0; }
- ;
-
-rule : `{` block `}`
- { $$.parse = $2.parse; }
- | `include` list `;`
- { $$.parse = pincl( $2.parse ); }
- | ARG lol `;`
- { $$.parse = prule( $1.string, $2.parse ); }
- | arg assign list `;`
- { $$.parse = pset( $1.parse, $3.parse, $2.number ); }
- | arg `on` list assign list `;`
- { $$.parse = pset1( $1.parse, $3.parse, $5.parse, $4.number ); }
- | `return` list `;`
- { $$.parse = $2.parse; }
- | `for` local_opt ARG `in` list `{` block `}`
- { $$.parse = pfor( $3.string, $5.parse, $7.parse, $2.number ); }
- | `switch` list `{` cases `}`
- { $$.parse = pswitch( $2.parse, $4.parse ); }
- | `if` expr `{` block `}`
- { $$.parse = pif( $2.parse, $4.parse, pnull() ); }
- | `module` list `{` block `}`
- { $$.parse = pmodule( $2.parse, $4.parse ); }
- | `class` lol `{` block `}`
- { $$.parse = pclass( $2.parse, $4.parse ); }
- | `while` expr `{` block `}`
- { $$.parse = pwhile( $2.parse, $4.parse ); }
- | `if` expr `{` block `}` `else` rule
- { $$.parse = pif( $2.parse, $4.parse, $7.parse ); }
- | local_opt `rule` ARG arglist_opt rule
- { $$.parse = psetc( $3.string, $5.parse, $4.parse, $1.number ); }
- | `on` arg rule
- { $$.parse = pon( $2.parse, $3.parse ); }
- | `actions` eflags ARG bindlist `{`
- { yymode( SCAN_STRING ); }
- STRING
- { yymode( SCAN_NORMAL ); }
- `}`
- { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); }
- ;
-
-/*
- * assign - = or +=
- */
-
-assign : `=`
- { $$.number = ASSIGN_SET; }
- | `+=`
- { $$.number = ASSIGN_APPEND; }
- | `?=`
- { $$.number = ASSIGN_DEFAULT; }
- | `default` `=`
- { $$.number = ASSIGN_DEFAULT; }
- ;
-
-/*
- * expr - an expression for if
- */
-expr : arg
- { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); }
- | expr `=` expr
- { $$.parse = peval( EXPR_EQUALS, $1.parse, $3.parse ); }
- | expr `!=` expr
- { $$.parse = peval( EXPR_NOTEQ, $1.parse, $3.parse ); }
- | expr `<` expr
- { $$.parse = peval( EXPR_LESS, $1.parse, $3.parse ); }
- | expr `<=` expr
- { $$.parse = peval( EXPR_LESSEQ, $1.parse, $3.parse ); }
- | expr `>` expr
- { $$.parse = peval( EXPR_MORE, $1.parse, $3.parse ); }
- | expr `>=` expr
- { $$.parse = peval( EXPR_MOREEQ, $1.parse, $3.parse ); }
- | expr `&` expr
- { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
- | expr `&&` expr
- { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
- | expr `|` expr
- { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
- | expr `||` expr
- { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
- | arg `in` list
- { $$.parse = peval( EXPR_IN, $1.parse, $3.parse ); }
- | `!` expr
- { $$.parse = peval( EXPR_NOT, $2.parse, pnull() ); }
- | `(` expr `)`
- { $$.parse = $2.parse; }
- ;
-
-
-/*
- * cases - action elements inside a 'switch'
- * case - a single action element inside a 'switch'
- * right-recursive rule so cases can be examined in order.
- */
-
-cases : /* empty */
- { $$.parse = P0; }
- | case cases
- { $$.parse = pnode( $1.parse, $2.parse ); }
- ;
-
-case : `case` ARG `:` block
- { $$.parse = psnode( $2.string, $4.parse ); }
- ;
-
-/*
- * lol - list of lists
- * right-recursive rule so that lists can be added in order.
- */
-
-lol : list
- { $$.parse = pnode( P0, $1.parse ); }
- | list `:` lol
- { $$.parse = pnode( $3.parse, $1.parse ); }
- ;
-
-/*
- * list - zero or more args in a LIST
- * listp - list (in punctuation-only mode)
- * arg - one ARG or function call
- */
-
-list : listp
- { $$.parse = $1.parse; yymode( SCAN_NORMAL ); }
- ;
-
-listp : /* empty */
- { $$.parse = pnull(); yymode( SCAN_PUNCT ); }
- | listp arg
- { $$.parse = pappend( $1.parse, $2.parse ); }
- ;
-
-arg : ARG
- { $$.parse = plist( $1.string ); }
- | `[` { yymode( SCAN_NORMAL ); } func `]`
- { $$.parse = $3.parse; }
- ;
-
-/*
- * func - a function call (inside [])
- * This needs to be split cleanly out of 'rule'
- */
-
-func : arg lol
- { $$.parse = prule( $1.string, $2.parse ); }
- | `on` arg arg lol
- { $$.parse = pon( $2.parse, prule( $3.string, $4.parse ) ); }
- | `on` arg `return` list
- { $$.parse = pon( $2.parse, $4.parse ); }
- ;
-
-
-/*
- * eflags - zero or more modifiers to 'executes'
- * eflag - a single modifier to 'executes'
- */
-
-eflags : /* empty */
- { $$.number = 0; }
- | eflags eflag
- { $$.number = $1.number | $2.number; }
- ;
-
-eflag : `updated`
- { $$.number = EXEC_UPDATED; }
- | `together`
- { $$.number = EXEC_TOGETHER; }
- | `ignore`
- { $$.number = EXEC_IGNORE; }
- | `quietly`
- { $$.number = EXEC_QUIETLY; }
- | `piecemeal`
- { $$.number = EXEC_PIECEMEAL; }
- | `existing`
- { $$.number = EXEC_EXISTING; }
- ;
-
-
-/*
- * bindlist - list of variables to bind for an action
- */
-
-bindlist : /* empty */
- { $$.parse = pnull(); }
- | `bind` list
- { $$.parse = $2.parse; }
- ;
-
-
diff --git a/jam-files/engine/jamgramtab.h b/jam-files/engine/jamgramtab.h
deleted file mode 100644
index a0fd43f6..00000000
--- a/jam-files/engine/jamgramtab.h
+++ /dev/null
@@ -1,44 +0,0 @@
- { "!", _BANG_t },
- { "!=", _BANG_EQUALS_t },
- { "&", _AMPER_t },
- { "&&", _AMPERAMPER_t },
- { "(", _LPAREN_t },
- { ")", _RPAREN_t },
- { "+=", _PLUS_EQUALS_t },
- { ":", _COLON_t },
- { ";", _SEMIC_t },
- { "<", _LANGLE_t },
- { "<=", _LANGLE_EQUALS_t },
- { "=", _EQUALS_t },
- { ">", _RANGLE_t },
- { ">=", _RANGLE_EQUALS_t },
- { "?=", _QUESTION_EQUALS_t },
- { "[", _LBRACKET_t },
- { "]", _RBRACKET_t },
- { "actions", ACTIONS_t },
- { "bind", BIND_t },
- { "case", CASE_t },
- { "class", CLASS_t },
- { "default", DEFAULT_t },
- { "else", ELSE_t },
- { "existing", EXISTING_t },
- { "for", FOR_t },
- { "if", IF_t },
- { "ignore", IGNORE_t },
- { "in", IN_t },
- { "include", INCLUDE_t },
- { "local", LOCAL_t },
- { "module", MODULE_t },
- { "on", ON_t },
- { "piecemeal", PIECEMEAL_t },
- { "quietly", QUIETLY_t },
- { "return", RETURN_t },
- { "rule", RULE_t },
- { "switch", SWITCH_t },
- { "together", TOGETHER_t },
- { "updated", UPDATED_t },
- { "while", WHILE_t },
- { "{", _LBRACE_t },
- { "|", _BAR_t },
- { "||", _BARBAR_t },
- { "}", _RBRACE_t },
diff --git a/jam-files/engine/lists.c b/jam-files/engine/lists.c
deleted file mode 100644
index ebabb63e..00000000
--- a/jam-files/engine/lists.c
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "newstr.h"
-# include "lists.h"
-
-/*
- * lists.c - maintain lists of strings
- *
- * This implementation essentially uses a singly linked list, but
- * guarantees that the head element of every list has a valid pointer
- * to the tail of the list, so that new elements can be appended
- * efficiently to the end of the list.
- *
- * To avoid massive allocation, list_free() just tacks the whole freed
- * chain onto freelist and list_new() looks on freelist first for an
- * available list struct. list_free() does not free the strings in the
- * chain: it lazily lets list_new() do so.
- *
- * 08/23/94 (seiwald) - new list_append()
- * 09/07/00 (seiwald) - documented lol_*() functions
- */
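As a usage sketch for the routines defined below (not part of the original file): strings handed to list_new() should be newstr()/copystr() allocations, because the freelist recycling described above reclaims them with freestr(). The file names here are invented.

#include "jam.h"
#include "newstr.h"
#include "lists.h"

static void example_list( void )
{
    LIST * l = L0;

    l = list_new( l, newstr( "a.c" ) );   /* head element gets a valid tail pointer */
    l = list_new( l, newstr( "b.c" ) );   /* appended in O(1) via head->tail */

    printf( "%d items: ", list_length( l ) );
    list_print( l );
    printf( "\n" );

    list_free( l );   /* chain parked on freelist; strings freed lazily */
}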
-
-static LIST *freelist = 0; /* junkpile for list_free() */
-
-/*
- * list_append() - append a list onto another one, returning total
- */
-
-LIST * list_append( LIST * l, LIST * nl )
-{
- if ( !nl )
- {
- /* Just return l */
- }
- else if ( !l )
- {
- l = nl;
- }
- else
- {
- /* Graft two non-empty lists. */
- l->tail->next = nl;
- l->tail = nl->tail;
- }
-
- return l;
-}
-
-/*
- * list_new() - tack a string onto the end of a list of strings
- */
-
-LIST * list_new( LIST * head, char * string )
-{
- LIST * l;
-
- if ( DEBUG_LISTS )
- printf( "list > %s <\n", string );
-
- /* Get list struct from freelist, if one available. */
- /* Otherwise allocate. */
- /* If from freelist, must free string first */
-
- if ( freelist )
- {
- l = freelist;
- freestr( l->string );
- freelist = freelist->next;
- }
- else
- {
- l = (LIST *)BJAM_MALLOC( sizeof( LIST ) );
- }
-
- /* If first on chain, head points here. */
- /* If adding to chain, tack us on. */
- /* Tail must point to this new, last element. */
-
- if ( !head ) head = l;
- else head->tail->next = l;
- head->tail = l;
- l->next = 0;
-
- l->string = string;
-
- return head;
-}
-
-
-/*
- * list_copy() - copy a whole list of strings (nl) onto end of another (l).
- */
-
-LIST * list_copy( LIST * l, LIST * nl )
-{
- for ( ; nl; nl = list_next( nl ) )
- l = list_new( l, copystr( nl->string ) );
- return l;
-}
-
-
-/*
- * list_sublist() - copy a subset of a list of strings.
- */
-
-LIST * list_sublist( LIST * l, int start, int count )
-{
- LIST * nl = 0;
- for ( ; l && start--; l = list_next( l ) );
- for ( ; l && count--; l = list_next( l ) )
- nl = list_new( nl, copystr( l->string ) );
- return nl;
-}
-
-
-static int str_ptr_compare( void const * va, void const * vb )
-{
- char * a = *( (char * *)va );
- char * b = *( (char * *)vb );
- return strcmp(a, b);
-}
-
-
-LIST * list_sort( LIST * l )
-{
- int len;
- int ii;
- char * * strings;
- LIST * listp;
- LIST * result = 0;
-
- if ( !l )
- return L0;
-
- len = list_length( l );
- strings = (char * *)BJAM_MALLOC( len * sizeof(char*) );
-
- listp = l;
- for ( ii = 0; ii < len; ++ii )
- {
- strings[ ii ] = listp->string;
- listp = listp->next;
- }
-
- qsort( strings, len, sizeof( char * ), str_ptr_compare );
-
- for ( ii = 0; ii < len; ++ii )
- result = list_append( result, list_new( 0, strings[ ii ] ) );
-
- BJAM_FREE( strings );
-
- return result;
-}
-
-
-/*
- * list_free() - free a list of strings
- */
-
-void list_free( LIST * head )
-{
- /* Just tack onto freelist. */
- if ( head )
- {
- head->tail->next = freelist;
- freelist = head;
- }
-}
-
-
-/*
- * list_pop_front() - remove the front element from a list of strings
- */
-
-LIST * list_pop_front( LIST * l )
-{
- LIST * result = l->next;
- if ( result )
- {
- result->tail = l->tail;
- l->next = L0;
- l->tail = l;
- }
- list_free( l );
- return result;
-}
-
-
-/*
- * list_print() - print a list of strings to stdout
- */
-
-void list_print( LIST * l )
-{
- LIST * p = 0;
- for ( ; l; p = l, l = list_next( l ) )
- if ( p )
- printf( "%s ", p->string );
- if ( p )
- printf( "%s", p->string );
-}
-
-
-/*
- * list_length() - return the number of items in the list
- */
-
-int list_length( LIST * l )
-{
- int n = 0;
- for ( ; l; l = list_next( l ), ++n );
- return n;
-}
-
-
-int list_in( LIST * l, char * value )
-{
- for ( ; l; l = l->next )
- if ( strcmp( l->string, value ) == 0 )
- return 1;
- return 0;
-}
-
-
-LIST * list_unique( LIST * sorted_list )
-{
- LIST * result = 0;
- LIST * last_added = 0;
-
- for ( ; sorted_list; sorted_list = sorted_list->next )
- {
- if ( !last_added || strcmp( sorted_list->string, last_added->string ) != 0 )
- {
- result = list_new( result, sorted_list->string );
- last_added = sorted_list;
- }
- }
- return result;
-}
-
-
-/*
- * lol_init() - initialize a LOL (list of lists).
- */
-
-void lol_init( LOL * lol )
-{
- lol->count = 0;
-}
-
-
-/*
- * lol_add() - append a LIST onto an LOL.
- */
-
-void lol_add( LOL * lol, LIST * l )
-{
- if ( lol->count < LOL_MAX )
- lol->list[ lol->count++ ] = l;
-}
-
-
-/*
- * lol_free() - free the LOL and its LISTs.
- */
-
-void lol_free( LOL * lol )
-{
- int i;
- for ( i = 0; i < lol->count; ++i )
- list_free( lol->list[ i ] );
- lol->count = 0;
-}
-
-
-/*
- * lol_get() - return one of the LISTs in the LOL.
- */
-
-LIST * lol_get( LOL * lol, int i )
-{
- return i < lol->count ? lol->list[ i ] : 0;
-}
-
-
-/*
- * lol_print() - debug print LISTS separated by ":".
- */
-
-void lol_print( LOL * lol )
-{
- int i;
-
- for ( i = 0; i < lol->count; ++i )
- {
- if ( i )
- printf( " : " );
- list_print( lol->list[ i ] );
- }
-}
-
-#ifdef HAVE_PYTHON
-
-PyObject *list_to_python(LIST *l)
-{
- PyObject *result = PyList_New(0);
-
- for (; l; l = l->next)
- {
- PyObject* s = PyString_FromString(l->string);
- PyList_Append(result, s);
- Py_DECREF(s);
- }
-
- return result;
-}
-
-LIST *list_from_python(PyObject *l)
-{
- LIST * result = 0;
-
- Py_ssize_t i, n;
- n = PySequence_Size(l);
- for (i = 0; i < n; ++i)
- {
- PyObject *v = PySequence_GetItem(l, i);
- result = list_new (result, newstr (PyString_AsString(v)));
- Py_DECREF(v);
- }
-
- return result;
-}
-
-#endif
diff --git a/jam-files/engine/lists.h b/jam-files/engine/lists.h
deleted file mode 100644
index 1dc59827..00000000
--- a/jam-files/engine/lists.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * lists.h - the LIST structure and routines to manipulate them
- *
- * The whole of jam relies on lists of strings as a datatype. This
- * module, in conjunction with newstr.c, handles these relatively
- * efficiently.
- *
- * Structures defined:
- *
- * LIST - list of strings
- * LOL - list of LISTs
- *
- * External routines:
- *
- * list_append() - append a list onto another one, returning total
- * list_new() - tack a string onto the end of a list of strings
- * list_copy() - copy a whole list of strings
- * list_sublist() - copy a subset of a list of strings
- * list_free() - free a list of strings
- * list_print() - print a list of strings to stdout
- * list_length() - return the number of items in the list
- *
- * lol_init() - initialize a LOL (list of lists)
- * lol_add() - append a LIST onto an LOL
- * lol_free() - free the LOL and its LISTs
- * lol_get() - return one of the LISTs in the LOL
- * lol_print() - debug print LISTS separated by ":"
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 08/23/94 (seiwald) - new list_append()
- */
-
-#ifndef LISTS_DWA20011022_H
-# define LISTS_DWA20011022_H
-
-#ifdef HAVE_PYTHON
-#include <Python.h>
-#endif
-
-/*
- * LIST - list of strings
- */
-
-typedef struct _list LIST;
-
-struct _list {
- LIST *next;
- LIST *tail; /* only valid in head node */
- char *string; /* private copy */
-};
-
-/*
- * LOL - list of LISTs
- */
-
-typedef struct _lol LOL;
-
-# define LOL_MAX 19
-
-struct _lol {
- int count;
- LIST *list[ LOL_MAX ];
-};
-
-LIST * list_append( LIST *l, LIST *nl );
-LIST * list_copy( LIST *l, LIST *nl );
-void list_free( LIST *head );
-LIST * list_new( LIST *head, char *string );
-void list_print( LIST *l );
-int list_length( LIST *l );
-LIST * list_sublist( LIST *l, int start, int count );
-LIST * list_pop_front( LIST *l );
-LIST * list_sort( LIST *l);
-LIST * list_unique( LIST *sorted_list);
-int list_in(LIST* l, char* value);
-
-# define list_next( l ) ((l)->next)
-
-# define L0 ((LIST *)0)
-
-void lol_add( LOL *lol, LIST *l );
-void lol_init( LOL *lol );
-void lol_free( LOL *lol );
-LIST * lol_get( LOL *lol, int i );
-void lol_print( LOL *lol );
-void lol_build( LOL* lol, char** elements );
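A small sketch of how the LOL interface declared above is typically used to carry a rule's argument lists. The particular strings are invented and the snippet is illustrative only, not taken from the engine.

#include "jam.h"
#include "newstr.h"
#include "lists.h"

static void example_lol( void )
{
    LOL args[ 1 ];

    lol_init( args );
    lol_add( args, list_new( L0, newstr( "hello.c" ) ) );   /* first argument list */
    lol_add( args, list_new( L0, newstr( "hello.o" ) ) );   /* second argument list */

    list_print( lol_get( args, 0 ) );   /* just the first LIST */
    lol_print( args );                  /* both LISTs, " : " separated */

    lol_free( args );   /* frees the LISTs and resets count to 0 */
}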
-
-#ifdef HAVE_PYTHON
-
-PyObject *list_to_python(LIST *l);
-LIST *list_from_python(PyObject *l);
-
-#endif
-
-#endif
-
diff --git a/jam-files/engine/make.c b/jam-files/engine/make.c
deleted file mode 100644
index c871f0be..00000000
--- a/jam-files/engine/make.c
+++ /dev/null
@@ -1,814 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * make.c - bring a target up to date, once rules are in place.
- *
- * This module controls the execution of rules to bring a target and its
- * dependencies up to date. It is invoked after the targets, rules, et al.
- * described in rules.h are created by interpreting the jam files.
- *
- * This file contains the main make() entry point and the first pass make0().
- * The second pass, make1(), which actually does the command execution, is in
- * make1.c.
- *
- * External routines:
- * make() - make a target, given its name
- *
- * Internal routines:
- * make0() - bind and scan everything to make a TARGET
- * make0sort() - reorder TARGETS chain by their time (newest to oldest)
- *
- * 12/26/93 (seiwald) - allow NOTIME targets to be expanded via $(<), $(>).
- * 01/04/94 (seiwald) - print all targets, bounded, when tracing commands.
- * 04/08/94 (seiwald) - progress report now reflects only targets with actions.
- * 04/11/94 (seiwald) - Combined deps & headers into deps[2] in TARGET.
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 12/20/94 (seiwald) - make0() headers after determining fate of target, so
- * that headers are not seen as being dependent on
- * themselves.
- * 01/19/95 (seiwald) - distinguish between CANTFIND/CANTMAKE targets.
- * 02/02/95 (seiwald) - propagate leaf source time for new LEAVES rule.
- * 02/14/95 (seiwald) - NOUPDATE rule means don't update existing target.
- * 08/22/95 (seiwald) - NOUPDATE targets immune to anyhow (-a) flag.
- * 09/06/00 (seiwald) - NOCARE affects targets with sources/actions.
- * 03/02/01 (seiwald) - reverse NOCARE change.
- * 03/14/02 (seiwald) - TEMPORARY targets no longer take on parents age.
- * 03/16/02 (seiwald) - support for -g (reorder builds by source time).
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "rules.h"
-
-#ifdef OPT_HEADER_CACHE_EXT
- #include "hcache.h"
-#endif
-
-#include "search.h"
-#include "newstr.h"
-#include "make.h"
-#include "headers.h"
-#include "command.h"
-#include <assert.h>
-
-#ifndef max
- #define max( a,b ) ((a)>(b)?(a):(b))
-#endif
-
-static TARGETS * make0sort( TARGETS * c );
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- static void dependGraphOutput( TARGET * t, int depth );
-#endif
-
-static const char * target_fate[] =
-{
- "init", /* T_FATE_INIT */
- "making", /* T_FATE_MAKING */
- "stable", /* T_FATE_STABLE */
- "newer", /* T_FATE_NEWER */
- "temp", /* T_FATE_ISTMP */
- "touched", /* T_FATE_TOUCHED */
- "rebuild", /* T_FATE_REBUILD */
- "missing", /* T_FATE_MISSING */
- "needtmp", /* T_FATE_NEEDTMP */
- "old", /* T_FATE_OUTDATED */
- "update", /* T_FATE_UPDATE */
- "nofind", /* T_FATE_CANTFIND */
- "nomake" /* T_FATE_CANTMAKE */
-};
-
-static const char * target_bind[] =
-{
- "unbound",
- "missing",
- "parents",
- "exists",
-};
-
-# define spaces(x) ( " " + ( x > 20 ? 0 : 20-x ) )
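The spaces() macro above indents by pointer arithmetic: it returns a suffix of a run of blanks (20 of them, which is what the 20-x offset implies), so depths beyond 20 simply stop indenting. A standalone equivalent under that assumption, with an invented name:

/* Same effect as spaces(x), written without the pointer-into-literal trick. */
static const char * spaces_equiv( int x )
{
    static const char blanks[] = "                    ";   /* 20 spaces */
    if ( x > 20 ) x = 20;
    if ( x < 0 )  x = 0;
    return blanks + ( 20 - x );
}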
-
-
-/*
- * make() - make a target, given its name.
- */
-
-int make( int n_targets, char const * * targets, int anyhow )
-{
- int i;
- COUNTS counts[ 1 ];
- int status = 0; /* 1 if anything fails */
-
-#ifdef OPT_HEADER_CACHE_EXT
- hcache_init();
-#endif
-
- memset( (char *)counts, 0, sizeof( *counts ) );
-
- /* First bind all targets with LOCATE_TARGET setting. This is needed to
- * correctly handle dependencies to generated headers.
- */
- bind_explicitly_located_targets();
-
- {
- PROFILE_ENTER( MAKE_MAKE0 );
- for ( i = 0; i < n_targets; ++i )
- make0( bindtarget( targets[ i ] ), 0, 0, counts, anyhow );
- PROFILE_EXIT( MAKE_MAKE0 );
- }
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_GRAPH )
- for ( i = 0; i < n_targets; ++i )
- dependGraphOutput( bindtarget( targets[ i ] ), 0 );
-#endif
-
- if ( DEBUG_MAKE )
- {
- if ( counts->targets )
- printf( "...found %d target%s...\n", counts->targets,
- counts->targets > 1 ? "s" : "" );
- if ( counts->temp )
- printf( "...using %d temp target%s...\n", counts->temp,
- counts->temp > 1 ? "s" : "" );
- if ( counts->updating )
- printf( "...updating %d target%s...\n", counts->updating,
- counts->updating > 1 ? "s" : "" );
- if ( counts->cantfind )
- printf( "...can't find %d target%s...\n", counts->cantfind,
- counts->cantfind > 1 ? "s" : "" );
- if ( counts->cantmake )
- printf( "...can't make %d target%s...\n", counts->cantmake,
- counts->cantmake > 1 ? "s" : "" );
- }
-
-#ifdef OPT_HEADER_CACHE_EXT
- hcache_done();
-#endif
-
- status = counts->cantfind || counts->cantmake;
-
- {
- PROFILE_ENTER( MAKE_MAKE1 );
- for ( i = 0; i < n_targets; ++i )
- status |= make1( bindtarget( targets[ i ] ) );
- PROFILE_EXIT( MAKE_MAKE1 );
- }
-
- return status;
-}
-
-
-/* Force any dependants of t that have already at least begun being visited by
- * make0() to be updated.
- */
-
-static void update_dependants( TARGET * t )
-{
- TARGETS * q;
-
- for ( q = t->dependants; q; q = q->next )
- {
- TARGET * p = q->target;
- char fate0 = p->fate;
-
- /* If we have already at least begun visiting it and we are not already
- * rebuilding it for other reasons.
- */
- if ( ( fate0 != T_FATE_INIT ) && ( fate0 < T_FATE_BUILD ) )
- {
- p->fate = T_FATE_UPDATE;
-
- if ( DEBUG_FATE )
- {
- printf( "fate change %s from %s to %s (as dependant of %s)\n",
- p->name, target_fate[ (int) fate0 ], target_fate[ (int) p->fate ], t->name );
- }
-
- /* If we are done visiting it, go back and make sure its dependants
- * get rebuilt.
- */
- if ( fate0 > T_FATE_MAKING )
- update_dependants( p );
- }
- }
-}
-
-
-/*
- * Make sure that all of t's rebuilds get rebuilt.
- */
-
-static void force_rebuilds( TARGET * t )
-{
- TARGETS * d;
- for ( d = t->rebuilds; d; d = d->next )
- {
- TARGET * r = d->target;
-
- /* If it is not already being rebuilt for other reasons. */
- if ( r->fate < T_FATE_BUILD )
- {
- if ( DEBUG_FATE )
- printf( "fate change %s from %s to %s (by rebuild)\n",
- r->name, target_fate[ (int) r->fate ], target_fate[ T_FATE_REBUILD ] );
-
- /* Force rebuild it. */
- r->fate = T_FATE_REBUILD;
-
- /* And make sure its dependants are updated too. */
- update_dependants( r );
- }
- }
-}
-
-
-/*
- * make0() - bind and scan everything to make a TARGET.
- *
- * Recursively binds a target, searches for #included headers, calls itself on
- * those headers and any dependencies.
- */
-
-void make0
-(
- TARGET * t,
- TARGET * p, /* parent */
- int depth, /* for display purposes */
- COUNTS * counts, /* for reporting */
- int anyhow
-) /* forcibly touch all (real) targets */
-{
- TARGETS * c;
- TARGET * ptime = t;
- time_t last;
- time_t leaf;
- time_t hlast;
- int fate;
- char const * flag = "";
- SETTINGS * s;
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- int savedFate, oldTimeStamp;
-#endif
-
- if ( DEBUG_MAKEPROG )
- printf( "make\t--\t%s%s\n", spaces( depth ), t->name );
-
- /*
- * Step 1: initialize
- */
-
- if ( DEBUG_MAKEPROG )
- printf( "make\t--\t%s%s\n", spaces( depth ), t->name );
-
- t->fate = T_FATE_MAKING;
-
- /*
- * Step 2: under the influence of "on target" variables,
- * bind the target and search for headers.
- */
-
- /* Step 2a: set "on target" variables. */
- s = copysettings( t->settings );
- pushsettings( s );
-
- /* Step 2b: find and timestamp the target file (if it is a file). */
- if ( ( t->binding == T_BIND_UNBOUND ) && !( t->flags & T_FLAG_NOTFILE ) )
- {
- char * another_target;
- t->boundname = search( t->name, &t->time, &another_target,
- t->flags & T_FLAG_ISFILE );
- /* If it was detected that this target refers to an already existing and
- * bound one, we add an include dependency, so that every target
- * depending on us will depend on that other target as well.
- */
- if ( another_target )
- target_include( t, bindtarget( another_target ) );
-
- t->binding = t->time ? T_BIND_EXISTS : T_BIND_MISSING;
- }
-
- /* INTERNAL, NOTFILE header nodes have the time of their parents. */
- if ( p && ( t->flags & T_FLAG_INTERNAL ) )
- ptime = p;
-
- /* If temp file does not exist but parent does, use parent. */
- if ( p && ( t->flags & T_FLAG_TEMP ) &&
- ( t->binding == T_BIND_MISSING ) &&
- ( p->binding != T_BIND_MISSING ) )
- {
- t->binding = T_BIND_PARENTS;
- ptime = p;
- }
-
-#ifdef OPT_SEMAPHORE
- {
- LIST * var = var_get( "JAM_SEMAPHORE" );
- if ( var )
- {
- TARGET * semaphore = bindtarget( var->string );
- semaphore->progress = T_MAKE_SEMAPHORE;
- t->semaphore = semaphore;
- }
- }
-#endif
-
-    /* Step 2c: If it is a file, search for headers. */
- if ( t->binding == T_BIND_EXISTS )
- headers( t );
-
- /* Step 2d: reset "on target" variables. */
- popsettings( s );
- freesettings( s );
-
- /*
-     * Pause for a little progress reporting.
- */
-
- if ( DEBUG_BIND )
- {
- if ( strcmp( t->name, t->boundname ) )
- printf( "bind\t--\t%s%s: %s\n",
- spaces( depth ), t->name, t->boundname );
-
- switch ( t->binding )
- {
- case T_BIND_UNBOUND:
- case T_BIND_MISSING:
- case T_BIND_PARENTS:
- printf( "time\t--\t%s%s: %s\n",
- spaces( depth ), t->name, target_bind[ (int) t->binding ] );
- break;
-
- case T_BIND_EXISTS:
- printf( "time\t--\t%s%s: %s",
- spaces( depth ), t->name, ctime( &t->time ) );
- break;
- }
- }
-
- /*
- * Step 3: recursively make0() dependencies & headers.
- */
-
- /* Step 3a: recursively make0() dependencies. */
- for ( c = t->depends; c; c = c->next )
- {
- int internal = t->flags & T_FLAG_INTERNAL;
-
- /* Warn about circular deps, except for includes, which include each
-         * other a lot.
- */
- if ( c->target->fate == T_FATE_INIT )
- make0( c->target, ptime, depth + 1, counts, anyhow );
- else if ( c->target->fate == T_FATE_MAKING && !internal )
- printf( "warning: %s depends on itself\n", c->target->name );
- }
-
- /* Step 3b: recursively make0() internal includes node. */
- if ( t->includes )
- make0( t->includes, p, depth + 1, counts, anyhow );
-
- /* Step 3c: add dependencies' includes to our direct dependencies. */
- {
- TARGETS * incs = 0;
- for ( c = t->depends; c; c = c->next )
- if ( c->target->includes )
- incs = targetentry( incs, c->target->includes );
- t->depends = targetchain( t->depends, incs );
- }
-
- /*
- * Step 4: compute time & fate
- */
-
- /* Step 4a: pick up dependencies' time and fate */
- last = 0;
- leaf = 0;
- fate = T_FATE_STABLE;
- for ( c = t->depends; c; c = c->next )
- {
- /* If LEAVES has been applied, we only heed the timestamps of the leaf
- * source nodes.
- */
- leaf = max( leaf, c->target->leaf );
-
- if ( t->flags & T_FLAG_LEAVES )
- {
- last = leaf;
- continue;
- }
-
- last = max( last, c->target->time );
- fate = max( fate, c->target->fate );
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE )
- if ( fate < c->target->fate )
- printf( "fate change %s from %s to %s by dependency %s\n",
- t->name, target_fate[(int) fate], target_fate[(int) c->target->fate],
- c->target->name );
-#endif
- }
-
- /* Step 4b: pick up included headers time */
-
- /*
- * If a header is newer than a temp source that includes it,
- * the temp source will need building.
- */
-
- hlast = t->includes ? t->includes->time : 0;
-
- /* Step 4c: handle NOUPDATE oddity.
- *
- * If a NOUPDATE file exists, mark it as having eternally old dependencies.
- * Do not inherit our fate from our dependencies. Decide fate based only on
- * other flags and our binding (done later).
- */
- if ( t->flags & T_FLAG_NOUPDATE )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE )
- if ( fate != T_FATE_STABLE )
- printf( "fate change %s back to stable, NOUPDATE.\n", t->name
- );
-#endif
-
- last = 0;
- t->time = 0;
-
- /* Do not inherit our fate from our dependencies. Decide fate based only
- * upon other flags and our binding (done later).
- */
- fate = T_FATE_STABLE;
- }
-
- /* Step 4d: determine fate: rebuild target or what? */
-
- /*
- In English:
- If can not find or make child, can not make target.
- If children changed, make target.
- If target missing, make it.
- If children newer, make target.
- If temp's children newer than parent, make temp.
- If temp's headers newer than parent, make temp.
- If deliberately touched, make it.
- If up-to-date temp file present, use it.
- If target newer than non-notfile parent, mark target newer.
- Otherwise, stable!
-
- Note this block runs from least to most stable:
- as we make it further down the list, the target's
- fate is getting stabler.
- */
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- savedFate = fate;
- oldTimeStamp = 0;
-#endif
-
- if ( fate >= T_FATE_BROKEN )
- {
- fate = T_FATE_CANTMAKE;
- }
- else if ( fate >= T_FATE_SPOIL )
- {
- fate = T_FATE_UPDATE;
- }
- else if ( t->binding == T_BIND_MISSING )
- {
- fate = T_FATE_MISSING;
- }
- else if ( ( t->binding == T_BIND_EXISTS ) && ( last > t->time ) )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- oldTimeStamp = 1;
-#endif
- fate = T_FATE_OUTDATED;
- }
- else if ( ( t->binding == T_BIND_PARENTS ) && ( last > p->time ) )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- oldTimeStamp = 1;
-#endif
- fate = T_FATE_NEEDTMP;
- }
- else if ( ( t->binding == T_BIND_PARENTS ) && ( hlast > p->time ) )
- {
- fate = T_FATE_NEEDTMP;
- }
- else if ( t->flags & T_FLAG_TOUCHED )
- {
- fate = T_FATE_TOUCHED;
- }
- else if ( anyhow && !( t->flags & T_FLAG_NOUPDATE ) )
- {
- fate = T_FATE_TOUCHED;
- }
- else if ( ( t->binding == T_BIND_EXISTS ) && ( t->flags & T_FLAG_TEMP ) )
- {
- fate = T_FATE_ISTMP;
- }
- else if ( ( t->binding == T_BIND_EXISTS ) && p &&
- ( p->binding != T_BIND_UNBOUND ) && ( t->time > p->time ) )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- oldTimeStamp = 1;
-#endif
- fate = T_FATE_NEWER;
- }
- else
- {
- fate = T_FATE_STABLE;
- }
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE && ( fate != savedFate ) )
- {
- if ( savedFate == T_FATE_STABLE )
- printf( "fate change %s set to %s%s\n", t->name,
- target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" );
- else
- printf( "fate change %s from %s to %s%s\n", t->name,
- target_fate[ savedFate ], target_fate[ fate ],
- oldTimeStamp ? " (by timestamp)" : "" );
- }
-#endif
-
- /* Step 4e: handle missing files */
- /* If it is missing and there are no actions to create it, boom. */
- /* If we can not make a target we do not care about it, okay. */
- /* We could insist that there are updating actions for all missing */
- /* files, but if they have dependencies we just pretend it is a NOTFILE. */
-
- if ( ( fate == T_FATE_MISSING ) && !t->actions && !t->depends )
- {
- if ( t->flags & T_FLAG_NOCARE )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE )
- printf( "fate change %s to STABLE from %s, "
- "no actions, no dependencies and do not care\n",
- t->name, target_fate[ fate ] );
-#endif
- fate = T_FATE_STABLE;
- }
- else
- {
- printf( "don't know how to make %s\n", t->name );
- fate = T_FATE_CANTFIND;
- }
- }
-
- /* Step 4f: propagate dependencies' time & fate. */
- /* Set leaf time to be our time only if this is a leaf. */
-
- t->time = max( t->time, last );
- t->leaf = leaf ? leaf : t->time ;
- /* This target's fate may have been updated by virtue of following some
- * target's rebuilds list, so only allow it to be increased to the fate we
- * have calculated. Otherwise, grab its new fate.
- */
- if ( fate > t->fate )
- t->fate = fate;
- else
- fate = t->fate;
-
- /* Step 4g: if this target needs to be built, force rebuild everything in
- * this target's rebuilds list.
- */
- if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
- force_rebuilds( t );
-
- /*
- * Step 5: sort dependencies by their update time.
- */
-
- if ( globs.newestfirst )
- t->depends = make0sort( t->depends );
-
- /*
- * Step 6: a little harmless tabulating for tracing purposes
- */
-
-    /* Do not count or report internal include nodes. */
- if ( t->flags & T_FLAG_INTERNAL )
- return;
-
- if ( counts )
- {
-#ifdef OPT_IMPROVED_PATIENCE_EXT
- ++counts->targets;
-#else
- if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE )
- printf( "...patience...\n" );
-#endif
-
- if ( fate == T_FATE_ISTMP )
- ++counts->temp;
- else if ( fate == T_FATE_CANTFIND )
- ++counts->cantfind;
- else if ( ( fate == T_FATE_CANTMAKE ) && t->actions )
- ++counts->cantmake;
- else if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) &&
- t->actions )
- ++counts->updating;
- }
-
- if ( !( t->flags & T_FLAG_NOTFILE ) && ( fate >= T_FATE_SPOIL ) )
- flag = "+";
- else if ( ( t->binding == T_BIND_EXISTS ) && p && ( t->time > p->time ) )
- flag = "*";
-
- if ( DEBUG_MAKEPROG )
- printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int) t->fate ],
- spaces( depth ), t->name );
-}
-
-
-#ifdef OPT_GRAPH_DEBUG_EXT
-
-static const char * target_name( TARGET * t )
-{
- static char buf[ 1000 ];
- if ( t->flags & T_FLAG_INTERNAL )
- {
- sprintf( buf, "%s (internal node)", t->name );
- return buf;
- }
- return t->name;
-}
-
-
-/*
- * dependGraphOutput() - output the DG after make0 has run.
- */
-
-static void dependGraphOutput( TARGET * t, int depth )
-{
- TARGETS * c;
-
- if ( ( t->flags & T_FLAG_VISITED ) || !t->name || !t->boundname )
- return;
-
- t->flags |= T_FLAG_VISITED;
-
- switch ( t->fate )
- {
- case T_FATE_TOUCHED:
- case T_FATE_MISSING:
- case T_FATE_OUTDATED:
- case T_FATE_UPDATE:
- printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) );
- break;
- default:
- printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) );
- break;
- }
-
- if ( strcmp( t->name, t->boundname ) )
- printf( " %s Loc: %s\n", spaces( depth ), t->boundname );
-
- switch ( t->fate )
- {
- case T_FATE_STABLE:
- printf( " %s : Stable\n", spaces( depth ) );
- break;
- case T_FATE_NEWER:
- printf( " %s : Newer\n", spaces( depth ) );
- break;
- case T_FATE_ISTMP:
- printf( " %s : Up to date temp file\n", spaces( depth ) );
- break;
- case T_FATE_NEEDTMP:
- printf( " %s : Temporary file, to be updated\n", spaces( depth ) );
- break;
- case T_FATE_TOUCHED:
- printf( " %s : Been touched, updating it\n", spaces( depth ) );
- break;
- case T_FATE_MISSING:
- printf( " %s : Missing, creating it\n", spaces( depth ) );
- break;
- case T_FATE_OUTDATED:
- printf( " %s : Outdated, updating it\n", spaces( depth ) );
- break;
- case T_FATE_REBUILD:
- printf( " %s : Rebuild, updating it\n", spaces( depth ) );
- break;
- case T_FATE_UPDATE:
- printf( " %s : Updating it\n", spaces( depth ) );
- break;
- case T_FATE_CANTFIND:
- printf( " %s : Can not find it\n", spaces( depth ) );
- break;
- case T_FATE_CANTMAKE:
- printf( " %s : Can make it\n", spaces( depth ) );
- break;
- }
-
- if ( t->flags & ~T_FLAG_VISITED )
- {
- printf( " %s : ", spaces( depth ) );
- if ( t->flags & T_FLAG_TEMP ) printf( "TEMPORARY " );
- if ( t->flags & T_FLAG_NOCARE ) printf( "NOCARE " );
- if ( t->flags & T_FLAG_NOTFILE ) printf( "NOTFILE " );
- if ( t->flags & T_FLAG_TOUCHED ) printf( "TOUCHED " );
- if ( t->flags & T_FLAG_LEAVES ) printf( "LEAVES " );
- if ( t->flags & T_FLAG_NOUPDATE ) printf( "NOUPDATE " );
- printf( "\n" );
- }
-
- for ( c = t->depends; c; c = c->next )
- {
- printf( " %s : Depends on %s (%s)", spaces( depth ),
- target_name( c->target ), target_fate[ (int) c->target->fate ] );
- if ( c->target->time == t->time )
- printf( " (max time)");
- printf( "\n" );
- }
-
- for ( c = t->depends; c; c = c->next )
- dependGraphOutput( c->target, depth + 1 );
-}
-#endif
-
-
-/*
- * make0sort() - reorder TARGETS chain by their time (newest to oldest).
- *
- * We walk the chain, taking each item and inserting it on the sorted result, with
- * newest items at the front. This involves updating each of the TARGETS'
- * c->next and c->tail. Note that we make c->tail a valid prev pointer for every
- * entry. Normally, it is only valid at the head, where prev == tail. Note also
- * that while tail is a loop, next ends at the end of the chain.
- */
-
-static TARGETS * make0sort( TARGETS * chain )
-{
- PROFILE_ENTER( MAKE_MAKE0SORT );
-
- TARGETS * result = 0;
-
- /* Walk the current target list. */
- while ( chain )
- {
- TARGETS * c = chain;
- TARGETS * s = result;
-
- chain = chain->next;
-
- /* Find point s in result for c. */
- while ( s && ( s->target->time > c->target->time ) )
- s = s->next;
-
- /* Insert c in front of s (might be 0). Do not even think of deciphering
- * this.
- */
- c->next = s; /* good even if s = 0 */
- if ( result == s ) result = c; /* new head of chain? */
- if ( !s ) s = result; /* wrap to ensure a next */
- if ( result != c ) s->tail->next = c; /* not head? be prev's next */
- c->tail = s->tail; /* take on next's prev */
- s->tail = c; /* make next's prev us */
- }
-
- PROFILE_EXIT( MAKE_MAKE0SORT );
- return result;
-}
-
-
-static LIST * targets_to_update_ = 0;
-
-
-void mark_target_for_updating( char * target )
-{
- targets_to_update_ = list_new( targets_to_update_, target );
-}
-
-
-LIST * targets_to_update()
-{
- return targets_to_update_;
-}
-
-
-void clear_targets_to_update()
-{
- list_free( targets_to_update_ );
- targets_to_update_ = 0;
-}
diff --git a/jam-files/engine/make.h b/jam-files/engine/make.h
deleted file mode 100644
index b372263e..00000000
--- a/jam-files/engine/make.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * make.h - bring a target up to date, once rules are in place
- */
-
-#include "lists.h"
-
-int make( int n_targets, const char **targets, int anyhow );
-int make1( TARGET *t );
-
-typedef struct {
- int temp;
- int updating;
- int cantfind;
- int cantmake;
- int targets;
- int made;
-} COUNTS ;
-
-
-void make0( TARGET *t, TARGET *p, int depth,
- COUNTS *counts, int anyhow );
-
-
-/*
- * Specifies that the target should be updated.
- */
-void mark_target_for_updating(char *target);
-/*
- * Returns the list of all the targets previously passed to 'mark_target_for_updating'.
- */
-LIST *targets_to_update();
-/*
- * Clears/unmarks all targets that are currently marked for update.
- */
-void clear_targets_to_update();
diff --git a/jam-files/engine/make1.c b/jam-files/engine/make1.c
deleted file mode 100644
index 8001f333..00000000
--- a/jam-files/engine/make1.c
+++ /dev/null
@@ -1,1145 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * make1.c - execute command to bring targets up to date
- *
- * This module contains make1(), the entry point called by make() to
- * recursively descend the dependency graph executing update actions as
- * marked by make0().
- *
- * External routines:
- *
- * make1() - execute commands to update a TARGET and all of its dependencies.
- *
- * Internal routines, the recursive/asynchronous command executors:
- *
- * make1a() - recursively traverse dependency target tree, calling make1b().
- * make1atail() - started processing all dependencies so go on to make1b().
- * make1b() - when dependencies are up to date, build target with make1c().
- * make1c() - launch target's next command, call parents' make1b() if none.
- * make1d() - handle command execution completion and call back make1c().
- *
- * Internal support routines:
- *
- * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
- * make1list() - turn a list of targets into a LIST, for $(<) and $(>).
- * make1settings() - for vars that get bound values, build up replacement lists.
- * make1bind() - bind targets that weren't bound in dependency analysis.
- *
- * 04/16/94 (seiwald) - Split from make.c.
- * 04/21/94 (seiwald) - Handle empty "updated" actions.
- * 05/04/94 (seiwald) - async multiprocess (-j) support.
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources.
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 01/19/95 (seiwald) - distinguish between CANTFIND/CANTMAKE targets.
- * 01/22/94 (seiwald) - pass per-target JAMSHELL down to exec_cmd().
- * 02/28/95 (seiwald) - Handle empty "existing" actions.
- * 03/10/95 (seiwald) - Fancy counts.
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "assert.h"
-#include "variable.h"
-#include "rules.h"
-#include "headers.h"
-
-#include "search.h"
-#include "newstr.h"
-#include "make.h"
-#include "command.h"
-#include "execcmd.h"
-#include "compile.h"
-#include "output.h"
-
-#include <stdlib.h>
-
-#if ! defined(NT) || defined(__GNUC__)
- #include <unistd.h> /* for unlink */
-#endif
-
-static CMD * make1cmds ( TARGET * );
-static LIST * make1list ( LIST *, TARGETS *, int flags );
-static SETTINGS * make1settings( LIST * vars );
-static void make1bind ( TARGET * );
-
-/* Ugly static - it is too hard to carry it through the callbacks. */
-
-static struct
-{
- int failed;
- int skipped;
- int total;
- int made;
-} counts[ 1 ] ;
-
-/* Target state - remove recursive calls by just keeping track of the state a
- * target is in.
- */
-typedef struct _state
-{
- struct _state * prev; /* previous state on stack */
- TARGET * t; /* current target */
- TARGET * parent; /* parent argument necessary for make1a() */
-#define T_STATE_MAKE1A 0 /* make1a() should be called */
-#define T_STATE_MAKE1ATAIL 1 /* make1atail() should be called */
-#define T_STATE_MAKE1B 2 /* make1b() should be called */
-#define T_STATE_MAKE1C 3 /* make1c() should be called */
-#define T_STATE_MAKE1D 4 /* make1d() should be called */
- int curstate; /* current state */
- int status;
-} state;
-
-static void make1a ( state * );
-static void make1atail ( state * );
-static void make1b ( state * );
-static void make1c ( state * );
-static void make1d ( state * );
-static void make_closure( void * closure, int status, timing_info *, char *, char * );
-
-typedef struct _stack
-{
- state * stack;
-} stack;
-
-static stack state_stack = { NULL };
-
-static state * state_freelist = NULL;
-
-
-static state * alloc_state()
-{
- if ( state_freelist != NULL )
- {
- state * pState = state_freelist;
- state_freelist = pState->prev;
- memset( pState, 0, sizeof( state ) );
- return pState;
- }
-
- return (state *)BJAM_MALLOC( sizeof( state ) );
-}
-
-
-static void free_state( state * pState )
-{
- pState->prev = state_freelist;
- state_freelist = pState;
-}
-
-
-static void clear_state_freelist()
-{
- while ( state_freelist != NULL )
- {
- state * pState = state_freelist;
- state_freelist = state_freelist->prev;
- BJAM_FREE( pState );
- }
-}
-
-
-static state * current_state( stack * pStack )
-{
- return pStack->stack;
-}
-
-
-static void pop_state( stack * pStack )
-{
- if ( pStack->stack != NULL )
- {
- state * pState = pStack->stack->prev;
- free_state( pStack->stack );
- pStack->stack = pState;
- }
-}
-
-
-static state * push_state( stack * pStack, TARGET * t, TARGET * parent, int curstate )
-{
- state * pState = alloc_state();
-
- pState->t = t;
- pState->parent = parent;
- pState->prev = pStack->stack;
- pState->curstate = curstate;
-
- pStack->stack = pState;
-
- return pStack->stack;
-}
-
-
-/*
- * Pushes a stack onto another stack, effectively reversing the order.
- */
-
-static void push_stack_on_stack( stack * pDest, stack * pSrc )
-{
- while ( pSrc->stack != NULL )
- {
- state * pState = pSrc->stack;
- pSrc->stack = pSrc->stack->prev;
- pState->prev = pDest->stack;
- pDest->stack = pState;
- }
-}
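make1a() and make1c() below first collect work on a temporary stack and then transfer it with push_stack_on_stack(); two LIFO transfers cancel each other's reversal, so items come off the main stack in their original order. A tiny self-contained illustration of that property (not engine code):

#include <stdio.h>

/* Pushing 1, 2, 3 onto tmp leaves 3 on top; draining tmp onto dst puts
 * 1 back on top, so dst pops 1, 2, 3 -- the original order.
 */
int main( void )
{
    int tmp[ 3 ], dst[ 3 ], ntmp = 0, ndst = 0, i;

    for ( i = 1; i <= 3; ++i )     /* push 1, 2, 3 onto tmp */
        tmp[ ntmp++ ] = i;
    while ( ntmp )                 /* pop tmp, push dst */
        dst[ ndst++ ] = tmp[ --ntmp ];
    while ( ndst )                 /* dst pops 1, 2, 3 */
        printf( "%d\n", dst[ --ndst ] );
    return 0;
}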
-
-
-/*
- * make1() - execute commands to update a TARGET and all of its dependencies.
- */
-
-static int intr = 0;
-
-int make1( TARGET * t )
-{
- state * pState;
-
- memset( (char *)counts, 0, sizeof( *counts ) );
-
- /* Recursively make the target and its dependencies. */
- push_state( &state_stack, t, NULL, T_STATE_MAKE1A );
-
- do
- {
- while ( ( pState = current_state( &state_stack ) ) != NULL )
- {
- if ( intr )
- pop_state( &state_stack );
-
- switch ( pState->curstate )
- {
- case T_STATE_MAKE1A : make1a ( pState ); break;
- case T_STATE_MAKE1ATAIL: make1atail( pState ); break;
- case T_STATE_MAKE1B : make1b ( pState ); break;
- case T_STATE_MAKE1C : make1c ( pState ); break;
- case T_STATE_MAKE1D : make1d ( pState ); break;
- }
- }
- }
- /* Wait for any outstanding commands to finish running. */
- while ( exec_wait() );
-
- clear_state_freelist();
-
- /* Talk about it. */
- if ( counts->failed )
- printf( "...failed updating %d target%s...\n", counts->failed,
- counts->failed > 1 ? "s" : "" );
- if ( DEBUG_MAKE && counts->skipped )
- printf( "...skipped %d target%s...\n", counts->skipped,
- counts->skipped > 1 ? "s" : "" );
- if ( DEBUG_MAKE && counts->made )
- printf( "...updated %d target%s...\n", counts->made,
- counts->made > 1 ? "s" : "" );
-
- return counts->total != counts->made;
-}
-
-
-/*
- * make1a() - recursively traverse target tree, calling make1b().
- *
- * Called to start processing a specified target. Does nothing if the target is
- * already being processed; otherwise it starts processing all of its
- * dependencies. Once all of its dependencies have started being processed, it goes
- * on and calls make1b() (actually does that indirectly via a helper
- * make1atail() state).
- */
-
-static void make1a( state * pState )
-{
- TARGET * t = pState->t;
- TARGETS * c;
-
- /* If the parent is the first to try to build this target or this target is
- * in the make1c() quagmire, arrange for the parent to be notified when this
- * target is built.
- */
- if ( pState->parent )
- switch ( pState->t->progress )
- {
- case T_MAKE_INIT:
- case T_MAKE_ACTIVE:
- case T_MAKE_RUNNING:
- pState->t->parents = targetentry( pState->t->parents,
- pState->parent );
- ++pState->parent->asynccnt;
- }
-
- /* If this target is already being processed then do nothing. There is no
- * need to start processing the same target all over again.
- */
- if ( pState->t->progress != T_MAKE_INIT )
- {
- pop_state( &state_stack );
- return;
- }
-
- /* Asynccnt counts the dependencies preventing this target from proceeding
- * to make1b() for actual building. We start off with a count of 1 to
- * prevent anything from happening until we can notify all dependencies that
- * they are needed. This 1 is accounted for when we call make1b() ourselves,
- * below. Without this, if a dependency gets built before we finish
- * processing all of our other dependencies, our build might be triggered
- * prematurely.
- */
- pState->t->asynccnt = 1;
-
- /* Add header nodes created during the building process. */
- {
- TARGETS * inc = 0;
- for ( c = t->depends; c; c = c->next )
- if ( c->target->rescanned && c->target->includes )
- inc = targetentry( inc, c->target->includes );
- t->depends = targetchain( t->depends, inc );
- }
-
- /* Guard against circular dependencies. */
- pState->t->progress = T_MAKE_ONSTACK;
-
- {
- stack temp_stack = { NULL };
- for ( c = t->depends; c && !intr; c = c->next )
- push_state( &temp_stack, c->target, pState->t, T_STATE_MAKE1A );
-
- /* Using stacks reverses the order of execution. Reverse it back. */
- push_stack_on_stack( &state_stack, &temp_stack );
- }
-
- pState->curstate = T_STATE_MAKE1ATAIL;
-}
-
-
-/*
- * make1atail() - started processing all dependencies so go on to make1b().
- */
-
-static void make1atail( state * pState )
-{
- pState->t->progress = T_MAKE_ACTIVE;
- /* Now that all of our dependencies have bumped up our asynccnt we can
- * remove our own internal bump added to prevent this target from being
- * built before all of its dependencies start getting processed.
- */
- pState->curstate = T_STATE_MAKE1B;
-}
-
-
-/*
- * make1b() - when dependencies are up to date, build target with make1c().
- *
- * Called after all dependencies have started being processed and after each of
- * them finishes its processing. The target actually goes on to get built in
- * make1c() only after all of its dependencies have finished their processing.
- */
-
-static void make1b( state * pState )
-{
- TARGET * t = pState->t;
- TARGETS * c;
- TARGET * failed = 0;
- char * failed_name = "dependencies";
-
- /* If any dependencies are still outstanding, wait until they call make1b()
- * to signal their completion.
- */
- if ( --pState->t->asynccnt )
- {
- pop_state( &state_stack );
- return;
- }
-
-    /* Try to acquire a semaphore. If it is locked, wait until the target that
-     * locked it is built and signals completion.
- */
-#ifdef OPT_SEMAPHORE
- if ( t->semaphore && t->semaphore->asynccnt )
- {
- /* Append 't' to the list of targets waiting on semaphore. */
- t->semaphore->parents = targetentry( t->semaphore->parents, t );
- t->asynccnt++;
-
- if ( DEBUG_EXECCMD )
- printf( "SEM: %s is busy, delaying launch of %s\n",
- t->semaphore->name, t->name );
- pop_state( &state_stack );
- return;
- }
-#endif
-
- /* Now ready to build target 't', if dependencies built OK. */
-
- /* Collect status from dependencies. */
- for ( c = t->depends; c; c = c->next )
- if ( c->target->status > t->status && !( c->target->flags & T_FLAG_NOCARE ) )
- {
- failed = c->target;
- pState->t->status = c->target->status;
- }
- /* If an internal header node failed to build, we want to output the target
- * that it failed on.
- */
- if ( failed )
- {
- failed_name = failed->flags & T_FLAG_INTERNAL
- ? failed->failed
- : failed->name;
- }
- t->failed = failed_name;
-
- /* If actions for building any of the dependencies have failed, bail.
- * Otherwise, execute all actions to make the current target.
- */
- if ( ( pState->t->status == EXEC_CMD_FAIL ) && pState->t->actions )
- {
- ++counts->skipped;
- if ( ( pState->t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD )
- {
- if ( !unlink( pState->t->boundname ) )
- printf( "...removing outdated %s\n", pState->t->boundname );
- }
- else
- printf( "...skipped %s for lack of %s...\n", pState->t->name, failed_name );
- }
-
- if ( pState->t->status == EXEC_CMD_OK )
- switch ( pState->t->fate )
- {
- /* These are handled by the default case below now
- case T_FATE_INIT:
- case T_FATE_MAKING:
- */
-
- case T_FATE_STABLE:
- case T_FATE_NEWER:
- break;
-
- case T_FATE_CANTFIND:
- case T_FATE_CANTMAKE:
- pState->t->status = EXEC_CMD_FAIL;
- break;
-
- case T_FATE_ISTMP:
- if ( DEBUG_MAKE )
- printf( "...using %s...\n", pState->t->name );
- break;
-
- case T_FATE_TOUCHED:
- case T_FATE_MISSING:
- case T_FATE_NEEDTMP:
- case T_FATE_OUTDATED:
- case T_FATE_UPDATE:
- case T_FATE_REBUILD:
- /* Prepare commands for executing actions scheduled for this target
- * and then schedule transfer to make1c() state to proceed with
- * executing the prepared commands. Commands have their embedded
- * variables automatically expanded, including making use of any "on
- * target" variables.
- */
- if ( pState->t->actions )
- {
- ++counts->total;
- if ( DEBUG_MAKE && !( counts->total % 100 ) )
- printf( "...on %dth target...\n", counts->total );
-
- pState->t->cmds = (char *)make1cmds( pState->t );
- /* Set the target's "progress" so that make1c() counts it among
- * its successes/failures.
- */
- pState->t->progress = T_MAKE_RUNNING;
- }
- break;
-
- /* All possible fates should have been accounted for by now. */
- default:
- printf( "ERROR: %s has bad fate %d", pState->t->name,
- pState->t->fate );
- abort();
- }
-
- /* Call make1c() to begin the execution of the chain of commands needed to
- * build the target. If we are not going to build the target (due to
- * dependency failures or no commands needing to be run) the chain will be
- * empty and make1c() will directly signal the target's completion.
- */
-
-#ifdef OPT_SEMAPHORE
- /* If there is a semaphore, indicate that it is in use. */
- if ( pState->t->semaphore )
- {
- ++pState->t->semaphore->asynccnt;
- if ( DEBUG_EXECCMD )
- printf( "SEM: %s now used by %s\n", pState->t->semaphore->name,
- pState->t->name );
- }
-#endif
-
- pState->curstate = T_STATE_MAKE1C;
-}
-
-
-/*
- * make1c() - launch target's next command, call parents' make1b() if none.
- *
- * If there are (more) commands to run to build this target (and we have not hit
- * an error running earlier commands) we launch the command using exec_cmd(). If
- * there are no more commands to run, we collect the status from all the actions
- * and report our completion to all the parents.
- */
-
-static void make1c( state * pState )
-{
- CMD * cmd = (CMD *)pState->t->cmds;
-
- if ( cmd && ( pState->t->status == EXEC_CMD_OK ) )
- {
- char * rule_name = 0;
- char * target = 0;
-
- if ( DEBUG_MAKEQ ||
- ( !( cmd->rule->actions->flags & RULE_QUIETLY ) && DEBUG_MAKE ) )
- {
- rule_name = cmd->rule->name;
- target = lol_get( &cmd->args, 0 )->string;
- if ( globs.noexec )
- out_action( rule_name, target, cmd->buf, "", "", EXIT_OK );
- }
-
- if ( globs.noexec )
- {
- pState->curstate = T_STATE_MAKE1D;
- pState->status = EXEC_CMD_OK;
- }
- else
- {
- /* Pop state first because exec_cmd() could push state. */
- pop_state( &state_stack );
- exec_cmd( cmd->buf, make_closure, pState->t, cmd->shell, rule_name,
- target );
- }
- }
- else
- {
- TARGETS * c;
- ACTIONS * actions;
-
- /* Collect status from actions, and distribute it as well. */
- for ( actions = pState->t->actions; actions; actions = actions->next )
- if ( actions->action->status > pState->t->status )
- pState->t->status = actions->action->status;
- for ( actions = pState->t->actions; actions; actions = actions->next )
- if ( pState->t->status > actions->action->status )
- actions->action->status = pState->t->status;
-
- /* Tally success/failure for those we tried to update. */
- if ( pState->t->progress == T_MAKE_RUNNING )
- switch ( pState->t->status )
- {
- case EXEC_CMD_OK : ++counts->made ; break;
- case EXEC_CMD_FAIL: ++counts->failed; break;
- }
-
- /* Tell parents their dependency has been built. */
- {
- stack temp_stack = { NULL };
- TARGET * t = pState->t;
- TARGET * additional_includes = NULL;
-
- t->progress = T_MAKE_DONE;
-
- /* Target has been updated so rescan it for dependencies. */
- if ( ( t->fate >= T_FATE_MISSING ) &&
- ( t->status == EXEC_CMD_OK ) &&
- !t->rescanned )
- {
- TARGET * target_to_rescan = t;
- SETTINGS * s;
-
- target_to_rescan->rescanned = 1;
-
- if ( target_to_rescan->flags & T_FLAG_INTERNAL )
- target_to_rescan = t->original_target;
-
- /* Clean current includes. */
- target_to_rescan->includes = 0;
-
- s = copysettings( target_to_rescan->settings );
- pushsettings( s );
- headers( target_to_rescan );
- popsettings( s );
- freesettings( s );
-
- if ( target_to_rescan->includes )
- {
- target_to_rescan->includes->rescanned = 1;
- /* Tricky. The parents have already been processed, but they
- * have not seen the internal node, because it was just
- * created. We need to make the calls to make1a() that would
- * have been made by the parents here, and also make sure
- * all unprocessed parents will pick up the includes. We
- * must make sure processing of the additional make1a()
-                     * invocations is done before the make1b() call that marks this
-                     * target as built; otherwise the parent would be considered
- * built before this make1a() processing has even started.
- */
- make0( target_to_rescan->includes, target_to_rescan->parents->target, 0, 0, 0 );
- for ( c = target_to_rescan->parents; c; c = c->next )
- c->target->depends = targetentry( c->target->depends,
- target_to_rescan->includes );
- /* Will be processed below. */
- additional_includes = target_to_rescan->includes;
- }
- }
-
- if ( additional_includes )
- for ( c = t->parents; c; c = c->next )
- push_state( &temp_stack, additional_includes, c->target, T_STATE_MAKE1A );
-
- for ( c = t->parents; c; c = c->next )
- push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B );
-
-#ifdef OPT_SEMAPHORE
- /* If there is a semaphore, it is now free. */
- if ( t->semaphore )
- {
- assert( t->semaphore->asynccnt == 1 );
- --t->semaphore->asynccnt;
-
- if ( DEBUG_EXECCMD )
- printf( "SEM: %s is now free\n", t->semaphore->name );
-
- /* If anything is waiting, notify the next target. There is no
-             * point in notifying all waiting targets, since the rest will be
- * notified again.
- */
- if ( t->semaphore->parents )
- {
- TARGETS * first = t->semaphore->parents;
- if ( first->next )
- first->next->tail = first->tail;
- t->semaphore->parents = first->next;
-
- if ( DEBUG_EXECCMD )
- printf( "SEM: placing %s on stack\n", first->target->name );
- push_state( &temp_stack, first->target, NULL, T_STATE_MAKE1B );
- BJAM_FREE( first );
- }
- }
-#endif
-
- /* Must pop state before pushing any more. */
- pop_state( &state_stack );
-
- /* Using stacks reverses the order of execution. Reverse it back. */
- push_stack_on_stack( &state_stack, &temp_stack );
- }
- }
-}
-
-
-/*
- * call_timing_rule() - Look up the __TIMING_RULE__ variable on the given
- * target, and if non-empty, invoke the rule it names, passing the given
- * timing_info.
- */
-
-static void call_timing_rule( TARGET * target, timing_info * time )
-{
- LIST * timing_rule;
-
- pushsettings( target->settings );
- timing_rule = var_get( "__TIMING_RULE__" );
- popsettings( target->settings );
-
- if ( timing_rule )
- {
- /* rule timing-rule ( args * : target : start end user system ) */
-
- /* Prepare the argument list. */
- FRAME frame[ 1 ];
- frame_init( frame );
-
- /* args * :: $(__TIMING_RULE__[2-]) */
- lol_add( frame->args, list_copy( L0, timing_rule->next ) );
-
- /* target :: the name of the target */
- lol_add( frame->args, list_new( L0, target->name ) );
-
- /* start end user system :: info about the action command */
- lol_add( frame->args, list_new( list_new( list_new( list_new( L0,
- outf_time ( time->start ) ),
- outf_time ( time->end ) ),
- outf_double( time->user ) ),
- outf_double( time->system ) ) );
-
- /* Call the rule. */
- evaluate_rule( timing_rule->string, frame );
-
- /* Clean up. */
- frame_free( frame );
- }
-}
-
-
-/*
- * call_action_rule() - Look up the __ACTION_RULE__ variable on the given
- * target, and if non-empty, invoke the rule it names, passing the given info,
- * timing_info, executed command and command output.
- */
-
-static void call_action_rule
-(
- TARGET * target,
- int status,
- timing_info * time,
- char * executed_command,
- char * command_output
-)
-{
- LIST * action_rule;
-
- pushsettings( target->settings );
- action_rule = var_get( "__ACTION_RULE__" );
- popsettings( target->settings );
-
- if ( action_rule )
- {
- /* rule action-rule (
- args * :
- target :
- command status start end user system :
- output ? ) */
-
- /* Prepare the argument list. */
- FRAME frame[ 1 ];
- frame_init( frame );
-
- /* args * :: $(__ACTION_RULE__[2-]) */
- lol_add( frame->args, list_copy( L0, action_rule->next ) );
-
- /* target :: the name of the target */
- lol_add( frame->args, list_new( L0, target->name ) );
-
- /* command status start end user system :: info about the action command */
- lol_add( frame->args,
- list_new( list_new( list_new( list_new( list_new( list_new( L0,
- newstr( executed_command ) ),
- outf_int( status ) ),
- outf_time( time->start ) ),
- outf_time( time->end ) ),
- outf_double( time->user ) ),
- outf_double( time->system ) ) );
-
- /* output ? :: the output of the action command */
- if ( command_output )
- lol_add( frame->args, list_new( L0, newstr( command_output ) ) );
- else
- lol_add( frame->args, L0 );
-
- /* Call the rule. */
- evaluate_rule( action_rule->string, frame );
-
- /* Clean up. */
- frame_free( frame );
- }
-}
-
-
-/*
- * make_closure() - internal function passed as a notification callback for when
- * commands finish getting executed by the OS.
- */
-
-static void make_closure
-(
- void * closure,
- int status,
- timing_info * time,
- char * executed_command,
- char * command_output
-)
-{
- TARGET * built = (TARGET *)closure;
-
- call_timing_rule( built, time );
- if ( DEBUG_EXECCMD )
- printf( "%f sec system; %f sec user\n", time->system, time->user );
-
- call_action_rule( built, status, time, executed_command, command_output );
-
- push_state( &state_stack, built, NULL, T_STATE_MAKE1D )->status = status;
-}
-
-
-/*
- * make1d() - handle command execution completion and call back make1c().
- *
- * exec_cmd() has completed and now all we need to do is fiddle with the status
- * and call back to make1c() so it can run the next command scheduled for
- * building this target or close up the target's build process in case there are
- * no more commands scheduled for it. On interrupts, we bail heavily.
- */
-
-static void make1d( state * pState )
-{
- TARGET * t = pState->t;
- CMD * cmd = (CMD *)t->cmds;
- int status = pState->status;
-
- if ( t->flags & T_FLAG_FAIL_EXPECTED )
- {
- /* Invert execution result when FAIL_EXPECTED has been applied. */
- switch ( status )
- {
- case EXEC_CMD_FAIL: status = EXEC_CMD_OK ; break;
- case EXEC_CMD_OK: status = EXEC_CMD_FAIL; break;
- }
- }
-
- if ( ( status == EXEC_CMD_FAIL ) &&
- ( cmd->rule->actions->flags & RULE_IGNORE ) )
- status = EXEC_CMD_OK;
-
- /* On interrupt, set intr so _everything_ fails. */
- if ( status == EXEC_CMD_INTR )
- ++intr;
-
- /* Print command text on failure. */
- if ( ( status == EXEC_CMD_FAIL ) && DEBUG_MAKE )
- {
- if ( !DEBUG_EXEC )
- printf( "%s\n", cmd->buf );
-
- printf( "...failed %s ", cmd->rule->name );
- list_print( lol_get( &cmd->args, 0 ) );
- printf( "...\n" );
- }
-
-    /* Treat failed commands as interrupts if we were asked to stop the
-     * build on the first error.
- */
- if ( ( status == EXEC_CMD_FAIL ) && globs.quitquick )
- ++intr;
-
- /* If the command was interrupted or failed and the target is not
- * "precious", remove the targets.
- */
- if (status != EXEC_CMD_OK)
- {
- LIST * targets = lol_get( &cmd->args, 0 );
- for ( ; targets; targets = list_next( targets ) )
- {
- int need_unlink = 1;
- TARGET* t = bindtarget ( targets->string );
- if (t->flags & T_FLAG_PRECIOUS)
- {
- need_unlink = 0;
- }
- if (need_unlink && !unlink( targets->string ) )
- printf( "...removing %s\n", targets->string );
- }
- }
-
- /* Free this command and call make1c() to move onto the next one scheduled
- * for building this same target.
- */
- t->status = status;
- t->cmds = (char *)cmd_next( cmd );
- cmd_free( cmd );
- pState->curstate = T_STATE_MAKE1C;
-}
-
-
-/*
- * swap_settings() - replace the settings from the current module and target
- * with those from the new module and target
- */
-
-static void swap_settings
-(
- module_t * * current_module,
- TARGET * * current_target,
- module_t * new_module,
- TARGET * new_target
-)
-{
- if ( new_module == root_module() )
- new_module = 0;
-
- if ( ( new_target == *current_target ) && ( new_module == *current_module ) )
- return;
-
- if ( *current_target )
- popsettings( (*current_target)->settings );
-
- if ( new_module != *current_module )
- {
- if ( *current_module )
- exit_module( *current_module );
-
- *current_module = new_module;
-
- if ( new_module )
- enter_module( new_module );
- }
-
- *current_target = new_target;
- if ( new_target )
- pushsettings( new_target->settings );
-}
-
-
-/*
- * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
- *
- * Essentially copies a chain of ACTIONs to a chain of CMDs, grouping
- * RULE_TOGETHER actions, splitting RULE_PIECEMEAL actions, and handling
- * RULE_NEWSRCS actions. The result is a chain of CMDs which can be expanded by
- * var_string() and executed using exec_cmd().
- */
-
-static CMD * make1cmds( TARGET * t )
-{
- CMD * cmds = 0;
- LIST * shell = 0;
- module_t * settings_module = 0;
- TARGET * settings_target = 0;
- ACTIONS * a0;
-
- /* Step through actions. Actions may be shared with other targets or grouped
- * using RULE_TOGETHER, so actions already seen are skipped.
- */
- for ( a0 = t->actions ; a0; a0 = a0->next )
- {
- RULE * rule = a0->action->rule;
- rule_actions * actions = rule->actions;
- SETTINGS * boundvars;
- LIST * nt;
- LIST * ns;
- ACTIONS * a1;
- int start;
- int chunk;
- int length;
-
- /* Only do rules with commands to execute. If this action has already
- * been executed, use saved status.
- */
- if ( !actions || a0->action->running )
- continue;
-
- a0->action->running = 1;
-
- /* Make LISTS of targets and sources. If `execute together` has been
- * specified for this rule, tack on sources from each instance of this
- * rule for this target.
- */
- nt = make1list( L0, a0->action->targets, 0 );
- ns = make1list( L0, a0->action->sources, actions->flags );
- if ( actions->flags & RULE_TOGETHER )
- for ( a1 = a0->next; a1; a1 = a1->next )
- if ( a1->action->rule == rule && !a1->action->running )
- {
- ns = make1list( ns, a1->action->sources, actions->flags );
- a1->action->running = 1;
- }
-
- /* If doing only updated (or existing) sources, but none have been
- * updated (or exist), skip this action.
- */
- if ( !ns && ( actions->flags & ( RULE_NEWSRCS | RULE_EXISTING ) ) )
- {
- list_free( nt );
- continue;
- }
-
- swap_settings( &settings_module, &settings_target, rule->module, t );
- if ( !shell )
- shell = var_get( "JAMSHELL" ); /* shell is per-target */
-
- /* If we had 'actions xxx bind vars' we bind the vars now. */
- boundvars = make1settings( actions->bindlist );
- pushsettings( boundvars );
-
- /*
- * Build command, starting with all source args.
- *
- * If cmd_new returns 0, it is because the resulting command length is
- * > MAXLINE. In this case, we will slowly reduce the number of source
- * arguments presented until it does fit. This only applies to actions
- * that allow PIECEMEAL commands.
- *
- * While reducing slowly takes a bit of compute time to get things just
- * right, it is worth it to get as close to MAXLINE as possible, because
- * launching the commands we are executing is likely to be much more
- * compute intensive.
- *
- * Note we loop through at least once, for sourceless actions.
- */
-
- start = 0;
- chunk = length = list_length( ns );
-
- do
- {
- /* Build cmd: cmd_new consumes its lists. */
- CMD * cmd = cmd_new( rule,
- list_copy( L0, nt ),
- list_sublist( ns, start, chunk ),
- list_copy( L0, shell ) );
-
- if ( cmd )
- {
- /* It fit: chain it up. */
- if ( !cmds ) cmds = cmd;
- else cmds->tail->next = cmd;
- cmds->tail = cmd;
- start += chunk;
- }
- else if ( ( actions->flags & RULE_PIECEMEAL ) && ( chunk > 1 ) )
- {
- /* Reduce chunk size slowly. */
- chunk = chunk * 9 / 10;
- }
- else
- {
- /* Too long and not splittable. */
- printf( "%s actions too long (max %d):\n", rule->name, MAXLINE
- );
-
- /* Tell the user what didn't fit. */
- cmd = cmd_new( rule, list_copy( L0, nt ),
- list_sublist( ns, start, chunk ),
- list_new( L0, newstr( "%" ) ) );
- fputs( cmd->buf, stdout );
- exit( EXITBAD );
- }
- }
- while ( start < length );
-
- /* These were always copied when used. */
- list_free( nt );
- list_free( ns );
-
- /* Free the variables whose values were bound by 'actions xxx bind
- * vars'.
- */
- popsettings( boundvars );
- freesettings( boundvars );
- }
-
- swap_settings( &settings_module, &settings_target, 0, 0 );
- return cmds;
-}
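A minimal, hypothetical sketch of the piecemeal chunk-reduction strategy described in the comment inside make1cmds() above. The fits() predicate and the 40-source limit are stand-ins for cmd_new() succeeding against MAXLINE; only the 9/10 shrink-until-it-fits loop is taken from the code.

#include <stdio.h>

/* Pretend limit: a command fits if it names at most 40 sources. fits() is a
 * stand-in for cmd_new() returning non-zero.
 */
static int fits( int nsources )
{
    return nsources <= 40;
}

int main( void )
{
    int length = 100;   /* total number of sources */
    int start  = 0;
    int chunk  = length;

    do
    {
        int take = chunk < length - start ? chunk : length - start;

        if ( fits( take ) )
        {
            printf( "emit command for sources [%d, %d)\n", start, start + take );
            start += take;              /* window advances, chunk stays large */
        }
        else if ( chunk > 1 )
            chunk = chunk * 9 / 10;     /* slowly shrink until the command fits */
        else
        {
            printf( "a single source is still too long\n" );
            return 1;
        }
    }
    while ( start < length );

    return 0;
}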
-
-
-/*
- * make1list() - turn a list of targets into a LIST, for $(<) and $(>).
- */
-
-static LIST * make1list( LIST * l, TARGETS * targets, int flags )
-{
- for ( ; targets; targets = targets->next )
- {
- TARGET * t = targets->target;
-
- if ( t->binding == T_BIND_UNBOUND )
- make1bind( t );
-
- if ( ( flags & RULE_EXISTING ) && ( flags & RULE_NEWSRCS ) )
- {
- if ( ( t->binding != T_BIND_EXISTS ) && ( t->fate <= T_FATE_STABLE ) )
- continue;
- }
- else
- {
- if ( ( flags & RULE_EXISTING ) && ( t->binding != T_BIND_EXISTS ) )
- continue;
-
- if ( ( flags & RULE_NEWSRCS ) && ( t->fate <= T_FATE_STABLE ) )
- continue;
- }
-
- /* Prohibit duplicates for RULE_TOGETHER. */
- if ( flags & RULE_TOGETHER )
- {
- LIST * m;
- for ( m = l; m; m = m->next )
- if ( !strcmp( m->string, t->boundname ) )
- break;
- if ( m )
- continue;
- }
-
- /* Build new list. */
- l = list_new( l, copystr( t->boundname ) );
- }
-
- return l;
-}
-
-
-/*
- * make1settings() - for vars that get bound values, build up replacement lists.
- */
-
-static SETTINGS * make1settings( LIST * vars )
-{
- SETTINGS * settings = 0;
-
- for ( ; vars; vars = list_next( vars ) )
- {
- LIST * l = var_get( vars->string );
- LIST * nl = 0;
-
- for ( ; l; l = list_next( l ) )
- {
- TARGET * t = bindtarget( l->string );
-
- /* Make sure the target is bound. */
- if ( t->binding == T_BIND_UNBOUND )
- make1bind( t );
-
- /* Build a new list. */
- nl = list_new( nl, copystr( t->boundname ) );
- }
-
- /* Add to settings chain. */
- settings = addsettings( settings, VAR_SET, vars->string, nl );
- }
-
- return settings;
-}
-
-
-/*
- * make1bind() - bind targets that were not bound during dependency analysis
- *
- * Spot the kludge! If a target is not in the dependency tree, it did not get
- * bound by make0(), so we have to do it here. Ugly.
- */
-
-static void make1bind( TARGET * t )
-{
- if ( t->flags & T_FLAG_NOTFILE )
- return;
-
- pushsettings( t->settings );
- t->boundname = search( t->name, &t->time, 0, ( t->flags & T_FLAG_ISFILE ) );
- t->binding = t->time ? T_BIND_EXISTS : T_BIND_MISSING;
- popsettings( t->settings );
-}
diff --git a/jam-files/engine/md5.c b/jam-files/engine/md5.c
deleted file mode 100644
index c35d96c5..00000000
--- a/jam-files/engine/md5.c
+++ /dev/null
@@ -1,381 +0,0 @@
-/*
- Copyright (C) 1999, 2000, 2002 Aladdin Enterprises. All rights reserved.
-
- This software is provided 'as-is', without any express or implied
- warranty. In no event will the authors be held liable for any damages
- arising from the use of this software.
-
- Permission is granted to anyone to use this software for any purpose,
- including commercial applications, and to alter it and redistribute it
- freely, subject to the following restrictions:
-
- 1. The origin of this software must not be misrepresented; you must not
- claim that you wrote the original software. If you use this software
- in a product, an acknowledgment in the product documentation would be
- appreciated but is not required.
- 2. Altered source versions must be plainly marked as such, and must not be
- misrepresented as being the original software.
- 3. This notice may not be removed or altered from any source distribution.
-
- L. Peter Deutsch
- ghost@aladdin.com
-
- */
-/* $Id: md5.c,v 1.6 2002/04/13 19:20:28 lpd Exp $ */
-/*
- Independent implementation of MD5 (RFC 1321).
-
- This code implements the MD5 Algorithm defined in RFC 1321, whose
- text is available at
- http://www.ietf.org/rfc/rfc1321.txt
- The code is derived from the text of the RFC, including the test suite
- (section A.5) but excluding the rest of Appendix A. It does not include
- any code or documentation that is identified in the RFC as being
- copyrighted.
-
- The original and principal author of md5.c is L. Peter Deutsch
- <ghost@aladdin.com>. Other authors are noted in the change history
- that follows (in reverse chronological order):
-
- 2002-04-13 lpd Clarified derivation from RFC 1321; now handles byte order
- either statically or dynamically; added missing #include <string.h>
- in library.
- 2002-03-11 lpd Corrected argument list for main(), and added int return
- type, in test program and T value program.
- 2002-02-21 lpd Added missing #include <stdio.h> in test program.
- 2000-07-03 lpd Patched to eliminate warnings about "constant is
- unsigned in ANSI C, signed in traditional"; made test program
- self-checking.
- 1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
- 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5).
- 1999-05-03 lpd Original version.
- */
-
-#include "md5.h"
-#include <string.h>
-
-#undef BYTE_ORDER /* 1 = big-endian, -1 = little-endian, 0 = unknown */
-#ifdef ARCH_IS_BIG_ENDIAN
-# define BYTE_ORDER (ARCH_IS_BIG_ENDIAN ? 1 : -1)
-#else
-# define BYTE_ORDER 0
-#endif
-
-#define T_MASK ((md5_word_t)~0)
-#define T1 /* 0xd76aa478 */ (T_MASK ^ 0x28955b87)
-#define T2 /* 0xe8c7b756 */ (T_MASK ^ 0x173848a9)
-#define T3 0x242070db
-#define T4 /* 0xc1bdceee */ (T_MASK ^ 0x3e423111)
-#define T5 /* 0xf57c0faf */ (T_MASK ^ 0x0a83f050)
-#define T6 0x4787c62a
-#define T7 /* 0xa8304613 */ (T_MASK ^ 0x57cfb9ec)
-#define T8 /* 0xfd469501 */ (T_MASK ^ 0x02b96afe)
-#define T9 0x698098d8
-#define T10 /* 0x8b44f7af */ (T_MASK ^ 0x74bb0850)
-#define T11 /* 0xffff5bb1 */ (T_MASK ^ 0x0000a44e)
-#define T12 /* 0x895cd7be */ (T_MASK ^ 0x76a32841)
-#define T13 0x6b901122
-#define T14 /* 0xfd987193 */ (T_MASK ^ 0x02678e6c)
-#define T15 /* 0xa679438e */ (T_MASK ^ 0x5986bc71)
-#define T16 0x49b40821
-#define T17 /* 0xf61e2562 */ (T_MASK ^ 0x09e1da9d)
-#define T18 /* 0xc040b340 */ (T_MASK ^ 0x3fbf4cbf)
-#define T19 0x265e5a51
-#define T20 /* 0xe9b6c7aa */ (T_MASK ^ 0x16493855)
-#define T21 /* 0xd62f105d */ (T_MASK ^ 0x29d0efa2)
-#define T22 0x02441453
-#define T23 /* 0xd8a1e681 */ (T_MASK ^ 0x275e197e)
-#define T24 /* 0xe7d3fbc8 */ (T_MASK ^ 0x182c0437)
-#define T25 0x21e1cde6
-#define T26 /* 0xc33707d6 */ (T_MASK ^ 0x3cc8f829)
-#define T27 /* 0xf4d50d87 */ (T_MASK ^ 0x0b2af278)
-#define T28 0x455a14ed
-#define T29 /* 0xa9e3e905 */ (T_MASK ^ 0x561c16fa)
-#define T30 /* 0xfcefa3f8 */ (T_MASK ^ 0x03105c07)
-#define T31 0x676f02d9
-#define T32 /* 0x8d2a4c8a */ (T_MASK ^ 0x72d5b375)
-#define T33 /* 0xfffa3942 */ (T_MASK ^ 0x0005c6bd)
-#define T34 /* 0x8771f681 */ (T_MASK ^ 0x788e097e)
-#define T35 0x6d9d6122
-#define T36 /* 0xfde5380c */ (T_MASK ^ 0x021ac7f3)
-#define T37 /* 0xa4beea44 */ (T_MASK ^ 0x5b4115bb)
-#define T38 0x4bdecfa9
-#define T39 /* 0xf6bb4b60 */ (T_MASK ^ 0x0944b49f)
-#define T40 /* 0xbebfbc70 */ (T_MASK ^ 0x4140438f)
-#define T41 0x289b7ec6
-#define T42 /* 0xeaa127fa */ (T_MASK ^ 0x155ed805)
-#define T43 /* 0xd4ef3085 */ (T_MASK ^ 0x2b10cf7a)
-#define T44 0x04881d05
-#define T45 /* 0xd9d4d039 */ (T_MASK ^ 0x262b2fc6)
-#define T46 /* 0xe6db99e5 */ (T_MASK ^ 0x1924661a)
-#define T47 0x1fa27cf8
-#define T48 /* 0xc4ac5665 */ (T_MASK ^ 0x3b53a99a)
-#define T49 /* 0xf4292244 */ (T_MASK ^ 0x0bd6ddbb)
-#define T50 0x432aff97
-#define T51 /* 0xab9423a7 */ (T_MASK ^ 0x546bdc58)
-#define T52 /* 0xfc93a039 */ (T_MASK ^ 0x036c5fc6)
-#define T53 0x655b59c3
-#define T54 /* 0x8f0ccc92 */ (T_MASK ^ 0x70f3336d)
-#define T55 /* 0xffeff47d */ (T_MASK ^ 0x00100b82)
-#define T56 /* 0x85845dd1 */ (T_MASK ^ 0x7a7ba22e)
-#define T57 0x6fa87e4f
-#define T58 /* 0xfe2ce6e0 */ (T_MASK ^ 0x01d3191f)
-#define T59 /* 0xa3014314 */ (T_MASK ^ 0x5cfebceb)
-#define T60 0x4e0811a1
-#define T61 /* 0xf7537e82 */ (T_MASK ^ 0x08ac817d)
-#define T62 /* 0xbd3af235 */ (T_MASK ^ 0x42c50dca)
-#define T63 0x2ad7d2bb
-#define T64 /* 0xeb86d391 */ (T_MASK ^ 0x14792c6e)
-
-
-static void
-md5_process(md5_state_t *pms, const md5_byte_t *data /*[64]*/)
-{
- md5_word_t
- a = pms->abcd[0], b = pms->abcd[1],
- c = pms->abcd[2], d = pms->abcd[3];
- md5_word_t t;
-#if BYTE_ORDER > 0
- /* Define storage only for big-endian CPUs. */
- md5_word_t X[16];
-#else
- /* Define storage for little-endian or both types of CPUs. */
- md5_word_t xbuf[16];
- const md5_word_t *X;
-#endif
-
- {
-#if BYTE_ORDER == 0
- /*
- * Determine dynamically whether this is a big-endian or
- * little-endian machine, since we can use a more efficient
- * algorithm on the latter.
- */
- static const int w = 1;
-
- if (*((const md5_byte_t *)&w)) /* dynamic little-endian */
-#endif
-#if BYTE_ORDER <= 0 /* little-endian */
- {
- /*
- * On little-endian machines, we can process properly aligned
- * data without copying it.
- */
- if (!((data - (const md5_byte_t *)0) & 3)) {
- /* data are properly aligned */
- X = (const md5_word_t *)data;
- } else {
- /* not aligned */
- memcpy(xbuf, data, 64);
- X = xbuf;
- }
- }
-#endif
-#if BYTE_ORDER == 0
- else /* dynamic big-endian */
-#endif
-#if BYTE_ORDER >= 0 /* big-endian */
- {
- /*
- * On big-endian machines, we must arrange the bytes in the
- * right order.
- */
- const md5_byte_t *xp = data;
- int i;
-
-# if BYTE_ORDER == 0
- X = xbuf; /* (dynamic only) */
-# else
-# define xbuf X /* (static only) */
-# endif
- for (i = 0; i < 16; ++i, xp += 4)
- xbuf[i] = xp[0] + (xp[1] << 8) + (xp[2] << 16) + (xp[3] << 24);
- }
-#endif
- }
-
-#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32 - (n))))
-
- /* Round 1. */
- /* Let [abcd k s i] denote the operation
- a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s). */
-#define F(x, y, z) (((x) & (y)) | (~(x) & (z)))
-#define SET(a, b, c, d, k, s, Ti)\
- t = a + F(b,c,d) + X[k] + Ti;\
- a = ROTATE_LEFT(t, s) + b
- /* Do the following 16 operations. */
- SET(a, b, c, d, 0, 7, T1);
- SET(d, a, b, c, 1, 12, T2);
- SET(c, d, a, b, 2, 17, T3);
- SET(b, c, d, a, 3, 22, T4);
- SET(a, b, c, d, 4, 7, T5);
- SET(d, a, b, c, 5, 12, T6);
- SET(c, d, a, b, 6, 17, T7);
- SET(b, c, d, a, 7, 22, T8);
- SET(a, b, c, d, 8, 7, T9);
- SET(d, a, b, c, 9, 12, T10);
- SET(c, d, a, b, 10, 17, T11);
- SET(b, c, d, a, 11, 22, T12);
- SET(a, b, c, d, 12, 7, T13);
- SET(d, a, b, c, 13, 12, T14);
- SET(c, d, a, b, 14, 17, T15);
- SET(b, c, d, a, 15, 22, T16);
-#undef SET
-
- /* Round 2. */
- /* Let [abcd k s i] denote the operation
- a = b + ((a + G(b,c,d) + X[k] + T[i]) <<< s). */
-#define G(x, y, z) (((x) & (z)) | ((y) & ~(z)))
-#define SET(a, b, c, d, k, s, Ti)\
- t = a + G(b,c,d) + X[k] + Ti;\
- a = ROTATE_LEFT(t, s) + b
- /* Do the following 16 operations. */
- SET(a, b, c, d, 1, 5, T17);
- SET(d, a, b, c, 6, 9, T18);
- SET(c, d, a, b, 11, 14, T19);
- SET(b, c, d, a, 0, 20, T20);
- SET(a, b, c, d, 5, 5, T21);
- SET(d, a, b, c, 10, 9, T22);
- SET(c, d, a, b, 15, 14, T23);
- SET(b, c, d, a, 4, 20, T24);
- SET(a, b, c, d, 9, 5, T25);
- SET(d, a, b, c, 14, 9, T26);
- SET(c, d, a, b, 3, 14, T27);
- SET(b, c, d, a, 8, 20, T28);
- SET(a, b, c, d, 13, 5, T29);
- SET(d, a, b, c, 2, 9, T30);
- SET(c, d, a, b, 7, 14, T31);
- SET(b, c, d, a, 12, 20, T32);
-#undef SET
-
- /* Round 3. */
- /* Let [abcd k s t] denote the operation
- a = b + ((a + H(b,c,d) + X[k] + T[i]) <<< s). */
-#define H(x, y, z) ((x) ^ (y) ^ (z))
-#define SET(a, b, c, d, k, s, Ti)\
- t = a + H(b,c,d) + X[k] + Ti;\
- a = ROTATE_LEFT(t, s) + b
- /* Do the following 16 operations. */
- SET(a, b, c, d, 5, 4, T33);
- SET(d, a, b, c, 8, 11, T34);
- SET(c, d, a, b, 11, 16, T35);
- SET(b, c, d, a, 14, 23, T36);
- SET(a, b, c, d, 1, 4, T37);
- SET(d, a, b, c, 4, 11, T38);
- SET(c, d, a, b, 7, 16, T39);
- SET(b, c, d, a, 10, 23, T40);
- SET(a, b, c, d, 13, 4, T41);
- SET(d, a, b, c, 0, 11, T42);
- SET(c, d, a, b, 3, 16, T43);
- SET(b, c, d, a, 6, 23, T44);
- SET(a, b, c, d, 9, 4, T45);
- SET(d, a, b, c, 12, 11, T46);
- SET(c, d, a, b, 15, 16, T47);
- SET(b, c, d, a, 2, 23, T48);
-#undef SET
-
- /* Round 4. */
- /* Let [abcd k s t] denote the operation
- a = b + ((a + I(b,c,d) + X[k] + T[i]) <<< s). */
-#define I(x, y, z) ((y) ^ ((x) | ~(z)))
-#define SET(a, b, c, d, k, s, Ti)\
- t = a + I(b,c,d) + X[k] + Ti;\
- a = ROTATE_LEFT(t, s) + b
- /* Do the following 16 operations. */
- SET(a, b, c, d, 0, 6, T49);
- SET(d, a, b, c, 7, 10, T50);
- SET(c, d, a, b, 14, 15, T51);
- SET(b, c, d, a, 5, 21, T52);
- SET(a, b, c, d, 12, 6, T53);
- SET(d, a, b, c, 3, 10, T54);
- SET(c, d, a, b, 10, 15, T55);
- SET(b, c, d, a, 1, 21, T56);
- SET(a, b, c, d, 8, 6, T57);
- SET(d, a, b, c, 15, 10, T58);
- SET(c, d, a, b, 6, 15, T59);
- SET(b, c, d, a, 13, 21, T60);
- SET(a, b, c, d, 4, 6, T61);
- SET(d, a, b, c, 11, 10, T62);
- SET(c, d, a, b, 2, 15, T63);
- SET(b, c, d, a, 9, 21, T64);
-#undef SET
-
- /* Then perform the following additions. (That is increment each
- of the four registers by the value it had before this block
- was started.) */
- pms->abcd[0] += a;
- pms->abcd[1] += b;
- pms->abcd[2] += c;
- pms->abcd[3] += d;
-}
-
-void
-md5_init(md5_state_t *pms)
-{
- pms->count[0] = pms->count[1] = 0;
- pms->abcd[0] = 0x67452301;
- pms->abcd[1] = /*0xefcdab89*/ T_MASK ^ 0x10325476;
- pms->abcd[2] = /*0x98badcfe*/ T_MASK ^ 0x67452301;
- pms->abcd[3] = 0x10325476;
-}
-
-void
-md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes)
-{
- const md5_byte_t *p = data;
- int left = nbytes;
- int offset = (pms->count[0] >> 3) & 63;
- md5_word_t nbits = (md5_word_t)(nbytes << 3);
-
- if (nbytes <= 0)
- return;
-
- /* Update the message length. */
- pms->count[1] += nbytes >> 29;
- pms->count[0] += nbits;
- if (pms->count[0] < nbits)
- pms->count[1]++;
-
- /* Process an initial partial block. */
- if (offset) {
- int copy = (offset + nbytes > 64 ? 64 - offset : nbytes);
-
- memcpy(pms->buf + offset, p, copy);
- if (offset + copy < 64)
- return;
- p += copy;
- left -= copy;
- md5_process(pms, pms->buf);
- }
-
- /* Process full blocks. */
- for (; left >= 64; p += 64, left -= 64)
- md5_process(pms, p);
-
- /* Process a final partial block. */
- if (left)
- memcpy(pms->buf, p, left);
-}
-
-void
-md5_finish(md5_state_t *pms, md5_byte_t digest[16])
-{
- static const md5_byte_t pad[64] = {
- 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- };
- md5_byte_t data[8];
- int i;
-
- /* Save the length before padding. */
- for (i = 0; i < 8; ++i)
- data[i] = (md5_byte_t)(pms->count[i >> 2] >> ((i & 3) << 3));
- /* Pad to 56 bytes mod 64. */
- md5_append(pms, pad, ((55 - (pms->count[0] >> 3)) & 63) + 1);
- /* Append the length. */
- md5_append(pms, data, 8);
- for (i = 0; i < 16; ++i)
- digest[i] = (md5_byte_t)(pms->abcd[i >> 2] >> ((i & 3) << 3));
-}
diff --git a/jam-files/engine/md5.h b/jam-files/engine/md5.h
deleted file mode 100644
index 698c995d..00000000
--- a/jam-files/engine/md5.h
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved.
-
- This software is provided 'as-is', without any express or implied
- warranty. In no event will the authors be held liable for any damages
- arising from the use of this software.
-
- Permission is granted to anyone to use this software for any purpose,
- including commercial applications, and to alter it and redistribute it
- freely, subject to the following restrictions:
-
- 1. The origin of this software must not be misrepresented; you must not
- claim that you wrote the original software. If you use this software
- in a product, an acknowledgment in the product documentation would be
- appreciated but is not required.
- 2. Altered source versions must be plainly marked as such, and must not be
- misrepresented as being the original software.
- 3. This notice may not be removed or altered from any source distribution.
-
- L. Peter Deutsch
- ghost@aladdin.com
-
- */
-/* $Id: md5.h,v 1.4 2002/04/13 19:20:28 lpd Exp $ */
-/*
- Independent implementation of MD5 (RFC 1321).
-
- This code implements the MD5 Algorithm defined in RFC 1321, whose
- text is available at
- http://www.ietf.org/rfc/rfc1321.txt
- The code is derived from the text of the RFC, including the test suite
- (section A.5) but excluding the rest of Appendix A. It does not include
- any code or documentation that is identified in the RFC as being
- copyrighted.
-
- The original and principal author of md5.h is L. Peter Deutsch
- <ghost@aladdin.com>. Other authors are noted in the change history
- that follows (in reverse chronological order):
-
- 2002-04-13 lpd Removed support for non-ANSI compilers; removed
- references to Ghostscript; clarified derivation from RFC 1321;
- now handles byte order either statically or dynamically.
- 1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
- 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5);
- added conditionalization for C++ compilation from Martin
- Purschke <purschke@bnl.gov>.
- 1999-05-03 lpd Original version.
- */
-
-#ifndef md5_INCLUDED
-# define md5_INCLUDED
-
-/*
- * This package supports both compile-time and run-time determination of CPU
- * byte order. If ARCH_IS_BIG_ENDIAN is defined as 0, the code will be
- * compiled to run only on little-endian CPUs; if ARCH_IS_BIG_ENDIAN is
- * defined as non-zero, the code will be compiled to run only on big-endian
- * CPUs; if ARCH_IS_BIG_ENDIAN is not defined, the code will be compiled to
- * run on either big- or little-endian CPUs, but will run slightly less
- * efficiently on either one than if ARCH_IS_BIG_ENDIAN is defined.
- */
-
-typedef unsigned char md5_byte_t; /* 8-bit byte */
-typedef unsigned int md5_word_t; /* 32-bit word */
-
-/* Define the state of the MD5 Algorithm. */
-typedef struct md5_state_s {
- md5_word_t count[2]; /* message length in bits, lsw first */
- md5_word_t abcd[4]; /* digest buffer */
- md5_byte_t buf[64]; /* accumulate block */
-} md5_state_t;
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-/* Initialize the algorithm. */
-void md5_init(md5_state_t *pms);
-
-/* Append a string to the message. */
-void md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes);
-
-/* Finish the message and return the digest. */
-void md5_finish(md5_state_t *pms, md5_byte_t digest[16]);
-
-#ifdef __cplusplus
-} /* end extern "C" */
-#endif
-
-#endif /* md5_INCLUDED */
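A small usage sketch of the md5_init()/md5_append()/md5_finish() interface declared above, hashing an in-memory buffer and printing the digest in hex. The "abc" input and its expected digest are the standard RFC 1321 test vector; linking against md5.c is assumed.

#include <stdio.h>
#include <string.h>
#include "md5.h"

int main( void )
{
    md5_state_t state;
    md5_byte_t digest[ 16 ];
    const char * msg = "abc";
    int i;

    md5_init( &state );
    md5_append( &state, (const md5_byte_t *)msg, (int)strlen( msg ) );
    md5_finish( &state, digest );

    for ( i = 0; i < 16; ++i )   /* expected: 900150983cd24fb0d6963f7d28e17f72 */
        printf( "%02x", digest[ i ] );
    printf( "\n" );
    return 0;
}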
diff --git a/jam-files/engine/mem.c b/jam-files/engine/mem.c
deleted file mode 100644
index 6a11fb38..00000000
--- a/jam-files/engine/mem.c
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-Copyright Rene Rivera 2006.
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or copy at
-http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#include "jam.h"
-
-#ifdef OPT_BOEHM_GC
-
- /* Compile the Boehm GC as one big chunk of code. It's much easier
-       this way than trying to make radical changes to the bjam build
- scripts. */
-
- #define ATOMIC_UNCOLLECTABLE
- #define NO_EXECUTE_PERMISSION
- #define ALL_INTERIOR_POINTERS
-
- #define LARGE_CONFIG
- /*
- #define NO_SIGNALS
- #define SILENT
- */
- #ifndef GC_DEBUG
- #define NO_DEBUGGING
- #endif
-
- #ifdef __GLIBC__
- #define __USE_GNU
- #endif
-
- #include "boehm_gc/reclaim.c"
- #include "boehm_gc/allchblk.c"
- #include "boehm_gc/misc.c"
- #include "boehm_gc/alloc.c"
- #include "boehm_gc/mach_dep.c"
- #include "boehm_gc/os_dep.c"
- #include "boehm_gc/mark_rts.c"
- #include "boehm_gc/headers.c"
- #include "boehm_gc/mark.c"
- #include "boehm_gc/obj_map.c"
- #include "boehm_gc/pcr_interface.c"
- #include "boehm_gc/blacklst.c"
- #include "boehm_gc/new_hblk.c"
- #include "boehm_gc/real_malloc.c"
- #include "boehm_gc/dyn_load.c"
- #include "boehm_gc/dbg_mlc.c"
- #include "boehm_gc/malloc.c"
- #include "boehm_gc/stubborn.c"
- #include "boehm_gc/checksums.c"
- #include "boehm_gc/pthread_support.c"
- #include "boehm_gc/pthread_stop_world.c"
- #include "boehm_gc/darwin_stop_world.c"
- #include "boehm_gc/typd_mlc.c"
- #include "boehm_gc/ptr_chck.c"
- #include "boehm_gc/mallocx.c"
- #include "boehm_gc/gcj_mlc.c"
- #include "boehm_gc/specific.c"
- #include "boehm_gc/gc_dlopen.c"
- #include "boehm_gc/backgraph.c"
- #include "boehm_gc/win32_threads.c"
-
- /* Needs to be last. */
- #include "boehm_gc/finalize.c"
-
-#elif defined(OPT_DUMA)
-
- #ifdef OS_NT
- #define WIN32
- #endif
- #include "duma/duma.c"
- #include "duma/print.c"
-
-#endif
diff --git a/jam-files/engine/mem.h b/jam-files/engine/mem.h
deleted file mode 100644
index 71b2fb4b..00000000
--- a/jam-files/engine/mem.h
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
-Copyright Rene Rivera 2006.
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or copy at
-http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#ifndef BJAM_MEM_H
-#define BJAM_MEM_H
-
-
-#ifdef OPT_BOEHM_GC
-
- /* Use Boehm GC memory allocator. */
- #include <gc.h>
- #define bjam_malloc_x(s) memset(GC_malloc(s),0,s)
- #define bjam_malloc_atomic_x(s) memset(GC_malloc_atomic(s),0,s)
- #define bjam_calloc_x(n,s) memset(GC_malloc((n)*(s)),0,(n)*(s))
- #define bjam_calloc_atomic_x(n,s) memset(GC_malloc_atomic((n)*(s)),0,(n)*(s))
- #define bjam_realloc_x(p,s) GC_realloc(p,s)
- #define bjam_free_x(p) GC_free(p)
- #define bjam_mem_init_x() GC_init(); GC_enable_incremental()
-
- #define bjam_malloc_raw_x(s) malloc(s)
- #define bjam_calloc_raw_x(n,s) calloc(n,s)
- #define bjam_realloc_raw_x(p,s) realloc(p,s)
- #define bjam_free_raw_x(p) free(p)
-
- #ifndef BJAM_NEWSTR_NO_ALLOCATE
- #define BJAM_NEWSTR_NO_ALLOCATE
- #endif
-
-#elif defined(OPT_DUMA)
-
- /* Use Duma memory debugging library. */
- #include <stdlib.h>
- #define _DUMA_CONFIG_H_
- #define DUMA_NO_GLOBAL_MALLOC_FREE
- #define DUMA_EXPLICIT_INIT
- #define DUMA_NO_THREAD_SAFETY
- #define DUMA_NO_CPP_SUPPORT
- /* #define DUMA_NO_LEAKDETECTION */
- /* #define DUMA_USE_FRAMENO */
- /* #define DUMA_PREFER_ATEXIT */
- /* #define DUMA_OLD_DEL_MACRO */
- /* #define DUMA_NO_HANG_MSG */
- #define DUMA_PAGE_SIZE 4096
- #define DUMA_MIN_ALIGNMENT 1
- /* #define DUMA_GNU_INIT_ATTR 0 */
- typedef unsigned int DUMA_ADDR;
- typedef unsigned int DUMA_SIZE;
- #include <duma.h>
- #define bjam_malloc_x(s) malloc(s)
- #define bjam_calloc_x(n,s) calloc(n,s)
- #define bjam_realloc_x(p,s) realloc(p,s)
- #define bjam_free_x(p) free(p)
-
- #ifndef BJAM_NEWSTR_NO_ALLOCATE
- #define BJAM_NEWSTR_NO_ALLOCATE
- #endif
-
-#else
-
- /* Standard C memory allocation. */
- #define bjam_malloc_x(s) malloc(s)
- #define bjam_calloc_x(n,s) calloc(n,s)
- #define bjam_realloc_x(p,s) realloc(p,s)
- #define bjam_free_x(p) free(p)
-
-#endif
-
-#ifndef bjam_malloc_atomic_x
- #define bjam_malloc_atomic_x(s) bjam_malloc_x(s)
-#endif
-#ifndef bjam_calloc_atomic_x
- #define bjam_calloc_atomic_x(n,s) bjam_calloc_x(n,s)
-#endif
-#ifndef bjam_mem_init_x
- #define bjam_mem_init_x()
-#endif
-#ifndef bjam_mem_close_x
- #define bjam_mem_close_x()
-#endif
-#ifndef bjam_malloc_raw_x
- #define bjam_malloc_raw_x(s) bjam_malloc_x(s)
-#endif
-#ifndef bjam_calloc_raw_x
- #define bjam_calloc_raw_x(n,s) bjam_calloc_x(n,s)
-#endif
-#ifndef bjam_realloc_raw_x
- #define bjam_realloc_raw_x(p,s) bjam_realloc_x(p,s)
-#endif
-#ifndef bjam_free_raw_x
- #define bjam_free_raw_x(p) bjam_free_x(p)
-#endif
-
-#ifdef OPT_DEBUG_PROFILE
-
- /* Profile tracing of memory allocations. */
- #define BJAM_MALLOC(s) (profile_memory(s), bjam_malloc_x(s))
- #define BJAM_MALLOC_ATOMIC(s) (profile_memory(s), bjam_malloc_atomic_x(s))
- #define BJAM_CALLOC(n,s) (profile_memory(n*s), bjam_calloc_x(n,s))
- #define BJAM_CALLOC_ATOMIC(n,s) (profile_memory(n*s), bjam_calloc_atomic_x(n,s))
- #define BJAM_REALLOC(p,s) (profile_memory(s), bjam_realloc_x(p,s))
- #define BJAM_FREE(p) bjam_free_x(p)
- #define BJAM_MEM_INIT() bjam_mem_init_x()
- #define BJAM_MEM_CLOSE() bjam_mem_close_x()
-
- #define BJAM_MALLOC_RAW(s) (profile_memory(s), bjam_malloc_raw_x(s))
- #define BJAM_CALLOC_RAW(n,s) (profile_memory(n*s), bjam_calloc_raw_x(n,s))
- #define BJAM_REALLOC_RAW(p,s) (profile_memory(s), bjam_realloc_raw_x(p,s))
- #define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
-
-#else
-
- /* No mem tracing. */
- #define BJAM_MALLOC(s) bjam_malloc_x(s)
- #define BJAM_MALLOC_ATOMIC(s) bjam_malloc_atomic_x(s)
- #define BJAM_CALLOC(n,s) bjam_calloc_x(n,s)
- #define BJAM_CALLOC_ATOMIC(n,s) bjam_calloc_atomic_x(n,s)
- #define BJAM_REALLOC(p,s) bjam_realloc_x(p,s)
- #define BJAM_FREE(p) bjam_free_x(p)
- #define BJAM_MEM_INIT() bjam_mem_init_x()
- #define BJAM_MEM_CLOSE() bjam_mem_close_x()
-
- #define BJAM_MALLOC_RAW(s) bjam_malloc_raw_x(s)
- #define BJAM_CALLOC_RAW(n,s) bjam_calloc_raw_x(n,s)
- #define BJAM_REALLOC_RAW(p,s) bjam_realloc_raw_x(p,s)
- #define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
-
-#endif
-
-
-#endif
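A hedged sketch of how the wrappers above are used by callers: engine code goes through BJAM_MALLOC/BJAM_FREE rather than malloc()/free() directly, so Boehm GC or DUMA can be swapped in by rebuilding with the matching OPT_* define. In the default configuration these expand straight to the standard C allocator; the duplicate() helper below is purely illustrative.

#include <stdlib.h>
#include <string.h>
#include "mem.h"

static char * duplicate( const char * s )
{
    /* Allocate through the engine's wrapper so the active allocator
     * (standard C, Boehm GC, or DUMA) is picked up automatically.
     */
    size_t n = strlen( s ) + 1;
    char * copy = (char *)BJAM_MALLOC( n );
    if ( copy )
        memcpy( copy, s, n );
    return copy;
}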
diff --git a/jam-files/engine/mkjambase.c b/jam-files/engine/mkjambase.c
deleted file mode 100644
index cdf59982..00000000
--- a/jam-files/engine/mkjambase.c
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * mkjambase.c - turn Jambase into a big C structure
- *
- * Usage: mkjambase jambase.c Jambase ...
- *
- * Results look like this:
- *
- * char *jambase[] = {
- * "...\n",
- * ...
- * 0 };
- *
- * Handles \'s and "'s specially; knows to delete blank and comment lines.
- *
- */
-
-#include <stdio.h>
-#include <string.h>
-
-
-int main( int argc, char * * argv, char * * envp )
-{
- char buf[ 1024 ];
- FILE * fin;
- FILE * fout;
- char * p;
- int doDotC = 0;
-
- if ( argc < 3 )
- {
- fprintf( stderr, "usage: %s jambase.c Jambase ...\n", argv[ 0 ] );
- return -1;
- }
-
- if ( !( fout = fopen( argv[1], "w" ) ) )
- {
- perror( argv[ 1 ] );
- return -1;
- }
-
- /* If the file ends in .c generate a C source file. */
- if ( ( p = strrchr( argv[1], '.' ) ) && !strcmp( p, ".c" ) )
- doDotC++;
-
- /* Now process the files. */
-
- argc -= 2;
- argv += 2;
-
- if ( doDotC )
- {
- fprintf( fout, "/* Generated by mkjambase from Jambase */\n" );
- fprintf( fout, "char *jambase[] = {\n" );
- }
-
- for ( ; argc--; ++argv )
- {
- if ( !( fin = fopen( *argv, "r" ) ) )
- {
- perror( *argv );
- return -1;
- }
-
- if ( doDotC )
- fprintf( fout, "/* %s */\n", *argv );
- else
- fprintf( fout, "### %s ###\n", *argv );
-
- while ( fgets( buf, sizeof( buf ), fin ) )
- {
- if ( doDotC )
- {
- char * p = buf;
-
- /* Strip leading whitespace. */
- while ( ( *p == ' ' ) || ( *p == '\t' ) || ( *p == '\n' ) )
- ++p;
-
- /* Drop comments and empty lines. */
- if ( ( *p == '#' ) || !*p )
- continue;
-
- /* Copy. */
- putc( '"', fout );
- for ( ; *p && ( *p != '\n' ); ++p )
- switch ( *p )
- {
- case '\\': putc( '\\', fout ); putc( '\\', fout ); break;
- case '"' : putc( '\\', fout ); putc( '"' , fout ); break;
- case '\r': break;
- default: putc( *p, fout ); break;
- }
-
- fprintf( fout, "\\n\",\n" );
- }
- else
- {
- fprintf( fout, "%s", buf );
- }
- }
-
- fclose( fin );
- }
-
- if ( doDotC )
- fprintf( fout, "0 };\n" );
-
- fclose( fout );
-
- return 0;
-}
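A hedged illustration of the transformation described in the header comment above: in .c output mode, blank and comment lines are dropped, leading whitespace is stripped, each remaining line becomes one quoted, escaped string, and the array is closed with a 0 sentinel. The Jambase fragment below is hypothetical.

/* Hypothetical input (Jambase):
 *
 *     # bootstrap rules
 *     rule Clean { Always clean ; }
 *
 * Generated output (jambase.c):
 */
/* Generated by mkjambase from Jambase */
char *jambase[] = {
/* Jambase */
"rule Clean { Always clean ; }\n",
0 };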
diff --git a/jam-files/engine/modules.c b/jam-files/engine/modules.c
deleted file mode 100644
index 72952594..00000000
--- a/jam-files/engine/modules.c
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-#include "jam.h"
-
-#include "modules.h"
-#include "string.h"
-#include "hash.h"
-#include "newstr.h"
-#include "lists.h"
-#include "parse.h"
-#include "rules.h"
-#include "variable.h"
-#include "strings.h"
-#include <assert.h>
-
-static struct hash * module_hash = 0;
-
-
-static char * new_module_str( module_t * m, char * suffix )
-{
- char * result;
- string s;
- string_copy( &s, m->name );
- string_append( &s, suffix );
- result = newstr( s.value );
- string_free( &s );
- return result;
-}
-
-
-module_t * bindmodule( char * name )
-{
- PROFILE_ENTER( BINDMODULE );
-
- string s;
- module_t m_;
- module_t * m = &m_;
-
- if ( !module_hash )
- module_hash = hashinit( sizeof( module_t ), "modules" );
-
- string_new( &s );
- if ( name )
- {
- string_append( &s, name );
- string_push_back( &s, '.' );
- }
-
- m->name = s.value;
-
- if ( hashenter( module_hash, (HASHDATA * *)&m ) )
- {
- m->name = newstr( m->name );
- m->variables = 0;
- m->rules = 0;
- m->imported_modules = 0;
- m->class_module = 0;
- m->native_rules = 0;
- m->user_module = 0;
- }
- string_free( &s );
-
- PROFILE_EXIT( BINDMODULE );
-
- return m;
-}
-
-/*
- * demand_rules() - Get the module's "rules" hash on demand.
- */
-struct hash * demand_rules( module_t * m )
-{
- if ( !m->rules )
- m->rules = hashinit( sizeof( RULE ), new_module_str( m, "rules" ) );
- return m->rules;
-}
-
-
-/*
- * delete_module() - wipe out the module's rules and variables.
- */
-
-static void delete_rule_( void * xrule, void * data )
-{
- rule_free( (RULE *)xrule );
-}
-
-
-void delete_module( module_t * m )
-{
- /* Clear out all the rules. */
- if ( m->rules )
- {
- hashenumerate( m->rules, delete_rule_, (void *)0 );
- hashdone( m->rules );
- m->rules = 0;
- }
-
- if ( m->variables )
- {
- var_hash_swap( &m->variables );
- var_done();
- var_hash_swap( &m->variables );
- m->variables = 0;
- }
-}
-
-
-module_t * root_module()
-{
- static module_t * root = 0;
- if ( !root )
- root = bindmodule( 0 );
- return root;
-}
-
-void enter_module( module_t * m )
-{
- var_hash_swap( &m->variables );
-}
-
-
-void exit_module( module_t * m )
-{
- var_hash_swap( &m->variables );
-}
-
-
-void import_module( LIST * module_names, module_t * target_module )
-{
- PROFILE_ENTER( IMPORT_MODULE );
-
- struct hash * h;
-
- if ( !target_module->imported_modules )
- target_module->imported_modules = hashinit( sizeof( char * ), "imported" );
- h = target_module->imported_modules;
-
- for ( ; module_names; module_names = module_names->next )
- {
- char * s = module_names->string;
- char * * ss = &s;
- hashenter( h, (HASHDATA * *)&ss );
- }
-
- PROFILE_EXIT( IMPORT_MODULE );
-}
-
-
-static void add_module_name( void * r_, void * result_ )
-{
- char * * r = (char * *)r_;
- LIST * * result = (LIST * *)result_;
-
- *result = list_new( *result, copystr( *r ) );
-}
-
-
-LIST * imported_modules( module_t * module )
-{
- LIST * result = L0;
- if ( module->imported_modules )
- hashenumerate( module->imported_modules, add_module_name, &result );
- return result;
-}
diff --git a/jam-files/engine/modules.h b/jam-files/engine/modules.h
deleted file mode 100644
index 60053a23..00000000
--- a/jam-files/engine/modules.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-#ifndef MODULES_DWA10182001_H
-# define MODULES_DWA10182001_H
-
-#include "lists.h"
-
-struct module_t
-{
- char* name;
- struct hash* rules;
- struct hash* variables;
- struct hash* imported_modules;
- struct module_t* class_module;
- struct hash* native_rules;
- int user_module;
-};
-
-typedef struct module_t module_t ; /* MSVC debugger gets confused unless this is provided */
-
-module_t* bindmodule( char* name );
-module_t* root_module();
-void enter_module( module_t* );
-void exit_module( module_t* );
-void delete_module( module_t* );
-
-void import_module(LIST* module_names, module_t* target_module);
-LIST* imported_modules(module_t* module);
-
-struct hash* demand_rules( module_t* );
-
-
-#endif
-
diff --git a/jam-files/engine/modules/order.c b/jam-files/engine/modules/order.c
deleted file mode 100644
index d77943a7..00000000
--- a/jam-files/engine/modules/order.c
+++ /dev/null
@@ -1,144 +0,0 @@
-/* Copyright Vladimir Prus 2004. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../lists.h"
-#include "../strings.h"
-#include "../newstr.h"
-#include "../variable.h"
-
-
-/* Use a quite kludgy approach: when we add an order dependency from 'a' to 'b',
-   just append 'b' to the value of variable 'a'.
-*/
-LIST *add_pair( PARSE *parse, FRAME *frame )
-{
- LIST* arg = lol_get( frame->args, 0 );
-
- var_set(arg->string, list_copy(0, arg->next), VAR_APPEND);
-
- return L0;
-}
-
-/** Given a list and a value, returns position of that value in
- the list, or -1 if not found.
-*/
-int list_index(LIST* list, const char* value)
-{
- int result = 0;
- for(; list; list = list->next, ++result) {
- if (strcmp(list->string, value) == 0)
- return result;
- }
- return -1;
-}
-
-enum colors { white, gray, black };
-
-/* Main routine of the topological sort. Calls itself recursively on all
- adjacent vertices which were not yet visited. After that, 'current_vertex'
- is added to '*result_ptr'.
-*/
-void do_ts(int** graph, int current_vertex, int* colors, int** result_ptr)
-{
- int i;
-
- colors[current_vertex] = gray;
- for(i = 0; graph[current_vertex][i] != -1; ++i) {
- int adjacent_vertex = graph[current_vertex][i];
-
- if (colors[adjacent_vertex] == white)
- do_ts(graph, adjacent_vertex, colors, result_ptr);
- /* The vertex is either black, in which case we don't have to do
-       anything, or gray, in which case we have a loop. If we have a loop,
- it's not clear what useful diagnostic we can emit, so we emit
- nothing. */
- }
- colors[current_vertex] = black;
- **result_ptr = current_vertex;
- (*result_ptr)++;
-}
-
-void topological_sort(int** graph, int num_vertices, int* result)
-{
- int i;
- int* colors = (int*)BJAM_CALLOC(num_vertices, sizeof(int));
- for (i = 0; i < num_vertices; ++i)
- colors[i] = white;
-
- for(i = 0; i < num_vertices; ++i)
- if (colors[i] == white)
- do_ts(graph, i, colors, &result);
-
- BJAM_FREE(colors);
-}
-
-LIST *order( PARSE *parse, FRAME *frame )
-{
- LIST* arg = lol_get( frame->args, 0 );
- LIST* tmp;
- LIST* result = 0;
- int src;
-
- /* We need to create a graph of order dependencies between
- the passed objects. We assume that there are no duplicates
- passed to 'add_pair'.
- */
- int length = list_length(arg);
- int** graph = (int**)BJAM_CALLOC(length, sizeof(int*));
- int* order = (int*)BJAM_MALLOC((length+1)*sizeof(int));
-
- for(tmp = arg, src = 0; tmp; tmp = tmp->next, ++src) {
-        /* For all objects this one depends upon, add elements
- to 'graph' */
- LIST* dependencies = var_get(tmp->string);
- int index = 0;
-
- graph[src] = (int*)BJAM_CALLOC(list_length(dependencies)+1, sizeof(int));
- for(; dependencies; dependencies = dependencies->next) {
- int dst = list_index(arg, dependencies->string);
- if (dst != -1)
- graph[src][index++] = dst;
- }
- graph[src][index] = -1;
- }
-
- topological_sort(graph, length, order);
-
- {
- int index = length-1;
- for(; index >= 0; --index) {
- int i;
- tmp = arg;
- for (i = 0; i < order[index]; ++i, tmp = tmp->next);
- result = list_new(result, tmp->string);
- }
- }
-
- /* Clean up */
- {
- int i;
- for(i = 0; i < length; ++i)
- BJAM_FREE(graph[i]);
- BJAM_FREE(graph);
- BJAM_FREE(order);
- }
-
- return result;
-}
-
-void init_order()
-{
- {
- char* args[] = { "first", "second", 0 };
- declare_native_rule("class@order", "add-pair", args, add_pair, 1);
- }
-
- {
- char* args[] = { "objects", "*", 0 };
- declare_native_rule("class@order", "order", args, order, 1);
- }
-
-
-}
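A hypothetical usage fragment for the graph representation consumed by topological_sort() above (assuming the functions in this file, with BJAM_CALLOC/BJAM_MALLOC mapping to calloc/malloc): each graph[v] lists the vertices v points at, terminated by -1, and order() reads the result array from back to front to get the final ordering.

/* Hypothetical fragment; relies on topological_sort() defined above. */
static void example_order( void )
{
    /* Edges 0 -> 1 and 1 -> 2, i.e. 0 is ordered before 1, and 1 before 2.
     * Each adjacency row is terminated by -1.
     */
    int row0[] = { 1, -1 };
    int row1[] = { 2, -1 };
    int row2[] = { -1 };
    int * graph[] = { row0, row1, row2 };
    int result[ 3 ];

    topological_sort( graph, 3, result );

    /* result now holds { 2, 1, 0 }; order() above walks it from the last
     * element to the first, yielding the final ordering 0, 1, 2.
     */
}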
diff --git a/jam-files/engine/modules/path.c b/jam-files/engine/modules/path.c
deleted file mode 100644
index f5d09622..00000000
--- a/jam-files/engine/modules/path.c
+++ /dev/null
@@ -1,32 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../timestamp.h"
-#include "../newstr.h"
-
-LIST *path_exists( PARSE *parse, FRAME *frame )
-{
- LIST* l = lol_get( frame->args, 0 );
-
- time_t time;
- timestamp(l->string, &time);
- if (time != 0)
- {
- return list_new(0, newstr("true"));
- }
- else
- {
- return L0;
- }
-}
-
-void init_path()
-{
- {
- char* args[] = { "location", 0 };
- declare_native_rule("path", "exists", args, path_exists, 1);
- }
-
-}
diff --git a/jam-files/engine/modules/property-set.c b/jam-files/engine/modules/property-set.c
deleted file mode 100644
index 2b0fb5d9..00000000
--- a/jam-files/engine/modules/property-set.c
+++ /dev/null
@@ -1,110 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../timestamp.h"
-#include "../newstr.h"
-#include "../strings.h"
-#include "../lists.h"
-#include "../variable.h"
-#include "../compile.h"
-
-LIST* get_grist(char* f)
-{
- char* end = strchr(f, '>');
- string s[1];
- LIST* result;
-
- string_new(s);
-
- string_append_range(s, f, end+1);
- result = list_new(0, newstr(s->value));
-
- string_free(s);
- return result;
-}
-
-/*
-rule create ( raw-properties * )
-{
- raw-properties = [ sequence.unique
- [ sequence.insertion-sort $(raw-properties) ] ] ;
-
- local key = $(raw-properties:J=-:E=) ;
-
- if ! $(.ps.$(key))
- {
- .ps.$(key) = [ new property-set $(raw-properties) ] ;
- }
- return $(.ps.$(key)) ;
-}
-*/
-
-LIST *property_set_create( PARSE *parse, FRAME *frame )
-{
- LIST* properties = lol_get( frame->args, 0 );
- LIST* sorted = 0;
-#if 0
- LIST* order_sensitive = 0;
-#endif
- LIST* unique;
- LIST* tmp;
- LIST* val;
- string var[1];
-
-#if 0
- /* Sort all properties which are not order sensitive */
- for(tmp = properties; tmp; tmp = tmp->next) {
- LIST* g = get_grist(tmp->string);
- LIST* att = call_rule("feature.attributes", frame, g, 0);
- if (list_in(att, "order-sensitive")) {
- order_sensitive = list_new( order_sensitive, tmp->string);
- } else {
- sorted = list_new( sorted, tmp->string);
- }
- list_free(att);
- }
-
- sorted = list_sort(sorted);
- sorted = list_append(sorted, order_sensitive);
- unique = list_unique(sorted);
-#endif
- sorted = list_sort(properties);
- unique = list_unique(sorted);
-
- string_new(var);
- string_append(var, ".ps.");
-
- for(tmp = unique; tmp; tmp = tmp->next) {
- string_append(var, tmp->string);
- string_push_back(var, '-');
- }
- val = var_get(var->value);
- if (val == 0)
- {
- val = call_rule("new", frame,
- list_append(list_new(0, "property-set"), unique), 0);
-
- var_set(newstr(var->value), list_copy(0, val), VAR_SET);
- }
- else
- {
- val = list_copy(0, val);
- }
-
- string_free(var);
- /* The 'unique' variable is freed in 'call_rule'. */
- list_free(sorted);
-
- return val;
-
-}
-
-void init_property_set()
-{
- {
- char* args[] = { "raw-properties", "*", 0 };
- declare_native_rule("property-set", "create", args, property_set_create, 1);
- }
-}
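For clarity, a hedged trace of the cache-key construction in property_set_create() above: the raw properties are sorted and de-duplicated, then joined under a ".ps." prefix with a '-' appended after each element, and that variable caches the created property-set object for later calls. The property values below are hypothetical.

/* Hypothetical trace of the string building in property_set_create():
 *
 *   raw properties : <toolset>gcc <variant>debug <variant>debug
 *   sorted + unique: <toolset>gcc <variant>debug
 *   cache variable : .ps.<toolset>gcc-<variant>debug-
 *
 * The first call stores the result of the 'new property-set ...' rule in
 * that variable; later calls with the same properties return the cached
 * value instead of creating a new object.
 */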
diff --git a/jam-files/engine/modules/readme.txt b/jam-files/engine/modules/readme.txt
deleted file mode 100644
index 2edf6e17..00000000
--- a/jam-files/engine/modules/readme.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-
-This directory contains sources which declare native
-rules for Boost.Build modules. \ No newline at end of file
diff --git a/jam-files/engine/modules/regex.c b/jam-files/engine/modules/regex.c
deleted file mode 100644
index d048ba1d..00000000
--- a/jam-files/engine/modules/regex.c
+++ /dev/null
@@ -1,96 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../timestamp.h"
-#include "../newstr.h"
-#include "../strings.h"
-#include "../regexp.h"
-#include "../compile.h"
-
-/*
-rule transform ( list * : pattern : indices * )
-{
- indices ?= 1 ;
- local result ;
- for local e in $(list)
- {
- local m = [ MATCH $(pattern) : $(e) ] ;
- if $(m)
- {
- result += $(m[$(indices)]) ;
- }
- }
- return $(result) ;
-}
-*/
-LIST *regex_transform( PARSE *parse, FRAME *frame )
-{
- LIST* l = lol_get( frame->args, 0 );
- LIST* pattern = lol_get( frame->args, 1 );
- LIST* indices_list = lol_get(frame->args, 2);
- int* indices = 0;
- int size;
- int* p;
- LIST* result = 0;
-
- string buf[1];
- string_new(buf);
-
- if (indices_list)
- {
- size = list_length(indices_list);
- indices = (int*)BJAM_MALLOC(size*sizeof(int));
- for(p = indices; indices_list; indices_list = indices_list->next)
- {
- *p++ = atoi(indices_list->string);
- }
- }
- else
- {
- size = 1;
- indices = (int*)BJAM_MALLOC(sizeof(int));
- *indices = 1;
- }
-
- {
- /* Result is cached and intentionally never freed */
- regexp *re = regex_compile( pattern->string );
-
- for(; l; l = l->next)
- {
- if( regexec( re, l->string ) )
- {
- int i = 0;
- for(; i < size; ++i)
- {
- int index = indices[i];
- /* Skip empty submatches. Not sure it's right in all cases,
- but surely is right for the case for which this routine
- is optimized -- header scanning.
- */
- if (re->startp[index] != re->endp[index])
- {
- string_append_range( buf, re->startp[index], re->endp[index] );
- result = list_new( result, newstr( buf->value ) );
- string_truncate( buf, 0 );
- }
- }
- }
- }
- string_free( buf );
- }
-
- BJAM_FREE(indices);
-
- return result;
-}
-
-void init_regex()
-{
- {
- char* args[] = { "list", "*", ":", "pattern", ":", "indices", "*", 0 };
- declare_native_rule("regex", "transform", args, regex_transform, 2);
- }
-}
diff --git a/jam-files/engine/modules/sequence.c b/jam-files/engine/modules/sequence.c
deleted file mode 100644
index bda80d94..00000000
--- a/jam-files/engine/modules/sequence.c
+++ /dev/null
@@ -1,42 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-
-# ifndef max
-# define max( a,b ) ((a)>(b)?(a):(b))
-# endif
-
-
-LIST *sequence_select_highest_ranked( PARSE *parse, FRAME *frame )
-{
-    /* Returns all of 'elements' for which the corresponding element in the parallel */
- /* list 'rank' is equal to the maximum value in 'rank'. */
-
- LIST* elements = lol_get( frame->args, 0 );
- LIST* rank = lol_get( frame->args, 1 );
-
- LIST* result = 0;
- LIST* tmp;
- int highest_rank = -1;
-
- for (tmp = rank; tmp; tmp = tmp->next)
- highest_rank = max(highest_rank, atoi(tmp->string));
-
- for (; rank; rank = rank->next, elements = elements->next)
- if (atoi(rank->string) == highest_rank)
- result = list_new(result, elements->string);
-
- return result;
-}
-
-void init_sequence()
-{
- {
- char* args[] = { "elements", "*", ":", "rank", "*", 0 };
- declare_native_rule("sequence", "select-highest-ranked", args,
- sequence_select_highest_ranked, 1);
- }
-
-}
diff --git a/jam-files/engine/modules/set.c b/jam-files/engine/modules/set.c
deleted file mode 100644
index f8219403..00000000
--- a/jam-files/engine/modules/set.c
+++ /dev/null
@@ -1,41 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-
-/*
- local result = ;
- local element ;
- for element in $(B)
- {
- if ! ( $(element) in $(A) )
- {
- result += $(element) ;
- }
- }
- return $(result) ;
-*/
-LIST *set_difference( PARSE *parse, FRAME *frame )
-{
-
- LIST* b = lol_get( frame->args, 0 );
- LIST* a = lol_get( frame->args, 1 );
-
- LIST* result = 0;
- for(; b; b = b->next)
- {
- if (!list_in(a, b->string))
- result = list_new(result, b->string);
- }
- return result;
-}
-
-void init_set()
-{
- {
- char* args[] = { "B", "*", ":", "A", "*", 0 };
- declare_native_rule("set", "difference", args, set_difference, 1);
- }
-
-}
diff --git a/jam-files/engine/native.c b/jam-files/engine/native.c
deleted file mode 100644
index 4c289959..00000000
--- a/jam-files/engine/native.c
+++ /dev/null
@@ -1,36 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "native.h"
-#include "hash.h"
-
-# define P0 (PARSE *)0
-# define C0 (char *)0
-
-
-void declare_native_rule(char* module, char* rule, char** args,
- LIST*(*f)(PARSE*, FRAME*), int version)
-{
- module_t* m = bindmodule(module);
- if (m->native_rules == 0) {
- m->native_rules = hashinit( sizeof( native_rule_t ), "native rules");
- }
-
- {
- native_rule_t n, *np = &n;
- n.name = rule;
- if (args)
- {
- n.arguments = args_new();
- lol_build( n.arguments->data, args );
- }
- else
- {
- n.arguments = 0;
- }
- n.procedure = parse_make( f, P0, P0, P0, C0, C0, 0 );
- n.version = version;
- hashenter(m->native_rules, (HASHDATA**)&np);
- }
-}
diff --git a/jam-files/engine/native.h b/jam-files/engine/native.h
deleted file mode 100644
index 3fc710b9..00000000
--- a/jam-files/engine/native.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/* Copyright David Abrahams 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#ifndef NATIVE_H_VP_2003_12_09
-#define NATIVE_H_VP_2003_12_09
-
-#include "rules.h"
-
-struct native_rule_t
-{
- char* name;
- argument_list* arguments;
- PARSE* procedure;
- /* Version of the interface that the native rule provides.
- It's possible that we want to change the set parameter
- for existing native rule. In that case, version number
- should be incremented so that Boost.Build can check for
- version it relies on.
-
- Versions are numbered from 1.
- */
- int version;
-};
-
-/* MSVC debugger gets confused unless this is provided */
-typedef struct native_rule_t native_rule_t ;
-
-void declare_native_rule(char* module, char* rule, char** args,
- LIST*(*f)(PARSE*, FRAME*), int version);
-
-
-
-#endif
diff --git a/jam-files/engine/newstr.c b/jam-files/engine/newstr.c
deleted file mode 100644
index 6a229eb2..00000000
--- a/jam-files/engine/newstr.c
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "newstr.h"
-# include "hash.h"
-# include "compile.h"
-# include <stddef.h>
-# include <stdlib.h>
-
-/*
- * newstr.c - string manipulation routines
- *
- * To minimize string copying, string creation, copying, and freeing
- * are done through newstr.
- *
- * External functions:
- *
- * newstr() - return a dynamically allocated copy of a string
- * copystr() - return a copy of a string previously returned by newstr()
- * freestr() - free a string returned by newstr() or copystr()
- * str_done() - free string tables
- *
- * Once a string is passed to newstr(), the returned string is readonly.
- *
- * This implementation builds a hash table of all strings, so that multiple
- * calls of newstr() on the same string allocate memory for the string once.
- * Strings are never actually freed.
- */
-
-typedef char * STRING;
-
-static struct hash * strhash = 0;
-static int strtotal = 0;
-static int strcount_in = 0;
-static int strcount_out = 0;
-
-
-/*
- * Immortal string allocator implementation speeds string allocation and cuts
- * down on internal fragmentation.
- */
-
-# define STRING_BLOCK 4096
-typedef struct strblock
-{
- struct strblock * next;
- char data[STRING_BLOCK];
-} strblock;
-
-static strblock * strblock_chain = 0;
-
-/* Storage remaining in the current strblock */
-static char * storage_start = 0;
-static char * storage_finish = 0;
-
-
-/*
- * allocate() - Allocate n bytes of immortal string storage.
- */
-
-static char * allocate( size_t const n )
-{
-#ifdef BJAM_NEWSTR_NO_ALLOCATE
- return (char*)BJAM_MALLOC_ATOMIC(n);
-#else
- /* See if we can grab storage from an existing block. */
- size_t remaining = storage_finish - storage_start;
- if ( remaining >= n )
- {
- char * result = storage_start;
- storage_start += n;
- return result;
- }
- else /* Must allocate a new block. */
- {
- strblock * new_block;
- size_t nalloc = n;
- if ( nalloc < STRING_BLOCK )
- nalloc = STRING_BLOCK;
-
- /* Allocate a new block and link into the chain. */
- new_block = (strblock *)BJAM_MALLOC( offsetof( strblock, data[0] ) + nalloc * sizeof( new_block->data[0] ) );
- if ( new_block == 0 )
- return 0;
- new_block->next = strblock_chain;
- strblock_chain = new_block;
-
- /* Take future allocations out of the larger remaining space. */
- if ( remaining < nalloc - n )
- {
- storage_start = new_block->data + n;
- storage_finish = new_block->data + nalloc;
- }
- return new_block->data;
- }
-#endif
-}
-
-
-/*
- * newstr() - return a dynamically allocated copy of a string.
- */
-
-char * newstr( char * string )
-{
- STRING str;
- STRING * s = &str;
-
- if ( !strhash )
- strhash = hashinit( sizeof( STRING ), "strings" );
-
- *s = string;
-
- if ( hashenter( strhash, (HASHDATA **)&s ) )
- {
- int l = strlen( string );
- char * m = (char *)allocate( l + 1 );
-
- strtotal += l + 1;
- memcpy( m, string, l + 1 );
- *s = m;
- }
-
- strcount_in += 1;
- return *s;
-}
-
-
-/*
- * copystr() - return a copy of a string previously returned by newstr()
- */
-
-char * copystr( char * s )
-{
- strcount_in += 1;
- return s;
-}
-
-
-/*
- * freestr() - free a string returned by newstr() or copystr()
- */
-
-void freestr( char * s )
-{
- strcount_out += 1;
-}
-
-
-/*
- * str_done() - free string tables.
- */
-
-void str_done()
-{
- /* Reclaim string blocks. */
- while ( strblock_chain != 0 )
- {
- strblock * n = strblock_chain->next;
- BJAM_FREE(strblock_chain);
- strblock_chain = n;
- }
-
- hashdone( strhash );
-
- if ( DEBUG_MEM )
- printf( "%dK in strings\n", strtotal / 1024 );
-
- /* printf( "--- %d strings of %d dangling\n", strcount_in-strcount_out, strcount_in ); */
-}
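
newstr.c combines two ideas: interning (a hash table so that repeated newstr() calls on equal strings return the same pointer) and an "immortal" block allocator that carves copies out of large chunks rather than calling malloc per string. A simplified standalone sketch of both, with a fixed-size table, no rehashing and no overflow handling; every name in it is illustrative rather than the engine's:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define BLOCK 4096
    #define SLOTS 1024

    static char   block[BLOCK];          /* one "immortal" storage block */
    static size_t used = 0;
    static char  *table[SLOTS];          /* naive open-addressing intern table */

    static char *intern(const char *s)
    {
        size_t h = 0, i;
        const char *p;
        for (p = s; *p; ++p) h = h * 31 + (unsigned char)*p;
        for (i = h % SLOTS; table[i]; i = (i + 1) % SLOTS)
            if (strcmp(table[i], s) == 0)
                return table[i];         /* already interned: share the copy */
        {
            size_t len = strlen(s) + 1;
            char *copy;
            if (used + len > BLOCK) { fprintf(stderr, "block full\n"); exit(1); }
            copy = memcpy(block + used, s, len);   /* bump-allocate from the block */
            used += len;
            table[i] = copy;
            return copy;
        }
    }

    int main(void)
    {
        char *a = intern("hello");
        char *b = intern("hello");
        printf("%s %s same=%d\n", a, b, a == b);   /* same=1: both calls share one copy */
        return 0;
    }
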
diff --git a/jam-files/engine/newstr.h b/jam-files/engine/newstr.h
deleted file mode 100644
index 84a4d7b6..00000000
--- a/jam-files/engine/newstr.h
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * newstr.h - string manipulation routines
- */
-
-char * copystr ( char * );
-void freestr ( char * );
-char * newstr ( char * );
-void str_done();
diff --git a/jam-files/engine/option.c b/jam-files/engine/option.c
deleted file mode 100644
index d25e5e8a..00000000
--- a/jam-files/engine/option.c
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "option.h"
-
-/*
- * option.c - command line option processing
- *
- * {o >o
- * \<>) "Process command line options as defined in <option.h>.
- * Return the number of argv[] elements used up by options,
- * or -1 if an invalid option flag was given or an argument
- * was supplied for an option that does not require one."
- */
-
-int getoptions( int argc, char * * argv, char * opts, bjam_option * optv )
-{
- int i;
- int optc = N_OPTS;
-
- memset( (char *)optv, '\0', sizeof( *optv ) * N_OPTS );
-
- for ( i = 0; i < argc; ++i )
- {
- char *arg;
-
- if ( ( argv[ i ][ 0 ] != '-' ) ||
- ( ( argv[ i ][ 1 ] != '-' ) && !isalpha( argv[ i ][ 1 ] ) ) )
- continue;
-
- if ( !optc-- )
- {
- printf( "too many options (%d max)\n", N_OPTS );
- return -1;
- }
-
- for ( arg = &argv[ i ][ 1 ]; *arg; ++arg )
- {
- char * f;
-
- for ( f = opts; *f; ++f )
- if ( *f == *arg )
- break;
-
- if ( !*f )
- {
- printf( "Invalid option: -%c\n", *arg );
- return -1;
- }
-
- optv->flag = *f;
-
- if ( f[ 1 ] != ':' )
- {
- optv++->val = "true";
- }
- else if ( arg[ 1 ] )
- {
- optv++->val = &arg[1];
- break;
- }
- else if ( ++i < argc )
- {
- optv++->val = argv[ i ];
- break;
- }
- else
- {
- printf( "option: -%c needs argument\n", *f );
- return -1;
- }
- }
- }
-
- return i;
-}
-
-
-/*
- * Name: getoptval() - find an option given its character.
- */
-
-char * getoptval( bjam_option * optv, char opt, int subopt )
-{
- int i;
- for ( i = 0; i < N_OPTS; ++i, ++optv )
- if ( ( optv->flag == opt ) && !subopt-- )
- return optv->val;
- return 0;
-}
diff --git a/jam-files/engine/option.h b/jam-files/engine/option.h
deleted file mode 100644
index 99ef620d..00000000
--- a/jam-files/engine/option.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * option.h - command line option processing
- *
- * {o >o
- * \ -) "Command line option."
- */
-
-typedef struct bjam_option
-{
- char flag; /* filled in by getoptions() */
- char *val; /* set to random address if true */
-} bjam_option;
-
-# define N_OPTS 256
-
-int getoptions( int argc, char **argv, char *opts, bjam_option *optv );
-char * getoptval( bjam_option *optv, char opt, int subopt );
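
A hedged usage sketch of the option API declared above; it re-declares the interface locally so it compiles on its own and would link against option.c. The option string and command line are illustrative: options suffixed with ':' in the opts string consume a value, bare flags are recorded as "true".

    #include <stdio.h>

    typedef struct bjam_option { char flag; char *val; } bjam_option;
    #define N_OPTS 256
    int    getoptions( int argc, char **argv, char *opts, bjam_option *optv );
    char * getoptval ( bjam_option *optv, char opt, int subopt );

    int main(int argc, char **argv)
    {
        bjam_option optv[N_OPTS];
        /* "d:" and "j:" expect a value; "n" is a bare flag */
        int used = getoptions(argc - 1, argv + 1, "d:j:n", optv);
        char *d;

        if (used < 0)
            return 1;                              /* bad flag or missing value */
        d = getoptval(optv, 'd', 0);
        printf("-d value: %s\n", d ? d : "(not given)");
        return 0;
    }
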
diff --git a/jam-files/engine/output.c b/jam-files/engine/output.c
deleted file mode 100644
index 483c6ca9..00000000
--- a/jam-files/engine/output.c
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- Copyright 2007 Rene Rivera
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#include "jam.h"
-#include "output.h"
-#include "newstr.h"
-#include <stdio.h>
-
-#define bjam_out (stdout)
-#define bjam_err (stderr)
-
-static void out_
-(
- char const * data,
- FILE * io
-)
-{
- while ( *data )
- {
- size_t len = strcspn(data,"\r");
- data += fwrite(data,1,len,io);
- if ( *data == '\r' ) ++data;
- }
-}
-
-
-void out_action
-(
- char const * action,
- char const * target,
- char const * command,
- char const * out_data,
- char const * err_data,
- int exit_reason
-)
-{
- /* Print out the action + target line. For actions marked quiet, 'action'
- * is passed in as null and the line is skipped.
- */
- if ( action )
- {
- fprintf( bjam_out, "%s %s\n", action, target );
- }
-
- /* Print out the command executed if given -d+2. */
- if ( DEBUG_EXEC )
- {
- fputs( command, bjam_out );
- fputc( '\n', bjam_out );
- }
-
- /* Print out the command executed to the command stream. */
- if ( globs.cmdout )
- {
- fputs( command, globs.cmdout );
- }
-
- switch ( exit_reason )
- {
- case EXIT_OK:
- break;
- case EXIT_FAIL:
- break;
- case EXIT_TIMEOUT:
- {
- /* Process expired, make user aware with explicit message. */
- if ( action )
- {
- /* But only output for non-quietly actions. */
- fprintf( bjam_out, "%ld second time limit exceeded\n", globs.timeout );
- }
- break;
- }
- default:
- break;
- }
-
- /* Print out the command output, if requested, or if the program failed. */
- if ( action || exit_reason != EXIT_OK)
- {
- /* But only output for non-quietly actions. */
- if ( ( 0 != out_data ) &&
- ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) ||
- ( globs.pipe_action == 0 ) ) )
- {
- out_( out_data, bjam_out );
- }
- if ( ( 0 != err_data ) &&
- ( globs.pipe_action & 2 /* STDERR_FILENO */ ) )
- {
- out_( err_data, bjam_err );
- }
- }
-
- fflush( bjam_out );
- fflush( bjam_err );
- fflush( globs.cmdout );
-}
-
-
-char * outf_int( int value )
-{
- char buffer[50];
- sprintf( buffer, "%i", value );
- return newstr( buffer );
-}
-
-
-char * outf_double( double value )
-{
- char buffer[50];
- sprintf( buffer, "%f", value );
- return newstr( buffer );
-}
-
-
-char * outf_time( time_t value )
-{
- char buffer[50];
- strftime( buffer, 49, "%Y-%m-%d %H:%M:%SZ", gmtime( &value ) );
- return newstr( buffer );
-}
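
out_() above writes its data in runs delimited by '\r' and skips the carriage returns themselves, so CRLF-terminated tool output prints cleanly on Unix streams. A standalone sketch of that filtering (the sample string is illustrative):

    #include <stdio.h>
    #include <string.h>

    static void write_filtered(const char *data, FILE *io)
    {
        while (*data)
        {
            size_t len = strcspn(data, "\r");   /* length of the next CR-free run */
            fwrite(data, 1, len, io);
            data += len;
            if (*data == '\r')
                ++data;                         /* drop the carriage return */
        }
    }

    int main(void)
    {
        write_filtered("line one\r\nline two\r\n", stdout);
        return 0;
    }
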
diff --git a/jam-files/engine/output.h b/jam-files/engine/output.h
deleted file mode 100644
index 9e9876cf..00000000
--- a/jam-files/engine/output.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- Copyright 2007 Rene Rivera
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#ifndef BJAM_OUTPUT_H
-#define BJAM_OUTPUT_H
-
-#include <time.h>
-
-#define EXIT_OK 0
-#define EXIT_FAIL 1
-#define EXIT_TIMEOUT 2
-
-void out_action(
- const char * action,
- const char * target,
- const char * command,
- const char * out_data,
- const char * err_data,
- int exit_reason
- );
-
-char * outf_int( int value );
-char * outf_double( double value );
-char * outf_time( time_t value );
-
-#endif
diff --git a/jam-files/engine/parse.c b/jam-files/engine/parse.c
deleted file mode 100644
index 9114fa05..00000000
--- a/jam-files/engine/parse.c
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "newstr.h"
-#include "modules.h"
-#include "frames.h"
-
-/*
- * parse.c - make and destroy parse trees as driven by the parser
- *
- * 09/07/00 (seiwald) - ref count on PARSE to avoid freeing when used,
- * as per Matt Armstrong.
- * 09/11/00 (seiwald) - structure reworked to reflect that (*func)()
- * returns a LIST *.
- */
-
-static PARSE * yypsave;
-
-void parse_file( char * f, FRAME * frame )
-{
- /* Suspend scan of current file and push this new file in the stream. */
- yyfparse( f );
-
- /* Now parse each block of rules and execute it. Execute it outside of the
- * parser so that recursive calls to yyrun() work (no recursive yyparse's).
- */
-
- for ( ; ; )
- {
- PARSE * p;
-
- /* Filled by yyparse() calling parse_save(). */
- yypsave = 0;
-
- /* If parse error or empty parse, outta here. */
- if ( yyparse() || !( p = yypsave ) )
- break;
-
- /* Run the parse tree. */
- parse_evaluate( p, frame );
- parse_free( p );
- }
-}
-
-
-void parse_save( PARSE * p )
-{
- yypsave = p;
-}
-
-
-PARSE * parse_make(
- LIST * (* func)( PARSE *, FRAME * ),
- PARSE * left,
- PARSE * right,
- PARSE * third,
- char * string,
- char * string1,
- int num )
-{
- PARSE * p = (PARSE *)BJAM_MALLOC( sizeof( PARSE ) );
-
- p->func = func;
- p->left = left;
- p->right = right;
- p->third = third;
- p->string = string;
- p->string1 = string1;
- p->num = num;
- p->refs = 1;
- p->rulename = 0;
-
- if ( left )
- {
- p->file = left->file;
- p->line = left->line;
- }
- else
- {
- yyinput_stream( &p->file, &p->line );
- }
-
- return p;
-}
-
-
-void parse_refer( PARSE * p )
-{
- ++p->refs;
-}
-
-
-void parse_free( PARSE * p )
-{
- if ( --p->refs )
- return;
-
- if ( p->string )
- freestr( p->string );
- if ( p->string1 )
- freestr( p->string1 );
- if ( p->left )
- parse_free( p->left );
- if ( p->right )
- parse_free( p->right );
- if ( p->third )
- parse_free( p->third );
- if ( p->rulename )
- freestr( p->rulename );
-
- BJAM_FREE( (char *)p );
-}
-
-
-LIST * parse_evaluate( PARSE * p, FRAME * frame )
-{
- frame->procedure = p;
- return (*p->func)( p, frame );
-}
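
parse_make() starts every node with a reference count of one, parse_refer() adds a holder, and parse_free() only tears the node (and, recursively, its children) down when the last holder lets go. A standalone sketch of that pattern on a toy node type, separate from the engine's PARSE:

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct node {
        struct node *left, *right;
        int refs;
    } node;

    static node *node_make(node *l, node *r)
    {
        node *n = malloc(sizeof *n);
        n->left = l; n->right = r; n->refs = 1;
        return n;
    }

    static void node_refer(node *n) { ++n->refs; }

    static void node_free(node *n)
    {
        if (--n->refs)                 /* other holders remain: do nothing yet */
            return;
        if (n->left)  node_free(n->left);
        if (n->right) node_free(n->right);
        free(n);
    }

    int main(void)
    {
        node *leaf = node_make(0, 0);
        node *root = node_make(leaf, 0);
        node_refer(leaf);              /* a second owner keeps leaf alive */
        node_free(root);               /* frees root; leaf survives (refs == 1) */
        printf("leaf refs after freeing root: %d\n", leaf->refs);
        node_free(leaf);               /* the last reference goes away */
        return 0;
    }
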
diff --git a/jam-files/engine/parse.h b/jam-files/engine/parse.h
deleted file mode 100644
index e324972f..00000000
--- a/jam-files/engine/parse.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#ifndef PARSE_DWA20011020_H
-#define PARSE_DWA20011020_H
-
-#include "frames.h"
-#include "modules.h"
-#include "lists.h"
-
-/*
- * parse.h - make and destroy parse trees as driven by the parser.
- */
-
-/*
- * Parse tree node.
- */
-
-struct _PARSE {
- LIST * (* func)( PARSE *, FRAME * );
- PARSE * left;
- PARSE * right;
- PARSE * third;
- char * string;
- char * string1;
- int num;
- int refs;
-/* module * module; */
- char * rulename;
- char * file;
- int line;
-};
-
-void parse_file( char *, FRAME * );
-void parse_save( PARSE * );
-
-PARSE * parse_make(
- LIST * (* func)( PARSE *, FRAME * ),
- PARSE * left,
- PARSE * right,
- PARSE * third,
- char * string,
- char * string1,
- int num );
-
-void parse_refer ( PARSE * );
-void parse_free ( PARSE * );
-LIST * parse_evaluate( PARSE *, FRAME * );
-
-#endif
diff --git a/jam-files/engine/patchlevel.h b/jam-files/engine/patchlevel.h
deleted file mode 100644
index 699efd84..00000000
--- a/jam-files/engine/patchlevel.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* Keep JAMVERSYM in sync with VERSION. */
-/* It can be accessed as $(JAMVERSION) in the Jamfile. */
-
-#define VERSION_MAJOR 2011
-#define VERSION_MINOR 04
-#define VERSION_PATCH 0
-#define VERSION_MAJOR_SYM "2011"
-#define VERSION_MINOR_SYM "04"
-#define VERSION_PATCH_SYM "00"
-#define VERSION "2011.4"
-#define JAMVERSYM "JAMVERSION=2011.4"
diff --git a/jam-files/engine/pathmac.c b/jam-files/engine/pathmac.c
deleted file mode 100644
index e2c250e3..00000000
--- a/jam-files/engine/pathmac.c
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "pathsys.h"
-
-# ifdef OS_MAC
-
-# define DELIM ':'
-
-/*
- * pathunix.c - manipulate file names on UNIX, NT, OS2
- *
- * External routines:
- *
- * path_parse() - split a file name into dir/base/suffix/member
- * path_build() - build a filename given dir/base/suffix/member
- * path_parent() - make a PATHNAME point to its parent dir
- *
- * path_parse() and path_build() just manipulate a string and a structure;
- * they do not make system calls.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 12/26/93 (seiwald) - handle dir/.suffix properly in path_build()
- * 12/19/94 (mikem) - solaris string table insanity support
- * 12/21/94 (wingerd) Use backslashes for pathnames - the NT way.
- * 02/14/95 (seiwald) - parse and build /xxx properly
- * 02/23/95 (wingerd) Compilers on NT can handle "/" in pathnames, so we
- * should expect hdr searches to come up with strings
- * like "thing/thing.h". So we need to test for "/" as
- * well as "\" when parsing pathnames.
- * 03/16/95 (seiwald) - fixed accursed typo on line 69.
- * 05/03/96 (seiwald) - split from filent.c, fileunix.c
- * 12/20/96 (seiwald) - when looking for the rightmost . in a file name,
- * don't include the archive member name.
- * 01/10/01 (seiwald) - path_parse now strips the trailing : from the
- * directory name, unless the directory name is all
- * :'s, so that $(d:P) works.
- */
-
-/*
- * path_parse() - split a file name into dir/base/suffix/member
- */
-
-void
-path_parse(
- char *file,
- PATHNAME *f )
-{
- char *p, *q;
- char *end;
-
- memset( (char *)f, 0, sizeof( *f ) );
-
- /* Look for <grist> */
-
- if ( file[0] == '<' && ( p = strchr( file, '>' ) ) )
- {
- f->f_grist.ptr = file;
- f->f_grist.len = p - file;
- file = p + 1;
- }
-
- /* Look for dir: */
-
- if ( p = strrchr( file, DELIM ) )
- {
- f->f_dir.ptr = file;
- f->f_dir.len = p - file;
- file = p + 1;
-
- /* All :'s? Include last : as part of directory name */
-
- while ( ( p > f->f_dir.ptr ) && ( *--p == DELIM ) );
-
- if ( p == f->f_dir.ptr )
- ++f->f_dir.len;
- }
-
- end = file + strlen( file );
-
- /* Look for (member). */
-
- if ( ( p = strchr( file, '(' ) ) && ( end[-1] == ')' ) )
- {
- f->f_member.ptr = p + 1;
- f->f_member.len = end - p - 2;
- end = p;
- }
-
- /* Look for .suffix */
- /* This would be memrchr() */
-
- p = 0;
- q = file;
-
- while ( q = memchr( q, '.', end - q ) )
- p = q++;
-
- if ( p )
- {
- f->f_suffix.ptr = p;
- f->f_suffix.len = end - p;
- end = p;
- }
-
- /* Leaves base */
-
- f->f_base.ptr = file;
- f->f_base.len = end - file;
-}
-
-/*
- * path_build() - build a filename given dir/base/suffix/member.
- */
-
-# define DIR_EMPTY 0 /* "" */
-# define DIR_DOT 1 /* : */
-# define DIR_DOTDOT 2 /* :: */
-# define DIR_ABS 3 /* dira:dirb: */
-# define DIR_REL 4 /* :dira:dirb: */
-
-# define G_DIR 0 /* take dir */
-# define G_ROOT 1 /* take root */
-# define G_CAT 2 /* prepend root to dir */
-# define G_DTDR 3 /* :: of rel dir */
-# define G_DDDD 4 /* make it ::: (../..) */
-# define G_MT 5 /* leave it empty */
-
-char grid[5][5] = {
-/* EMPTY DOT DOTDOT ABS REL */
-/* EMPTY */ { G_MT, G_DIR, G_DIR, G_DIR, G_DIR },
-/* DOT */ { G_ROOT, G_DIR, G_DIR, G_DIR, G_DIR },
-/* DOTDOT */ { G_ROOT, G_ROOT, G_DDDD, G_DIR, G_DTDR },
-/* ABS */ { G_ROOT, G_ROOT, G_ROOT, G_DIR, G_CAT },
-/* REL */ { G_ROOT, G_ROOT, G_ROOT, G_DIR, G_CAT }
-};
-
-static int file_flags( char * ptr, int len )
-{
- if ( !len )
- return DIR_EMPTY;
- if ( ( len == 1 ) && ( ptr[0] == DELIM ) )
- return DIR_DOT;
- if ( ( len == 2 ) && ( ptr[0] == DELIM ) && ( ptr[1] == DELIM ) )
- return DIR_DOTDOT;
- if ( ptr[0] == DELIM )
- return DIR_REL;
- return DIR_ABS;
-}
-
-
-void path_build( PATHNAME * f, string * file, int binding )
-{
- int dflag;
- int rflag;
- int act;
-
- file_build1( f, file );
-
- /* Combine root & directory, according to the grid. */
-
- dflag = file_flags( f->f_dir.ptr, f->f_dir.len );
- rflag = file_flags( f->f_root.ptr, f->f_root.len );
-
- switch ( act = grid[ rflag ][ dflag ] )
- {
- case G_DTDR:
- {
- /* :: of rel dir */
- string_push_back( file, DELIM );
- }
- /* fall through */
-
- case G_DIR:
- /* take dir */
- string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
- break;
-
- case G_ROOT:
- /* take root */
- string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
- break;
-
- case G_CAT:
- /* prepend root to dir */
- string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
- if ( file->value[ file->size - 1 ] == DELIM )
- string_pop_back( file );
- string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
- break;
-
- case G_DDDD:
- /* make it ::: (../..) */
- string_append( file, ":::" );
- break;
- }
-
- /* Put : between dir and file (if none already). */
-
- if ( ( act != G_MT ) &&
- ( file->value[ file->size - 1 ] != DELIM ) &&
- ( f->f_base.len || f->f_suffix.len ) )
- {
- string_push_back( file, DELIM );
- }
-
- if ( f->f_base.len )
- string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
-
- if ( f->f_suffix.len )
- string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
-
- if ( f->f_member.len )
- {
- string_push_back( file, '(' );
- string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
- string_push_back( file, ')' );
- }
-
- if ( DEBUG_SEARCH )
- printf( " -> '%s'\n", file->value );
-}
-
-
-/*
- * path_parent() - make a PATHNAME point to its parent dir
- */
-
-void path_parent( PATHNAME * f )
-{
- /* Just set everything else to nothing. */
-
- f->f_base.ptr =
- f->f_suffix.ptr =
- f->f_member.ptr = "";
-
- f->f_base.len =
- f->f_suffix.len =
- f->f_member.len = 0;
-}
-
-# endif /* OS_MAC */
diff --git a/jam-files/engine/pathsys.h b/jam-files/engine/pathsys.h
deleted file mode 100644
index 73775810..00000000
--- a/jam-files/engine/pathsys.h
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * pathsys.h - PATHNAME struct
- */
-
-/*
- * PATHNAME - a name of a file, broken into <grist>dir/base/suffix(member)
- *
- * <grist> is salt to distinguish between targets that otherwise would
- * have the same name: it never appears in the bound name of a target.
- * (member) is an archive member name: the syntax is arbitrary, but must
- * agree in path_parse(), path_build() and the Jambase.
- *
- * On VMS, we keep track of whether the original path was a directory
- * (without a file), so that $(VAR:D) can climb to the parent.
- */
-
-#ifndef PATHSYS_VP_20020211_H
-# define PATHSYS_VP_20020211_H
-
-#include "strings.h"
-
-typedef struct _pathname PATHNAME;
-typedef struct _pathpart PATHPART;
-
-struct _pathpart
-{
- char * ptr;
- int len;
-};
-
-struct _pathname
-{
- PATHPART part[6];
-#ifdef OS_VMS
- int parent;
-#endif
-
-#define f_grist part[0]
-#define f_root part[1]
-#define f_dir part[2]
-#define f_base part[3]
-#define f_suffix part[4]
-#define f_member part[5]
-};
-
-void path_build( PATHNAME * f, string * file, int binding );
-void path_build1( PATHNAME * f, string * file );
-
-void path_parse( char * file, PATHNAME * f );
-void path_parent( PATHNAME * f );
-
-#ifdef NT
-
-/** Returns a newstr-allocated string with the long equivalent of 'short_path'.
- If none exists -- i.e. 'short_path' is already a long path -- it is returned
- unaltered. */
-char * short_path_to_long_path( char * short_path );
-
-#endif
-
-#ifdef USE_PATHUNIX
-/** Returns a static pointer to the system dependent path to the temporary
- directory. NOTE: *without* a trailing path separator.
-*/
-const char * path_tmpdir( void );
-
-/** Returns a new temporary name.
-*/
-const char * path_tmpnam( void );
-
-/** Returns a new temporary path.
-*/
-const char * path_tmpfile( void );
-#endif
-
-/** Given the first argument to 'main', returns a full path to
- our executable. Returns null in the unlikely case it
- cannot be determined. The caller is responsible for freeing
- the string.
-
- Implemented in jam.c
-*/
-char * executable_path (char *argv0);
-
-#endif
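
path_parse() in the platform-specific files that follow splits a name into the PATHPART slots declared above. A simplified standalone sketch of the core split for a Unix-style name; grist and archive-member handling are omitted, and the sample path is invented for illustration:

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        const char *file  = "src/build/main.cpp";
        const char *slash = strrchr(file, '/');        /* last directory separator */
        const char *base  = slash ? slash + 1 : file;
        const char *dot   = strrchr(base, '.');        /* rightmost '.' in the base */

        int dirlen  = slash ? (int)(slash - file) : 0;
        int baselen = dot ? (int)(dot - base) : (int)strlen(base);

        printf("dir    = %.*s\n", dirlen, file);       /* src/build */
        printf("base   = %.*s\n", baselen, base);      /* main */
        printf("suffix = %s\n",  dot ? dot : "");      /* .cpp */
        return 0;
    }
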
diff --git a/jam-files/engine/pathunix.c b/jam-files/engine/pathunix.c
deleted file mode 100644
index 2daad14b..00000000
--- a/jam-files/engine/pathunix.c
+++ /dev/null
@@ -1,457 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "pathsys.h"
-# include "strings.h"
-# include "newstr.h"
-# include "filesys.h"
-# include <time.h>
-# include <stdlib.h>
-# ifndef OS_NT
-# include <unistd.h>
-# endif
-
-# ifdef USE_PATHUNIX
-
-/*
- * pathunix.c - manipulate file names on UNIX, NT, OS2, AmigaOS
- *
- * External routines:
- *
- * path_parse() - split a file name into dir/base/suffix/member
- * path_build() - build a filename given dir/base/suffix/member
- * path_parent() - make a PATHNAME point to its parent dir
- *
- * path_parse() and path_build() just manipulate a string and a structure;
- * they do not make system calls.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 12/26/93 (seiwald) - handle dir/.suffix properly in path_build()
- * 12/19/94 (mikem) - solaris string table insanity support
- * 12/21/94 (wingerd) Use backslashes for pathnames - the NT way.
- * 02/14/95 (seiwald) - parse and build /xxx properly
- * 02/23/95 (wingerd) Compilers on NT can handle "/" in pathnames, so we
- * should expect hdr searches to come up with strings
- * like "thing/thing.h". So we need to test for "/" as
- * well as "\" when parsing pathnames.
- * 03/16/95 (seiwald) - fixed accursed typo on line 69.
- * 05/03/96 (seiwald) - split from filent.c, fileunix.c
- * 12/20/96 (seiwald) - when looking for the rightmost . in a file name,
- * don't include the archive member name.
- * 01/13/01 (seiwald) - turn on \ handling on UNIX, on by accident
- */
-
-/*
- * path_parse() - split a file name into dir/base/suffix/member
- */
-
-void path_parse( char * file, PATHNAME * f )
-{
- char * p;
- char * q;
- char * end;
-
- memset( (char *)f, 0, sizeof( *f ) );
-
- /* Look for <grist> */
-
- if ( ( file[0] == '<' ) && ( p = strchr( file, '>' ) ) )
- {
- f->f_grist.ptr = file;
- f->f_grist.len = p - file;
- file = p + 1;
- }
-
- /* Look for dir/ */
-
- p = strrchr( file, '/' );
-
-# if PATH_DELIM == '\\'
- /* On NT, look for dir\ as well */
- {
- char *p1 = strrchr( file, '\\' );
- p = p1 > p ? p1 : p;
- }
-# endif
-
- if ( p )
- {
- f->f_dir.ptr = file;
- f->f_dir.len = p - file;
-
- /* Special case for / - dirname is /, not "" */
-
- if ( !f->f_dir.len )
- f->f_dir.len = 1;
-
-# if PATH_DELIM == '\\'
- /* Special case for D:/ - dirname is D:/, not "D:" */
-
- if ( f->f_dir.len == 2 && file[1] == ':' )
- f->f_dir.len = 3;
-# endif
-
- file = p + 1;
- }
-
- end = file + strlen( file );
-
- /* Look for (member) */
-
- if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
- {
- f->f_member.ptr = p + 1;
- f->f_member.len = end - p - 2;
- end = p;
- }
-
- /* Look for .suffix */
- /* This would be memrchr() */
-
- p = 0;
- q = file;
-
- while ( ( q = (char *)memchr( q, '.', end - q ) ) )
- p = q++;
-
- if ( p )
- {
- f->f_suffix.ptr = p;
- f->f_suffix.len = end - p;
- end = p;
- }
-
- /* Leaves base */
-
- f->f_base.ptr = file;
- f->f_base.len = end - file;
-}
-
-/*
- * path_delims - the string of legal path delimiters
- */
-static char path_delims[] = {
- PATH_DELIM,
-# if PATH_DELIM == '\\'
- '/',
-# endif
- 0
-};
-
-/*
- * is_path_delim() - true iff c is a path delimiter
- */
-static int is_path_delim( char c )
-{
- char* p = strchr( path_delims, c );
- return p && *p;
-}
-
-/*
- * as_path_delim() - convert c to a path delimiter if it isn't one
- * already
- */
-static char as_path_delim( char c )
-{
- return is_path_delim( c ) ? c : PATH_DELIM;
-}
-
-/*
- * path_build() - build a filename given dir/base/suffix/member
- *
- * To avoid changing slash direction on NT when reconstituting paths,
- * instead of unconditionally appending PATH_DELIM we check the
- * past-the-end character of the previous path element. If it is in
- * path_delims, we append that, and only append PATH_DELIM as a last
- * resort. This heuristic is based on the fact that PATHNAME objects
- * are usually the result of calling path_parse, which leaves the
- * original slashes in the past-the-end position. Correctness depends
- * on the assumption that all strings are zero terminated, so a
- * past-the-end character will always be available.
- *
- * As an attendant patch, we had to ensure that backslashes are used
- * explicitly in timestamp.c
- */
-
-void
-path_build(
- PATHNAME *f,
- string *file,
- int binding )
-{
- file_build1( f, file );
-
- /* Don't prepend root if it's . or directory is rooted */
-# if PATH_DELIM == '/'
-
- if ( f->f_root.len
- && !( f->f_root.len == 1 && f->f_root.ptr[0] == '.' )
- && !( f->f_dir.len && f->f_dir.ptr[0] == '/' ) )
-
-# else /* unix */
-
- if ( f->f_root.len
- && !( f->f_root.len == 1 && f->f_root.ptr[0] == '.' )
- && !( f->f_dir.len && f->f_dir.ptr[0] == '/' )
- && !( f->f_dir.len && f->f_dir.ptr[0] == '\\' )
- && !( f->f_dir.len && f->f_dir.ptr[1] == ':' ) )
-
-# endif /* unix */
-
- {
- string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
- /* If 'root' already ends with a path delimiter,
- don't add another one. */
- if ( ! is_path_delim( f->f_root.ptr[f->f_root.len-1] ) )
- string_push_back( file, as_path_delim( f->f_root.ptr[f->f_root.len] ) );
- }
-
- if ( f->f_dir.len )
- string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
-
- /* UNIX: Put / between dir and file */
- /* NT: Put \ between dir and file */
-
- if ( f->f_dir.len && ( f->f_base.len || f->f_suffix.len ) )
- {
- /* UNIX: Special case for dir \ : don't add another \ */
- /* NT: Special case for dir / : don't add another / */
-
-# if PATH_DELIM == '\\'
- if ( !( f->f_dir.len == 3 && f->f_dir.ptr[1] == ':' ) )
-# endif
- if ( !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[0] ) ) )
- string_push_back( file, as_path_delim( f->f_dir.ptr[f->f_dir.len] ) );
- }
-
- if ( f->f_base.len )
- {
- string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
- }
-
- if ( f->f_suffix.len )
- {
- string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
- }
-
- if ( f->f_member.len )
- {
- string_push_back( file, '(' );
- string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
- string_push_back( file, ')' );
- }
-}
-
-/*
- * path_parent() - make a PATHNAME point to its parent dir
- */
-
-void
-path_parent( PATHNAME *f )
-{
- /* just set everything else to nothing */
-
- f->f_base.ptr =
- f->f_suffix.ptr =
- f->f_member.ptr = "";
-
- f->f_base.len =
- f->f_suffix.len =
- f->f_member.len = 0;
-}
-
-#ifdef NT
-#include <windows.h>
-#include <tchar.h>
-
-/* The definition of this in winnt.h is not ANSI-C compatible. */
-#undef INVALID_FILE_ATTRIBUTES
-#define INVALID_FILE_ATTRIBUTES ((DWORD)-1)
-
-
-DWORD ShortPathToLongPath(LPCTSTR lpszShortPath,LPTSTR lpszLongPath,DWORD
- cchBuffer)
-{
- LONG i=0;
- TCHAR path[_MAX_PATH]={0};
- TCHAR ret[_MAX_PATH]={0};
- LONG pos=0, prev_pos=0;
- LONG len=_tcslen(lpszShortPath);
-
- /* Is the string valid? */
- if (!lpszShortPath) {
- SetLastError(ERROR_INVALID_PARAMETER);
- return 0;
- }
-
- /* Is the path valid? */
- if (GetFileAttributes(lpszShortPath)==INVALID_FILE_ATTRIBUTES)
- return 0;
-
- /* Convert "/" to "\" */
- for (i=0;i<len;++i) {
- if (lpszShortPath[i]==_T('/'))
- path[i]=_T('\\');
- else
- path[i]=lpszShortPath[i];
- }
-
- /* UNC path? */
- if (path[0]==_T('\\') && path[1]==_T('\\')) {
- pos=2;
- for (i=0;i<2;++i) {
- while (path[pos]!=_T('\\') && path[pos]!=_T('\0'))
- ++pos;
- ++pos;
- }
- _tcsncpy(ret,path,pos-1);
- } /* Drive letter? */
- else if (path[1]==_T(':')) {
- if (path[2]==_T('\\'))
- pos=3;
- if (len==3) {
- if (cchBuffer>3)
- _tcscpy(lpszLongPath,lpszShortPath);
- return len;
- }
- _tcsncpy(ret,path,2);
- }
-
- /* Expand the path for each subpath, and strip trailing backslashes */
- for (prev_pos = pos-1;pos<=len;++pos) {
- if (path[pos]==_T('\\') || (path[pos]==_T('\0') &&
- path[pos-1]!=_T('\\'))) {
- WIN32_FIND_DATA fd;
- HANDLE hf=0;
- TCHAR c=path[pos];
- char* new_element;
- path[pos]=_T('\0');
-
- /* the path[prev_pos+1]... path[pos] range is the part of
- path we're handling right now. We need to find long
- name for that element and add it. */
- new_element = path + prev_pos + 1;
-
- /* First add separator, but only if there's something in result already. */
- if (ret[0] != _T('\0'))
- {
- _tcscat(ret,_T("\\"));
- }
-
- /* If it's ".." element, we need to append it, not
- the name in parent that FindFirstFile will return.
- Same goes for "." */
-
- if (new_element[0] == _T('.') && new_element[1] == _T('\0') ||
- new_element[0] == _T('.') && new_element[1] == _T('.')
- && new_element[2] == _T('\0'))
- {
- _tcscat(ret, new_element);
- }
- else
- {
- hf=FindFirstFile(path, &fd);
- if (hf==INVALID_HANDLE_VALUE)
- return 0;
-
- _tcscat(ret,fd.cFileName);
- FindClose(hf);
- }
-
- path[pos]=c;
-
- prev_pos = pos;
- }
- }
-
- len=_tcslen(ret)+1;
- if (cchBuffer>=len)
- _tcscpy(lpszLongPath,ret);
-
- return len;
-}
-
-char* short_path_to_long_path(char* short_path)
-{
- char buffer2[_MAX_PATH];
- int ret = ShortPathToLongPath(short_path, buffer2, _MAX_PATH);
-
- if (ret)
- return newstr(buffer2);
- else
- return newstr(short_path);
-}
-
-#endif
-
-static string path_tmpdir_buffer[1];
-static const char * path_tmpdir_result = 0;
-
-const char * path_tmpdir()
-{
- if (!path_tmpdir_result)
- {
- # ifdef OS_NT
- DWORD pathLength = 0;
- pathLength = GetTempPath(pathLength,NULL);
- string_new(path_tmpdir_buffer);
- string_reserve(path_tmpdir_buffer,pathLength);
- pathLength = GetTempPathA(pathLength,path_tmpdir_buffer[0].value);
- path_tmpdir_buffer[0].value[pathLength-1] = '\0';
- path_tmpdir_buffer[0].size = pathLength-1;
- # else
- const char * t = getenv("TMPDIR");
- if (!t)
- {
- t = "/tmp";
- }
- string_new(path_tmpdir_buffer);
- string_append(path_tmpdir_buffer,t);
- # endif
- path_tmpdir_result = path_tmpdir_buffer[0].value;
- }
- return path_tmpdir_result;
-}
-
-const char * path_tmpnam(void)
-{
- char name_buffer[64];
- # ifdef OS_NT
- unsigned long c0 = GetCurrentProcessId();
- # else
- unsigned long c0 = getpid();
- # endif
- static unsigned long c1 = 0;
- if (0 == c1) c1 = time(0)&0xffff;
- c1 += 1;
- sprintf(name_buffer,"jam%lx%lx.000",c0,c1);
- return newstr(name_buffer);
-}
-
-const char * path_tmpfile(void)
-{
- const char * result = 0;
-
- string file_path;
- string_copy(&file_path,path_tmpdir());
- string_push_back(&file_path,PATH_DELIM);
- string_append(&file_path,path_tmpnam());
- result = newstr(file_path.value);
- string_free(&file_path);
-
- return result;
-}
-
-
-# endif /* unix, NT, OS/2, AmigaOS */
diff --git a/jam-files/engine/pathvms.c b/jam-files/engine/pathvms.c
deleted file mode 100644
index 975fe5a5..00000000
--- a/jam-files/engine/pathvms.c
+++ /dev/null
@@ -1,406 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "pathsys.h"
-
-# ifdef OS_VMS
-
-# define DEBUG
-
-/*
- * pathvms.c - manipulate file names on VMS
- *
- * External routines:
- *
- * path_parse() - split a file name into dir/base/suffix/member
- * path_build() - build a filename given dir/base/suffix/member
- * path_parent() - make a PATHNAME point to its parent dir
- *
- * path_parse() and path_build() just manipulate a string and a structure;
- * they do not make system calls.
- *
- * WARNING! This file contains voodoo logic, as black magic is
- * necessary for wrangling with VMS file names. Woe be to people
- * who mess with this code.
- *
- * 02/09/95 (seiwald) - bungled R=[xxx] - was using directory length!
- * 05/03/96 (seiwald) - split from filevms.c
- */
-
-/*
- * path_parse() - split a file name into dir/base/suffix/member.
- */
-
-void path_parse( char * file, PATHNAME * f )
-{
- char * p;
- char * q;
- char * end;
-
- memset( (char *)f, 0, sizeof( *f ) );
-
- /* Look for <grist> */
-
- if ( ( file[0] == '<' ) && ( p = strchr( file, '>' ) ) )
- {
- f->f_grist.ptr = file;
- f->f_grist.len = p - file;
- file = p + 1;
- }
-
- /* Look for dev:[dir] or dev: */
-
- if ( ( p = strchr( file, ']' ) ) || ( p = strchr( file, ':' ) ) )
- {
- f->f_dir.ptr = file;
- f->f_dir.len = p + 1 - file;
- file = p + 1;
- }
-
- end = file + strlen( file );
-
- /* Look for (member). */
-
- if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
- {
- f->f_member.ptr = p + 1;
- f->f_member.len = end - p - 2;
- end = p;
- }
-
- /* Look for .suffix */
- /* This would be memrchr(). */
-
- p = 0;
- q = file;
-
- while ( q = (char *)memchr( q, '.', end - q ) )
- p = q++;
-
- if ( p )
- {
- f->f_suffix.ptr = p;
- f->f_suffix.len = end - p;
- end = p;
- }
-
- /* Leaves base. */
- f->f_base.ptr = file;
- f->f_base.len = end - file;
-
- /* Is this a directory without a file spec? */
- f->parent = 0;
-}
-
-/*
- * dir mods result
- * --- --- ------
- * Rerooting:
- *
- * (none) :R=dev: dev:
- * devd: :R=dev: devd:
- * devd:[dir] :R=dev: devd:[dir]
- * [.dir] :R=dev: dev:[dir] questionable
- * [dir] :R=dev: dev:[dir]
- *
- * (none) :R=[rdir] [rdir] questionable
- * devd: :R=[rdir] devd:
- * devd:[dir] :R=[rdir] devd:[dir]
- * [.dir] :R=[rdir] [rdir.dir] questionable
- * [dir] :R=[rdir] [rdir]
- *
- * (none) :R=dev:[root] dev:[root]
- * devd: :R=dev:[root] devd:
- * devd:[dir] :R=dev:[root] devd:[dir]
- * [.dir] :R=dev:[root] dev:[root.dir]
- * [dir] :R=dev:[root] [dir]
- *
- * Climbing to parent:
- *
- */
-
-# define DIR_EMPTY 0 /* empty string */
-# define DIR_DEV 1 /* dev: */
-# define DIR_DEVDIR 2 /* dev:[dir] */
-# define DIR_DOTDIR 3 /* [.dir] */
-# define DIR_DASHDIR 4 /* [-] or [-.dir] */
-# define DIR_ABSDIR 5 /* [dir] */
-# define DIR_ROOT 6 /* [000000] or dev:[000000] */
-
-# define G_DIR 0 /* take just dir */
-# define G_ROOT 1 /* take just root */
-# define G_VAD 2 /* root's dev: + [abs] */
-# define G_DRD 3 /* root's dev:[dir] + [.rel] */
-# define G_VRD 4 /* root's dev: + [.rel] made [abs] */
-# define G_DDD 5 /* root's dev:[dir] + . + [dir] */
-
-static int grid[7][7] = {
-
-/* root/dir EMPTY DEV DEVDIR DOTDIR DASH, ABSDIR ROOT */
-/* EMPTY */ G_DIR, G_DIR, G_DIR, G_DIR, G_DIR, G_DIR, G_DIR,
-/* DEV */ G_ROOT, G_DIR, G_DIR, G_VRD, G_VAD, G_VAD, G_VAD,
-/* DEVDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_VAD, G_VAD, G_VAD,
-/* DOTDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_DIR, G_DIR, G_DIR,
-/* DASHDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_DDD, G_DIR, G_DIR,
-/* ABSDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_DIR, G_DIR, G_DIR,
-/* ROOT */ G_ROOT, G_DIR, G_DIR, G_VRD, G_DIR, G_DIR, G_DIR,
-
-};
-
-struct dirinf
-{
- int flags;
-
- struct
- {
- char * ptr;
- int len;
- } dev, dir;
-};
-
-static char * strnchr( char * buf, int c, int len )
-{
- while ( len-- )
- if ( *buf && ( *buf++ == c ) )
- return buf - 1;
- return 0;
-}
-
-
-static void dir_flags( char * buf, int len, struct dirinf * i )
-{
- char * p;
-
- if ( !buf || !len )
- {
- i->flags = DIR_EMPTY;
- i->dev.ptr =
- i->dir.ptr = 0;
- i->dev.len =
- i->dir.len = 0;
- }
- else if ( p = strnchr( buf, ':', len ) )
- {
- i->dev.ptr = buf;
- i->dev.len = p + 1 - buf;
- i->dir.ptr = buf + i->dev.len;
- i->dir.len = len - i->dev.len;
- i->flags = i->dir.len && *i->dir.ptr == '[' ? DIR_DEVDIR : DIR_DEV;
- }
- else
- {
- i->dev.ptr = buf;
- i->dev.len = 0;
- i->dir.ptr = buf;
- i->dir.len = len;
-
- if ( ( *buf == '[' ) && ( buf[1] == ']' ) )
- i->flags = DIR_EMPTY;
- else if ( ( *buf == '[' ) && ( buf[1] == '.' ) )
- i->flags = DIR_DOTDIR;
- else if ( ( *buf == '[' ) && ( buf[1] == '-' ) )
- i->flags = DIR_DASHDIR;
- else
- i->flags = DIR_ABSDIR;
- }
-
- /* But check whether it is rooted in any way. */
-
- if ( ( i->dir.len == 8 ) && !strncmp( i->dir.ptr, "[000000]", 8 ) )
- i->flags = DIR_ROOT;
-}
-
-
-/*
- * path_build() - build a filename given dir/base/suffix/member
- */
-
-void path_build( PATHNAME * f, string * file, int binding )
-{
- struct dirinf root;
- struct dirinf dir;
- int g;
-
- file_build1( f, file );
-
- /* Get info on root and dir for combining. */
- dir_flags( f->f_root.ptr, f->f_root.len, &root );
- dir_flags( f->f_dir.ptr, f->f_dir.len, &dir );
-
- /* Combine. */
- switch ( g = grid[ root.flags ][ dir.flags ] )
- {
- case G_DIR:
- /* take dir */
- string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
- break;
-
- case G_ROOT:
- /* take root */
- string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
- break;
-
- case G_VAD:
- /* root's dev + abs directory */
- string_append_range( file, root.dev.ptr, root.dev.ptr + root.dev.len );
- string_append_range( file, dir.dir.ptr, dir.dir.ptr + dir.dir.len );
- break;
-
- case G_DRD:
- case G_DDD:
- /* root's dev:[dir] + rel directory */
- string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
-
- /* sanity checks: root ends with ] */
-
- if ( file->value[file->size - 1] == ']' )
- string_pop_back( file );
-
- /* Add . if separating two -'s */
-
- if ( g == G_DDD )
- string_push_back( file, '.' );
-
- /* skip [ of dir */
- string_append_range( file, dir.dir.ptr + 1, dir.dir.ptr + 1 + dir.dir.len - 1 );
- break;
-
- case G_VRD:
- /* root's dev + rel directory made abs */
- string_append_range( file, root.dev.ptr, root.dev.ptr + root.dev.len );
- string_push_back( file, '[' );
- /* skip [. of rel dir */
- string_append_range( file, dir.dir.ptr + 2, dir.dir.ptr + 2 + dir.dir.len - 2 );
- break;
- }
-
-# ifdef DEBUG
- if ( DEBUG_SEARCH && ( root.flags || dir.flags ) )
- printf( "%d x %d = %d (%s)\n", root.flags, dir.flags,
- grid[ root.flags ][ dir.flags ], file->value );
-# endif
-
- /*
- * Now do the special :P modifier when no file was present.
- * (none) (none)
- * [dir1.dir2] [dir1]
- * [dir] [000000]
- * [.dir] (none)
- * [] []
- */
-
- if ( ( file->value[ file->size - 1 ] == ']' ) && f->parent )
- {
- char * p = file->value + file->size;
- while ( p-- > file->value )
- {
- if ( *p == '.' )
- {
- /* If we've truncated everything and left with '[',
- return empty string. */
- if ( p == file->value + 1 )
- string_truncate( file, 0 );
- else
- {
- string_truncate( file, p - file->value );
- string_push_back( file, ']' );
- }
- break;
- }
-
- if ( *p == '-' )
- {
- /* handle .- or - */
- if ( ( p > file->value ) && ( p[ -1 ] == '.' ) )
- --p;
-
- *p++ = ']';
- break;
- }
-
- if ( *p == '[' )
- {
- if ( p[ 1 ] == ']' )
- {
- /* CONSIDER: I don't see any use of this code. We immediately
- break, and 'p' is a local variable. */
- p += 2;
- }
- else
- {
- string_truncate( file, p - file->value );
- string_append( file, "[000000]" );
- }
- break;
- }
- }
- }
-
- /* Now copy the file pieces. */
- if ( f->f_base.len )
- {
- string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
- }
-
- /* If there is no suffix, we append a "." onto all generated names. This
- * keeps VMS from appending its own (wrong) idea of what the suffix should
- * be.
- */
- if ( f->f_suffix.len )
- string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
- else if ( binding && f->f_base.len )
- string_push_back( file, '.' );
-
- if ( f->f_member.len )
- {
- string_push_back( file, '(' );
- string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
- string_push_back( file, ')' );
- }
-
-# ifdef DEBUG
- if ( DEBUG_SEARCH )
- printf( "built %.*s + %.*s / %.*s suf %.*s mem %.*s -> %s\n",
- f->f_root.len, f->f_root.ptr,
- f->f_dir.len, f->f_dir.ptr,
- f->f_base.len, f->f_base.ptr,
- f->f_suffix.len, f->f_suffix.ptr,
- f->f_member.len, f->f_member.ptr,
- file->value );
-# endif
-}
-
-
-/*
- * path_parent() - make a PATHNAME point to its parent dir
- */
-
-void path_parent( PATHNAME * f )
-{
- if ( f->f_base.len )
- {
- f->f_base.ptr =
- f->f_suffix.ptr =
- f->f_member.ptr = "";
-
- f->f_base.len =
- f->f_suffix.len =
- f->f_member.len = 0;
- }
- else
- {
- f->parent = 1;
- }
-}
-
-# endif /* VMS */
diff --git a/jam-files/engine/pwd.c b/jam-files/engine/pwd.c
deleted file mode 100644
index 90c8eb17..00000000
--- a/jam-files/engine/pwd.c
+++ /dev/null
@@ -1,66 +0,0 @@
-/* Copyright Vladimir Prus 2002, Rene Rivera 2005. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "jam.h"
-#include "lists.h"
-#include "newstr.h"
-#include "pathsys.h"
-#include "mem.h"
-
-#include <limits.h>
-#include <errno.h>
-
-/* MinGW on windows declares PATH_MAX in limits.h */
-#if defined(NT) && ! defined(__GNUC__)
-#include <direct.h>
-#define PATH_MAX _MAX_PATH
-#else
-#include <unistd.h>
-#if defined(__COMO__)
- #include <linux/limits.h>
-#endif
-#endif
-
-#ifndef PATH_MAX
- #define PATH_MAX 1024
-#endif
-
-/* The current directory can't change in bjam, so optimize this to cache
-** the result.
-*/
-static char * pwd_result = NULL;
-
-
-LIST*
-pwd(void)
-{
- if (!pwd_result)
- {
- int buffer_size = PATH_MAX;
- char * result_buffer = 0;
- do
- {
- char * buffer = BJAM_MALLOC_RAW(buffer_size);
- result_buffer = getcwd(buffer,buffer_size);
- if (result_buffer)
- {
- #ifdef NT
- pwd_result = short_path_to_long_path(result_buffer);
- #else
- pwd_result = newstr(result_buffer);
- #endif
- }
- buffer_size *= 2;
- BJAM_FREE_RAW(buffer);
- }
- while (!pwd_result && errno == ERANGE);
-
- if (!pwd_result)
- {
- perror("can not get current directory");
- return L0;
- }
- }
- return list_new(L0, pwd_result);
-}
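
pwd() above retries getcwd() with a doubling buffer until the result fits rather than trusting a fixed PATH_MAX. A standalone POSIX sketch of the same retry loop (the buffer sizes are illustrative):

    #include <stdio.h>
    #include <stdlib.h>
    #include <errno.h>
    #include <unistd.h>

    int main(void)
    {
        size_t size = 64;                     /* deliberately small to force growth */
        char *buf = NULL;

        for (;;)
        {
            char *tmp = realloc(buf, size);
            if (!tmp) { free(buf); perror("realloc"); return 1; }
            buf = tmp;
            if (getcwd(buf, size))            /* success: buffer was large enough */
                break;
            if (errno != ERANGE) { free(buf); perror("getcwd"); return 1; }
            size *= 2;                        /* too small: double and retry */
        }
        printf("cwd: %s\n", buf);
        free(buf);
        return 0;
    }
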
diff --git a/jam-files/engine/pwd.h b/jam-files/engine/pwd.h
deleted file mode 100644
index 37cb531e..00000000
--- a/jam-files/engine/pwd.h
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright Vladimir Prus 2002. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#ifndef PWD_H
-#define PWD_H
-
-LIST* pwd(void);
-
-#endif
diff --git a/jam-files/engine/regexp.c b/jam-files/engine/regexp.c
deleted file mode 100644
index 30197a2f..00000000
--- a/jam-files/engine/regexp.c
+++ /dev/null
@@ -1,1328 +0,0 @@
-/*
- * regcomp and regexec -- regsub and regerror are elsewhere
- *
- * Copyright (c) 1986 by University of Toronto.
- * Written by Henry Spencer. Not derived from licensed software.
- *
- * Permission is granted to anyone to use this software for any
- * purpose on any computer system, and to redistribute it freely,
- * subject to the following restrictions:
- *
- * 1. The author is not responsible for the consequences of use of
- * this software, no matter how awful, even if they arise
- * from defects in it.
- *
- * 2. The origin of this software must not be misrepresented, either
- * by explicit claim or by omission.
- *
- * 3. Altered versions must be plainly marked as such, and must not
- * be misrepresented as being the original software.
- *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
- *** hoptoad!gnu, on 27 Dec 1986, to add \n as an alternative to |
- *** to assist in implementing egrep.
- *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
- *** hoptoad!gnu, on 27 Dec 1986, to add \< and \> for word-matching
- *** as in BSD grep and ex.
- *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
- *** hoptoad!gnu, on 28 Dec 1986, to optimize characters quoted with \.
- *** THIS IS AN ALTERED VERSION. It was altered by James A. Woods,
- *** ames!jaw, on 19 June 1987, to quash a regcomp() redundancy.
- *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
- *** seiwald@vix.com, on 28 August 1993, for use in jam. Regmagic.h
- *** was moved into regexp.h, and the include of regexp.h now uses "'s
- *** to avoid conflicting with the system regexp.h. Const, bless its
- *** soul, was removed so it can compile everywhere. The declaration
- *** of strchr() was in conflict on AIX, so it was removed (as it is
- *** happily defined in string.h).
- *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
- *** seiwald@perforce.com, on 20 January 2000, to use function prototypes.
- *
- * Beware that some of this code is subtly aware of the way operator precedence
- * is structured in regular expressions. Serious changes in regular-expression
- * syntax might require a total rethink.
- */
-
-
-#include "jam.h"
-#include "regexp.h"
-#include <stdio.h>
-#include <ctype.h>
-#ifndef ultrix
- #include <stdlib.h>
-#endif
-#include <string.h>
-
-
-/*
- * The "internal use only" fields in regexp.h are present to pass info from
- * compile to execute that permits the execute phase to run lots faster on
- * simple cases. They are:
- *
- * regstart char that must begin a match; '\0' if none obvious.
- * reganch is the match anchored (at beginning-of-line only)?
- * regmust string (pointer into program) that match must include, or NULL.
- * regmlen length of regmust string.
- *
- * Regstart and reganch permit very fast decisions on suitable starting points
- * for a match, cutting down the work a lot. Regmust permits fast rejection of
- * lines that cannot possibly match. The regmust tests are costly enough that
- * regcomp() supplies a regmust only if the r.e. contains something potentially
- * expensive (at present, the only such thing detected is * or + at the start of
- * the r.e., which can involve a lot of backup). Regmlen is supplied because the
- * test in regexec() needs it and regcomp() is computing it anyway.
- */
-
-/*
- * Structure for regexp "program". This is essentially a linear encoding of a
- * nondeterministic finite-state machine (aka syntax charts or "railroad normal
- * form" in parsing technology). Each node is an opcode plus a "next" pointer,
- * possibly plus an operand. "Next" pointers of all nodes except BRANCH
- * implement concatenation; a "next" pointer with a BRANCH on both ends of it is
- * connecting two alternatives. [Here we have one of the subtle syntax
- * dependencies: an individual BRANCH, as opposed to a collection of them, is
- * never concatenated with anything because of operator precedence.] The operand
- * of some types of node is a literal string; for others, it is a node leading
- * into a sub-FSM. In particular, the operand of a BRANCH node is the first node
- * of the branch. [NB this is *not* a tree structure: the tail of the branch
- * connects to the thing following the set of BRANCHes.] The opcodes are:
- */
-
-/* definition number opnd? meaning */
-#define END 0 /* no End of program. */
-#define BOL 1 /* no Match "" at beginning of line. */
-#define EOL 2 /* no Match "" at end of line. */
-#define ANY 3 /* no Match any one character. */
-#define ANYOF 4 /* str Match any character in this string. */
-#define ANYBUT 5 /* str Match any character not in this string. */
-#define BRANCH 6 /* node Match this alternative, or the next... */
-#define BACK 7 /* no Match "", "next" ptr points backward. */
-#define EXACTLY 8 /* str Match this string. */
-#define NOTHING 9 /* no Match empty string. */
-#define STAR 10 /* node Match this (simple) thing 0 or more times. */
-#define PLUS 11 /* node Match this (simple) thing 1 or more times. */
-#define WORDA 12 /* no Match "" at wordchar, where prev is nonword */
-#define WORDZ 13 /* no Match "" at nonwordchar, where prev is word */
-#define OPEN 20 /* no Mark this point in input as start of #n. */
- /* OPEN+1 is number 1, etc. */
-#define CLOSE 30 /* no Analogous to OPEN. */
-
-
-/*
- * Opcode notes:
- *
- * BRANCH The set of branches constituting a single choice are hooked
- * together with their "next" pointers, since precedence prevents
- * anything being concatenated to any individual branch. The
- * "next" pointer of the last BRANCH in a choice points to the
- * thing following the whole choice. This is also where the
- * final "next" pointer of each individual branch points; each
- * branch starts with the operand node of a BRANCH node.
- *
- * BACK Normal "next" pointers all implicitly point forward; BACK
- * exists to make loop structures possible.
- *
- * STAR,PLUS '?', and complex '*' and '+', are implemented as circular
- * BRANCH structures using BACK. Simple cases (one character
- * per match) are implemented with STAR and PLUS for speed
- * and to minimize recursive plunges.
- *
- * OPEN,CLOSE ...are numbered at compile time.
- */
-
-/*
- * A node is one char of opcode followed by two chars of "next" pointer.
- * "Next" pointers are stored as two 8-bit pieces, high order first. The
- * value is a positive offset from the opcode of the node containing it.
- * An operand, if any, simply follows the node. (Note that much of the
- * code generation knows about this implicit relationship.)
- *
- * Using two bytes for the "next" pointer is vast overkill for most things,
- * but allows patterns to get big without disasters.
- */
-#define OP(p) (*(p))
-#define NEXT(p) (((*((p)+1)&0377)<<8) + (*((p)+2)&0377))
-#define OPERAND(p) ((p) + 3)
-
-/*
- * See regmagic.h for one further detail of program structure.
- */
-
-
-/*
- * Utility definitions.
- */
-#ifndef CHARBITS
-#define UCHARAT(p) ((int)*(unsigned char *)(p))
-#else
-#define UCHARAT(p) ((int)*(p)&CHARBITS)
-#endif
-
-#define FAIL(m) { regerror(m); return(NULL); }
-#define ISMULT(c) ((c) == '*' || (c) == '+' || (c) == '?')
-
-/*
- * Flags to be passed up and down.
- */
-#define HASWIDTH 01 /* Known never to match null string. */
-#define SIMPLE 02 /* Simple enough to be STAR/PLUS operand. */
-#define SPSTART 04 /* Starts with * or +. */
-#define WORST 0 /* Worst case. */
-
-/*
- * Global work variables for regcomp().
- */
-static char *regparse; /* Input-scan pointer. */
-static int regnpar; /* () count. */
-static char regdummy;
-static char *regcode; /* Code-emit pointer; &regdummy = don't. */
-static long regsize; /* Code size. */
-
-/*
- * Forward declarations for regcomp()'s friends.
- */
-#ifndef STATIC
-#define STATIC static
-#endif
-STATIC char *reg( int paren, int *flagp );
-STATIC char *regbranch( int *flagp );
-STATIC char *regpiece( int *flagp );
-STATIC char *regatom( int *flagp );
-STATIC char *regnode( int op );
-STATIC char *regnext( register char *p );
-STATIC void regc( int b );
-STATIC void reginsert( char op, char *opnd );
-STATIC void regtail( char *p, char *val );
-STATIC void regoptail( char *p, char *val );
-#ifdef STRCSPN
-STATIC int strcspn();
-#endif
-
-/*
- - regcomp - compile a regular expression into internal code
- *
- * We can't allocate space until we know how big the compiled form will be,
- * but we can't compile it (and thus know how big it is) until we've got a
- * place to put the code. So we cheat: we compile it twice, once with code
- * generation turned off and size counting turned on, and once "for real".
- * This also means that we don't allocate space until we are sure that the
- * thing really will compile successfully, and we never have to move the
- * code and thus invalidate pointers into it. (Note that it has to be in
- * one piece because free() must be able to free it all.)
- *
- * Beware that the optimization-preparation code in here knows about some
- * of the structure of the compiled regexp.
- */
-regexp *
-regcomp( char *exp )
-{
- register regexp *r;
- register char *scan;
- register char *longest;
- register unsigned len;
- int flags;
-
- if (exp == NULL)
- FAIL("NULL argument");
-
- /* First pass: determine size, legality. */
-#ifdef notdef
- if (exp[0] == '.' && exp[1] == '*') exp += 2; /* aid grep */
-#endif
- regparse = (char *)exp;
- regnpar = 1;
- regsize = 0L;
- regcode = &regdummy;
- regc(MAGIC);
- if (reg(0, &flags) == NULL)
- return(NULL);
-
- /* Small enough for pointer-storage convention? */
- if (regsize >= 32767L) /* Probably could be 65535L. */
- FAIL("regexp too big");
-
- /* Allocate space. */
- r = (regexp *)BJAM_MALLOC(sizeof(regexp) + (unsigned)regsize);
- if (r == NULL)
- FAIL("out of space");
-
- /* Second pass: emit code. */
- regparse = (char *)exp;
- regnpar = 1;
- regcode = r->program;
- regc(MAGIC);
- if (reg(0, &flags) == NULL)
- return(NULL);
-
- /* Dig out information for optimizations. */
- r->regstart = '\0'; /* Worst-case defaults. */
- r->reganch = 0;
- r->regmust = NULL;
- r->regmlen = 0;
- scan = r->program+1; /* First BRANCH. */
- if (OP(regnext(scan)) == END) { /* Only one top-level choice. */
- scan = OPERAND(scan);
-
- /* Starting-point info. */
- if (OP(scan) == EXACTLY)
- r->regstart = *OPERAND(scan);
- else if (OP(scan) == BOL)
- r->reganch++;
-
- /*
- * If there's something expensive in the r.e., find the
- * longest literal string that must appear and make it the
- * regmust. Resolve ties in favor of later strings, since
- * the regstart check works with the beginning of the r.e.
- * and avoiding duplication strengthens checking. Not a
- * strong reason, but sufficient in the absence of others.
- */
- if (flags&SPSTART) {
- longest = NULL;
- len = 0;
- for (; scan != NULL; scan = regnext(scan))
- if (OP(scan) == EXACTLY && strlen(OPERAND(scan)) >= len) {
- longest = OPERAND(scan);
- len = strlen(OPERAND(scan));
- }
- r->regmust = longest;
- r->regmlen = len;
- }
- }
-
- return(r);
-}
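The regcomp() comment above describes a classic size-then-emit scheme: the first pass emits into a dummy byte and only counts, and the second pass writes for real into a buffer of exactly the counted size, so the compiled program never needs to be moved. Here is a minimal, self-contained illustration of the same idiom (hypothetical names, not the Jam code).

    #include <stdio.h>
    #include <stdlib.h>

    static char dummy;       /* counterpart of regdummy */
    static char *code;       /* emit pointer; &dummy means "just count" */
    static size_t size;      /* counterpart of regsize */

    /* Emit one byte, or merely count it when generation is turned off. */
    static void emit(char b)
    {
        if (code != &dummy)
            *code++ = b;
        else
            ++size;
    }

    static void emit_string(const char *s)
    {
        while (*s)
            emit(*s++);
        emit('\0');
    }

    int main(void)
    {
        char *program;

        /* Pass 1: count only. */
        code = &dummy;
        size = 0;
        emit_string("hello, two-pass world");

        /* Pass 2: allocate exactly what pass 1 counted, then emit for real. */
        program = malloc(size);
        if (!program)
            return 1;
        code = program;
        emit_string("hello, two-pass world");

        printf("compiled %zu bytes: %s\n", size, program);
        free(program);
        return 0;
    }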
-
-/*
- - reg - regular expression, i.e. main body or parenthesized thing
- *
- * Caller must absorb opening parenthesis.
- *
- * Combining parenthesis handling with the base level of regular expression
- * is a trifle forced, but the need to tie the tails of the branches to what
- * follows makes it hard to avoid.
- */
-static char *
-reg(
- int paren, /* Parenthesized? */
- int *flagp )
-{
- register char *ret;
- register char *br;
- register char *ender;
- register int parno = 0;
- int flags;
-
- *flagp = HASWIDTH; /* Tentatively. */
-
- /* Make an OPEN node, if parenthesized. */
- if (paren) {
- if (regnpar >= NSUBEXP)
- FAIL("too many ()");
- parno = regnpar;
- regnpar++;
- ret = regnode(OPEN+parno);
- } else
- ret = NULL;
-
- /* Pick up the branches, linking them together. */
- br = regbranch(&flags);
- if (br == NULL)
- return(NULL);
- if (ret != NULL)
- regtail(ret, br); /* OPEN -> first. */
- else
- ret = br;
- if (!(flags&HASWIDTH))
- *flagp &= ~HASWIDTH;
- *flagp |= flags&SPSTART;
- while (*regparse == '|' || *regparse == '\n') {
- regparse++;
- br = regbranch(&flags);
- if (br == NULL)
- return(NULL);
- regtail(ret, br); /* BRANCH -> BRANCH. */
- if (!(flags&HASWIDTH))
- *flagp &= ~HASWIDTH;
- *flagp |= flags&SPSTART;
- }
-
- /* Make a closing node, and hook it on the end. */
- ender = regnode((paren) ? CLOSE+parno : END);
- regtail(ret, ender);
-
- /* Hook the tails of the branches to the closing node. */
- for (br = ret; br != NULL; br = regnext(br))
- regoptail(br, ender);
-
- /* Check for proper termination. */
- if (paren && *regparse++ != ')') {
- FAIL("unmatched ()");
- } else if (!paren && *regparse != '\0') {
- if (*regparse == ')') {
- FAIL("unmatched ()");
- } else
- FAIL("junk on end"); /* "Can't happen". */
- /* NOTREACHED */
- }
-
- return(ret);
-}
-
-/*
- - regbranch - one alternative of an | operator
- *
- * Implements the concatenation operator.
- */
-static char *
-regbranch( int *flagp )
-{
- register char *ret;
- register char *chain;
- register char *latest;
- int flags;
-
- *flagp = WORST; /* Tentatively. */
-
- ret = regnode(BRANCH);
- chain = NULL;
- while (*regparse != '\0' && *regparse != ')' &&
- *regparse != '\n' && *regparse != '|') {
- latest = regpiece(&flags);
- if (latest == NULL)
- return(NULL);
- *flagp |= flags&HASWIDTH;
- if (chain == NULL) /* First piece. */
- *flagp |= flags&SPSTART;
- else
- regtail(chain, latest);
- chain = latest;
- }
- if (chain == NULL) /* Loop ran zero times. */
- (void) regnode(NOTHING);
-
- return(ret);
-}
-
-/*
- - regpiece - something followed by possible [*+?]
- *
- * Note that the branching code sequences used for ? and the general cases
- * of * and + are somewhat optimized: they use the same NOTHING node as
- * both the endmarker for their branch list and the body of the last branch.
- * It might seem that this node could be dispensed with entirely, but the
- * endmarker role is not redundant.
- */
-static char *
-regpiece( int *flagp )
-{
- register char *ret;
- register char op;
- register char *next;
- int flags;
-
- ret = regatom(&flags);
- if (ret == NULL)
- return(NULL);
-
- op = *regparse;
- if (!ISMULT(op)) {
- *flagp = flags;
- return(ret);
- }
-
- if (!(flags&HASWIDTH) && op != '?')
- FAIL("*+ operand could be empty");
- *flagp = (op != '+') ? (WORST|SPSTART) : (WORST|HASWIDTH);
-
- if (op == '*' && (flags&SIMPLE))
- reginsert(STAR, ret);
- else if (op == '*') {
- /* Emit x* as (x&|), where & means "self". */
- reginsert(BRANCH, ret); /* Either x */
- regoptail(ret, regnode(BACK)); /* and loop */
- regoptail(ret, ret); /* back */
- regtail(ret, regnode(BRANCH)); /* or */
- regtail(ret, regnode(NOTHING)); /* null. */
- } else if (op == '+' && (flags&SIMPLE))
- reginsert(PLUS, ret);
- else if (op == '+') {
- /* Emit x+ as x(&|), where & means "self". */
- next = regnode(BRANCH); /* Either */
- regtail(ret, next);
- regtail(regnode(BACK), ret); /* loop back */
- regtail(next, regnode(BRANCH)); /* or */
- regtail(ret, regnode(NOTHING)); /* null. */
- } else if (op == '?') {
- /* Emit x? as (x|) */
- reginsert(BRANCH, ret); /* Either x */
- regtail(ret, regnode(BRANCH)); /* or */
- next = regnode(NOTHING); /* null. */
- regtail(ret, next);
- regoptail(ret, next);
- }
- regparse++;
- if (ISMULT(*regparse))
- FAIL("nested *?+");
-
- return(ret);
-}
-
-/*
- - regatom - the lowest level
- *
- * Optimization: gobbles an entire sequence of ordinary characters so that
- * it can turn them into a single node, which is smaller to store and
- * faster to run. Backslashed characters are exceptions, each becoming a
- * separate node; the code is simpler that way and it's not worth fixing.
- */
-static char *
-regatom( int *flagp )
-{
- register char *ret;
- int flags;
-
- *flagp = WORST; /* Tentatively. */
-
- switch (*regparse++) {
- /* FIXME: these chars only have meaning at beg/end of pat? */
- case '^':
- ret = regnode(BOL);
- break;
- case '$':
- ret = regnode(EOL);
- break;
- case '.':
- ret = regnode(ANY);
- *flagp |= HASWIDTH|SIMPLE;
- break;
- case '[': {
- register int classr;
- register int classend;
-
- if (*regparse == '^') { /* Complement of range. */
- ret = regnode(ANYBUT);
- regparse++;
- } else
- ret = regnode(ANYOF);
- if (*regparse == ']' || *regparse == '-')
- regc(*regparse++);
- while (*regparse != '\0' && *regparse != ']') {
- if (*regparse == '-') {
- regparse++;
- if (*regparse == ']' || *regparse == '\0')
- regc('-');
- else {
- classr = UCHARAT(regparse-2)+1;
- classend = UCHARAT(regparse);
- if (classr > classend+1)
- FAIL("invalid [] range");
- for (; classr <= classend; classr++)
- regc(classr);
- regparse++;
- }
- } else
- regc(*regparse++);
- }
- regc('\0');
- if (*regparse != ']')
- FAIL("unmatched []");
- regparse++;
- *flagp |= HASWIDTH|SIMPLE;
- }
- break;
- case '(':
- ret = reg(1, &flags);
- if (ret == NULL)
- return(NULL);
- *flagp |= flags&(HASWIDTH|SPSTART);
- break;
- case '\0':
- case '|':
- case '\n':
- case ')':
- FAIL("internal urp"); /* Supposed to be caught earlier. */
- break;
- case '?':
- case '+':
- case '*':
- FAIL("?+* follows nothing");
- break;
- case '\\':
- switch (*regparse++) {
- case '\0':
- FAIL("trailing \\");
- break;
- case '<':
- ret = regnode(WORDA);
- break;
- case '>':
- ret = regnode(WORDZ);
- break;
- /* FIXME: Someday handle \1, \2, ... */
- default:
- /* Handle general quoted chars in exact-match routine */
- goto de_fault;
- }
- break;
- de_fault:
- default:
- /*
- * Encode a string of characters to be matched exactly.
- *
- * This is a bit tricky due to quoted chars and due to
- * '*', '+', and '?' taking the SINGLE char previous
- * as their operand.
- *
- * On entry, the char at regparse[-1] is going to go
- * into the string, no matter what it is. (It could be
- * following a \ if we are entered from the '\' case.)
- *
- * Basic idea is to pick up a good char in ch and
- * examine the next char. If it's *+? then we twiddle.
- * If it's \ then we frozzle. If it's other magic char
- * we push ch and terminate the string. If none of the
- * above, we push ch on the string and go around again.
- *
- * regprev is used to remember where "the current char"
- * starts in the string, if due to a *+? we need to back
- * up and put the current char in a separate, 1-char, string.
- * When regprev is NULL, ch is the only char in the
- * string; this is used in *+? handling, and in setting
- * flags |= SIMPLE at the end.
- */
- {
- char *regprev;
- register char ch;
-
- regparse--; /* Look at cur char */
- ret = regnode(EXACTLY);
- for ( regprev = 0 ; ; ) {
- ch = *regparse++; /* Get current char */
- switch (*regparse) { /* look at next one */
-
- default:
- regc(ch); /* Add cur to string */
- break;
-
- case '.': case '[': case '(':
- case ')': case '|': case '\n':
- case '$': case '^':
- case '\0':
- /* FIXME, $ and ^ should not always be magic */
- magic:
- regc(ch); /* dump cur char */
- goto done; /* and we are done */
-
- case '?': case '+': case '*':
- if (!regprev) /* If just ch in str, */
- goto magic; /* use it */
- /* End multi-char string one early */
- regparse = regprev; /* Back up parse */
- goto done;
-
- case '\\':
- regc(ch); /* Cur char OK */
- switch (regparse[1]){ /* Look after \ */
- case '\0':
- case '<':
- case '>':
- /* FIXME: Someday handle \1, \2, ... */
- goto done; /* Not quoted */
- default:
- /* Backup point is \, scan point is after it. */
- regprev = regparse;
- regparse++;
- continue; /* NOT break; */
- }
- }
- regprev = regparse; /* Set backup point */
- }
- done:
- regc('\0');
- *flagp |= HASWIDTH;
- if (!regprev) /* One char? */
- *flagp |= SIMPLE;
- }
- break;
- }
-
- return(ret);
-}
-
-/*
- - regnode - emit a node
- */
-static char * /* Location. */
-regnode( int op )
-{
- register char *ret;
- register char *ptr;
-
- ret = regcode;
- if (ret == &regdummy) {
- regsize += 3;
- return(ret);
- }
-
- ptr = ret;
- *ptr++ = op;
- *ptr++ = '\0'; /* Null "next" pointer. */
- *ptr++ = '\0';
- regcode = ptr;
-
- return(ret);
-}
-
-/*
- - regc - emit (if appropriate) a byte of code
- */
-static void
-regc( int b )
-{
- if (regcode != &regdummy)
- *regcode++ = b;
- else
- regsize++;
-}
-
-/*
- - reginsert - insert an operator in front of already-emitted operand
- *
- * Means relocating the operand.
- */
-static void
-reginsert(
- char op,
- char *opnd )
-{
- register char *src;
- register char *dst;
- register char *place;
-
- if (regcode == &regdummy) {
- regsize += 3;
- return;
- }
-
- src = regcode;
- regcode += 3;
- dst = regcode;
- while (src > opnd)
- *--dst = *--src;
-
- place = opnd; /* Op node, where operand used to be. */
- *place++ = op;
- *place++ = '\0';
- *place++ = '\0';
-}
-
-/*
- - regtail - set the next-pointer at the end of a node chain
- */
-static void
-regtail(
- char *p,
- char *val )
-{
- register char *scan;
- register char *temp;
- register int offset;
-
- if (p == &regdummy)
- return;
-
- /* Find last node. */
- scan = p;
- for (;;) {
- temp = regnext(scan);
- if (temp == NULL)
- break;
- scan = temp;
- }
-
- if (OP(scan) == BACK)
- offset = scan - val;
- else
- offset = val - scan;
- *(scan+1) = (offset>>8)&0377;
- *(scan+2) = offset&0377;
-}
-
-/*
- - regoptail - regtail on operand of first argument; nop if operandless
- */
-
-static void
-regoptail(
- char *p,
- char *val )
-{
- /* "Operandless" and "op != BRANCH" are synonymous in practice. */
- if (p == NULL || p == &regdummy || OP(p) != BRANCH)
- return;
- regtail(OPERAND(p), val);
-}
-
-/*
- * regexec and friends
- */
-
-/*
- * Global work variables for regexec().
- */
-static char *reginput; /* String-input pointer. */
-static char *regbol; /* Beginning of input, for ^ check. */
-static char **regstartp; /* Pointer to startp array. */
-static char **regendp; /* Ditto for endp. */
-
-/*
- * Forwards.
- */
-STATIC int regtry( regexp *prog, char *string );
-STATIC int regmatch( char *prog );
-STATIC int regrepeat( char *p );
-
-#ifdef DEBUG
-int regnarrate = 0;
-void regdump();
-STATIC char *regprop();
-#endif
-
-/*
- - regexec - match a regexp against a string
- */
-int
-regexec(
- register regexp *prog,
- register char *string )
-{
- register char *s;
-
- /* Be paranoid... */
- if (prog == NULL || string == NULL) {
- regerror("NULL parameter");
- return(0);
- }
-
- /* Check validity of program. */
- if (UCHARAT(prog->program) != MAGIC) {
- regerror("corrupted program");
- return(0);
- }
-
- /* If there is a "must appear" string, look for it. */
- if ( prog->regmust != NULL )
- {
- s = (char *)string;
- while ( ( s = strchr( s, prog->regmust[ 0 ] ) ) != NULL )
- {
- if ( !strncmp( s, prog->regmust, prog->regmlen ) )
- break; /* Found it. */
- ++s;
- }
- if ( s == NULL ) /* Not present. */
- return 0;
- }
-
- /* Mark beginning of line for ^ . */
- regbol = (char *)string;
-
- /* Simplest case: anchored match need be tried only once. */
- if ( prog->reganch )
- return regtry( prog, string );
-
- /* Messy cases: unanchored match. */
- s = (char *)string;
- if (prog->regstart != '\0')
- /* We know what char it must start with. */
- while ((s = strchr(s, prog->regstart)) != NULL) {
- if (regtry(prog, s))
- return(1);
- s++;
- }
- else
- /* We do not -- general case. */
- do {
- if ( regtry( prog, s ) )
- return( 1 );
- } while ( *s++ != '\0' );
-
- /* Failure. */
- return 0;
-}
-
-
-/*
- * regtry() - try match at specific point.
- */
-
-static int /* 0 failure, 1 success */
-regtry(
- regexp *prog,
- char *string )
-{
- register int i;
- register char * * sp;
- register char * * ep;
-
- reginput = string;
- regstartp = prog->startp;
- regendp = prog->endp;
-
- sp = prog->startp;
- ep = prog->endp;
- for ( i = NSUBEXP; i > 0; --i )
- {
- *sp++ = NULL;
- *ep++ = NULL;
- }
- if ( regmatch( prog->program + 1 ) )
- {
- prog->startp[ 0 ] = string;
- prog->endp[ 0 ] = reginput;
- return 1;
- }
- else
- return 0;
-}
-
-
-/*
- * regmatch() - main matching routine.
- *
- * Conceptually the strategy is simple: check to see whether the current node
- * matches, call self recursively to see whether the rest matches, and then act
- * accordingly. In practice we make some effort to avoid recursion, in
- * particular by going through "ordinary" nodes (that do not need to know
- * whether the rest of the match failed) by a loop instead of by recursion.
- */
-
-static int /* 0 failure, 1 success */
-regmatch( char * prog )
-{
- char * scan; /* Current node. */
- char * next; /* Next node. */
-
- scan = prog;
-#ifdef DEBUG
- if (scan != NULL && regnarrate)
- fprintf(stderr, "%s(\n", regprop(scan));
-#endif
- while (scan != NULL) {
-#ifdef DEBUG
- if (regnarrate)
- fprintf(stderr, "%s...\n", regprop(scan));
-#endif
- next = regnext(scan);
-
- switch (OP(scan)) {
- case BOL:
- if (reginput != regbol)
- return(0);
- break;
- case EOL:
- if (*reginput != '\0')
- return(0);
- break;
- case WORDA:
- /* Must be looking at a letter, digit, or _ */
- if ((!isalnum(*reginput)) && *reginput != '_')
- return(0);
- /* Prev must be BOL or nonword */
- if (reginput > regbol &&
- (isalnum(reginput[-1]) || reginput[-1] == '_'))
- return(0);
- break;
- case WORDZ:
- /* Must not be looking at a letter, digit, or _ */
- if (isalnum(*reginput) || *reginput == '_')
- return(0);
- /* We don't care what the previous char was */
- break;
- case ANY:
- if (*reginput == '\0')
- return(0);
- reginput++;
- break;
- case EXACTLY: {
- register int len;
- register char *opnd;
-
- opnd = OPERAND(scan);
- /* Inline the first character, for speed. */
- if (*opnd != *reginput)
- return(0);
- len = strlen(opnd);
- if (len > 1 && strncmp(opnd, reginput, len) != 0)
- return(0);
- reginput += len;
- }
- break;
- case ANYOF:
- if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) == NULL)
- return(0);
- reginput++;
- break;
- case ANYBUT:
- if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) != NULL)
- return(0);
- reginput++;
- break;
- case NOTHING:
- break;
- case BACK:
- break;
- case OPEN+1:
- case OPEN+2:
- case OPEN+3:
- case OPEN+4:
- case OPEN+5:
- case OPEN+6:
- case OPEN+7:
- case OPEN+8:
- case OPEN+9: {
- register int no;
- register char *save;
-
- no = OP(scan) - OPEN;
- save = reginput;
-
- if (regmatch(next)) {
- /*
- * Don't set startp if some later
- * invocation of the same parentheses
- * already has.
- */
- if (regstartp[no] == NULL)
- regstartp[no] = save;
- return(1);
- } else
- return(0);
- }
- break;
- case CLOSE+1:
- case CLOSE+2:
- case CLOSE+3:
- case CLOSE+4:
- case CLOSE+5:
- case CLOSE+6:
- case CLOSE+7:
- case CLOSE+8:
- case CLOSE+9: {
- register int no;
- register char *save;
-
- no = OP(scan) - CLOSE;
- save = reginput;
-
- if (regmatch(next)) {
- /*
- * Don't set endp if some later
- * invocation of the same parentheses
- * already has.
- */
- if (regendp[no] == NULL)
- regendp[no] = save;
- return(1);
- } else
- return(0);
- }
- break;
- case BRANCH: {
- register char *save;
-
- if (OP(next) != BRANCH) /* No choice. */
- next = OPERAND(scan); /* Avoid recursion. */
- else {
- do {
- save = reginput;
- if (regmatch(OPERAND(scan)))
- return(1);
- reginput = save;
- scan = regnext(scan);
- } while (scan != NULL && OP(scan) == BRANCH);
- return(0);
- /* NOTREACHED */
- }
- }
- break;
- case STAR:
- case PLUS: {
- register char nextch;
- register int no;
- register char *save;
- register int min;
-
- /*
- * Lookahead to avoid useless match attempts
- * when we know what character comes next.
- */
- nextch = '\0';
- if (OP(next) == EXACTLY)
- nextch = *OPERAND(next);
- min = (OP(scan) == STAR) ? 0 : 1;
- save = reginput;
- no = regrepeat(OPERAND(scan));
- while (no >= min) {
- /* If it could work, try it. */
- if (nextch == '\0' || *reginput == nextch)
- if (regmatch(next))
- return(1);
- /* Couldn't or didn't -- back up. */
- no--;
- reginput = save + no;
- }
- return(0);
- }
- break;
- case END:
- return(1); /* Success! */
- break;
- default:
- regerror("memory corruption");
- return(0);
- break;
- }
-
- scan = next;
- }
-
- /*
- * We get here only if there's trouble -- normally "case END" is
- * the terminating point.
- */
- regerror("corrupted pointers");
- return(0);
-}
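The STAR/PLUS case inside regmatch() above is the backtracking core: take as many repetitions as regrepeat() will give, then retreat one at a time until the rest of the pattern fits. A toy version of that loop for the fixed pattern "a*b" (illustrative only, not the engine's code):

    #include <stdio.h>

    /* Match "a*b" at the start of s: greedily consume 'a's, then back off
     * one at a time until a 'b' can follow, mirroring the STAR logic above. */
    static int match_astar_b(const char *s)
    {
        size_t no = 0;

        while (s[no] == 'a')       /* regrepeat(): count the repetitions */
            ++no;

        for (;;) {                 /* back off until the tail matches */
            if (s[no] == 'b')
                return 1;
            if (no == 0)
                return 0;
            --no;
        }
    }

    int main(void)
    {
        printf("%d %d %d\n",
               match_astar_b("aaab"),   /* 1 */
               match_astar_b("b"),      /* 1: zero repetitions allowed */
               match_astar_b("aaac"));  /* 0 */
        return 0;
    }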
-
-/*
- - regrepeat - repeatedly match something simple, report how many
- */
-static int
-regrepeat( char *p )
-{
- register int count = 0;
- register char *scan;
- register char *opnd;
-
- scan = reginput;
- opnd = OPERAND(p);
- switch (OP(p)) {
- case ANY:
- count = strlen(scan);
- scan += count;
- break;
- case EXACTLY:
- while (*opnd == *scan) {
- count++;
- scan++;
- }
- break;
- case ANYOF:
- while (*scan != '\0' && strchr(opnd, *scan) != NULL) {
- count++;
- scan++;
- }
- break;
- case ANYBUT:
- while (*scan != '\0' && strchr(opnd, *scan) == NULL) {
- count++;
- scan++;
- }
- break;
- default: /* Oh dear. Called inappropriately. */
- regerror("internal foulup");
- count = 0; /* Best compromise. */
- break;
- }
- reginput = scan;
-
- return(count);
-}
-
-/*
- - regnext - dig the "next" pointer out of a node
- */
-static char *
-regnext( register char *p )
-{
- register int offset;
-
- if (p == &regdummy)
- return(NULL);
-
- offset = NEXT(p);
- if (offset == 0)
- return(NULL);
-
- if (OP(p) == BACK)
- return(p-offset);
- else
- return(p+offset);
-}
-
-#ifdef DEBUG
-
-STATIC char *regprop();
-
-/*
- - regdump - dump a regexp onto stdout in vaguely comprehensible form
- */
-void
-regdump( regexp *r )
-{
- register char *s;
- register char op = EXACTLY; /* Arbitrary non-END op. */
- register char *next;
-
-
- s = r->program + 1;
- while (op != END) { /* While that wasn't END last time... */
- op = OP(s);
- printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */
- next = regnext(s);
- if (next == NULL) /* Next ptr. */
- printf("(0)");
- else
- printf("(%d)", (s-r->program)+(next-s));
- s += 3;
- if (op == ANYOF || op == ANYBUT || op == EXACTLY) {
- /* Literal string, where present. */
- while (*s != '\0') {
- putchar(*s);
- s++;
- }
- s++;
- }
- putchar('\n');
- }
-
- /* Header fields of interest. */
- if (r->regstart != '\0')
- printf("start `%c' ", r->regstart);
- if (r->reganch)
- printf("anchored ");
- if (r->regmust != NULL)
- printf("must have \"%s\"", r->regmust);
- printf("\n");
-}
-
-/*
- - regprop - printable representation of opcode
- */
-static char *
-regprop( char *op )
-{
- register char *p;
- static char buf[50];
-
- (void) strcpy(buf, ":");
-
- switch (OP(op)) {
- case BOL:
- p = "BOL";
- break;
- case EOL:
- p = "EOL";
- break;
- case ANY:
- p = "ANY";
- break;
- case ANYOF:
- p = "ANYOF";
- break;
- case ANYBUT:
- p = "ANYBUT";
- break;
- case BRANCH:
- p = "BRANCH";
- break;
- case EXACTLY:
- p = "EXACTLY";
- break;
- case NOTHING:
- p = "NOTHING";
- break;
- case BACK:
- p = "BACK";
- break;
- case END:
- p = "END";
- break;
- case OPEN+1:
- case OPEN+2:
- case OPEN+3:
- case OPEN+4:
- case OPEN+5:
- case OPEN+6:
- case OPEN+7:
- case OPEN+8:
- case OPEN+9:
- sprintf(buf+strlen(buf), "OPEN%d", OP(op)-OPEN);
- p = NULL;
- break;
- case CLOSE+1:
- case CLOSE+2:
- case CLOSE+3:
- case CLOSE+4:
- case CLOSE+5:
- case CLOSE+6:
- case CLOSE+7:
- case CLOSE+8:
- case CLOSE+9:
- sprintf(buf+strlen(buf), "CLOSE%d", OP(op)-CLOSE);
- p = NULL;
- break;
- case STAR:
- p = "STAR";
- break;
- case PLUS:
- p = "PLUS";
- break;
- case WORDA:
- p = "WORDA";
- break;
- case WORDZ:
- p = "WORDZ";
- break;
- default:
- regerror("corrupted opcode");
- break;
- }
- if (p != NULL)
- (void) strcat(buf, p);
- return(buf);
-}
-#endif
-
-/*
- * The following is provided for those people who do not have strcspn() in
- * their C libraries. They should get off their butts and do something
- * about it; at least one public-domain implementation of those (highly
- * useful) string routines has been published on Usenet.
- */
-#ifdef STRCSPN
-/*
- * strcspn - find length of initial segment of s1 consisting entirely
- * of characters not from s2
- */
-
-static int
-strcspn(
- char *s1,
- char *s2 )
-{
- register char *scan1;
- register char *scan2;
- register int count;
-
- count = 0;
- for (scan1 = s1; *scan1 != '\0'; scan1++) {
- for (scan2 = s2; *scan2 != '\0';) /* ++ moved down. */
- if (*scan1 == *scan2++)
- return(count);
- count++;
- }
- return(count);
-}
-#endif
diff --git a/jam-files/engine/regexp.h b/jam-files/engine/regexp.h
deleted file mode 100644
index 9d4604f6..00000000
--- a/jam-files/engine/regexp.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Definitions etc. for regexp(3) routines.
- *
- * Caveat: this is V8 regexp(3) [actually, a reimplementation thereof],
- * not the System V one.
- */
-#ifndef REGEXP_DWA20011023_H
-# define REGEXP_DWA20011023_H
-
-#define NSUBEXP 10
-typedef struct regexp {
- char *startp[NSUBEXP];
- char *endp[NSUBEXP];
- char regstart; /* Internal use only. */
- char reganch; /* Internal use only. */
- char *regmust; /* Internal use only. */
- int regmlen; /* Internal use only. */
- char program[1]; /* Unwarranted chumminess with compiler. */
-} regexp;
-
-regexp *regcomp( char *exp );
-int regexec( regexp *prog, char *string );
-void regerror( char *s );
-
-/*
- * The first byte of the regexp internal "program" is actually this magic
- * number; the start node begins in the second byte.
- */
-#define MAGIC 0234
-
-#endif
-
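Together with regexp.c above, this header is the engine's entire public surface: regcomp(), regexec(), the startp/endp capture arrays, and a regerror() hook that the embedding program must define itself. A hypothetical caller, assuming this header and a linked regexp.c, might look like the sketch below; the pattern and input strings are made up.

    #include <stdio.h>
    #include <stdlib.h>
    #include "regexp.h"   /* the header shown above */

    /* regexp.h declares regerror() but leaves its definition to the host. */
    void regerror(char *s)
    {
        fprintf(stderr, "regexp error: %s\n", s);
    }

    int main(void)
    {
        regexp *re = regcomp("(foo|bar)[0-9]");
        if (!re)
            return 1;

        if (regexec(re, "xxbar7yy")) {
            /* startp[0]/endp[0] bound the whole match; 1..NSUBEXP-1 are groups. */
            printf("match:   %.*s\n",
                   (int)(re->endp[0] - re->startp[0]), re->startp[0]);
            printf("group 1: %.*s\n",
                   (int)(re->endp[1] - re->startp[1]), re->startp[1]);
        }

        /* The block came from BJAM_MALLOC(); free() matches only when that is
         * plain malloc, which is the engine's default configuration. */
        free(re);
        return 0;
    }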
diff --git a/jam-files/engine/rules.c b/jam-files/engine/rules.c
deleted file mode 100644
index a0be1d34..00000000
--- a/jam-files/engine/rules.c
+++ /dev/null
@@ -1,810 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "variable.h"
-# include "rules.h"
-# include "newstr.h"
-# include "hash.h"
-# include "modules.h"
-# include "search.h"
-# include "lists.h"
-# include "pathsys.h"
-# include "timestamp.h"
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * rules.c - access to RULEs, TARGETs, and ACTIONs
- *
- * External routines:
- *
- * bindrule() - return pointer to RULE, creating it if necessary.
- * bindtarget() - return pointer to TARGET, creating it if necessary.
- * touch_target() - mark a target to simulate being new.
- * targetlist() - turn list of target names into a TARGET chain.
- * targetentry() - add a TARGET to a chain of TARGETS.
- * actionlist() - append to an ACTION chain.
- * addsettings() - add a deferred "set" command to a target.
- * pushsettings() - set all target specific variables.
- * popsettings() - reset target specific variables to their pre-push values.
- * freesettings() - delete a settings list.
- * rules_done() - free RULE and TARGET tables.
- *
- * 04/12/94 (seiwald) - actionlist() now just appends a single action.
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- */
-
-static void set_rule_actions( RULE *, rule_actions * );
-static void set_rule_body ( RULE *, argument_list *, PARSE * procedure );
-
-static struct hash * targethash = 0;
-
-struct _located_target
-{
- char * file_name;
- TARGET * target;
-};
-typedef struct _located_target LOCATED_TARGET ;
-
-static struct hash * located_targets = 0;
-
-
-/*
- * target_include() - adds the 'included' TARGET to the list of targets included
- * by the 'including' TARGET. Such targets are modeled as dependencies of the
- * internal include node belonging to the 'including' TARGET.
- */
-
-void target_include( TARGET * including, TARGET * included )
-{
- TARGET * internal;
- if ( !including->includes )
- {
- including->includes = copytarget( including );
- including->includes->original_target = including;
- }
- internal = including->includes;
- internal->depends = targetentry( internal->depends, included );
-}
-
-
-/*
- * enter_rule() - return pointer to RULE, creating it if necessary in
- * target_module.
- */
-
-static RULE * enter_rule( char * rulename, module_t * target_module )
-{
- RULE rule;
- RULE * r = &rule;
-
- r->name = rulename;
-
- if ( hashenter( demand_rules( target_module ), (HASHDATA * *)&r ) )
- {
- r->name = newstr( rulename ); /* never freed */
- r->procedure = (PARSE *)0;
- r->module = 0;
- r->actions = 0;
- r->arguments = 0;
- r->exported = 0;
- r->module = target_module;
-#ifdef HAVE_PYTHON
- r->python_function = 0;
-#endif
- }
- return r;
-}
-
-
-/*
- * define_rule() - return pointer to RULE, creating it if necessary in
- * target_module. Prepare it to accept a body or action originating in
- * src_module.
- */
-
-static RULE * define_rule
-(
- module_t * src_module,
- char * rulename,
- module_t * target_module
-)
-{
- RULE * r = enter_rule( rulename, target_module );
- if ( r->module != src_module ) /* if the rule was imported from elsewhere, clear it now */
- {
- set_rule_body( r, 0, 0 );
- set_rule_actions( r, 0 );
- r->module = src_module; /* r will be executed in the source module */
- }
- return r;
-}
-
-
-void rule_free( RULE * r )
-{
- freestr( r->name );
- r->name = "";
- parse_free( r->procedure );
- r->procedure = 0;
- if ( r->arguments )
- args_free( r->arguments );
- r->arguments = 0;
- if ( r->actions )
- actions_free( r->actions );
- r->actions = 0;
-}
-
-
-/*
- * bindtarget() - return pointer to TARGET, creating it if necessary.
- */
-
-TARGET * bindtarget( char const * target_name )
-{
- TARGET target;
- TARGET * t = &target;
-
- if ( !targethash )
- targethash = hashinit( sizeof( TARGET ), "targets" );
-
- /* Perforce added const everywhere. No time to merge that change. */
-#ifdef NT
- target_name = short_path_to_long_path( (char *)target_name );
-#endif
- t->name = (char *)target_name;
-
- if ( hashenter( targethash, (HASHDATA * *)&t ) )
- {
- memset( (char *)t, '\0', sizeof( *t ) );
- t->name = newstr( (char *)target_name ); /* never freed */
- t->boundname = t->name; /* default for T_FLAG_NOTFILE */
- }
-
- return t;
-}
-
-
-static void bind_explicitly_located_target( void * xtarget, void * data )
-{
- TARGET * t = (TARGET *)xtarget;
- if ( !( t->flags & T_FLAG_NOTFILE ) )
- {
- /* Check if there's a setting for LOCATE */
- SETTINGS * s = t->settings;
- for ( ; s ; s = s->next )
- {
- if ( strcmp( s->symbol, "LOCATE" ) == 0 )
- {
- pushsettings( t->settings );
- /* We are binding a target with an explicit LOCATE, so the third
- * argument is of no use: nothing will be returned through it.
- */
- t->boundname = search( t->name, &t->time, 0, 0 );
- popsettings( t->settings );
- break;
- }
- }
- }
-}
-
-
-void bind_explicitly_located_targets()
-{
- if ( targethash )
- hashenumerate( targethash, bind_explicitly_located_target, (void *)0 );
-}
-
-
-/* TODO: It is probably not a good idea to use functions in other modules like
- this. */
-void call_bind_rule( char * target, char * boundname );
-
-
-TARGET * search_for_target ( char * name, LIST * search_path )
-{
- PATHNAME f[1];
- string buf[1];
- LOCATED_TARGET lt;
- LOCATED_TARGET * lta = &lt;
- time_t time;
- int found = 0;
- TARGET * result;
-
- string_new( buf );
-
- path_parse( name, f );
-
- f->f_grist.ptr = 0;
- f->f_grist.len = 0;
-
- while ( search_path )
- {
- f->f_root.ptr = search_path->string;
- f->f_root.len = strlen( search_path->string );
-
- string_truncate( buf, 0 );
- path_build( f, buf, 1 );
-
- lt.file_name = buf->value ;
-
- if ( !located_targets )
- located_targets = hashinit( sizeof(LOCATED_TARGET),
- "located targets" );
-
- if ( hashcheck( located_targets, (HASHDATA * *)&lta ) )
- {
- return lta->target;
- }
-
- timestamp( buf->value, &time );
- if ( time )
- {
- found = 1;
- break;
- }
-
- search_path = list_next( search_path );
- }
-
- if ( !found )
- {
- f->f_root.ptr = 0;
- f->f_root.len = 0;
-
- string_truncate( buf, 0 );
- path_build( f, buf, 1 );
-
- timestamp( buf->value, &time );
- }
-
- result = bindtarget( name );
- result->boundname = newstr( buf->value );
- result->time = time;
- result->binding = time ? T_BIND_EXISTS : T_BIND_MISSING;
-
- call_bind_rule( result->name, result->boundname );
-
- string_free( buf );
-
- return result;
-}
-
-
-/*
- * copytarget() - make a new target with the old target's name.
- *
- * Not entered into hash table -- for internal nodes.
- */
-
-TARGET * copytarget( const TARGET * ot )
-{
- TARGET * t = (TARGET *)BJAM_MALLOC( sizeof( *t ) );
- memset( (char *)t, '\0', sizeof( *t ) );
- t->name = copystr( ot->name );
- t->boundname = t->name;
-
- t->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL;
-
- return t;
-}
-
-
-/*
- * touch_target() - mark a target to simulate being new.
- */
-
-void touch_target( char * t )
-{
- bindtarget( t )->flags |= T_FLAG_TOUCHED;
-}
-
-
-/*
- * targetlist() - turn list of target names into a TARGET chain.
- *
- * Inputs:
- * chain existing TARGETS to append to
- * targets list of target names
- */
-
-TARGETS * targetlist( TARGETS * chain, LIST * target_names )
-{
- for ( ; target_names; target_names = list_next( target_names ) )
- chain = targetentry( chain, bindtarget( target_names->string ) );
- return chain;
-}
-
-
-/*
- * targetentry() - add a TARGET to a chain of TARGETS.
- *
- * Inputs:
- * chain existing TARGETS to append to
- * target new target to append
- */
-
-TARGETS * targetentry( TARGETS * chain, TARGET * target )
-{
- TARGETS * c = (TARGETS *)BJAM_MALLOC( sizeof( TARGETS ) );
- c->target = target;
-
- if ( !chain ) chain = c;
- else chain->tail->next = c;
- chain->tail = c;
- c->next = 0;
-
- return chain;
-}
-
-
-/*
- * targetchain() - append two TARGET chains.
- *
- * Inputs:
- * chain existing TARGETS to append to
- * targets TARGETS chain to append
- */
-
-TARGETS * targetchain( TARGETS * chain, TARGETS * targets )
-{
- if ( !targets ) return chain;
- if ( !chain ) return targets;
-
- chain->tail->next = targets;
- chain->tail = targets->tail;
-
- return chain;
-}
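targetentry() and targetchain() above rely on a small idiom used throughout the engine: chains are singly linked, but the head node also carries a tail pointer so every append is O(1). The same idiom in isolation, using a hypothetical node type instead of TARGETS:

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct node {
        struct node *next;
        struct node *tail;   /* valid only for the head, as in TARGETS/ACTIONS */
        int value;
    } node;

    /* Append one element; mirrors targetentry(). */
    static node *entry(node *chain, int value)
    {
        node *c = malloc(sizeof(node));
        c->value = value;
        c->next = NULL;

        if (!chain)
            chain = c;               /* first element becomes the head */
        else
            chain->tail->next = c;   /* O(1) append via the head's tail pointer */
        chain->tail = c;
        return chain;
    }

    int main(void)
    {
        node *chain = NULL, *n;
        int i;

        for (i = 1; i <= 4; ++i)
            chain = entry(chain, i);

        for (n = chain; n; n = n->next)
            printf("%d ", n->value);   /* prints: 1 2 3 4 */
        printf("\n");
        return 0;
    }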
-
-/*
- * actionlist() - append to an ACTION chain.
- */
-
-ACTIONS * actionlist( ACTIONS * chain, ACTION * action )
-{
- ACTIONS * actions = (ACTIONS *)BJAM_MALLOC( sizeof( ACTIONS ) );
-
- actions->action = action;
-
- if ( !chain ) chain = actions;
- else chain->tail->next = actions;
- chain->tail = actions;
- actions->next = 0;
-
- return chain;
-}
-
-static SETTINGS * settings_freelist;
-
-
-/*
- * addsettings() - add a deferred "set" command to a target.
- *
- * Adds a variable setting (varname=list) onto a chain of settings for a
- * particular target. 'flag' controls the relationship between new and old
- * values in the same way as in var_set() function (see variable.c). Returns
- * the head of the settings chain.
- */
-
-SETTINGS * addsettings( SETTINGS * head, int flag, char * symbol, LIST * value )
-{
- SETTINGS * v;
-
- /* Look for previous settings. */
- for ( v = head; v; v = v->next )
- if ( !strcmp( v->symbol, symbol ) )
- break;
-
- /* If not previously set, alloc a new. */
- /* If appending, do so. */
- /* Else free old and set new. */
- if ( !v )
- {
- v = settings_freelist;
-
- if ( v )
- settings_freelist = v->next;
- else
- v = (SETTINGS *)BJAM_MALLOC( sizeof( *v ) );
-
- v->symbol = newstr( symbol );
- v->value = value;
- v->next = head;
- v->multiple = 0;
- head = v;
- }
- else if ( flag == VAR_APPEND )
- {
- v->value = list_append( v->value, value );
- }
- else if ( flag != VAR_DEFAULT )
- {
- list_free( v->value );
- v->value = value;
- }
- else
- list_free( value );
-
- /* Return (new) head of list. */
- return head;
-}
-
-
-/*
- * pushsettings() - set all target specific variables.
- */
-
-void pushsettings( SETTINGS * v )
-{
- for ( ; v; v = v->next )
- v->value = var_swap( v->symbol, v->value );
-}
-
-
-/*
- * popsettings() - reset target specific variables to their pre-push values.
- */
-
-void popsettings( SETTINGS * v )
-{
- pushsettings( v ); /* just swap again */
-}
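pushsettings() and popsettings() above exploit a neat symmetry: pushing swaps each saved value with the live variable, and popping is literally the same swap run a second time. A tiny standalone sketch of that save/restore trick, with one fake variable standing in for var_swap():

    #include <stdio.h>

    /* One "variable" and one pending target-specific setting for it. */
    static const char *live_value = "global";

    struct setting {
        const char *value;   /* value to push; holds the saved value while pushed */
    };

    /* Swap the live value with the stored one. This is the whole trick behind
     * pushsettings()/popsettings(): calling it twice restores the original. */
    static void swap(struct setting *s)
    {
        const char *old = live_value;
        live_value = s->value;
        s->value = old;
    }

    int main(void)
    {
        struct setting s = { "target-specific" };

        printf("before push: %s\n", live_value);   /* global */
        swap(&s);                                  /* pushsettings() */
        printf("while built: %s\n", live_value);   /* target-specific */
        swap(&s);                                  /* popsettings(): same swap again */
        printf("after pop:   %s\n", live_value);   /* global */
        return 0;
    }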
-
-
-/*
- * copysettings() - duplicate a settings list, returning the new copy.
- */
-
-SETTINGS * copysettings( SETTINGS * head )
-{
- SETTINGS * copy = 0;
- SETTINGS * v;
- for ( v = head; v; v = v->next )
- copy = addsettings( copy, VAR_SET, v->symbol, list_copy( 0, v->value ) );
- return copy;
-}
-
-
-/*
- * freetargets() - delete a targets list.
- */
-
-void freetargets( TARGETS * chain )
-{
- while ( chain )
- {
- TARGETS * n = chain->next;
- BJAM_FREE( chain );
- chain = n;
- }
-}
-
-
-/*
- * freeactions() - delete an action list.
- */
-
-void freeactions( ACTIONS * chain )
-{
- while ( chain )
- {
- ACTIONS * n = chain->next;
- BJAM_FREE( chain );
- chain = n;
- }
-}
-
-
-/*
- * freesettings() - delete a settings list.
- */
-
-void freesettings( SETTINGS * v )
-{
- while ( v )
- {
- SETTINGS * n = v->next;
- freestr( v->symbol );
- list_free( v->value );
- v->next = settings_freelist;
- settings_freelist = v;
- v = n;
- }
-}
-
-
-static void freetarget( void * xt, void * data )
-{
- TARGET * t = (TARGET *)xt;
- if ( t->settings ) freesettings( t->settings );
- if ( t->depends ) freetargets ( t->depends );
- if ( t->includes ) freetarget ( t->includes, (void *)0 );
- if ( t->actions ) freeactions ( t->actions );
-}
-
-
-/*
- * rules_done() - free RULE and TARGET tables.
- */
-
-void rules_done()
-{
- hashenumerate( targethash, freetarget, 0 );
- hashdone( targethash );
- while ( settings_freelist )
- {
- SETTINGS * n = settings_freelist->next;
- BJAM_FREE( settings_freelist );
- settings_freelist = n;
- }
-}
-
-
-/*
- * args_new() - make a new reference-counted argument list.
- */
-
-argument_list * args_new()
-{
- argument_list * r = (argument_list *)BJAM_MALLOC( sizeof(argument_list) );
- r->reference_count = 0;
- lol_init( r->data );
- return r;
-}
-
-
-/*
- * args_refer() - add a new reference to the given argument list.
- */
-
-void args_refer( argument_list * a )
-{
- ++a->reference_count;
-}
-
-
-/*
- * args_free() - release a reference to the given argument list.
- */
-
-void args_free( argument_list * a )
-{
- if ( --a->reference_count <= 0 )
- {
- lol_free( a->data );
- BJAM_FREE( a );
- }
-}
-
-
-/*
- * actions_refer() - add a new reference to the given actions.
- */
-
-void actions_refer( rule_actions * a )
-{
- ++a->reference_count;
-}
-
-
-/*
- * actions_free() - release a reference to the given actions.
- */
-
-void actions_free( rule_actions * a )
-{
- if ( --a->reference_count <= 0 )
- {
- freestr( a->command );
- list_free( a->bindlist );
- BJAM_FREE( a );
- }
-}
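These refer/free pairs exist because new_rule_actions() below hangs the same rule_actions block off both the local and the global rule entry; the reference count keeps the shared block alive until its last owner releases it. The pattern in isolation (hypothetical names):

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct shared {
        int reference_count;
        const char *payload;
    } shared;

    static shared *shared_new(const char *payload)
    {
        shared *s = malloc(sizeof(*s));
        s->reference_count = 0;   /* as in actions_new(): first owner calls refer */
        s->payload = payload;
        return s;
    }

    static void shared_refer(shared *s) { ++s->reference_count; }

    static void shared_release(shared *s)
    {
        if (--s->reference_count <= 0) {
            printf("freeing %s\n", s->payload);
            free(s);
        }
    }

    int main(void)
    {
        shared *s = shared_new("one block, two owners");

        shared_refer(s);     /* local rule takes a reference  */
        shared_refer(s);     /* global rule takes a reference */

        shared_release(s);   /* still alive: count drops to 1  */
        shared_release(s);   /* last owner: "freeing ..." runs */
        return 0;
    }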
-
-
-/*
- * set_rule_body() - set the argument list and procedure of the given rule.
- */
-
-static void set_rule_body( RULE * rule, argument_list * args, PARSE * procedure )
-{
- if ( args )
- args_refer( args );
- if ( rule->arguments )
- args_free( rule->arguments );
- rule->arguments = args;
-
- if ( procedure )
- parse_refer( procedure );
- if ( rule->procedure )
- parse_free( rule->procedure );
- rule->procedure = procedure;
-}
-
-
-/*
- * global_rule_name() - given a rule, return the name for a corresponding rule in the
- * global module.
- */
-
-static char * global_rule_name( RULE * r )
-{
- if ( r->module == root_module() )
- return r->name;
-
- {
- char name[4096] = "";
- strncat( name, r->module->name, sizeof( name ) - 1 );
- strncat( name, r->name, sizeof( name ) - 1 );
- return newstr( name);
- }
-}
-
-
-/*
- * global_rule() - given a rule, produce the corresponding entry in the global
- * module.
- */
-
-static RULE * global_rule( RULE * r )
-{
- if ( r->module == root_module() )
- return r;
-
- {
- char * name = global_rule_name( r );
- RULE * result = define_rule( r->module, name, root_module() );
- freestr( name );
- return result;
- }
-}
-
-
-/*
- * new_rule_body() - make a new rule named rulename in the given module, with
- * the given argument list and procedure. If exported is true, the rule is
- * exported to the global module as modulename.rulename.
- */
-
-RULE * new_rule_body( module_t * m, char * rulename, argument_list * args, PARSE * procedure, int exported )
-{
- RULE * local = define_rule( m, rulename, m );
- local->exported = exported;
- set_rule_body( local, args, procedure );
-
- /* Mark the procedure with the global rule name, regardless of whether the
- * rule is exported. That gives us something reasonably identifiable that we
- * can use, e.g. in profiling output. Only do this once, since this could be
- * called multiple times with the same procedure.
- */
- if ( procedure->rulename == 0 )
- procedure->rulename = global_rule_name( local );
-
- return local;
-}
-
-
-static void set_rule_actions( RULE * rule, rule_actions * actions )
-{
- if ( actions )
- actions_refer( actions );
- if ( rule->actions )
- actions_free( rule->actions );
- rule->actions = actions;
-}
-
-
-static rule_actions * actions_new( char * command, LIST * bindlist, int flags )
-{
- rule_actions * result = (rule_actions *)BJAM_MALLOC( sizeof( rule_actions ) );
- result->command = copystr( command );
- result->bindlist = bindlist;
- result->flags = flags;
- result->reference_count = 0;
- return result;
-}
-
-
-RULE * new_rule_actions( module_t * m, char * rulename, char * command, LIST * bindlist, int flags )
-{
- RULE * local = define_rule( m, rulename, m );
- RULE * global = global_rule( local );
- set_rule_actions( local, actions_new( command, bindlist, flags ) );
- set_rule_actions( global, local->actions );
- return local;
-}
-
-
-/*
- * Looks for a rule in the specified module, and returns it, if found. First
- * checks if the rule is present in the module's rule table. Second, if the name
- * of the rule is in the form name1.name2 and name1 is in the list of imported
- * modules, look in module 'name1' for rule 'name2'.
- */
-
-RULE * lookup_rule( char * rulename, module_t * m, int local_only )
-{
- RULE rule;
- RULE * r = &rule;
- RULE * result = 0;
- module_t * original_module = m;
-
- r->name = rulename;
-
- if ( m->class_module )
- m = m->class_module;
-
- if ( m->rules && hashcheck( m->rules, (HASHDATA * *)&r ) )
- result = r;
- else if ( !local_only && m->imported_modules )
- {
- /* Try splitting the name into module and rule. */
- char *p = strchr( r->name, '.' ) ;
- if ( p )
- {
- *p = '\0';
- /* Now, r->name keeps the module name, and p+1 keeps the rule name.
- */
- if ( hashcheck( m->imported_modules, (HASHDATA * *)&r ) )
- result = lookup_rule( p + 1, bindmodule( rulename ), 1 );
- *p = '.';
- }
- }
-
- if ( result )
- {
- if ( local_only && !result->exported )
- result = 0;
- else
- {
- /* Lookup started in a class module. We have found a rule in the class
- * module, marked for execution either in that module or in one of its
- * instances. Mark it for execution in the instance where we started
- * the lookup.
- */
- int execute_in_class = ( result->module == m );
- int execute_in_some_instance = ( result->module->class_module &&
- ( result->module->class_module == m ) );
- if ( ( original_module != m ) &&
- ( execute_in_class || execute_in_some_instance ) )
- result->module = original_module;
- }
- }
-
- return result;
-}
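lookup_rule() above splits a dotted name in place with a temporary '\0' and restores the '.' afterwards, so it never copies the string. The same trick on its own, with made-up names:

    #include <stdio.h>
    #include <string.h>

    /* Split "module.rule" in place, look at both halves, then restore the dot,
     * the same temporary '\0' trick lookup_rule() uses on r->name. */
    int main(void)
    {
        char name[] = "mymodule.myrule";
        char *p = strchr(name, '.');

        if (p) {
            *p = '\0';
            printf("module part: %s\n", name);    /* mymodule */
            printf("rule part:   %s\n", p + 1);   /* myrule   */
            *p = '.';                             /* restore the original string */
        }
        printf("restored:    %s\n", name);        /* mymodule.myrule */
        return 0;
    }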
-
-
-RULE * bindrule( char * rulename, module_t * m )
-{
- RULE * result = lookup_rule( rulename, m, 0 );
- if ( !result )
- result = lookup_rule( rulename, root_module(), 0 );
- /* We have only one caller, 'evaluate_rule', which will complain about
- * calling an undefined rule. We could issue the error here, but we do not
- * have the necessary information, such as frame.
- */
- if ( !result )
- result = enter_rule( rulename, m );
- return result;
-}
-
-
-RULE * import_rule( RULE * source, module_t * m, char * name )
-{
- RULE * dest = define_rule( source->module, name, m );
- set_rule_body( dest, source->arguments, source->procedure );
- set_rule_actions( dest, source->actions );
- return dest;
-}
diff --git a/jam-files/engine/rules.h b/jam-files/engine/rules.h
deleted file mode 100644
index 806a1469..00000000
--- a/jam-files/engine/rules.h
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#ifndef RULES_DWA_20011020_H
-#define RULES_DWA_20011020_H
-
-#include "modules.h"
-#include "jam.h"
-#include "parse.h"
-
-
-/*
- * rules.h - targets, rules, and related information
- *
- * This file describes the structures holding the targets, rules, and
- * related information accumulated by interpreting the statements
- * of the jam files.
- *
- * The following are defined:
- *
- * RULE - a generic jam rule, the product of RULE and ACTIONS.
- * ACTIONS - a chain of ACTIONs.
- * ACTION - a RULE instance with targets and sources.
- * SETTINGS - variables to set when executing a TARGET's ACTIONS.
- * TARGETS - a chain of TARGETs.
- * TARGET - an entity (e.g. a file) that can be built.
- *
- * 04/11/94 (seiwald) - Combined deps & headers into deps[2] in TARGET.
- * 04/12/94 (seiwald) - actionlist() now just appends a single action.
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 01/19/95 (seiwald) - split DONTKNOW into CANTFIND/CANTMAKE.
- * 02/02/95 (seiwald) - new LEAVES modifier on targets.
- * 02/14/95 (seiwald) - new NOUPDATE modifier on targets.
- */
-
-typedef struct _rule RULE;
-typedef struct _target TARGET;
-typedef struct _targets TARGETS;
-typedef struct _action ACTION;
-typedef struct _actions ACTIONS;
-typedef struct _settings SETTINGS ;
-
-/* RULE - a generic jam rule, the product of RULE and ACTIONS. */
-
-/* A rule's argument list. */
-struct argument_list
-{
- int reference_count;
- LOL data[1];
-};
-
-/* Build actions corresponding to a rule. */
-struct rule_actions
-{
- int reference_count;
- char * command; /* command string from ACTIONS */
- LIST * bindlist;
- int flags; /* modifiers on ACTIONS */
-
-#define RULE_NEWSRCS 0x01 /* $(>) is updated sources only */
-#define RULE_TOGETHER 0x02 /* combine actions on single target */
-#define RULE_IGNORE 0x04 /* ignore return status of executes */
-#define RULE_QUIETLY 0x08 /* do not mention it unless verbose */
-#define RULE_PIECEMEAL 0x10 /* split exec so each $(>) is small */
-#define RULE_EXISTING 0x20 /* $(>) is pre-existing sources only */
-};
-
-typedef struct rule_actions rule_actions;
-typedef struct argument_list argument_list;
-
-struct _rule
-{
- char * name;
- PARSE * procedure; /* parse tree from RULE */
- argument_list * arguments; /* argument checking info, or NULL for unchecked
- */
- rule_actions * actions; /* build actions, or NULL for no actions */
- module_t * module; /* module in which this rule is executed */
- int exported; /* nonzero if this rule is supposed to appear in
- * the global module and be automatically
- * imported into other modules
- */
-#ifdef HAVE_PYTHON
- PyObject * python_function;
-#endif
-};
-
-/* ACTIONS - a chain of ACTIONs. */
-struct _actions
-{
- ACTIONS * next;
- ACTIONS * tail; /* valid only for head */
- ACTION * action;
-};
-
-/* ACTION - a RULE instance with targets and sources. */
-struct _action
-{
- RULE * rule;
- TARGETS * targets;
- TARGETS * sources; /* aka $(>) */
- char running; /* has been started */
- char status; /* see TARGET status */
-};
-
-/* SETTINGS - variables to set when executing a TARGET's ACTIONS. */
-struct _settings
-{
- SETTINGS * next;
- char * symbol; /* symbol name for var_set() */
- LIST * value; /* symbol value for var_set() */
- int multiple;
-};
-
-/* TARGETS - a chain of TARGETs. */
-struct _targets
-{
- TARGETS * next;
- TARGETS * tail; /* valid only for head */
- TARGET * target;
-};
-
-/* TARGET - an entity (e.g. a file) that can be built. */
-struct _target
-{
- char * name;
- char * boundname; /* if search() relocates target */
- ACTIONS * actions; /* rules to execute, if any */
- SETTINGS * settings; /* variables to define */
-
- short flags; /* status info */
-
-#define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */
-#define T_FLAG_NOCARE 0x0002 /* NOCARE applied */
-#define T_FLAG_NOTFILE 0x0004 /* NOTFILE applied */
-#define T_FLAG_TOUCHED 0x0008 /* ALWAYS applied or -t target */
-#define T_FLAG_LEAVES 0x0010 /* LEAVES applied */
-#define T_FLAG_NOUPDATE 0x0020 /* NOUPDATE applied */
-#define T_FLAG_VISITED 0x0040 /* CWM: Used in debugging */
-
-/* This flag has been added to support a new built-in rule named "RMBAD". It is
- * used to force removal of outdated targets whose dependencies fail to build.
- */
-#define T_FLAG_RMOLD 0x0080 /* RMBAD applied */
-
-/* This flag was added to support a new built-in rule named "FAIL_EXPECTED" used
- * to indicate that the result of running a given action should be inverted,
- * i.e. ok <=> fail. This is useful for launching certain test runs from a
- * Jamfile.
- */
-#define T_FLAG_FAIL_EXPECTED 0x0100 /* FAIL_EXPECTED applied */
-
-#define T_FLAG_INTERNAL 0x0200 /* internal INCLUDES node */
-
-/* Indicates that the target must be a file. This prevents matching non-files,
- * like directories, when a target is searched.
- */
-#define T_FLAG_ISFILE 0x0400
-
-#define T_FLAG_PRECIOUS 0x0800
-
- char binding; /* how target relates to a real file or
- * folder
- */
-
-#define T_BIND_UNBOUND 0 /* a disembodied name */
-#define T_BIND_MISSING 1 /* could not find real file */
-#define T_BIND_PARENTS 2 /* using parent's timestamp */
-#define T_BIND_EXISTS 3 /* real file, timestamp valid */
-
- TARGETS * depends; /* dependencies */
- TARGETS * dependants; /* the inverse of dependencies */
- TARGETS * rebuilds; /* targets that should be force-rebuilt
- * whenever this one is
- */
- TARGET * includes; /* internal includes node */
- TARGET * original_target; /* original_target->includes = this */
- char rescanned;
-
- time_t time; /* update time */
- time_t leaf; /* update time of leaf sources */
-
- char fate; /* make0()'s diagnosis */
-
-#define T_FATE_INIT 0 /* nothing done to target */
-#define T_FATE_MAKING 1 /* make0(target) on stack */
-
-#define T_FATE_STABLE 2 /* target did not need updating */
-#define T_FATE_NEWER 3 /* target newer than parent */
-
-#define T_FATE_SPOIL 4 /* >= SPOIL rebuilds parents */
-#define T_FATE_ISTMP 4 /* unneeded temp target oddly present */
-
-#define T_FATE_BUILD 5 /* >= BUILD rebuilds target */
-#define T_FATE_TOUCHED 5 /* manually touched with -t */
-#define T_FATE_REBUILD 6
-#define T_FATE_MISSING 7 /* is missing, needs updating */
-#define T_FATE_NEEDTMP 8 /* missing temp that must be rebuilt */
-#define T_FATE_OUTDATED 9 /* is out of date, needs updating */
-#define T_FATE_UPDATE 10 /* deps updated, needs updating */
-
-#define T_FATE_BROKEN 11 /* >= BROKEN ruins parents */
-#define T_FATE_CANTFIND 11 /* no rules to make missing target */
-#define T_FATE_CANTMAKE 12 /* can not find dependencies */
-
- char progress; /* tracks make1() progress */
-
-#define T_MAKE_INIT 0 /* make1(target) not yet called */
-#define T_MAKE_ONSTACK 1 /* make1(target) on stack */
-#define T_MAKE_ACTIVE 2 /* make1(target) in make1b() */
-#define T_MAKE_RUNNING 3 /* make1(target) running commands */
-#define T_MAKE_DONE 4 /* make1(target) done */
-
-#ifdef OPT_SEMAPHORE
- #define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */
-#endif
-
-#ifdef OPT_SEMAPHORE
- TARGET * semaphore; /* used in serialization */
-#endif
-
- char status; /* exec_cmd() result */
-
- int asynccnt; /* child deps outstanding */
- TARGETS * parents; /* used by make1() for completion */
- char * cmds; /* type-punned command list */
-
- char * failed;
-};
-
-
-/* Action related functions. */
-ACTIONS * actionlist ( ACTIONS *, ACTION * );
-void freeactions ( ACTIONS * );
-SETTINGS * addsettings ( SETTINGS *, int flag, char * symbol, LIST * value );
-void pushsettings ( SETTINGS * );
-void popsettings ( SETTINGS * );
-SETTINGS * copysettings ( SETTINGS * );
-void freesettings ( SETTINGS * );
-void actions_refer( rule_actions * );
-void actions_free ( rule_actions * );
-
-/* Argument list related functions. */
-void args_free ( argument_list * );
-argument_list * args_new ();
-void args_refer( argument_list * );
-
-/* Rule related functions. */
-RULE * bindrule ( char * rulename, module_t * );
-RULE * import_rule ( RULE * source, module_t *, char * name );
-RULE * new_rule_body ( module_t *, char * rulename, argument_list *, PARSE * procedure, int exprt );
-RULE * new_rule_actions( module_t *, char * rulename, char * command, LIST * bindlist, int flags );
-void rule_free ( RULE * );
-
-/* Target related functions. */
-void bind_explicitly_located_targets();
-TARGET * bindtarget ( char const * target_name );
-TARGET * copytarget ( TARGET const * t );
-void freetargets ( TARGETS * );
-TARGET * search_for_target ( char * name, LIST * search_path );
-TARGETS * targetchain ( TARGETS * chain, TARGETS * );
-TARGETS * targetentry ( TARGETS * chain, TARGET * );
-void target_include ( TARGET * including, TARGET * included );
-TARGETS * targetlist ( TARGETS * chain, LIST * target_names );
-void touch_target ( char * t );
-
-/* Final module cleanup. */
-void rules_done();
-
-#endif
diff --git a/jam-files/engine/scan.c b/jam-files/engine/scan.c
deleted file mode 100644
index 11c44c0e..00000000
--- a/jam-files/engine/scan.c
+++ /dev/null
@@ -1,418 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "jamgram.h"
-#include "jambase.h"
-#include "newstr.h"
-
-/*
- * scan.c - the jam yacc scanner
- *
- * 12/26/93 (seiwald) - bump buf in yylex to 10240 - yuk.
- * 09/16/94 (seiwald) - check for overflows, unmatched {}'s, etc.
- * Also handle tokens abutting EOF by remembering
- * to return EOF no matter how many times yylex()
- * reinvokes yyline().
- * 02/11/95 (seiwald) - honor only punctuation keywords if SCAN_PUNCT.
- * 07/27/95 (seiwald) - Include jamgram.h after scan.h, so that YYSTYPE is
- * defined before Linux's yacc tries to redefine it.
- */
-
-struct keyword
-{
- char * word;
- int type;
-} keywords[] =
-{
-#include "jamgramtab.h"
- { 0, 0 }
-};
-
-struct include
-{
- struct include * next; /* next serial include file */
- char * string; /* pointer into current line */
- char * * strings; /* for yyfparse() -- text to parse */
- FILE * file; /* for yyfparse() -- file being read */
- char * fname; /* for yyfparse() -- file name */
- int line; /* line counter for error messages */
- char buf[ 512 ]; /* for yyfparse() -- line buffer */
-};
-
-static struct include * incp = 0; /* current file; head of chain */
-
-static int scanmode = SCAN_NORMAL;
-static int anyerrors = 0;
-
-
-static char * symdump( YYSTYPE * );
-
-#define BIGGEST_TOKEN 10240 /* no single token can be larger */
-
-
-/*
- * Set parser mode: normal, string, or keyword.
- */
-
-void yymode( int n )
-{
- scanmode = n;
-}
-
-
-void yyerror( char * s )
-{
- /* We use yylval instead of incp to access the error location information as
- * the incp pointer will already be reset to 0 in case the error occurred at
- * EOF.
- *
- * The two may differ only if we get an error while reading a lexical token
- * spanning multiple lines, e.g. a multi-line string literal or action body,
- * in which case yylval location information will hold the information about
- * where this token started while incp will hold the information about where
- * reading it broke.
- *
- * TODO: Test the theory about when yylval and incp location information are
- * the same and when they differ.
- */
- printf( "%s:%d: %s at %s\n", yylval.file, yylval.line, s, symdump( &yylval ) );
- ++anyerrors;
-}
-
-
-int yyanyerrors()
-{
- return anyerrors != 0;
-}
-
-
-void yyfparse( char * s )
-{
- struct include * i = (struct include *)BJAM_MALLOC( sizeof( *i ) );
-
- /* Push this onto the incp chain. */
- i->string = "";
- i->strings = 0;
- i->file = 0;
- i->fname = copystr( s );
- i->line = 0;
- i->next = incp;
- incp = i;
-
- /* If the filename is "+", it means use the internal jambase. */
- if ( !strcmp( s, "+" ) )
- i->strings = jambase;
-}
-
-
-/*
- * yyline() - read new line and return first character.
- *
- * Fabricates a continuous stream of characters across include files, returning
- * EOF at the bitter end.
- */
-
-int yyline()
-{
- struct include * i = incp;
-
- if ( !incp )
- return EOF;
-
- /* Once we start reading from the input stream, we reset the include
- * insertion point so that the next include file becomes the head of the
- * list.
- */
-
- /* If there is more data in this line, return it. */
- if ( *i->string )
- return *i->string++;
-
- /* If we are reading from an internal string list, go to the next string. */
- if ( i->strings )
- {
- if ( *i->strings )
- {
- ++i->line;
- i->string = *(i->strings++);
- return *i->string++;
- }
- }
- else
- {
- /* If necessary, open the file. */
- if ( !i->file )
- {
- FILE * f = stdin;
- if ( strcmp( i->fname, "-" ) && !( f = fopen( i->fname, "r" ) ) )
- perror( i->fname );
- i->file = f;
- }
-
- /* If there is another line in this file, start it. */
- if ( i->file && fgets( i->buf, sizeof( i->buf ), i->file ) )
- {
- ++i->line;
- i->string = i->buf;
- return *i->string++;
- }
- }
-
- /* This include is done. Free it up and return EOF so yyparse() returns to
- * parse_file().
- */
-
- incp = i->next;
-
- /* Close file, free name. */
- if ( i->file && ( i->file != stdin ) )
- fclose( i->file );
- freestr( i->fname );
- BJAM_FREE( (char *)i );
-
- return EOF;
-}
-
-
-/*
- * yylex() - set yylval to current token; return its type.
- *
- * Macros to move things along:
- *
- * yychar() - return and advance character; invalid after EOF.
- * yyprev() - back up one character; invalid before yychar().
- *
- * yychar() returns a continuous stream of characters, until it hits the EOF of
- * the current include file.
- */
-
-#define yychar() ( *incp->string ? *incp->string++ : yyline() )
-#define yyprev() ( incp->string-- )
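yychar() and yyprev() give the scanner a flat character stream with one character of pushback: pull from the current line buffer, fall back to yyline() for a fresh line, and back up by simply decrementing the buffer pointer. A compact standalone analogue reading from an in-memory list of lines (hypothetical, mirroring only the 'strings' branch of yyline()):

    #include <stdio.h>

    static const char *lines[] = { "first line\n", "second\n", NULL };
    static const char **nextline = lines;
    static const char *cursor = "";   /* points into the current line */

    /* Counterpart of yyline(): move to the next line, or report EOF. */
    static int nextchar_slow(void)
    {
        if (*nextline == NULL)
            return EOF;
        cursor = *nextline++;
        return *cursor++;
    }

    #define NEXTCHAR() ( *cursor ? *cursor++ : nextchar_slow() )  /* yychar() */
    #define PUSHBACK() ( cursor-- )                               /* yyprev() */

    int main(void)
    {
        int c;
        while ((c = NEXTCHAR()) != EOF)
            putchar(c);
        return 0;
    }

As in the scanner, the pushback is valid only immediately after a character was taken from the current buffer.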
-
-int yylex()
-{
- int c;
- char buf[ BIGGEST_TOKEN ];
- char * b = buf;
-
- if ( !incp )
- goto eof;
-
- /* Get first character (whitespace or of token). */
- c = yychar();
-
- if ( scanmode == SCAN_STRING )
- {
- /* If scanning for a string (action's {}'s), look for the closing brace.
- * We handle matching braces, if they match.
- */
-
- int nest = 1;
-
- while ( ( c != EOF ) && ( b < buf + sizeof( buf ) ) )
- {
- if ( c == '{' )
- ++nest;
-
- if ( ( c == '}' ) && !--nest )
- break;
-
- *b++ = c;
-
- c = yychar();
-
- /* Turn trailing "\r\n" sequences into plain "\n" for Cygwin. */
- if ( ( c == '\n' ) && ( b[ -1 ] == '\r' ) )
- --b;
- }
-
- /* We ate the ending brace -- regurgitate it. */
- if ( c != EOF )
- yyprev();
-
- /* Check for obvious errors. */
- if ( b == buf + sizeof( buf ) )
- {
- yyerror( "action block too big" );
- goto eof;
- }
-
- if ( nest )
- {
- yyerror( "unmatched {} in action block" );
- goto eof;
- }
-
- *b = 0;
- yylval.type = STRING;
- yylval.string = newstr( buf );
- yylval.file = incp->fname;
- yylval.line = incp->line;
- }
- else
- {
- char * b = buf;
- struct keyword * k;
- int inquote = 0;
- int notkeyword;
-
- /* Eat white space. */
- for ( ;; )
- {
- /* Skip past white space. */
- while ( ( c != EOF ) && isspace( c ) )
- c = yychar();
-
- /* Not a comment? */
- if ( c != '#' )
- break;
-
- /* Swallow up comment line. */
- while ( ( ( c = yychar() ) != EOF ) && ( c != '\n' ) ) ;
- }
-
- /* c now points to the first character of a token. */
- if ( c == EOF )
- goto eof;
-
- yylval.file = incp->fname;
- yylval.line = incp->line;
-
- /* While scanning the word, disqualify it for (expensive) keyword lookup
- * when we can: $anything, "anything", \anything
- */
- notkeyword = c == '$';
-
- /* Look for white space to delimit the word. Double quotes are stripped but
- * preserve the white space they enclose; \ protects the next character.
- */
- while
- (
- ( c != EOF ) &&
- ( b < buf + sizeof( buf ) ) &&
- ( inquote || !isspace( c ) )
- )
- {
- if ( c == '"' )
- {
- /* begin or end " */
- inquote = !inquote;
- notkeyword = 1;
- }
- else if ( c != '\\' )
- {
- /* normal char */
- *b++ = c;
- }
- else if ( ( c = yychar() ) != EOF )
- {
- /* \c */
- if (c == 'n')
- c = '\n';
- else if (c == 'r')
- c = '\r';
- else if (c == 't')
- c = '\t';
- *b++ = c;
- notkeyword = 1;
- }
- else
- {
- /* \EOF */
- break;
- }
-
- c = yychar();
- }
-
- /* Check obvious errors. */
- if ( b == buf + sizeof( buf ) )
- {
- yyerror( "string too big" );
- goto eof;
- }
-
- if ( inquote )
- {
- yyerror( "unmatched \" in string" );
- goto eof;
- }
-
- /* We looked ahead a character - back up. */
- if ( c != EOF )
- yyprev();
-
- /* Scan the token table. Do not scan if it is obviously not a keyword or if
- * it is alphabetic when we are looking only for punctuation.
- */
-
- *b = 0;
- yylval.type = ARG;
-
- if ( !notkeyword && !( isalpha( *buf ) && ( scanmode == SCAN_PUNCT ) ) )
- for ( k = keywords; k->word; ++k )
- if ( ( *buf == *k->word ) && !strcmp( k->word, buf ) )
- {
- yylval.type = k->type;
- yylval.string = k->word; /* used by symdump */
- break;
- }
-
- if ( yylval.type == ARG )
- yylval.string = newstr( buf );
- }
-
- if ( DEBUG_SCAN )
- printf( "scan %s\n", symdump( &yylval ) );
-
- return yylval.type;
-
-eof:
- /* We do not reset yylval.file & yylval.line here so unexpected EOF error
- * messages would include correct error location information.
- */
- yylval.type = EOF;
- return yylval.type;
-}
-
-
-static char * symdump( YYSTYPE * s )
-{
- static char buf[ BIGGEST_TOKEN + 20 ];
- switch ( s->type )
- {
- case EOF : sprintf( buf, "EOF" ); break;
- case 0 : sprintf( buf, "unknown symbol %s", s->string ); break;
- case ARG : sprintf( buf, "argument %s" , s->string ); break;
- case STRING: sprintf( buf, "string \"%s\"" , s->string ); break;
- default : sprintf( buf, "keyword %s" , s->string ); break;
- }
- return buf;
-}
-
-
-/*
- * Get information about the current file and line, for those epsilon
- * transitions that produce a parse.
- */
-
-void yyinput_stream( char * * name, int * line )
-{
- if ( incp )
- {
- *name = incp->fname;
- *line = incp->line;
- }
- else
- {
- *name = "(builtin)";
- *line = -1;
- }
-}
diff --git a/jam-files/engine/scan.h b/jam-files/engine/scan.h
deleted file mode 100644
index 3fad1c24..00000000
--- a/jam-files/engine/scan.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * scan.h - the jam yacc scanner
- *
- * External functions:
- *
- * yyerror( char *s ) - print a parsing error message.
- * yyfparse( char *s ) - scan include file s.
- * yylex() - parse the next token, returning its type.
- * yymode() - adjust lexicon of scanner.
- * yyparse() - declaration for yacc parser.
- * yyanyerrors() - indicate whether any parsing errors occurred.
- *
- * The yymode() function is for the parser to adjust the lexicon of the scanner.
- * Aside from normal keyword scanning, there is a mode to handle action strings
- * (look only for the closing }) and a mode to ignore most keywords when looking
- * for a punctuation keyword. This allows non-punctuation keywords to be used in
- * lists without quoting.
- */
-
-/*
- * YYSTYPE - value of a lexical token
- */
-
-#define YYSTYPE YYSYMBOL
-
-typedef struct _YYSTYPE
-{
- int type;
- char * string;
- PARSE * parse;
- LIST * list;
- int number;
- char * file;
- int line;
-} YYSTYPE;
-
-extern YYSTYPE yylval;
-
-void yymode( int n );
-void yyerror( char * s );
-int yyanyerrors();
-void yyfparse( char * s );
-int yyline();
-int yylex();
-int yyparse();
-void yyinput_stream( char * * name, int * line );
-
-# define SCAN_NORMAL 0 /* normal parsing */
-# define SCAN_STRING 1 /* look only for matching } */
-# define SCAN_PUNCT 2 /* only punctuation keywords */
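-
-/* Illustrative usage (an added sketch, not from the original header): the
- * parser is expected to switch modes around an action body, roughly
- *
- *     yymode( SCAN_STRING );   (gather everything up to the matching })
- *     ...
- *     yymode( SCAN_NORMAL );   (resume ordinary keyword scanning)
- */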
diff --git a/jam-files/engine/search.c b/jam-files/engine/search.c
deleted file mode 100644
index 6c23d97a..00000000
--- a/jam-files/engine/search.c
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "search.h"
-#include "timestamp.h"
-#include "pathsys.h"
-#include "variable.h"
-#include "newstr.h"
-#include "compile.h"
-#include "strings.h"
-#include "hash.h"
-#include "filesys.h"
-#include <string.h>
-
-
-typedef struct _binding
-{
- char * binding;
- char * target;
-} BINDING;
-
-static struct hash *explicit_bindings = 0;
-
-
-void call_bind_rule
-(
- char * target_,
- char * boundname_
-)
-{
- LIST * bind_rule = var_get( "BINDRULE" );
- if ( bind_rule )
- {
- /* No guarantee that the target is an allocated string, so be on the
- * safe side.
- */
- char * target = copystr( target_ );
-
- /* Likewise, do not rely on implementation details of newstr.c: allocate
- * a copy of boundname.
- */
- char * boundname = copystr( boundname_ );
- if ( boundname && target )
- {
- /* Prepare the argument list. */
- FRAME frame[1];
- frame_init( frame );
-
- /* First argument is the target name. */
- lol_add( frame->args, list_new( L0, target ) );
-
- lol_add( frame->args, list_new( L0, boundname ) );
- if ( lol_get( frame->args, 1 ) )
- evaluate_rule( bind_rule->string, frame );
-
- /* Clean up */
- frame_free( frame );
- }
- else
- {
- if ( boundname )
- freestr( boundname );
- if ( target )
- freestr( target );
- }
- }
-}
-
-/*
- * search.c - find a target along $(SEARCH) or $(LOCATE)
- * First, check if LOCATE is set. If so, use it to determine
- * the location of target and return, regardless of whether anything
- * exists on that location.
- *
- * Second, examine all directories in SEARCH. If a file already exists
- * there, or another target with the same name was placed at that location
- * via a LOCATE setting, stop and return the location. In the case of such
- * a previously located target, return its name via the third argument.
- *
- * This behaviour allows handling dependencies on generated files. If the
- * caller does not expect the target to be generated, 0 can be passed as
- * the third argument.
- */
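-
-/* Illustrative example (an added sketch, not from the original file): with
- * Jam settings such as
- *
- *     LOCATE on gen.c = bin ;        gen.c is bound to bin/gen.c regardless
- *                                    of whether that file exists yet;
- *     SEARCH on foo.c = src1 src2 ;  the first of src1/foo.c, src2/foo.c that
- *                                    exists (or was LOCATEd there by another
- *                                    target) is returned.
- *
- * A target with neither variable set falls through to the "obvious place",
- * i.e. the plain file name relative to the current directory.
- */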
-
-char *
-search(
- char *target,
- time_t *time,
- char **another_target,
- int file
-)
-{
- PATHNAME f[1];
- LIST *varlist;
- string buf[1];
- int found = 0;
- /* Will be set to 1 if target location is specified via LOCATE. */
- int explicitly_located = 0;
- char *boundname = 0;
-
- if ( another_target )
- *another_target = 0;
-
- if (! explicit_bindings )
- explicit_bindings = hashinit( sizeof(BINDING),
- "explicitly specified locations");
-
- string_new( buf );
- /* Parse the filename */
-
- path_parse( target, f );
-
- f->f_grist.ptr = 0;
- f->f_grist.len = 0;
-
- if ( ( varlist = var_get( "LOCATE" ) ) )
- {
- f->f_root.ptr = varlist->string;
- f->f_root.len = strlen( varlist->string );
-
- path_build( f, buf, 1 );
-
- if ( DEBUG_SEARCH )
- printf( "locate %s: %s\n", target, buf->value );
-
- explicitly_located = 1;
-
- timestamp( buf->value, time );
- found = 1;
- }
- else if ( ( varlist = var_get( "SEARCH" ) ) )
- {
- while ( varlist )
- {
- BINDING b, *ba = &b;
- file_info_t *ff;
-
- f->f_root.ptr = varlist->string;
- f->f_root.len = strlen( varlist->string );
-
- string_truncate( buf, 0 );
- path_build( f, buf, 1 );
-
- if ( DEBUG_SEARCH )
- printf( "search %s: %s\n", target, buf->value );
-
- ff = file_query(buf->value);
- timestamp( buf->value, time );
-
- b.binding = buf->value;
-
- if ( hashcheck( explicit_bindings, (HASHDATA**)&ba ) )
- {
- if ( DEBUG_SEARCH )
- printf(" search %s: found explicitly located target %s\n",
- target, ba->target);
- if ( another_target )
- *another_target = ba->target;
- found = 1;
- break;
- }
- else if ( ff && ff->time )
- {
- if ( !file || ff->is_file )
- {
- found = 1;
- break;
- }
- }
-
- varlist = list_next( varlist );
- }
- }
-
- if ( !found )
- {
- /* Look for the obvious */
- /* This is a questionable move. Should we look in the */
- /* obvious place if SEARCH is set? */
-
- f->f_root.ptr = 0;
- f->f_root.len = 0;
-
- string_truncate( buf, 0 );
- path_build( f, buf, 1 );
-
- if ( DEBUG_SEARCH )
- printf( "search %s: %s\n", target, buf->value );
-
- timestamp( buf->value, time );
- }
-
- boundname = newstr( buf->value );
- string_free( buf );
-
- if ( explicitly_located )
- {
- BINDING b;
- BINDING * ba = &b;
- b.binding = boundname;
- b.target = target;
- /* CONSIDER: we probably should issue a warning if another file
- is explicitly bound to the same location. This might break
- compatibility, though. */
- hashenter( explicit_bindings, (HASHDATA * *)&ba );
- }
-
- /* prepare a call to BINDRULE if the variable is set */
- call_bind_rule( target, boundname );
-
- return boundname;
-}
diff --git a/jam-files/engine/search.h b/jam-files/engine/search.h
deleted file mode 100644
index c364cac0..00000000
--- a/jam-files/engine/search.h
+++ /dev/null
@@ -1,11 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * search.h - find a target along $(SEARCH) or $(LOCATE)
- */
-
-char *search( char *target, time_t *time, char **another_target, int file );
diff --git a/jam-files/engine/strings.c b/jam-files/engine/strings.c
deleted file mode 100644
index 89561237..00000000
--- a/jam-files/engine/strings.c
+++ /dev/null
@@ -1,201 +0,0 @@
-/* Copyright David Abrahams 2004. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "jam.h"
-#include "strings.h"
-#include <stdlib.h>
-#include <string.h>
-#include <assert.h>
-#include <stdio.h>
-
-
-#ifndef NDEBUG
-# define JAM_STRING_MAGIC ((char)0xcf)
-# define JAM_STRING_MAGIC_SIZE 4
-static void assert_invariants( string* self )
-{
- int i;
-
- if ( self->value == 0 )
- {
- assert( self->size == 0 );
- assert( self->capacity == 0 );
- assert( self->opt[0] == 0 );
- return;
- }
-
- assert( self->size < self->capacity );
- assert( ( self->capacity <= sizeof(self->opt) ) == ( self->value == self->opt ) );
- assert( strlen( self->value ) == self->size );
-
- for (i = 0; i < 4; ++i)
- {
- assert( self->magic[i] == JAM_STRING_MAGIC );
- assert( self->value[self->capacity + i] == JAM_STRING_MAGIC );
- }
-}
-#else
-# define JAM_STRING_MAGIC_SIZE 0
-# define assert_invariants(x) do {} while (0)
-#endif
-
-void string_new( string* s )
-{
- s->value = s->opt;
- s->size = 0;
- s->capacity = sizeof(s->opt);
- s->opt[0] = 0;
-#ifndef NDEBUG
- memset(s->magic, JAM_STRING_MAGIC, sizeof(s->magic));
-#endif
- assert_invariants( s );
-}
-
-void string_free( string* s )
-{
- assert_invariants( s );
- if ( s->value != s->opt )
- BJAM_FREE( s->value );
- string_new( s );
-}
-
-static void string_reserve_internal( string* self, size_t capacity )
-{
- if ( self->value == self->opt )
- {
- self->value = (char*)BJAM_MALLOC_ATOMIC( capacity + JAM_STRING_MAGIC_SIZE );
- self->value[0] = 0;
- strncat( self->value, self->opt, sizeof(self->opt) );
- assert( strlen( self->value ) <= self->capacity ); /* This is a regression test */
- }
- else
- {
- self->value = (char*)BJAM_REALLOC( self->value, capacity + JAM_STRING_MAGIC_SIZE );
- }
-#ifndef NDEBUG
- memcpy( self->value + capacity, self->magic, JAM_STRING_MAGIC_SIZE );
-#endif
- self->capacity = capacity;
-}
-
-void string_reserve( string* self, size_t capacity )
-{
- assert_invariants( self );
- if ( capacity <= self->capacity )
- return;
- string_reserve_internal( self, capacity );
- assert_invariants( self );
-}
-
-static void extend_full( string* self, char const* start, char const* finish )
-{
- size_t new_size = self->capacity + ( finish - start );
- size_t new_capacity = self->capacity;
- size_t old_size = self->capacity;
- while ( new_capacity < new_size + 1)
- new_capacity <<= 1;
- string_reserve_internal( self, new_capacity );
- memcpy( self->value + old_size, start, new_size - old_size );
- self->value[new_size] = 0;
- self->size = new_size;
-}
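-
-/* Added note: new_capacity doubles until the data fits, so starting from the
- * 32-byte in-object buffer the heap block grows 32 -> 64 -> 128 -> ..., which
- * makes repeated string_append()/string_push_back() calls amortized O(1) per
- * character.
- */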
-
-void string_append( string* self, char const* rhs )
-{
- char* p = self->value + self->size;
- char* end = self->value + self->capacity;
- assert_invariants( self );
-
- while ( *rhs && p != end)
- *p++ = *rhs++;
-
- if ( p != end )
- {
- *p = 0;
- self->size = p - self->value;
- }
- else
- {
- extend_full( self, rhs, rhs + strlen(rhs) );
- }
- assert_invariants( self );
-}
-
-void string_append_range( string* self, char const* start, char const* finish )
-{
- char* p = self->value + self->size;
- char* end = self->value + self->capacity;
- assert_invariants( self );
-
- while ( p != end && start != finish )
- *p++ = *start++;
-
- if ( p != end )
- {
- *p = 0;
- self->size = p - self->value;
- }
- else
- {
- extend_full( self, start, finish );
- }
- assert_invariants( self );
-}
-
-void string_copy( string* s, char const* rhs )
-{
- string_new( s );
- string_append( s, rhs );
-}
-
-void string_truncate( string* self, size_t n )
-{
- assert_invariants( self );
- assert( n <= self->capacity );
- self->value[self->size = n] = 0;
- assert_invariants( self );
-}
-
-void string_pop_back( string* self )
-{
- string_truncate( self, self->size - 1 );
-}
-
-void string_push_back( string* self, char x )
-{
- string_append_range( self, &x, &x + 1 );
-}
-
-char string_back( string* self )
-{
- assert_invariants( self );
- return self->value[self->size - 1];
-}
-
-#ifndef NDEBUG
-void string_unit_test()
-{
- string s[1];
- int i;
- char buffer[sizeof(s->opt) * 2 + 2];
- int limit = sizeof(buffer) > 254 ? 254 : sizeof(buffer);
-
- string_new(s);
-
- for (i = 0; i < limit; ++i)
- {
- string_push_back( s, (char)(i + 1) );
- };
-
- for (i = 0; i < limit; ++i)
- {
- assert( i < s->size );
- assert( s->value[i] == (char)(i + 1));
- }
-
- string_free(s);
-
-}
-#endif
-
diff --git a/jam-files/engine/strings.h b/jam-files/engine/strings.h
deleted file mode 100644
index 33c77bd7..00000000
--- a/jam-files/engine/strings.h
+++ /dev/null
@@ -1,34 +0,0 @@
-#ifndef STRINGS_DWA20011024_H
-# define STRINGS_DWA20011024_H
-
-/* Copyright David Abrahams 2004. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-# include <stddef.h>
-
-typedef struct string
-{
- char* value;
- unsigned long size;
- unsigned long capacity;
- char opt[32];
-#ifndef NDEBUG
- char magic[4];
-#endif
-} string;
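-
-/* Field notes added for clarity (inferred from strings.c):
- *   value - points at opt while the contents fit in 32 bytes, otherwise at a
- *           heap block allocated by string_reserve();
- *   opt   - in-object small-buffer storage that avoids malloc for short
- *           strings;
- *   magic - debug-only guard bytes used to detect buffer overruns.
- */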
-
-void string_new( string* );
-void string_copy( string*, char const* );
-void string_free( string* );
-void string_append( string*, char const* );
-void string_append_range( string*, char const*, char const* );
-void string_push_back( string* s, char x );
-void string_reserve( string*, size_t );
-void string_truncate( string*, size_t );
-void string_pop_back( string* );
-char string_back( string* );
-void string_unit_test();
-
-#endif
-
diff --git a/jam-files/engine/subst.c b/jam-files/engine/subst.c
deleted file mode 100644
index 75524ecc..00000000
--- a/jam-files/engine/subst.c
+++ /dev/null
@@ -1,94 +0,0 @@
-#include <stddef.h>
-#include "jam.h"
-#include "regexp.h"
-#include "hash.h"
-
-#include "newstr.h"
-#include "lists.h"
-#include "parse.h"
-#include "compile.h"
-#include "frames.h"
-
-struct regex_entry
-{
- const char* pattern;
- regexp* regex;
-};
-typedef struct regex_entry regex_entry;
-
-static struct hash* regex_hash;
-
-regexp* regex_compile( const char* pattern )
-{
- regex_entry entry, *e = &entry;
- entry.pattern = pattern;
-
- if ( !regex_hash )
- regex_hash = hashinit(sizeof(regex_entry), "regex");
-
- if ( hashenter( regex_hash, (HASHDATA **)&e ) )
- e->regex = regcomp( (char*)pattern );
-
- return e->regex;
-}
-
-LIST*
-builtin_subst(
- PARSE *parse,
- FRAME *frame )
-{
- LIST* result = L0;
- LIST* arg1 = lol_get( frame->args, 0 );
-
- if ( arg1 && list_next(arg1) && list_next(list_next(arg1)) )
- {
-
- const char* source = arg1->string;
- const char* pattern = list_next(arg1)->string;
- regexp* repat = regex_compile( pattern );
-
- if ( regexec( repat, (char*)source) )
- {
- LIST* subst = list_next(arg1);
-
- while ((subst = list_next(subst)) != L0)
- {
-# define BUFLEN 4096
- char buf[BUFLEN + 1];
- const char* in = subst->string;
- char* out = buf;
-
- for ( in = subst->string; *in && out < buf + BUFLEN; ++in )
- {
- if ( *in == '\\' || *in == '$' )
- {
- ++in;
- if ( *in == 0 )
- {
- break;
- }
- else if ( *in >= '0' && *in <= '9' )
- {
- unsigned n = *in - '0';
- const size_t srclen = repat->endp[n] - repat->startp[n];
- const size_t remaining = buf + BUFLEN - out;
- const size_t len = srclen < remaining ? srclen : remaining;
- memcpy( out, repat->startp[n], len );
- out += len;
- continue;
- }
- /* fall through and copy the next character */
- }
- *out++ = *in;
- }
- *out = 0;
-
- result = list_new( result, newstr( buf ) );
-#undef BUFLEN
- }
- }
- }
-
- return result;
-}
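-
-/* Illustrative semantics (an added sketch, not from the original file): with
- * the argument list ( "abcd" : "a(b)c" : "x$1y" : "\1\1" ) the pattern
- * matches, so the result list would be { "xby", "bb" } -- both "$n" and "\n"
- * refer to the n-th parenthesised group, and every format string after the
- * pattern yields one result element.
- */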
-
diff --git a/jam-files/engine/timestamp.c b/jam-files/engine/timestamp.c
deleted file mode 100644
index 8a59c8c0..00000000
--- a/jam-files/engine/timestamp.c
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-
-# include "hash.h"
-# include "filesys.h"
-# include "pathsys.h"
-# include "timestamp.h"
-# include "newstr.h"
-# include "strings.h"
-
-/*
- * timestamp.c - get the timestamp of a file or archive member
- *
- * 09/22/00 (seiwald) - downshift names on OS2, too
- */
-
-/*
- * BINDING - all known files
- */
-
-typedef struct _binding BINDING;
-
-struct _binding {
- char *name;
- short flags;
-
-# define BIND_SCANNED 0x01 /* if directory or arch, has been scanned */
-
- short progress;
-
-# define BIND_INIT 0 /* never seen */
-# define BIND_NOENTRY 1 /* timestamp requested but file never found */
-# define BIND_SPOTTED 2 /* file found but not timed yet */
-# define BIND_MISSING 3 /* file found but can't get timestamp */
-# define BIND_FOUND 4 /* file found and time stamped */
-
- time_t time; /* update time - 0 if not exist */
-};
-
-static struct hash * bindhash = 0;
-static void time_enter( void *, char *, int, time_t );
-
-static char * time_progress[] =
-{
- "INIT",
- "NOENTRY",
- "SPOTTED",
- "MISSING",
- "FOUND"
-};
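-
-/* Added overview (inferred from the code below): a binding normally moves
- * INIT -> SPOTTED (seen during a directory or archive scan) -> FOUND or
- * MISSING (once file_time() is consulted); archive members that already carry
- * a time go straight to FOUND, and names that never turn up stay at NOENTRY.
- */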
-
-
-/*
- * timestamp() - return timestamp on a file, if present.
- */
-
-void timestamp( char * target, time_t * time )
-{
- PROFILE_ENTER( timestamp );
-
- PATHNAME f1;
- PATHNAME f2;
- BINDING binding;
- BINDING * b = &binding;
- string buf[ 1 ];
-#ifdef DOWNSHIFT_PATHS
- string path;
- char * p;
-#endif
-
-#ifdef DOWNSHIFT_PATHS
- string_copy( &path, target );
- p = path.value;
-
- do
- {
- *p = tolower( *p );
-#ifdef NT
- /* On NT, we must use backslashes or the file will not be found. */
- if ( *p == '/' )
- *p = PATH_DELIM;
-#endif
- }
- while ( *p++ );
-
- target = path.value;
-#endif /* #ifdef DOWNSHIFT_PATHS */
- string_new( buf );
-
- if ( !bindhash )
- bindhash = hashinit( sizeof( BINDING ), "bindings" );
-
- /* Quick path - is it there? */
- b->name = target;
- b->time = b->flags = 0;
- b->progress = BIND_INIT;
-
- if ( hashenter( bindhash, (HASHDATA * *)&b ) )
- b->name = newstr( target ); /* never freed */
-
- if ( b->progress != BIND_INIT )
- goto afterscanning;
-
- b->progress = BIND_NOENTRY;
-
- /* Not found - have to scan for it. */
- path_parse( target, &f1 );
-
- /* Scan directory if not already done so. */
- {
- BINDING binding;
- BINDING * b = &binding;
-
- f2 = f1;
- f2.f_grist.len = 0;
- path_parent( &f2 );
- path_build( &f2, buf, 0 );
-
- b->name = buf->value;
- b->time = b->flags = 0;
- b->progress = BIND_INIT;
-
- if ( hashenter( bindhash, (HASHDATA * *)&b ) )
- b->name = newstr( buf->value ); /* never freed */
-
- if ( !( b->flags & BIND_SCANNED ) )
- {
- file_dirscan( buf->value, time_enter, bindhash );
- b->flags |= BIND_SCANNED;
- }
- }
-
- /* Scan archive if not already done so. */
- if ( f1.f_member.len )
- {
- BINDING binding;
- BINDING * b = &binding;
-
- f2 = f1;
- f2.f_grist.len = 0;
- f2.f_member.len = 0;
- string_truncate( buf, 0 );
- path_build( &f2, buf, 0 );
-
- b->name = buf->value;
- b->time = b->flags = 0;
- b->progress = BIND_INIT;
-
- if ( hashenter( bindhash, (HASHDATA * *)&b ) )
- b->name = newstr( buf->value ); /* never freed */
-
- if ( !( b->flags & BIND_SCANNED ) )
- {
- file_archscan( buf->value, time_enter, bindhash );
- b->flags |= BIND_SCANNED;
- }
- }
-
- afterscanning:
-
- if ( b->progress == BIND_SPOTTED )
- {
- b->progress = file_time( b->name, &b->time ) < 0
- ? BIND_MISSING
- : BIND_FOUND;
- }
-
- *time = b->progress == BIND_FOUND ? b->time : 0;
- string_free( buf );
-#ifdef DOWNSHIFT_PATHS
- string_free( &path );
-#endif
-
- PROFILE_EXIT( timestamp );
-}
-
-
-static void time_enter( void * closure, char * target, int found, time_t time )
-{
- BINDING binding;
- BINDING * b = &binding;
- struct hash * bindhash = (struct hash *)closure;
-
-#ifdef DOWNSHIFT_PATHS
- char path[ MAXJPATH ];
- char * p = path;
-
- do *p++ = tolower( *target );
- while ( *target++ );
-
- target = path;
-#endif
-
- b->name = target;
- b->flags = 0;
-
- if ( hashenter( bindhash, (HASHDATA * *)&b ) )
- b->name = newstr( target ); /* never freed */
-
- b->time = time;
- b->progress = found ? BIND_FOUND : BIND_SPOTTED;
-
- if ( DEBUG_BINDSCAN )
- printf( "time ( %s ) : %s\n", target, time_progress[ b->progress ] );
-}
-
-
-/*
- * stamps_done() - free timestamp tables.
- */
-
-void stamps_done()
-{
- hashdone( bindhash );
-}
diff --git a/jam-files/engine/timestamp.h b/jam-files/engine/timestamp.h
deleted file mode 100644
index f5752763..00000000
--- a/jam-files/engine/timestamp.h
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * timestamp.h - get the timestamp of a file or archive member
- */
-
-void timestamp( char * target, time_t * time );
-void stamps_done();
diff --git a/jam-files/engine/variable.c b/jam-files/engine/variable.c
deleted file mode 100644
index 795f3458..00000000
--- a/jam-files/engine/variable.c
+++ /dev/null
@@ -1,631 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Reece H. Dunn.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "expand.h"
-#include "hash.h"
-#include "filesys.h"
-#include "newstr.h"
-#include "strings.h"
-#include "pathsys.h"
-#include <stdlib.h>
-#include <stdio.h>
-
-/*
- * variable.c - handle Jam multi-element variables.
- *
- * External routines:
- *
- * var_defines() - load a bunch of variable=value settings.
- * var_string() - expand a string with variables in it.
- * var_get() - get value of a user defined symbol.
- * var_set() - set a variable in jam's user defined symbol table.
- * var_swap() - swap a variable's value with the given one.
- * var_done() - free variable tables.
- *
- * Internal routines:
- *
- * var_enter() - make new var symbol table entry, returning var ptr.
- * var_dump() - dump a variable to stdout.
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- * 01/22/95 (seiwald) - split environment variables at blanks or :'s
- * 05/10/95 (seiwald) - split path variables at SPLITPATH (not :)
- * 09/11/00 (seiwald) - defunct var_list() removed
- */
-
-static struct hash *varhash = 0;
-
-/*
- * VARIABLE - a user defined multi-value variable
- */
-
-typedef struct _variable VARIABLE ;
-
-struct _variable
-{
- char * symbol;
- LIST * value;
-};
-
-static VARIABLE * var_enter( char * symbol );
-static void var_dump( char * symbol, LIST * value, char * what );
-
-
-/*
- * var_hash_swap() - swap all variable settings with those passed
- *
- * Used to implement separate settings spaces for modules
- */
-
-void var_hash_swap( struct hash * * new_vars )
-{
- struct hash * old = varhash;
- varhash = *new_vars;
- *new_vars = old;
-}
-
-
-/*
- * var_defines() - load a bunch of variable=value settings
- *
- * If preprocess is false, take the value verbatim.
- *
- * Otherwise, if the variable value is enclosed in quotes, strip the
- * quotes.
- *
- * Otherwise, if variable name ends in PATH, split value at :'s.
- *
- * Otherwise, split the value at blanks.
- */
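-
-/* Examples added for clarity (illustrative, with preprocess enabled):
- *
- *     MYPATH=/usr/bin:/bin   ->  { "/usr/bin", "/bin" }   (name ends in PATH)
- *     CFLAGS="-O2 -g"        ->  { "-O2 -g" }             (quotes stripped)
- *     WORDS=a b              ->  { "a", "b" }             (split at blanks)
- */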
-
-void var_defines( char * const * e, int preprocess )
-{
- string buf[1];
-
- string_new( buf );
-
- for ( ; *e; ++e )
- {
- char * val;
-
-# ifdef OS_MAC
- /* On the mac (MPW), the var=val is actually var\0val */
- /* Think different. */
-
- if ( ( val = strchr( *e, '=' ) ) || ( val = *e + strlen( *e ) ) )
-# else
- if ( ( val = strchr( *e, '=' ) ) )
-# endif
- {
- LIST * l = L0;
- char * pp;
- char * p;
-# ifdef OPT_NO_EXTERNAL_VARIABLE_SPLIT
- char split = '\0';
-# else
- # ifdef OS_MAC
- char split = ',';
- # else
- char split = ' ';
- # endif
-# endif
- size_t len = strlen( val + 1 );
-
- int quoted = ( val[1] == '"' ) && ( val[len] == '"' ) &&
- ( len > 1 );
-
- if ( quoted && preprocess )
- {
- string_append_range( buf, val + 2, val + len );
- l = list_new( l, newstr( buf->value ) );
- string_truncate( buf, 0 );
- }
- else
- {
- /* Split *PATH at :'s, not spaces. */
- if ( val - 4 >= *e )
- {
- if ( !strncmp( val - 4, "PATH", 4 ) ||
- !strncmp( val - 4, "Path", 4 ) ||
- !strncmp( val - 4, "path", 4 ) )
- split = SPLITPATH;
- }
-
- /* Do the split. */
- for
- (
- pp = val + 1;
- preprocess && ( ( p = strchr( pp, split ) ) != 0 );
- pp = p + 1
- )
- {
- string_append_range( buf, pp, p );
- l = list_new( l, newstr( buf->value ) );
- string_truncate( buf, 0 );
- }
-
- l = list_new( l, newstr( pp ) );
- }
-
- /* Get name. */
- string_append_range( buf, *e, val );
- var_set( buf->value, l, VAR_SET );
- string_truncate( buf, 0 );
- }
- }
- string_free( buf );
-}
-
-
-/*
- * var_string() - expand a string with variables in it
- *
- * Copies in to out; doesn't modify targets & sources.
- */
-
-int var_string( char * in, char * out, int outsize, LOL * lol )
-{
- char * out0 = out;
- char * oute = out + outsize - 1;
-
- while ( *in )
- {
- char * lastword;
- int dollar = 0;
-
- /* Copy white space. */
- while ( isspace( *in ) )
- {
- if ( out >= oute )
- return -1;
- *out++ = *in++;
- }
-
- lastword = out;
-
- /* Copy non-white space, watching for variables. */
- while ( *in && !isspace( *in ) )
- {
- if ( out >= oute )
- return -1;
-
- if ( ( in[ 0 ] == '$' ) && ( in[ 1 ] == '(' ) )
- {
- ++dollar;
- *out++ = *in++;
- }
- #ifdef OPT_AT_FILES
- else if ( ( in[ 0 ] == '@' ) && ( in[ 1 ] == '(' ) )
- {
- int depth = 1;
- char * ine = in + 2;
- char * split = 0;
-
- /* Scan the content of the response file @() section. */
- while ( *ine && ( depth > 0 ) )
- {
- switch ( *ine )
- {
- case '(': ++depth; break;
- case ')': --depth; break;
- case ':':
- if ( ( depth == 1 ) && ( ine[ 1 ] == 'E' ) && ( ine[ 2 ] == '=' ) )
- split = ine;
- break;
- }
- ++ine;
- }
-
- if ( !split )
- {
- /* The @() reference does not match the @(foo:E=bar) format,
- hence we leave it alone and copy it directly to the output. */
- int l = 0;
- if ( out + 2 >= oute ) return -1;
- *( out++ ) = '@';
- *( out++ ) = '(';
- l = var_string( in + 2, out, oute - out, lol );
- if ( l < 0 ) return -1;
- out += l;
- if ( out + 1 >= oute ) return -1;
- *( out++ ) = ')';
- }
- else if ( depth == 0 )
- {
- string file_name_v;
- int file_name_l = 0;
- const char * file_name_s = 0;
-
- /* Expand the temporary file name var inline. */
- #if 0
- string_copy( &file_name_v, "$(" );
- string_append_range( &file_name_v, in + 2, split );
- string_push_back( &file_name_v, ')' );
- #else
- string_new( &file_name_v );
- string_append_range( &file_name_v, in + 2, split );
- #endif
- file_name_l = var_string( file_name_v.value, out, oute - out + 1, lol );
- string_free( &file_name_v );
- if ( file_name_l < 0 ) return -1;
- file_name_s = out;
-
- /* For stdout/stderr we will create a temp file and generate
- * a command that outputs the content as needed.
- */
- if ( ( strcmp( "STDOUT", out ) == 0 ) ||
- ( strcmp( "STDERR", out ) == 0 ) )
- {
- int err_redir = strcmp( "STDERR", out ) == 0;
- out[ 0 ] = '\0';
- file_name_s = path_tmpfile();
- file_name_l = strlen(file_name_s);
- #ifdef OS_NT
- if ( ( out + 7 + file_name_l + ( err_redir ? 5 : 0 ) ) >= oute )
- return -1;
- sprintf( out,"type \"%s\"%s", file_name_s,
- err_redir ? " 1>&2" : "" );
- #else
- if ( ( out + 6 + file_name_l + ( err_redir ? 5 : 0 ) ) >= oute )
- return -1;
- sprintf( out,"cat \"%s\"%s", file_name_s,
- err_redir ? " 1>&2" : "" );
- #endif
- /* We also make sure that the temp files created by this
- * get nuked eventually.
- */
- file_remove_atexit( file_name_s );
- }
-
- /* Expand the file value into the file reference. */
- var_string_to_file( split + 3, ine - split - 4, file_name_s,
- lol );
-
- /* Continue on with the expansion. */
- out += strlen( out );
- }
-
- /* And continue with the parsing just past the @() reference. */
- in = ine;
- }
- #endif
- else
- {
- *out++ = *in++;
- }
- }
-
- /* Add zero to 'out' so that 'lastword' is correctly zero-terminated. */
- if ( out >= oute )
- return -1;
- /* Do not increment, intentionally. */
- *out = '\0';
-
- /* If a variable was encountered, expand it and embed the
- * space-separated members of the list in the output.
- */
- if ( dollar )
- {
- LIST * l = var_expand( L0, lastword, out, lol, 0 );
-
- out = lastword;
-
- while ( l )
- {
- int so = strlen( l->string );
-
- if ( out + so >= oute )
- return -1;
-
- strcpy( out, l->string );
- out += so;
- l = list_next( l );
- if ( l ) *out++ = ' ';
- }
-
- list_free( l );
- }
- }
-
- if ( out >= oute )
- return -1;
-
- *out++ = '\0';
-
- return out - out0;
-}
-
-
-void var_string_to_file( const char * in, int insize, const char * out, LOL * lol )
-{
- char const * ine = in + insize;
- FILE * out_file = 0;
- int out_debug = DEBUG_EXEC ? 1 : 0;
- if ( globs.noexec )
- {
- /* out_debug = 1; */
- }
- else if ( strcmp( out, "STDOUT" ) == 0 )
- {
- out_file = stdout;
- }
- else if ( strcmp( out, "STDERR" ) == 0 )
- {
- out_file = stderr;
- }
- else
- {
- /* Handle "path to file" filenames. */
- string out_name;
- if ( ( out[ 0 ] == '"' ) && ( out[ strlen( out ) - 1 ] == '"' ) )
- {
- string_copy( &out_name, out + 1 );
- string_truncate( &out_name, out_name.size - 1 );
- }
- else
- {
- string_copy( &out_name,out );
- }
- out_file = fopen( out_name.value, "w" );
- if ( !out_file )
- {
- printf( "failed to write output file '%s'!\n", out_name.value );
- exit( EXITBAD );
- }
- string_free( &out_name );
- }
-
- if ( out_debug ) printf( "\nfile %s\n", out );
-
- while ( *in && ( in < ine ) )
- {
- int dollar = 0;
- const char * output_0 = in;
- const char * output_1 = in;
-
- /* Copy white space. */
- while ( ( output_1 < ine ) && isspace( *output_1 ) )
- ++output_1;
-
- if ( output_0 < output_1 )
- {
- if ( out_file ) fwrite( output_0, output_1 - output_0, 1, out_file );
- if ( out_debug ) fwrite( output_0, output_1 - output_0, 1, stdout );
- }
- output_0 = output_1;
-
- /* Copy non-white space, watching for variables. */
- while ( ( output_1 < ine ) && *output_1 && !isspace( *output_1 ) )
- {
- if ( ( output_1[ 0 ] == '$' ) && ( output_1[ 1 ] == '(' ) )
- ++dollar;
- ++output_1;
- }
-
- /* If a variable was encountered, expand it and embed the space-separated
- * members of the list in the output.
- */
- if ( dollar )
- {
- LIST * l = var_expand( L0, (char *)output_0, (char *)output_1, lol, 0 );
-
- while ( l )
- {
- if ( out_file ) fputs( l->string, out_file );
- if ( out_debug ) puts( l->string );
- l = list_next( l );
- if ( l )
- {
- if ( out_file ) fputc( ' ', out_file );
- if ( out_debug ) fputc( ' ', stdout );
- }
- }
-
- list_free( l );
- }
- else if ( output_0 < output_1 )
- {
- if ( out_file )
- {
- const char * output_n = output_0;
- while ( output_n < output_1 )
- {
- output_n += fwrite( output_n, 1, output_1-output_n, out_file );
- }
- }
- if ( out_debug )
- {
- const char * output_n = output_0;
- while ( output_n < output_1 )
- {
- output_n += fwrite( output_n, 1, output_1-output_n, stdout );
- }
- }
- }
-
- in = output_1;
- }
-
- if ( out_file && ( out_file != stdout ) && ( out_file != stderr ) )
- {
- fflush( out_file );
- fclose( out_file );
- }
-
- if ( out_debug ) fputc( '\n', stdout );
-}
-
-
-/*
- * var_get() - get value of a user defined symbol.
- *
- * Returns NULL if symbol unset.
- */
-
-LIST * var_get( char * symbol )
-{
- LIST * result = 0;
-#ifdef OPT_AT_FILES
- /* Some "fixed" variables... */
- if ( strcmp( "TMPDIR", symbol ) == 0 )
- {
- result = list_new( L0, newstr( (char *)path_tmpdir() ) );
- }
- else if ( strcmp( "TMPNAME", symbol ) == 0 )
- {
- result = list_new( L0, newstr( (char *)path_tmpnam() ) );
- }
- else if ( strcmp( "TMPFILE", symbol ) == 0 )
- {
- result = list_new( L0, newstr( (char *)path_tmpfile() ) );
- }
- else if ( strcmp( "STDOUT", symbol ) == 0 )
- {
- result = list_new( L0, newstr( "STDOUT" ) );
- }
- else if ( strcmp( "STDERR", symbol ) == 0 )
- {
- result = list_new( L0, newstr( "STDERR" ) );
- }
- else
-#endif
- {
- VARIABLE var;
- VARIABLE * v = &var;
-
- v->symbol = symbol;
-
- if ( varhash && hashcheck( varhash, (HASHDATA * *)&v ) )
- {
- if ( DEBUG_VARGET )
- var_dump( v->symbol, v->value, "get" );
- result = v->value;
- }
- }
- return result;
-}
-
-
-/*
- * var_set() - set a variable in Jam's user defined symbol table.
- *
- * 'flag' controls the relationship between new and old values of the variable:
- * SET replaces the old with the new; APPEND appends the new to the old; DEFAULT
- * only uses the new if the variable was previously unset.
- *
- * Copies symbol. Takes ownership of value.
- */
-
-void var_set( char * symbol, LIST * value, int flag )
-{
- VARIABLE * v = var_enter( symbol );
-
- if ( DEBUG_VARSET )
- var_dump( symbol, value, "set" );
-
- switch ( flag )
- {
- case VAR_SET:
- /* Replace value */
- list_free( v->value );
- v->value = value;
- break;
-
- case VAR_APPEND:
- /* Append value */
- v->value = list_append( v->value, value );
- break;
-
- case VAR_DEFAULT:
- /* Set only if unset */
- if ( !v->value )
- v->value = value;
- else
- list_free( value );
- break;
- }
-}
-
-
-/*
- * var_swap() - swap a variable's value with the given one.
- */
-
-LIST * var_swap( char * symbol, LIST * value )
-{
- VARIABLE * v = var_enter( symbol );
- LIST * oldvalue = v->value;
- if ( DEBUG_VARSET )
- var_dump( symbol, value, "set" );
- v->value = value;
- return oldvalue;
-}
-
-
-/*
- * var_enter() - make new var symbol table entry, returning var ptr.
- */
-
-static VARIABLE * var_enter( char * symbol )
-{
- VARIABLE var;
- VARIABLE * v = &var;
-
- if ( !varhash )
- varhash = hashinit( sizeof( VARIABLE ), "variables" );
-
- v->symbol = symbol;
- v->value = 0;
-
- if ( hashenter( varhash, (HASHDATA * *)&v ) )
- v->symbol = newstr( symbol ); /* never freed */
-
- return v;
-}
-
-
-/*
- * var_dump() - dump a variable to stdout.
- */
-
-static void var_dump( char * symbol, LIST * value, char * what )
-{
- printf( "%s %s = ", what, symbol );
- list_print( value );
- printf( "\n" );
-}
-
-
-/*
- * var_done() - free variable tables.
- */
-
-static void delete_var_( void * xvar, void * data )
-{
- VARIABLE * v = (VARIABLE *)xvar;
- freestr( v->symbol );
- list_free( v-> value );
-}
-
-
-void var_done()
-{
- hashenumerate( varhash, delete_var_, (void *)0 );
- hashdone( varhash );
-}
diff --git a/jam-files/engine/variable.h b/jam-files/engine/variable.h
deleted file mode 100644
index 5c49e3ca..00000000
--- a/jam-files/engine/variable.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * variable.h - handle jam multi-element variables
- */
-
-struct hash;
-
-void var_defines( char* const *e, int preprocess );
-int var_string( char *in, char *out, int outsize, LOL *lol );
-LIST * var_get( char *symbol );
-void var_set( char *symbol, LIST *value, int flag );
-LIST * var_swap( char *symbol, LIST *value );
-void var_done();
-void var_hash_swap( struct hash** );
-
-/** Expands the "in" expression directly into the "out" file.
- The file can be one of: a path, STDOUT, or STDERR to send
- the output to a file overwriting previous content, to
- the console, or to the error output respectively.
-*/
-void var_string_to_file( const char * in, int insize, const char * out, LOL * lol );
-
-/*
- * Defines for var_set().
- */
-
-# define VAR_SET 0 /* override previous value */
-# define VAR_APPEND 1 /* append to previous value */
-# define VAR_DEFAULT 2 /* set only if no previous value */
-
diff --git a/jam-files/engine/w32_getreg.c b/jam-files/engine/w32_getreg.c
deleted file mode 100644
index 5a06f43e..00000000
--- a/jam-files/engine/w32_getreg.c
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
-Copyright Paul Lin 2003. Copyright 2006 Bojan Resnik.
-Distributed under the Boost Software License, Version 1.0. (See accompanying
-file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-# include "jam.h"
-
-# if defined( OS_NT ) || defined( OS_CYGWIN )
-
-# include "lists.h"
-# include "newstr.h"
-# include "parse.h"
-# include "frames.h"
-# include "strings.h"
-
-# define WIN32_LEAN_AND_MEAN
-# include <windows.h>
-
-# define MAX_REGISTRY_DATA_LENGTH 4096
-# define MAX_REGISTRY_KEYNAME_LENGTH 256
-# define MAX_REGISTRY_VALUENAME_LENGTH 16384
-
-typedef struct
-{
- LPCSTR name;
- HKEY value;
-} KeyMap;
-
-static const KeyMap dlRootKeys[] = {
- { "HKLM", HKEY_LOCAL_MACHINE },
- { "HKCU", HKEY_CURRENT_USER },
- { "HKCR", HKEY_CLASSES_ROOT },
- { "HKEY_LOCAL_MACHINE", HKEY_LOCAL_MACHINE },
- { "HKEY_CURRENT_USER", HKEY_CURRENT_USER },
- { "HKEY_CLASSES_ROOT", HKEY_CLASSES_ROOT },
- { 0, 0 }
-};
-
-static HKEY get_key(char const** path)
-{
- const KeyMap *p;
-
- for (p = dlRootKeys; p->name; ++p)
- {
- int n = strlen(p->name);
- if (!strncmp(*path,p->name,n))
- {
- if ((*path)[n] == '\\' || (*path)[n] == 0)
- {
- *path += n + 1;
- break;
- }
- }
- }
-
- return p->value;
-}
-
-LIST*
-builtin_system_registry(
- PARSE *parse,
- FRAME *frame )
-{
- char const* path = lol_get(frame->args, 0)->string;
- LIST* result = L0;
- HKEY key = get_key(&path);
-
- if (
- key != 0
- && ERROR_SUCCESS == RegOpenKeyEx(key, path, 0, KEY_QUERY_VALUE, &key)
- )
- {
- DWORD type;
- BYTE data[MAX_REGISTRY_DATA_LENGTH];
- DWORD len = sizeof(data);
- LIST const* const field = lol_get(frame->args, 1);
-
- if ( ERROR_SUCCESS ==
- RegQueryValueEx(key, field ? field->string : 0, 0, &type, data, &len) )
- {
- switch (type)
- {
-
- case REG_EXPAND_SZ:
- {
- long len;
- string expanded[1];
- string_new(expanded);
-
- while (
- (len = ExpandEnvironmentStrings(
- (LPCSTR)data, expanded->value, expanded->capacity))
- > expanded->capacity
- )
- string_reserve(expanded, len);
-
- expanded->size = len - 1;
-
- result = list_new( result, newstr(expanded->value) );
- string_free( expanded );
- }
- break;
-
- case REG_MULTI_SZ:
- {
- char* s;
-
- for (s = (char*)data; *s; s += strlen(s) + 1)
- result = list_new( result, newstr(s) );
-
- }
- break;
-
- case REG_DWORD:
- {
- char buf[100];
- sprintf( buf, "%u", *(PDWORD)data );
- result = list_new( result, newstr(buf) );
- }
- break;
-
- case REG_SZ:
- result = list_new( result, newstr((char*)data) );
- break;
- }
- }
- RegCloseKey(key);
- }
- return result;
-}
-
-static LIST* get_subkey_names(HKEY key, char const* path)
-{
- LIST* result = 0;
-
- if ( ERROR_SUCCESS ==
- RegOpenKeyEx(key, path, 0, KEY_ENUMERATE_SUB_KEYS, &key)
- )
- {
- char name[MAX_REGISTRY_KEYNAME_LENGTH];
- DWORD name_size = sizeof(name);
- DWORD index;
- FILETIME last_write_time;
-
- for ( index = 0;
- ERROR_SUCCESS == RegEnumKeyEx(
- key, index, name, &name_size, 0, 0, 0, &last_write_time);
- ++index,
- name_size = sizeof(name)
- )
- {
- name[name_size] = 0;
- result = list_append(result, list_new(0, newstr(name)));
- }
-
- RegCloseKey(key);
- }
-
- return result;
-}
-
-static LIST* get_value_names(HKEY key, char const* path)
-{
- LIST* result = 0;
-
- if ( ERROR_SUCCESS == RegOpenKeyEx(key, path, 0, KEY_QUERY_VALUE, &key) )
- {
- char name[MAX_REGISTRY_VALUENAME_LENGTH];
- DWORD name_size = sizeof(name);
- DWORD index;
-
- for ( index = 0;
- ERROR_SUCCESS == RegEnumValue(
- key, index, name, &name_size, 0, 0, 0, 0);
- ++index,
- name_size = sizeof(name)
- )
- {
- name[name_size] = 0;
- result = list_append(result, list_new(0, newstr(name)));
- }
-
- RegCloseKey(key);
- }
-
- return result;
-}
-
-LIST*
-builtin_system_registry_names(
- PARSE *parse,
- FRAME *frame )
-{
- char const* path = lol_get(frame->args, 0)->string;
- char const* result_type = lol_get(frame->args, 1)->string;
-
- HKEY key = get_key(&path);
-
- if ( !strcmp(result_type, "subkeys") )
- return get_subkey_names(key, path);
- if ( !strcmp(result_type, "values") )
- return get_value_names(key, path);
- return 0;
-}
-
-# endif
diff --git a/jam-files/engine/yyacc.c b/jam-files/engine/yyacc.c
deleted file mode 100644
index b5efc96b..00000000
--- a/jam-files/engine/yyacc.c
+++ /dev/null
@@ -1,268 +0,0 @@
-/* Copyright 2002 Rene Rivera.
-** Distributed under the Boost Software License, Version 1.0.
-** (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#include <stdio.h>
-#include <string.h>
-#include <ctype.h>
-#include <stdlib.h>
-
-/*
-# yyacc - yacc wrapper
-#
-# Allows tokens to be written as `literal` and then automatically
-# substituted with #defined tokens.
-#
-# Usage:
-# yyacc file.y filetab.h file.yy
-#
-# inputs:
-# file.yy yacc grammar with ` literals
-#
-# outputs:
-# file.y yacc grammar
-# filetab.h array of string <-> token mappings
-#
-# 3-13-93
-# Documented and p moved in sed command (for some reason,
-# s/x/y/p doesn't work).
-# 10-12-93
-# Take basename as second argument.
-# 12-31-96
-# reversed order of args to be compatible with GenFile rule
-# 11-20-2002
-# Reimplemented as a C program for portability. (Rene Rivera)
-*/
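-
-/* Example (an added sketch, not from the original comment): a grammar line
- * such as
- *
- *     rule : `if` expr `{` block `}` ;
- *
- * comes out with the back-quoted literals replaced by IF_t, _LBRACE_t and
- * _RBRACE_t, a %token declaration emitted for each, and matching entries such
- * as { "if", IF_t } written to the token table header.
- */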
-
-void print_usage();
-char * copy_string(char * s, int l);
-char * tokenize_string(char * s);
-int cmp_literal(const void * a, const void * b);
-
-typedef struct
-{
- char * string;
- char * token;
-} literal;
-
-int main(int argc, char ** argv)
-{
- int result = 0;
- if (argc != 4)
- {
- print_usage();
- result = 1;
- }
- else
- {
- FILE * token_output_f = 0;
- FILE * grammar_output_f = 0;
- FILE * grammar_source_f = 0;
-
- grammar_source_f = fopen(argv[3],"r");
- if (grammar_source_f == 0) { result = 1; }
- if (result == 0)
- {
- literal literals[1024];
- int t = 0;
- char l[2048];
- while (1)
- {
- if (fgets(l,2048,grammar_source_f) != 0)
- {
- char * c = l;
- while (1)
- {
- char * c1 = strchr(c,'`');
- if (c1 != 0)
- {
- char * c2 = strchr(c1+1,'`');
- if (c2 != 0)
- {
- literals[t].string = copy_string(c1+1,c2-c1-1);
- literals[t].token = tokenize_string(literals[t].string);
- t += 1;
- c = c2+1;
- }
- else
- break;
- }
- else
- break;
- }
- }
- else
- {
- break;
- }
- }
- literals[t].string = 0;
- literals[t].token = 0;
- qsort(literals,t,sizeof(literal),cmp_literal);
- {
- int p = 1;
- int i = 1;
- while (literals[i].string != 0)
- {
- if (strcmp(literals[p-1].string,literals[i].string) != 0)
- {
- literals[p] = literals[i];
- p += 1;
- }
- i += 1;
- }
- literals[p].string = 0;
- literals[p].token = 0;
- t = p;
- }
- token_output_f = fopen(argv[2],"w");
- if (token_output_f != 0)
- {
- int i = 0;
- while (literals[i].string != 0)
- {
- fprintf(token_output_f," { \"%s\", %s },\n",literals[i].string,literals[i].token);
- i += 1;
- }
- fclose(token_output_f);
- }
- else
- result = 1;
- if (result == 0)
- {
- grammar_output_f = fopen(argv[1],"w");
- if (grammar_output_f != 0)
- {
- int i = 0;
- while (literals[i].string != 0)
- {
- fprintf(grammar_output_f,"%%token %s\n",literals[i].token);
- i += 1;
- }
- rewind(grammar_source_f);
- while (1)
- {
- if (fgets(l,2048,grammar_source_f) != 0)
- {
- char * c = l;
- while (1)
- {
- char * c1 = strchr(c,'`');
- if (c1 != 0)
- {
- char * c2 = strchr(c1+1,'`');
- if (c2 != 0)
- {
- literal key;
- literal * replacement = 0;
- key.string = copy_string(c1+1,c2-c1-1);
- key.token = 0;
- replacement = (literal*)bsearch(
- &key,literals,t,sizeof(literal),cmp_literal);
- *c1 = 0;
- fprintf(grammar_output_f,"%s%s",c,replacement->token);
- c = c2+1;
- }
- else
- {
- fprintf(grammar_output_f,"%s",c);
- break;
- }
- }
- else
- {
- fprintf(grammar_output_f,"%s",c);
- break;
- }
- }
- }
- else
- {
- break;
- }
- }
- fclose(grammar_output_f);
- }
- else
- result = 1;
- }
- }
- if (result != 0)
- {
- perror("yyacc");
- }
- }
- return result;
-}
-
-static char * usage[] = {
- "yyacc <grammar output.y> <token table output.h> <grammar source.yy>",
- 0 };
-
-void print_usage()
-{
- char ** u;
- for (u = usage; *u != 0; ++u)
- {
- fputs(*u,stderr); putc('\n',stderr);
- }
-}
-
-char * copy_string(char * s, int l)
-{
- char * result = (char*)malloc(l+1);
- strncpy(result,s,l);
- result[l] = 0;
- return result;
-}
-
-char * tokenize_string(char * s)
-{
- char * result;
- char * literal = s;
- int l;
- int c;
-
- if (strcmp(s,":") == 0) literal = "_colon";
- else if (strcmp(s,"!") == 0) literal = "_bang";
- else if (strcmp(s,"!=") == 0) literal = "_bang_equals";
- else if (strcmp(s,"&&") == 0) literal = "_amperamper";
- else if (strcmp(s,"&") == 0) literal = "_amper";
- else if (strcmp(s,"+") == 0) literal = "_plus";
- else if (strcmp(s,"+=") == 0) literal = "_plus_equals";
- else if (strcmp(s,"||") == 0) literal = "_barbar";
- else if (strcmp(s,"|") == 0) literal = "_bar";
- else if (strcmp(s,";") == 0) literal = "_semic";
- else if (strcmp(s,"-") == 0) literal = "_minus";
- else if (strcmp(s,"<") == 0) literal = "_langle";
- else if (strcmp(s,"<=") == 0) literal = "_langle_equals";
- else if (strcmp(s,">") == 0) literal = "_rangle";
- else if (strcmp(s,">=") == 0) literal = "_rangle_equals";
- else if (strcmp(s,".") == 0) literal = "_period";
- else if (strcmp(s,"?") == 0) literal = "_question";
- else if (strcmp(s,"?=") == 0) literal = "_question_equals";
- else if (strcmp(s,"=") == 0) literal = "_equals";
- else if (strcmp(s,",") == 0) literal = "_comma";
- else if (strcmp(s,"[") == 0) literal = "_lbracket";
- else if (strcmp(s,"]") == 0) literal = "_rbracket";
- else if (strcmp(s,"{") == 0) literal = "_lbrace";
- else if (strcmp(s,"}") == 0) literal = "_rbrace";
- else if (strcmp(s,"(") == 0) literal = "_lparen";
- else if (strcmp(s,")") == 0) literal = "_rparen";
- l = strlen(literal)+2;
- result = (char*)malloc(l+1);
- for (c = 0; literal[c] != 0; ++c)
- {
- result[c] = toupper(literal[c]);
- }
- result[l-2] = '_';
- result[l-1] = 't';
- result[l] = 0;
- return result;
-}
-
-int cmp_literal(const void * a, const void * b)
-{
- return strcmp(((const literal *)a)->string,((const literal *)b)->string);
-}
diff --git a/jam-files/sanity.jam b/jam-files/sanity.jam
deleted file mode 100644
index 8ccfc65d..00000000
--- a/jam-files/sanity.jam
+++ /dev/null
@@ -1,277 +0,0 @@
-import modules ;
-import option ;
-import os ;
-import path ;
-import project ;
-import build-system ;
-import version ;
-
-#Run a shell command with the trailing newline removed; see http://lists.boost.org/boost-build/2007/08/17051.php
-rule trim-nl ( str extras * ) {
-return [ MATCH "([^
-]*)" : $(str) ] $(extras) ;
-}
-rule _shell ( cmd : extras * ) {
- return [ trim-nl [ SHELL $(cmd) : $(extras) ] ] ;
-}
-
-cxxflags = [ os.environ "CXXFLAGS" ] ;
-cflags = [ os.environ "CFLAGS" ] ;
-ldflags = [ os.environ "LDFLAGS" ] ;
-
-#Run g++ with empty main and these arguments to see if it passes.
-rule test_flags ( flags * ) {
- flags = $(cxxflags) $(ldflags) $(flags) ;
- local cmd = "bash -c \"g++ "$(flags:J=" ")" -x c++ - <<<'int main() {}' -o /dev/null >/dev/null 2>/dev/null\"" ;
- local ret = [ SHELL $(cmd) : exit-status ] ;
- if --debug-configuration in [ modules.peek : ARGV ] {
- echo $(cmd) ;
- echo $(ret) ;
- }
- if $(ret[2]) = 0 {
- return true ;
- } else {
- return ;
- }
-}
-
-rule test_header ( name ) {
- return [ test_flags "-include $(name)" ] ;
-}
-
-rule test_library ( name ) {
- return [ test_flags "-l$(name)" ] ;
-}
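-
-#Usage sketch (illustrative; mirrors the libSegFault probe further down):
-# if [ test_library "rt" ] {
-# external-lib rt ;
-# requirements += <library>rt ;
-# }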
-
-{
- local cleaning = [ option.get "clean" : : yes ] ;
- cleaning ?= [ option.get "clean-all" : no : yes ] ;
- if "clean" in [ modules.peek : ARGV ] {
- cleaning = yes ;
- }
- constant CLEANING : $(cleaning) ;
-}
-
-requirements = ;
-
-FORCE-STATIC = [ option.get "static" : : "yes" ] ;
-if $(FORCE-STATIC) {
- requirements += <runtime-link>static ;
-}
-
-#Determine whether a library can be linked statically; fall back to <link>shared if it cannot.
-rule auto-shared ( name : additional * ) {
- additional ?= "" ;
- if [ test_flags $(additional)" -static -l"$(name) ] {
- return ;
- } else {
- if $(FORCE-STATIC) {
- echo "Could not statically link against lib $(name). Your build will probably fail." ;
- return ;
- } else {
- return "<link>shared" ;
- }
- }
-}
-
-# MacPorts' default location is /opt/local -- use this if no path is given.
-with-macports = [ option.get "with-macports" : : "/opt/local" ] ;
-if $(with-macports) {
- using darwin ;
- ECHO "Using --with-macports=$(with-macports), implying use of darwin GCC" ;
-
- L-boost-search = -L$(with-macports)/lib ;
- boost-search = <search>$(with-macports)/lib ;
- I-boost-include = -I$(with-macports)/include ;
- boost-include = <include>$(with-macports)/include ;
-}
-else {
- with-boost = [ option.get "with-boost" ] ;
- with-boost ?= [ os.environ "BOOST_ROOT" ] ;
- if $(with-boost) {
- L-boost-search = -L$(with-boost)/lib" "-L$(with-boost)/lib64 ;
- boost-search = <search>$(with-boost)/lib <search>$(with-boost)/lib64 ;
- I-boost-include = -I$(with-boost)/include ;
- boost-include = <include>$(with-boost)/include ;
- } else {
- L-boost-search = "" ;
- boost-search = ;
- I-boost-include = "" ;
- boost-include = ;
- }
-}
-
-#Convenience rule for boost libraries. Defines library boost_$(name).
-rule boost-lib ( name macro : deps * ) {
- #Link multi-threaded programs against the -mt version if available. Old
- #versions of boost do not have -mt tagged versions of all libraries. Sadly,
- #boost.jam does not handle this correctly.
- if [ test_flags $(L-boost-search)" -lboost_"$(name)"-mt$(boost-lib-version)" ] {
- lib inner_boost_$(name) : : <threading>single $(boost-search) <name>boost_$(name)$(boost-lib-version) : : <library>$(deps) ;
- lib inner_boost_$(name) : : <threading>multi $(boost-search) <name>boost_$(name)-mt$(boost-lib-version) : : <library>$(deps) ;
- } else {
- lib inner_boost_$(name) : : $(boost-search) <name>boost_$(name)$(boost-lib-version) : : <library>$(deps) ;
- }
-
- alias boost_$(name) : inner_boost_$(name) : $(boost-auto-shared) : : <link>shared:<define>BOOST_$(macro) $(boost-include) ;
-}
-
-#Argument is e.g. 103600
-rule boost ( min-version ) {
- local cmd = "bash -c \"g++ "$(I-boost-include)" -dM -x c++ -E /dev/null -include boost/version.hpp 2>/dev/null |grep '#define BOOST_'\"" ;
- local boost-shell = [ SHELL "$(cmd)" : exit-status ] ;
- if $(boost-shell[2]) != 0 && $(CLEANING) = no {
- echo Failed to run "$(cmd)" ;
- exit Boost does not seem to be installed or g++ is confused. : 1 ;
- }
- boost-version = [ MATCH "#define BOOST_VERSION ([0-9]*)" : $(boost-shell[1]) ] ;
- if $(boost-version) < $(min-version) && $(CLEANING) = no {
- exit You have Boost $(boost-version). This package requires Boost at least $(min-version) (and preferably newer). : 1 ;
- }
- # If matching version tags exist, use them.
- boost-lib-version = [ MATCH "#define BOOST_LIB_VERSION \"([^\"]*)\"" : $(boost-shell[1]) ] ;
- if [ test_flags $(L-boost-search)" -lboost_program_options-"$(boost-lib-version) ] {
- boost-lib-version = "-"$(boost-lib-version) ;
- } else {
- boost-lib-version = "" ;
- }
-
- #Are we linking static binaries against shared boost?
- boost-auto-shared = [ auto-shared "boost_program_options"$(boost-lib-version) : $(L-boost-search) ] ;
-
- #See tools/build/v2/contrib/boost.jam in a boost distribution for a table of macros to define.
- boost-lib system SYSTEM_DYN_LINK ;
- boost-lib thread THREAD_DYN_DLL : boost_system ;
- boost-lib program_options PROGRAM_OPTIONS_DYN_LINK ;
- boost-lib unit_test_framework TEST_DYN_LINK ;
- boost-lib iostreams IOSTREAMS_DYN_LINK ;
- boost-lib filesystem FILE_SYSTEM_DYN_LINK ;
-}
-
-#Link normally to a library, but sometimes static isn't installed so fall back to dynamic.
-rule external-lib ( name : search-path * ) {
- lib $(name) : : [ auto-shared $(name) : "-L"$(search-path) ] <search>$(search-path) ;
-}
-
-#Write the current command line to previous.sh. This does not do shell escaping.
-{
- local build-log = $(TOP)/previous.sh ;
- if ! [ path.exists $(build-log) ] {
- SHELL "touch $(build-log) && chmod +x $(build-log)" ;
- }
- local script = [ modules.peek : ARGV ] ;
- if $(script[1]) = "./jam-files/bjam" {
- #The ./bjam shell script calls ./jam-files/bjam so that appears in argv but
- #we want ./bjam to appear so the environment variables are set correctly.
- script = "./bjam "$(script[2-]:J=" ") ;
- } else {
- script = $(script:J=" ") ;
- }
- script = "#!/bin/sh\n$(script)\n" ;
- local ignored = @($(build-log):E=$(script)) ;
-}
-
-#Boost jam's static clang for Linux is buggy.
-requirements += <cxxflags>$(cxxflags) <cflags>$(cflags) <linkflags>$(ldflags) <os>LINUX,<toolset>clang:<link>shared ;
-
-if ! [ option.get "without-libsegfault" : : "yes" ] && ! $(FORCE-STATIC) {
- #libSegFault prints a stack trace on segfault. Link against it if available.
- if [ test_flags "-lSegFault" ] {
- external-lib SegFault ;
- requirements += <library>SegFault ;
- }
-}
-
-if [ option.get "git" : : "yes" ] {
- local revision = [ _shell "git rev-parse --verify HEAD |head -c 7" ] ;
- constant GITTAG : "/"$(revision) ;
-} else {
- constant GITTAG : "" ;
-}
-
-prefix = [ option.get "prefix" ] ;
-if $(prefix) {
- prefix = [ path.root $(prefix) [ path.pwd ] ] ;
- prefix = $(prefix)$(GITTAG) ;
-} else {
- prefix = $(TOP)$(GITTAG) ;
-}
-
-bindir = [ option.get "bindir" : $(prefix)/bin ] ;
-libdir = [ option.get "libdir" : $(prefix)/lib ] ;
-rule install-bin-libs ( deps * ) {
- install prefix-bin : $(deps) : <location>$(bindir) <install-dependencies>on <install-type>EXE <link>shared:<dll-path>$(libdir) ;
- install prefix-lib : $(deps) : <location>$(libdir) <install-dependencies>on <install-type>LIB <link>shared:<dll-path>$(libdir) ;
-}
-rule install-headers ( name : list * : source-root ? ) {
- local includedir = [ option.get "includedir" : $(prefix)/include ] ;
- source-root ?= "." ;
- install $(name) : $(list) : <location>$(includedir) <install-source-root>$(source-root) ;
-}
-
-rule build-projects ( projects * ) {
- for local p in $(projects) {
- build-project $(p) ;
- }
-}
-
-#Boost Build allows only one post-build hook, so multiplex several through a single dispatcher.
-post-hooks = ;
-rule post-build ( ok ? ) {
- for local r in $(post-hooks) {
- $(r) $(ok) ;
- }
-}
-IMPORT $(__name__) : post-build : : $(__name__).post-build ;
-build-system.set-post-build-hook $(__name__).post-build ;
-rule add-post-hook ( names * ) {
- post-hooks += $(names) ;
-}
-
-
-#Backend for writing content to files after build completes.
-post-files = ;
-post-contents = ;
-rule save-post-build ( ok ? ) {
- if $(ok) {
- while $(post-files) {
- local ignored = @($(post-files[1]):E=$(post-contents[1])) ;
- post-files = $(post-files[2-]) ;
- post-contents = $(post-contents[2-]) ;
- }
- }
-}
-add-post-hook save-post-build ;
-
-#Queue content to be written to file when build completes successfully.
-rule add-post-write ( name content ) {
- post-files += $(name) ;
- post-contents += $(content) ;
-}
-
-#Compare the file's contents with the current value. If they differ, force the targets to rebuild, then write the new value once the build succeeds.
-rule always-if-changed ( file current : targets * ) {
- local previous = inconsistent ;
- if [ path.exists $(file) ] {
- previous = [ _shell "cat $(file)" ] ;
- }
- if $(current) != $(previous) {
- #Write inconsistent while the build is running
- if [ path.exists $(file) ] {
- local ignored = @($(file):E=inconsistent) ;
- }
- add-post-write $(file) $(current) ;
- for local i in $(targets) {
- always $(i) ;
- }
- }
-}
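-
-#Usage sketch (hypothetical file and target names, for illustration only):
-# always-if-changed $(TOP)/compile-flags.txt $(cxxflags:J=" ") : mytarget.o ;
-#This would force mytarget.o to rebuild whenever CXXFLAGS differs from the
-#value recorded during the previous successful build.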
-
-if [ option.get "sanity-test" : : "yes" ] {
- local current_version = [ modules.peek : JAM_VERSION ] ;
- if ( $(current_version[0]) < 2000 && [ version.check-jam-version 3 1 16 ] ) || [ version.check-jam-version 2011 0 0 ] {
- EXIT "Sane" : 0 ;
- } else {
- EXIT "Bad" : 1 ;
- }
-}