Commit f86a4ef6 authored by Jelmer Vernooij

Import upstream version 2.0.6

parent fb7d2e24
#!/bin/sh
p=`dirname $0`
echo "Setting up for waf build"
echo "done. Now run $p/configure then make."
if [ $p != "." ]; then
echo "Notice: The build invoke path is not the main directory! Use make with the parameter"
echo "-C $p. Example: make -C $p all"
fi
@@ -3,10 +3,17 @@
# compare the generated config.h from a waf build with existing samba
# build
OLD_CONFIG=$HOME/samba_old/source3/include/config.h
if test "x$1" != "x" ; then
OLD_CONFIG=$1
fi
if test "x$DIFF" = "x" ; then
DIFF="comm -23"
fi
grep "^.define" bin/default/source3/include/config.h | sort > waf-config.h
grep "^.define" $HOME/samba_old/source3/include/config.h | sort > old-config.h
grep "^.define" $OLD_CONFIG | sort > old-config.h
comm -23 old-config.h waf-config.h
$DIFF old-config.h waf-config.h
#echo
#diff -u old-config.h waf-config.h
source4/main.mk
source4/lib/basic.mk
pidl/config.mk
nsswitch/config.mk
nsswitch/libwbclient/config.mk
source4/heimdal_build/internal.mk
source4/lib/ldb-samba/config.mk
source4/librpc/config.mk
source4/utils/config.mk
source4/utils/net/config.mk
source4/scripting/python/config.mk
source4/auth/config.mk
source4/auth/gensec/config.mk
source4/auth/kerberos/config.mk
source4/auth/ntlm/config.mk
source4/auth/credentials/config.mk
source4/auth/ntlmssp/config.mk
source4/libnet/config.mk
source4/nbt_server/config.mk
source4/wrepl_server/config.mk
source4/ntvfs/config.mk
source4/ntvfs/unixuid/config.mk
source4/ntvfs/sysdep/config.mk
source4/ntvfs/common/config.mk
source4/ntvfs/posix/config.mk
source4/selftest/config.mk
source4/cldap_server/config.mk
source4/smb_server/config.mk
source4/smb_server/smb2/config.mk
source4/smb_server/smb/config.mk
source4/smbd/config.mk source4/smbd/process_model.mk
source4/kdc/config.mk
source4/dsdb/config.mk
source4/dsdb/samdb/ldb_modules/config.mk
source4/web_server/config.mk
source4/param/config.mk
source4/winbind/config.mk
source4/cluster/config.mk
source4/client/config.mk
source4/ntptr/config.mk
source4/rpc_server/config.mk
source4/libcli/config.mk
source4/libcli/smb2/config.mk
source4/libcli/wbclient/config.mk
source4/libcli/security/config.mk
source4/libcli/ldap/config.mk
source4/ntp_signd/config.mk
source4/torture/config.mk
source4/torture/smb2/config.mk
source4/torture/local/config.mk
source4/torture/drs/config.mk
source4/torture/winbind/config.mk
source4/torture/libsmbclient/config.mk
source4/torture/libnetapi/config.mk
source4/lib/messaging/config.mk
source4/lib/events/config.mk
source4/lib/stream/config.mk
source4/lib/cmdline/config.mk
source4/lib/com/config.mk
source4/lib/registry/config.mk
source4/lib/wmi/config.mk
source4/lib/socket/config.mk
source4/lib/samba3/config.mk
source4/ldap_server/config.mk
libgpo/config.mk
libcli/cldap/config.mk
libcli/samsync/config.mk
libcli/nbt/config.mk
libcli/auth/config.mk
libcli/drsuapi/config.mk
libcli/security/config.mk
libcli/smb/config.mk
libcli/named_pipe_auth/config.mk
libcli/ldap/config.mk
lib/uid_wrapper/config.mk
lib/crypto/config.mk
lib/socket_wrapper/config.mk
lib/util/config.mk
lib/util/charset/config.mk
lib/nss_wrapper/config.mk
lib/tsocket/config.mk
lib/popt/config.mk
lib/async_req/config.mk
lib/tdr/config.mk
lib/torture/config.mk
lib/smbconf/config.mk
#!/bin/sh
cat mklist.txt |
while read line; do
ws=""
list=""
for f in $line; do
echo "Processing $f"
f="../../$f"
test -f $f || {
echo "$f doesn't exist"
exit 1
}
ws="$(dirname $f)/wscript_build"
if [ -f $ws ]; then
if test -s $ws && ! grep "AUTOGENERATED.by.mktowscript" $ws > /dev/null; then
echo "Skipping manually edited file $ws"
continue
fi
fi
list="$list $f"
done
if [ "$list" = "" ]; then
continue
fi
./mktowscript.pl $list > wscript_build.$$ || {
echo "Failed on $f"
rm -f wscript_build.$$
exit 1
}
if cmp wscript_build.$$ $ws > /dev/null 2>&1; then
rm -f wscript_build.$$
else
mv wscript_build.$$ $ws || exit 1
fi
#exit 1
done
@@ -62,7 +62,7 @@ ctags:
$(WAF) ctags
bin/%:: FORCE
$(WAF) --targets=`basename $@`
$(WAF) --targets=$@
FORCE:
configure: autogen-waf.sh BUILDTOOLS/scripts/configure.waf
@@ -16,5 +16,6 @@ nm "$SHAREDLIB" | cut -d' ' -f2- | egrep '^[BDGTRVWS]' | grep -v @ | cut -c3- |
done
) > $GDBSCRIPT
gdb -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null
# forcing the terminal avoids a problem on Fedora12
TERM=none gdb -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null
rm -f $GDBSCRIPT
#!/bin/sh
#!/bin/bash
set -e
set -x
@@ -11,7 +11,7 @@ PREFIX=$HOME/testprefix
if [ $# -gt 0 ]; then
tests="$*"
else
tests="lib/replace lib/talloc lib/tevent lib/tdb source4/lib/ldb"
tests="lib/replace lib/talloc lib/tevent lib/tdb lib/ldb"
fi
echo "testing in dirs $tests"
@@ -21,14 +21,13 @@ for d in $tests; do
pushd $d
rm -rf bin
type waf
./autogen-waf.sh
waf dist
./configure -C --enable-developer --prefix=$PREFIX
time make
make install
make distcheck
case $d in
"source4/lib/ldb")
"lib/ldb")
ldd bin/ldbadd
;;
"lib/replace")
@@ -53,6 +53,7 @@ def PKG_CONFIG_FILES(bld, pc_files, vnum=None):
rule=subst_at_vars,
source=f+'.in',
target=f)
bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX'])
t.vars = []
if t.env.RPATH_ON_INSTALL:
t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
import Options
import Build
import Options, Build, os
from optparse import SUPPRESS_HELP
from samba_utils import os_path_relpath, TO_LIST
def SAMBA3_ADD_OPTION(opt, option, help=(), dest=None, default=True,
with_name="with", without_name="without"):
@@ -41,3 +41,72 @@ def SAMBA3_IS_ENABLED_MODULE(bld, module):
'''Check whether module is in either shared or static list '''
return SAMBA3_IS_STATIC_MODULE(bld, module) or SAMBA3_IS_SHARED_MODULE(bld, module)
Build.BuildContext.SAMBA3_IS_ENABLED_MODULE = SAMBA3_IS_ENABLED_MODULE
def s3_fix_kwargs(bld, kwargs):
'''fix the build arguments for s3 build rules to include the
necessary includes, subdir and cflags options '''
s3dir = os.path.join(bld.env.srcdir, 'source3')
s3reldir = os_path_relpath(s3dir, bld.curdir)
# the extra_includes list is relative to the source3 directory
extra_includes = [ '.', 'include', 'lib', '../lib/tdb_compat' ]
if bld.env.use_intree_heimdal:
extra_includes += [ '../source4/heimdal/lib/com_err',
'../source4/heimdal/lib/gssapi',
'../source4/heimdal_build' ]
if bld.CONFIG_SET('BUILD_TDB2'):
if not bld.CONFIG_SET('USING_SYSTEM_TDB2'):
extra_includes += [ '../lib/tdb2' ]
else:
if not bld.CONFIG_SET('USING_SYSTEM_TDB'):
extra_includes += [ '../lib/tdb/include' ]
if not bld.CONFIG_SET('USING_SYSTEM_TEVENT'):
extra_includes += [ '../lib/tevent' ]
if not bld.CONFIG_SET('USING_SYSTEM_TALLOC'):
extra_includes += [ '../lib/talloc' ]
if not bld.CONFIG_SET('USING_SYSTEM_POPT'):
extra_includes += [ '../lib/popt' ]
# s3 builds assume that they will have a bunch of extra include paths
includes = []
for d in extra_includes:
includes += [ os.path.join(s3reldir, d) ]
# the rule may already have some includes listed
if 'includes' in kwargs:
includes += TO_LIST(kwargs['includes'])
kwargs['includes'] = includes
# some S3 code assumes that CONFIGFILE is set
cflags = ['-DCONFIGFILE="%s"' % bld.env['CONFIGFILE']]
if 'cflags' in kwargs:
cflags += TO_LIST(kwargs['cflags'])
kwargs['cflags'] = cflags
# these wrappers allow for mixing of S3 and S4 build rules in the one build
def SAMBA3_LIBRARY(bld, name, *args, **kwargs):
s3_fix_kwargs(bld, kwargs)
return bld.SAMBA_LIBRARY(name, *args, **kwargs)
Build.BuildContext.SAMBA3_LIBRARY = SAMBA3_LIBRARY
def SAMBA3_MODULE(bld, name, *args, **kwargs):
s3_fix_kwargs(bld, kwargs)
return bld.SAMBA_MODULE(name, *args, **kwargs)
Build.BuildContext.SAMBA3_MODULE = SAMBA3_MODULE
def SAMBA3_SUBSYSTEM(bld, name, *args, **kwargs):
s3_fix_kwargs(bld, kwargs)
return bld.SAMBA_SUBSYSTEM(name, *args, **kwargs)
Build.BuildContext.SAMBA3_SUBSYSTEM = SAMBA3_SUBSYSTEM
def SAMBA3_BINARY(bld, name, *args, **kwargs):
s3_fix_kwargs(bld, kwargs)
return bld.SAMBA_BINARY(name, *args, **kwargs)
Build.BuildContext.SAMBA3_BINARY = SAMBA3_BINARY
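For illustration, a hypothetical source3 wscript_build fragment using these wrappers could look like the sketch below (target and source names are invented; bld is supplied by waf):
# illustrative sketch only -- invented names
bld.SAMBA3_SUBSYSTEM('EXAMPLE_UTIL',
                     source='lib/example_util.c',
                     deps='talloc')
bld.SAMBA3_BINARY('example_tool',
                  source='utils/example_tool.c',
                  deps='EXAMPLE_UTIL popt')
# s3_fix_kwargs() prepends the source3-relative include paths and the
# -DCONFIGFILE define before handing each rule to the common SAMBA_* code.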
@@ -10,6 +10,8 @@ abi_type_maps = {
'struct __va_list_tag *' : 'va_list'
}
version_key = lambda x: map(int, x.split("."))
def normalise_signature(sig):
'''normalise a signature from gdb'''
sig = sig.strip()
@@ -145,8 +147,16 @@ def abi_process_file(fname, version, symmap):
symmap[symname] = version
f.close()
def abi_write_vscript(vscript, libname, version, symmap, abi_match):
'''write a vscript file for a library in --version-script format'''
def abi_write_vscript(vscript, libname, current_version, versions, symmap, abi_match):
'''write a vscript file for a library in --version-script format
:param vscript: Path to the vscript file
:param libname: Name of the library, uppercased
:param current_version: Current version
:param versions: Versions to consider
:param symmap: Dictionary mapping symbols -> version
:param abi_match: List of symbols considered to be public in the current version
'''
invmap = {}
for s in symmap:
@@ -154,16 +164,19 @@ def abi_write_vscript(vscript, libname, version, symmap, abi_match):
f = open(vscript, mode='w')
last_key = ""
for k in sorted(invmap):
versions = sorted(versions, key=version_key)
for k in versions:
symver = "%s_%s" % (libname, k)
if symver == version:
if symver == current_version:
break
f.write("%s {\n\tglobal: \n" % symver)
for s in invmap[k]:
f.write("\t\t%s;\n" % s);
f.write("%s {\n" % symver)
if k in invmap:
f.write("\tglobal: \n")
for s in invmap.get(k, []):
f.write("\t\t%s;\n" % s);
f.write("}%s;\n\n" % last_key)
last_key = " %s" % symver
f.write("%s {\n" % version)
f.write("%s {\n" % current_version)
f.write("\tglobal:\n")
for x in abi_match:
f.write("\t\t%s;\n" % x)
@@ -179,13 +192,14 @@ def abi_build_vscript(task):
tgt = task.outputs[0].bldpath(task.env)
symmap = {}
versions = []
for f in task.inputs:
fname = f.abspath(task.env)
basename = os.path.basename(fname)
version = basename[len(task.env.LIBNAME)+1:-len(".sigs")]
versions.append(version)
abi_process_file(fname, version, symmap)
abi_write_vscript(tgt, task.env.LIBNAME, task.env.VERSION, symmap,
abi_write_vscript(tgt, task.env.LIBNAME, task.env.VERSION, versions, symmap,
task.env.ABI_MATCH)
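As a rough illustration of the output the reworked abi_write_vscript() produces (library, symbol and version names below are invented), a call such as:
# illustrative sketch only -- invented data
symmap = {'example_sym_one': '1.0.0', 'example_sym_two': '1.1.0'}
abi_write_vscript('example.vscript', 'EXAMPLE', 'EXAMPLE_1.2.0',
                  ['1.0.0', '1.1.0', '1.2.0'], symmap, ['*'])
# writes roughly:
#   EXAMPLE_1.0.0 {
#           global:
#                   example_sym_one;
#   };
#
#   EXAMPLE_1.1.0 {
#           global:
#                   example_sym_two;
#   } EXAMPLE_1.0.0;
#
#   EXAMPLE_1.2.0 {
#           global:
#                   *;
# with the closing of the current version's node handled by code outside the
# context shown in this hunk.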
@@ -193,10 +207,14 @@ def ABI_VSCRIPT(bld, libname, abi_directory, version, vscript, abi_match=None):
'''generate a vscript file for our public libraries'''
if abi_directory:
source = bld.path.ant_glob('%s/%s-[0-9]*.sigs' % (abi_directory, libname))
source = sorted(source.split())
def abi_file_key(path):
return version_key(path[:-len(".sigs")].rsplit("-")[-1])
source = sorted(source.split(), key=abi_file_key)
else:
source = ''
libname = os.path.basename(libname)
version = os.path.basename(version)
libname = libname.replace("-", "_").replace("+","_").upper()
version = version.replace("-", "_").replace("+","_").upper()
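# Illustration (invented .sigs names): abi_file_key sorts the signature files
# numerically by version, so
#   'example-1.2.3.sigs example-1.9.0.sigs example-1.10.0.sigs'
# keeps 1.9.0 before 1.10.0, where a plain lexical sort would have put
# 1.10.0 first.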
@@ -80,7 +80,7 @@ def nolink(self):
def CHECK_HEADER(conf, h, add_headers=False, lib=None):
'''check for a header'''
if h in missing_headers:
if h in missing_headers and lib is None:
return False
d = h.upper().replace('/', '_')
d = d.replace('.', '_')
@@ -463,21 +463,22 @@ Build.BuildContext.CONFIG_SET = CONFIG_SET
Build.BuildContext.CONFIG_GET = CONFIG_GET
def library_flags(conf, libs):
def library_flags(self, libs):
'''work out flags from pkg_config'''
ccflags = []
ldflags = []
for lib in TO_LIST(libs):
inc_path = getattr(conf.env, 'CPPPATH_%s' % lib.upper(), [])
lib_path = getattr(conf.env, 'LIBPATH_%s' % lib.upper(), [])
inc_path = getattr(self.env, 'CPPPATH_%s' % lib.upper(), [])
lib_path = getattr(self.env, 'LIBPATH_%s' % lib.upper(), [])
ccflags.extend(['-I%s' % i for i in inc_path])
ldflags.extend(['-L%s' % l for l in lib_path])
extra_ccflags = TO_LIST(getattr(conf.env, 'CCFLAGS_%s' % lib.upper(), []))
extra_ldflags = TO_LIST(getattr(conf.env, 'LDFLAGS_%s' % lib.upper(), []))
extra_ccflags = TO_LIST(getattr(self.env, 'CCFLAGS_%s' % lib.upper(), []))
extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), []))
ccflags.extend(extra_ccflags)
ldflags.extend(extra_ldflags)
if 'EXTRA_LDFLAGS' in conf.env:
ldflags.extend(conf.env['EXTRA_LDFLAGS'])
if 'EXTRA_LDFLAGS' in self.env:
ldflags.extend(self.env['EXTRA_LDFLAGS'])
ccflags = unique_list(ccflags)
ldflags = unique_list(ldflags)
return (ccflags, ldflags)
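As a simple illustration (paths invented), the flags are derived from the per-library environment variables filled in by the pkg-config checks:
# illustrative sketch only -- invented paths
conf.env['CPPPATH_ZLIB'] = ['/opt/example/include']
conf.env['LIBPATH_ZLIB'] = ['/opt/example/lib']
ccflags, ldflags = library_flags(conf, 'zlib')
# ccflags -> ['-I/opt/example/include']   (plus any CCFLAGS_ZLIB entries)
# ldflags -> ['-L/opt/example/lib']       (plus any LDFLAGS_ZLIB and EXTRA_LDFLAGS)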
@@ -596,8 +597,10 @@ def SAMBA_CONFIG_H(conf, path=None):
if Options.options.developer:
# we add these here to ensure that -Wstrict-prototypes is not set during configure
conf.ADD_CFLAGS('-Wall -g -Wshadow -Wstrict-prototypes -Wpointer-arith -Wcast-qual -Wcast-align -Wwrite-strings -Werror-implicit-function-declaration -Wformat=2 -Wno-format-y2k -Wmissing-prototypes',
conf.ADD_CFLAGS('-Wall -g -Wshadow -Wstrict-prototypes -Wpointer-arith -Wcast-align -Wwrite-strings -Werror-implicit-function-declaration -Wformat=2 -Wno-format-y2k -Wmissing-prototypes -fno-common',
testflags=True)
if os.getenv('TOPLEVEL_BUILD'):
conf.ADD_CFLAGS('-Wcast-qual', testflags=True)
conf.env.DEVELOPER_MODE = True
if Options.options.picky_developer:
@@ -644,6 +647,8 @@ def ADD_CFLAGS(conf, flags, testflags=False):
def ADD_LDFLAGS(conf, flags, testflags=False):
'''add some LDFLAGS to the command line
optionally set testflags to ensure all the flags work
this will return the flags that are added, if any
'''
if testflags:
ok_flags=[]
@@ -654,7 +659,7 @@ def ADD_LDFLAGS(conf, flags, testflags=False):
if not 'EXTRA_LDFLAGS' in conf.env:
conf.env['EXTRA_LDFLAGS'] = []
conf.env['EXTRA_LDFLAGS'].extend(TO_LIST(flags))
return flags
@conf
@@ -6,13 +6,11 @@ from samba_utils import *
def PRIVATE_NAME(bld, name, private_extension, private_library):
'''possibly rename a library to include a bundled extension'''
if bld.env.DISABLE_SHARED or not private_extension:
return name
if name in bld.env.PRIVATE_EXTENSION_EXCEPTION and not private_library:
return name
extension = getattr(bld.env, 'PRIVATE_EXTENSION', '')
if extension:
return name + '-' + extension
# we now use the same private name for libraries as the public name.
# see http://git.samba.org/?p=tridge/junkcode.git;a=tree;f=shlib for a
# demonstration that this is the right thing to do
# also see http://lists.samba.org/archive/samba-technical/2011-January/075816.html
return name
@@ -124,10 +122,14 @@ def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0',
minversion = minimum_library_version(conf, libname, minversion)
msg = 'Checking for system %s' % libname
if minversion != '0.0.0':
msg += ' >= %s' % minversion
# try pkgconfig first
if (conf.check_cfg(package=libname,
args='"%s >= %s" --cflags --libs' % (libname, minversion),
msg='Checking for system %s >= %s' % (libname, minversion)) and
msg=msg) and
check_functions_headers()):
conf.SET_TARGET_TYPE(libname, 'SYSLIB')
conf.env[found] = True
@@ -85,6 +85,14 @@ def build_dependencies(self):
new_ldflags.extend(ldflags)
self.ldflags = new_ldflags
if getattr(self, 'allow_undefined_symbols', False) and self.env.undefined_ldflags:
for f in self.env.undefined_ldflags:
self.ldflags.remove(f)
if getattr(self, 'allow_undefined_symbols', False) and self.env.undefined_ignore_ldflags:
for f in self.env.undefined_ignore_ldflags:
self.ldflags.append(f)
debug('deps: computed dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
self.sname, self.uselib, self.uselib_local, self.add_objects)
@@ -133,7 +141,7 @@ def build_includes(self):
includes.extend(self.samba_includes_extended)
if 'EXTRA_INCLUDES' in bld.env:
if 'EXTRA_INCLUDES' in bld.env and getattr(self, 'global_include', True):
includes.extend(bld.env['EXTRA_INCLUDES'])
includes.append('#')
@@ -214,9 +222,11 @@ def add_init_functions(self):
cflags = getattr(self, 'samba_cflags', [])[:]
if modules == []:
cflags.append('-DSTATIC_%s_MODULES=%s' % (sname.replace('-','_'), sentinal))
sname = sname.replace('-','_')
sname = sname.replace('/','_')
cflags.append('-DSTATIC_%s_MODULES=%s' % (sname, sentinal))
if sentinal == 'NULL':
cflags.append('-DSTATIC_%s_MODULES_PROTO' % sname.replace('-','_'))
cflags.append('-DSTATIC_%s_MODULES_PROTO' % sname)
self.ccflags = cflags
return
@@ -286,8 +296,7 @@ def check_duplicate_sources(bld, tgt_list):
Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
for tname in subsystems[s]:
if len(subsystems[s][tname]) > 1:
Logs.error("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
sys.exit(1)
raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
return ret
@@ -953,10 +962,11 @@ def show_object_duplicates(bld, tgt_list):
# this provides a way to save our dependency calculations between runs
savedeps_version = 3
savedeps_inputs = ['samba_deps', 'samba_includes', 'local_include', 'local_include_first', 'samba_cflags',
'source', 'grouping_library', 'samba_ldflags']
'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols',
'use_global_deps', 'global_include' ]
savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes', 'ccflags', 'ldflags', 'samba_deps_extended']
savedeps_outenv = ['INC_PATHS']
savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS' ]
savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ]
savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS']
savedeps_files = ['buildtools/wafsamba/samba_deps.py']
@@ -80,14 +80,14 @@ def vcs_dir_contents(path):
while repo != "/":
if os.path.isdir(os.path.join(repo, ".git")):
ls_files_cmd = [ 'git', 'ls-files', '--full-name',
os.path.relpath(path, repo) ]
os_path_relpath(path, repo) ]
cwd = None
env = dict(os.environ)
env["GIT_DIR"] = os.path.join(repo, ".git")
break
elif os.path.isdir(os.path.join(repo, ".bzr")):
ls_files_cmd = [ 'bzr', 'ls', '--recursive', '--versioned',
os.path.relpath(path, repo)]
os_path_relpath(path, repo)]
cwd = repo
env = None
break
# specialist handling of header files for Samba
import Build, re, Task, TaskGen, shutil, sys, Logs
from samba_utils import *
def header_install_path(header, header_path):
'''find the installation path for a header, given a header_path option'''
if not header_path:
return ''
if not isinstance(header_path, list):
return header_path
for (p1, dir) in header_path:
for p2 in TO_LIST(p1):
if fnmatch.fnmatch(header, p2):
return dir
# default to current path
return ''
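# Illustration (invented patterns/destinations): with a header_path of
#   [ ('lib/example/*.h', 'example'), ('include/*.h', '') ]
# header_install_path('lib/example/example.h', ...) returns 'example',
# since the first wildcard pattern that matches the header wins.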
re_header = re.compile('^\s*#\s*include[ \t]*"([^"]+)"', re.I | re.M)
# a dictionary mapping source header paths to public header paths
header_map = {}
def find_suggested_header(hpath):
'''find a suggested header path to use'''
base = os.path.basename(hpath)
ret = []
for h in header_map:
if os.path.basename(h) == base:
ret.append('<%s>' % header_map[h])
ret.append('"%s"' % h)
return ret
def create_public_header(task):
'''create a public header from a private one, output within the build tree'''
src = task.inputs[0].abspath(task.env)
tgt = task.outputs[0].bldpath(task.env)
if os.path.exists(tgt):
os.unlink(tgt)
relsrc = os_path_relpath(src, task.env.TOPDIR)
infile = open(src, mode='r')
outfile = open(tgt, mode='w')
linenumber = 0
search_paths = [ '', task.env.RELPATH ]
for i in task.env.EXTRA_INCLUDES:
if i.startswith('#'):
search_paths.append(i[1:])
for line in infile:
linenumber += 1
# allow some straight substitutions
if task.env.public_headers_replace and line.strip() in task.env.public_headers_replace:
outfile.write(task.env.public_headers_replace[line.strip()] + '\n')
continue
# see if it's an include line
m = re_header.match(line)
if m is None:
outfile.write(line)
continue
# it's an include, get the header path
hpath = m.group(1)
if hpath.startswith("bin/default/"):
hpath = hpath[12:]
# some are always allowed
if task.env.public_headers_skip and hpath in task.env.public_headers_skip:
outfile.write(line)
continue
# work out the header this refers to
found = False
for s in search_paths:
p = os.path.normpath(os.path.join(s, hpath))
if p in header_map:
outfile.write("#include <%s>\n" % header_map[p])
found = True
break
if found:
continue
if task.env.public_headers_allow_broken:
Logs.warn("Broken public header include '%s' in '%s'" % (hpath, relsrc))
outfile.write(line)
continue
# try to be nice to the developer by suggesting an alternative
suggested = find_suggested_header(hpath)
outfile.close()
os.unlink(tgt)
sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % (
os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
raise Utils.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
hpath, relsrc, task.env.RELPATH))
infile.close()
outfile.close()
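# Illustration (invented paths): if header_map contains
#   {'lib/example/example.h': 'example.h'}
# then a private-header line
#   #include "lib/example/example.h"
# is rewritten in the generated public header as
#   #include <example.h>
# while an include that cannot be resolved either produces a warning
# (when public_headers_allow_broken is set) or aborts with a suggestion
# from find_suggested_header().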
def public_headers_simple(bld, public_headers, header_path=None, public_headers_install=True):
'''install some headers - simple version, no munging needed
'''
if not public_headers_install:
return
for h in TO_LIST(public_headers):
inst_path = header_install_path(h, header_path)
if h.find(':') != -1:
s = h.split(":")
h_name = s[0]
inst_name = s[1]
else:
h_name = h
inst_name = os.path.basename(h)
bld.INSTALL_FILES('${INCLUDEDIR}', h_name, destname=inst_name)
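# Illustration (invented names):
#   public_headers_simple(bld, 'example.h sub/example_int.h:example_internal.h')
# installs example.h under its own basename and installs sub/example_int.h
# renamed to example_internal.h, both below ${INCLUDEDIR}.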
def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install=True):
'''install some headers
header_path may either be a string that is added to the INCLUDEDIR,
or it can be a dictionary of wildcard patterns which map to destination
directories relative to INCLUDEDIR