# Copyright (C) 2016 Intel Corporation. All rights reserved.
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Waf tool for Ardupilot libraries. The function bld.ap_library() creates the
necessary task generators for creating the objects of a library for a vehicle.
That includes the common objects, which are shared among vehicles. That
function is used by bld.ap_stlib() and shouldn't need to be called otherwise.

The environment variable AP_LIBRARIES_OBJECTS_KW is a dictionary of keyword
arguments to be passed to bld.objects() during the creation of the task
generators. You can use it to pass extra arguments to that function (although
some of them will be rewritten, see the implementation for details).

This tool also checks that the headers used by the source files don't use
vehicle-dependent macros, and fails the build if they do.
"""
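
# A minimal usage sketch (illustrative only, not part of this tool): the
# vehicle name, library names and the extra keyword argument below are
# assumptions, not requirements of this file.
#
#   # configure time: extra keyword arguments forwarded to bld.objects()
#   cfg.env.AP_LIBRARIES_OBJECTS_KW.update(cxxflags=['-Wall'])
#
#   # build time: bld.ap_stlib() calls bld.ap_library() for each library,
#   # creating the 'objs/<library>' (common) and 'objs/<library>/<vehicle>'
#   # object task generators consumed by process_ap_libraries() below
#   bld.ap_stlib(
#       name='ArduCopter_libs',
#       ap_vehicle='ArduCopter',
#       ap_libraries=['AP_Math', 'AP_HAL'],
#   )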

import os
import re

from waflib import Errors, Task, Utils, Logs
from waflib.Configure import conf
from waflib.TaskGen import after_method, before_method, feature
from waflib.Tools import c_preproc

import ardupilotwaf as ap

UTILITY_SOURCE_EXTS = ['utility/' + glob for glob in ap.SOURCE_EXTS]


def _common_tgen_name(library):
    return 'objs/%s' % library


def _vehicle_tgen_name(library, vehicle):
    return 'objs/%s/%s' % (library, vehicle)


_vehicle_indexes = {}
def _vehicle_index(vehicle):
    """ Used for the objects taskgens idx parameter """
    if vehicle not in _vehicle_indexes:
        _vehicle_indexes[vehicle] = len(_vehicle_indexes) + 1
    return _vehicle_indexes[vehicle]


# note that AP_NavEKF3_core.h is needed for AP_NavEKF3_feature.h
_vehicle_macros = ['APM_BUILD_DIRECTORY', 'AP_BUILD_TARGET_NAME',
                   'APM_BUILD_TYPE', 'APM_BUILD_COPTER_OR_HELI',
                   'AP_NavEKF3_core.h', 'lua_generated_bindings.h']
_macros_re = re.compile(r'\b(%s)\b' % '|'.join(_vehicle_macros))

# some cpp files are not available at the time we run this check so need to be
# unilaterally added
_vehicle_cpp_need_macros = ['lua_generated_bindings.cpp']
_macros_cpp_re = re.compile(r'\b(%s)\b' % '|'.join(_vehicle_cpp_need_macros))


def _remove_comments(s):
    return c_preproc.re_cpp.sub(c_preproc.repl, s)


_depends_on_vehicle_cache = {}
def _depends_on_vehicle(bld, source_node):
    path = source_node.srcpath()

    if not bld.env.BUILDROOT:
        bld.env.BUILDROOT = bld.bldnode.make_node('').abspath()

    if _macros_cpp_re.search(path) is not None:
        _depends_on_vehicle_cache[path] = True

    if path not in _depends_on_vehicle_cache:
        try:
            s = _remove_comments(source_node.read())
            _depends_on_vehicle_cache[path] = _macros_re.search(s) is not None
        except Exception:
            return False

    return _depends_on_vehicle_cache[path]


@conf
def ap_library(bld, library, vehicle):
    try:
        common_tg = bld.get_tgen_by_name(_common_tgen_name(library))
    except Errors.WafError:
        common_tg = None

    try:
        vehicle_tg = bld.get_tgen_by_name(_vehicle_tgen_name(library, vehicle))
    except Errors.WafError:
        vehicle_tg = None

    if common_tg and vehicle_tg:
        return

    if library.find('*') != -1:
        # allow for wildcard patterns, used for submodules without direct waf support
        library_dir = bld.srcnode.find_dir('.')
        wildcard = library
    else:
        library_dir = bld.srcnode.find_dir('libraries/%s' % library)
        wildcard = ap.SOURCE_EXTS + UTILITY_SOURCE_EXTS

    if not library_dir:
        bld.fatal('ap_library: %s not found' % library)

    src = library_dir.ant_glob(wildcard)

    # allow for dynamically generated sources in a library that inherit the
    # dependencies and includes
    if library in bld.env.AP_LIB_EXTRA_SOURCES:
        for s in bld.env.AP_LIB_EXTRA_SOURCES[library]:
            src.append(bld.bldnode.find_or_declare(os.path.join('libraries', library, s)))
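
    # A hypothetical example of how a library would register such a generated
    # source at configure time so it is picked up here (the library and file
    # names are illustrative only):
    #
    #   cfg.env.AP_LIB_EXTRA_SOURCES['AP_Scripting'] = ['lua_generated_bindings.cpp']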

    if not common_tg:
        kw = dict(bld.env.AP_LIBRARIES_OBJECTS_KW)
        kw['features'] = kw.get('features', []) + ['ap_library_object']
        kw.update(
            name=_common_tgen_name(library),
            source=[s for s in src if not _depends_on_vehicle(bld, s)],
            idx=0,
        )
        bld.objects(**kw)

    if not vehicle_tg:
        source = [s for s in src if _depends_on_vehicle(bld, s)]

        if not source:
            return

        kw = dict(bld.env.AP_LIBRARIES_OBJECTS_KW)
        kw['features'] = kw.get('features', []) + ['ap_library_object']
        kw.update(
            name=_vehicle_tgen_name(library, vehicle),
            source=source,
            defines=ap.get_legacy_defines(vehicle, bld),
            idx=_vehicle_index(vehicle),
        )
        bld.objects(**kw)


@before_method('process_use')
@feature('cxxstlib')
def process_ap_libraries(self):
    self.use = Utils.to_list(getattr(self, 'use', []))
    libraries = Utils.to_list(getattr(self, 'ap_libraries', []))
    vehicle = getattr(self, 'ap_vehicle', None)

    for l in libraries:
        self.use.append(_common_tgen_name(l))
        if vehicle:
            self.use.append(_vehicle_tgen_name(l, vehicle))


@before_method('process_source')
@feature('cxxstlib')
def dynamic_post(self):
    if not getattr(self, 'dynamic_source', None):
        return
    self.source = Utils.to_list(self.source)
    self.source.extend(self.bld.bldnode.ant_glob(self.dynamic_source))


class ap_library_check_headers(Task.Task):
    color = 'PINK'
    before = 'cxx c'
    dispatched_headers = set()

    whitelist = (
        'libraries/AP_Vehicle/AP_Vehicle_Type.h',
        'libraries/AP_Common/AP_FWVersionDefine.h',
        'libraries/AP_Scripting/lua_generated_bindings.h',
        'libraries/AP_NavEKF3/AP_NavEKF3_feature.h',
        'libraries/AP_LandingGear/AP_LandingGear_config.h',
    )

    whitelist = tuple(os.path.join(*p.split('/')) for p in whitelist)

    def run(self):
        for n in self.headers:
            s = _remove_comments(n.read())
            if _macros_re.search(s):
                raise Errors.WafError('%s: library header uses vehicle-dependent macros' % n.srcpath())

    def uid(self):
        try:
            return self._uid
        except AttributeError:
            self._uid = 'check_headers-%s' % self.compiled_task.uid()
            return self._uid

    def signature(self):
        bld = self.generator.bld
        # force scan() to be called
        bld.imp_sigs[self.uid()] = None
        s = super(ap_library_check_headers, self).signature()
        bld.ap_persistent_task_sigs[self.uid()] = s
        return s

    def scan(self):
        r = []
        self.headers = []

        srcnode_path = self.generator.bld.srcnode.abspath()

        # force dependency scan, if necessary
        self.compiled_task.signature()
        if self.compiled_task.uid() not in self.generator.bld.node_deps:
            return r, []
        for n in self.generator.bld.node_deps[self.compiled_task.uid()]:
            # using common Node methods doesn't work here
            p = n.abspath()
            if not p.startswith(srcnode_path):
                continue
            rel_p = os.path.relpath(p, srcnode_path)
            if rel_p in self.whitelist:
                continue

            # check if the path ends with something in the white list
            # this is required for white listing files in 'build/' (for scripting generated bindings)
            found = False
            for m in self.whitelist:
                if rel_p.endswith(m):
                    found = True
                    break

            if found:
                continue

            r.append(n)
            if n not in self.dispatched_headers:
                self.headers.append(n)
                self.dispatched_headers.add(n)

        return r, []

    def __str__(self):
        return str(self.compiled_task)

    def keyword(self):
        return 'Checking included headers'


def custom_flags_check(tgen):
    '''
    check for tasks marked as having custom C++ or C compiler flags

    a library can do this by setting AP_LIB_EXTRA_CXXFLAGS and AP_LIB_EXTRA_CFLAGS

    For example, add this in the configure section of the library, using AP_DDS as an example:

    cfg.env.AP_LIB_EXTRA_CXXFLAGS['AP_DDS'] = ['-DSOME_CXX_FLAG']
    cfg.env.AP_LIB_EXTRA_CFLAGS['AP_DDS'] = ['-DSOME_C_FLAG']
    '''
    if not tgen.name.startswith("objs/"):
        return
    libname = tgen.name[5:]
    if libname in tgen.env.AP_LIB_EXTRA_CXXFLAGS:
        tgen.env.CXXFLAGS.extend(tgen.env.AP_LIB_EXTRA_CXXFLAGS[libname])
    if libname in tgen.env.AP_LIB_EXTRA_CFLAGS:
        tgen.env.CFLAGS.extend(tgen.env.AP_LIB_EXTRA_CFLAGS[libname])


def double_precision_check(tasks):
    '''check for tasks marked as double precision'''
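    # A hypothetical sketch of how a configure step could mark a whole library,
    # or individual source files, for double precision (the library and file
    # names are illustrative assumptions, not taken from this file):
    #
    #   cfg.env.DOUBLE_PRECISION_LIBRARIES['AP_Common'] = True
    #   cfg.env.DOUBLE_PRECISION_SOURCES['AP_Math'] = ['matrix3.cpp']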

    for t in tasks:
        if len(t.inputs) == 1:
            # get a list of tasks we need to change to be double precision
            double_tasks = []
            for library in t.env.DOUBLE_PRECISION_SOURCES.keys():
                for s in t.env.DOUBLE_PRECISION_SOURCES[library]:
                    double_tasks.append([library, s])

            src = str(t.inputs[0]).split('/')[-2:]
            double_library = t.env.DOUBLE_PRECISION_LIBRARIES.get(src[0], False)

            if double_library or src in double_tasks:
                t.env.CXXFLAGS = t.env.CXXFLAGS[:]
                for opt in ['-fsingle-precision-constant', '-cl-single-precision-constant']:
                    try:
                        t.env.CXXFLAGS.remove(opt)
                    except ValueError:
                        pass
                t.env.CXXFLAGS.append("-DALLOW_DOUBLE_MATH_FUNCTIONS")


def gsoap_library_check(bld, tasks):
    '''check for tasks marked as gSOAP library source'''

    for t in tasks:
        if len(t.inputs) == 1:
            gsoap_tasks = []
            for s in t.env.AP_LIB_EXTRA_SOURCES["AP_ONVIF"]:
                gsoap_tasks.append(bld.bldnode.find_or_declare(os.path.join('libraries', "AP_ONVIF", s)))

            if t.inputs[0] in gsoap_tasks:
                t.env.CXXFLAGS += [
                    '-Wno-shadow',
                ]
                if 'clang++' not in t.env.COMPILER_CXX:
                    t.env.CXXFLAGS += [
                        '-Wno-suggest-override',
                    ]


@feature('ap_library_object')
@after_method('process_source')
def ap_library_register_for_check(self):
    if not hasattr(self, 'compiled_tasks'):
        return

    custom_flags_check(self)
    double_precision_check(self.compiled_tasks)
    if self.env.ENABLE_ONVIF:
        gsoap_library_check(self.bld, self.compiled_tasks)

    if not self.env.ENABLE_HEADER_CHECKS:
        return

    for t in self.compiled_tasks:
        tsk = self.create_task('ap_library_check_headers')
        tsk.compiled_task = t


def write_compilation_database(bld):
    """
    Write the compilation database as JSON
    """
    database_file = bld.bldnode.find_or_declare('compile_commands.json')
    # don't remove the file at clean
    Logs.info('Build commands will be stored in %s', database_file.path_from(bld.path))
    try:
        root = database_file.read_json()
    except IOError:
        root = []
    compile_cmd_db = dict((x['file'], x) for x in root)
    for task in bld.compilation_database_tasks:
        try:
            cmd = task.last_cmd
        except AttributeError:
            continue
        f_node = task.inputs[0]
        filename = f_node.path_from(task.get_cwd())
        entry = {
            "directory": task.get_cwd().abspath(),
            "arguments": cmd,
            "file": filename,
        }
        compile_cmd_db[filename] = entry
    root = list(compile_cmd_db.values())
    database_file.write_json(root)


def target_list_changed(bld, targets):
    """
    Check whether the list of targets recorded in the target_list file has changed
    """
    # target_list file is in the root build directory
    target_list_file = bld.bldnode.find_or_declare('target_list')
    try:
        with open(target_list_file.abspath(), 'r') as f:
            old_targets = f.read().strip().split(',')
    except IOError:
        Logs.info('No target_list file found, creating')
        old_targets = []
    if old_targets != targets:
        with open(target_list_file.abspath(), 'w') as f:
            f.write(','.join(targets))
        return True
    return False


@conf
def remove_target_list(cfg):
    target_list_file = cfg.bldnode.make_node(cfg.options.board + '/target_list')
    try:
        Logs.info('Removing target_list file %s', target_list_file.abspath())
        os.remove(target_list_file.abspath())
    except OSError:
        pass


@feature('cxxprogram', 'cxxstlib')
@after_method('propagate_uselib_vars')
def dry_run_compilation_database(self):
    if not hasattr(self, 'bld'):
        return
    bld = self.bld
    bld.compilation_database_tasks = []
    targets = bld.targets.split(',')
    use = self.use
    if isinstance(use, str):
        use = [use]
    # if targets have not changed and neither has configuration,
    # we can skip compilation database generation
    if not target_list_changed(bld, targets + use):
        Logs.info('Targets have not changed, skipping compilation database compile_commands.json generation')
        return
    Logs.info('Generating compile_commands.json')
    # we need only to generate last_cmd, so override
    # exec_command temporarily
    def exec_command(bld, *k, **kw):
        return 0

    for g in bld.groups:
        for tg in g:
            # we only care to list targets and library objects
            if not hasattr(tg, 'name'):
                continue
            if (tg.name not in targets) and (tg.name not in self.use):
                continue
            try:
                f = tg.post
            except AttributeError:
                pass
            else:
                f()

            if isinstance(tg, Task.Task):
                lst = [tg]
            else:
                lst = tg.tasks
            for tsk in lst:
                if tsk.__class__.__name__ == "swig":
                    tsk.runnable_status()
                    if hasattr(tsk, 'more_tasks'):
                        lst.extend(tsk.more_tasks)
                # Not all dynamic tasks can be processed, in some cases
                # one may have to call the method "run()" like this:
                # elif tsk.__class__.__name__ == 'src2c':
                #     tsk.run()
                #     if hasattr(tsk, 'more_tasks'):
                #         lst.extend(tsk.more_tasks)

                tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
                if isinstance(tsk, tup):
                    bld.compilation_database_tasks.append(tsk)
                    tsk.nocache = True
                    old_exec = tsk.exec_command
                    tsk.exec_command = exec_command
                    tsk.run()
                    tsk.exec_command = old_exec

    write_compilation_database(bld)


def configure(cfg):
    cfg.env.AP_LIBRARIES_OBJECTS_KW = dict()
    cfg.env.AP_LIB_EXTRA_SOURCES = dict()
    cfg.env.AP_LIB_EXTRA_CXXFLAGS = dict()
    cfg.env.AP_LIB_EXTRA_CFLAGS = dict()
    cfg.env.DOUBLE_PRECISION_SOURCES = dict()
    cfg.env.DOUBLE_PRECISION_LIBRARIES = dict()