2018-01-05 02:53:19 -04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
# encoding: utf-8
|
|
|
|
|
|
|
|
"""
|
2018-01-08 19:41:37 -04:00
|
|
|
Waf tool for ChibiOS build
|
2018-01-05 02:53:19 -04:00
|
|
|
"""
|
|
|
|
|
|
|
|
from waflib import Errors, Logs, Task, Utils
|
|
|
|
from waflib.TaskGen import after_method, before_method, feature
|
|
|
|
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import sys
|
|
|
|
import re
|
2018-03-01 01:51:17 -04:00
|
|
|
import pickle
|
2019-10-20 23:35:57 -03:00
|
|
|
import zlib
|
|
|
|
import struct
|
2018-03-01 01:51:17 -04:00
|
|
|
|
2018-01-05 02:53:19 -04:00
|
|
|
_dynamic_env_data = {}
|
|
|
|
def _load_dynamic_env_data(bld):
    '''Parse the include_dirs file produced by the ChibiOS make build and
    cache the resulting list of include directories in _dynamic_env_data.

    The file contains the -I flags used by the make build, joined with
    "; " and terminated by ";\n".
    '''
    bldnode = bld.bldnode.make_node('modules/ChibiOS')
    tmp_str = bldnode.find_node('include_dirs').read()
    tmp_str = tmp_str.replace(';\n','')
    tmp_str = tmp_str.replace('-I','') #remove existing -I flags
    # split, coping with separator
    idirs = re.split('; ', tmp_str)

    # create unique list, coping with relative paths
    idirs2 = []
    for d in idirs:
        if d.startswith('../'):
            # relative paths from the make build are relative to BUILDROOT
            d = os.path.join(bld.env.BUILDROOT, d)
        d = os.path.normpath(d)
        if d not in idirs2:
            idirs2.append(d)
    _dynamic_env_data['include_dirs'] = idirs2
|
2018-01-05 02:53:19 -04:00
|
|
|
|
|
|
|
@feature('ch_ap_library', 'ch_ap_program')
@before_method('process_source')
def ch_dynamic_env(self):
    '''inject the ChibiOS include directories into ChibiOS build tasks'''
    bld = self.bld
    # when waf is only listing targets the generated configuration files
    # may not exist yet (TODO: figure out a better way to address that)
    if bld.cmd == 'list':
        return

    if not _dynamic_env_data:
        _load_dynamic_env_data(bld)
    self.use += ' ch'
    self.env.append_value('INCLUDES', _dynamic_env_data['include_dirs'])
|
|
|
|
|
|
|
|
|
|
|
|
class upload_fw(Task.Task):
    '''task that uploads the generated apj firmware using uploader.py'''
    color='BLUE'
    always_run = True

    def run(self):
        # assemble the uploader command line; paths are single-quoted as
        # they may contain spaces
        tools_dir = self.env.get_flat('UPLOAD_TOOLS')
        port = self.generator.bld.options.upload_port
        firmware = self.inputs[0]
        cmd = "{} '{}/uploader.py' '{}'".format(self.env.get_flat('PYTHON'), tools_dir, firmware)
        if port is not None:
            cmd += " '--port' '%s'" % port
        return self.exec_command(cmd)

    def exec_command(self, cmd, **kw):
        # stream the uploader's output straight to the console
        kw['stdout'] = sys.stdout
        return super(upload_fw, self).exec_command(cmd, **kw)

    def keyword(self):
        return "Uploading"
|
|
|
|
|
2018-01-07 18:57:19 -04:00
|
|
|
class set_default_parameters(Task.Task):
    '''embed a default parameters file into the firmware using apj_tool'''
    color='CYAN'
    always_run = True

    def keyword(self):
        return "apj_tool"

    def run(self):
        rel_default_parameters = self.env.get_flat('DEFAULT_PARAMETERS')
        abs_default_parameters = os.path.join(self.env.SRCROOT, rel_default_parameters)
        apj_tool = self.env.APJ_TOOL
        # make apj_tool importable; guard the append so repeated runs of
        # this always_run task don't keep growing sys.path
        apj_dir = os.path.dirname(apj_tool)
        if apj_dir not in sys.path:
            sys.path.append(apj_dir)
        from apj_tool import embedded_defaults
        defaults = embedded_defaults(self.inputs[0].abspath())
        # only write defaults if the parameter area marker was found in
        # the firmware image
        if defaults.find():
            defaults.set_file(abs_default_parameters)
            defaults.save()
|
2018-03-20 06:25:12 -03:00
|
|
|
|
2018-01-07 18:57:19 -04:00
|
|
|
|
2018-06-23 01:53:20 -03:00
|
|
|
class generate_bin(Task.Task):
    '''convert the linked ELF into a raw binary image via objcopy'''
    color='CYAN'
    run_str="${OBJCOPY} -O binary ${SRC} ${TGT}"
    always_run = True

    def keyword(self):
        return "Generating"

    def __str__(self):
        # show the target path relative to the build directory
        target = self.outputs[0]
        return target.path_from(self.generator.bld.bldnode)
|
|
|
|
|
2019-10-20 23:35:57 -03:00
|
|
|
def to_unsigned(i):
    '''convert a possibly signed integer to its unsigned 32 bit value.

    zlib.crc32() can return a signed value on some Python versions, so
    mask to the low 32 bits — for negative 32 bit inputs this is the
    same as adding 2**32.
    '''
    return i & 0xFFFFFFFF
|
|
|
|
|
|
|
|
class set_app_descriptor(Task.Task):
    '''setup app descriptor in bin file

    Patches the application descriptor that follows the 8-byte magic in
    the .bin: two CRC32s (image before and after the descriptor), the
    image length and the short git hash.
    '''
    color='BLUE'
    always_run = True

    def keyword(self):
        return "app_descriptor"

    def run(self):
        if 'APP_DESCRIPTOR' not in self.env:
            return
        if self.env.APP_DESCRIPTOR == 'MissionPlanner':
            descriptor = b'\x40\xa2\xe4\xf1\x64\x68\x91\x06'
        else:
            Logs.error("Bad APP_DESCRIPTOR %s" % self.env.APP_DESCRIPTOR)
            return
        # use context managers so the file handles are always closed
        with open(self.inputs[0].abspath(), 'rb') as f:
            img = f.read()
        offset = img.find(descriptor)
        if offset == -1:
            Logs.error("Failed to find %s APP_DESCRIPTOR" % self.env.APP_DESCRIPTOR)
            return
        offset += 8
        # next 8 bytes is 64 bit CRC. We set first 4 bytes to
        # CRC32 of image before descriptor and 2nd 4 bytes
        # to CRC32 of image after descriptor. This is very efficient
        # for bootloader to calculate
        # after CRC comes image length and 32 bit git hash
        desc_len = 16
        crc1 = to_unsigned(zlib.crc32(img[:offset]))
        crc2 = to_unsigned(zlib.crc32(img[offset+desc_len:]))
        githash = to_unsigned(int('0x' + self.generator.bld.git_head_hash(short=True),16))
        desc = struct.pack('<IIII', crc1, crc2, len(img), githash)
        img = img[:offset] + desc + img[offset+desc_len:]
        Logs.info("Applying %s APP_DESCRIPTOR %08x%08x" % (self.env.APP_DESCRIPTOR, crc1, crc2))
        with open(self.inputs[0].abspath(), 'wb') as f:
            f.write(img)
|
|
|
|
|
2018-06-23 01:53:20 -03:00
|
|
|
class generate_apj(Task.Task):
    '''generate an apj firmware file

    The apj is a JSON document containing the zlib-compressed,
    base64-encoded firmware image plus board/version metadata.
    '''
    color='CYAN'
    always_run = True

    def keyword(self):
        return "apj_gen"

    def run(self):
        import json, time, base64, zlib
        # use context managers so both file handles are reliably closed
        with open(self.inputs[0].abspath(), 'rb') as f:
            img = f.read()
        d = {
            "board_id": int(self.env.APJ_BOARD_ID),
            "magic": "APJFWv1",
            "description": "Firmware for a %s board" % self.env.APJ_BOARD_TYPE,
            "image": base64.b64encode(zlib.compress(img,9)).decode('utf-8'),
            "summary": self.env.BOARD,
            "version": "0.1",
            "image_size": len(img),
            "git_identity": self.generator.bld.git_head_hash(short=True),
            "board_revision": 0
        }
        if self.env.build_dates:
            # we omit build_time when we don't have build_dates so that apj
            # file is identical for same git hash and compiler
            d["build_time"] = int(time.time())
        apj_file = self.outputs[0].abspath()
        with open(apj_file, "w") as f:
            f.write(json.dumps(d, indent=4))
|
2018-04-10 19:06:09 -03:00
|
|
|
|
|
|
|
class build_abin(Task.Task):
    '''build an abin file for skyviper firmware upload via web UI'''
    color='CYAN'
    run_str='${TOOLS_SCRIPTS}/make_abin.sh ${SRC}.bin ${SRC}.abin'
    always_run = True

    def keyword(self):
        return "Generating"

    def __str__(self):
        # show the target path relative to the build directory
        out = self.outputs[0]
        return out.path_from(self.generator.bld.bldnode)
|
|
|
|
|
2018-06-23 01:53:20 -03:00
|
|
|
class build_intel_hex(Task.Task):
    '''build an intel hex file for upload with DFU'''
    color='CYAN'
    run_str='${TOOLS_SCRIPTS}/make_intel_hex.py ${SRC} ${FLASH_RESERVE_START_KB}'
    always_run = True

    def keyword(self):
        return "Generating"

    def __str__(self):
        # show the target path relative to the build directory
        out = self.outputs[0]
        return out.path_from(self.generator.bld.bldnode)
|
|
|
|
|
2018-01-05 02:53:19 -04:00
|
|
|
@feature('ch_ap_program')
@after_method('process_source')
def chibios_firmware(self):
    # post-link pipeline for a ChibiOS program: ELF -> .bin -> .apj, with
    # optional .abin, intel hex, embedded default parameters and an app
    # descriptor.  Task ordering is established explicitly via
    # set_run_after calls below.
    self.link_task.always_run = True

    link_output = self.link_task.outputs[0]
    hex_task = None

    bin_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.bin').name)
    apj_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.apj').name)

    # raw binary from the ELF, then the apj built from that binary
    generate_bin_task = self.create_task('generate_bin', src=link_output, tgt=bin_target)
    generate_bin_task.set_run_after(self.link_task)

    generate_apj_task = self.create_task('generate_apj', src=bin_target, tgt=apj_target)
    generate_apj_task.set_run_after(generate_bin_task)

    if self.env.BUILD_ABIN:
        # skyviper-style .abin for firmware upload via web UI
        abin_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.abin').name)
        abin_task = self.create_task('build_abin', src=link_output, tgt=abin_target)
        abin_task.set_run_after(generate_apj_task)

    bootloader_bin = self.bld.srcnode.make_node("Tools/bootloaders/%s_bl.bin" % self.env.BOARD)
    if self.bld.env.HAVE_INTEL_HEX:
        if os.path.exists(bootloader_bin.abspath()):
            # intel hex combining the firmware .bin with the board's
            # bootloader, for DFU flashing
            hex_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.hex').name)
            hex_task = self.create_task('build_intel_hex', src=[bin_target, bootloader_bin], tgt=hex_target)
            hex_task.set_run_after(generate_bin_task)
        else:
            print("Not embedding bootloader; %s does not exist" % bootloader_bin)

    if self.env.DEFAULT_PARAMETERS:
        # default parameters are embedded into the ELF, so the .bin must
        # be regenerated after they are set
        default_params_task = self.create_task('set_default_parameters',
                                               src=link_output)
        default_params_task.set_run_after(self.link_task)
        generate_bin_task.set_run_after(default_params_task)

    if self.env.APP_DESCRIPTOR:
        # the app descriptor is patched into the .bin in place, so both
        # the apj and hex generation must wait for it
        app_descriptor_task = self.create_task('set_app_descriptor', src=bin_target)
        app_descriptor_task.set_run_after(generate_bin_task)
        generate_apj_task.set_run_after(app_descriptor_task)
        if hex_task is not None:
            hex_task.set_run_after(app_descriptor_task)

    if self.bld.options.upload:
        # upload the finished apj when --upload was given
        _upload_task = self.create_task('upload_fw', src=apj_target)
        _upload_task.set_run_after(generate_apj_task)
|
2018-01-05 02:53:19 -04:00
|
|
|
|
2018-03-01 02:28:25 -04:00
|
|
|
def setup_can_build(cfg):
    '''enable CAN build. By doing this here we can auto-enable CAN in
    the build based on the presence of CAN pins in hwdef.dat'''
    env = cfg.env
    env.AP_LIBRARIES += [
        'AP_UAVCAN',
        'modules/uavcan/libuavcan/src/**/*.cpp',
    ]

    # STM32 driver flags are shared between the C and C++ compilers
    stm32_flags = ['-DUAVCAN_STM32_CHIBIOS=1',
                   '-DUAVCAN_STM32_NUM_IFACES=2']
    env.CFLAGS += stm32_flags
    env.CXXFLAGS += ['-Wno-error=cast-align'] + stm32_flags

    env.DEFINES += [
        'UAVCAN_CPP_VERSION=UAVCAN_CPP03',
        'UAVCAN_NO_ASSERTIONS=1',
        'UAVCAN_NULLPTR=nullptr'
    ]

    env.INCLUDES += [
        cfg.srcnode.find_dir('modules/uavcan/libuavcan/include').abspath(),
    ]
    cfg.get_board().with_uavcan = True
|
|
|
|
|
2018-03-01 01:51:17 -04:00
|
|
|
def load_env_vars(env):
    '''optionally load extra environment variables from env.py in the build directory'''
    print("Checking for env.py")
    env_py = os.path.join(env.BUILDROOT, 'env.py')
    if not os.path.exists(env_py):
        print("No env.py found")
        return
    # NOTE: env.py is produced by our own hwdef processing, not untrusted
    # input, so unpickling it is acceptable. Use a context manager so the
    # file handle is closed promptly.
    with open(env_py, 'rb') as f:
        e = pickle.load(f)
    for k in e.keys():
        v = e[k]
        if k == 'ROMFS_FILES':
            # ROMFS files accumulate rather than replace
            env.ROMFS_FILES += v
            continue
        if k in env:
            if isinstance(env[k], dict):
                # merge a KEY=VALUE string into an existing dictionary
                a = v.split('=')
                env[k][a[0]] = '='.join(a[1:])
                print("env updated %s=%s" % (k, v))
            elif isinstance(env[k], list):
                env[k].append(v)
                print("env appended %s=%s" % (k, v))
            else:
                env[k] = v
                print("env added %s=%s" % (k, v))
        else:
            env[k] = v
            print("env set %s=%s" % (k, v))
    if env.ENABLE_ASSERTS:
        env.CHIBIOS_BUILD_FLAGS += ' ENABLE_ASSERTS=yes'
|
2018-03-01 01:51:17 -04:00
|
|
|
|
2019-02-14 08:07:12 -04:00
|
|
|
def setup_optimization(env):
    '''setup optimization flags for build'''
    # debug builds use -Og; otherwise an explicit OPTIMIZE level from the
    # board configuration wins, with -Os as the fallback
    if env.DEBUG:
        level = "-Og"
    else:
        level = env.OPTIMIZE or "-Os"
    env.CFLAGS += [ level ]
    env.CXXFLAGS += [ level ]
    env.CHIBIOS_BUILD_FLAGS += ' USE_COPT=%s' % level
|
|
|
|
|
2018-01-05 02:53:19 -04:00
|
|
|
def configure(cfg):
    # waf configure hook: locate toolchain programs, set up all the
    # source/build paths used by the ChibiOS make integration, then run
    # chibios_hwdef.py to generate hwdef.h and ldscript.ld
    cfg.find_program('make', var='MAKE')
    #cfg.objcopy = cfg.find_program('%s-%s'%(cfg.env.TOOLCHAIN,'objcopy'), var='OBJCOPY', mandatory=True)
    cfg.find_program('arm-none-eabi-objcopy', var='OBJCOPY')
    env = cfg.env
    bldnode = cfg.bldnode.make_node(cfg.variant)
    def srcpath(path):
        # absolute path under the source tree
        return cfg.srcnode.make_node(path).abspath()

    def bldpath(path):
        # absolute path under the variant build directory
        return bldnode.make_node(path).abspath()
    env.AP_PROGRAM_FEATURES += ['ch_ap_program']

    kw = env.AP_LIBRARIES_OBJECTS_KW
    kw['features'] = Utils.to_list(kw.get('features', [])) + ['ch_ap_library']

    env.CH_ROOT = srcpath('modules/ChibiOS')
    env.AP_HAL_ROOT = srcpath('libraries/AP_HAL_ChibiOS')
    env.BUILDDIR = bldpath('modules/ChibiOS')
    env.BUILDROOT = bldpath('')
    env.SRCROOT = srcpath('')
    env.PT_DIR = srcpath('Tools/ardupilotwaf/chibios/image')
    env.MKFW_TOOLS = srcpath('Tools/ardupilotwaf')
    env.UPLOAD_TOOLS = srcpath('Tools/scripts')
    env.CHIBIOS_SCRIPTS = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts')
    env.TOOLS_SCRIPTS = srcpath('Tools/scripts')
    env.APJ_TOOL = srcpath('Tools/scripts/apj_tool.py')
    env.SERIAL_PORT = srcpath('/dev/serial/by-id/*_STLink*')

    # relative paths to pass to make, relative to directory that make is run from
    env.CH_ROOT_REL = os.path.relpath(env.CH_ROOT, env.BUILDROOT)
    env.AP_HAL_REL = os.path.relpath(env.AP_HAL_ROOT, env.BUILDROOT)
    env.BUILDDIR_REL = os.path.relpath(env.BUILDDIR, env.BUILDROOT)

    mk_custom = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/chibios_board.mk' % env.BOARD)
    mk_common = srcpath('libraries/AP_HAL_ChibiOS/hwdef/common/chibios_board.mk')
    # see if there is a board specific make file
    if os.path.exists(mk_custom):
        env.BOARD_MK = mk_custom
    else:
        env.BOARD_MK = mk_common

    if cfg.options.default_parameters:
        cfg.msg('Default parameters', cfg.options.default_parameters, color='YELLOW')
        env.DEFAULT_PARAMETERS = srcpath(cfg.options.default_parameters)

    # we need to run chibios_hwdef.py at configure stage to generate the ldscript.ld
    # that is needed by the remaining configure checks
    import subprocess

    # bootloader builds use the hwdef-bl.dat variant of the board definition
    if env.BOOTLOADER:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef-bl.dat' % env.BOARD)
        env.BOOTLOADER_OPTION="--bootloader"
    else:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef.dat' % env.BOARD)
        env.BOOTLOADER_OPTION=""
    hwdef_script = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py')
    hwdef_out = env.BUILDROOT
    if not os.path.exists(hwdef_out):
        os.mkdir(hwdef_out)
    # run the hwdef processor with the same interpreter that runs waf
    python = sys.executable
    try:
        cmd = "{0} '{1}' -D '{2}' '{3}' {4}".format(python, hwdef_script, hwdef_out, env.HWDEF, env.BOOTLOADER_OPTION)
        ret = subprocess.call(cmd, shell=True)
    except Exception:
        cfg.fatal("Failed to process hwdef.dat")
    if ret != 0:
        cfg.fatal("Failed to process hwdef.dat ret=%d" % ret)

    # pick up env.py written by hwdef processing, then enable optional
    # features it may have requested
    load_env_vars(cfg.env)
    if env.HAL_WITH_UAVCAN:
        setup_can_build(cfg)
    setup_optimization(cfg.env)
|
2018-03-01 01:51:17 -04:00
|
|
|
|
2018-03-01 03:26:49 -04:00
|
|
|
def pre_build(bld):
    '''pre-build hook to change dynamic sources'''
    env = bld.env
    load_env_vars(env)
    if env.HAL_WITH_UAVCAN:
        bld.get_board().with_uavcan = True
|
2018-03-01 03:26:49 -04:00
|
|
|
|
2018-06-16 07:00:54 -03:00
|
|
|
def build(bld):
    # waf build hook: declare the dynamic-source tasks (hwdef processing,
    # ChibiOS include_dirs, libch.a) and the link flags for the firmware

    bld(
        # build hwdef.h from hwdef.dat. This is needed after a waf clean
        source=bld.path.ant_glob(bld.env.HWDEF),
        rule="%s '${AP_HAL_ROOT}/hwdef/scripts/chibios_hwdef.py' -D '${BUILDROOT}' '%s' %s" % (
            bld.env.get_flat('PYTHON'), bld.env.HWDEF, bld.env.BOOTLOADER_OPTION),
        group='dynamic_sources',
        target=[bld.bldnode.find_or_declare('hwdef.h'),
                bld.bldnode.find_or_declare('ldscript.ld')]
    )

    bld(
        # create the file modules/ChibiOS/include_dirs
        rule="touch Makefile && BUILDDIR=${BUILDDIR_REL} CHIBIOS=${CH_ROOT_REL} AP_HAL=${AP_HAL_REL} ${CHIBIOS_BUILD_FLAGS} ${CHIBIOS_BOARD_NAME} ${MAKE} pass -f '${BOARD_MK}'",
        group='dynamic_sources',
        target=bld.bldnode.find_or_declare('modules/ChibiOS/include_dirs')
    )

    # inputs that should trigger a rebuild of libch.a when they change
    common_src = [bld.bldnode.find_or_declare('hwdef.h'),
                  bld.bldnode.find_or_declare('modules/ChibiOS/include_dirs')]
    common_src += bld.path.ant_glob('libraries/AP_HAL_ChibiOS/hwdef/common/*.[ch]')
    common_src += bld.path.ant_glob('libraries/AP_HAL_ChibiOS/hwdef/common/*.mk')
    common_src += bld.path.ant_glob('modules/ChibiOS/os/hal/**/*.[ch]')
    common_src += bld.path.ant_glob('modules/ChibiOS/os/hal/**/*.mk')
    if bld.env.ROMFS_FILES:
        common_src += [bld.bldnode.find_or_declare('ap_romfs_embedded.h')]
    ch_task = bld(
        # build libch.a from ChibiOS sources and hwdef.h
        rule="BUILDDIR='${BUILDDIR_REL}' CHIBIOS='${CH_ROOT_REL}' AP_HAL=${AP_HAL_REL} ${CHIBIOS_BUILD_FLAGS} ${CHIBIOS_BOARD_NAME} '${MAKE}' -j%u lib -f '${BOARD_MK}'" % bld.options.jobs,
        group='dynamic_sources',
        source=common_src,
        target=bld.bldnode.find_or_declare('modules/ChibiOS/libch.a')
    )
    ch_task.name = "ChibiOS_lib"

    bld.env.LIB += ['ch']
    bld.env.LIBPATH += ['modules/ChibiOS/']
    # list of functions that will be wrapped to move them out of libc into our
    # own code. Note that we also include functions that we deliberately don't
    # implement anywhere (the FILE* functions). This allows us to get link
    # errors if we accidentally try to use one of those functions either
    # directly or via another libc call
    wraplist = ['sscanf', 'fprintf', 'snprintf', 'vsnprintf','vasprintf','asprintf','vprintf','scanf',
                'fiprintf','printf',
                'fopen', 'fread', 'fflush', 'fwrite', 'fread', 'fputs', 'fgets',
                'clearerr', 'fseek', 'ferror', 'fclose', 'tmpfile', 'getc', 'ungetc', 'feof',
                'ftell', 'freopen', 'remove', 'vfprintf', 'fscanf' ]
    for w in wraplist:
        bld.env.LINKFLAGS += ['-Wl,--wrap,%s' % w]
|