2012-11-20 05:27:00 -04:00
|
|
|
#!/usr/bin/env python
|
2021-05-12 01:13:12 -03:00
|
|
|
|
|
|
|
'''Generates parameter metadata files suitable for consumption by
|
|
|
|
ground control stations and various web services
|
|
|
|
|
|
|
|
AP_FLAKE8_CLEAN
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
2016-11-08 07:06:05 -04:00
|
|
|
from __future__ import print_function
|
2023-01-11 18:43:32 -04:00
|
|
|
import copy
|
2016-07-31 07:22:06 -03:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import sys
|
2019-02-07 10:38:06 -04:00
|
|
|
from argparse import ArgumentParser
|
2012-07-04 20:44:53 -03:00
|
|
|
|
2016-07-31 07:22:06 -03:00
|
|
|
from param import (Library, Parameter, Vehicle, known_group_fields,
|
2022-08-11 20:32:31 -03:00
|
|
|
known_param_fields, required_param_fields, required_library_param_fields, known_units)
|
2013-05-26 19:25:24 -03:00
|
|
|
from htmlemit import HtmlEmit
|
2016-03-17 02:04:03 -03:00
|
|
|
from rstemit import RSTEmit
|
2021-08-18 21:37:55 -03:00
|
|
|
from rstlatexpdfemit import RSTLATEXPDFEmit
|
2016-07-31 07:22:06 -03:00
|
|
|
from xmlemit import XmlEmit
|
2017-03-02 20:15:02 -04:00
|
|
|
from mdemit import MDEmit
|
2019-12-17 21:33:24 -04:00
|
|
|
from jsonemit import JSONEmit
|
2020-03-05 07:45:15 -04:00
|
|
|
from xmlemit_mp import XmlEmitMP
|
2012-07-04 21:42:38 -03:00
|
|
|
|
2019-02-07 10:38:06 -04:00
|
|
|
# Command-line interface.  args is consumed throughout the module
# (args.verbose by debug(), args.vehicle for file discovery,
# args.emit_params / args.output_format by the emitter stage).
parser = ArgumentParser(description="Parse ArduPilot parameters.")
parser.add_argument("-v", "--verbose", dest='verbose', action='store_true', default=False, help="show debugging output")
parser.add_argument("--vehicle", required=True, help="Vehicle type to generate for")
parser.add_argument("--no-emit",
                    dest='emit_params',
                    action='store_false',
                    default=True,
                    # typo fix: "documention" -> "documentation"
                    help="don't emit parameter documentation, just validate")
parser.add_argument("--format",
                    dest='output_format',
                    action='store',
                    default='all',
                    choices=['all', 'html', 'rst', 'rstlatexpdf', 'wiki', 'xml', 'json', 'edn', 'md', 'xml_mp'],
                    help="what output format to use")
args = parser.parse_args()
|
2013-05-21 04:01:13 -03:00
|
|
|
|
|
|
|
|
2012-07-04 21:42:38 -03:00
|
|
|
# Regular expressions for parsing the parameter metadata

# Matches one "@Param{Vehicles}: NAME" block plus its trailing run of
# "// @Field: value" lines.  Groups: (1) optional vehicle list from the
# {...} tag, (2) parameter name, (3) the raw block of field lines.
prog_param = re.compile(r"@Param(?:{([^}]+)})?: (\w+).*((?:\n[ \t]*// @(\w+)(?:{([^}]+)})?: ?(.*))+)(?:\n[ \t\r]*\n|\n[ \t]+[A-Z]|\n\-\-\]\])", re.MULTILINE) # noqa

# match e.g @Value: 0=Unity, 1=Koala, 17=Liability
# Groups: (1) field name, (2) field value (rest of line).
prog_param_fields = re.compile(r"[ \t]*// @(\w+): ?([^\r\n]*)")
# match e.g @Value{Copter}: 0=Volcano, 1=Peppermint
# Groups: (1) field name, (2) vehicle list, (3) field value.
prog_param_tagged_fields = re.compile(r"[ \t]*// @(\w+){([^}]+)}: ([^\r\n]*)")

# Matches "@Group: PREFIX" plus its "// @Path: ..." line(s).
prog_groups = re.compile(r"@Group: *(\w+).*((?:\n[ \t]*// @(Path): (\S+))+)", re.MULTILINE)

# Root of the ArduPilot tree, relative to this script's location.
apm_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../')
|
2021-05-12 01:09:58 -03:00
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2021-05-12 01:09:58 -03:00
|
|
|
def find_vehicle_parameter_filepath(vehicle_name):
    """Return the path to Parameters.cpp for *vehicle_name*.

    Searches both the repository root (apm_path) and Tools/ for a
    directory named exactly *vehicle_name*, then retries with the
    conventional directory name (e.g. Copter -> ArduCopter).

    Raises ValueError when no Parameters.cpp can be found.
    """
    apm_tools_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../Tools/')

    vehicle_name_to_dir_name_map = {
        "Copter": "ArduCopter",
        "Plane": "ArduPlane",
        "Tracker": "AntennaTracker",
        "Sub": "ArduSub",
    }

    for top_dir in apm_path, apm_tools_path:
        # first try e.g. ArduCopter/Parameters.cpp directly
        path = os.path.join(top_dir, vehicle_name, "Parameters.cpp")
        if os.path.exists(path):
            return path

        # then see if we can map e.g. Copter -> ArduCopter.
        # BUGFIX: this lookup used to run after the loop and therefore
        # only consulted the last top_dir (Tools/); it now checks every
        # search root.
        if vehicle_name in vehicle_name_to_dir_name_map:
            path = os.path.join(top_dir, vehicle_name_to_dir_name_map[vehicle_name], "Parameters.cpp")
            if os.path.exists(path):
                return path

    raise ValueError("Unable to find parameters file for (%s)" % vehicle_name)
|
2012-07-04 20:44:53 -03:00
|
|
|
|
2023-02-08 21:53:11 -04:00
|
|
|
|
2023-02-08 20:25:06 -04:00
|
|
|
def debug(str_to_print):
    """Print *str_to_print* only when --verbose was given."""
    if not args.verbose:
        return
    print(str_to_print)
|
|
|
|
|
2023-02-08 21:53:11 -04:00
|
|
|
|
2023-02-08 20:25:06 -04:00
|
|
|
def lua_applets():
    '''return a Library object covering the lua applet and driver scripts'''
    lua_lib = Library("", reference="Lua Script", not_rst=True, check_duplicates=True)
    dirs = ["libraries/AP_Scripting/applets", "libraries/AP_Scripting/drivers"]
    paths = []
    for d in dirs:
        # use a distinct name for os.walk's directory list so we don't
        # shadow the outer "dirs" list we are iterating over
        for root, dirnames, files in os.walk(os.path.join(apm_path, d)):
            for file in files:
                if not file.endswith(".lua"):
                    continue
                f = os.path.join(root, file)
                debug("Adding lua path %s" % f)
                # the library is expected to have the path as a relative path from within
                # a vehicle directory
                f = f.replace(apm_path, "../")
                paths.append(f)
    setattr(lua_lib, "Path", ','.join(paths))
    return lua_lib
|
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2012-07-04 20:44:53 -03:00
|
|
|
# Master list of Library objects; populated up-front here and extended
# as @Group tags are discovered during parsing.
libraries = []

# AP_Vehicle also has parameters rooted at "", but isn't referenced
# from the vehicle in any way:
ap_vehicle_lib = Library("") # the "" is tacked onto the front of param name
setattr(ap_vehicle_lib, "Path", os.path.join('..', 'libraries', 'AP_Vehicle', 'AP_Vehicle.cpp'))
libraries.append(ap_vehicle_lib)

libraries.append(lua_applets())

# Global error/progress state shared with error() and validate():
error_count = 0       # bumped by error(); used as the process exit status
current_param = None  # name of the parameter currently being processed
current_file = None   # file currently being processed (for error context)
|
2013-05-21 04:01:13 -03:00
|
|
|
|
2016-07-31 07:22:06 -03:00
|
|
|
|
|
|
|
def error(str_to_print):
    """Report a validation problem, prefixed with file/param context when known.

    Also bumps the global error_count, which becomes the exit status.
    """
    global error_count
    error_count = error_count + 1
    if current_file is not None:
        print("Error in %s" % current_file)
    if current_param is not None:
        print("At param %s" % current_param)
    print(str_to_print)
|
2013-05-21 04:01:13 -03:00
|
|
|
|
2018-10-07 22:20:54 -03:00
|
|
|
|
2017-09-24 21:23:47 -03:00
|
|
|
# Map from build-directory name to the "true" vehicle name used inside
# @Param{...} / @Values{...} tags.
truename_map = {
    "Rover": "Rover",
    "ArduSub": "Sub",
    "ArduCopter": "Copter",
    "ArduPlane": "Plane",
    "AntennaTracker": "Tracker",
    "AP_Periph": "AP_Periph",
    "Blimp": "Blimp",
}
valid_truenames = frozenset(truename_map.values())
# fall back to the name as given if it isn't in the map
truename = truename_map.get(args.vehicle, args.vehicle)

# Tags whose values are "key:label" pairs joined by commas; repeated
# occurrences of these are concatenated rather than treated as errors.
documentation_tags_which_are_comma_separated_nv_pairs = frozenset([
    'Values',
    'Bitmask',
])

vehicle_path = find_vehicle_parameter_filepath(args.vehicle)

basename = os.path.basename(os.path.dirname(vehicle_path))
path = os.path.normpath(os.path.dirname(vehicle_path))
reference = basename  # so links don't break we use ArduCopter
vehicle = Vehicle(truename, path, reference=reference)
debug('Found vehicle type %s' % vehicle.name)
|
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2021-05-12 00:42:57 -03:00
|
|
|
def process_vehicle(vehicle):
    """Parse Parameters.cpp for *vehicle*, filling vehicle.params.

    Also appends any @Group libraries found in that file to the global
    `libraries` list (deduplicated by name).
    """
    debug("===\n\n\nProcessing %s" % vehicle.name)
    # NOTE(review): there is no `global current_file` here, so this
    # assignment creates a *local* and the module-level current_file is
    # untouched (unlike process_library, which declares it global) —
    # confirm whether that is intended.
    current_file = vehicle.path+'/Parameters.cpp'

    f = open(current_file)
    p_text = f.read()
    f.close()
    group_matches = prog_groups.findall(p_text)

    debug(group_matches)
    # register any @Group libraries declared in the vehicle file
    for group_match in group_matches:
        lib = Library(group_match[0])
        fields = prog_param_fields.findall(group_match[1])
        for field in fields:
            if field[0] in known_group_fields:
                setattr(lib, field[0], field[1])
            else:
                error("group: unknown parameter metadata field '%s'" % field[0])
        # only add libraries we haven't already seen (by name)
        if not any(lib.name == parsed_l.name for parsed_l in libraries):
            libraries.append(lib)

    param_matches = []
    param_matches = prog_param.findall(p_text)

    for param_match in param_matches:
        # (vehicle-restriction tag, parameter name, raw "// @..." lines)
        (only_vehicles, param_name, field_text) = (param_match[0],
                                                   param_match[1],
                                                   param_match[2])
        if len(only_vehicles):
            only_vehicles_list = [x.strip() for x in only_vehicles.split(",")]
            for only_vehicle in only_vehicles_list:
                if only_vehicle not in valid_truenames:
                    raise ValueError("Invalid only_vehicle %s" % only_vehicle)
            # skip parameters restricted to other vehicles
            if vehicle.truename not in only_vehicles_list:
                continue
        p = Parameter(vehicle.reference+":"+param_name, current_file)
        debug(p.name + ' ')
        global current_param
        current_param = p.name
        fields = prog_param_fields.findall(field_text)
        # raw metadata text kept for error reporting in validate()
        p.__field_text = field_text
        # NOTE(review): field_list is built but never read in this function
        field_list = []
        for field in fields:
            (field_name, field_value) = field
            field_list.append(field[0])
            if field[0] in known_param_fields:
                # @PREFIX@ expands to "" at vehicle level
                value = re.sub('@PREFIX@', "", field[1]).rstrip()
                if hasattr(p, field_name):
                    if field_name in documentation_tags_which_are_comma_separated_nv_pairs:
                        # allow concatenation of (e.g.) bitmask fields
                        x = eval("p.%s" % field_name)
                        x += ", "
                        x += value
                        value = x
                    else:
                        error("%s already has field %s" % (p.name, field_name))
                setattr(p, field[0], value)
            elif field[0] in frozenset(["CopyFieldsFrom", "CopyValuesFrom"]):
                # resolved later by do_copy_fields()/do_copy_values()
                setattr(p, field[0], field[1])
            else:
                error("param: unknown parameter metadata field '%s'" % field[0])

        vehicle.params.append(p)
    # local rebind only — see note above
    current_file = None
    debug("Processed %u params" % len(vehicle.params))
|
2016-07-31 07:22:06 -03:00
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2021-05-12 00:42:57 -03:00
|
|
|
# Parse the vehicle's own Parameters.cpp (also discovers its @Group libraries)
process_vehicle(vehicle)

debug("Found %u documented libraries" % len(libraries))

libraries = list(libraries)

# alllibs accumulates libraries plus nested groups found while recursing
alllibs = libraries[:]
|
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2017-08-25 10:34:35 -03:00
|
|
|
def process_library(vehicle, library, pathprefix=None):
    '''process one library

    Parses every source file listed in library.Path (comma-separated),
    collecting @Param blocks into library.params and recursing into any
    nested @Group declarations (which are appended to the global
    alllibs list).
    '''
    paths = library.Path.split(',')
    for path in paths:
        path = path.strip()
        global current_file
        current_file = path
        debug("\n Processing file '%s'" % path)
        # resolve the (possibly relative) source-file path:
        if pathprefix is not None:
            libraryfname = os.path.join(pathprefix, path)
        elif path.find('/') == -1:
            # bare filename: relative to the vehicle directory
            libraryfname = os.path.join(vehicle.path, path)
        else:
            libraryfname = os.path.normpath(os.path.join(apm_path + '/libraries/' + path))
        if path and os.path.exists(libraryfname):
            f = open(libraryfname)
            p_text = f.read()
            f.close()
        else:
            error("Path %s not found for library %s (fname=%s)" % (path, library.name, libraryfname))
            continue

        param_matches = prog_param.findall(p_text)
        debug("Found %u documented parameters" % len(param_matches))
        for param_match in param_matches:
            (only_vehicles, param_name, field_text) = (param_match[0],
                                                       param_match[1],
                                                       param_match[2])
            if len(only_vehicles):
                only_vehicles_list = [x.strip() for x in only_vehicles.split(",")]
                for only_vehicle in only_vehicles_list:
                    if only_vehicle not in valid_truenames:
                        raise ValueError("Invalid only_vehicle %s" % only_vehicle)
                if vehicle.name not in only_vehicles_list:
                    continue
            p = Parameter(library.name+param_name, current_file)
            debug(p.name + ' ')
            global current_param
            current_param = p.name
            fields = prog_param_fields.findall(field_text)
            # raw metadata text kept for error reporting in validate()
            p.__field_text = field_text
            # NOTE(review): field_list is built but never read here
            field_list = []
            for field in fields:
                (field_name, field_value) = field
                field_list.append(field[0])
                if field[0] in known_param_fields:
                    # @PREFIX@ expands to the library prefix
                    value = re.sub('@PREFIX@', library.name, field[1])
                    if hasattr(p, field_name):
                        if field_name in documentation_tags_which_are_comma_separated_nv_pairs:
                            # allow concatenation of (e.g.) bitmask fields
                            x = eval("p.%s" % field_name)
                            x += ", "
                            x += value
                            value = x
                        else:
                            error("%s already has field %s" % (p.name, field_name))
                    setattr(p, field[0], value)
                elif field[0] in frozenset(["CopyFieldsFrom", "CopyValuesFrom"]):
                    setattr(p, field[0], field[1])
                else:
                    error("param: unknown parameter metadata field %s" % field[0])

            debug("matching %s" % field_text)
            # now the vehicle-tagged fields, e.g. @Values{Copter}: ...
            fields = prog_param_tagged_fields.findall(field_text)
            # a parameter is considered to be vehicle-specific if
            # there does not exist a Values: or Values{VehicleName}
            # for that vehicle but @Values{OtherVehicle} exists.
            seen_values_or_bitmask_for_this_vehicle = False
            seen_values_or_bitmask_for_other_vehicle = False
            for field in fields:
                only_for_vehicles = field[1].split(",")
                only_for_vehicles = [some_vehicle.rstrip().lstrip() for some_vehicle in only_for_vehicles]
                delta = set(only_for_vehicles) - set(truename_map.values())
                if len(delta):
                    error("Unknown vehicles (%s)" % delta)
                debug("field[0]=%s vehicle=%s field[1]=%s only_for_vehicles=%s\n" %
                      (field[0], vehicle.name, field[1], str(only_for_vehicles)))
                if field[0] not in known_param_fields:
                    error("tagged param: unknown parameter metadata field '%s'" % field[0])
                    continue
                if vehicle.name not in only_for_vehicles:
                    if len(only_for_vehicles) and field[0] in documentation_tags_which_are_comma_separated_nv_pairs:
                        seen_values_or_bitmask_for_other_vehicle = True
                    continue

                # decide whether this tagged value replaces or extends
                # an already-present field:
                append_value = False
                if field[0] in documentation_tags_which_are_comma_separated_nv_pairs:
                    if vehicle.name in only_for_vehicles:
                        if seen_values_or_bitmask_for_this_vehicle:
                            append_value = hasattr(p, field[0])
                        seen_values_or_bitmask_for_this_vehicle = True
                    else:
                        if seen_values_or_bitmask_for_this_vehicle:
                            continue
                        append_value = hasattr(p, field[0])

                value = re.sub('@PREFIX@', library.name, field[2])
                if append_value:
                    setattr(p, field[0], getattr(p, field[0]) + ',' + value)
                else:
                    setattr(p, field[0], value)

            if (getattr(p, 'Values', None) is not None and
                    getattr(p, 'Bitmask', None) is not None):
                error("Both @Values and @Bitmask present")

            if (getattr(p, 'Values', None) is None and
                    getattr(p, 'Bitmask', None) is None):
                # values and Bitmask available for this vehicle
                if seen_values_or_bitmask_for_other_vehicle:
                    # we've (e.g.) seen @Values{Copter} when we're
                    # processing for Rover, and haven't seen either
                    # @Values: or @Vales{Rover} - so we omit this
                    # parameter on the assumption that it is not
                    # applicable for this vehicle.
                    continue

            p.path = path # Add path. Later deleted - only used for duplicates
            if library.check_duplicates and library.has_param(p.name):
                error("Duplicate parameter %s in %s" % (p.name, library.name))
                continue
            library.params.append(p)

        # recurse into nested @Group declarations within this file:
        group_matches = prog_groups.findall(p_text)
        debug("Found %u groups" % len(group_matches))
        debug(group_matches)
        done_groups = dict()
        for group_match in group_matches:
            group = group_match[0]
            debug("Group: %s" % group)
            do_append = True
            if group in done_groups:
                # this is to handle cases like the RangeFinder
                # parameters, where the wasp stuff gets tack into the
                # same RNGFND1_ group
                lib = done_groups[group]
                do_append = False
            else:
                lib = Library(group)
                done_groups[group] = lib

            fields = prog_param_fields.findall(group_match[1])
            for field in fields:
                if field[0] in known_group_fields:
                    setattr(lib, field[0], field[1])
                elif field[0] in ["CopyFieldsFrom", "CopyValuesFrom"]:
                    # NOTE(review): this sets the attribute on `p` — the
                    # last Parameter from the loop above — not on `lib`;
                    # it looks like it should be setattr(lib, ...).
                    # Confirm before changing, as output may depend on it.
                    setattr(p, field[0], field[1])
                else:
                    error("unknown parameter metadata field '%s'" % field[0])
            if not any(lib.name == parsed_l.name for parsed_l in libraries):
                if do_append:
                    # nested group names are prefixed by the parent library
                    lib.set_name(library.name + lib.name)
                debug("Group name: %s" % lib.name)
                process_library(vehicle, lib, os.path.dirname(libraryfname))
                if do_append:
                    alllibs.append(lib)

    current_file = None
|
2017-02-05 22:16:37 -04:00
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2012-07-04 20:44:53 -03:00
|
|
|
# Parse every library's source files (recursing into nested groups)
for library in libraries:
    debug("===\n\n\nProcessing library %s" % library.name)

    if hasattr(library, 'Path'):
        process_library(vehicle, library)
    else:
        error("Skipped: no Path found")

    debug("Processed %u documented parameters" % len(library.params))

# sort libraries by name
alllibs = sorted(alllibs, key=lambda x: x.name)

# from here on, `libraries` includes the nested groups as well
libraries = alllibs
|
2018-10-07 22:20:54 -03:00
|
|
|
|
|
|
|
|
2017-02-05 22:16:37 -04:00
|
|
|
def is_number(numberString):
    """Return True when *numberString* parses as a float, else False."""
    try:
        float(numberString)
    except ValueError:
        return False
    return True
|
|
|
|
|
2018-10-07 22:20:54 -03:00
|
|
|
|
2020-09-13 19:25:47 -03:00
|
|
|
def clean_param(param):
    """Normalise whitespace in a parameter's @Values list, in place.

    " 0 : Off , 1 : On " becomes "0:Off,1:On".  Parameters without a
    Values attribute are left untouched.

    Raises ValueError when an entry is not a colon-separated pair or
    has an empty label.
    """
    if not hasattr(param, "Values"):
        return
    new_valueList = []
    for entry in param.Values.split(","):
        (start, sep, end) = entry.partition(":")
        if sep != ":":
            # typo fix in message: "seperator" -> "separator"
            raise ValueError("Expected a colon separator in (%s)" % (entry,))
        if len(end) == 0:
            raise ValueError("Expected a colon-separated string, got (%s)" % entry)
        new_valueList.append(":".join([start.strip(), end.strip()]))
    param.Values = ",".join(new_valueList)
|
|
|
|
|
2021-05-12 01:13:12 -03:00
|
|
|
|
2021-12-13 22:00:34 -04:00
|
|
|
def do_copy_values(vehicle_params, libraries, param):
    """Resolve a CopyValuesFrom tag on *param*.

    Looks the named source parameter up first among the vehicle's own
    parameters (whose names carry a "Vehicle:" prefix), then in every
    library; copies its Values attribute across.  The CopyValuesFrom
    attribute is removed either way; a lookup failure is reported via
    error().
    """
    if not hasattr(param, "CopyValuesFrom"):
        return

    # so go and find the values...
    wanted_name = param.CopyValuesFrom
    del param.CopyValuesFrom

    for candidate in vehicle_params:
        # vehicle parameter names look like "Vehicle:NAME"
        (_vehicle_prefix, bare_name) = candidate.name.split(":")
        if bare_name == wanted_name:
            param.Values = candidate.Values
            return

    for lib in libraries:
        for candidate in lib.params:
            if candidate.name == wanted_name:
                param.Values = candidate.Values
                return

    error("Did not find value to copy (%s wants %s)" %
          (param.name, wanted_name))
|
|
|
|
|
|
|
|
|
2022-12-29 22:10:57 -04:00
|
|
|
def _copy_documentation_fields(source, param):
    """Copy every documentation field from *source* onto *param*.

    Fields already present on *param* win (they override the copied
    ones); dunder attributes and the identity fields name/real_path are
    skipped.
    """
    for field in dir(source):
        if hasattr(param, field):
            # override
            continue
        if field.startswith("__") or field in frozenset(["name", "real_path"]):
            # internal methods like __ne__
            continue
        setattr(param, field, getattr(source, field))


def do_copy_fields(vehicle_params, libraries, param):
    """Resolve CopyValuesFrom and CopyFieldsFrom tags on *param*.

    The source parameter is searched for first among the vehicle's own
    parameters (names carry a "Vehicle:" prefix), then in every
    library; a lookup failure is reported via error().
    """
    do_copy_values(vehicle_params, libraries, param)

    if not hasattr(param, 'CopyFieldsFrom'):
        return

    # so go and find the values...
    wanted_name = param.CopyFieldsFrom
    del param.CopyFieldsFrom
    for x in vehicle_params:
        name = x.name
        (v, name) = name.split(":")
        if name != wanted_name:
            continue
        _copy_documentation_fields(x, param)
        return

    for lib in libraries:
        for x in lib.params:
            if x.name != wanted_name:
                continue
            _copy_documentation_fields(x, param)
            return

    error("Did not find value to copy (%s wants %s)" %
          (param.name, wanted_name))
|
|
|
|
|
|
|
|
|
|
|
|
def validate(param, is_library=False):
    """
    Validates the parameter meta data.

    Reports problems via error(); records the param's file and name in
    the module-level current_file/current_param for error context.
    When is_library is True the (smaller) library required-field set is
    enforced instead of the vehicle one.
    """
    global current_file
    current_file = param.real_path
    global current_param
    current_param = param.name
    # Validate values
    if (hasattr(param, "Range")):
        # Range must be exactly "min max"
        rangeValues = param.__dict__["Range"].split(" ")
        if (len(rangeValues) != 2):
            error("Invalid Range values for %s (%s)" %
                  (param.name, param.__dict__["Range"]))
            return
        min_value = rangeValues[0]
        max_value = rangeValues[1]
        if not is_number(min_value):
            error("Min value not number: %s %s" % (param.name, min_value))
            return
        if not is_number(max_value):
            error("Max value not number: %s %s" % (param.name, max_value))
            return
    # Check for duplicate in @value field
    if (hasattr(param, "Values")):
        valueList = param.__dict__["Values"].split(",")
        values = []
        for i in valueList:
            i = i.replace(" ", "")
            # compare only the numeric key, not the label
            values.append(i.partition(":")[0])
        if (len(values) != len(set(values))):
            error("Duplicate values found" + str({x for x in values if values.count(x) > 1}))
    # Validate units
    if (hasattr(param, "Units")):
        if (param.__dict__["Units"] != "") and (param.__dict__["Units"] not in known_units):
            error("unknown units field '%s'" % param.__dict__["Units"])
    # Validate User
    if (hasattr(param, "User")):
        if param.User.strip() not in ["Standard", "Advanced"]:
            error("unknown user (%s)" % param.User.strip())

    if (hasattr(param, "Description")):
        if not param.Description or not param.Description.strip():
            error("Empty Description (%s)" % param)

    # finally, check the required metadata fields are all present
    required_fields = required_param_fields
    if is_library:
        required_fields = required_library_param_fields
    for req_field in required_fields:
        if not getattr(param, req_field, False):
            error("missing parameter metadata field '%s' in %s" % (req_field, param.__field_text))
|
|
|
|
|
|
|
|
|
|
|
|
# handle CopyFieldsFrom and CopyValuesFrom:
for param in vehicle.params:
    do_copy_fields(vehicle.params, libraries, param)
for library in libraries:
    for param in library.params:
        do_copy_fields(vehicle.params, libraries, param)

# normalise then validate the vehicle's own parameters
for param in vehicle.params:
    clean_param(param)

for param in vehicle.params:
    validate(param)

# Find duplicate names in library and fix up path
for library in libraries:
    param_names_seen = set()
    param_names_duplicate = set()
    # Find duplicates:
    for param in library.params:
        if param.name in param_names_seen:  # is duplicate
            param_names_duplicate.add(param.name)
        param_names_seen.add(param.name)
    # Fix up path for duplicates
    for param in library.params:
        if param.name in param_names_duplicate:
            # keep just the basename (without extension) to disambiguate
            param.path = param.path.rsplit('/')[-1].rsplit('.')[0]
        else:
            # not a duplicate, so delete attribute.
            delattr(param, "path")

for library in libraries:
    for param in library.params:
        clean_param(param)

for library in libraries:
    for param in library.params:
        validate(param, is_library=True)

# validation-only mode: exit with the error count as status
if not args.emit_params:
    sys.exit(error_count)

all_emitters = {
    'json': JSONEmit,
    'xml': XmlEmit,
    'html': HtmlEmit,
    'rst': RSTEmit,
    'rstlatexpdf': RSTLATEXPDFEmit,
    'md': MDEmit,
    'xml_mp': XmlEmitMP,
}

# edn support is optional (needs third-party edn_format/pytz)
try:
    from ednemit import EDNEmit
    all_emitters['edn'] = EDNEmit
except ImportError:
    # if the user wanted edn only then don't hide any errors
    if args.output_format == 'edn':
        raise

    if args.verbose:
        print("Unable to emit EDN, install edn_format and pytz if edn is desired")

# filter to just the ones we want to emit:
emitters_to_use = []
for emitter_name in all_emitters.keys():
    if args.output_format == 'all' or args.output_format == emitter_name:
        emitters_to_use.append(emitter_name)

# actually invoke each emitter:
for emitter_name in emitters_to_use:
    emit = all_emitters[emitter_name]()

    emit.emit(vehicle)

    emit.start_libraries()

    # create a single parameter list for all SIM_ parameters (for rst to use)
    sim_params = []
    for library in libraries:
        if library.name.startswith("SIM_"):
            sim_params.extend(library.params)
    sim_params = sorted(sim_params, key=lambda x : x.name)

    for library in libraries:
        if library.params:
            # we sort the parameters in the SITL library to avoid
            # rename, and on the assumption that an asciibetical sort
            # gives a good layout:
            if emitter_name == 'rst':
                if library.not_rst:
                    continue
                if library.name == 'SIM_':
                    # merge all SIM_* params into one SIM_ section for rst
                    library = copy.deepcopy(library)
                    library.params = sim_params
                elif library.name.startswith('SIM_'):
                    continue
            emit.emit(library)

    emit.close()

# exit status is the number of validation errors found
sys.exit(error_count)
|