#!/usr/bin/env python

'''Generates parameter metadata files suitable for consumption by
ground control stations and various web services

AP_FLAKE8_CLEAN

'''

from __future__ import print_function
import os
import re
import sys
from argparse import ArgumentParser

from param import (Library, Parameter, Vehicle, known_group_fields,
                   known_param_fields, required_param_fields, known_units)
from htmlemit import HtmlEmit
from rstemit import RSTEmit
from xmlemit import XmlEmit
from mdemit import MDEmit
from jsonemit import JSONEmit
from xmlemit_mp import XmlEmitMP

parser = ArgumentParser(description="Parse ArduPilot parameters.")
parser.add_argument("-v", "--verbose", dest='verbose', action='store_true', default=False, help="show debugging output")
parser.add_argument("--vehicle", required=True, help="Vehicle type to generate for")
parser.add_argument("--no-emit",
                    dest='emit_params',
                    action='store_false',
                    default=True,
                    help="don't emit parameter documentation, just validate")
parser.add_argument("--format",
                    dest='output_format',
                    action='store',
                    default='all',
                    choices=['all', 'html', 'rst', 'wiki', 'xml', 'json', 'edn', 'md', 'xml_mp'],
                    help="what output format to use")
parser.add_argument("--sitl",
                    dest='emit_sitl',
                    action='store_true',
                    default=False,
                    help="if set, emit only SITL parameters; otherwise SITL parameters are excluded")

args = parser.parse_args()
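
# Example invocations (assumed typical usage; adjust the vehicle name as needed):
#   python param_parse.py --vehicle ArduCopter --format rst
#   python param_parse.py --vehicle Rover --no-emit   # just validate the metadata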

# Regular expressions for parsing the parameter metadata

prog_param = re.compile(r"@Param(?:{([^}]+)})?: (\w+).*((?:\n[ \t]*// @(\w+)(?:{([^}]+)})?: ?(.*))+)(?:\n[ \t\r]*\n|\n[ \t]+[A-Z])", re.MULTILINE) # noqa
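# prog_param captures a whole parameter documentation block from the source,
# for example:
#   // @Param: RTL_ALT
#   // @DisplayName: RTL Altitude
#   // @Description: Altitude to climb to before returning to launch
#   // @Units: cm
#   // @User: Standard
# The optional tag form, e.g. "// @Param{Copter, Sub}: FOO", restricts the
# block to the listed vehicles.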

# match e.g @Value: 0=Unity, 1=Koala, 17=Liability
prog_param_fields = re.compile(r"[ \t]*// @(\w+): ?([^\r\n]*)")
# match e.g @Value{Copter}: 0=Volcano, 1=Peppermint
prog_param_tagged_fields = re.compile(r"[ \t]*// @(\w+){([^}]+)}: ([^\r\n]*)")

prog_groups = re.compile(r"@Group: *(\w+).*((?:\n[ \t]*// @(Path): (\S+))+)", re.MULTILINE)
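# prog_groups matches a group declaration together with its @Path line(s), e.g.:
#   // @Group: COMPASS_
#   // @Path: ../libraries/AP_Compass/AP_Compass.cpp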

apm_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../')


def find_vehicle_parameter_filepath(vehicle_name):
    apm_tools_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../Tools/')

    vehicle_name_to_dir_name_map = {
        "Copter": "ArduCopter",
        "Plane": "ArduPlane",
        "Tracker": "AntennaTracker",
        "Sub": "ArduSub",
    }

    # first try e.g. ArduCopter/Parameters.cpp
    for top_dir in apm_path, apm_tools_path:
        path = os.path.join(top_dir, vehicle_name, "Parameters.cpp")
        if os.path.exists(path):
            return path

        # then see if we can map e.g. Copter -> ArduCopter
        if vehicle_name in vehicle_name_to_dir_name_map:
            path = os.path.join(top_dir, vehicle_name_to_dir_name_map[vehicle_name], "Parameters.cpp")
            if os.path.exists(path):
                return path

    raise ValueError("Unable to find parameters file for (%s)" % vehicle_name)
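

# e.g. find_vehicle_parameter_filepath("Copter") should resolve to
# <source tree>/ArduCopter/Parameters.cpp in a standard ArduPilot checkout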


libraries = []

# AP_Vehicle also has parameters rooted at "", but isn't referenced
# from the vehicle in any way:
ap_vehicle_lib = Library("")  # the "" is tacked onto the front of the param name
setattr(ap_vehicle_lib, "Path", os.path.join('..', 'libraries', 'AP_Vehicle', 'AP_Vehicle.cpp'))
libraries.append(ap_vehicle_lib)

error_count = 0
current_param = None
current_file = None


def debug(str_to_print):
    """Debug output if verbose is set."""
    if args.verbose:
        print(str_to_print)


def error(str_to_print):
    """Show errors."""
    global error_count
    error_count += 1
    if current_file is not None:
        print("Error in %s" % current_file)
    if current_param is not None:
        print("At param %s" % current_param)
    print(str_to_print)


truename_map = {
    "Rover": "Rover",
    "ArduSub": "Sub",
    "ArduCopter": "Copter",
    "ArduPlane": "Plane",
    "AntennaTracker": "Tracker",
    "AP_Periph": "AP_Periph",
}
valid_truenames = frozenset(truename_map.values())
truename = truename_map.get(args.vehicle, args.vehicle)
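# e.g. "--vehicle ArduCopter" and "--vehicle Copter" both end up with the
# truename "Copter"; names without a mapping are passed through unchanged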

vehicle_path = find_vehicle_parameter_filepath(args.vehicle)

basename = os.path.basename(os.path.dirname(vehicle_path))
path = os.path.normpath(os.path.dirname(vehicle_path))
reference = basename  # keep e.g. "ArduCopter" as the reference so existing links don't break
vehicle = Vehicle(truename, path, reference=reference)
debug('Found vehicle type %s' % vehicle.name)


def process_vehicle(vehicle):
    debug("===\n\n\nProcessing %s" % vehicle.name)
    current_file = vehicle.path+'/Parameters.cpp'

    f = open(current_file)
    p_text = f.read()
    f.close()
    group_matches = prog_groups.findall(p_text)

    debug(group_matches)
    for group_match in group_matches:
        lib = Library(group_match[0])
        fields = prog_param_fields.findall(group_match[1])
        for field in fields:
            if field[0] in known_group_fields:
                setattr(lib, field[0], field[1])
            else:
                error("group: unknown parameter metadata field '%s'" % field[0])
        if not any(lib.name == parsed_l.name for parsed_l in libraries):
            libraries.append(lib)

    param_matches = []
    if not args.emit_sitl:
        param_matches = prog_param.findall(p_text)
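
    # each prog_param match is a tuple; only the first three groups are used:
    # the optional vehicle tag, the parameter name and the block of
    # "// @Field: value" lines that follows it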
    for param_match in param_matches:
        (only_vehicles, param_name, field_text) = (param_match[0],
                                                   param_match[1],
                                                   param_match[2])
        if len(only_vehicles):
            only_vehicles_list = [x.strip() for x in only_vehicles.split(",")]
            for only_vehicle in only_vehicles_list:
                if only_vehicle not in valid_truenames:
                    raise ValueError("Invalid only_vehicle %s" % only_vehicle)
            if vehicle.truename not in only_vehicles_list:
                continue
        p = Parameter(vehicle.reference+":"+param_name, current_file)
        debug(p.name + ' ')
        global current_param
        current_param = p.name
        fields = prog_param_fields.findall(field_text)
        field_list = []
        for field in fields:
            field_list.append(field[0])
            if field[0] in known_param_fields:
                value = re.sub('@PREFIX@', "", field[1]).rstrip()
                setattr(p, field[0], value)
            else:
                error("param: unknown parameter metadata field '%s'" % field[0])
        for req_field in required_param_fields:
            if req_field not in field_list:
                error("missing parameter metadata field '%s' in %s" % (req_field, field_text))

        vehicle.params.append(p)
    current_file = None
    debug("Processed %u params" % len(vehicle.params))


process_vehicle(vehicle)

debug("Found %u documented libraries" % len(libraries))

if args.emit_sitl:
    libraries = filter(lambda x: x.name == 'SIM_', libraries)
else:
    libraries = filter(lambda x: x.name != 'SIM_', libraries)

libraries = list(libraries)
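# filter() returns a lazy iterator on Python 3; materialise it as a list so the
# libraries can be iterated over more than once below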

alllibs = libraries[:]


def process_library(vehicle, library, pathprefix=None):
    '''process one library'''
    paths = library.Path.split(',')
    for path in paths:
        path = path.strip()
        global current_file
        current_file = path
        debug("\n Processing file '%s'" % path)
        if pathprefix is not None:
            libraryfname = os.path.join(pathprefix, path)
        elif path.find('/') == -1:
            libraryfname = os.path.join(vehicle.path, path)
        else:
            libraryfname = os.path.normpath(os.path.join(apm_path + '/libraries/' + path))
        if path and os.path.exists(libraryfname):
            f = open(libraryfname)
            p_text = f.read()
            f.close()
        else:
            error("Path %s not found for library %s (fname=%s)" % (path, library.name, libraryfname))
            continue

        param_matches = prog_param.findall(p_text)
        debug("Found %u documented parameters" % len(param_matches))
        for param_match in param_matches:
            (only_vehicles, param_name, field_text) = (param_match[0],
                                                       param_match[1],
                                                       param_match[2])
            if len(only_vehicles):
                only_vehicles_list = [x.strip() for x in only_vehicles.split(",")]
                for only_vehicle in only_vehicles_list:
                    if only_vehicle not in valid_truenames:
                        raise ValueError("Invalid only_vehicle %s" % only_vehicle)
                if vehicle.name not in only_vehicles_list:
                    continue
            p = Parameter(library.name+param_name, current_file)
            debug(p.name + ' ')
            global current_param
            current_param = p.name
            fields = prog_param_fields.findall(field_text)
            for field in fields:
                if field[0] in known_param_fields:
                    value = re.sub('@PREFIX@', library.name, field[1])
                    setattr(p, field[0], value)
                else:
                    error("param: unknown parameter metadata field %s" % field[0])
            debug("matching %s" % field_text)
            fields = prog_param_tagged_fields.findall(field_text)
            # a parameter is considered vehicle-specific if no @Values: or
            # @Values{VehicleName} entry exists for this vehicle but a
            # @Values{OtherVehicle} entry does
            seen_values_or_bitmask_for_other_vehicle = False
            for field in fields:
                only_for_vehicles = field[1].split(",")
                only_for_vehicles = [x.rstrip().lstrip() for x in only_for_vehicles]
                delta = set(only_for_vehicles) - set(truename_map.values())
                if len(delta):
                    error("Unknown vehicles (%s)" % delta)
                debug("field[0]=%s vehicle=%s field[1]=%s only_for_vehicles=%s\n" %
                      (field[0], vehicle.name, field[1], str(only_for_vehicles)))
                if field[0] not in known_param_fields:
                    error("tagged param: unknown parameter metadata field '%s'" % field[0])
                    continue
                if vehicle.name not in only_for_vehicles:
                    if len(only_for_vehicles) and field[0] in ['Values', 'Bitmask']:
                        seen_values_or_bitmask_for_other_vehicle = True
                    continue
                value = re.sub('@PREFIX@', library.name, field[2])
                setattr(p, field[0], value)

            if (getattr(p, 'Values', None) is not None and
                    getattr(p, 'Bitmask', None) is not None):
                error("Both @Values and @Bitmask present")

            if (getattr(p, 'Values', None) is None and
                    getattr(p, 'Bitmask', None) is None):
                # neither @Values nor @Bitmask is available for this vehicle
                if seen_values_or_bitmask_for_other_vehicle:
                    # we've (e.g.) seen @Values{Copter} when we're
                    # processing for Rover, and haven't seen either
                    # @Values: or @Values{Rover} - so we omit this
                    # parameter on the assumption that it is not
                    # applicable for this vehicle.
                    continue

            p.path = path  # Add path. Later deleted - only used for duplicates
            library.params.append(p)

        group_matches = prog_groups.findall(p_text)
        debug("Found %u groups" % len(group_matches))
        debug(group_matches)
        done_groups = dict()
        for group_match in group_matches:
            group = group_match[0]
            debug("Group: %s" % group)
            do_append = True
            if group in done_groups:
                # this is to handle cases like the RangeFinder
                # parameters, where the Wasp driver's parameters get
                # tacked onto the same RNGFND1_ group
                lib = done_groups[group]
                do_append = False
            else:
                lib = Library(group)
                done_groups[group] = lib

            fields = prog_param_fields.findall(group_match[1])
            for field in fields:
                if field[0] in known_group_fields:
                    setattr(lib, field[0], field[1])
                else:
                    error("unknown parameter metadata field '%s'" % field[0])
            if not any(lib.name == parsed_l.name for parsed_l in libraries):
                if do_append:
                    lib.set_name(library.name + lib.name)
                debug("Group name: %s" % lib.name)
                process_library(vehicle, lib, os.path.dirname(libraryfname))
                if do_append:
                    alllibs.append(lib)

    current_file = None


for library in libraries:
    debug("===\n\n\nProcessing library %s" % library.name)

    if hasattr(library, 'Path'):
        process_library(vehicle, library)
    else:
        error("Skipped: no Path found")

    debug("Processed %u documented parameters" % len(library.params))

# sort libraries by name
alllibs = sorted(alllibs, key=lambda x: x.name)

libraries = alllibs


def is_number(numberString):
    try:
        float(numberString)
        return True
    except ValueError:
        return False


def clean_param(param):
    if (hasattr(param, "Values")):
        valueList = param.Values.split(",")
        new_valueList = []
        for i in valueList:
            (start, sep, end) = i.partition(":")
            if sep != ":":
                raise ValueError("Expected a colon separator in (%s)" % (i,))
            if len(end) == 0:
                raise ValueError("Expected a colon-separated string, got (%s)" % i)
            end = end.strip()
            start = start.strip()
            new_valueList.append(":".join([start, end]))
        param.Values = ",".join(new_valueList)
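

# e.g. clean_param() normalises a Values string such as "0: Disabled , 1 :Enabled"
# to "0:Disabled,1:Enabled"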


def validate(param):
    """
    Validates the parameter metadata.
    """
    global current_file
    current_file = param.real_path
    global current_param
    current_param = param.name
    # Validate values
    if (hasattr(param, "Range")):
        rangeValues = param.__dict__["Range"].split(" ")
        if (len(rangeValues) != 2):
            error("Invalid Range values for %s (%s)" %
                  (param.name, param.__dict__["Range"]))
            return
        min_value = rangeValues[0]
        max_value = rangeValues[1]
        if not is_number(min_value):
            error("Min value not number: %s %s" % (param.name, min_value))
            return
        if not is_number(max_value):
            error("Max value not number: %s %s" % (param.name, max_value))
            return
    # Check for duplicates in the @Values field
    if (hasattr(param, "Values")):
        valueList = param.__dict__["Values"].split(",")
        values = []
        for i in valueList:
            i = i.replace(" ", "")
            values.append(i.partition(":")[0])
        if (len(values) != len(set(values))):
            error("Duplicate values found")
    # Validate units
    if (hasattr(param, "Units")):
        if (param.__dict__["Units"] != "") and (param.__dict__["Units"] not in known_units):
            error("unknown units field '%s'" % param.__dict__["Units"])
    # Validate User
    if (hasattr(param, "User")):
        if param.User.strip() not in ["Standard", "Advanced"]:
            error("unknown user (%s)" % param.User.strip())

    if (hasattr(param, "Description")):
        if not param.Description or not param.Description.strip():
            error("Empty Description (%s)" % param)


for param in vehicle.params:
    clean_param(param)

for param in vehicle.params:
    validate(param)

# Find duplicate names in library and fix up path
for library in libraries:
    param_names_seen = set()
    param_names_duplicate = set()
    # Find duplicates:
    for param in library.params:
        if param.name in param_names_seen:  # is duplicate
            param_names_duplicate.add(param.name)
        param_names_seen.add(param.name)
    # Fix up path for duplicates
    for param in library.params:
        if param.name in param_names_duplicate:
            param.path = param.path.rsplit('/')[-1].rsplit('.')[0]
        else:
            # not a duplicate, so delete attribute.
            delattr(param, "path")

for library in libraries:
    for param in library.params:
        clean_param(param)

for library in libraries:
    for param in library.params:
        validate(param)

if not args.emit_params:
    sys.exit(error_count)

all_emitters = {
    'json': JSONEmit,
    'xml': XmlEmit,
    'html': HtmlEmit,
    'rst': RSTEmit,
    'md': MDEmit,
    'xml_mp': XmlEmitMP,
}

try:
    from ednemit import EDNEmit
    all_emitters['edn'] = EDNEmit
except ImportError:
    # if the user wanted edn only then don't hide any errors
    if args.output_format == 'edn':
        raise

    if args.verbose:
        print("Unable to emit EDN, install edn_format and pytz if edn is desired")

# filter to just the ones we want to emit:
emitters_to_use = []
for emitter_name in all_emitters.keys():
    if args.output_format == 'all' or args.output_format == emitter_name:
        emitters_to_use.append(emitter_name)

if args.emit_sitl:
    # only generate rst for SITL for now:
    emitters_to_use = ['rst']

# actually invoke each emitter:
for emitter_name in emitters_to_use:
    emit = all_emitters[emitter_name](sitl=args.emit_sitl)

    if not args.emit_sitl:
        emit.emit(vehicle)

    emit.start_libraries()

    for library in libraries:
        if library.params:
            emit.emit(library)

    emit.close()
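
# the exit status is the number of metadata errors found, so any non-zero exit
# indicates invalid parameter documentation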

sys.exit(error_count)