mirror of
https://github.com/ArduPilot/ardupilot
synced 2025-01-03 06:28:27 -04:00
autotest: add parser for the logger metadata
This commit is contained in:
parent
55ee3362b9
commit
60ff6c0b24
61
Tools/autotest/logger_metadata/emit_html.py
Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env python

from __future__ import print_function

import emitter

class HTMLEmitter(emitter.Emitter):
    def preface(self):
        return """<!-- Dynamically generated list of Logger Messages
This page was generated using Tools/autotest/logger_metadata/parse.py

DO NOT EDIT
-->


<h3 style="text-align: center">Onboard Message Log Messages</h3>
<hr />

<p>This is a list of log messages which may be present in logs produced and stored onboard ArduPilot vehicles.</p>

<!-- add auto-generated table of contents with "Table of Contents Plus" plugin -->
[toc exclude="Onboard Message Log Messages"]

"""
    def postface(self):
        return ""

    def start(self):
        self.fh = open("LogMessages.html", mode='w')
        print(self.preface(), file=self.fh)

    def emit(self, doccos):
        self.start()
        for docco in doccos:
            print(' <h1>%s</h1>' % docco.name, file=self.fh)
            if docco.url is not None:
                print(' <a href="%s">More information</a>' % docco.url, file=self.fh)
            if docco.description is not None:
                print(' <h2>%s</h2>' %
                      docco.description, file=self.fh)
            print(' <table>', file=self.fh)
            print(" <tr><th>FieldName</th><th>Description</th></tr>",
                  file=self.fh)
            for f in docco.fields:
                if "description" in docco.fields[f]:
                    fdesc = docco.fields[f]["description"]
                else:
                    fdesc = ""
                print(' <tr><td>%s</td><td>%s</td></tr>' % (f, fdesc),
                      file=self.fh)
#                if "bits" in docco.fields[f]:
#                    print(' <bits>%s</bits>' %
#                          docco.fields[f]["bits"], file=self.fh)
            print(' </table>', file=self.fh)

            print("", file=self.fh)
        self.stop()

    def stop(self):
        print(self.postface(), file=self.fh)
        self.fh.close()
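The emitters never declare the docco interface explicitly; emit() simply reads name, url, description and a fields dict keyed by field name from whatever it is handed (parse.py's Docco objects provide exactly that). A minimal sketch of driving HTMLEmitter directly with a hypothetical stand-in object, assuming emit_html.py and emitter.py are importable from the current directory (FakeDocco is not part of this commit):

import emit_html


class FakeDocco(object):
    # Stand-in with only the attributes the emitters read from a docco.
    def __init__(self, name):
        self.name = name
        self.url = None
        self.description = None
        self.fields = {}


docco = FakeDocco("TEST")
docco.description = "An example message"
docco.fields["TimeUS"] = {"description": "Time since system startup"}

# Writes LogMessages.html in the current directory.
emit_html.HTMLEmitter().emit([docco])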
181
Tools/autotest/logger_metadata/emit_rst.py
Normal file
@@ -0,0 +1,181 @@
#!/usr/bin/env python

from __future__ import print_function

import emitter

class RSTEmitter(emitter.Emitter):
    def preface(self):
        return """.. Dynamically generated list of Logger Messages
.. This page was generated using Tools/autotest/logger_metadata/parse.py

.. DO NOT EDIT

.. _logmessages:

Onboard Message Log Messages
============================

This is a list of log messages which may be present in logs produced and stored onboard ArduPilot vehicles.

"""
    def postface(self):
        return ""

    def start(self):
        self.fh = open("LogMessages.rst", mode='w')
        print(self.preface(), file=self.fh)

    def emit(self, doccos):
        self.start()
        for docco in doccos:
            print('.. _%s:' % docco.name, file=self.fh)
            print("", file=self.fh)
            desc = docco.description
            if desc is None:
                desc = ""
            line = '%s: %s' % (docco.name, desc)
            print(line, file=self.fh)
            print("~" * len(line), file=self.fh)

            rows = []
            for f in docco.fields:
                if "description" in docco.fields[f]:
                    fdesc = docco.fields[f]["description"]
                else:
                    fdesc = ""
                rows.append([f, fdesc])
#                if "bits" in docco.fields[f]:
#                    print(' <bits>%s</bits>' %
#                          docco.fields[f]["bits"], file=self.fh)
            print(self.tablify(rows), file=self.fh)

            print("", file=self.fh)
        self.stop()

    def stop(self):
        print(self.postface(), file=self.fh)
        self.fh.close()


    # tablify swiped from rstemit.py

    def tablify_row(self, rowheading, row, widths, height):
        joiner = "|"

        row_lines = [x.split("\n") for x in row]
        for row_line in row_lines:
            row_line.extend([""] * (height - len(row_line)))
        if rowheading is not None:
            rowheading_lines = rowheading.split("\n")
            rowheading_lines.extend([""] * (height - len(rowheading_lines)))

        out_lines = []
        for i in range(0, height):
            out_line = ""
            if rowheading is not None:
                rowheading_line = rowheading_lines[i]
                out_line += joiner + " " + rowheading_line + " " * (widths[0] - len(rowheading_line) - 1)
                joiner = "#"
            j = 0
            for item in row_lines:
                widthnum = j
                if rowheading is not None:
                    widthnum += 1
                line = item[i]
                out_line += joiner + " " + line + " " * (widths[widthnum] - len(line) - 1)
                joiner = "|"
                j += 1
            out_line += "|"
            out_lines.append(out_line)
        return "\n".join(out_lines)

    def tablify_longest_row_length(self, rows, rowheadings, headings):
        check_width_rows = rows[:]
        if headings is not None:
            check_width_rows.append(headings)
        longest_row_length = 0
        for row in check_width_rows:
            if len(row) > longest_row_length:
                longest_row_length = len(row)
        if rowheadings is not None:
            longest_row_length += 1
        return longest_row_length

    def longest_line_in_string(self, string):
        longest = 0
        for line in string.split("\n"):
            if len(line) > longest:
                longest = len(line)
        return longest

    def tablify_calc_row_widths_heights(self, rows, rowheadings, headings):
        rows_to_check = []
        if headings is not None:
            rows_to_check.append(headings)
        rows_to_check.extend(rows[:])

        heights = [0] * len(rows_to_check)

        longest_row_length = self.tablify_longest_row_length(rows, rowheadings, headings)
        widths = [0] * longest_row_length

        all_rowheadings = []
        if rowheadings is not None:
            if headings is not None:
                all_rowheadings.append("")
            all_rowheadings.extend(rowheadings)

        for rownum in range(0, len(rows_to_check)):
            row = rows_to_check[rownum]
            values_to_check = []
            if rowheadings is not None:
                values_to_check.append(all_rowheadings[rownum])
            values_to_check.extend(row[:])
            colnum = 0
            for value in values_to_check:
                height = len(value.split("\n"))
                if height > heights[rownum]:
                    heights[rownum] = height
                longest_line = self.longest_line_in_string(value)
                width = longest_line + 2  # +2 for leading/trailing ws
                if width > widths[colnum]:
                    widths[colnum] = width
                colnum += 1
        return (widths, heights)

    def tablify(self, rows, headings=None, rowheadings=None):

        (widths, heights) = self.tablify_calc_row_widths_heights(rows, rowheadings, headings)

        # create dividing lines
        bar = ""
        heading_bar = ""
        for width in widths:
            bar += "+"
            heading_bar += "+"
            bar += "-" * width
            heading_bar += "=" * width
        bar += "+"
        heading_bar += "+"

        # create table
        ret = bar + "\n"
        if headings is not None:
            rowheading = None
            if rowheadings is not None:
                rowheading = ""
            ret += self.tablify_row(rowheading, headings, widths, heights[0]) + "\n"
            ret += heading_bar + "\n"
        for i in range(0, len(rows)):
            rowheading = None
            height = i
            if rowheadings is not None:
                rowheading = rowheadings[i]
            if headings is not None:
                height += 1
            ret += self.tablify_row(rowheading, rows[i], widths, heights[height]) + "\n"
        ret += bar + "\n"

        return ret
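tablify() and its helpers build reStructuredText grid tables: every cell is measured for its widest line (plus two characters of padding) and every row for its tallest cell, then the padded cells are framed with '+'/'-' bars ('=' under the optional headings row). A small sketch of calling it directly, assuming it is run from this directory so that emit_rst imports cleanly (the field names and descriptions below are invented):

from __future__ import print_function

import emit_rst

rows = [
    ["TimeUS", "Time since system startup"],
    ["Alt", "Altitude"],
]
# Prints a +----+----+ style grid table; headings and rowheadings are optional.
print(emit_rst.RSTEmitter().tablify(rows, headings=["FieldName", "Description"]))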
49
Tools/autotest/logger_metadata/emit_xml.py
Normal file
@@ -0,0 +1,49 @@
#!/usr/bin/env python

from __future__ import print_function

import emitter

class XMLEmitter(emitter.Emitter):
    def preface(self):
        return """<?xml version="1.0" encoding="utf-8"?>
<!-- Dynamically generated list of documented logfile messages (generated by parse.py) -->
<loggermessagefile>
"""
    def postface(self):
        return "</loggermessagefile>"

    def start(self):
        self.fh = open("LogMessages.xml", mode='w')
        print(self.preface(), file=self.fh)

    def emit(self, doccos):
        self.start()
        for docco in doccos:
            print(' <logformat name="%s">' % docco.name, file=self.fh)
            if docco.url is not None:
                print(' <url>%s</url>' % docco.url, file=self.fh)
            if docco.description is not None:
                print(' <description>%s</description>' %
                      docco.description, file=self.fh)
            print(' <fields>', file=self.fh)
            for f in docco.fields:
                print(' <field name="%s">' % f, file=self.fh)
                if "description" in docco.fields[f]:
                    print(' <description>%s</description>' %
                          docco.fields[f]["description"], file=self.fh)
                if "bits" in docco.fields[f]:
                    print(' <bits>%s</bits>' %
                          docco.fields[f]["bits"], file=self.fh)
                print(' </field>', file=self.fh)

            print(' </fields>', file=self.fh)

            print(' </logformat>', file=self.fh)

            print("", file=self.fh)
        self.stop()

    def stop(self):
        print(self.postface(), file=self.fh)
        self.fh.close()
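The XML output nests <logformat> elements (each with an optional <url>, an optional <description> and a <fields> list of <field> elements) inside a single <loggermessagefile> root, so it can be post-processed with standard tools. A sketch of reading the generated file back with the standard library, assuming LogMessages.xml has already been produced in the current directory; note the emitter interpolates text with a bare %s, so parsing would fail if a description ever contained an unescaped XML character such as '&':

import xml.etree.ElementTree as ET

root = ET.parse("LogMessages.xml").getroot()  # <loggermessagefile>
for logformat in root.findall("logformat"):
    fields = logformat.findall("fields/field")
    print("%s: %u field(s) documented" % (logformat.get("name"), len(fields)))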
3
Tools/autotest/logger_metadata/emitter.py
Normal file
@@ -0,0 +1,3 @@
class Emitter(object):
    pass
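Emitter is an empty base class; the shared shape (a preface()/postface() pair, start() opening the output file, emit() walking the doccos, stop() closing the file) is repeated in each subclass rather than factored up. A further output format would follow the same pattern; the JSONEmitter below is purely illustrative and not part of this commit:

#!/usr/bin/env python

from __future__ import print_function

import json

import emitter

class JSONEmitter(emitter.Emitter):
    # Hypothetical example only: mirrors the start/emit/stop shape of the
    # real emitters in this commit.
    def start(self):
        self.fh = open("LogMessages.json", mode='w')

    def emit(self, doccos):
        self.start()
        out = {}
        for docco in doccos:
            out[docco.name] = {
                "description": docco.description,
                "url": docco.url,
                "fields": docco.fields,
            }
        print(json.dumps(out, indent=2), file=self.fh)
        self.stop()

    def stop(self):
        self.fh.close()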
169
Tools/autotest/logger_metadata/parse.py
Executable file
@@ -0,0 +1,169 @@
#!/usr/bin/env python

from __future__ import print_function

import argparse
import os
import re
import sys

import emit_html
import emit_rst
import emit_xml

topdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../')
topdir = os.path.realpath(topdir)

re_loggermessage = re.compile(r"@LoggerMessage\s*:\s*(\w+)", re.MULTILINE)
re_commentline = re.compile(r"\s*//")
re_description = re.compile(r"\s*//\s*@Description\s*:\s*(.*)")
re_url = re.compile(r"\s*//\s*@URL\s*:\s*(.*)")
re_field = re.compile(r"\s*//\s*@Field\s*:\s*(\w+):\s*(.*)")
re_fieldbits = re.compile(r"\s*//\s*@FieldBits\s*:\s*(\w+):\s*(.*)")
re_vehicles = re.compile(r"\s*//\s*@Vehicles\s*:\s*(.*)")

# TODO: validate URLS actually return 200
# TODO: augment with other information from log definitions; type and units...


class LoggerDocco(object):

    vehicle_map = {
        "Rover": "APMrover2",
        "Sub": "ArduSub",
        "Copter": "ArduCopter",
        "Plane": "ArduPlane",
        "Tracker": "AntennaTracker",
    }

    def __init__(self, vehicle):
        self.vehicle = vehicle
        self.doccos = []
        self.emitters = [
            emit_html.HTMLEmitter(),
            emit_rst.RSTEmitter(),
            emit_xml.XMLEmitter(),
        ]

    class Docco(object):

        def __init__(self, name):
            self.name = name
            self.url = None
            self.description = None
            self.fields = {}
            self.vehicles = None

        def set_description(self, desc):
            self.description = desc

        def set_url(self, url):
            self.url = url

        def set_field_description(self, field, description):
            if field not in self.fields:
                self.fields[field] = {}
            self.fields[field]["description"] = description

        def set_field_bits(self, field, bits):
            if field not in self.fields:
                self.fields[field] = {}
            self.fields[field]["bits"] = bits

        def set_vehicles(self, vehicles):
            self.vehicles = vehicles

    def search_for_files(self, dirs_to_search):
        _next = []
        for _dir in dirs_to_search:
            for entry in os.listdir(_dir):
                filepath = os.path.join(_dir, entry)
                if os.path.isdir(filepath):
                    _next.append(filepath)
                    continue
                (name, extension) = os.path.splitext(filepath)
                if extension not in [".cpp", ".h"]:
                    continue
                self.files.append(filepath)
        if len(_next):
            self.search_for_files(_next)

    def parse_file(self, filepath):
        with open(filepath) as f:
            # print("Opened (%s)" % filepath)
            lines = f.readlines()
            f.close()
        state_outside = "outside"
        state_inside = "inside"
        state = state_outside
        docco = None
        for line in lines:
            if state == state_outside:
                m = re_loggermessage.search(line)
                if m is None:
                    continue
                name = m.group(1)
                state = state_inside
                docco = LoggerDocco.Docco(name)
            elif state == state_inside:
                if not re_commentline.match(line):
                    state = state_outside
                    if docco.vehicles is None or self.vehicle in docco.vehicles:
                        self.finalise_docco(docco)
                    continue
                m = re_description.match(line)
                if m is not None:
                    docco.set_description(m.group(1))
                    continue
                m = re_url.match(line)
                if m is not None:
                    docco.set_url(m.group(1))
                    continue
                m = re_field.match(line)
                if m is not None:
                    docco.set_field_description(m.group(1), m.group(2))
                    continue
                m = re_fieldbits.match(line)
                if m is not None:
                    docco.set_field_bits(m.group(1), m.group(2))
                    continue
                m = re_vehicles.match(line)
                if m is not None:
                    docco.set_vehicles([x.strip() for x in m.group(1).split(',')])
                    continue
                print("Unknown field (%s)" % str(line))
                sys.exit(1)

    def parse_files(self):
        for _file in self.files:
            self.parse_file(_file)

    def emit_output(self):
        for emitter in self.emitters:
            emitter.emit(self.doccos)

    def run(self):
        self.files = []
        self.search_for_files([self.vehicle_map[self.vehicle], "libraries"])
        self.parse_files()
        self.emit_output()

    def finalise_docco(self, docco):
        self.doccos.append(docco)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Parse LoggerMessage documentation from ArduPilot source.")
    parser.add_argument("-v", "--verbose", dest='verbose', action='store_true', default=False, help="show debugging output")
    parser.add_argument("--vehicle", required=True, help="Vehicle type to generate for")

    args = parser.parse_args()

    s = LoggerDocco(args.vehicle)

    if args.vehicle not in s.vehicle_map:
        print("Invalid vehicle (choose from: %s)" % str(s.vehicle_map.keys()))
        sys.exit(1)

    s.run()
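For reference, the regexes above accept documentation written as // comment blocks in the C++ sources: a block opens at @LoggerMessage, continues while lines are // comments, and every comment line inside it must match one of the recognised @ tags (otherwise the script exits). A standalone sketch applying the same patterns to a made-up block (the XMPL message and its fields are invented for illustration):

from __future__ import print_function

import re

# Same patterns as parse.py above.
re_loggermessage = re.compile(r"@LoggerMessage\s*:\s*(\w+)", re.MULTILINE)
re_description = re.compile(r"\s*//\s*@Description\s*:\s*(.*)")
re_field = re.compile(r"\s*//\s*@Field\s*:\s*(\w+):\s*(.*)")

# A made-up block in the style the parser expects:
sample = """
// @LoggerMessage: XMPL
// @Description: An example message
// @Field: TimeUS: Time since system startup
// @Field: Value: Example value
"""

for line in sample.splitlines():
    m = re_loggermessage.search(line)
    if m is not None:
        print("message:", m.group(1))
    m = re_description.match(line)
    if m is not None:
        print("description:", m.group(1))
    m = re_field.match(line)
    if m is not None:
        print("field:", m.group(1), "->", m.group(2))

# To run the real parser (it searches the vehicle directory and libraries/
# relative to the current working directory, so run from the repository root):
#     python Tools/autotest/logger_metadata/parse.py --vehicle Copter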