2014-01-27 02:38:57 -04:00
|
|
|
#
|
|
|
|
# Code to abstract the parsing of APM Dataflash log files, currently only used by the LogAnalyzer
|
|
|
|
#
|
2014-03-03 14:55:25 -04:00
|
|
|
# Initial code by Andrew Chapman (amchapman@gmail.com), 16th Jan 2014
|
2014-01-27 02:38:57 -04:00
|
|
|
#
|
|
|
|
|
2022-07-18 10:52:56 -03:00
|
|
|
# AP_FLAKE8_CLEAN
|
|
|
|
|
2022-07-11 10:09:01 -03:00
|
|
|
from __future__ import print_function, division
|
2022-07-18 11:09:04 -03:00
|
|
|
|
2014-02-26 08:50:55 -04:00
|
|
|
import bisect
|
2014-07-03 07:21:14 -03:00
|
|
|
import ctypes
|
2022-07-18 11:09:04 -03:00
|
|
|
import sys
|
2014-02-22 15:36:30 -04:00
|
|
|
|
2022-07-18 11:09:04 -03:00
|
|
|
import numpy
|
2015-10-14 21:05:45 -03:00
|
|
|
from VehicleType import VehicleType, VehicleTypeString
|
|
|
|
|
2022-07-18 11:09:04 -03:00
|
|
|
|
2014-06-27 19:35:13 -03:00
|
|
|
class Format(object):
    '''Data channel format as specified by the FMT lines in the log file'''

    def __init__(self, msgType, msgLen, name, types, labels):
        # 'FMT' marker so a Format can be dispatched like any other parsed line
        self.NAME = 'FMT'
        self.msgType = msgType  # numeric message id from the FMT line
        self.msgLen = msgLen  # declared message length in bytes
        self.name = name  # name of the message this format describes, e.g. "GPS"
        self.types = types  # string with one format character per field
        self.labels = labels.split(',')  # comma-separated field names -> list

    def __str__(self):
        return "%8s %s" % (self.name, repr(self.labels))

    @staticmethod
    def trycastToFormatType(value, valueType):
        """
        Using format characters from libraries/DataFlash/DataFlash.h to cast strings to basic python int/float/string
        types tries a cast, if it does not work, well, acceptable as the text logs do not match the format, e.g. MODE is
        expected to be int
        """
        try:
            if valueType in "fcCeELd":  # float-like formats (incl. scaled ints and lat/lon)
                return float(value)
            elif valueType in "bBhHiIMQq":  # integer formats
                return int(value)
            elif valueType in "nNZ":  # string formats
                return str(value)
        except ValueError:
            # value did not match its declared format; fall through and
            # return it unchanged
            pass
        return value

    def to_class(self):
        # Dynamically build a Log__<name> class with one property per field;
        # assigning through a property casts via trycastToFormatType().
        members = dict(
            NAME=self.name,
            labels=self.labels[:],
        )

        fieldtypes = [i for i in self.types]
        fieldlabels = self.labels[:]

        # field access
        for (label, _type) in zip(fieldlabels, fieldtypes):

            def createproperty(name, format):
                # extra scope for variable sanity
                # scaling via _NAME and def NAME(self): return self._NAME / SCALE
                propertyname = name
                attributename = '_' + name
                p = property(
                    lambda x: getattr(x, attributename),
                    lambda x, v: setattr(x, attributename, Format.trycastToFormatType(v, format)),
                )
                members[propertyname] = p
                members[attributename] = None

            createproperty(label, _type)

        # repr shows all values but the header
        members['__repr__'] = lambda x: "<{cls} {data}>".format(
            cls=x.__class__.__name__, data=' '.join(["{}:{}".format(k, getattr(x, '_' + k)) for k in x.labels])
        )

        def init(a, *x):
            # positional constructor: one argument per label, assigned through
            # the casting properties created above
            if len(x) != len(a.labels):
                raise ValueError("Invalid Length")
            for (l, v) in zip(a.labels, x):
                try:
                    setattr(a, l, v)
                except Exception as e:
                    print("{} {} {} failed".format(a, l, v))
                    print(e)

        members['__init__'] = init

        # finally, create the class
        cls = type('Log__{:s}'.format(self.name), (object,), members)
        return cls
|
|
|
|
|
2014-01-27 02:38:57 -04:00
|
|
|
|
2014-07-03 07:21:14 -03:00
|
|
|
class logheader(ctypes.LittleEndianStructure):
    '''Fixed three-byte header that starts every message in a binary dataflash log.'''

    # head1/head2 are the two sync bytes (0xA3 0x95, per df_header in
    # DataflashLog.read); msgid selects the FMT-defined message type
    _fields_ = [
        ('head1', ctypes.c_uint8),
        ('head2', ctypes.c_uint8),
        ('msgid', ctypes.c_uint8),
    ]

    def __repr__(self):
        return "<logheader head1=0x{self.head1:x} head2=0x{self.head2:x} msgid=0x{self.msgid:x} ({self.msgid})>".format(
            self=self
        )
|
2014-07-03 07:21:14 -03:00
|
|
|
|
|
|
|
|
|
|
|
class BinaryFormat(ctypes.LittleEndianStructure):
    '''A FMT message read from a binary log; to_class() builds a ctypes class for the message type it describes.'''

    NAME = 'FMT'
    MSG = 128  # message id of FMT itself in binary logs
    SIZE = 0  # patched after class creation (see module level below)

    # maps a FMT format character to the ctypes type used to unpack the field
    FIELD_FORMAT = {
        'a': ctypes.c_int16 * 32,
        'b': ctypes.c_int8,
        'B': ctypes.c_uint8,
        'h': ctypes.c_int16,
        'H': ctypes.c_uint16,
        'i': ctypes.c_int32,
        'I': ctypes.c_uint32,
        'f': ctypes.c_float,
        'd': ctypes.c_double,
        'n': ctypes.c_char * 4,
        'N': ctypes.c_char * 16,
        'Z': ctypes.c_char * 64,
        'c': ctypes.c_int16,  # * 100,
        'C': ctypes.c_uint16,  # * 100,
        'e': ctypes.c_int32,  # * 100,
        'E': ctypes.c_uint32,  # * 100,
        'L': ctypes.c_int32,
        'M': ctypes.c_uint8,
        'q': ctypes.c_int64,
        'Q': ctypes.c_uint64,
    }

    # divisor applied when reading fields stored as scaled integers
    FIELD_SCALE = {
        'c': 100,
        'C': 100,
        'e': 100,
        'E': 100,
    }

    # NOTE(review): ctypes spells this attribute '_pack_' (an int); '_packed_'
    # as written has no effect. The fields below are all 1-byte aligned so the
    # layout happens to be packed anyway — confirm before relying on it.
    _packed_ = True
    _fields_ = [
        ('head', logheader),
        ('type', ctypes.c_uint8),
        ('length', ctypes.c_uint8),
        ('name', ctypes.c_char * 4),
        ('types', ctypes.c_char * 16),
        ('labels', ctypes.c_char * 64),
    ]

    def __repr__(self):
        # show every field except the 3-byte header
        return "<{cls} {data}>".format(
            cls=self.__class__.__name__,
            data=' '.join(["{}:{}".format(k, getattr(self, k)) for (k, _) in self._fields_[1:]]),
        )

    def to_class(self):
        # Build a Log__<name> ctypes structure for this message type, with one
        # read-only property per field (scaled fields are divided on access).
        # Returns None when the FMT message is inconsistent.
        labels = self.labels.decode(encoding="utf-8") if self.labels else ""
        members = dict(
            NAME=self.name.decode(encoding="utf-8"),
            MSG=self.type,
            SIZE=self.length,
            labels=labels.split(","),
            _pack_=True,
        )

        # self.types is bytes on Python 3 (iterating yields ints) and str on
        # Python 2; normalise to a list of single-character strings
        if isinstance(self.types[0], str):
            fieldtypes = [i for i in self.types]
        else:
            fieldtypes = [chr(i) for i in self.types]
        fieldlabels = members["labels"]
        if self.labels and (len(fieldtypes) != len(fieldlabels)):
            print("Broken FMT message for {} .. ignoring".format(self.name), file=sys.stderr)
            return None

        fields = [('head', logheader)]

        # field access
        for (label, _type) in zip(fieldlabels, fieldtypes):

            def createproperty(name, format):
                # extra scope for variable sanity
                # scaling via _NAME and def NAME(self): return self._NAME / SCALE
                propertyname = name
                attributename = '_' + name
                scale = BinaryFormat.FIELD_SCALE.get(format, None)

                def get_message_attribute(x):
                    ret = getattr(x, attributename)
                    # string-typed fields are stored as bytes; decode on access
                    if str(format) in ['Z', 'n', 'N']:
                        ret = ret.decode(encoding="utf-8")
                    return ret

                p = property(get_message_attribute)
                if scale is not None:
                    p = property(lambda x: getattr(x, attributename) / scale)
                members[propertyname] = p
                try:
                    fields.append((attributename, BinaryFormat.FIELD_FORMAT[format]))
                except KeyError:
                    print('ERROR: Failed to add FMT type: {}, with format: {}'.format(attributename, format))
                    raise

            createproperty(label, _type)
        members['_fields_'] = fields

        # repr shows all values but the header
        members['__repr__'] = lambda x: "<{cls} {data}>".format(
            cls=x.__class__.__name__, data=' '.join(["{}:{}".format(k, getattr(x, k)) for k in x.labels])
        )

        # finally, create the class
        cls = type('Log__%s' % self.name, (ctypes.LittleEndianStructure,), members)

        # sanity-check the generated struct against the length claimed by FMT
        # NOTE(review): the message prints sizeof() as "expected" and the
        # FMT-declared SIZE as "got" — the labels look swapped; verify
        if ctypes.sizeof(cls) != cls.SIZE:
            print("size mismatch for {} expected {} got {}".format(cls, ctypes.sizeof(cls), cls.SIZE), file=sys.stderr)
            return None

        return cls


# patch in the real header size now that the class exists
BinaryFormat.SIZE = ctypes.sizeof(BinaryFormat)
|
|
|
|
|
2022-07-18 11:09:04 -03:00
|
|
|
|
2014-06-27 19:35:13 -03:00
|
|
|
class Channel(object):
    '''storage for a single stream of data, i.e. all GPS.RelAlt values'''

    # TODO: rethink data storage, but do more thorough regression testing before refactoring it
    # TODO: store data as a scipy spline curve so we can more easily interpolate and sample the slope?

    def __init__(self):
        # store dupe data in dict and list for now, until we decide which is the better way to go
        self.dictData = {}  # dict of linenum->value
        self.listData = []  # list of (linenum,value), kept in line-number order

    def getSegment(self, startLine, endLine):
        '''returns a segment of this data (from startLine to endLine, inclusive) as a new Channel instance'''
        segment = Channel()
        segment.dictData = {k: v for k, v in self.dictData.items() if k >= startLine and k <= endLine}
        segment.listData = [(k, v) for k, v in self.listData if k >= startLine and k <= endLine]
        return segment

    def min(self):
        '''smallest value on this channel'''
        return min(self.dictData.values())

    def max(self):
        '''largest value on this channel'''
        return max(self.dictData.values())

    def avg(self):
        '''arithmetic mean of all values on this channel'''
        # BUGFIX: numpy.mean() cannot reduce a Python 3 dict view directly
        # (it becomes a 0-d object array), so materialise the values first
        return numpy.mean(list(self.dictData.values()))

    def getNearestValueFwd(self, lineNumber):
        '''Returns (value,lineNumber) for the first sample at or after lineNumber; raises ValueError if none'''
        # (lineNumber, -99999) sorts before any typical (lineNumber, value) tuple
        index = bisect.bisect_left(self.listData, (lineNumber, -99999))
        while index < len(self.listData):
            line = self.listData[index][0]
            if line >= lineNumber:
                return (self.listData[index][1], line)
            index += 1
        raise ValueError("Error finding nearest value for line %d" % lineNumber)

    def getNearestValueBack(self, lineNumber):
        '''Returns (value,lineNumber) for the nearest sample before lineNumber; raises ValueError if none'''
        # note: on an exact hit this lands on the *preceding* sample (for
        # typical values), so Fwd/Back bracket lineNumber for interpolation
        index = bisect.bisect_left(self.listData, (lineNumber, -99999)) - 1
        while index >= 0:
            line = self.listData[index][0]
            if line <= lineNumber:
                return (self.listData[index][1], line)
            index -= 1
        raise ValueError("Error finding nearest value for line %d" % lineNumber)

    def getNearestValue(self, lineNumber, lookForwards=True):
        """
        Find the nearest data value to the given lineNumber, defaults to first looking forwards.
        Returns (value,lineNumber)
        """
        # try the preferred direction first, then fall back to the other;
        # if both fail the second call's ValueError propagates
        if lookForwards:
            try:
                return self.getNearestValueFwd(lineNumber)
            except ValueError:
                return self.getNearestValueBack(lineNumber)
        try:
            return self.getNearestValueBack(lineNumber)
        except ValueError:
            return self.getNearestValueFwd(lineNumber)

    def getInterpolatedValue(self, lineNumber):
        '''linearly interpolate the channel's value at lineNumber from the two nearest samples'''
        (prevValue, prevValueLine) = self.getNearestValue(lineNumber, lookForwards=False)
        (nextValue, nextValueLine) = self.getNearestValue(lineNumber, lookForwards=True)
        if prevValueLine == nextValueLine:
            return prevValue
        # weight is the fractional distance travelled from prevValueLine towards nextValueLine
        weight = (lineNumber - prevValueLine) / float(nextValueLine - prevValueLine)
        # BUGFIX: the weights were previously swapped, biasing the result
        # towards the *farther* neighbour (and returning prevValue when
        # lineNumber coincided with nextValueLine)
        return ((1 - weight) * prevValue) + (weight * nextValue)

    def getIndexOf(self, lineNumber):
        '''returns the index within this channel's listData of the given lineNumber, or raises an Exception if not found'''
        index = bisect.bisect_left(self.listData, (lineNumber, -99999))
        # bounds check avoids a raw IndexError when lineNumber is past the end
        if index < len(self.listData) and self.listData[index][0] == lineNumber:
            return index
        else:
            raise Exception("Error finding index for line %d" % lineNumber)
|
2014-03-03 04:07:45 -04:00
|
|
|
|
2022-07-18 11:09:04 -03:00
|
|
|
|
2014-03-03 04:07:45 -04:00
|
|
|
class LogIterator:
    """
    Smart iterator that can move through a log by line number and maintain an index into the nearest values of all data
    channels
    """

    # TODO: LogIterator currently indexes the next available value rather than the nearest value, we should make it
    # configurable between next/nearest

    class LogIteratorSubValue:
        '''syntactic sugar to allow access by LogIterator[lineLabel][dataLabel]'''

        logdata = None
        iterators = None
        lineLabel = None

        def __init__(self, logdata, iterators, lineLabel):
            self.logdata = logdata
            self.lineLabel = lineLabel
            self.iterators = iterators  # shared with the owning LogIterator

        def __getitem__(self, dataLabel):
            # current list index for this lineLabel, then the sample value there
            index = self.iterators[self.lineLabel][0]
            return self.logdata.channels[self.lineLabel][dataLabel].listData[index][1]

    logdata = None
    currentLine = None

    def __init__(self, logdata, lineNumber=0):
        '''position the iterator at lineNumber (defaults to the start of the log)'''
        self.logdata = logdata
        self.currentLine = lineNumber
        # BUGFIX: 'iterators' used to be a class attribute, so every
        # LogIterator instance shared (and clobbered) the same dict; it must
        # be per-instance state
        self.iterators = {}  # lineLabel -> (listIndex, lineNumber)
        for lineLabel in self.logdata.formats:
            if lineLabel in self.logdata.channels:
                self.iterators[lineLabel] = ()
        self.jump(lineNumber)

    def __iter__(self):
        return self

    def __getitem__(self, lineLabel):
        return LogIterator.LogIteratorSubValue(self.logdata, self.iterators, lineLabel)

    def next(self):
        '''increment iterator to next log line'''
        self.currentLine += 1
        if self.currentLine > self.logdata.lineCount:
            # NOTE: never raises StopIteration; callers are expected to check
            # currentLine against the log length themselves
            return self
        for lineLabel in self.iterators.keys():
            # check if the currentLine has gone past the line we're pointing to for this type of data
            dataLabel = self.logdata.formats[lineLabel].labels[0]
            (index, lineNumber) = self.iterators[lineLabel]
            # if so, and it is not the last entry in the log, increment the indices for dataLabels under that lineLabel
            if (self.currentLine > lineNumber) and (
                index < len(self.logdata.channels[lineLabel][dataLabel].listData) - 1
            ):
                index += 1
                lineNumber = self.logdata.channels[lineLabel][dataLabel].listData[index][0]
                self.iterators[lineLabel] = (index, lineNumber)
        return self

    __next__ = next  # Python 3 iterator protocol

    def jump(self, lineNumber):
        '''jump iterator to specified log line'''
        self.currentLine = lineNumber
        for lineLabel in self.iterators.keys():
            dataLabel = self.logdata.formats[lineLabel].labels[0]
            (value, lineNumber) = self.logdata.channels[lineLabel][dataLabel].getNearestValue(self.currentLine)
            self.iterators[lineLabel] = (self.logdata.channels[lineLabel][dataLabel].getIndexOf(lineNumber), lineNumber)
|
2014-02-23 10:20:18 -04:00
|
|
|
|
2014-01-27 02:38:57 -04:00
|
|
|
|
|
|
|
class DataflashLogHelper:
    '''helper functions for dealing with log data, put here to keep DataflashLog class as a simple parser and data store'''

    @staticmethod
    def getTimeAtLine(logdata, lineNumber):
        '''returns the nearest GPS timestamp in milliseconds after the given line number'''
        if "GPS" not in logdata.channels:
            raise Exception("no GPS log data found")
        # older logs use 'Time', newer logs use 'TimeMS'
        # even newer logs use TimeUS
        timeLabel = None
        for possible in "TimeMS", "Time", "TimeUS":
            if possible in logdata.channels["GPS"]:
                timeLabel = possible
                break
        if timeLabel is None:
            raise Exception("Unable to get time label")
        # scan forwards for the first line at/after lineNumber carrying a GPS time
        while lineNumber <= logdata.lineCount:
            if lineNumber in logdata.channels["GPS"][timeLabel].dictData:
                return logdata.channels["GPS"][timeLabel].dictData[lineNumber]
            lineNumber = lineNumber + 1
        # fell off the end of the log; fall back to the largest timestamp seen
        sys.stderr.write("didn't find GPS data for " + str(lineNumber) + " - using maxtime\n")
        return logdata.channels["GPS"][timeLabel].max()

    @staticmethod
    def findLoiterChunks(logdata, minLengthSeconds=0, noRCInputs=True):
        """
        Returns a list of (startLine, endLine) pairs defining sections of the log which are in loiter mode, ordered
        from longest to shortest in time. If `noRCInputs == True` it only returns chunks with no control inputs
        """
        # TODO: implement noRCInputs handling when identifying stable loiter chunks, for now we're ignoring it

        changes = [{"line": k, "modeName": v[0], "modeNum": v[1]} for k, v in sorted(logdata.modeChanges.items())]
        chunks = []
        for i in range(len(changes)):
            if changes[i]["modeName"] == "LOITER":
                startLine = changes[i]["line"]
                try:
                    # a chunk ends at the next mode change...
                    endLine = changes[i + 1]["line"]
                except IndexError:
                    # ...or at the end of the log if LOITER was the last mode
                    endLine = logdata.lineCount
                chunkTimeSeconds = (
                    DataflashLogHelper.getTimeAtLine(logdata, endLine)
                    - DataflashLogHelper.getTimeAtLine(logdata, startLine)
                    + 1
                ) / 1000.0
                if chunkTimeSeconds > minLengthSeconds:
                    chunks.append((startLine, endLine))
        # BUGFIX: the docstring promises longest-first ordering, but the sort
        # was ascending (a regression introduced when the old cmp-based
        # chunkSizeCompare was converted to a key function); sort descending
        chunks.sort(key=lambda chunk: chunk[1] - chunk[0], reverse=True)
        return chunks

    @staticmethod
    def isLogEmpty(logdata):
        '''returns an human readable error string if the log is essentially empty, otherwise returns None'''
        # naive check for now, see if the throttle output was ever above 20%
        throttleThreshold = 20
        if logdata.vehicleType == VehicleType.Copter:
            throttleThreshold = 200  # copter uses 0-1000, plane+rover use 0-100
        if "CTUN" in logdata.channels:
            try:
                maxThrottle = logdata.channels["CTUN"]["ThrOut"].max()
            except KeyError:
                # ThrOut was shorted to ThO at some stage...
                maxThrottle = logdata.channels["CTUN"]["ThO"].max()
                # at roughly the same time ThO became a range from 0 to 1
                throttleThreshold = 0.2
            if maxThrottle < throttleThreshold:
                return "Throttle never above 20%"
        return None
|
2014-01-27 02:38:57 -04:00
|
|
|
|
|
|
|
|
2014-06-27 19:35:13 -03:00
|
|
|
class DataflashLog(object):
|
2022-07-18 10:52:56 -03:00
|
|
|
"""
|
|
|
|
ArduPilot Dataflash log file reader and container class. Keep this simple, add more advanced or specific functions
|
|
|
|
to DataflashLogHelper class
|
|
|
|
"""
|
2020-07-16 20:22:10 -03:00
|
|
|
|
2014-06-27 19:35:13 -03:00
|
|
|
knownHardwareTypes = ["APM", "PX4", "MPNG"]
|
|
|
|
|
2014-07-08 17:49:42 -03:00
|
|
|
    def __init__(self, logfile=None, format="auto", ignoreBadlines=False):
        '''create an empty container; if logfile is given, parse it immediately via read()'''
        self.filename = None

        self.vehicleType = None  # from VehicleType enumeration; value derived from header
        self.vehicleTypeString = None  # set at the same time as the enum value
        self.firmwareVersion = ""
        self.firmwareHash = ""
        self.freeRAM = 0
        self.hardwareType = ""  # APM 1, APM 2, PX4, MPNG, etc What is VRBrain? BeagleBone, etc? Needs more testing

        self.formats = {}  # name -> Format
        self.parameters = {}  # token -> value
        self.messages = {}  # lineNum -> message
        self.modeChanges = {}  # lineNum -> (mode,value)
        self.channels = {}  # lineLabel -> {dataLabel:Channel}

        self.filesizeKB = 0
        self.durationSecs = 0
        self.lineCount = 0
        self.skippedLines = 0
        # MODE lines seen before the vehicle type is known; replayed by
        # backPatchModeChanges() once it is
        self.backpatch_these_modechanges = []
        self.frame = None  # frame string from the MSG "Frame" line, e.g. "QUAD"

        if logfile:
            self.read(logfile, format, ignoreBadlines)
|
2014-06-24 19:30:55 -03:00
|
|
|
|
|
|
|
    def getCopterType(self):
        '''returns quad/hex/octo/tradheli if this is a copter log'''
        if self.vehicleType != VehicleType.Copter:
            return None
        # infer the frame from the number of motor output fields in MOT
        motLabels = []
        if "MOT" in self.formats:  # not listed in PX4 log header for some reason?
            motLabels = self.formats["MOT"].labels
        if "GGain" in motLabels:
            # gyro gain field is only present on traditional helicopters
            return "tradheli"
        elif len(motLabels) == 4:
            return "quad"
        elif len(motLabels) == 6:
            return "hex"
        elif len(motLabels) == 8:
            return "octo"
        else:
            # unknown/absent MOT format -> can't tell
            return ""
|
|
|
|
|
2017-09-10 22:59:37 -03:00
|
|
|
    def num_motor_channels(self):
        '''returns the number of motor output channels for self.frame; raises KeyError for unknown frames'''
        motor_channels_for_frame = {
            "QUAD": 4,
            "HEXA": 6,
            "Y6": 6,
            "OCTA": 8,
            "OCTA_QUAD": 8,
            "DECA": 10,
            # "HELI": 1,
            # "HELI_DUAL": 2,
            "TRI": 3,
            "SINGLE": 1,
            "COAX": 2,
            "TAILSITTER": 1,
            "DODECA_HEXA": 12,
        }
        return motor_channels_for_frame[self.frame]
|
|
|
|
|
2014-07-08 17:04:25 -03:00
|
|
|
    def read(self, logfile, format="auto", ignoreBadlines=False):
        '''returns on successful log read (including bad lines if ignoreBadlines==True), will throw an Exception otherwise'''
        # TODO: dataflash log parsing code is pretty hacky, should re-write more methodically
        # binary logs start with these four bytes (two sync bytes + FMT header)
        df_header = bytearray([0xA3, 0x95, 0x80, 0x80])
        self.filename = logfile
        if self.filename == '<stdin>':
            f = sys.stdin
        else:
            f = open(self.filename, 'rb')

        # decide whether this is a binary or a text log; 'auto' sniffs the
        # first four bytes of the file
        if format.lower() == 'bin':
            head = df_header
        elif format == 'log':
            head = ""
        elif format == 'auto':
            if self.filename == '<stdin>':
                # assuming TXT format
                head = ""
            else:
                head = f.read(4)
                f.seek(0)
        else:
            raise ValueError("Unknown log format for {}: {}".format(self.filename, format))

        # both parsers return (bytes consumed, number of lines parsed);
        # read_binary/read_text are defined elsewhere in this class
        if head == df_header:
            numBytes, lineNumber = self.read_binary(f, ignoreBadlines)
            pass
        else:
            numBytes, lineNumber = self.read_text(f, ignoreBadlines)

        # gather some general stats about the log
        self.lineCount = lineNumber
        self.filesizeKB = numBytes / 1024.0
        # TODO: switch duration calculation to use TimeMS values rather than GPS timestemp
        if "GPS" in self.channels:
            # the GPS time label changed at some point, need to handle both
            timeLabel = None
            for i in 'TimeMS', 'TimeUS', 'Time':
                if i in self.channels["GPS"]:
                    timeLabel = i
                    break
            firstTimeGPS = int(self.channels["GPS"][timeLabel].listData[0][1])
            lastTimeGPS = int(self.channels["GPS"][timeLabel].listData[-1][1])
            # TimeUS is in microseconds; bring it down to milliseconds first
            if timeLabel == 'TimeUS':
                firstTimeGPS /= 1000
                lastTimeGPS /= 1000
            self.durationSecs = (lastTimeGPS - firstTimeGPS) / 1000

        # TODO: calculate logging rate based on timestamps
        # ...
|
|
|
|
|
2015-10-14 21:05:45 -03:00
|
|
|
    # firmware names as they appear in MSG lines -> VehicleType enum values
    msg_vehicle_to_vehicle_map = {
        "ArduCopter": VehicleType.Copter,
        "APM:Copter": VehicleType.Copter,
        "ArduPlane": VehicleType.Plane,
        "ArduRover": VehicleType.Rover,
    }

    # takes the vehicle type supplied via "MSG" and sets vehicleType from
    # the VehicleType enumeration
    def set_vehicleType_from_MSG_vehicle(self, MSG_vehicle):
        '''set vehicleType/vehicleTypeString from a MSG firmware name; raises ValueError if unrecognised'''
        ret = self.msg_vehicle_to_vehicle_map.get(MSG_vehicle, None)
        if ret is None:
            raise ValueError("Unknown vehicle type (%s)" % (MSG_vehicle))
        self.vehicleType = ret
        self.vehicleTypeString = VehicleTypeString[ret]
|
|
|
|
|
2016-09-07 19:38:19 -03:00
|
|
|
    def handleModeChange(self, lineNumber, e):
        '''record a MODE line e at lineNumber into self.modeChanges, normalising copter mode numbers to names'''
        if self.vehicleType == VehicleType.Copter:
            # copter flight mode number -> name
            modes = {
                0: 'STABILIZE',
                1: 'ACRO',
                2: 'ALT_HOLD',
                3: 'AUTO',
                4: 'GUIDED',
                5: 'LOITER',
                6: 'RTL',
                7: 'CIRCLE',
                9: 'LAND',
                10: 'OF_LOITER',
                11: 'DRIFT',
                13: 'SPORT',
                14: 'FLIP',
                15: 'AUTOTUNE',
                16: 'POSHOLD',
                17: 'BRAKE',
                18: 'THROW',
                19: 'AVOID_ADSB',
                20: 'GUIDED_NOGPS',
                21: 'SMART_RTL',
            }
            try:
                # second tuple element is ThrCrs (throttle cruise) when the
                # log provides it, otherwise the raw mode number
                if hasattr(e, 'ThrCrs'):
                    self.modeChanges[lineNumber] = (modes[int(e.Mode)], e.ThrCrs)
                else:
                    # assume it has ModeNum:
                    self.modeChanges[lineNumber] = (modes[int(e.Mode)], e.ModeNum)
            except ValueError:
                # e.Mode was not an integer — older text logs store the name
                if hasattr(e, 'ThrCrs'):
                    self.modeChanges[lineNumber] = (e.Mode, e.ThrCrs)
                else:
                    # some .log files have the name spelt out by name
                    # rather than number, contrary to the format
                    # string. Attempt to map that back to a number:
                    uppername = str(e.Mode).upper()
                    for num in modes:
                        if modes[num].upper() == uppername:
                            self.modeChanges[lineNumber] = (uppername, num)
                            return
                    # assume it has ModeNum:
                    print("Unknown mode=%u" % e.ModeNum)
                    self.modeChanges[lineNumber] = (e.Mode, "mode=%u" % e.ModeNum)
        elif self.vehicleType in [VehicleType.Plane, VehicleType.Copter, VehicleType.Rover]:
            # plane/rover MODE lines already carry both name and number
            self.modeChanges[lineNumber] = (e.Mode, e.ModeNum)
        else:
            # if you've gotten to here the chances are we don't
            # know what vehicle you're flying...
            raise Exception(
                "Unknown log type for MODE line vehicletype=({}) line=({})".format(self.vehicleTypeString, repr(e))
            )
|
2016-09-07 19:38:19 -03:00
|
|
|
|
|
|
|
    def backPatchModeChanges(self):
        '''replay MODE lines that arrived before the vehicle type was known'''
        for (lineNumber, e) in self.backpatch_these_modechanges:
            self.handleModeChange(lineNumber, e)
|
|
|
|
|
2017-09-10 22:59:37 -03:00
|
|
|
def set_frame(self, frame):
    """Record the airframe string reported by the log's 'Frame' MSG line."""
    self.frame = frame
|
|
|
|
|
2014-07-03 07:19:20 -03:00
|
|
|
def process(self, lineNumber, e):
    """Dispatch one parsed log entry into the appropriate store.

    FMT entries register message formats, PARM entries populate the
    parameter table, MSG entries are mined for frame/vehicle/firmware
    information (or stored as plain messages), MODE entries record
    flight-mode changes, and every other entry type is channel data.

    :param lineNumber: 1-based position of the entry in the log
    :param e: decoded log entry; must expose a NAME attribute
    """
    kind = e.NAME
    if kind == 'FMT':
        cls = e.to_class()
        # FMT messages can be broken, in which case to_class() gives None
        if cls is not None:
            # binary logs key formats by a numeric 'type' field as well
            if hasattr(e, 'type') and e.type not in self._formats:
                self._formats[e.type] = cls
            self.formats.setdefault(cls.NAME, cls)
    elif kind == "PARM":
        self.parameters[e.Name] = e.Value
    elif kind == "MSG":
        tokens = e.Message.split(' ')
        if not self.frame and "Frame" in tokens[0]:
            self.set_frame(tokens[1])
        if not self.vehicleType:
            try:
                self.set_vehicleType_from_MSG_vehicle(tokens[0])
            except ValueError:
                return
            self.backPatchModeChanges()
            self.firmwareVersion = tokens[1]
            if len(tokens) == 3:
                self.firmwareHash = tokens[2][1:-1]
        else:
            self.messages[lineNumber] = e.Message
    elif kind == "MODE":
        if self.vehicleType is None:
            # vehicle type still unknown; replayed later by backPatchModeChanges()
            self.backpatch_these_modechanges.append((lineNumber, e))
        else:
            self.handleModeChange(lineNumber, e)
    else:
        # anything else must be log data destined for the channels
        if kind not in self.channels:
            # first sighting of this message type: create its channel storage
            self.channels[kind] = {label: Channel() for label in e.labels}
        group = self.channels[kind]
        # store each field value in its channel, indexed by line number
        for label in e.labels:
            value = getattr(e, label)
            channel = group[label]
            channel.dictData[lineNumber] = value
            channel.listData.append((lineNumber, value))
|
|
|
|
|
|
|
|
def read_text(self, f, ignoreBadlines):
    """Read a text-format (.log) dataflash log from file object *f*.

    Header lines (log index, free-RAM report, hardware type, firmware
    banner) are handled specially; every other line is looked up in
    self.formats and fed through self.process().

    :param f: binary file object yielding one raw log line per iteration
    :param ignoreBadlines: if True, unparseable lines are reported to
        stderr and skipped; if False the first bad line raises Exception
    :returns: tuple (numBytes, lineNumber) of bytes and lines consumed
    """
    self.formats = {'FMT': Format}
    lineNumber = 0
    numBytes = 0
    knownHardwareTypes = ["APM", "PX4", "MPNG"]
    for line in f:
        lineNumber = lineNumber + 1
        numBytes += len(line) + 1
        line = line.decode(encoding="utf-8")
        try:
            line = line.strip('\n\r')
            tokens = line.split(', ')
            # first handle the log header lines
            if line == " Ready to drive." or line == " Ready to FLY.":
                continue
            if line == "----------------------------------------":  # present in pre-3.0 logs
                raise Exception(
                    "Log file seems to be in the older format (prior to self-describing logs), which isn't supported"
                )
            if len(tokens) == 1:
                tokens2 = line.split(' ')
                if line == "":
                    pass
                elif len(tokens2) == 1 and tokens2[0].isdigit():  # log index
                    pass
                elif len(tokens2) == 3 and tokens2[0] == "Free" and tokens2[1] == "RAM:":
                    self.freeRAM = int(tokens2[2])
                elif tokens2[0] in knownHardwareTypes:
                    # not sure if we can parse this more usefully, for now only need to report it back verbatim
                    self.hardwareType = line
                elif (len(tokens2) == 2 or len(tokens2) == 3) and tokens2[1][
                    0
                ].lower() == "v":  # e.g. ArduCopter V3.1 (5c6503e2)
                    try:
                        self.set_vehicleType_from_MSG_vehicle(tokens2[0])
                    except ValueError:
                        pass
                    self.firmwareVersion = tokens2[1]
                    if len(tokens2) == 3:
                        self.firmwareHash = tokens2[2][1:-1]
                else:
                    errorMsg = "Error parsing line %d of log file: %s" % (lineNumber, self.filename)
                    if ignoreBadlines:
                        print(errorMsg + " (skipping line)", file=sys.stderr)
                        self.skippedLines += 1
                    else:
                        # carry the diagnostic in the exception rather than
                        # discarding errorMsg and raising an empty message
                        raise Exception(errorMsg)
            else:
                if tokens[0] not in self.formats:
                    raise ValueError("Unknown Format {}".format(tokens[0]))
                e = self.formats[tokens[0]](*tokens[1:])
                self.process(lineNumber, e)
        except Exception as e:
            print("BAD LINE: " + str(line), file=sys.stderr)
            if not ignoreBadlines:
                raise Exception(
                    "Error parsing line %d of log file %s - %s" % (lineNumber, self.filename, e.args[0])
                )
    return (numBytes, lineNumber)
|
2014-02-23 10:20:18 -04:00
|
|
|
|
2014-07-03 07:21:14 -03:00
|
|
|
def read_binary(self, f, ignoreBadlines):
    """Read a binary (.bin) dataflash log from file object *f*.

    Drives self._read_binary() and feeds each decoded message through
    self.process(); None entries are counted but otherwise skipped.

    :param f: binary file object containing the whole log
    :param ignoreBadlines: passed through to _read_binary()
    :returns: tuple (numBytes, lineNumber) of bytes and messages consumed
    """
    message_count = 0
    bytes_consumed = 0
    for entry in self._read_binary(f, ignoreBadlines):
        message_count += 1
        if entry is None:
            continue
        bytes_consumed += entry.SIZE
        self.process(message_count, entry)
    return (bytes_consumed, message_count)
|
2014-02-23 10:20:18 -04:00
|
|
|
|
2014-07-03 07:21:14 -03:00
|
|
|
def _read_binary(self, f, ignoreBadlines):
    """Generator: walk a binary dataflash log and yield decoded messages.

    Reads the whole of *f* into memory, then scans it record by record.
    Each record starts with a ``logheader`` (magic bytes 0xA3 0x95 followed
    by a message id); the id selects a ctypes message class from
    self._formats, which is seeded here with BinaryFormat at id 128
    (additional ids are registered by process() as FMT entries arrive).

    :param f: binary file object for the whole log
    :param ignoreBadlines: if False, a corrupt header raises ValueError;
        if True the scan resynchronises by advancing one byte at a time
    :yields: decoded ctypes message instances
    """
    self._formats = {128: BinaryFormat}
    data = bytearray(f.read())
    offset = 0
    # stop once there isn't even room for another header
    while len(data) > offset + ctypes.sizeof(logheader):
        h = logheader.from_buffer(data, offset)
        if not (h.head1 == 0xA3 and h.head2 == 0x95):
            # header magic mismatch: either bail out or try to resync
            if ignoreBadlines is False:
                raise ValueError(h)
            else:
                # an all-0xFF header means we hit erased-flash fill; treat as EOF
                if h.head1 == 0xFF and h.head2 == 0xFF and h.msgid == 0xFF:
                    print(
                        "Assuming EOF due to dataflash block tail filled with \\xff... (offset={off})".format(
                            off=offset
                        ),
                        file=sys.stderr,
                    )
                    break
                # resynchronise one byte at a time
                offset += 1
                continue

        if h.msgid in self._formats:
            typ = self._formats[h.msgid]
            # truncated final record: not enough bytes left for this type
            if len(data) <= offset + typ.SIZE:
                break
            try:
                e = typ.from_buffer(data, offset)
            except ValueError:
                # dump buffer geometry to aid debugging, then re-raise
                print(
                    "data:{} offset:{} size:{} sizeof:{} sum:{}".format(
                        len(data), offset, typ.SIZE, ctypes.sizeof(typ), offset + typ.SIZE
                    )
                )
                raise
            offset += typ.SIZE
        else:
            raise ValueError(str(h) + "unknown type")
        yield e
|