2020-06-23 12:51:07 -03:00
|
|
|
#! /usr/bin/env python3
|
2017-11-30 06:28:17 -04:00
|
|
|
|
|
|
|
from __future__ import print_function
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
import codecs
|
|
|
|
import colorsys
|
|
|
|
import json
|
2021-02-27 14:52:14 -04:00
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import re
|
2020-01-15 05:27:15 -04:00
|
|
|
import sys
|
2021-02-27 14:52:14 -04:00
|
|
|
from typing import Optional, Set, Tuple
|
2017-11-30 06:28:17 -04:00
|
|
|
|
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Command-line interface and logger setup
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(
    description='Generate uORB pub/sub dependency graph from source code')

parser.add_argument('-s', '--src-path', action='append',
                    help='Source path(s) (default=src, can be specified multiple times)',
                    default=[])
parser.add_argument('-e', '--exclude-path', action='append',
                    help='Excluded path(s), can be specified multiple times',
                    default=[])
parser.add_argument('--merge-depends', action='store_true',
                    help='Merge library topics in the modules that depend on them.')
parser.add_argument('-v', '--verbosity', action='count',
                    help='increase output verbosity; primarily for debugging; repeat for more detail',
                    default=0)
parser.add_argument('-f', '--file', metavar='file', action='store',
                    help='output file name prefix',
                    default='graph')
parser.add_argument('-o', '--output', metavar='output', action='store',
                    help='output format (json or graphviz)',
                    default='json')
parser.add_argument('-u', '--use-topic-union', action='store_true',
                    help='''
Use the union of all publication and subscription topics (useful for complete
graphs or only few/single module(s)). The default is to use the intersection
(remove topics that have no subscriber or no publisher)''')
parser.add_argument('-m', '--modules', action='store',
                    help='Comma-separated whitelist of modules (the module\'s '+
                    'MAIN, e.g. from a startup script)',
                    default='')

# module-level logger shared by the whole script; default level is WARNING
logging.basicConfig(level=logging.WARNING, format='%(message)s')
log = logging.getLogger()
|
2017-11-30 06:28:17 -04:00
|
|
|
|
|
|
|
def get_N_colors(N, s=0.8, v=0.9):
    """ get N distinct colors as a list of '#rrggbb' hex strings """
    hex_colors = []
    for idx in range(N):
        # spread hues evenly around the HSV color wheel
        red, green, blue = colorsys.hsv_to_rgb(idx * 1.0 / N, s, v)
        hex_colors.append('#'
                          + format(int(red * 255), '02x')
                          + format(int(green * 255), '02x')
                          + format(int(blue * 255), '02x'))
    return hex_colors
|
|
|
|
|
2022-01-24 04:18:21 -04:00
|
|
|
def topic_filename(topic):
    """Map a uORB topic name to the name of the .msg file defining it.

    Looks first for a file literally named `<topic>.msg`; otherwise scans
    every .msg file's `# TOPICS` lines for a mention of the topic.
    Returns "no_file" when nothing matches.
    """
    MSG_PATH = 'msg/'

    msg_files = []
    for entry in os.listdir(MSG_PATH):
        if entry.endswith('.msg'):
            msg_files.append(entry.replace('.msg', ''))

    if topic in msg_files:
        return topic

    for msg_file in msg_files:
        with open(f'{MSG_PATH}/{msg_file}.msg') as handle:
            # a `# TOPICS a b c` line can declare extra topics for one message
            if re.findall(f'^# TOPICS.*{topic}.*', handle.read(), re.MULTILINE):
                return msg_file
    return "no_file"
|
2017-11-30 06:28:17 -04:00
|
|
|
|
2019-04-01 09:36:43 -03:00
|
|
|
class PubSub(object):
    """ Collects either publication or subscription information for nodes
        (modules and topics) & edges """

    # special value to signal an ambiguous site was found -- don't record this topic, and stop processing.
    AMBIGUOUS_SITE_TOPIC = "AMBIGUOUS"

    def __init__(self, name, topic_blacklist, regexes):
        """
        :param name: short tag used to label this collector's debug output
            (e.g. 'PUB', 'SUB', 'AMB')
        :param topic_blacklist: list of topics to blacklist
        :param regexes: list of regexes to extract orb calls
            (e.g. orb_subscribe). They need to have 2 captures, the second
            one is the one capturing ORB_ID(<topic>
        """
        self._name = name
        self._topic_blacklist = topic_blacklist
        self._regexes = set([re.compile(regex) for regex in regexes])

    def match(self, source_line: str) -> Optional[str]:
        """ Extract subscribed/published topics from a source string

        :param source_line: string of C/C++ code with comments and whitespace removed
        :return: if any topic was found, returned as a str. On error, raise an
            exception. On an ambiguous line, return `AMBIGUOUS_SITE_TOPIC`.
            Otherwise, return `None`
        """

        for regex in self._regexes:
            # just the matches for this particular pattern:
            match = regex.search(source_line)

            if match is None:
                continue

            # all regexes contain exactly 2 capture groups (or else this unpacking crashes)
            route_group, topic_group = match.groups()

            log.debug("  ####:{}: {}, {}".format(self._name, route_group, topic_group))

            # TODO: handle this case... but not sure where, yet
            # if match == 'ORB_ID_VEHICLE_ATTITUDE_CONTROLS': # special case
            #     match = orb_id+orb_id_vehicle_attitude_controls_topic

            # match has the form: '[ORB_ID(]<topic_name>'
            if route_group:
                if route_group == 'ORB_ID':
                    log.debug("    >>> Found ORB_ID topic: " + topic_group + " w/regex: " + str(regex.pattern))
                    return self._filter_topic(topic_group)
                elif route_group == '[':
                    if not topic_group:
                        log.debug("    !! found an ambiguous site => return an empty set")
                        return PubSub.AMBIGUOUS_SITE_TOPIC
                    # a bracketed site *with* a topic falls through to the next regex
                else:
                    raise SyntaxError('!!! Encountered regex case: `route_group` contains unrecognized value!: '+ route_group+' (::'+str(regex.pattern)+')\n'
                                      + "    ("+ route_group+', '+topic_group +")\n"
                                      + "        " + source_line)

            # BUGFIX: the original tested `route_group.empty()`/`topic_group.empty()`,
            # but Python strings have no `.empty()` method -- that branch always
            # raised AttributeError. Use falsiness instead.
            elif not route_group and not topic_group:
                log.debug('!!! Found ambiguous site, without `ORB_ID` or topic (::'+str(regex.pattern))
                return PubSub.AMBIGUOUS_SITE_TOPIC

            else:
                raise SyntaxError(" !!! unhandled case: unknown-variant: "+route_group+", " + topic_group + " ....from regex: " + str(regex.pattern))

        return None

    def _filter_topic(self, topic_name: str) -> Optional[str]:
        """ return the topic unless it is blacklisted (then return None) """
        if topic_name in self._topic_blacklist:
            log.debug("    XX Ignoring blacklisted topic " + topic_name)
            return None
        else:
            return topic_name
|
2017-11-30 06:28:17 -04:00
|
|
|
|
2021-02-27 14:52:14 -04:00
|
|
|
class Publications(PubSub):
    """ Collects topic publication information for scopes """

    def __init__(self, topic_blacklist, regexes):
        # 'PUB' tags this collector's debug output
        super().__init__('PUB', topic_blacklist, regexes)
|
2017-11-30 06:28:17 -04:00
|
|
|
|
|
|
|
|
2021-02-27 14:52:14 -04:00
|
|
|
class Subscriptions(PubSub):
    """ Collects topic subscription information for scopes """

    def __init__(self, topic_blacklist, regexes):
        # 'SUB' tags this collector's debug output
        super().__init__('SUB', topic_blacklist, regexes)
|
|
|
|
|
|
|
|
|
|
|
|
class Ambiguities(PubSub):
    """ Collects topic information that cannot be classified """

    def __init__(self, topic_blacklist, regexes):
        # 'AMB' tags this collector's debug output
        super().__init__('AMB', topic_blacklist, regexes)
|
|
|
|
|
|
|
|
|
|
|
|
class Scope(object):
    """ Defines a scope to add dependencies or topics to """

    def __init__(self, typename, name):
        # topics this scope publishes / subscribes to
        self.publications = set()
        self.subscriptions = set()
        # names of other scopes (libraries) this scope depends on
        self.dependencies = set()
        # topics found at call-sites that could not be classified as pub or sub
        self.ambiguities = set()
        self._name = name
        self._typename = typename

    def add_dependency(self, dependency_name: str):
        """Record a dependency by name; non-string values are silently ignored."""
        if not isinstance(dependency_name, str):
            return
        self.dependencies.add(dependency_name)

    def is_empty(self):
        """True when this scope neither publishes nor subscribes to any topic."""
        return not self.publications and not self.subscriptions

    @property
    def name(self):
        return self._name

    def reduce_ambiguities(self) -> Set[str]:
        """Drop ambiguous topics already recorded as pubs/subs; returns the dependency set."""
        self.ambiguities.difference_update(self.subscriptions, self.publications)
        return self.dependencies

    @property
    def typename(self):
        return self._typename

    # define these so we can hash these classes in dicts and sets
    def __hash__(self):
        return hash(self._name)

    def __eq__(self, other):
        # scopes compare equal to plain strings as well as to other scopes
        other_name = other if isinstance(other, str) else other._name
        return self._name == other_name
|
|
|
|
|
|
|
|
class LibraryScope(Scope):
    """Scope created for a `px4_add_library` entry found in a CMakeLists.txt file."""
    def __init__(self, name):
        super().__init__('Library',name)
|
2017-11-30 06:28:17 -04:00
|
|
|
|
2021-02-27 14:52:14 -04:00
|
|
|
class ModuleScope(Scope):
    """Scope created for a `px4_add_module` entry found in a CMakeLists.txt file."""
    def __init__(self, name):
        super().__init__('Module',name)
|
2017-11-30 06:28:17 -04:00
|
|
|
|
2019-04-01 09:36:43 -03:00
|
|
|
class Graph(object):
    """ Collects Node and Edge information by parsing the source tree """

    def __init__(self, **kwargs):
        """
        :kwargs:
            - scope_whitelist: only output scopes with these names (empty = all)
            - scope_blacklist: never record topics from scopes with these names
            - topic_blacklist: topics that are never recorded
        """

        # used to strip all whitespace from a source line before regex matching
        self._whitespace_pattern = re.compile(r'\s+')

        self._scope_blacklist = set(kwargs.get('scope_blacklist', set()))
        self._scope_whitelist = set(kwargs.get('scope_whitelist', set()))

        self._path_blacklist = []

        self._topic_blacklist = set(kwargs.get('topic_blacklist', set()))

        self._orb_id_vehicle_attitude_controls_topic = 'actuator_controls_0'
        self._orb_id_vehicle_attitude_controls_re = re.compile(r'\#define\s+ORB_ID_VEHICLE_ATTITUDE_CONTROLS\s+([^,)]+)')

        self._warnings = []  # list of all ambiguous scan sites

        self._current_scope = []  # stack with current module (they can be nested)

        self._found_modules = {}  # dict of all found modules
        self._found_libraries = {}  # dict of all found libraries

        self._print_nodes = set()  # combination of libraries + modules
        self._print_topics = set()  # all topics
        self._topic_colors = {}  # key = topic, value = color (html string)

        # note: the source-file-string is pre-processed to remove whitespace -- regexes should ignore whitespace
        # note: the regexes need 2 capture groups '()' to correctly register with downstream code
        capture_cases_sub = [r"orb_subscribe\w*\((ORB_ID)(?:\(|::)(\w+)",
                             r"orb_copy\((ORB_ID)(?:\(|::)(\w+)",
                             r"Subscription\w*(?:<[^>]+>|)\w*(?:\[[^]]+\]|)[\{\(](ORB_ID)(?:\(|::)(\w+)",
                             r"SubscriptionCallbackWorkItem\w+\{this,(ORB_ID)(?:\(|::)(\w+)",
                             ]
        self._subscriptions = Subscriptions(self._topic_blacklist, capture_cases_sub)

        capture_cases_pub = [r"orb_advertise(?:_multi|_queue|_multi_queue|)\((ORB_ID)(?:\(|::)(\w+)",
                             r"orb_publish(?:_auto|)\((ORB_ID)(?:\(|::)(\w+)",
                             r"Publication\w*<\w+>\w+(?:\[[^]]+\]|)[\(\{]*(ORB_ID)(?:\(|::)(\w+)",
                             ]
        self._publications = Publications(self._topic_blacklist, capture_cases_pub)

        capture_cases_ambiguous = [r"Publication\w*(?:\<\w+\>|)\w+(\[)()",
                                   r"Subscription\w*(?:\<\w+\>|)\w+(\[)()",
                                   r"(ORB_ID)(?:\(|::)(\w+)",
                                   ]
        self._ambiguities = Ambiguities(self._topic_blacklist, capture_cases_ambiguous)

    def _get_current_scope(self):
        """Return the innermost scope on the stack, or None when outside any scope."""
        if len(self._current_scope) == 0:
            return None
        return self._current_scope[-1]

    def build(self, src_path_list, **kwargs):
        """ parse the source tree & extract pub/sub information.

        :param src_path_list: list of root directories to scan
        :kwargs:
            - use_topic_pubsub_union: if true, use all topics that have a
              publisher or subscriber. If false, use only topics with at least
              one publisher and subscriber.
            - merge_depends: if true, fold library topics into dependent modules
            - path_blacklist: directories to skip entirely

        Fills in the output sets consumed via `output_scopes`/`output_topics`.
        """

        self._path_blacklist = set([os.path.normpath(p) for p in kwargs.get('path_blacklist', [])])

        for path in src_path_list:
            log.info("## Add src path: " + path)
            self._build_recursive(path, **kwargs)

        # Summarize the found counts: (all topics are defined in 'dependency' library)
        log.info('### Summary: Total Scanned:')
        log.info('    Library Count: '+str(len(self._found_libraries)))
        log.info('    Module Count:  '+str(len(self._found_modules)))
        log.info('    Warning Count: '+str(len(self._warnings)))

        if kwargs['merge_depends']:
            # BUGFIX: was `graph.merge_depends()`, which only worked because the
            # module-level global `graph` happened to be this same instance.
            self.merge_depends()

        # filter all scopes, topics into only the scopes + topics to output
        self._generate_print_lists(use_topic_pubsub_union=kwargs['use_topic_pubsub_union'], merge_depends=kwargs['merge_depends'])

        # Summarize the found counts:
        log.info('### Summary (in-scope):')
        log.info('    Scope Count:      '+str(len(self._print_scopes)))
        log.info('    Ambiguous Topics: '+str(len(self._print_ambiguities)))
        log.info('    Linked Topics:    '+str(len(self._print_topics)))
        log.info('    Warnings:         '+str(len(self._warnings)))

        if 0 < len(self._warnings):
            # print out the list of warning-sites:
            log.info('## Warning Sites:')
            for w in self._warnings:
                scope_name = 'no-scope'
                if None is not w[0]:
                    scope_name = w[0].name
                # warnings tuple contains: (current_scope, file_name, line_number, line)
                log.info("  -['{}']:{:<64s}:{} = {}".format(scope_name, w[1].lstrip('/.'), w[2], w[3]))

        # initialize colors
        color_list = get_N_colors(len(self._print_topics), 0.7, 0.85)
        self._topic_colors = {}
        for i, topic in enumerate(self._print_topics):
            self._topic_colors[topic] = color_list[i]

    def _generate_print_lists(self, use_topic_pubsub_union, merge_depends):
        """ generate the set of scopes (modules + libraries) and topics to print to output """

        subscribed_topics = set()
        published_topics = set()
        ambiguous_topics = set()

        # gather all found scopes:
        all_scopes = {**self._found_libraries, **self._found_modules}

        if 0 == len(self._scope_whitelist):
            select_scopes = self._found_modules
        else:
            select_scopes = {}
            for scope_name in self._scope_whitelist:
                if scope_name in all_scopes:
                    select_scopes[scope_name] = all_scopes[scope_name]
        if not isinstance(select_scopes, dict) or 0 == len(select_scopes):
            # BUGFIX: message previously read "No requested modules not found"
            log.error("!! None of the requested modules were found -- exiting.")
            sys.exit(0)

        log.debug('### Condensing found topics: scope -> total')
        for name, scope in select_scopes.items():
            log.debug('    # Scope: ' + name)

            log.debug('        ## Subs: ' + str(len(scope.subscriptions)))
            for topic in sorted(scope.subscriptions):
                log.debug('            - ' + topic)
                subscribed_topics.add(topic)

            log.debug('        ## Pubs: ' + str(len(scope.publications)))
            for topic in sorted(scope.publications):
                log.debug('            - ' + topic)
                published_topics.add(topic)

            # drop ambiguous topics that are already classified as pub or sub
            scope.reduce_ambiguities()

            log.debug('        ## Ambiguities: ' + str(len(scope.ambiguities)))
            for topic in sorted(scope.ambiguities):
                log.debug('            - ' + topic)
                ambiguous_topics.add(topic)

        # filter modules iff they have at least a subscription or a publication
        scopes_with_topic = {}
        for name, scope in select_scopes.items():
            if not scope.is_empty():
                scopes_with_topic[name] = scope

        self._print_ambiguities = ambiguous_topics
        if use_topic_pubsub_union:
            self._print_topics = subscribed_topics | published_topics
            self._print_scopes = scopes_with_topic
        else:
            self._print_topics = subscribed_topics & published_topics

            # cull scopes to only those that pub or sub to a topic that has both
            intersect_scopes = {}
            for name, scope in scopes_with_topic.items():
                all_scope_topics = scope.publications | scope.subscriptions
                for topic in all_scope_topics:
                    if topic in self._print_topics:
                        intersect_scopes[scope.name] = scope
                        break
            self._print_scopes = intersect_scopes

    def _build_recursive(self, path, **kwargs):
        """Walk one directory: record scopes from CMakeLists.txt, then scan sources."""
        if os.path.normpath(path) in self._path_blacklist:
            log.debug('ignoring excluded path ' + path)
            return

        entries = os.listdir(path)

        # check if entering a new scope
        cmake_file = 'CMakeLists.txt'
        new_scope = False
        if cmake_file in entries:
            new_scope = self._extract_build_information(os.path.join(path, cmake_file), **kwargs)

        # iterate directories recursively
        for entry in entries:
            file_name = os.path.join(path, entry)
            if os.path.isdir(file_name):
                self._build_recursive(file_name, **kwargs)

        # iterate source files
        # Note: Skip all entries if we're not in a scope -- both finding known pubs/subs and emitting warnings
        for entry in entries:
            file_name = os.path.join(path, entry)
            if os.path.isfile(file_name):
                _, ext = os.path.splitext(file_name)
                if ext in ['.cpp', '.c', '.h', '.hpp']:
                    self._process_source_file(file_name)

        if new_scope:
            self._current_scope.pop()

    def _extract_build_information(self, file_name, **kwargs):
        """ extract the module or library name from a CMakeLists.txt file and store
            in self._current_scope if there is any

            Also records dependencies, if any are specified.

        :return: True when a new scope was pushed onto self._current_scope
        """

        found_module_def = False
        found_module_depends = False
        found_library_def = False
        scope_added = False
        # use a context manager so the file handle is always closed (the
        # original leaked it)
        with open(file_name) as datafile:
            for line in datafile:
                if 'px4_add_module' in line:  # must contain 'px4_add_module'
                    found_module_def = True
                elif 'px4_add_library' in line:  # must contain 'px4_add_library'
                    tokens = line.split('(')
                    if 1 < len(tokens):
                        found_library_def = True
                        library_name = tokens[1].split()[0].strip().rstrip(')')
                        library_scope = LibraryScope(library_name)
                        self._current_scope.append(library_scope)
                        scope_added = True
                        self._found_libraries[library_name] = library_scope
                        if self._in_scope():
                            log.debug('    >> found library: ' + library_name)

                        # we can return early because we have no further information to collect from libraries
                        return True
                elif found_module_def and 'DEPENDS' in line.upper():
                    found_module_depends = True
                elif found_module_depends:
                    # two tabs is a *sketchy* heuristic -- spacing isn't guaranteed by cmake;
                    # ... but the hard-tabs *is* specified by PX4 coding standards, so it's likely to be consistent
                    if line.startswith('\t\t') and not line.strip().startswith('#'):
                        depends = [dep.strip() for dep in line.split()]
                        for name in depends:
                            log.debug('    >> {:}: found module dep: {:}'
                                      .format(self._current_scope[-1].name, name))
                            self._current_scope[-1].add_dependency(name)
                            if kwargs['merge_depends']:
                                if (0 < len(self._scope_whitelist)) and self._current_scope[-1].name in self._scope_whitelist:
                                    # if we whitelist a module with dependencies, whitelist the dependencies, too
                                    self._scope_whitelist.add(name)
                    elif line.strip() != "":
                        found_module_depends = False  ## done with the 'DEPENDS' section.

                words = line.split()
                # get the definition of MAIN
                if found_module_def and 'MAIN' in words and len(words) >= 2:
                    module_name = words[1]
                    module_scope = ModuleScope(module_name)
                    self._current_scope.append(module_scope)
                    scope_added = True
                    self._found_modules[module_name] = module_scope
                    if self._in_scope():
                        log.debug('    >> Found module name: ' + module_scope.name)

        return scope_added

    def _process_source_file(self, file_name):
        """ extract information from a single source file """

        current_scope = self._get_current_scope()

        log.debug("        >> {:}extracting topics from file: {:}"
                  .format(current_scope.name + ": " if current_scope is not None else "",
                          file_name))

        with codecs.open(file_name, 'r', 'utf-8') as f:
            try:
                content = f.read()
            except Exception:
                # best-effort: a file that fails to decode is skipped, not fatal
                # (narrowed from the original bare `except:`)
                print('Failed reading file: %s, skipping content.' % file_name)
                return

        if current_scope:
            if current_scope.name in self._scope_blacklist:
                return
            elif current_scope.name == 'uorb_tests':  # skip this
                return
            elif current_scope.name == 'uorb':

                # search and validate the ORB_ID_VEHICLE_ATTITUDE_CONTROLS define
                matches = self._orb_id_vehicle_attitude_controls_re.findall(content)
                for match in matches:
                    if match != 'ORB_ID('+self._orb_id_vehicle_attitude_controls_topic:
                        # if we land here, you need to change _orb_id_vehicle_attitude_controls_topic
                        raise Exception(
                            'The extracted define for ORB_ID_VEHICLE_ATTITUDE_CONTROLS '
                            'is '+match+' but expected ORB_ID('+
                            self._orb_id_vehicle_attitude_controls_topic)

                return  # skip uorb module for the rest

        for line_number, full_line in enumerate(content.splitlines(), start=1):

            # matching is done on lines with all whitespace removed
            short_line = re.sub(self._whitespace_pattern, '', full_line)

            topic = self._publications.match(short_line)
            if topic:
                if current_scope:
                    current_scope.publications.add(topic)
                    continue
                else:
                    raise AssertionError("Encountered Publication topic outside of any scope! " + file_name + " Aborting!")

            topic = self._subscriptions.match(short_line)
            if topic:
                if current_scope:
                    current_scope.subscriptions.add(topic)
                    continue
                else:
                    raise AssertionError("Encountered Subscription topic outside of any scope! " + file_name + " Aborting!")

            topic = self._ambiguities.match(short_line)
            if topic:
                if current_scope:
                    if topic != PubSub.AMBIGUOUS_SITE_TOPIC:
                        current_scope.ambiguities.add(topic)
                    self._warnings.append((current_scope, file_name, line_number, full_line))
                    continue
                else:
                    raise AssertionError("Encountered Ambiguous topic outside of any scope! " + file_name + " Aborting!")

    def _in_scope(self, scope_name=None):
        """True when the current (or given) scope name is whitelisted."""
        if 0 < len(self._current_scope):
            if None is scope_name:
                scope_name = self._current_scope[-1].name
            if scope_name in self._scope_whitelist:
                return True
        return False

    def merge_depends(self):
        """Copy each library's topics into the modules that depend on it."""
        log.info('### Merge Depends:')

        for modname, module in self._found_modules.items():
            if modname in self._scope_whitelist or 0 == len(self._scope_whitelist):
                for depname in module.dependencies:
                    if depname in self._found_libraries:
                        dep = self._found_libraries[depname]
                        # copy topics from library to depending module
                        for topic in dep.publications:
                            module.publications.add(topic)
                        for topic in dep.subscriptions:
                            module.subscriptions.add(topic)
                        for topic in dep.ambiguities:
                            # BUGFIX: the original statement was just
                            # `module.ambiguities` -- a no-op that silently
                            # dropped the library's ambiguous topics.
                            module.ambiguities.add(topic)

        # omit all libraries -- they've already been merged into their respective dependees
        self._scope_whitelist = set([str(s) for s in self._scope_whitelist if s not in self._found_libraries])

    @property
    def output_scopes(self):
        """ get the dict of all scopes to output (name -> Scope) """
        return self._print_scopes

    @property
    def output_topics(self):
        """ get the set of all topics to output """
        return self._print_topics

    @property
    def topic_colors(self):
        """ get a dict of all topic colors with key=topic, value=color """
        return self._topic_colors
|
|
|
|
|
2019-04-01 09:36:43 -03:00
|
|
|
class OutputGraphviz(object):
    """ write graph using Graphviz """

    def __init__(self, graph):
        self._graph = graph

    def write(self, file_name, engine='fdp',
              show_publications=True, show_subscriptions=True):
        """ write the graph to a file
        :param engine: graphviz engine
            - fdp works for large graphs
            - neato works better for smaller graphs
            - circo works for single modules
            CLI: fdp graph.fv -Tpdf -o test.pdf
        """

        print('Writing to '+file_name)

        aspect_ratio = 1

        output_topics = self._graph.output_topics
        output_scopes = self._graph.output_scopes
        topic_colors = self._graph.topic_colors

        # 'sep' increases spacing between nodes
        graph_attr = {
            'splines': 'true',
            'ratio': str(aspect_ratio),
            'overlap': 'false',
            'sep': '"+15,15"',
        }
        graph = Digraph(comment='autogenerated graph with graphviz using uorb_graph.py',
                        engine=engine, graph_attr=graph_attr)

        # scopes: modules
        log.info(' > Writing scopes')
        for scope_name in output_scopes:
            graph.node('m_'+scope_name, scope_name, shape='box', fontcolor='#ffffff',
                       style='filled', color='#666666', fontsize='16')

        log.info(' > Writing topics')
        for topic in output_topics:
            graph.node('t_'+topic, topic, shape='ellipse', fontcolor='#ffffff',
                       style='filled', color=topic_colors[topic])

        # edges
        log.info(' > Writing publish edges')
        if show_publications:
            for scope_name, scope in output_scopes.items():
                for topic in scope.publications:
                    if topic in output_topics:
                        graph.edge('m_'+scope_name, 't_'+topic,
                                   color=topic_colors[topic], style='dashed')

        log.info(' > Writing subscribe edges')
        if show_subscriptions:
            for scope_name, scope in output_scopes.items():
                for topic in scope.subscriptions:
                    if topic in output_topics:
                        graph.edge('t_'+topic, 'm_'+scope_name, color=topic_colors[topic])

        graph.render(file_name, view=False)
|
2017-11-30 06:28:17 -04:00
|
|
|
|
|
|
|
|
2019-04-01 09:36:43 -03:00
|
|
|
class OutputJSON(object):
    """ write graph to a JSON file (that can be used with D3.js) """

    def __init__(self, graph):
        """
        :param graph: Graph instance; its output_topics, output_scopes and
                      topic_colors are serialized by write()
        """
        self._graph = graph

    @staticmethod
    def _edge(source, target, color, style):
        """ Build a single D3.js link entry (shared by pub & sub edges). """
        return {'source': source, 'target': target, 'color': color, 'style': style}

    def write(self, file_name):
        """ Serialize the graph into file_name as {'nodes': [...], 'links': [...]}. """
        print('Writing to '+file_name)

        output_topics = self._graph.output_topics
        output_scopes = self._graph.output_scopes
        topic_colors = self._graph.topic_colors

        nodes = []

        # nodes
        # (sort by length, such that short names are last. The rendering order
        # will be the same, so that in case of an overlap, the shorter label
        # will be on top)
        for name, scope in sorted(output_scopes.items(), key=lambda st: len(st[0]), reverse=True):
            nodes.append({
                'id': 'm_'+name,
                'name': name,
                'type': scope.typename,
                'color': '#666666',
                # TODO: add url to open module documentation?
            })

        for topic in sorted(output_topics, key=len, reverse=True):
            nodes.append({
                'id': 't_'+topic,
                'name': topic,
                'type': 'topic',
                'color': topic_colors[topic],
                # url is opened when double-clicking on the node
                'url': 'https://github.com/PX4/PX4-Autopilot/blob/main/msg/'+topic_filename(topic)+'.msg',
            })

        # edges: module -> topic for publications (dashed),
        #        topic -> module for subscriptions (normal)
        edges = []
        for name, scope in output_scopes.items():
            for topic in scope.publications:
                if topic in output_topics:
                    edges.append(self._edge('m_'+name, 't_'+topic, topic_colors[topic], 'dashed'))

        for name, scope in output_scopes.items():
            for topic in scope.subscriptions:
                if topic in output_topics:
                    edges.append(self._edge('t_'+topic, 'm_'+name, topic_colors[topic], 'normal'))

        data = {'nodes': nodes, 'links': edges}

        with open(file_name, 'w') as outfile:
            json.dump(data, outfile) # add indent=2 for readable formatting
|
2021-02-27 17:14:17 -04:00
|
|
|
if "__main__" == __name__:

    args = parser.parse_args()

    # raise log verbosity for each repeated -v flag
    if 0 < args.verbosity:
        if 1 == args.verbosity:
            log.setLevel(logging.INFO)
            print("set log level to INFO")
        else:  # implicitly 1 <
            log.setLevel(logging.DEBUG)
            print("set log level to DEBUG")

    # ignore topics that are subscribed/published by many topics, but are not really
    # useful to show in the graph
    topic_blacklist = [ 'parameter_update', 'mavlink_log', 'log_message' ]
    print('Excluded topics: '+str(topic_blacklist))

    # an empty whitelist means "all modules"; otherwise restrict the graph
    # to the comma-separated module names given on the command line
    if len(args.modules) == 0:
        scope_whitelist = []
    else:
        scope_whitelist = set(m.strip() for m in args.modules.split(','))

    graph = Graph(scope_whitelist=scope_whitelist, topic_blacklist=topic_blacklist)

    # if no source paths are supplied, guess that we're in the project root, and apply it to the entire 'src/' tree
    if len(args.src_path) == 0:
        args.src_path = ['src']

    # transcribe only the source paths that actually exist:
    source_paths = []
    for path in args.src_path:
        if os.path.exists(path):
            source_paths.append(path)
        else:
            # Logger.warn is a deprecated alias; use warning()
            log.warning("Could not find path: " + path)

    if 0 == len(source_paths):
        print("!! None of the source directories were valid -- Exiting.")
        sys.exit(-1)

    # ignore certain paths
    path_blacklist = ['src/lib/parameters/']
    if 0 < len(args.exclude_path):
        path_blacklist = args.exclude_path
    if path_blacklist:
        print('Excluded Path: '+str(path_blacklist))

    graph.build(source_paths, path_blacklist=path_blacklist, use_topic_pubsub_union=args.use_topic_union, merge_depends=args.merge_depends)

    if args.output == 'json':
        output_json = OutputJSON(graph)
        output_json.write(args.file+'.json')

    elif args.output in ('graphviz', 'gv'):
        # graphviz is an optional dependency, only needed for this output mode
        try:
            from graphviz import Digraph
        except ImportError as e:
            print("Failed to import graphviz: " + str(e))
            print("")
            print("You may need to install it with:")
            print("    pip3 install --user graphviz")
            print("")
            sys.exit(1)
        output_graphviz = OutputGraphviz(graph)
        engine = 'fdp'  # use neato or fdp
        # full graph plus subscription-only and publication-only variants
        output_graphviz.write(args.file+'.fv', engine=engine)
        output_graphviz.write(args.file+'_subs.fv', show_publications=False, engine=engine)
        output_graphviz.write(args.file+'_pubs.fv', show_subscriptions=False, engine=engine)

    elif args.output == 'none':
        pass

    else:
        print('Error: unknown output format '+args.output)