Make notebook script-executable

* Add settings to disable certain features
  * Add IPython magic to conditionally execute cells
* Add support for config through environment vars
* Add installation instructions in README.md
Maximilian Schmeller 2022-09-12 19:24:26 +02:00
parent 98fdd418b5
commit dfbabe7ae0
4 changed files with 266 additions and 148 deletions


@@ -1,3 +1,22 @@
# MA Autoware Trace Analysis
Automatically extract data dependencies and latencies from Autoware source code and trace data
Automatically extract data dependencies and end-to-end (E2E) latencies from ROS2 trace data.
## Prerequisites
* Python 3.10
* [JupyterLab](https://jupyter.org/install#jupyterlab) or [Jupyter Notebook](https://jupyter.org/install#jupyter-notebook)
* [ROS2 Tracing](https://github.com/ros2/ros2_tracing)
* [Tracetools Analysis](https://gitlab.com/ros-tracing/tracetools_analysis)
* `python3-babeltrace` and `python3-lttng` **for Python 3.10**, e.g. via `sudo apt install python3-babeltrace python3-lttng` (this requires either Ubuntu 22.04 or a custom installation); a quick import check is sketched below
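To verify that the bindings are importable from Python 3.10 specifically, a quick sanity check (hypothetical, not part of the repository; `babeltrace` and `lttng` are the module names those packages install):

```shell
python3.10 -c "import babeltrace, lttng" && echo "tracing bindings OK"
```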
## Installation
```shell
# Make sure you are running Python 3.10 and the corresponding pip:
python3.10 -V
pip3.10 -V
pip3.10 install -r requirements.txt
```
## Usage
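Per the commit title, the notebook can now also be run unattended. A minimal sketch of a headless run, assuming the notebook file is named `trace-analysis.ipynb` (the real filename is not shown in this diff) and using the `ANA_NB_*` environment-variable overrides described in the notebook's settings cell:

```shell
ANA_NB_TR_PATH="data/awsim-trace/ust" ANA_NB_DFG_ENABLED=False \
  jupyter nbconvert --to notebook --execute --inplace trace-analysis.ipynb
```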


@@ -8,6 +8,19 @@ import pickle
import time
from typing import List
from IPython.core.magic import (register_cell_magic, needs_local_scope)
from IPython import get_ipython
@register_cell_magic
@needs_local_scope
def skip_if_false(line, cell, local_ns=None):
    # Evaluate the magic's argument (e.g. "E2E_ENABLED") in the notebook's namespace.
    condition_var = eval(line, None, local_ns)
    if condition_var:
        get_ipython().run_cell(cell)
        return None
    return f"Skipped (evaluated {line} to False)"
def left_abbreviate(string, limit=120):
    return string if len(string) <= limit else f"...{string[:limit - 3]}"
@@ -44,6 +57,14 @@ def stable_hash(obj):
    return hashlib.md5(json.dumps(obj).encode("utf-8")).hexdigest()[:10]
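stable_hash reduces any JSON-serializable object to a short deterministic digest; `cached` below uses such digests to key its cache files. A quick illustration (the concrete digest is made up, but the shape, a 10-character hex string, is what the code returns):

```python
stable_hash({"path": "data/awsim-trace/ust"})  # e.g. '3f2c9a1b07' (illustrative value)
```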
def parse_as(type, string):
    # Parse an environment-variable string into the type of the setting it overrides.
    if issubclass(type, bool):
        # bool("False") would be truthy, so interpret common textual forms explicitly.
        return string.strip().lower() in ("true", "1", "yes")
    if any(issubclass(type, type2) for type2 in (str, float, int)):
        return type(string)
    if issubclass(type, (list, dict, set)):
        # Collection-valued settings are given as JSON on the command line.
        return type(json.loads(string))
    raise ValueError(f"Unknown type {type.__name__}")
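With the settings cell below, an override such as `ANA_NB_DFG_MAX_HIER_LEVEL=3` ends up in `parse_as(int, "3")`; a sketch of the expected behavior:

```python
parse_as(int, "3")                                      # -> 3
parse_as(bool, "False")                                 # -> False (textual-bool handling above)
parse_as(list, '["/vehicle/status/", "/sensing/imu"]')  # -> list via json.loads
```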
def cached(name, function, file_deps: List[str]):
    if not os.path.isdir("cache"):
        os.makedirs("cache", exist_ok=True)


@@ -4,5 +4,3 @@ matplotlib
pyvis
graphviz
ruamel.yaml
blist @ git+https://github.com/mojomex/blist.git@47724cbc4137ddfb685f9711e950fb82587bf971


@@ -21,22 +21,13 @@
"import pandas as pd\n",
"from matplotlib import pyplot as plt\n",
"\n",
"from clang_interop.cl_types import ClContext\n",
"from misc.utils import ProgressPrinter, cached, parse_as\n",
"from clang_interop.process_clang_output import process_clang_output\n",
"\n",
"sys.path.append(\"../autoware/build/tracetools_read/\")\n",
"sys.path.append(\"../autoware/build/tracetools_analysis/\")\n",
"from tracetools_read.trace import *\n",
"from tracetools_analysis.loading import load_file\n",
"from tracetools_analysis.processor.ros2 import Ros2Handler\n",
"from tracetools_analysis.utils.ros2 import Ros2DataModelUtil\n",
"\n",
"from tracing_interop.tr_types import TrTimer, TrTopic, TrPublisher, TrPublishInstance, TrCallbackInstance, \\\n",
"TrCallbackSymbol, TrCallbackObject, TrSubscriptionObject, TrContext\n",
"from misc.utils import ProgressPrinter, cached\n",
"\n", "\n",
"%load_ext pyinstrument\n", "%load_ext pyinstrument\n",
"%matplotlib inline" "%matplotlib inline\n",
"\n",
"A=True\n",
"B=None"
] ]
}, },
{ {
@@ -49,10 +40,133 @@
},
"outputs": [],
"source": [
"TR_PATH = os.path.expanduser(\"data/awsim-trace/ust\")\n",
"CL_PATH = os.path.expanduser(\"~/Projects/llvm-project/clang-tools-extra/ros2-internal-dependency-checker/output\")"
"##################################################\n",
"# User Settings\n",
"##################################################\n",
"# Change these to influence the execution of the\n",
"# notebook.\n",
"# You can override these from the command line\n",
"# by defining environment variables with the\n",
"# name of the constants below, prefixed with\n",
"# \"ANA_NB_\".\n",
"# For example, the environment variable\n",
"# \"ANA_NB_TR_PATH\" will override the \"TR_PATH\"\n",
"# setting below.\n",
"##################################################\n",
"\n",
"# The path to the build folder of a ROS2 workspace that contains the\n",
"# tracetools_read and tracetools_analysis folders.\n",
"TRACING_WS_BUILD_PATH = \"../autoware/build\"\n",
"\n",
"# Path to trace directory (e.g. ~/.ros/my-trace/ust) or to a converted trace file.\n",
"# Using the path \"/ust\" at the end is optional but greatly reduces processing time\n",
"# if kernel traces are also present.\n",
"TR_PATH = \"data/awsim-trace/ust\"\n",
"\n",
"# Path to the folder all artifacts from this notebook are saved to.\n",
"# This entails plots as well as data tables.\n",
"OUT_PATH = \"out/\"\n",
"\n",
"# Whether to annotate topics/publications with bandwidth/message size\n",
"BW_ENABLED = True\n",
"# Path to a results folder as output by ma-hw-perf-tools/messages/record.bash\n",
"# Used to annotate message sizes in E2E latency calculations\n",
"BW_PATH = \"../ma-hw-perf-tools/data/results\"\n",
"\n",
"# Whether to use dependencies extracted by the Clang-tools to supplement\n",
"# automatic node-internal data flow annotations.\n",
"# If in doubt, set to False.\n",
"CL_ENABLED = False\n",
"# Path to the output directory of the ROS2 dependency checker.\n",
"# Will only be used if CL_ENABLED is True.\n",
"CL_PATH = \"~/Projects/llvm-project/clang-tools-extra/ros2-internal-dependency-checker/output\"\n",
"\n",
"# Whether to compute data flow graphs.\n",
"# If you are only interested in E2E latencies, set this to False\n",
"DFG_ENABLED = False\n",
"# Whether to plot data flow graphs (ignored if DFG_ENABLED is False)\n",
"DFG_PLOT = True\n",
"\n",
"# The maximum node namespace hierarchy level to be plotted.\n",
"# Top-level (1): e.g. /sensing, /control, etc.\n",
"# Level 3: e.g. /sensing/lidar/pointcloud_processor\n",
"DFG_MAX_HIER_LEVEL = 100\n",
"\n",
"# RegEx pattern for nodes that shall be marked as system inputs\n",
"# These will be plotted with a start arrow as known from automata diagrams\n",
"DFG_INPUT_NODE_PATTERNS = [r\"^/sensing\"]\n",
"# RegEx pattern for nodes that shall be marked as system outputs\n",
"# These will be plotted with a double border\n",
"DFG_OUTPUT_NODE_PATTERNS = [r\"^/awapi\", r\"^/control/external_cmd_converter\"]\n",
"# RegEx for nodes which shall not be plotted in the DFG\n",
"DFG_EXCL_NODE_PATTERNS = [r\"^/rviz2\", r\"transform_listener_impl\"]\n",
"\n",
"# Whether to compute E2E latencies.\n",
"E2E_ENABLED = True\n",
"# Whether to plot end-to-end latency information (ignored if E2E_ENABLED is False)\n",
"E2E_PLOT = True\n",
"# The index of the output message that shall be used in plots that visualize a specific\n",
"# message dependency tree. This index has to be 0 <= n < #output messages\n",
"E2E_PLOT_TIMESTAMP = 3900\n",
"# E2E latency threshold. Every E2E latency higher than this is discarded.\n",
"# Set this as low as comfortably possible to speed up calculations.\n",
"# WARNING: If you set this too low (i.e. to E2E latencies that plausibly can happen)\n",
"# your results will be wrong)\n",
"E2E_TIME_LIMIT_S = 2\n",
"\n",
"# All topics containing any of these RegEx patterns are considered output topics in E2E latency calculations\n",
"# E.g. r\"^/control/\" will cover all control topics\n",
"E2E_OUTPUT_TOPIC_PATTERNS = [r\"^/control/trajectory_follower/control_cmd\"]\n",
"# All topics containing any of these RegEx patterns are considered input topics in E2E latency calculations\n",
"# E.g. r\"^/sensing/\" will cover all sensing topics\n",
"E2E_INPUT_TOPIC_PATTERNS = [\"/vehicle/status/\", \"/sensing/imu\"]\n",
"\n",
"\n",
"# This code overrides the above constants with environment variables, do not edit.\n",
"for env_key, env_value in os.environ.items():\n",
" if env_key.startswith(\"ANA_NB_\"):\n",
" key = env_key.removeprefix(\"ANA_NB_\")\n",
" if key not in globals().keys():\n",
" continue\n",
" value = parse_as(type(globals()[key]), env_value)\n",
" globals()[key] = value\n",
"\n",
"# Convert input paths to absolute paths\n",
"def _expand_path(path):\n",
" return os.path.realpath(os.path.expandvars(os.path.expanduser(path)))\n",
"\n",
"TRACING_WS_BUILD_PATH = _expand_path(TRACING_WS_BUILD_PATH)\n",
"TR_PATH = _expand_path(TR_PATH)\n",
"OUT_PATH = _expand_path(OUT_PATH)\n",
"BW_PATH = _expand_path(BW_PATH)\n",
"CL_PATH = _expand_path(CL_PATH)"
]
},
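Given the ANA_NB_ prefix convention in the settings cell above, any of these constants can be overridden from the shell before a headless run; list-valued settings are passed as JSON (the values here are illustrative):

```shell
export ANA_NB_TR_PATH="$HOME/.ros/tracing/my-trace/ust"
export ANA_NB_E2E_OUTPUT_TOPIC_PATTERNS='["^/control/"]'
```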
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [
"from clang_interop.cl_types import ClContext\n",
"from clang_interop.process_clang_output import process_clang_output\n",
"\n",
"sys.path.append(os.path.join(TRACING_WS_BUILD_PATH, \"tracetools_read/\"))\n",
"sys.path.append(os.path.join(TRACING_WS_BUILD_PATH, \"tracetools_analysis/\"))\n",
"from tracetools_read.trace import *\n",
"from tracetools_analysis.loading import load_file\n",
"from tracetools_analysis.processor.ros2 import Ros2Handler\n",
"from tracetools_analysis.utils.ros2 import Ros2DataModelUtil\n",
"\n",
"from tracing_interop.tr_types import TrTimer, TrTopic, TrPublisher, TrPublishInstance, TrCallbackInstance, \\\n",
"TrCallbackSymbol, TrCallbackObject, TrSubscriptionObject, TrContext"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "markdown",
"source": [
@@ -86,7 +200,7 @@
"for name in _tr_globals:\n",
" globals()[name] = getattr(_tracing_context, name)\n",
"\n",
"print(\"Done.\")"
],
"metadata": {
"collapsed": false,
@@ -112,30 +226,13 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false DFG_ENABLED\n",
"from latency_graph import latency_graph as lg\n",
"\n",
"lat_graph = lg.LatencyGraph(_tracing_context)\n",
"import pickle\n",
"def _make_latency_graph():\n",
" return lg.LatencyGraph(_tracing_context)\n",
"\n",
"lat_graph = cached(\"lat_graph\", _make_latency_graph, [TR_PATH])"
"\n",
"with open(\"lat_graph.pkl\", \"wb\") as f:\n",
" pickle.dump(lat_graph, f)\n",
"#with open(\"lat_graph.pkl\", \"rb\") as f:\n",
"# lat_graph = pickle.load(f)"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [
"len(lat_graph.edges)"
],
"metadata": {
"collapsed": false,
@@ -149,6 +246,9 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false DFG_ENABLED\n",
"%%skip_if_false DFG_PLOT\n",
"\n",
"from matching.subscriptions import sanitize\n",
"from typing import Iterable, Sized\n",
"from tracing_interop.tr_types import TrNode, TrCallbackObject, TrCallbackSymbol, TrSubscriptionObject\n",
@@ -246,6 +346,9 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false DFG_ENABLED\n",
"%%skip_if_false DFG_PLOT\n",
"\n",
"import re\n",
"import math\n",
"\n",
@@ -253,19 +356,12 @@
"# Compute in/out topics for hierarchy level X\n",
"##################################################\n",
"\n",
"HIER_LEVEL = 100\n",
"\n",
"input_node_patterns = [r\"^/sensing\"]\n",
"output_node_patterns = [r\"^/awapi\", r\"^/control/external_cmd_converter\"]\n",
"\n",
"node_excluded_patterns = [r\"^/rviz2\", r\"transform_listener_impl\"]\n",
"\n",
"def get_nodes_on_level(lat_graph: lg.LatencyGraph):\n",
" def _traverse_node(node: lg.LGHierarchyLevel, cur_lvl=0):\n",
" if cur_lvl == HIER_LEVEL:\n",
" if cur_lvl == DFG_MAX_HIER_LEVEL:\n",
" return [node]\n",
"\n",
" if not node.children and cur_lvl < HIER_LEVEL:\n",
" if not node.children and cur_lvl < DFG_MAX_HIER_LEVEL:\n",
" return [node]\n",
"\n",
" collected_nodes = []\n",
@@ -276,10 +372,10 @@
" return _traverse_node(lat_graph.top_node)\n",
"\n",
"lvl_nodes = get_nodes_on_level(lat_graph)\n",
"lvl_nodes = [n for n in lvl_nodes if not any(re.search(p, n.full_name) for p in node_excluded_patterns)]\n",
"lvl_nodes = [n for n in lvl_nodes if not any(re.search(p, n.full_name) for p in DFG_EXCL_NODE_PATTERNS)]\n",
"\n",
"input_nodes = [n.full_name for n in lvl_nodes if any(re.search(p, n.full_name) for p in input_node_patterns)]\n",
"input_nodes = [n.full_name for n in lvl_nodes if any(re.search(p, n.full_name) for p in DFG_INPUT_NODE_PATTERNS)]\n",
"output_nodes = [n.full_name for n in lvl_nodes if any(re.search(p, n.full_name) for p in output_node_patterns)]\n",
"output_nodes = [n.full_name for n in lvl_nodes if any(re.search(p, n.full_name) for p in DFG_OUTPUT_NODE_PATTERNS)]\n",
"\n",
"print(', '.join(map(lambda n: n, input_nodes)))\n",
"print(', '.join(map(lambda n: n, output_nodes)))\n",
@@ -382,14 +478,12 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"\n",
"from latency_graph.message_tree import DepTree\n",
"from tqdm.notebook import tqdm\n",
"from bisect import bisect\n",
"\n",
"# All topics containing any of these strings are considered end topics in E2E latency calculations\n",
"# E.g. /control/ will cover all control topics\n",
"end_topic_filters = [\"/control/trajectory_follower/control_cmd\"]\n",
"\n",
"\n", "\n",
"def inst_get_dep_msg(inst: TrCallbackInstance):\n", "def inst_get_dep_msg(inst: TrCallbackInstance):\n",
" if inst.callback_object not in _tracing_context.callback_objects.by_callback_object:\n", " if inst.callback_object not in _tracing_context.callback_objects.by_callback_object:\n",
@@ -545,11 +639,12 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"\n",
"#for path in e2e_topic_paths:\n",
"# end_topics = path[-1]\n",
"end_topics = [t for t in _tracing_context.topics if any(f in t.name for f in end_topic_filters)]\n",
"for end_topic in end_topics:\n",
"end_topics = [t for t in _tracing_context.topics if any(re.search(f, t.name) for f in E2E_OUTPUT_TOPIC_PATTERNS)]\n",
"\n",
"def build_dep_trees():\n",
" for end_topic in end_topics:\n",
" end_topic: TrTopic\n",
"\n",
" pubs = end_topic.publishers\n",
@@ -571,23 +666,7 @@
" print(f\"Size: min={min(sizes)} avg={sum(sizes) / len(sizes)} max={max(sizes)}\")\n",
" print(f\"E2E Lat: min={min(e2e_lats)*1000:.3f}ms avg={sum(e2e_lats) / len(sizes)*1000:.3f}ms max={max(e2e_lats)*1000:.3f}ms\")\n",
"\n",
"with open(\"trees.pkl\", \"wb\") as f:\n",
" pickle.dump(trees, f)\n",
"trees = cached(\"trees\", build_dep_trees, [TR_PATH])"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [
"with open(\"trees.pkl\", \"rb\") as f:\n",
" trees = pickle.load(f)"
],
"metadata": {
"collapsed": false,
@@ -601,6 +680,9 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"%%skip_if_false BW_ENABLED\n",
"\n",
"import glob\n",
"\n",
"\n",
@@ -625,7 +707,6 @@
" return f\"{bytes:.0f} B\"\n",
"\n",
"\n",
"BW_PATH = \"../ma-hw-perf-tools/data/results\"\n",
"bw_files = glob.glob(os.path.join(BW_PATH, \"*.log\"))\n",
"msg_sizes = {}\n",
"for bw_file in bw_files:\n",
@@ -660,12 +741,10 @@
"execution_count": null,
"outputs": [],
"source": [
"from typing import List\n",
"from latency_graph.message_tree import DepTree\n",
"\n",
"# All topics containing any of these strings are considered start topics in E2E latency calculations\n",
"# E.g. /sensing/ will cover all sensing topics\n",
"start_topic_filters = [\"/vehicle/status/\", \"/sensing/imu\"]\n",
"%%skip_if_false E2E_ENABLED\n",
"\n",
"\n",
"from latency_graph.message_tree import DepTree\n",
"\n",
"def leaf_topics(tree: DepTree, lvl=0):\n", "def leaf_topics(tree: DepTree, lvl=0):\n",
" ret_list = []\n", " ret_list = []\n",
@ -679,21 +758,6 @@
" ret_list += leaf_topics(dep, lvl+1)\n", " ret_list += leaf_topics(dep, lvl+1)\n",
" return ret_list\n", " return ret_list\n",
"\n", "\n",
"#all_topics = set()\n",
"\n",
"#for tree in trees:\n",
"# for d, t in leaf_topics(tree):\n",
"# if t in [\"/parameter_events\", \"/clock\"]:\n",
"# continue\n",
"# all_topics.add(t)\n",
"\n",
"#def critical_path(self: DepTree, start_topic_filters: List[str]):\n",
"# if not self.deps:\n",
"# return [self.head]\n",
"#\n",
"# return [self.head, *max(map(DepTree.critical_path, self.deps), key=lambda ls: ls[-1].timestamp)]\n",
"\n",
"E2E_TIME_LIMIT_S = 2\n",
"\n", "\n",
"def all_e2es(tree: DepTree, t_start=None):\n", "def all_e2es(tree: DepTree, t_start=None):\n",
" if t_start is None:\n", " if t_start is None:\n",
@ -707,7 +771,8 @@
" ret_list += all_e2es(dep, t_start)\n", " ret_list += all_e2es(dep, t_start)\n",
" return ret_list\n", " return ret_list\n",
"\n", "\n",
"def relevant_e2es(tree: DepTree, start_topic_filters, t_start=None, path=None):\n", "\n",
"def relevant_e2es(tree: DepTree, input_topic_patterns, t_start=None, path=None):\n",
" if t_start is None:\n", " if t_start is None:\n",
" t_start = tree.head.timestamp\n", " t_start = tree.head.timestamp\n",
"\n", "\n",
@@ -723,19 +788,19 @@
" if not tree.deps:\n",
" match tree.head:\n",
" case TrPublishInstance(publisher=pub):\n",
" if pub and any(f in pub.topic_name for f in start_topic_filters):\n",
" if pub and any(re.search(f, pub.topic_name) for f in input_topic_patterns):\n",
" return [(latency,new_path)]\n",
"\n",
" ret_list = []\n",
" for dep in tree.deps:\n",
" ret_list += relevant_e2es(dep, start_topic_filters, t_start, new_path)\n",
" ret_list += relevant_e2es(dep, input_topic_patterns, t_start, new_path)\n",
" return ret_list\n",
"\n",
"\n",
"e2ess = []\n",
"e2e_pathss = []\n",
"for tree in trees:\n",
" e2es, e2e_paths = zip(*relevant_e2es(tree, start_topic_filters))\n",
" e2es, e2e_paths = zip(*relevant_e2es(tree, E2E_INPUT_TOPIC_PATTERNS))\n",
" e2ess.append(e2es)\n",
" e2e_pathss.append(e2e_paths)"
],
@@ -751,26 +816,26 @@
"execution_count": null,
"outputs": [],
"source": [
"#from matplotlib.animation import FuncAnimation\n",
"#from IPython import display\n",
"\n",
"#fig, ax = plt.subplots(figsize=(16, 9))\n",
"#ax: plt.Axes\n",
"#ax.set_xlim(0, 4)\n",
"\n",
"#ax.hist([], bins=200, range=(0, 4), histtype='stepfilled')\n",
"#ax.set_title(\"Time: 0.000000s\")\n",
"\n",
"#def anim(frame):\n",
"# print(frame, end='\\r')\n",
"# ax.clear()\n",
"# ax.hist(e2es[frame], bins=200, range=(0, 4), histtype='stepfilled')\n",
"# ax.set_title(f\"Time: {(trees[frame].head.timestamp - trees[0].head.timestamp):.6}s\")\n",
"\n",
"\n",
"#anim_created = FuncAnimation(fig, anim, min(len(trees), 10000), interval=16, repeat_delay=200)\n",
"\n",
"#video = anim_created.save(\"anim.mp4\", dpi=120)\n",
"\n",
"#for tree in trees:\n",
"# path = tree.critical_path(start_topic_filters)\n",
@@ -802,6 +867,10 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"%%skip_if_false E2E_PLOT\n",
"\n",
"\n",
"fig, ax = plt.subplots(figsize=(18, 9), num=\"e2e_plot\")\n",
"DS=1\n",
"times = [tree.head.timestamp - trees[0].head.timestamp for tree in trees[::DS]]\n",
@@ -847,6 +916,8 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"\n",
"def critical_path(self):\n",
" if not self.deps:\n",
" return [self.head]\n",
@@ -891,7 +962,7 @@
"\n",
" match inst:\n",
" case TrPublishInstance(publisher=pub):\n",
" return pub and any(f in pub.topic_name for f in start_topic_filters)\n",
" return pub and any(f in pub.topic_name for f in E2E_INPUT_TOPIC_PATTERNS)\n",
" return False\n",
"\n",
"\n",
@@ -909,6 +980,9 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"%%skip_if_false E2E_PLOT\n",
"\n",
"from cycler import cycler\n",
"\n",
"def dict_safe_append(dictionary, key, value):\n",
@@ -961,8 +1035,6 @@
" last_pub_time = None\n",
" last_cb_time = t\n",
"\n",
" #dict_safe_append(time_breakdown, \"tmr_cb_calc_time\", tmr_cb_calc_time)\n",
" #dict_safe_append(time_breakdown, \"sub_cb_calc_time\", sub_cb_calc_time)\n",
" dict_safe_append(time_breakdown, \"Timer CB\", tmr_cb_relevant_time)\n",
" dict_safe_append(time_breakdown, \"Subscription CB\", sub_cb_relevant_time)\n",
" dict_safe_append(time_breakdown, \"DDS\", dds_time)\n",
@@ -1002,6 +1074,9 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"%%skip_if_false E2E_PLOT\n",
"\n",
"from scipy import stats\n",
"\n",
"fig, ax = plt.subplots(figsize=(60, 15), num=\"crit_pdf\")\n",
@@ -1038,16 +1113,18 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"%%skip_if_false E2E_PLOT\n",
"\n",
"from tracing_interop.tr_types import TrSubscription\n",
"from matching.subscriptions import sanitize\n",
"import matplotlib.patches as mpatch\n",
"from matplotlib.text import Text\n",
"import math\n",
"\n",
"i = 3900\n",
"tree = trees[i]\n",
"e2es = e2ess[i]\n",
"e2e_paths = e2e_pathss[i]\n",
"tree = trees[E2E_PLOT_TIMESTAMP]\n",
"e2es = e2ess[E2E_PLOT_TIMESTAMP]\n",
"e2e_paths = e2e_pathss[E2E_PLOT_TIMESTAMP]\n",
"margin_y = .2\n",
"margin_x=0\n",
"arr_width= 1 - margin_y\n",
@@ -1199,7 +1276,7 @@
"plot_margin_x = .01 * tree_e2e\n",
"ax.set_xlim(-tree_e2e - plot_margin_x, plot_margin_x)\n",
"ax.set_yticks(np.array(range(len(y_labels))) + .5, y_labels)\n",
"ax.set_title(f\"Timestep {i}: {(tree.head.timestamp - trees[0].head.timestamp):10.6f}s\")\n",
"ax.set_title(f\"Timestep {E2E_PLOT_TIMESTAMP}: {(tree.head.timestamp - trees[0].head.timestamp):10.6f}s\")\n",
"ax.set_xlabel(\"Time relative to output message [s]\")\n",
"ax.set_ylabel(\"Start topic\")\n",
"\n",
@@ -1234,6 +1311,8 @@
"execution_count": null,
"outputs": [],
"source": [
"%%skip_if_false E2E_ENABLED\n",
"\n",
"from termcolor import colored\n",
"from tqdm import tqdm\n",
"from matching.subscriptions import sanitize\n",
@@ -1295,13 +1374,14 @@
" perc = np.percentile(durations, [70, 90, 95, 100])\n",
" colors = [\"green\", \"yellow\", \"red\", \"magenta\"]\n",
" for part, duration in zip(key, durations):\n",
" i = 0\n",
" color_idx = 0  # local index into colors; must not clobber the E2E_PLOT_TIMESTAMP setting\n",
" for j, p in enumerate(perc):\n",
" if duration < p:\n",
" break\n",
" i = j\n",
" color_idx = j\n",
" dur_str = colored(f\"{duration * 1000 :>.3f}ms\", colors[i])\n",
" dur_str = colored(f\"{duration * 1000 :>.3f}ms\", colors[color_idx])\n",
" print(f\" -> {dur_str} {part}\")\n",
"\n"
],
"metadata": {
"collapsed": false,