From 4268335e09c0de36ac5383828b59d4077aceb1f4 Mon Sep 17 00:00:00 2001
From: Maximilian Schmeller
Date: Wed, 14 Sep 2022 14:08:47 +0200
Subject: [PATCH] Fix crash when BW_ENABLED=False, make README.md more user-friendly

---
 README.md            | 32 +++++++++++++++++++++++++++++++-
 misc/utils.py        |  2 +-
 trace-analysis.ipynb | 20 +++++++++++---------
 3 files changed, 43 insertions(+), 11 deletions(-)

diff --git a/README.md b/README.md
index 1bec1f4..64dc2d9 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@
 Automatically extract data dependencies and end-to-end (E2E) latencies from ROS2 trace data.
 
 ## Prerequisites
-* Python 3.10
+* Python 3.10 or newer (older versions will not work)
 * [JupyterLab](https://jupyter.org/install#jupyterlab) or [Jupyter Notebook](https://jupyter.org/install#jupyter-notebook)
 * [ROS2 Tracing](https://github.com/ros2/ros2_tracing)
 * [Tracetools Analysis](https://gitlab.com/ros-tracing/tracetools_analysis)
@@ -19,4 +19,34 @@ pip3.10 -V
 pip3.10 install -r requirements.txt
 ```
 
+Make sure that ROS2 Tracing and Tracetools Analysis were compiled with `colcon build [...] --symlink-install [...]`. Without `--symlink-install`, the build folder layout is different and the libraries required by this tool cannot be located.
+
 ## Usage
+
+The `trace-analysis.ipynb` notebook is the entry point for users.
+Configure the notebook according to the comments in the user settings cell.
+Settings can be changed either in the notebook or via environment variables:
+```python
+# In the notebook (User Settings cell near the top):
+TR_PATH = "path/to/trace/dir"
+E2E_ENABLED = True
+...
+```
+
+```shell
+# In the shell of your choice (e.g. Bash):
+# Each setting has the same name as in the notebook, prefixed by "ANA_NB_".
+# Export the variables so that they are visible to the Jupyter/nbconvert process.
+export ANA_NB_TR_PATH="path/to/trace/dir"
+export ANA_NB_E2E_ENABLED="True"
+```
+
+You can run the notebook via the "Run All" command in Jupyter, or execute it headlessly
+from the command line:
+```shell
+jupyter nbconvert --to notebook --execute trace-analysis.ipynb
+```
+
+nbconvert can also be called from Python directly; see the [nbconvert documentation](https://nbconvert.readthedocs.io/en/latest/execute_api.html) and the example at the end of this section.
+
+The output files are found in the configured output directory (default: `out/`). Inputs are processed and cached in `cache/`.
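+The end-to-end critical-path summary is additionally written to `e2e.csv` (tab-separated) in the output directory.
+As a sketch, assuming the default `out/` output directory, it can be inspected with pandas:
+
+```python
+import pandas as pd
+
+# Read the E2E summary written by trace-analysis.ipynb (tab-separated, index in the first column).
+e2e = pd.read_csv("out/e2e.csv", sep="\t", index_col=0)
+
+# Show the slowest end-to-end paths first.
+print(e2e.sort_values("e2e_latency", ascending=False).head(10))
+```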
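+For example, a minimal headless run from Python might look like the following sketch
+(it uses nbconvert's documented `ExecutePreprocessor` API; the output file name, kernel name and paths are placeholders; adjust them to your setup):
+
+```python
+import os
+
+import nbformat
+from nbconvert.preprocessors import ExecutePreprocessor
+
+# Optional: override notebook settings via the ANA_NB_-prefixed environment variables.
+os.environ["ANA_NB_TR_PATH"] = "path/to/trace/dir"
+
+# Load, execute and save the notebook (equivalent to the CLI call above).
+nb = nbformat.read("trace-analysis.ipynb", as_version=4)
+ep = ExecutePreprocessor(timeout=None, kernel_name="python3")
+ep.preprocess(nb, {"metadata": {"path": "."}})
+
+with open("trace-analysis.out.ipynb", "w", encoding="utf-8") as f:
+    nbformat.write(nb, f)
+```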
diff --git a/misc/utils.py b/misc/utils.py
index 3d43b01..1490ce4 100644
--- a/misc/utils.py
+++ b/misc/utils.py
@@ -17,7 +17,7 @@ from IPython import get_ipython
 def skip_if_false(line, cell, local_ns=None):
     condition_var = eval(line, None, local_ns)
     if condition_var:
-        result = get_ipython().run_cell(cell)
+        get_ipython().run_cell(cell)
         return None
     return f"Skipped (evaluated {line} to False)"

diff --git a/trace-analysis.ipynb b/trace-analysis.ipynb
index e3ff6ec..26cea8a 100644
--- a/trace-analysis.ipynb
+++ b/trace-analysis.ipynb
@@ -24,10 +24,7 @@
     "from misc.utils import ProgressPrinter, cached, parse_as\n",
     "\n",
     "%load_ext pyinstrument\n",
-    "%matplotlib inline\n",
-    "\n",
-    "A=True\n",
-    "B=None"
+    "%matplotlib inline"
    ]
   },
   {
@@ -68,7 +65,7 @@
     "OUT_PATH = \"out/\"\n",
     "\n",
     "# Whether to annotate topics/publications with bandwidth/message size\n",
-    "BW_ENABLED = True\n",
+    "BW_ENABLED = False\n",
     "# Path to a results folder as output by ma-hw-perf-tools/messages/record.bash\n",
     "# Used to annotate message sizes in E2E latency calculations\n",
     "BW_PATH = \"../ma-hw-perf-tools/data/results\"\n",
@@ -139,7 +136,9 @@
     "TR_PATH = _expand_path(TR_PATH)\n",
     "OUT_PATH = _expand_path(OUT_PATH)\n",
     "BW_PATH = _expand_path(BW_PATH)\n",
-    "CL_PATH = _expand_path(CL_PATH)"
+    "CL_PATH = _expand_path(CL_PATH)\n",
+    "\n",
+    "os.makedirs(OUT_PATH, exist_ok=True)"
    ]
   },
   {
@@ -226,7 +225,6 @@
    "execution_count": null,
    "outputs": [],
    "source": [
-    "%%skip_if_false DFG_ENABLED\n",
     "from latency_graph import latency_graph as lg\n",
     "\n",
     "def _make_latency_graph():\n",
@@ -1251,7 +1249,7 @@
     "    if \"DDS\" not in legend_entries:\n",
     "        legend_entries[\"DDS\"] = r\n",
     "\n",
-    "    topic_stats = msg_sizes.get(pub.topic_name)\n",
+    "    topic_stats = msg_sizes.get(pub.topic_name) if BW_ENABLED else None\n",
     "    if topic_stats:\n",
     "        size_str = bw_str(topic_stats)\n",
     "        ax.text(r_x + r_w / 2, r_y + arr_width + margin_y, size_str, ha=\"center\", backgroundcolor=(1,1,1,.5), zorder=11000)\n",
@@ -1346,7 +1344,10 @@
     "items = list(critical_paths.items())\n",
     "items.sort(key=lambda pair: len(pair[1]), reverse=True)\n",
     "\n",
+    "out_df = pd.DataFrame(columns=[\"path\", \"timestamp\", \"e2e_latency\"])\n",
     "for key, paths in items:\n",
+    "    path_records = [(\" -> \".join(key), p[0].timestamp, p[0].timestamp - p[-1].timestamp) for p in paths]\n",
+    "    out_df = pd.concat([out_df, pd.DataFrame(path_records, columns=out_df.columns)], ignore_index=True)\n",
     "    print(f\"======== {len(paths)}x: {sum(map(lambda p: p[0].timestamp - p[-1].timestamp, paths))/len(paths)*1000:.3f}ms\")\n",
     "    paths_durations = []\n",
     "    for path in paths:\n",
@@ -1381,7 +1382,8 @@
     "            E2E_PLOT_TIMESTAMP = j\n",
     "        dur_str = colored(f\"{duration * 1000 :>.3f}ms\", colors[E2E_PLOT_TIMESTAMP])\n",
     "        print(f\"  -> {dur_str} {part}\")\n",
-    "\n"
+    "\n",
+    "out_df.to_csv(os.path.join(OUT_PATH, \"e2e.csv\"), sep=\"\\t\")"
    ],
   "metadata": {
    "collapsed": false,