Fixes for different setting combos

Maximilian Schmeller 2022-09-15 16:17:24 +02:00
parent 7d3e957ed9
commit b456645621
5 changed files with 36 additions and 69 deletions

View file

@@ -5,7 +5,7 @@ from multiprocessing import Pool
 from typing import Optional, Set, List, Iterable, Dict, Tuple
 import numpy as np
-from tqdm.notebook import tqdm
+from tqdm import tqdm
 from tqdm.contrib import concurrent
 from matching.subscriptions import sanitize
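
The switch from tqdm.notebook to plain tqdm (repeated in the notebook and the other modules below) presumably keeps the progress bars usable when the code runs outside a Jupyter front end. A minimal sketch of the pattern, with an illustrative loop:

from tqdm import tqdm  # text-based progress bar; also works inside notebooks

for _ in tqdm(range(1_000), desc="Processing"):
    pass  # placeholder for the per-item work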

View file

@@ -35,7 +35,11 @@ def parse_as(type, string):
     raise ValueError(f"Unknown type {type.__name__}")
-def cached(name, function, file_deps: List[str]):
+def cached(name, function, file_deps: List[str], disable_cache=False):
+    if disable_cache:
+        print(f"[CACHE] Cache disabled for {name}.")
+        return function()
     if not os.path.isdir("cache"):
         os.makedirs("cache", exist_ok=True)

View file

@@ -4,3 +4,8 @@ matplotlib
 pyvis
 graphviz
 ruamel.yaml
+ipython
+bidict
+termcolor
+tqdm

View file

@@ -5,18 +5,14 @@
 "execution_count": null,
 "outputs": [],
 "source": [
-"import glob\n",
-"import json\n",
 "import os\n",
-"import pickle\n",
-"import re\n",
 "import sys\n",
 "\n",
 "import numpy as np\n",
 "import pandas as pd\n",
 "from matplotlib import pyplot as plt\n",
 "\n",
-"from misc.utils import ProgressPrinter, cached, parse_as\n",
+"from misc.utils import cached, parse_as\n",
 "\n",
 "%load_ext pyinstrument\n",
 "%matplotlib inline"
@@ -49,17 +45,20 @@
 "\n",
 "# The path to the build folder of a ROS2 workspace that contains the\n",
 "# tracetools_read and tracetools_analysis folders.\n",
-"TRACING_WS_BUILD_PATH = \"../src/build\"\n",
+"TRACING_WS_BUILD_PATH = \"~/Projects/autoware/build\"\n",
 "\n",
 "# Path to trace directory (e.g. ~/.ros/my-trace/ust) or to a converted trace file.\n",
 "# Using the path \"/ust\" at the end is optional but greatly reduces processing time\n",
 "# if kernel traces are also present.\n",
-"TR_PATH = \"../sa-tracing-results/140922/01/ust\"\n",
+"TR_PATH = \"data/awsim-trace/ust\"\n",
 "\n",
 "# Path to the folder all artifacts from this notebook are saved to.\n",
 "# This entails plots as well as data tables.\n",
 "OUT_PATH = \"out/\"\n",
 "\n",
+"# Whether to cache the results of long computations per set of inputs\n",
+"CACHING_ENABLED = True\n",
+"\n",
 "# Whether to annotate topics/publications with bandwidth/message size\n",
 "BW_ENABLED = False\n",
 "# Path to a results folder as output by ma-hw-perf-tools/messages/record.bash\n",
@@ -109,12 +108,10 @@
 "\n",
 "# All topics containing any of these RegEx patterns are considered output topics in E2E latency calculations\n",
 "# E.g. r\"^/control/\" will cover all control topics\n",
-"E2E_OUTPUT_TOPIC_PATTERNS = [r\"^/control/command/control_cmd\"]\n",
-"#E2E_OUTPUT_TOPIC_PATTERNS = [r\"^/system/emergency/control_cmd\"]\n",
+"E2E_OUTPUT_TOPIC_PATTERNS = [r\"^/control/trajectory_follower/control_cmd\"]\n",
 "# All topics containing any of these RegEx patterns are considered input topics in E2E latency calculations\n",
 "# E.g. r\"^/sensing/\" will cover all sensing topics\n",
-"E2E_INPUT_TOPIC_PATTERNS = [\"/vehicle/status/\", \"/perception/\"]\n",
-"#E2E_INPUT_TOPIC_PATTERNS = [\"/vehicle/\", \"/sensing/\", \"/localization/\", \"planning\"]\n",
+"E2E_INPUT_TOPIC_PATTERNS = [\"/vehicle/status/\", \"/sensing/imu\"]\n",
 "\n",
 "\n",
 "# This code overrides the above constants with environment variables, do not edit.\n",
@@ -150,18 +147,13 @@
 "execution_count": null,
 "outputs": [],
 "source": [
-"from clang_interop.cl_types import ClContext\n",
-"from clang_interop.process_clang_output import process_clang_output\n",
-"\n",
 "sys.path.append(os.path.join(TRACING_WS_BUILD_PATH, \"tracetools_read/\"))\n",
 "sys.path.append(os.path.join(TRACING_WS_BUILD_PATH, \"tracetools_analysis/\"))\n",
 "from tracetools_read.trace import *\n",
 "from tracetools_analysis.loading import load_file\n",
 "from tracetools_analysis.processor.ros2 import Ros2Handler\n",
-"from tracetools_analysis.utils.ros2 import Ros2DataModelUtil\n",
 "\n",
-"from tracing_interop.tr_types import TrTimer, TrTopic, TrPublisher, TrPublishInstance, TrCallbackInstance, \\\n",
-"TrCallbackSymbol, TrCallbackObject, TrSubscriptionObject, TrContext"
+"from tracing_interop.tr_types import TrTimer, TrTopic, TrPublisher, TrPublishInstance, TrCallbackInstance, TrContext"
 ],
 "metadata": {
 "collapsed": false,
@@ -193,7 +185,7 @@
 " return TrContext(handler)\n",
 "\n",
 "\n",
-"_tracing_context = cached(\"tr_objects\", _load_traces, [TR_PATH])\n",
+"_tracing_context = cached(\"tr_objects\", _load_traces, [TR_PATH], CACHING_ENABLED)\n",
 "_tr_globals = [\"nodes\", \"publishers\", \"subscriptions\", \"timers\", \"timer_node_links\", \"subscription_objects\",\n",
 " \"callback_objects\", \"callback_symbols\", \"publish_instances\", \"callback_instances\", \"topics\"]\n",
 "\n",
@@ -212,22 +204,6 @@
 }
 }
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"outputs": [],
-"source": [
-"for t in topics:\n",
-" if \"control\" in t.name:\n",
-" print(t.name)"
-],
-"metadata": {
-"collapsed": false,
-"pycharm": {
-"name": "#%%\n"
-}
-}
-},
 {
 "cell_type": "markdown",
 "source": [
@@ -250,7 +226,7 @@
 "def _make_latency_graph():\n",
 " return lg.LatencyGraph(_tracing_context)\n",
 "\n",
-"lat_graph = cached(\"lat_graph\", _make_latency_graph, [TR_PATH])"
+"lat_graph = cached(\"lat_graph\", _make_latency_graph, [TR_PATH], CACHING_ENABLED)"
 ],
 "metadata": {
 "collapsed": false,
@@ -267,9 +243,7 @@
 "%%skip_if_false DFG_ENABLED\n",
 "%%skip_if_false DFG_PLOT\n",
 "\n",
-"from matching.subscriptions import sanitize\n",
-"from typing import Iterable, Sized\n",
-"from tracing_interop.tr_types import TrNode, TrCallbackObject, TrCallbackSymbol, TrSubscriptionObject\n",
+"from tracing_interop.tr_types import TrNode, TrCallbackObject, TrSubscriptionObject\n",
 "\n",
 "#################################################\n",
 "# Plot DFG\n",
@@ -498,8 +472,7 @@
 "source": [
 "%%skip_if_false E2E_ENABLED\n",
 "\n",
-"from latency_graph.message_tree import DepTree\n",
-"from tqdm.notebook import tqdm\n",
+"from tqdm import tqdm\n",
 "from bisect import bisect\n",
 "\n",
 "\n",
@@ -635,9 +608,6 @@
 " if pub.topic_name in visited_topics:\n",
 " return None\n",
 " \n",
-" if pub.node and pub.node.name and \"concealer\" in pub.node.name:\n",
-" return None\n",
-"\n",
 " visited_topics.add(pub.topic_name)\n",
 " deps = [get_msg_dep_cb(inst)]\n",
 " case TrCallbackInstance() as cb_inst:\n",
@@ -645,16 +615,9 @@
 " if not is_dep_cb:\n",
 " deps += inst_get_dep_insts(cb_inst)\n",
 " children_are_dep_cbs = True\n",
-" match cb_inst.callback_obj.owner:\n",
-" case TrSubscriptionObject() as sub_obj:\n",
-" if \"concealer\" in sub_obj.subscription.node.name:\n",
-" return None\n",
-" case TrTimer() as tmr_obj:\n",
-" if \"concealer\" in tmr_obj.node.name:\n",
-" return None\n",
 " case _:\n",
-" print(f\"[WARN] Expected inst to be of type TrPublishInstance or TrCallbackInstance, got {type(inst).__name__}\")\n",
 " return None\n",
+" raise TypeError(f\"Expected inst to be of type TrPublishInstance or TrCallbackInstance, got {type(inst).__name__}\")\n",
 " #print(\"Rec level\", lvl)\n",
 " deps = [dep for dep in deps if dep is not None]\n",
 " deps = [get_dep_tree(dep, lvl + 1, set(visited_topics), children_are_dep_cbs, start_time) for dep in deps]\n",
@@ -691,7 +654,7 @@
 " all_trees.append(tree)\n",
 " return all_trees\n",
 "\n",
-"trees = cached(\"trees\", build_dep_trees, [TR_PATH])"
+"trees = cached(\"trees\", build_dep_trees, [TR_PATH], CACHING_ENABLED)"
 ],
 "metadata": {
 "collapsed": false,
@@ -1108,15 +1071,15 @@
 "fig, ax = plt.subplots(figsize=(60, 15), num=\"crit_pdf\")\n",
 "ax.set_prop_cycle(cycler('color', [plt.cm.nipy_spectral(i/4) for i in range(5)]))\n",
 "\n",
-"#kde = stats.gaussian_kde(timestep_mags)\n",
-"#xs = np.linspace(timestep_mags.min(), timestep_mags.max(), 1000)\n",
-"#ax.plot(xs, kde(xs), label=\"End-to-End Latency\")\n",
-"#perc = 90\n",
-"#ax.axvline(np.percentile(timestep_mags, perc), label=f\"{perc}th percentile\")\n",
+"kde = stats.gaussian_kde(timestep_mags)\n",
+"xs = np.linspace(timestep_mags.min(), timestep_mags.max(), 1000)\n",
+"ax.plot(xs, kde(xs), label=\"End-to-End Latency\")\n",
+"perc = 90\n",
+"ax.axvline(np.percentile(timestep_mags, perc), label=f\"{perc}th percentile\")\n",
 "\n",
-"#ax2 = ax.twinx()\n",
-"#ax2.hist(timestep_mags, 200)\n",
-"#ax2.set_ylim(0, ax2.get_ylim()[1])\n",
+"ax2 = ax.twinx()\n",
+"ax2.hist(timestep_mags, 200)\n",
+"ax2.set_ylim(0, ax2.get_ylim()[1])\n",
 "\n",
 "ax.set_title(\"Time Distribution for E2E Breakdown\")\n",
 "ax.set_xlabel(\"Time [s]\")\n",
@@ -1143,9 +1106,7 @@
 "%%skip_if_false E2E_PLOT\n",
 "\n",
 "from tracing_interop.tr_types import TrSubscription\n",
-"from matching.subscriptions import sanitize\n",
 "import matplotlib.patches as mpatch\n",
-"from matplotlib.text import Text\n",
 "import math\n",
 "\n",
 "tree = trees[E2E_PLOT_TIMESTAMP]\n",
@@ -1341,7 +1302,6 @@
 "\n",
 "from termcolor import colored\n",
 "from tqdm import tqdm\n",
-"from matching.subscriptions import sanitize\n",
 "\n",
 "critical_paths = {}\n",
 "print(len(relevant_trees))\n",

View file

@@ -1,7 +1,5 @@
-import sys
 import pandas as pd
-from tqdm.notebook import tqdm
+from tqdm import tqdm
 def row_to_type(row, type, **type_kwargs):
@@ -15,7 +13,7 @@ def df_to_type_list(df, type, mappers=None, **type_kwargs):
     has_idx = not isinstance(df.index, pd.RangeIndex)
     ret_list = []
-    i=0
     for row in tqdm(df.itertuples(index=has_idx), desc=f" ├─ Processing {type.__name__}s", total=len(df)):
         row_dict = row._asdict()
         if has_idx:
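
For reference, the loop above relies on DataFrame.itertuples() yielding namedtuples whose _asdict() provides keyword arguments for the target type; a standalone example of that idiom (the Topic dataclass is illustrative, not a type from this repo):

from dataclasses import dataclass
import pandas as pd

@dataclass
class Topic:  # illustrative stand-in for the TrTopic-style row types
    name: str
    size: int

df = pd.DataFrame({"name": ["/clock", "/tf"], "size": [8, 64]})
topics = [Topic(**row._asdict()) for row in df.itertuples(index=False)]
print(topics)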