dataflow-analysis/trace-analysis.ipynb

1494 lines
255 KiB
Text
Raw Normal View History

{
"cells": [
{
"cell_type": "code",
2022-05-31 16:49:47 +02:00
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import sys\n",
"import numpy as np\n",
"import pandas as pd\n",
"from matplotlib import pyplot as plt\n",
"\n",
"from IPython.display import display, clear_output\n",
"\n",
2022-06-28 10:06:42 +02:00
"sys.path.append(\"../ros2_tracing/tracetools_read\")\n",
"sys.path.append(\"../tracetools_analysis/tracetools_analysis\")\n",
"#from tracetools_read.trace import *\n",
"from tracetools_analysis.loading import load_file\n",
"from tracetools_analysis.processor.ros2 import Ros2Handler\n",
2022-05-30 16:51:06 +02:00
"from tracetools_analysis.utils.ros2 import Ros2DataModelUtil\n",
"\n",
"from dataclasses import dataclass\n",
"from typing import List, Dict, Set, Union, Tuple\n",
2022-05-30 16:51:06 +02:00
"from functools import cached_property\n",
"import pickle\n",
2022-05-31 16:49:47 +02:00
"import re\n",
"\n",
"from utils import ProgressPrinter"
]
},
{
"cell_type": "code",
2022-05-31 16:49:47 +02:00
"execution_count": 2,
"metadata": {},
2022-06-03 14:57:54 +02:00
"outputs": [],
"source": [
"def pkl_filename_from_file_timestamp(file_path):\n",
" if os.path.exists(file_path):\n",
" timestamp = os.path.getmtime(file_path)\n",
" pkl_filename = f\"ros_objects_{hash(timestamp)}.pkl\"\n",
" return pkl_filename, os.path.exists(pkl_filename)\n",
" return None, False\n",
"\n",
2022-06-28 10:06:42 +02:00
"path = os.path.expanduser(\"./\")\n",
2022-05-30 16:51:06 +02:00
"path_converted = os.path.join(path, 'converted')\n",
"pkl_filename, pkl_exists = pkl_filename_from_file_timestamp(path_converted)\n",
"\n",
"if not pkl_exists:\n",
2022-05-30 16:51:06 +02:00
" file = load_file(path)\n",
" handler = Ros2Handler.process(file)\n",
" util = Ros2DataModelUtil(handler)\n",
" pkl_filename, pkl_exists = pkl_filename_from_file_timestamp(path_converted)"
]
},
{
"cell_type": "code",
2022-05-31 16:49:47 +02:00
"execution_count": 3,
"metadata": {},
2022-05-30 16:51:06 +02:00
"outputs": [],
"source": [
"if False:\n",
2022-06-28 10:06:42 +02:00
" n=3\n",
2022-05-30 16:51:06 +02:00
" self = handler.data\n",
" print('====================ROS 2 DATA MODEL===================')\n",
" print('██ Contexts: ██')\n",
" print(self.contexts[:n].to_string())\n",
" print('██ Nodes: ██')\n",
" print(self.nodes[:n].to_string())\n",
" print('██ Publishers (rmw): ██')\n",
" print(self.rmw_publishers[:n].to_string())\n",
" print('██ Publishers (rcl): ██')\n",
" print(self.rcl_publishers[:n].to_string())\n",
" print('██ Subscriptions (rmw): ██')\n",
" print(self.rmw_subscriptions[:n].to_string())\n",
" print('██ Subscriptions (rcl): ██')\n",
" print(self.rcl_subscriptions[:n].to_string())\n",
" print('██ Subscription objects: ██')\n",
" print(self.subscription_objects[:n].to_string())\n",
" print('██ Services: ██')\n",
" print(self.services[:n].to_string())\n",
" print('██ Clients: ██')\n",
" print(self.clients[:n].to_string())\n",
" print('██ Timers: ██')\n",
" print(self.timers[:n].to_string())\n",
" print('██ Timer-node links: ██')\n",
" print(self.timer_node_links[:n].to_string())\n",
" print('██ Callback objects: ██')\n",
" print(self.callback_objects[:n].to_string())\n",
" print('██ Callback symbols: ██')\n",
" print(self.callback_symbols[:n].to_string())\n",
" print('██ Callback instances: ██')\n",
" print(self.callback_instances[:n].to_string())\n",
" print('██ Publish instances (rclcpp): ██')\n",
" print(self.rclcpp_publish_instances[:n].to_string())\n",
" print('██ Publish instances (rcl): ██')\n",
" print(self.rcl_publish_instances[:n].to_string())\n",
" print('██ Publish instances (rmw): ██')\n",
" print(self.rmw_publish_instances[:n].to_string())\n",
" print('██ Take instances (rmw): ██')\n",
" print(self.rmw_take_instances[:n].to_string())\n",
" print('██ Take instances (rcl): ██')\n",
" print(self.rcl_take_instances[:n].to_string())\n",
" print('██ Take instances (rclcpp): ██')\n",
" print(self.rclcpp_take_instances[:n].to_string())\n",
" print('██ Lifecycle state machines: ██')\n",
" print(self.lifecycle_state_machines[:n].to_string())\n",
" print('██ Lifecycle transitions: ██')\n",
" print(self.lifecycle_transitions[:n].to_string())\n",
" print('==================================================')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data Structures"
]
},
{
"cell_type": "code",
2022-05-31 16:49:47 +02:00
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def str_to_cls(classname):\n",
" return getattr(sys.modules[__name__], classname)\n",
"\n",
"def row_to_type(row, type, has_idx):\n",
" return type(id=row.name, **row) if has_idx else type(**row)\n",
"\n",
"def df_to_type_list(df, type):\n",
" if isinstance(type, str):\n",
" type = str_to_cls(type)\n",
" \n",
" has_idx = not isinstance(df.index, pd.RangeIndex)\n",
" return [row_to_type(row, type, has_idx) for _, row in df.iterrows()]\n",
"\n",
"def by_index(df, index, type):\n",
" return df_to_type_list(df.loc[index], type)\n",
"\n",
"def by_column(df, column_name, column_val, type):\n",
" return df_to_type_list(df[df[column_name] == column_val], type)\n",
"\n",
"def list_to_dict(ls, key='id'):\n",
" return {getattr(item, key): item for item in ls}\n",
"\n",
"#################################\n",
"# Predefined (from ROS2DataModel)\n",
"#################################\n",
"\n",
"@dataclass\n",
"class Node:\n",
" id: int\n",
" timestamp: int\n",
" tid: int\n",
" rmw_handle: int\n",
" name: str\n",
" namespace: str\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def path(self) -> str:\n",
2022-06-28 10:06:42 +02:00
" return '/'.join((self.namespace, self.name))\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def publishers(self) -> List['Publisher']:\n",
" return list(filter(lambda pub: pub.node_handle == self.id, publishers.values()))\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def subscriptions(self) -> List['Subscription']:\n",
" return list(filter(lambda sub: sub.node_handle == self.id, subscriptions.values()))\n",
" \n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def timers(self) -> List['Timer']:\n",
" links = [link.id for link in timer_node_links.values() if link.node_handle == self.id]\n",
" return list(filter(lambda timer: timer.id in links, timers.values()))\n",
"\n",
" def __hash__(self):\n",
" return hash(self.id)\n",
"\n",
"@dataclass\n",
"class Publisher:\n",
" id: int\n",
" timestamp: int\n",
" node_handle: int\n",
" rmw_handle: int\n",
" topic_name: str\n",
" depth: int\n",
"\n",
" @property\n",
" def node(self) -> 'Node':\n",
" return nodes[self.node_handle]\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def subscriptions(self) -> List['Subscription']:\n",
" return list(filter(lambda sub: sub.topic_name == self.topic_name, subscriptions.values()))\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def instances(self) -> List['PublishInstance']:\n",
" return list(filter(lambda inst: inst.publisher_handle == self.id, publish_instances))\n",
" \n",
" @property\n",
" def topic(self) -> 'Topic':\n",
" return topics[self.topic_name]\n",
"\n",
" def __hash__(self):\n",
" return hash(self.id)\n",
"\n",
"\n",
"@dataclass\n",
"class Subscription:\n",
" id: int\n",
" timestamp: int\n",
" node_handle: int\n",
" rmw_handle: int\n",
" topic_name: str\n",
" depth: int\n",
"\n",
" @property\n",
" def node(self) -> 'Node':\n",
" return nodes[self.node_handle]\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def publishers(self) -> List['Publisher']:\n",
" return list(filter(lambda pub: pub.topic_name == self.topic_name, publishers.values()))\n",
" \n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def subscription_object(self) -> 'SubscriptionObject':\n",
" sub_objs = list(filter(lambda sub_obj: sub_obj.subscription_handle == self.id, subscription_objects.values()))\n",
" assert len(sub_objs) <= 1\n",
" return sub_objs[0] if sub_objs else None\n",
"\n",
" @property\n",
" def topic(self) -> 'Topic':\n",
" return topics[self.topic_name]\n",
"\n",
" def __hash__(self):\n",
" return hash(self.id)\n",
" \n",
"@dataclass\n",
"class Timer:\n",
" id: int\n",
" timestamp: int\n",
" period: int\n",
" tid: int\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def nodes(self) -> List['Node']:\n",
" links = [link.node_handle for link in timer_node_links.values() if link.id == self.id]\n",
" return list(filter(lambda node: node.id in links, nodes.values()))\n",
" \n",
" @property\n",
" def callback_object(self) -> 'CallbackObject':\n",
" return callback_objects[self.id]\n",
"\n",
" def __hash__(self):\n",
" return hash(self.id)\n",
"\n",
"@dataclass\n",
"class TimerNodeLink:\n",
" id: int\n",
" timestamp: int\n",
" node_handle: int\n",
"\n",
"@dataclass\n",
"class SubscriptionObject:\n",
" id: int # subscription\n",
" timestamp: int\n",
" subscription_handle: int\n",
"\n",
" @property\n",
" def subscription(self) -> 'Subscription':\n",
" return subscriptions[self.subscription_handle]\n",
"\n",
" @property\n",
" def callback_object(self) -> 'CallbackObject':\n",
" return callback_objects[self.id]\n",
"\n",
" def __hash__(self):\n",
" return hash((self.id, self.timestamp, self.subscription_handle))\n",
"\n",
"@dataclass\n",
"class CallbackObject:\n",
" id: int # (reference) = subscription_object.id | timer.id | ....\n",
" timestamp: int\n",
" callback_object: int\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def callback_instances(self) -> List['CallbackInstance']:\n",
" return list(filter(lambda inst: inst.callback_object == self.callback_object, callback_instances))\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def owner(self):\n",
" if self.id in timers:\n",
" return timers[self.id]\n",
" if self.id in publishers:\n",
" return publishers[self.id]\n",
" if self.id in subscription_objects:\n",
" return subscription_objects[self.id]\n",
" if self.id in handler.data.services.index:\n",
" return 'Service'\n",
" if self.id in handler.data.clients.index:\n",
" return 'Client'\n",
" return None\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def owner_info(self):\n",
" info = util.get_callback_owner_info(self.callback_object)\n",
" if info is None:\n",
" return None, None\n",
" \n",
" type_name, dict_str = info.split(\" -- \")\n",
" kv_strs = dict_str.split(\", \")\n",
" info_dict = {k: v for k, v in map(lambda kv_str: kv_str.split(\": \", maxsplit=1), kv_strs)}\n",
" return type_name, info_dict\n",
"\n",
" def __hash__(self):\n",
" return hash((self.id, self.timestamp, self.callback_object))\n",
"\n",
"@dataclass\n",
"class PublishInstance:\n",
" publisher_handle: int\n",
" timestamp: int\n",
" message: int\n",
"\n",
" @property\n",
" def publisher(self) -> 'Publisher':\n",
" return publishers[self.publisher_handle]\n",
"\n",
" def __hash__(self):\n",
" return hash((self.publisher_handle, self.timestamp, self.message))\n",
"\n",
"@dataclass\n",
"class CallbackInstance:\n",
" callback_object: int\n",
" timestamp: pd.Timestamp\n",
" duration: pd.Timedelta\n",
" intra_process: bool\n",
"\n",
" @property\n",
" def callback_obj(self) -> 'CallbackObject':\n",
" return callback_objects[self.callback_object]\n",
"\n",
" def __hash__(self):\n",
" return hash((self.callback_object, self.timestamp, self.duration))\n",
"\n",
"@dataclass\n",
"class CallbackSymbol:\n",
" id: int # callback_object\n",
" timestamp: int\n",
" symbol: str\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def callback_obj(self) -> 'CallbackObject':\n",
" cb_objs = list(filter(lambda cb_obj: cb_obj.callback_object == self.id, callback_objects.values()))\n",
" assert len(cb_objs) <= 1\n",
" return cb_objs[0] if cb_objs else None\n",
"\n",
" def __hash__(self):\n",
" return hash((self.id, self.timestamp, self.symbol))\n",
"\n",
"\n",
"#######################################\n",
"# Self-defined (not from ROS2DataModel)\n",
"#######################################\n",
"\n",
"@dataclass\n",
"class Topic:\n",
" name: str\n",
"\n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def publishers(self) -> List['Publisher']:\n",
" return list(filter(lambda pub: pub.topic_name == self.name, publishers.values()))\n",
" \n",
2022-05-30 16:51:06 +02:00
" @cached_property\n",
" def subscriptions(self) -> List['Subscription']:\n",
" return list(filter(lambda sub: sub.topic_name == self.name, subscriptions.values()))\n",
"\n",
" def __hash__(self):\n",
" return hash(self.name)\n"
]
},
{
"cell_type": "code",
2022-05-31 16:49:47 +02:00
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2022-06-03 14:57:54 +02:00
"Found pickled ROS objects from previous session, restoring...\n",
2022-05-30 16:51:06 +02:00
"Done.\n"
]
}
],
"source": [
"\n",
"if not pkl_exists:\n",
2022-05-30 16:51:06 +02:00
" print(\"Did not find pickled ROS objects, extracting...\")\n",
" #######################################\n",
" # Instantiate collections\n",
" #######################################\n",
"\n",
" nodes: Dict[int, 'Node'] = list_to_dict(df_to_type_list(handler.data.nodes, 'Node')); print(f\"Processed {len(nodes):<8d} nodes\")\n",
" publishers: Dict[int, 'Publisher'] = list_to_dict(df_to_type_list(handler.data.rcl_publishers, 'Publisher')); print(f\"Processed {len(publishers):<8d} publishers\")\n",
" subscriptions: Dict[int, 'Subscription'] = list_to_dict(df_to_type_list(handler.data.rcl_subscriptions, 'Subscription')); print(f\"Processed {len(subscriptions):<8d} subscriptions\")\n",
" timers: Dict[int, 'Timer'] = list_to_dict(df_to_type_list(handler.data.timers, 'Timer')); print(f\"Processed {len(timers):<8d} timers\")\n",
" timer_node_links: Dict[int, 'TimerNodeLink'] = list_to_dict(df_to_type_list(handler.data.timer_node_links, 'TimerNodeLink')); print(f\"Processed {len(timer_node_links):<8d} timer-node links\")\n",
" subscription_objects: Dict[int, 'SubscriptionObject'] = list_to_dict(df_to_type_list(handler.data.subscription_objects, 'SubscriptionObject')); print(f\"Processed {len(subscription_objects):<8d} subscription objects\")\n",
" callback_objects: Dict[int, 'CallbackObject'] = list_to_dict(df_to_type_list(handler.data.callback_objects, 'CallbackObject')); print(f\"Processed {len(callback_objects):<8d} callback objects\")\n",
" callback_symbols: Dict[int, 'CallbackSymbol'] = list_to_dict(df_to_type_list(handler.data.callback_symbols, 'CallbackSymbol')); print(f\"Processed {len(callback_symbols):<8d} callback symbols\")\n",
" publish_instances: List['PublishInstance'] = df_to_type_list(handler.data.rcl_publish_instances, 'PublishInstance'); print(f\"Processed {len(publish_instances):<8d} publish instances\")\n",
" callback_instances: List['CallbackInstance'] = df_to_type_list(handler.data.callback_instances, 'CallbackInstance'); print(f\"Processed {len(callback_instances):<8d} callback instances\")\n",
"\n",
" _unique_topic_names = {*(pub.topic_name for pub in publishers.values()), *(sub.topic_name for sub in subscriptions.values())}\n",
" topics: Dict[str, 'Topic'] = list_to_dict(map(Topic, _unique_topic_names), key=\"name\"); print(f\"Processed {len(topics):<8d} topics\")\n",
"\n",
" print(\"Caching dynamic properties...\")\n",
"\n",
" [(o.path, o.publishers, o.subscriptions, o.timers) for o in nodes.values()] ; print(\"Cached node properties\")\n",
" [(o.instances, o.subscriptions) for o in publishers.values()] ; print(\"Cached publisher properties\")\n",
" [(o.publishers, o.subscription_object) for o in subscriptions.values()] ; print(\"Cached subscription properties\")\n",
" [(o.nodes) for o in timers.values()] ; print(\"Cached timer properties\")\n",
" [(o.callback_instances, o.owner, o.owner_info) for o in callback_objects.values()] ; print(\"Cached callback object properties\")\n",
" [(o.callback_obj) for o in callback_symbols.values()] ; print(\"Cached callback symbol properties\")\n",
" [(o.publishers, o.subscriptions) for o in topics.values()] ; print(\"Cached topic properties\")\n",
"\n",
" fields_to_pickle = [\n",
" \"nodes\",\n",
" \"publishers\",\n",
" \"subscriptions\",\n",
" \"timers\",\n",
" \"timer_node_links\",\n",
" \"subscription_objects\",\n",
" \"callback_objects\",\n",
" \"callback_symbols\",\n",
" \"publish_instances\",\n",
" \"callback_instances\",\n",
2022-05-31 16:49:47 +02:00
" \"topics\"\n",
2022-05-30 16:51:06 +02:00
" ]\n",
"\n",
" pkl_dict = {key: globals()[key] for key in fields_to_pickle}\n",
"\n",
" print(\"Pickling...\")\n",
" with open(pkl_filename, \"wb\") as f:\n",
" pickle.dump(pkl_dict, f)\n",
"else:\n",
" print(\"Found pickled ROS objects from previous session, restoring...\")\n",
" with open(pkl_filename, \"rb\") as f:\n",
" pkl_dict = pickle.load(f)\n",
" for k, v in pkl_dict.items():\n",
" globals()[k] = v\n",
"\n",
"print(\"Done.\")\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Callback-Sub & Callback-Timer Links"
]
},
{
"cell_type": "code",
2022-06-28 10:06:42 +02:00
"execution_count": null,
"metadata": {},
2022-06-28 10:06:42 +02:00
"outputs": [],
"source": [
"# Map each callback symbol to its owning callback object and owner metadata.\n",
"sym_table = []\n",
"\n",
"for sym in callback_symbols.values():\n",
"    # Find the (unique) callback object whose callback pointer matches this symbol's id.\n",
"    # The original bare `except:` could print an unbound `cbo` when the lookup itself failed.\n",
"    cbos = [val for val in callback_objects.values() if val.callback_object == sym.id]\n",
"    if len(cbos) != 1:\n",
"        print(len(cbos))\n",
"        continue\n",
"    cbo = cbos[0]\n",
"    owner_info = cbo.owner_info\n",
"\n",
"    if None in owner_info:\n",
"        continue\n",
"    owner_type, info = owner_info  # renamed from `type` to avoid shadowing the builtin\n",
"    sym_table.append((sym, owner_type, info))\n",
"\n",
"sym_table.sort(key=lambda tup: tup[1])\n",
"\n",
"def trim(string, length):\n",
"    \"\"\"Shorten `string` to at most `length` chars, appending '...' when truncated.\"\"\"\n",
"    if len(string) > length:\n",
"        return f\"{string[:length-3]}...\"\n",
"    return string\n",
"\n",
"for sym, owner_type, info in sym_table:\n",
"    sym: CallbackSymbol\n",
"    pretty_sym = Ros2DataModelUtil._prettify(None, sym.symbol)\n",
"    pretty_sym = re.sub(r\"std::shared_ptr<(.*?) *(const)?>\", r\"\\1*\", pretty_sym)\n",
"    try:\n",
"        i = len(sym.callback_obj.callback_instances)\n",
"    except (KeyError, AttributeError):  # callback_obj can be None -> AttributeError\n",
"        i = -1\n",
"    print(f\"{trim(pretty_sym, 100):100s}: i={i:>4d} {owner_type:12s} n={info['node']:40s}\", end=' ') \n",
"    if owner_type == 'Timer':\n",
"        print(f\"p={info['period']:7s}\")\n",
"    elif owner_type == 'Subscription':\n",
"        print(f\"t={info['topic']:30s}\")\n",
"    else:\n",
"        print()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Topic-Node Mapping"
]
},
{
"cell_type": "code",
2022-06-28 10:06:42 +02:00
"execution_count": null,
"metadata": {},
2022-06-28 10:06:42 +02:00
"outputs": [],
"source": [
"# Aggregate topics that have the same pubs and subs\n",
"topic_cohorts = {}\n",
"for topic in topics.values():\n",
" key = (frozenset({*(pub.node_handle for pub in topic.publishers)}), frozenset({*(sub.node_handle for sub in topic.subscriptions)}))\n",
" if key not in topic_cohorts:\n",
" topic_cohorts[key] = []\n",
" topic_cohorts[key].append(topic)\n",
"\n",
"print(f\"{len(topics)} topics were aggregated into {len(topic_cohorts)} cohorts\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Timer-Node Mapping"
]
},
{
"cell_type": "code",
2022-06-28 10:06:42 +02:00
"execution_count": null,
"metadata": {},
2022-06-28 10:06:42 +02:00
"outputs": [],
"source": [
"unknowns = {}\n",
"\n",
"def print_node_timer(node_path, period):\n",
"    \"\"\"Print a timer's owning node path and its frequency (period given in ns).\"\"\"\n",
"    # Guard against zero-period timers, which previously raised ZeroDivisionError.\n",
"    freq = 1 / (period * 1e-9) if period else float('inf')\n",
"    print(f\"{node_path:<90s}: {freq:8.2f}Hz\")\n",
"\n",
"for timer in timers.values():\n",
"    timer_nodes = timer.nodes\n",
"    if not timer_nodes:\n",
"        if timer.period not in unknowns:\n",
"            unknowns[timer.period] = 0\n",
"        unknowns[timer.period] += 1\n",
"\n",
"    for node in timer_nodes: print_node_timer(node.path, timer.period)\n",
"    \n",
"for period, count in unknowns.items():\n",
"    print_node_timer(f\"UNKNOWN (x{count})\", period)\n",
"\n",
"n_unknown = sum(unknowns.values()) # Values are counts per period\n",
"print(f\"Found {len(timers) - n_unknown} timers with a recorded node, {n_unknown} without.\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Measure Frequency Deviations"
]
},
{
"cell_type": "code",
2022-06-28 10:06:42 +02:00
"execution_count": null,
"metadata": {},
2022-06-28 10:06:42 +02:00
"outputs": [],
"source": [
"# Get Publisher frequencies\n",
"df_publications = handler.data.rcl_publish_instances\n",
"pub_stats = {}\n",
"unknown = 0\n",
"for pi in publish_instances:\n",
" try:\n",
" pub = pi.publisher\n",
" except KeyError:\n",
" unknown += 1\n",
" continue\n",
" if pub.id not in pub_stats:\n",
" pub_stats[pub.id] = {'times': []}\n",
" pub_stats[pub.id]['times'].append(pi.timestamp*1e-9) # Nanoseconds to seconds float\n",
"\n",
"print(f\"{unknown} unknown publisher handles ({len(pub_stats)} known ones)\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Plot Frequency Deviations"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"fig_dirname = \"fig_frequency\"\n",
"os.makedirs(fig_dirname, exist_ok=True)\n",
"for i, (k, v) in enumerate(sorted(pub_stats.items(), key=lambda kv: len(kv[1]['times']), reverse=True)):\n",
" pub_time_diff = np.diff(np.array(v['times']))\n",
" v['period'] = pub_time_diff.mean()\n",
" v['period_std'] = pub_time_diff.std()\n",
" v['frequency'] = 1 / v['period']\n",
" v['frequency_std'] = (1/pub_time_diff).std()\n",
"\n",
" try:\n",
" publisher = publishers[k]\n",
" publisher_node = publisher.node\n",
" topic_name = publisher.topic_name\n",
" node_path = publisher_node.path\n",
" except Exception:\n",
" topic_name=\"UNKNOWN\"\n",
" node_path=\"UNKNOWN\"\n",
" \n",
"    fig = plt.figure(figsize=(15,5))\n",
"    ax = fig.add_subplot()\n",
"    ax.hist(1/pub_time_diff)\n",
"    ax.set_xlabel(\"Publication Frequency [Hz]\")\n",
"    ax.set_ylabel(\"#Publications\")\n",
"    ax.set_title(f\"{node_path} =({v['frequency']:.2f}Hz)=> {topic_name}\")\n",
"    plt.savefig('/'.join((fig_dirname, f\"{i:06}{node_path}__{topic_name}\".replace('/','-'))))\n",
"    plt.close(fig)  # avoid accumulating one open figure per publisher in this loop\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data Flow Graph"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"node_filters = [\"transform_listener_impl\", \"_monitor\"]\n",
"topic_filters = [\"/rosout\", \"/parameter_events\", \"/diagnostics\"]\n",
"\n",
"from pyvis.network import Network\n",
"net = Network(notebook=True, height='750px', width='100%', bgcolor='#ffffff', font_color='#000000')\n",
"\n",
"net.add_node(\"INPUT\", label=\"Input\", size=100, color=\"green\", physics=False, x=0, y=0)\n",
"net.add_node(\"OUTPUT\", label=\"Output\", size=100, color=\"red\", physics=False, x=6000, y=0)\n",
"\n",
"\n",
"for node in nodes.values():\n",
" if any(f in node.path for f in node_filters): \n",
" continue\n",
" net.add_node(node.id, label=node.name, title=node.path, size=20, color=\"#333\")\n",
"\n",
"for cohort_key, cohort_topics in topic_cohorts.items():\n",
" cohort_topic_names = [topic.name for topic in cohort_topics if not any(f in topic.name for f in topic_filters)]\n",
" if not cohort_topic_names: \n",
" continue\n",
" cohort_id=\"\\n\".join(cohort_topic_names)\n",
" cohort_weight=len(cohort_topic_names)\n",
" net.add_node(cohort_id, label=\" \", title=cohort_id, size=5, color=\"#333\")\n",
" \n",
" pubs = cohort_key[0]\n",
" subs = cohort_key[1]\n",
" n_pubs = len(pubs)\n",
" n_subs = len(subs)\n",
" \n",
"    try:\n",
"        if not n_pubs:\n",
"            net.add_edge(\"INPUT\", cohort_id, arrows=\"to\", color=\"green\", weight=cohort_weight)\n",
"        if not n_subs:\n",
"            net.add_edge(cohort_id, \"OUTPUT\", arrows=\"to\", color=\"red\", weight=cohort_weight)\n",
"\n",
"        for pub in pubs:\n",
"            net.add_edge(pub, cohort_id, arrows=\"to\", color=\"green\", weight=cohort_weight)\n",
"        for sub in subs:\n",
"            net.add_edge(cohort_id, sub, arrows=\"to\", color=\"red\", weight=cohort_weight)\n",
"    except Exception:  # endpoint node was filtered out above; bare except also swallowed KeyboardInterrupt\n",
"        continue\n",
"\n",
"net.toggle_physics(True)\n",
"net.show_buttons()\n",
"net.show(\"graph.html\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Pub-Use Latencies\n",
"Compute for each node and its data dependencies the list of pub-use delays (per-topic-per-node list of pub-use delays)"
]
},
{
"cell_type": "code",
2022-06-28 10:06:42 +02:00
"execution_count": null,
"metadata": {},
2022-06-28 10:06:42 +02:00
"outputs": [],
"source": [
"def filter_none(ls):\n",
" return filter(lambda x: x is not None, ls)\n",
"\n",
"def safe_map(func, ls):\n",
"    \"\"\"Like map(), but yields None where `func` raises instead of propagating.\"\"\"\n",
"    def safe_func(arg):\n",
"        try:\n",
"            return func(arg)\n",
"        except Exception:  # bare except would also swallow KeyboardInterrupt/SystemExit\n",
"            return None\n",
"    \n",
"    return map(safe_func, ls)\n",
"\n",
"pub_use_delays = {node.id: {\n",
" 'pubs': {}, \n",
" 'invocations': {}, \n",
" 'n_unknown_invocations': 0, \n",
" 'n_pub_timestamps': 0\n",
" } for node in nodes.values()}\n",
"\n",
"for node in nodes.values():\n",
" node_pub_use_dict = pub_use_delays[node.id]\n",
" timestamp_min = np.inf; timestamp_max = 0\n",
"\n",
" n_pub_timestamps = 0\n",
" for sub in node.subscriptions:\n",
" node_pub_use_dict['pubs'][sub.topic_name] = {}\n",
" for pub in sub.publishers:\n",
" pub_timestamps = [inst.timestamp for inst in pub.instances]\n",
"\n",
" try:\n",
" pub_t_min = min(pub_timestamps); pub_t_max = max(pub_timestamps)\n",
" except ValueError:\n",
" pub_t_min = np.inf; pub_t_max = 0\n",
" \n",
" if pub_t_min < timestamp_min: timestamp_min = pub_t_min\n",
" if pub_t_max > timestamp_max: timestamp_max = pub_t_max\n",
"\n",
" node_pub_use_dict['pubs'][sub.topic_name][pub.node.path] = pub_timestamps\n",
" node_pub_use_dict['n_pub_timestamps'] += len(pub_timestamps)\n",
"\n",
" timer_cb_objs = list(filter_none(safe_map(lambda timer: timer.callback_object, node.timers)))\n",
" subsc_cb_objs = list(filter_none(safe_map(lambda subsc: subsc.subscription_object.callback_object, node.subscriptions)))\n",
"\n",
" print(f\"{node.path:95s} has {len(timer_cb_objs):1d} timer callbacks, {len(subsc_cb_objs):2d} subscription callbacks, {len(node_pub_use_dict['pubs']):2d} subscribed topics.\")\n",
"\n",
" node_invocations = node_pub_use_dict['invocations']\n",
"\n",
" for cb_obj in timer_cb_objs + subsc_cb_objs:\n",
" cb_invocations = []\n",
" for inst in cb_obj.callback_instances:\n",
" cb_invocations.append((inst.timestamp, inst.duration))\n",
"\n",
" node_invocations[cb_obj.id] = cb_invocations"
]
},
{
"cell_type": "code",
2022-05-30 16:51:06 +02:00
"execution_count": null,
"metadata": {},
2022-05-30 16:51:06 +02:00
"outputs": [],
"source": [
"from matplotlib import cm\n",
"\n",
"fig_dirname = \"fig_pub_use\"\n",
"os.makedirs(fig_dirname, exist_ok=True)\n",
"plt.close('all')\n",
"\n",
"node_filters=[]#\"transform_listener_impl\",]\n",
"\n",
"nodes_filtered = [node for node in nodes.values() if not any(f in node.path for f in node_filters)]\n",
"print(f\"Ignoring {len(nodes.values()) - len(nodes_filtered)} nodes due to filters.\")\n",
"\n",
"common_offset = min(map(lambda cb_inst: cb_inst.timestamp.timestamp(), callback_instances))\n",
"\n",
"zero_color = cm.get_cmap('viridis')(0.0)\n",
"\n",
"for node_i, (node, node_path, node_pub_use_dict) in enumerate(map(lambda node: (node, node.path, pub_use_delays[node.id]), nodes_filtered)):\n",
"\n",
" if not node_pub_use_dict['invocations']:\n",
" print(f\"{node_path:95s} has no invocations, skipping.\")\n",
" continue\n",
"\n",
" if len(node_pub_use_dict['pubs']) == 0:\n",
" print(f\"Skipping {node_path}, no publications\")\n",
" continue\n",
"\n",
" fig = plt.figure(figsize=(15,5))\n",
" ax: plt.Axes = fig.add_subplot()\n",
"\n",
" max_pubs_per_topic = max(len(pubs) for pubs in node_pub_use_dict['pubs'].values())\n",
" topic_names, topic_pubs = (zip(*node_pub_use_dict['pubs'].items()))\n",
"\n",
" vmin = 0; vmax = max_pubs_per_topic\n",
"\n",
" y_labels = []\n",
" current_y = 0\n",
"\n",
" for invoc_i, (cb_obj_id, cb_invocations) in enumerate(node_pub_use_dict['invocations'].items()):\n",
"        try:\n",
"            cb_obj = callback_objects[cb_obj_id]\n",
"            sym = callback_symbols[cb_obj.callback_object].symbol\n",
"            sym = Ros2DataModelUtil._prettify(None, sym)\n",
"            sym = re.sub(r\"std::shared_ptr<(.*?)>\", r\"\\1*\", sym)\n",
"\n",
"            cb_owner = cb_obj.owner\n",
"            if isinstance(cb_owner, Timer):\n",
"                cb_type = \"T\"\n",
"            elif isinstance(cb_owner, SubscriptionObject):\n",
"                cb_type = \"S\"\n",
"            else:  # owner unknown/None: previously left cb_type unbound -> NameError below\n",
"                cb_type = \"U\"\n",
"        except (KeyError, AttributeError):  # `except KeyError or AttributeError` only caught KeyError\n",
"            sym = \"UNKNOWN\"\n",
"            cb_type = \"U\"\n",
" \n",
" y_labels.append(f\"{sym} {cb_type}\")\n",
" n_markers = len(cb_invocations)\n",
"\n",
" points_x = []; points_y = []\n",
" for time, dur in cb_invocations:\n",
" time = time.timestamp() - common_offset; dur = dur.total_seconds()\n",
" points_x += [time, time+dur, None]\n",
" points_y += [current_y, current_y, 0.0]\n",
" \n",
" ax.plot(points_x,points_y, marker='.', c=zero_color)\n",
" current_y += 1\n",
"\n",
" n_cbs = current_y\n",
"\n",
" for topic_i, (topic_name, pubs) in enumerate(zip(topic_names, topic_pubs)):\n",
" for pub_i, (pub_name, timestamps) in enumerate(pubs.items()):\n",
" n_markers = len(timestamps)\n",
" ax.scatter(np.array(timestamps)*1e-9 - common_offset, (current_y,) * n_markers, marker='.', c=(pub_i,) * n_markers, vmin=vmin, vmax=vmax)\n",
" \n",
" y_labels.append(topic_name)\n",
" current_y += 1\n",
" \n",
" trigger_strs = []\n",
" t = node.timers\n",
" if t:\n",
" n_timers = len(t)\n",
" freqs = map(lambda timer: 1 / (timer.period*1e-9), t)\n",
" trigger_strs.append(f\"{n_timers} timer{'s' if n_timers != 1 else ''}, {'Hz, '.join((f'{freq:.0f}' for freq in freqs))}Hz\")\n",
" if node.subscriptions:\n",
" n_subs = len(node.subscriptions)\n",
" trigger_strs.append(f\"{n_subs} subscription{'s' if n_subs != 1 else ''}\")\n",
"\n",
" ax.set_xlabel(\"Publication / Invocation Timestamp [s]\")\n",
" ax.set_ylabel(\"Topic\")\n",
" ax.set_yticks(range(current_y))\n",
" ax.set_yticklabels(y_labels)\n",
" ax.set_ylim(0 - .1, current_y - 1 + .1)\n",
" ax.set_title(f\"{node_path} ({'; '.join(trigger_strs)})\")\n",
" ax.set_xlim(50, 50.25)\n",
"\n",
"    ax.hlines(n_cbs - 0.5, *ax.get_xlim(), linestyles='dashed')\n",
"    plt.savefig(os.path.join(fig_dirname, f\"{node_i:06}{node_path}\".replace('/','-')))\n",
"    plt.close(fig)  # avoid accumulating one open figure per node in this loop"
]
},
2022-05-30 16:51:06 +02:00
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# E2E Latency Calculation"
2022-05-30 16:51:06 +02:00
]
},
{
"cell_type": "code",
2022-06-28 10:06:42 +02:00
"execution_count": 6,
"metadata": {},
2022-06-28 10:06:42 +02:00
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(483/483) Processing done. \n"
]
}
],
2022-05-30 16:51:06 +02:00
"source": [
"#################################################\n",
"# Data structures & helpers\n",
"#################################################\n",
"\n",
"LatencyStats = pd.Series\n",
"\n",
"@dataclass\n",
"class LatencyGraph:\n",
"    # Dataflow graph over callbacks: vertices are callbacks, edges carry the\n",
"    # connecting topic plus the measured publish-to-use latency series.\n",
"    verts: Set[CallbackObject]\n",
"    edges: Dict[Tuple[CallbackObject, CallbackObject], Tuple[Topic, LatencyStats]]\n",
"    starts: Dict[CallbackObject, Topic]\n",
"    ends: Dict[CallbackObject, Topic]\n",
"\n",
"def pub_use_latencies(cb_instances: List[CallbackInstance], pub_instances: List[PublishInstance]):\n",
"    \"\"\"\n",
"    For each callback invocation, compute the delay since the latest preceding\n",
"    publication. Returns a Series indexed by callback invocation time (seconds);\n",
"    NaN where no publication precedes the invocation or none exist at all.\n",
"    \"\"\"\n",
"    cb_times = sorted([inst.timestamp.timestamp() for inst in cb_instances])\n",
"\n",
"    if not pub_instances:\n",
"        return pd.Series(np.full(len(cb_instances), np.nan), index=cb_times)\n",
"\n",
"    pub_times = np.array(sorted([pub.timestamp * 1e-9 for pub in pub_instances]))\n",
"\n",
"    pub_use_lats = np.array([cb_time - np.max(pub_times[pub_times < cb_time], initial=-np.inf) for cb_time in cb_times])\n",
"    pub_use_lats[np.isposinf(pub_use_lats)] = np.nan  # +inf means no preceding publication\n",
"    ret_series = pd.Series(pub_use_lats, index=cb_times)\n",
"    return ret_series\n",
"\n",
"def inst_runtime_interval(cb_inst):\n",
"    # (start, end) wall-clock interval of one callback instance, in seconds.\n",
"    inst_t_min = cb_inst.timestamp.timestamp()\n",
"    inst_t_max = inst_t_min + cb_inst.duration.total_seconds()\n",
"    return (inst_t_min, inst_t_max)\n",
"\n",
"def count_pub_insts_in_intervals(cb_intervals: List[Tuple[float, float]], pub_insts: List[PublishInstance]):\n",
"    \"\"\"\n",
"    Counts number of publication instances that lie within one of the cb_intervals.\n",
"    \"\"\"\n",
"    pub_timestamps = [inst.timestamp * 1e-9 for inst in pub_insts]\n",
"\n",
"    # Algorithm: Two-pointer method\n",
"    # With both the pub_timestamps and cb_intervals sorted ascending,\n",
"    # we can cut down the O(m*n) comparisons to O(m+n).\n",
"    pub_timestamps.sort()\n",
"    cb_intervals.sort(key=lambda tup: tup[0])\n",
"\n",
"    n_overlaps = 0\n",
"    cb_iter = iter(cb_intervals)\n",
"    pub_iter = iter(pub_timestamps)\n",
"    (t_min, t_max) = next(cb_iter, (None, None))\n",
"    t_pub = next(pub_iter, None)\n",
"\n",
"    while t_pub is not None and t_min is not None:\n",
"        if t_min <= t_pub <= t_max:  # Publication in interval: count, advance pub (one interval can hold several pubs)\n",
"            n_overlaps += 1\n",
"            t_pub = next(pub_iter, None)\n",
"        elif t_pub < t_min:  # Publication before interval: advance publication\n",
"            t_pub = next(pub_iter, None)\n",
"        else:  # Interval before publication: advance interval\n",
"            (t_min, t_max) = next(cb_iter, (None, None))\n",
"\n",
"    return n_overlaps\n",
"\n",
"#################################################\n",
"# Identify input and output topics\n",
"#################################################\n",
"\n",
"# Topics nobody publishes are system inputs; topics nobody subscribes to are system outputs.\n",
"in_topics = [t for t in topics.values() if not t.publishers]\n",
"out_topics = [t for t in topics.values() if not t.subscriptions]\n",
"\n",
"#################################################\n",
"# For each node, work out dependencies and\n",
"# publications of each callback\n",
"#################################################\n",
"\n",
"cb_to_scored_pub: Dict[CallbackObject, Set[Tuple[Publisher, float]]] = {}\n",
"topic_to_dep_cb: Dict[Topic, Set[CallbackObject]] = {}\n",
"pub_cb_to_lat_stats: Dict[Tuple[Publisher, CallbackObject], LatencyStats] = {}\n",
"\n",
"with ProgressPrinter(\"Processing\", len(callback_objects)) as p:\n",
"    for cb in callback_objects.values():\n",
"        p.step(Ros2DataModelUtil._prettify(None, callback_symbols[cb.callback_object].symbol) if cb.callback_object in callback_symbols else str(cb.id))\n",
"        # Find topics the callback EXPLICITLY depends on\n",
"        # - Timer callbacks: no EXPLICIT dependencies\n",
"        # - Subscription callbacks: callback depends on the subscribed topic. Possibly also has other IMPLICIT dependencies\n",
"\n",
"        if type(cb.owner).__name__ == SubscriptionObject.__name__:\n",
"            owner_node = cb.owner.subscription.node\n",
"            dep_topics = [cb.owner.subscription.topic,]\n",
"        elif type(cb.owner).__name__ == Timer.__name__:\n",
"            owner_nodes = cb.owner.nodes\n",
"            if len(owner_nodes) != 1:\n",
"                raise ValueError(\"Timer has more than one owner!\")\n",
"            # BUGFIX: owner_node was never assigned on this branch, so the\n",
"            # publisher scan below silently reused the node of a previously\n",
"            # handled subscription callback (or raised NameError first time).\n",
"            owner_node = owner_nodes[0]\n",
"            dep_topics = []\n",
"        elif cb.owner is None:\n",
"            dep_topics = []\n",
"            continue\n",
"        else:\n",
"            raise RuntimeError(f\"Callback owners other than timers/subscriptions cannot be handled: {cb.owner} {cb.owner_info}\")\n",
"\n",
"        for topic in dep_topics:\n",
"            if topic not in topic_to_dep_cb:\n",
"                topic_to_dep_cb[topic] = set()\n",
"            topic_to_dep_cb[topic].add(cb)\n",
"\n",
"            for pub in topic.publishers:\n",
"                pub_cb_to_lat_stats[(pub, cb)] = pub_use_latencies(cb.callback_instances, pub.instances)\n",
"\n",
"        # Find topics the callback publishes to (HEURISTICALLY!)\n",
"        # For topics published to during the runtime of the callback's instances,\n",
"        # assume that they are published by the callback\n",
"\n",
"        cb_runtime_intervals = [inst_runtime_interval(inst) for inst in cb.callback_instances]\n",
"        cb_pub_overlap_counts = [count_pub_insts_in_intervals(cb_runtime_intervals, pub.instances) for pub in owner_node.publishers]\n",
"\n",
"        for pub, olap_count in zip(owner_node.publishers, cb_pub_overlap_counts):\n",
"            if olap_count == 0 or not pub.instances:\n",
"                continue\n",
"            # Fraction of this publisher's messages sent while cb was running.\n",
"            score = olap_count / len(pub.instances)\n",
"\n",
"            if cb not in cb_to_scored_pub:\n",
"                cb_to_scored_pub[cb] = set()\n",
"            cb_to_scored_pub[cb].add((pub, score))\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/diagnostics:\n",
" 0.069979% void (AutowareStateMonitorNode::?)(autoware_auto_vehicle_msgs::msg::ControlModeReport*),\n",
" 0.069979% void (AutowareStateMonitorNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/scenario_selector/trajectory:\n",
" 97.216700% void (ScenarioSelectorNode::?)(autoware_auto_planning_msgs::msg::Trajectory*),\n",
" 2.783300% void (ScenarioSelectorNode::?)(autoware_auto_planning_msgs::msg::Trajectory*)\n",
"/diagnostics:\n",
" 9.008380% void (diagnostic_updater::Updater::?)(),\n",
" 90.712291% void (planning_diagnostics::PlanningErrorMonitorNode::?)()\n",
"/control/trajectory_follower/control_cmd:\n",
" 50.000000% void (autoware::motion::control::trajectory_follower_nodes::LatLonMuxer::?)(autoware_auto_control_msgs::msg::AckermannLateralCommand*),\n",
" 50.000000% void (autoware::motion::control::trajectory_follower_nodes::LatLonMuxer::?)(autoware_auto_control_msgs::msg::LongitudinalCommand*)\n",
"/planning/scenario_planning/status/stop_reason:\n",
" 0.306748% void (motion_planning::ObstacleStopPlannerNode::?)(sensor_msgs::msg::PointCloud2*),\n",
" 100.000000% void (motion_planning::ObstacleStopPlannerNode::?)(autoware_auto_planning_msgs::msg::Trajectory*)\n",
"/planning/scenario_planning/lane_driving/trajectory:\n",
" 0.306748% void (motion_planning::ObstacleStopPlannerNode::?)(sensor_msgs::msg::PointCloud2*),\n",
" 100.000000% void (motion_planning::ObstacleStopPlannerNode::?)(autoware_auto_planning_msgs::msg::Trajectory*)\n",
"/planning/scenario_planning/lane_driving/motion_planning/obstacle_stop_planner/debug/marker:\n",
" 0.204499% void (motion_planning::ObstacleStopPlannerNode::?)(sensor_msgs::msg::PointCloud2*),\n",
" 100.000000% void (motion_planning::ObstacleStopPlannerNode::?)(autoware_auto_planning_msgs::msg::Trajectory*)\n",
"/diagnostics:\n",
" 9.047619% void (diagnostic_updater::Updater::?)(),\n",
" 90.793651% void (VehicleCmdGate::?)()\n",
"/control/command/emergency_cmd:\n",
" 100.000000% void (VehicleCmdGate::?)(autoware_auto_control_msgs::msg::AckermannControlCommand*),\n",
" 7.980703% void (behavior_path_planner::BehaviorPathPlannerNode::?)()\n",
"/control/command/control_cmd:\n",
" 100.000000% void (VehicleCmdGate::?)(autoware_auto_control_msgs::msg::AckermannControlCommand*),\n",
" 7.953136% void (behavior_path_planner::BehaviorPathPlannerNode::?)()\n",
"/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/debug/intersection:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.204082% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/debug/stop_line:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.102041% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/status/infrastructure_commands:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.204082% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/debug/virtual_traffic_light:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.102041% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::OccupancyGrid*)\n",
"/planning/scenario_planning/status/infrastructure_commands:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.204082% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(sensor_msgs::msg::PointCloud2*),\n",
" 0.102041% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/debug/merge_from_private:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.102041% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/status/stop_reasons:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.306122% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(sensor_msgs::msg::PointCloud2*)\n",
"/planning/scenario_planning/status/infrastructure_commands:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.204082% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/debug/occlusion_spot:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.102041% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(sensor_msgs::msg::PointCloud2*),\n",
" 0.102041% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(nav_msgs::msg::Odometry*)\n",
"/planning/scenario_planning/lane_driving/behavior_planning/path:\n",
" 100.000000% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(autoware_auto_planning_msgs::msg::PathWithLaneId*),\n",
" 0.408163% void (behavior_velocity_planner::BehaviorVelocityPlannerNode::?)(sensor_msgs::msg::PointCloud2*)\n"
]
}
],
"source": [
"# Invert cb->publisher scores and build the callback-to-callback edge map.\n",
"pub_to_scored_cb = {}\n",
"\n",
"verts = set(callback_objects.values())\n",
"edges = {}\n",
"for send_cb, scored_pubs in cb_to_scored_pub.items():\n",
"    for pub, score in scored_pubs:\n",
"        if score == 0.0:\n",
"            continue\n",
"        pub_to_scored_cb.setdefault(pub, []).append((send_cb, score))\n",
"        receiver_cbs = [sub.subscription_object.callback_object for sub in pub.subscriptions if sub.subscription_object is not None]\n",
"        for recv_cb in receiver_cbs:\n",
"            edges[(send_cb, recv_cb)] = (pub.topic, pub_cb_to_lat_stats[(pub, recv_cb)])\n",
"\n",
"# Report publishers that the heuristic maps to more than one candidate callback.\n",
"for pub, scored_cbs in pub_to_scored_cb.items():\n",
"    if len(scored_cbs) > 1:\n",
"        def _mapfun(tup):\n",
"            cb, score = tup\n",
"            pretty_sym = Ros2DataModelUtil._prettify(None, callback_symbols[cb.callback_object].symbol)\n",
"            pretty_sym = re.sub(r\"std::shared_ptr<(.*?) *(const)?>\", r\"\\1*\", pretty_sym)\n",
"            return f'{score*100:>10.6f}% {pretty_sym}'\n",
"        cbstr = ',\\n '.join(map(_mapfun, scored_cbs))\n",
"        print(f\"{pub.topic_name}:\\n {cbstr}\")\n",
"\n",
"inputs = {}\n",
"outputs = {}\n",
"for topic in out_topics:\n",
"    outputs.update({cb: topic for pub in topic.publishers if pub in pub_to_scored_cb for cb, score in pub_to_scored_cb[pub]})\n",
"for topic in in_topics:\n",
"    inputs.update({sub.subscription_object.callback_object: topic for sub in topic.subscriptions if sub.subscription_object is not None})\n",
"\n",
"\n",
"#################################################\n",
"# Filter callback objects and topics\n",
"#################################################\n",
"\n",
"# Drop infrastructure callbacks (parameter events, diagnostics, TF, ...) from the graph.\n",
"callback_symbol_filters = [\n",
"    \"rcl_interfaces::msg::ParameterEvent\", \"diagnostic_updater::Updater\",\n",
"    \"rclcpp::ParameterService::ParameterService\", \"tf2_ros::TransformListener\",\n",
"    \"rclcpp_components::ComponentManager\", \"diagnostic_aggregator::Aggregator\"\n",
"    ]\n",
"\n",
"verts = set(filter(lambda vert: not any(f in callback_symbols[vert.callback_object].symbol for f in callback_symbol_filters), verts))\n",
"edges = {(cb1, cb2): val for (cb1, cb2), val in edges.items() if cb1 in verts and cb2 in verts}\n",
"outputs = {cb: topic for cb, topic in outputs.items() if cb in verts}\n",
"inputs = {cb: topic for cb, topic in inputs.items() if cb in verts}\n",
"\n",
"latency_graph = LatencyGraph(verts, edges, inputs, outputs)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Node(id=281471218807408, timestamp=1652795532762216423, tid=10104, rmw_handle=281471218875664, name='behavior_path_planner', namespace='/planning/scenario_planning/lane_driving/behavior_planning')\n",
" CallbackObject -> CallbackObject CallbackObject CallbackObject CallbackObject CallbackObject CallbackObject\n",
" CallbackObject -> CallbackObject CallbackObject CallbackObject\n"
]
}
],
"source": [
"#################################################\n",
"# Get intra-node dependencies from settings\n",
"#################################################\n",
"\n",
"def _find_node(path):\n",
"    # Resolve a node path string to the matching Node instance.\n",
"    return next(n for n in nodes.values() if n.path == path)\n",
"\n",
"from ruamel.yaml import YAML\n",
"\n",
"yaml = YAML()\n",
"with open(\"settings/intra-node-data-deps.yaml\", \"r\") as f:\n",
"    raw_internal_deps = yaml.load(f)\n",
"\n",
"# Convert node paths / callback ids from the settings file into instances.\n",
"node_internal_deps = {}\n",
"for path, deps in raw_internal_deps.items():\n",
"    node_internal_deps[_find_node(path)] = {\n",
"        callback_objects[cb_id]: [callback_objects[dep_id] for dep_id in dep_ids]\n",
"        for cb_id, dep_ids in deps.items()\n",
"    }\n",
"\n",
"for node, cb_mappings in node_internal_deps.items():\n",
"    print(node)\n",
"    for cb, deps in cb_mappings.items():\n",
"        print(\" \", type(cb).__name__, \"->\", ' '.join(type(x).__name__ for x in deps))"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n",
"[WARN] CB has no owners\n"
]
},
{
"data": {
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n<!-- Generated by graphviz version 2.43.0 (0)\n -->\n<!-- Title: G Pages: 1 -->\n<svg width=\"3884pt\" height=\"3107pt\"\n viewBox=\"0.00 0.00 3884.00 3107.48\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\n<g id=\"graph0\" class=\"graph\" transform=\"scale(1 1) rotate(0) translate(4 3103.48)\">\n<title>G</title>\n<polygon fill=\"white\" stroke=\"transparent\" points=\"-4,4 -4,-3103.48 3880,-3103.48 3880,4 -4,4\"/>\n<g id=\"clust1\" class=\"cluster\">\n<title>cluster_/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"211,-1752.48 211,-2165.48 1120,-2165.48 1120,-1752.48 211,-1752.48\"/>\n<text text-anchor=\"middle\" x=\"665.5\" y=\"-2150.28\" font-family=\"Times,serif\" font-size=\"14.00\">/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner</text>\n</g>\n<g id=\"clust2\" class=\"cluster\">\n<title>cluster_/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"89,-2173.48 89,-2416.48 1988,-2416.48 1988,-2173.48 89,-2173.48\"/>\n<text text-anchor=\"middle\" x=\"1038.5\" y=\"-2401.28\" font-family=\"Times,serif\" font-size=\"14.00\">/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner</text>\n</g>\n<g id=\"clust3\" class=\"cluster\">\n<title>cluster_/control/external_cmd_selector</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"339,-2424.48 339,-2735.48 992,-2735.48 992,-2424.48 339,-2424.48\"/>\n<text text-anchor=\"middle\" x=\"665.5\" y=\"-2720.28\" font-family=\"Times,serif\" font-size=\"14.00\">/control/external_cmd_selector</text>\n</g>\n<g id=\"clust4\" 
class=\"cluster\">\n<title>cluster_/planning/scenario_planning/scenario_selector</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"1322.5,-1343.48 1322.5,-1552.48 1943.5,-1552.48 1943.5,-1343.48 1322.5,-1343.48\"/>\n<text text-anchor=\"middle\" x=\"1633\" y=\"-1537.28\" font-family=\"Times,serif\" font-size=\"14.00\">/planning/scenario_planning/scenario_selector</text>\n</g>\n<g id=\"clust5\" class=\"cluster\">\n<title>cluster_/planning/scenario_planning/external_velocity_limit_selector</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"295,-1637.48 295,-1744.48 1036,-1744.48 1036,-1637.48 295,-1637.48\"/>\n<text text-anchor=\"middle\" x=\"665.5\" y=\"-1729.28\" font-family=\"Times,serif\" font-size=\"14.00\">/planning/scenario_planning/external_velocity_limit_selector</text>\n</g>\n<g id=\"clust6\" class=\"cluster\">\n<title>cluster_/planning/scenario_planning/parking/freespace_planner</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"3003,-1465.48 3003,-1640.48 3771,-1640.48 3771,-1465.48 3003,-1465.48\"/>\n<text text-anchor=\"middle\" x=\"3387\" y=\"-1625.28\" font-family=\"Times,serif\" font-size=\"14.00\">/planning/scenario_planning/parking/freespace_planner</text>\n</g>\n<g id=\"clust7\" class=\"cluster\">\n<title>cluster_/planning/scenario_planning/parking/costmap_generator</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"2128.5,-1587.48 2128.5,-1762.48 2757.5,-1762.48 2757.5,-1587.48 2128.5,-1587.48\"/>\n<text text-anchor=\"middle\" x=\"2443\" y=\"-1747.28\" font-family=\"Times,serif\" font-size=\"14.00\">/planning/scenario_planning/parking/costmap_generator</text>\n</g>\n<g id=\"clust8\" class=\"cluster\">\n<title>cluster_/control/trajectory_follower/longitudinal_controller_node_exe</title>\n<polygon fill=\"lightgray\" stroke=\"black\" points=\"2898,-1010.48 2898,-1117.48 3876,-1117.48 3876,-1010.48 2898,-1010.48\"/>\n<text text-anchor=\"middle\" x=\"3387\" y=\"-1102.28\" font-family=\"Times,serif\" 
font-size=\"14.00\">/control/trajectory_follower/longitudinal_controller_node_exe</text>\n</g>\n<g id=\"clust9\" class=\"cluster\
"text/plain": [
"<graphviz.graphs.Digraph at 0x7ff7a78b04f0>"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\n",
"#################################################\n",
"# Plot DFG\n",
"#################################################\n",
"\n",
"import graphviz as gv\n",
"\n",
"g = gv.Digraph('G', filename=\"latency_graph.gv\", node_attr={'shape': 'record', 'margin': '0.00001', 'width': '0.00001', 'height': '0.001'}, graph_attr={'pack': '1'})\n",
"g.graph_attr['rankdir'] = 'LR'\n",
"\n",
"# Virtual terminal vertices for system inputs / outputs.\n",
"g.node(\"INPUT\", gv.nohtml(\"{INPUT |<out>}\"))\n",
"g.node(\"OUTPUT\", gv.nohtml(\"{<in> |OUTPUT}\"))\n",
"\n",
"nodes_to_cbs = {}\n",
"export_dict = {}\n",
"\n",
"# Group callbacks by their owning ROS node.\n",
"for vert in latency_graph.verts:\n",
"    vert: CallbackObject\n",
"\n",
"    if isinstance(vert.owner, Timer):\n",
"        owner_nodes = vert.owner.nodes\n",
"    elif isinstance(vert.owner, SubscriptionObject):\n",
"        owner_nodes = [vert.owner.subscription.node]\n",
"    else:\n",
"        owner_nodes = []\n",
"\n",
"    if len(owner_nodes) > 1:\n",
"        raise RuntimeError(f\"CB has owners {', '.join(map(lambda n: n.path, owner_nodes))}\")\n",
"    elif not owner_nodes:\n",
"        print(\"[WARN] CB has no owners\")\n",
"        continue\n",
"\n",
"    owner = owner_nodes[0]\n",
"    if owner not in nodes_to_cbs: nodes_to_cbs[owner] = []\n",
"    nodes_to_cbs[owner].append(vert)\n",
"    if owner.path not in export_dict: export_dict[owner.path] = []\n",
"\n",
"# One cluster per node, one record-shaped vertex per callback.\n",
"for node, cbs in nodes_to_cbs.items():\n",
"    with g.subgraph(name=f\"cluster_{node.path}\") as c:\n",
"        c.attr(label=node.path)\n",
"        c.attr(margin='0.0')\n",
"        c.attr(bgcolor='lightgray')\n",
"\n",
"        for cb in cbs:\n",
"            cb: CallbackObject\n",
"            pretty_sym = Ros2DataModelUtil._prettify(None, callback_symbols[cb.callback_object].symbol)\n",
"            pretty_sym = re.sub(r\"std::shared_ptr<(.*?) *(const)?>\", r\"\\1*\", pretty_sym)\n",
"            # Braces are record-field delimiters in dot labels; escape them.\n",
"            pretty_sym = pretty_sym.replace('{', '\\\\{').replace('}', '\\\\}')\n",
"\n",
"            export_dict[node.path].append({cb.id: {\n",
"                'symbol': pretty_sym,\n",
"                'ins': sum(map(lambda k: k[1].id == cb.id, latency_graph.edges.keys())) + sum(map(lambda k: k.id == cb.id, latency_graph.starts.keys())),\n",
"                'outs': sum(map(lambda k: k[0].id == cb.id, latency_graph.edges.keys())) + sum(map(lambda k: k.id == cb.id, latency_graph.ends.keys()))\n",
"            }})\n",
"\n",
"            cb_durations = np.array(list(map(lambda inst: inst.duration.total_seconds(), cb.callback_instances)))\n",
"            if len(cb_durations) == 0:\n",
"                cb_durations = np.zeros(1)\n",
"            cb_dur_stats = (cb_durations.min(), cb_durations.mean(), cb_durations.max())\n",
"            c.node(str(cb.id), gv.nohtml(f\"{{<in> |{pretty_sym} |<out>}}\"), tooltip=f\"{cb_dur_stats[0]*1e6:.0f}µs, {cb_dur_stats[1]*1e6:.0f}µs, {cb_dur_stats[2]*1e6:.0f}µs\", fontcolor=('red' if isinstance(cb.owner, Timer) else 'black'))\n",
"\n",
"\n",
"for (c1, c2), (topic, lat_stats) in latency_graph.edges.items():\n",
"    g.edge(f\"{c1.id}:out\", f\"{c2.id}:in\", tooltip=f\"{topic.name} ({lat_stats.mean()*1000:.2f}ms)\")\n",
"\n",
"for c, t in latency_graph.starts.items():\n",
"    g.edge(\"INPUT:out\", f\"{c.id}:in\", tooltip=t.name)\n",
"\n",
"for c, t in latency_graph.ends.items():\n",
"    g.edge(f\"{c.id}:out\", \"OUTPUT:in\", tooltip=t.name)\n",
"\n",
"for n, deps in node_internal_deps.items():\n",
"    for cb, dep_cbs in deps.items():\n",
"        for dep_cb in dep_cbs:\n",
"            g.edge(f\"{dep_cb.id}:out\", f\"{cb.id}:in\", tooltip=\"node-internal data dependency\", color='red')\n",
"\n",
"with open(\"settings/node-cbs.yaml\", \"w\") as f:\n",
"    yaml = YAML()\n",
"    yaml.dump(export_dict, f)\n",
"\n",
"g.save(\"latency_graph.gv\")\n",
"\n",
"g"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"....................................*.......................................*.......................................*........................................*.......................................*...#*....................................*.......................................*.......................................*.........................................*.......................................*.......................................*.......................................*.....#*..........................................*.......................................*.......................................*.......................................*.....#**.....................................*.......................................*.......................................*.......................................*.......................................*.......................................*.......................................*.......................................*.......................................*.......#*....................................*.......................................*...#**#***.****.**.#**....................................*.......................................*........................................*.......................................*.......................................*.........................................*........................................*.......................................*.........................................*.......................................*.......................................*.......................................*.......................................*.......................................*.........................................*............................................*.......................................*.........................................*.........................................*.......................................*...........................................*...............
.........................*.......................................*.........................................*.......................................*..........................................*.......................................*.........................................*...#**....................................*.....#****.........................................*.......................................*.....*.===== PATH 0 =====\n",
"===== PATH 1 =====\n",
"===== PATH 2 =====\n",
"===== PATH 3 =====\n",
"===== PATH 4 =====\n",
"/diagnostics 56.812ms 56.812ms\n",
"===== PATH 5 =====\n",
"===== PATH 6 =====\n",
"===== PATH 7 =====\n",
"===== PATH 8 =====\n",
"===== PATH 9 =====\n",
"===== PATH 10 =====\n",
"===== PATH 11 =====\n",
"===== PATH 12 =====\n",
"===== PATH 13 =====\n",
"===== PATH 14 =====\n",
"===== PATH 15 =====\n",
"===== PATH 16 =====\n",
"===== PATH 17 =====\n",
"===== PATH 18 =====\n",
"===== PATH 19 =====\n",
"===== PATH 20 =====\n",
"===== PATH 21 =====\n",
"===== PATH 22 =====\n",
"===== PATH 23 =====\n",
"===== PATH 24 =====\n",
"===== PATH 25 =====\n",
"===== PATH 26 =====\n",
"===== PATH 27 =====\n",
"===== PATH 28 =====\n",
"/diagnostics 56.812ms 56.812ms\n",
"===== PATH 29 =====\n",
"===== PATH 30 =====\n",
"===== PATH 31 =====\n",
"===== PATH 32 =====\n",
"===== PATH 33 =====\n",
"===== PATH 34 =====\n",
"===== PATH 35 =====\n",
"===== PATH 36 =====\n",
"===== PATH 37 =====\n",
"===== PATH 38 =====\n",
"===== PATH 39 =====\n",
"===== PATH 40 =====\n",
"===== PATH 41 =====\n",
"===== PATH 42 =====\n",
"===== PATH 43 =====\n",
"===== PATH 44 =====\n",
"===== PATH 45 =====\n",
"===== PATH 46 =====\n",
"===== PATH 47 =====\n",
"===== PATH 48 =====\n",
"===== PATH 49 =====\n",
"===== PATH 50 =====\n",
"===== PATH 51 =====\n",
"===== PATH 52 =====\n"
]
}
],
"source": [
"\n",
"#################################################\n",
"# Transitively add latencies to get E2E latency\n",
"#################################################\n",
"\n",
"@dataclass\n",
"class PathElem:\n",
"    # One hop src -> dst over topic, with its per-message latency series.\n",
"    src: CallbackObject\n",
"    dst: CallbackObject\n",
"    topic: Topic\n",
"    latencies: LatencyStats\n",
"\n",
"def get_latency_paths(cb1, cb_to_cb_to_lat_stats, goals, parent_path=None) -> List[List[PathElem]]:\n",
"    \"\"\"Depth-first enumeration of all callback paths from cb1 to any goal callback.\"\"\"\n",
"    # BUGFIX: default was a mutable list literal ([]); use the None sentinel\n",
"    # so the default cannot be shared between top-level calls.\n",
"    if parent_path is None:\n",
"        parent_path = []\n",
"\n",
"    if cb1 in goals:\n",
"        return [parent_path]\n",
"\n",
"    if cb1 not in cb_to_cb_to_lat_stats:\n",
"        return [parent_path]\n",
"\n",
"    paths = []\n",
"    for cb2 in cb_to_cb_to_lat_stats[cb1]:\n",
"        for topic, lats in cb_to_cb_to_lat_stats[cb1][cb2].items():\n",
"            new_paths = get_latency_paths(cb2, cb_to_cb_to_lat_stats, goals, parent_path + [PathElem(cb1, cb2, topic, lats)])\n",
"            paths += new_paths\n",
"\n",
"    return paths\n",
"\n",
"\n",
"# Build cb1 -> cb2 -> topic -> latency-series mapping (progress glyphs: see prints below).\n",
"cb_to_cb_to_lat_stats = {}\n",
"for (pub, cb2), lat_stats in pub_cb_to_lat_stats.items():\n",
"    if pub not in pub_to_scored_cb:\n",
"        #print(f\"[WARN] Pub on topic {pub.topic.name} not in pub_to_scored_cb, skipping.\")\n",
"        print(end='.')\n",
"        continue\n",
"    if len(pub_to_scored_cb[pub]) > 1:\n",
"        #print(f\"[WARN] Pub on topic {pub.topic.name} has {len(pub_to_scored_cb[pub])} callbacks.\")\n",
"        print(end='#')\n",
"    for cb1, score in pub_to_scored_cb[pub]:\n",
"        if score != 1.0:\n",
"            #print(f\"[WARN] Callback for topic {pub.topic.name} only has a score of {score*100:.3f}%\")\n",
"            print(end='*')\n",
"        if cb1 in cb_to_cb_to_lat_stats and cb2 in cb_to_cb_to_lat_stats[cb1]:\n",
"            #print(f\"[WARN] Pair of callbacks already in dict!\")\n",
"            print(end='_')\n",
"        else:\n",
"            if cb1 not in cb_to_cb_to_lat_stats:\n",
"                cb_to_cb_to_lat_stats[cb1] = {}\n",
"            if cb2 not in cb_to_cb_to_lat_stats[cb1]:\n",
"                cb_to_cb_to_lat_stats[cb1][cb2] = {}\n",
"            cb_to_cb_to_lat_stats[cb1][cb2][pub.topic] = pub_cb_to_lat_stats[(pub, cb2)]\n",
"\n",
"latency_paths = []\n",
"for cb in inputs:\n",
"    latency_paths += get_latency_paths(cb, cb_to_cb_to_lat_stats, outputs.keys())\n",
"\n",
"def pl(l, lvl=0):\n",
"    # Debug helper: print the nesting structure of a (possibly nested) list.\n",
"    if isinstance(l, list):\n",
"        print(\" \"*lvl, type(l), len(l))\n",
"        for i in l:\n",
"            pl(i, lvl+1)\n",
"    else:\n",
"        print(\" \"*lvl, type(l))\n",
"\n",
"#pl(latency_paths)\n",
"\n",
"# Print each path with per-hop and cumulative mean latency.\n",
"for i, path in enumerate(latency_paths):\n",
"    print(\"===== PATH\", i, \"=====\")\n",
"    tot_lat = 0.0\n",
"    for item in path:\n",
"        tot_lat += item.latencies.mean()\n",
"        print(f\"{item.topic.name:120s} {item.latencies.mean()*1000:.3f}ms {tot_lat*1000:.3f}ms\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig = plt.figure(figsize=(30, 10))\n",
"ax = fig.add_subplot()\n",
"\n",
"# One line per (publisher, callback) pair; NaN (no preceding pub) plotted as 0.\n",
"for lat_stats in pub_cb_to_lat_stats.values():\n",
"    ax.plot(lat_stats.index, np.where(np.isnan(lat_stats), 0, lat_stats))\n",
"\n",
"ax.set_xlabel(\"Timestamp (s)\")\n",
"ax.set_ylabel(\"Publish-to-use latency (s)\")\n",
"ax.set_ylim(0, .1)\n",
"ax.set_xlim(655+1.652795e9, 660+1.652795e9)\n",
"None"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"interpreter": {
"hash": "e7370f93d1d0cde622a1f8e1c04877d8463912d04d973331ad4851f04de6915a"
},
"kernelspec": {
"display_name": "Python 3.8.10 64-bit",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
}
},
"nbformat": 4,
"nbformat_minor": 2
}