Implement handlers for one-time events/object creation

Christophe Bedard 2019-06-13 13:45:29 +02:00
parent 08ec9c30b2
commit c88000bede
3 changed files with 70 additions and 32 deletions


@@ -14,10 +14,15 @@ class DataModel():

     def __init__(self):
         # Objects (one-time events, usually when something is created)
-        self._contexts = pd.DataFrame(columns=[])
-        self._nodes = pd.DataFrame(columns=[])
-        self._publishers = pd.DataFrame(columns=[])
-        self._subscriptions = pd.DataFrame(columns=[])
+        self._contexts = pd.DataFrame(columns=['context_handle', 'timestamp', 'pid'])
+        self._contexts.set_index(['context_handle'], inplace=True, drop=True)
+        self._nodes = pd.DataFrame(columns=['node_handle', 'timestamp', 'tid', 'rmw_handle', 'name', 'namespace'])
+        self._nodes.set_index(['node_handle'], inplace=True, drop=True)
+        self._publishers = pd.DataFrame(columns=['publisher_handle', 'timestamp', 'node_handle', 'rmw_handle', 'topic_name', 'depth'])
+        self._publishers.set_index(['publisher_handle'], inplace=True, drop=True)
+        self._subscriptions = pd.DataFrame(columns=['subscription_handle', 'timestamp', 'node_handle', 'rmw_handle', 'topic_name', 'depth'])
+        self._subscriptions.set_index(['subscription_handle'], inplace=True, drop=True)
         self._services = pd.DataFrame(columns=[])
         self._clients = pd.DataFrame(columns=[])
         self._timers = pd.DataFrame(columns=[])
@@ -25,15 +30,33 @@ class DataModel():

         # Events
         # TODO

     def add_context(self, context_handle, timestamp, pid):
+        self._contexts.loc[context_handle] = [timestamp, pid]
+        # self._contexts = self._contexts.append({'context_handle': context_handle, 'timestamp': timestamp, 'pid': pid}, ignore_index=True)

     def add_node(self, node_handle, timestamp, tid, rmw_handle, name, namespace):
+        self._nodes.loc[node_handle] = [timestamp, tid, rmw_handle, name, namespace]

     def add_publisher(self, publisher_handle, timestamp, node_handle, rmw_handle, topic_name, depth):
+        self._publishers.loc[publisher_handle] = [timestamp, node_handle, rmw_handle, topic_name, depth]

     def add_subscription(self, subscription_handle, timestamp, node_handle, rmw_handle, topic_name, depth):
+        self._subscriptions.loc[subscription_handle] = [timestamp, node_handle, rmw_handle, topic_name, depth]

+    def print(self):
+        """Debug method to print every contained df."""
+        print('====================DATA MODEL====================')
+        print(f'Contexts:\n{self._contexts.to_string()}')
+        print()
+        print(f'Nodes:\n{self._nodes.to_string()}')
+        print()
+        print(f'Publishers:\n{self._publishers.to_string()}')
+        print()
+        print(f'Subscriptions:\n{self._subscriptions.to_string()}')
+        print()
+        print(f'Services:\n{self._services.to_string()}')
+        print()
+        print(f'Clients:\n{self._clients.to_string()}')
+        print()
+        print(f'Timers:\n{self._timers.to_string()}')
+        print('==================================================')
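
Note on the pattern above: each object type gets its own pandas DataFrame, indexed by the object's handle, and rows are written with .loc[handle] = [...]. Because set_index(..., drop=True) moves the handle into the index, each assigned list maps one-to-one onto the remaining columns, and re-adding a handle overwrites instead of duplicating. A minimal standalone sketch (the handle/timestamp/pid values are made up, not from a trace):

import pandas as pd

# One DataFrame per object type, indexed by the object's handle.
contexts = pd.DataFrame(columns=['context_handle', 'timestamp', 'pid'])
contexts.set_index(['context_handle'], inplace=True, drop=True)

# .loc[handle] = [...] inserts a row, or overwrites it if the handle is
# already present, so replaying a creation event cannot duplicate rows.
contexts.loc[0xAAAA] = [1560426329000000000, 1234]
contexts.loc[0xBBBB] = [1560426330000000000, 1234]

print(contexts.to_string())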


@@ -2,6 +2,7 @@

 from .handler import EventHandler
 from .lttng_models import get_field
+from .data_model import DataModel


 def ros_process(events):
@@ -63,43 +64,59 @@ class RosProcessor(EventHandler):
         }
         super().__init__(handler_map)

         # TODO add other stuff
-        # Instances of callback_start for eventual matching
-        self._callback_starts = {}
-        # Callback instances, callback_address: (end - start, start)
-        self.callbacks_instances = {}
+        self._data = DataModel()

+    def get_data_model(self):
+        return self._data

     def _handle_rcl_init(self, event, metadata):
-        # TODO
-        pass
+        context_handle = get_field(event, 'context_handle')
+        self._data.add_context(context_handle, metadata.timestamp, metadata.pid)

     def _handle_rcl_node_init(self, event, metadata):
-        # TODO
-        pass
+        node_handle = get_field(event, 'node_handle')
+        rmw_handle = get_field(event, 'rmw_handle')
+        name = get_field(event, 'node_name')
+        namespace = get_field(event, 'namespace')
+        self._data.add_node(node_handle, metadata.timestamp, metadata.tid, rmw_handle, name, namespace)

     def _handle_rcl_publisher_init(self, event, metadata):
-        # TODO
-        pass
+        pub_handle = get_field(event, 'publisher_handle')
+        node_handle = get_field(event, 'node_handle')
+        rmw_handle = get_field(event, 'rmw_publisher_handle')
+        topic_name = get_field(event, 'topic_name')
+        depth = get_field(event, 'depth')
+        self._data.add_publisher(pub_handle, metadata.timestamp, node_handle, rmw_handle, topic_name, depth)

     def _handle_subscription_init(self, event, metadata):
-        # TODO
-        pass
+        sub_handle = get_field(event, 'subscription_handle')
+        node_handle = get_field(event, 'node_handle')
+        rmw_handle = get_field(event, 'rmw_subscription_handle')
+        topic_name = get_field(event, 'topic_name')
+        depth = get_field(event, 'depth')
+        self._data.add_subscription(sub_handle, metadata.timestamp, node_handle, rmw_handle, topic_name, depth)

     def _handle_rclcpp_subscription_callback_added(self, event, metadata):
+        # TODO
+        pass
         # Add the callback address key and create an empty list
-        callback_addr = get_field(event, 'callback')
-        self.callbacks_instances[callback_addr] = []
+        # callback_addr = get_field(event, 'callback')
+        # self.callbacks_instances[callback_addr] = []

     def _handle_rclcpp_subscription_callback_start(self, event, metadata):
-        callback_addr = get_field(event, 'callback')
-        self._callback_starts[callback_addr] = metadata.timestamp
+        # TODO
+        pass
+        # callback_addr = get_field(event, 'callback')
+        # self._callback_starts[callback_addr] = metadata.timestamp

     def _handle_rclcpp_subscription_callback_end(self, event, metadata):
-        callback_addr = get_field(event, 'callback')
-        start_timestamp = self._callback_starts.pop(callback_addr, None)
-        if start_timestamp is not None:
-            duration = metadata.timestamp - start_timestamp
-            self.callbacks_instances[callback_addr].append((duration, start_timestamp))
+        # TODO
+        pass
+        # callback_addr = get_field(event, 'callback')
+        # start_timestamp = self._callback_starts.pop(callback_addr, None)
+        # if start_timestamp is not None:
+        #     duration = metadata.timestamp - start_timestamp
+        #     self.callbacks_instances[callback_addr].append((duration, start_timestamp))

     def _handle_rcl_service_init(self, event, metadata):
         # TODO
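
The remaining stubs would follow the same recipe as the handlers above: read the tracepoint fields with get_field() and forward them, along with the event metadata, to the data model. As an illustration, _handle_rcl_service_init() could eventually look like the sketch below; the field names and the add_service() method are assumptions modeled on the publisher/subscription handlers, not part of this commit:

    def _handle_rcl_service_init(self, event, metadata):
        # Hypothetical field names, guessed from the other *_init handlers.
        service_handle = get_field(event, 'service_handle')
        node_handle = get_field(event, 'node_handle')
        rmw_handle = get_field(event, 'rmw_service_handle')
        service_name = get_field(event, 'service_name')
        # add_service() does not exist in DataModel yet (self._services still
        # has empty columns); it would mirror add_publisher().
        self._data.add_service(service_handle, metadata.timestamp, node_handle, rmw_handle, service_name)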


@@ -4,8 +4,8 @@

 import argparse
 import pickle

-from tracetools_analysis.analysis import ros_processor, to_pandas
+from tracetools_analysis.analysis import processor, to_pandas
+from tracetools_analysis.analysis import data_model


 def parse_args():
     parser = argparse.ArgumentParser(description='Process a pickle file generated from tracing and analyze the data.')

@@ -21,11 +21,9 @@ def main():
     with open(pickle_filename, 'rb') as f:
         events = _get_events_from_pickled_file(f)
         print(f'imported {len(events)} events')
-        processor = ros_processor.ros_process(events)
-        df = to_pandas.callback_durations_to_df(processor)
-        print(df.to_string())
+        p = processor.ros_process(events)
+        p.get_data_model().print()


 def _get_events_from_pickled_file(file):
     p = pickle.Unpickler(file)
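
The diff is cut off inside _get_events_from_pickled_file(). Consistent with the p = pickle.Unpickler(file) line above, the usual way to read a stream of back-to-back pickled events is to call load() until EOFError; a sketch of that loop (not necessarily this file's exact body):

def _get_events_from_pickled_file(file):
    # Events are pickled one after another into the same file,
    # so keep loading until the unpickler runs out of data.
    p = pickle.Unpickler(file)
    events = []
    while True:
        try:
            events.append(p.load())
        except EOFError:
            break  # no more pickled events
    return events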