Refactored project structure; Caching util; process_clang_output.py provides Python API now
parent 71fe4626f7
commit dc689174c0
10 changed files with 906 additions and 722 deletions
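The commit message only hints at how the refactored packages fit together, so here is a minimal consumer sketch. It assumes the reworked script is importable as clang_interop.process_clang_output and that the new caching helper is misc.utils.cached; neither import path is spelled out on this page.

    # Hypothetical consumer of the new Python API introduced by this commit.
    from clang_interop.process_clang_output import process_clang_output, IN_DIR  # assumed module path
    from misc.utils import cached  # caching helper added in misc/utils.py below

    # Re-run the clang-output parsing only when IN_DIR's modification time changes;
    # otherwise the pickled ClContext is loaded from cache/.
    clang_context = cached("clang_objects", process_clang_output, [IN_DIR])

    for source_file, tu in clang_context.translation_units.items():
        print(source_file, len(tu.methods), "methods,", len(tu.accesses), "member accesses")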
0  clang_interop/__init__.py  (new file)
@@ -3,13 +3,15 @@ import json
 import os
 import pickle
 import re
 from dataclasses import dataclass
-from typing import Tuple, List, Literal, Iterable
+from typing import Tuple, Iterable
 
 import numpy as np
 import pandas as pd
 import termcolor
 
+from clang_interop.types import ClNode, ClField, ClTimer, ClMethod, ClPublisher, ClSubscription, ClMemberRef, ClContext, \
+    ClTranslationUnit
+
 IN_DIR = "/home/max/Projects/llvm-project/clang-tools-extra/ros2-internal-dependency-checker/output"
 SRC_DIR = "/home/max/Projects/autoware/src"
 
@@ -58,159 +60,10 @@ def fuse_objects(o1, o2):
     return o1
 
 
-@dataclass
-class SourceRange:
-    start_file: str
-    start_line: int | None
-    start_col: int | None
-
-    end_file: str
-    end_line: int | None
-    end_col: int | None
-
-    def __init__(self, json_obj):
-        begin = json_obj["begin"].split(":")
-        end = json_obj["end"].split(":")
-
-        self.start_file = os.path.realpath(begin[0])
-        self.start_line = int(begin[1]) if len(begin) > 1 else None
-        self.start_col = int(begin[2].split(" ")[0]) if len(begin) > 2 else None
-
-        self.end_file = os.path.realpath(end[0])
-        self.end_line = int(end[1]) if len(end) > 1 else None
-        self.end_col = int(end[2].split(" ")[0]) if len(end) > 2 else None
-
-    def __hash__(self):
-        return hash((self.start_file, self.start_line, self.start_col,
-                     self.end_file, self.end_line, self.end_col))
-
-
-@dataclass
-class Node:
-    id: int
-    qualified_name: str
-    source_range: 'SourceRange'
-    field_ids: List[int] | None
-    method_ids: List[int] | None
-
-    def __init__(self, json_obj):
-        self.id = json_obj['id']
-        self.qualified_name = json_obj['id']
-        self.source_range = SourceRange(json_obj['source_range'])
-        self.field_ids = list(map(lambda obj: obj['id'], json_obj['fields'])) if 'fields' in json_obj else None
-        self.method_ids = list(map(lambda obj: obj['id'], json_obj['methods'])) if 'methods' in json_obj else None
-
-    def __hash__(self):
-        return hash(self.id)
-
-
-@dataclass
-class Method:
-    id: int
-    qualified_name: str
-    source_range: 'SourceRange'
-    return_type: str | None
-    parameter_types: List[str] | None
-
-    def __init__(self, json_obj):
-        self.id = json_obj['id']
-        self.qualified_name = json_obj['qualified_name']
-        self.source_range = SourceRange(json_obj['source_range'])
-        self.return_type = json_obj['signature']['return_type'] if 'signature' in json_obj else None
-        self.parameter_types = json_obj['signature']['parameter_types'] if 'signature' in json_obj else None
-
-    def __hash__(self):
-        return hash(self.id)
-
-
-@dataclass
-class Field:
-    id: int
-    qualified_name: str
-    source_range: 'SourceRange'
-
-    def __init__(self, json_obj):
-        self.id = json_obj['id']
-        self.qualified_name = json_obj['qualified_name']
-        self.source_range = SourceRange(json_obj['source_range'])
-
-    def __hash__(self):
-        return hash(self.id)
-
-
-@dataclass
-class MemberRef:
-    type: Literal["read", "write", "call", "arg", "pub"] | None
-    member_chain: List[int]
-    method_id: int | None
-    node_id: int | None
-    source_range: 'SourceRange'
-
-    def __init__(self, json_obj):
-        access_type = json_obj['context']['access_type']
-        if access_type == 'none':
-            access_type = None
-        self.type = access_type
-        self.member_chain = list(map(lambda obj: obj['id'], json_obj['member'][::-1]))
-        self.method_id = json_obj['context']['method']['id'] if 'method' in json_obj['context'] else None
-        self.node_id = json_obj['context']['node']['id'] if 'node' in json_obj['context'] else None
-        self.source_range = SourceRange(json_obj['context']['statement']['source_range'])
-
-    def __hash__(self):
-        return self.source_range.__hash__()
-
-
-@dataclass
-class Subscription:
-    topic: str | None
-    callback_id: int | None
-    source_range: 'SourceRange'
-
-    def __init__(self, json_obj):
-        self.topic = json_obj['topic'] if 'topic' in json_obj else None
-        self.callback_id = json_obj['callback']['id'] if 'callback' in json_obj else None
-        self.source_range = SourceRange(json_obj['source_range'])
-
-    def __hash__(self):
-        return self.source_range.__hash__()
-
-
-@dataclass
-class Publisher:
-    topic: str | None
-    member_id: int | None
-    source_range: 'SourceRange'
-
-    def update(self, t2: 'Timer'):
-        return self
-
-    def __init__(self, json_obj):
-        self.topic = json_obj['topic'] if 'topic' in json_obj else None
-        self.member_id = json_obj['member']['id'] if 'member' in json_obj else None
-        self.source_range = SourceRange(json_obj['source_range'])
-
-    def __hash__(self):
-        return self.source_range.__hash__()
-
-
-@dataclass
-class Timer:
-    callback_id: int | None
-    source_range: 'SourceRange'
-
-    def __init__(self, json_obj):
-        self.callback_id = json_obj['callback']['id'] if 'callback' in json_obj else None
-        self.source_range = SourceRange(json_obj['source_range'])
-
-    def __hash__(self):
-        return self.source_range.__hash__()
-
-
-def find_data_deps(nodes: Iterable[Node], pubs: Iterable[Publisher], subs: Iterable[Subscription],
-                   timers: Iterable[Timer], fields, methods, accesses: Iterable[MemberRef]):
+def find_data_deps(accesses: Iterable[ClMemberRef]):
     writes = set()
     reads = set()
-    publications = set()
+    publications = {}
 
     for member_ref in accesses:
         member_id = member_ref.member_chain[0] if member_ref.member_chain else None
@@ -229,7 +82,9 @@ def find_data_deps(nodes: Iterable[Node], pubs: Iterable[Publisher], subs: Itera
                 writes.add(dep_tuple)
                 reads.add(dep_tuple)
             case "pub":
-                publications.add(dep_tuple)
+                if member_ref.method_id not in publications:
+                    publications[member_ref.method_id] = set()
+                publications[member_ref.method_id].add(member_id)
 
     reads = pd.DataFrame.from_records(list(reads), columns=['method_id', 'member_id'])
    writes = pd.DataFrame.from_records(list(writes), columns=['method_id', 'member_id'])
@@ -250,7 +105,7 @@ def find_data_deps(nodes: Iterable[Node], pubs: Iterable[Publisher], subs: Itera
 
         deps[reading_method].discard(reading_method)  # Remove reflexive dependencies
 
-    return publications, deps
+    return deps, publications
 
 
 def dedup(elems):
@@ -274,6 +129,10 @@ def dedup(elems):
     return ret_list
 
 
+def dictify(elems, key='id'):
+    return {getattr(e, key): e for e in elems}
+
+
 def definitions_from_json(cb_dict):
     nodes = []
     pubs = []
@@ -285,35 +144,35 @@ def definitions_from_json(cb_dict):
 
     if "nodes" in cb_dict:
         for node in cb_dict["nodes"]:
-            nodes.append(Node(node))
+            nodes.append(ClNode(node))
             for field in node["fields"]:
-                fields.append(Field(field))
+                fields.append(ClField(field))
             for method in node["methods"]:
-                methods.append(Method(method))
+                methods.append(ClMethod(method))
 
     if "publishers" in cb_dict:
         for publisher in cb_dict["publishers"]:
-            pubs.append(Publisher(publisher))
+            pubs.append(ClPublisher(publisher))
 
     if "subscriptions" in cb_dict:
         for subscription in cb_dict["subscriptions"]:
-            subs.append(Subscription(subscription))
+            subs.append(ClSubscription(subscription))
 
     if "timers" in cb_dict:
         for timer in cb_dict["timers"]:
-            timers.append(Timer(timer))
+            timers.append(ClTimer(timer))
 
     if "accesses" in cb_dict:
        for access_type in cb_dict["accesses"]:
            for access in cb_dict["accesses"][access_type]:
-                accesses.append(MemberRef(access))
+                accesses.append(ClMemberRef(access))
 
-    nodes = dedup(nodes)
-    pubs = dedup(pubs)
-    subs = dedup(subs)
-    timers = dedup(timers)
-    fields = dedup(fields)
-    methods = dedup(methods)
+    nodes = dictify(dedup(nodes))
+    pubs = dictify(dedup(pubs), key='member_id')
+    subs = dictify(dedup(subs), key='callback_id')
+    timers = dictify(dedup(timers), key='callback_id')
+    fields = dictify(dedup(fields))
+    methods = dictify(dedup(methods))
 
     return nodes, pubs, subs, timers, fields, methods, accesses
 
@@ -341,7 +200,7 @@ def prompt_user(file: str, cb: str, idf: str, text: str) -> Tuple[str, bool, boo
     return answer, answer == "q", answer == "z"
 
 
-def main(nodes, cbs, fields, methods):
+def main(cbs):
     open_files = {}
     cb_rw_dict = {}
 
@@ -407,8 +266,8 @@ def main(nodes, cbs, fields, methods):
     print("Done.")
 
 
-if __name__ == "__main__":
-    out_dict = {}
+def process_clang_output(directory=IN_DIR):
+    clang_context = ClContext()
 
     for filename in os.listdir(IN_DIR):
         source_filename = SRC_FILE_NAME(filename)
@@ -418,22 +277,20 @@ if __name__ == "__main__":
         if cb_dict is None:
             print(f" [WARN ] Empty tool output detected in {filename}")
             continue
-        definitions = definitions_from_json(cb_dict)
-        deps, publications = find_data_deps(*definitions)
-
-        (nodes, pubs, subs, timers, fields, methods, accesses) = definitions
-        out_dict[source_filename] = {
-            "dependencies": deps,
-            "publications": publications,
-            "nodes": nodes,
-            "publishers": pubs,
-            "subscriptions": subs,
-            "timers": timers,
-            "fields": fields,
-            "methods": methods
-        }
+        nodes, pubs, subs, timers, fields, methods, accesses = definitions_from_json(cb_dict)
+        deps, publications = find_data_deps(accesses)
+
+        tu = ClTranslationUnit(deps, publications, nodes, pubs, subs, timers, fields, methods, accesses)
+        clang_context.translation_units[source_filename] = tu
+
+    return clang_context
+
+
+if __name__ == "__main__":
+    clang_context = process_clang_output()
 
     with open(OUT_NAME, "wb") as f:
-        pickle.dump(out_dict, f)
+        pickle.dump(clang_context, f)
 
     print("Done.")
173  clang_interop/types.py  (new file)
@@ -0,0 +1,173 @@
import os
from dataclasses import dataclass, field
from typing import List, Literal, Dict, Set


@dataclass
class ClTranslationUnit:
    dependencies: Dict[int, Set[int]]
    publications: Dict[int, Set[int]]
    nodes: Dict[int, 'ClNode']
    publishers: Dict[int, 'ClPublisher']
    subscriptions: Dict[int, 'ClSubscription']
    timers: Dict[int, 'ClTimer']
    fields: Dict[int, 'ClField']
    methods: Dict[int, 'ClMethod']
    accesses: List['ClMemberRef']


@dataclass
class ClContext:
    translation_units: Dict[str, 'ClTranslationUnit'] = field(default_factory=dict)


@dataclass
class ClSourceRange:
    start_file: str
    start_line: int | None
    start_col: int | None

    end_file: str
    end_line: int | None
    end_col: int | None

    def __init__(self, json_obj):
        begin = json_obj["begin"].split(":")
        end = json_obj["end"].split(":")

        self.start_file = os.path.realpath(begin[0])
        self.start_line = int(begin[1]) if len(begin) > 1 else None
        self.start_col = int(begin[2].split(" ")[0]) if len(begin) > 2 else None

        self.end_file = os.path.realpath(end[0])
        self.end_line = int(end[1]) if len(end) > 1 else None
        self.end_col = int(end[2].split(" ")[0]) if len(end) > 2 else None

    def __hash__(self):
        return hash((self.start_file, self.start_line, self.start_col,
                     self.end_file, self.end_line, self.end_col))


@dataclass
class ClNode:
    id: int
    qualified_name: str
    source_range: 'ClSourceRange'
    field_ids: List[int] | None
    method_ids: List[int] | None
    ros_name: str | None
    ros_namespace: str | None

    def __init__(self, json_obj):
        self.id = json_obj['id']
        self.qualified_name = json_obj['id']
        self.source_range = ClSourceRange(json_obj['source_range'])
        self.field_ids = list(map(lambda obj: obj['id'], json_obj['fields'])) if 'fields' in json_obj else None
        self.method_ids = list(map(lambda obj: obj['id'], json_obj['methods'])) if 'methods' in json_obj else None
        self.ros_name = json_obj['ros_name'] if 'ros_name' in json_obj else None
        self.ros_namespace = json_obj['ros_namespace'] if 'ros_namespace' in json_obj else None

    def __hash__(self):
        return hash(self.id)


@dataclass
class ClMethod:
    id: int
    qualified_name: str
    source_range: 'ClSourceRange'
    return_type: str | None
    parameter_types: List[str] | None

    def __init__(self, json_obj):
        self.id = json_obj['id']
        self.qualified_name = json_obj['qualified_name']
        self.source_range = ClSourceRange(json_obj['source_range'])
        self.return_type = json_obj['signature']['return_type'] if 'signature' in json_obj else None
        self.parameter_types = json_obj['signature']['parameter_types'] if 'signature' in json_obj else None

    def __hash__(self):
        return hash(self.id)


@dataclass
class ClField:
    id: int
    qualified_name: str
    source_range: 'ClSourceRange'

    def __init__(self, json_obj):
        self.id = json_obj['id']
        self.qualified_name = json_obj['qualified_name']
        self.source_range = ClSourceRange(json_obj['source_range'])

    def __hash__(self):
        return hash(self.id)


@dataclass
class ClMemberRef:
    type: Literal["read", "write", "call", "arg", "pub"] | None
    member_chain: List[int]
    method_id: int | None
    node_id: int | None
    source_range: 'ClSourceRange'

    def __init__(self, json_obj):
        access_type = json_obj['context']['access_type']
        if access_type == 'none':
            access_type = None
        self.type = access_type
        self.member_chain = list(map(lambda obj: obj['id'], json_obj['member'][::-1]))
        self.method_id = json_obj['context']['method']['id'] if 'method' in json_obj['context'] else None
        self.node_id = json_obj['context']['node']['id'] if 'node' in json_obj['context'] else None
        self.source_range = ClSourceRange(json_obj['context']['statement']['source_range'])

    def __hash__(self):
        return self.source_range.__hash__()


@dataclass
class ClSubscription:
    topic: str | None
    callback_id: int | None
    source_range: 'ClSourceRange'

    def __init__(self, json_obj):
        self.topic = json_obj['topic'] if 'topic' in json_obj else None
        self.callback_id = json_obj['callback']['id'] if 'callback' in json_obj else None
        self.source_range = ClSourceRange(json_obj['source_range'])

    def __hash__(self):
        return self.source_range.__hash__()


@dataclass
class ClPublisher:
    topic: str | None
    member_id: int | None
    source_range: 'ClSourceRange'

    def update(self, t2: 'ClTimer'):
        return self

    def __init__(self, json_obj):
        self.topic = json_obj['topic'] if 'topic' in json_obj else None
        self.member_id = json_obj['member']['id'] if 'member' in json_obj else None
        self.source_range = ClSourceRange(json_obj['source_range'])

    def __hash__(self):
        return self.source_range.__hash__()


@dataclass
class ClTimer:
    callback_id: int | None
    source_range: 'ClSourceRange'

    def __init__(self, json_obj):
        self.callback_id = json_obj['callback']['id'] if 'callback' in json_obj else None
        self.source_range = ClSourceRange(json_obj['source_range'])

    def __hash__(self):
        return self.source_range.__hash__()
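Each Cl* constructor takes one decoded JSON object from the clang tool's output. The field layout below is inferred from the constructors above (no schema ships with this commit), so treat it as an illustrative sketch rather than the tool's documented format:

    from clang_interop.types import ClMethod

    method_json = {
        "id": 42,
        "qualified_name": "ns::SomeNode::timer_callback",
        "source_range": {"begin": "src/some_node.cpp:10:5", "end": "src/some_node.cpp:25:1"},
        "signature": {"return_type": "void", "parameter_types": []},
    }

    m = ClMethod(method_json)
    print(m.qualified_name, m.return_type)                     # ns::SomeNode::timer_callback void
    print(m.source_range.start_line, m.source_range.end_line)  # 10 25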
0  misc/__init__.py  (new file)
77  misc/utils.py  (new file)
@@ -0,0 +1,77 @@
import base64
import glob
import hashlib
import json
import math
import os
import pickle
import time
from typing import List


def left_abbreviate(string, limit=120):
    return string if len(string) <= limit else f"...{string[:limit - 3]}"


class ProgressPrinter:
    def __init__(self, verb, n) -> None:
        self.verb = verb
        self.n = n
        self.i = 0
        self.fmt_len = math.ceil(math.log10(n if n > 0 else 1))

    def step(self, msg):
        self.i += 1
        print(f"({self.i:>{self.fmt_len}d}/{self.n}) {self.verb} {left_abbreviate(msg):<120}", end="\r")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.i -= 1

        if exc_value:
            self.step("error.")
            print()
            print(exc_value)
            return

        self.step("done.")
        print()


def stable_hash(obj):
    return hashlib.md5(json.dumps(obj).encode("utf-8")).hexdigest()[:10]


def cached(name, function, file_deps: List[str]):
    if not os.path.isdir("cache"):
        os.makedirs("cache", exist_ok=True)

    dep_time = 0.0
    for file in file_deps:
        m_time = os.path.getmtime(file) if os.path.exists(file) else 0.
        if m_time > dep_time:
            dep_time = m_time

    deps_hash = stable_hash(sorted(file_deps))
    pkl_filename = f"cache/{name}_{deps_hash}.pkl"

    pkl_time = os.path.getmtime(pkl_filename) if os.path.exists(pkl_filename) else 0.

    if pkl_time > dep_time:
        with open(pkl_filename, "rb") as f:
            print(f"[CACHE] Found up-to-date cache entry for {name}, loading.")
            return pickle.load(f)

    if os.path.exists(pkl_filename):
        print(f"[CACHE] Data dependencies for {name} changed, deleting cached version.")
        os.remove(pkl_filename)

    print(f"[CACHE] Creating cache entry for {name} (in {pkl_filename}).")
    obj = function()

    with open(pkl_filename, "wb") as f:
        pickle.dump(obj, f)

    return obj
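The cache contract of cached() in a nutshell: the result of function() is pickled to cache/<name>_<hash of file_deps>.pkl and reused as long as that pickle is newer than every path in file_deps. A small sketch (the dependency file name is made up for the example):

    from misc.utils import cached

    def expensive_analysis():
        print("recomputing...")
        return {"answer": 42}

    # First call computes and writes cache/demo_<hash>.pkl; later calls load the
    # pickle until input.json is modified again (missing files count as mtime 0).
    result = cached("demo", expensive_analysis, ["input.json"])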
File diff suppressed because it is too large
0  tracing_interop/__init__.py  (new file)
339  tracing_interop/types.py  (new file)
@@ -0,0 +1,339 @@
from dataclasses import dataclass
from functools import cached_property
from typing import List, Dict

import pandas as pd

from tracetools_analysis.processor.ros2 import Ros2Handler
from tracetools_analysis.utils.ros2 import Ros2DataModelUtil

from .utils import list_to_dict, df_to_type_list


@dataclass
class TrContext:
    nodes: Dict[int, 'TrNode']
    publishers: Dict[int, 'TrPublisher']
    subscriptions: Dict[int, 'TrSubscription']
    timers: Dict[int, 'TrTimer']
    timer_node_links: Dict[int, 'TrTimerNodeLink']
    subscription_objects: Dict[int, 'TrSubscriptionObject']
    callback_objects: Dict[int, 'TrCallbackObject']
    callback_symbols: Dict[int, 'TrCallbackSymbol']
    publish_instances: List['TrPublishInstance']
    callback_instances: List['TrCallbackInstance']
    topics: Dict[str, 'TrTopic']

    util: Ros2DataModelUtil | None
    handler: Ros2Handler | None

    def __init__(self, util: Ros2DataModelUtil, handler: Ros2Handler):
        self.util = util
        self.handler = handler

        print("[TrContext] Processing ROS 2 objects from traces...")

        self.nodes = list_to_dict(df_to_type_list(handler.data.nodes, TrNode, _c=self))
        print(f" ├─ Processed {len(self.nodes):<8d} nodes")
        self.publishers = list_to_dict(df_to_type_list(handler.data.rcl_publishers, TrPublisher, _c=self))
        print(f" ├─ Processed {len(self.publishers):<8d} publishers")
        self.subscriptions = list_to_dict(df_to_type_list(handler.data.rcl_subscriptions, TrSubscription, _c=self))
        print(f" ├─ Processed {len(self.subscriptions):<8d} subscriptions")
        self.timers = list_to_dict(df_to_type_list(handler.data.timers, TrTimer, _c=self))
        print(f" ├─ Processed {len(self.timers):<8d} timers")
        self.timer_node_links = list_to_dict(df_to_type_list(handler.data.timer_node_links, TrTimerNodeLink))
        print(f" ├─ Processed {len(self.timer_node_links):<8d} timer-node links")
        self.subscription_objects = list_to_dict(
            df_to_type_list(handler.data.subscription_objects, TrSubscriptionObject, _c=self))
        print(f" ├─ Processed {len(self.subscription_objects):<8d} subscription objects")
        self.callback_objects = list_to_dict(df_to_type_list(handler.data.callback_objects, TrCallbackObject, _c=self))
        print(f" ├─ Processed {len(self.callback_objects):<8d} callback objects")
        self.callback_symbols = list_to_dict(df_to_type_list(handler.data.callback_symbols, TrCallbackSymbol, _c=self))
        print(f" ├─ Processed {len(self.callback_symbols):<8d} callback symbols")
        self.publish_instances = df_to_type_list(handler.data.rcl_publish_instances, TrPublishInstance, _c=self)
        print(f" ├─ Processed {len(self.publish_instances):<8d} publish instances")
        self.callback_instances = df_to_type_list(handler.data.callback_instances, TrCallbackInstance, _c=self)
        print(f" ├─ Processed {len(self.callback_instances):<8d} callback instances")

        _unique_topic_names = {*(pub.topic_name for pub in self.publishers.values()),
                               *(sub.topic_name for sub in self.subscriptions.values())}
        self.topics = list_to_dict(map(lambda name: TrTopic(name=name, _c=self), _unique_topic_names), key="name")
        print(f" └─ Processed {len(self.topics):<8d} topics\n")

        print("[TrContext] Caching dynamic properties...")

        [(o.path, o.publishers, o.subscriptions, o.timers) for o in self.nodes.values()]
        print(" ├─ Cached node properties")
        [(o.instances, o.subscriptions) for o in self.publishers.values()]
        print(" ├─ Cached publisher properties")
        [(o.publishers, o.subscription_objects) for o in self.subscriptions.values()]
        print(" ├─ Cached subscription properties")
        [(o.nodes) for o in self.timers.values()]
        print(" ├─ Cached timer properties")
        [(o.callback_instances, o.owner, o.owner_info) for o in self.callback_objects.values()]
        print(" ├─ Cached callback object properties")
        [(o.callback_objs) for o in self.callback_symbols.values()]
        print(" ├─ Cached callback symbol properties")
        [(o.publishers, o.subscriptions) for o in self.topics.values()]
        print(" └─ Cached topic properties\n")

    def __getstate__(self):
        state = self.__dict__.copy()
        del state["util"]
        del state["handler"]
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)
        self.util = None
        self.handler = None


@dataclass
class TrNode:
    id: int
    timestamp: int
    tid: int
    rmw_handle: int
    name: str
    namespace: str
    _c: TrContext

    @cached_property
    def path(self) -> str:
        return '/'.join((self.namespace, self.name))

    @cached_property
    def publishers(self) -> List['TrPublisher']:
        return list(filter(lambda pub: pub.node_handle == self.id, self._c.publishers.values()))

    @cached_property
    def subscriptions(self) -> List['TrSubscription']:
        return list(filter(lambda sub: sub.node_handle == self.id, self._c.subscriptions.values()))

    @cached_property
    def timers(self) -> List['TrTimer']:
        links = [link.id for link in self._c.timer_node_links.values() if link.node_handle == self.id]
        return list(filter(lambda timer: timer.id in links, self._c.timers.values()))

    def __hash__(self):
        return hash(self.id)


@dataclass
class TrPublisher:
    id: int
    timestamp: int
    node_handle: int
    rmw_handle: int
    topic_name: str
    depth: int
    _c: TrContext

    @property
    def node(self) -> 'TrNode':
        return self._c.nodes[self.node_handle]

    @cached_property
    def subscriptions(self) -> List['TrSubscription']:
        return list(filter(lambda sub: sub.topic_name == self.topic_name, self._c.subscriptions.values()))

    @cached_property
    def instances(self) -> List['TrPublishInstance']:
        return list(filter(lambda inst: inst.publisher_handle == self.id, self._c.publish_instances))

    @property
    def topic(self) -> 'TrTopic':
        return self._c.topics[self.topic_name]

    def __hash__(self):
        return hash(self.id)


@dataclass
class TrSubscription:
    id: int
    timestamp: int
    node_handle: int
    rmw_handle: int
    topic_name: str
    depth: int
    _c: TrContext

    @property
    def node(self) -> 'TrNode':
        return self._c.nodes[self.node_handle]

    @cached_property
    def publishers(self) -> List['TrPublisher']:
        return list(filter(lambda pub: pub.topic_name == self.topic_name, self._c.publishers.values()))

    @cached_property
    def subscription_objects(self) -> List['TrSubscriptionObject']:
        return list(
            filter(lambda sub_obj: sub_obj.subscription_handle == self.id, self._c.subscription_objects.values()))

    @property
    def topic(self) -> 'TrTopic':
        return self._c.topics[self.topic_name]

    def __hash__(self):
        return hash(self.id)


@dataclass
class TrTimer:
    id: int
    timestamp: int
    period: int
    tid: int
    _c: TrContext

    @cached_property
    def nodes(self) -> List['TrNode']:
        links = [link.node_handle for link in self._c.timer_node_links.values() if link.id == self.id]
        return list(filter(lambda node: node.id in links, self._c.nodes.values()))

    @property
    def callback_object(self) -> 'TrCallbackObject':
        return self._c.callback_objects[self.id]

    def __hash__(self):
        return hash(self.id)


@dataclass
class TrTimerNodeLink:
    id: int
    timestamp: int
    node_handle: int


@dataclass
class TrSubscriptionObject:
    id: int  # subscription
    timestamp: int
    subscription_handle: int
    _c: TrContext

    @property
    def subscription(self) -> 'TrSubscription':
        return self._c.subscriptions[self.subscription_handle]

    @property
    def callback_object(self) -> 'TrCallbackObject':
        return self._c.callback_objects[self.id]

    def __hash__(self):
        return hash((self.id, self.timestamp, self.subscription_handle))


@dataclass
class TrCallbackObject:
    id: int  # (reference) = subscription_object.id | timer.id | ....
    timestamp: int
    callback_object: int
    _c: TrContext

    @cached_property
    def callback_instances(self) -> List['TrCallbackInstance']:
        return list(filter(lambda inst: inst.callback_object == self.callback_object, self._c.callback_instances))

    @property
    def callback_symbol(self) -> 'TrCallbackSymbol':
        return self._c.callback_symbols[self.id]

    @cached_property
    def owner(self):
        if self.id in self._c.timers:
            return self._c.timers[self.id]
        if self.id in self._c.publishers:
            return self._c.publishers[self.id]
        if self.id in self._c.subscription_objects:
            return self._c.subscription_objects[self.id]
        if self.id in self._c.handler.data.services.index:
            return 'Service'
        if self.id in self._c.handler.data.clients.index:
            return 'Client'
        return None

    @cached_property
    def owner_info(self):
        info = self._c.util.get_callback_owner_info(self.callback_object)
        if info is None:
            return None, None

        type_name, dict_str = info.split(" -- ")
        kv_strs = dict_str.split(", ")
        info_dict = {k: v for k, v in map(lambda kv_str: kv_str.split(": ", maxsplit=1), kv_strs)}
        return type_name, info_dict

    def __hash__(self):
        return hash((self.id, self.timestamp, self.callback_object))


@dataclass
class TrPublishInstance:
    publisher_handle: int
    timestamp: int
    message: int
    _c: TrContext

    @property
    def publisher(self) -> 'TrPublisher':
        return self._c.publishers[self.publisher_handle]

    def __hash__(self):
        return hash((self.publisher_handle, self.timestamp, self.message))


@dataclass
class TrCallbackInstance:
    callback_object: int
    timestamp: pd.Timestamp
    duration: pd.Timedelta
    intra_process: bool
    _c: TrContext

    @property
    def callback_obj(self) -> 'TrCallbackObject':
        return self._c.callback_objects[self.callback_object]

    def __hash__(self):
        return hash((self.callback_object, self.timestamp, self.duration))


@dataclass
class TrCallbackSymbol:
    id: int  # callback_object
    timestamp: int
    symbol: str
    _c: TrContext

    @cached_property
    def callback_objs(self) -> List['TrCallbackObject']:
        return list(filter(lambda cb_obj: cb_obj.callback_object == self.id, self._c.callback_objects.values()))

    def __hash__(self):
        return hash((self.id, self.timestamp, self.symbol))


#######################################
# Self-defined (not from ROS2DataModel)
#######################################

@dataclass
class TrTopic:
    name: str
    _c: TrContext

    @cached_property
    def publishers(self) -> List['TrPublisher']:
        return list(filter(lambda pub: pub.topic_name == self.name, self._c.publishers.values()))

    @cached_property
    def subscriptions(self) -> List['TrSubscription']:
        return list(filter(lambda sub: sub.topic_name == self.name, self._c.subscriptions.values()))

    def __hash__(self):
        return hash(self.name)
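Given an already constructed TrContext (called tr below; building one needs a Ros2DataModelUtil and Ros2Handler from tracetools_analysis, which is outside this diff), the cached properties let you walk the trace object graph without touching the underlying DataFrames. A sketch with a hypothetical topic name:

    # tr: TrContext built from a loaded trace (assumed to exist already)
    for node in tr.nodes.values():
        print(node.path)                             # "/namespace/name"
        for pub in node.publishers:                  # publishers owned by this node
            print("  publishes", pub.topic_name, "with", len(pub.instances), "messages")

    topic = tr.topics.get("/some_topic")             # hypothetical topic name
    if topic is not None:
        print(len(topic.publishers), "publishers,", len(topic.subscriptions), "subscriptions")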
24  tracing_interop/utils.py  (new file)
@@ -0,0 +1,24 @@
import sys

import pandas as pd


def row_to_type(row, type, has_idx, **type_kwargs):
    return type(id=row.name, **row, **type_kwargs) if has_idx else type(**row, **type_kwargs)


def df_to_type_list(df, type, **type_kwargs):
    has_idx = not isinstance(df.index, pd.RangeIndex)
    return [row_to_type(row, type, has_idx, **type_kwargs) for _, row in df.iterrows()]


def by_index(df, index, type):
    return df_to_type_list(df.loc[index], type)


def by_column(df, column_name, column_val, type):
    return df_to_type_list(df[df[column_name] == column_val], type)


def list_to_dict(ls, key='id'):
    return {getattr(item, key): item for item in ls}
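These helpers map DataFrame rows onto dataclasses: when the frame has a non-default index, the index label is passed as the id field. A self-contained sketch with a stand-in dataclass (not one of the Tr* types):

    from dataclasses import dataclass

    import pandas as pd

    from tracing_interop.utils import df_to_type_list, list_to_dict

    @dataclass
    class Item:
        id: int
        name: str

    df = pd.DataFrame({"name": ["a", "b"]}, index=[10, 20])
    items = df_to_type_list(df, Item)   # index is not a RangeIndex -> id is taken from it
    print(list_to_dict(items))          # {10: Item(id=10, name='a'), 20: Item(id=20, name='b')}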
32  utils.py  (deleted)
@@ -1,32 +0,0 @@
import math


def left_abbreviate(string, limit=120):
    return string if len(string) <= limit else f"...{string[:limit-3]}"


class ProgressPrinter:
    def __init__(self, verb, n) -> None:
        self.verb = verb
        self.n = n
        self.i = 0
        self.fmt_len = math.ceil(math.log10(n if n > 0 else 1))

    def step(self, msg):
        self.i += 1
        print(f"({self.i:>{self.fmt_len}d}/{self.n}) {self.verb} {left_abbreviate(msg):<120}", end="\r")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.i -= 1

        if exc_value:
            self.step("error.")
            print()
            print(exc_value)
            return

        self.step("done.")
        print()