Fix typing information for mypy tests
Signed-off-by: Christophe Bedard <bedard.christophe@gmail.com>
parent d7c55b9a22
commit 377ce353fa
22 changed files with 203 additions and 121 deletions
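Most of the hunks below apply one pattern: handler dictionaries get an explicit `HandlerMap` annotation, `required_events()` returns a `Set[str]` instead of a `List[str]`, and values that may be absent become `Optional[...]`. The following is a minimal, self-contained sketch of that pattern, not code from the commit; the `DictEvent` and `EventMetadata` stand-ins are illustrative only, while the `HandlerMethod`/`HandlerMap` aliases mirror the ones the commit introduces in tracetools_analysis.processor.

from typing import Callable, Dict, Set

# Illustrative stand-ins for the package's event dict and metadata class.
DictEvent = Dict[str, object]


class EventMetadata:
    pass


# Mirrors the aliases introduced in the processor module by this commit.
HandlerMethod = Callable[[DictEvent, EventMetadata], None]
HandlerMap = Dict[str, HandlerMethod]


class SketchHandler:

    def __init__(self) -> None:
        # Annotating the dict lets mypy check that every value is a HandlerMethod.
        handler_map: HandlerMap = {
            'myeventname': self._handler_whatever,
        }
        self._handler_map = handler_map

    @staticmethod
    def required_events() -> Set[str]:
        # A set of event names, matching the annotated base-class signature.
        return {'myeventname'}

    def _handler_whatever(self, event: DictEvent, metadata: EventMetadata) -> None:
        pass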
@@ -13,12 +13,13 @@
 # limitations under the License.
 
 from typing import Dict
-from typing import List
+from typing import Set
 import unittest
 
 from tracetools_analysis.processor import AutoProcessor
 from tracetools_analysis.processor import EventHandler
 from tracetools_analysis.processor import EventMetadata
+from tracetools_analysis.processor import HandlerMap
 
 
 class AbstractEventHandler(EventHandler):
@@ -32,18 +33,18 @@ class AbstractEventHandler(EventHandler):
 class SubSubEventHandler(AbstractEventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myeventname': self._handler_whatever,
             'myeventname69': self._handler_whatever,
         }
         super().__init__(handler_map=handler_map)
 
     @staticmethod
-    def required_events() -> List[str]:
-        return [
+    def required_events() -> Set[str]:
+        return {
             'myeventname',
             'myeventname69',
-        ]
+        }
 
     def _handler_whatever(
         self, event: Dict, metadata: EventMetadata
@@ -54,16 +55,16 @@ class SubSubEventHandler(AbstractEventHandler):
 class SubSubEventHandler2(AbstractEventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myeventname2': self._handler_whatever,
         }
         super().__init__(handler_map=handler_map)
 
     @staticmethod
-    def required_events() -> List[str]:
-        return [
+    def required_events() -> Set[str]:
+        return {
             'myeventname2',
-        ]
+        }
 
     def _handler_whatever(
         self, event: Dict, metadata: EventMetadata
@@ -74,16 +75,16 @@ class SubSubEventHandler2(AbstractEventHandler):
 class SubEventHandler(EventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myeventname3': self._handler_whatever,
         }
        super().__init__(handler_map=handler_map)
 
     @staticmethod
-    def required_events() -> List[str]:
-        return [
+    def required_events() -> Set[str]:
+        return {
             'myeventname3',
-        ]
+        }
 
     def _handler_whatever(
         self, event: Dict, metadata: EventMetadata
@@ -23,6 +23,7 @@ from pandas.util.testing import assert_frame_equal
 from tracetools_analysis.data_model import DataModel
 from tracetools_analysis.processor import EventHandler
 from tracetools_analysis.processor import EventMetadata
+from tracetools_analysis.processor import HandlerMap
 from tracetools_analysis.utils import DataModelUtil
 
 
@@ -113,13 +114,13 @@ class TestDataModelUtil(unittest.TestCase):
         )
         assert_frame_equal(input_df, expected_df)
 
-    def test_creation(self) -> None:
-        def handler_whatever(
-            self, event: Dict, metadata: EventMetadata
-        ) -> None:
-            pass
+    def handler_whatever(
+        self, event: Dict, metadata: EventMetadata
+    ) -> None:
+        pass
 
-        handler_map = {'fake': handler_whatever}
+    def test_creation(self) -> None:
+        handler_map: HandlerMap = {'fake': self.handler_whatever}
         data_model = DataModel()
 
         # Should handle the event handler not having any data model
@@ -111,7 +111,7 @@ class TestDependencySolver(unittest.TestCase):
         # Pass parameter and check that the new instance has it
         solution = DependencySolver(depone_instance, myparam='myvalue').solve()
         self.assertEqual(len(solution), 2, 'solution length invalid')
-        self.assertEqual(solution[0].myparam, 'myvalue', 'parameter not passed on')
+        self.assertEqual(solution[0].myparam, 'myvalue', 'parameter not passed on')  # type: ignore
 
 
 if __name__ == '__main__':
@@ -73,10 +73,10 @@ class TestProcessCommand(unittest.TestCase):
         # Should fail to find converted file under directory
         file_path, create_file = inspect_input_path(self.without_converted_file_dir, False)
         self.assertIsNone(file_path)
-        self.assertIsNone(create_file)
+        self.assertFalse(create_file)
         file_path, create_file = inspect_input_path(self.without_converted_file_dir, True)
         self.assertIsNone(file_path)
-        self.assertIsNone(create_file)
+        self.assertFalse(create_file)
 
         # Should accept any file path if it exists
         file_path, create_file = inspect_input_path(self.random_file_path, False)
@@ -13,18 +13,19 @@
 # limitations under the License.
 
 from typing import Dict
-from typing import List
+from typing import Set
 import unittest
 
 from tracetools_analysis.processor import EventHandler
 from tracetools_analysis.processor import EventMetadata
+from tracetools_analysis.processor import HandlerMap
 from tracetools_analysis.processor import Processor
 
 
 class StubHandler1(EventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myeventname': self._handler_whatever,
         }
         super().__init__(handler_map=handler_map)
@@ -39,7 +40,7 @@ class StubHandler1(EventHandler):
 class StubHandler2(EventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myeventname': self._handler_whatever,
         }
         super().__init__(handler_map=handler_map)
@@ -54,8 +55,8 @@ class StubHandler2(EventHandler):
 class WrongHandler(EventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
-            'myeventname': self._handler_wrong,
+        handler_map: HandlerMap = {
+            'myeventname': self._handler_wrong,  # type: ignore # intentionally wrong
         }
         super().__init__(handler_map=handler_map)
 
@@ -68,17 +69,17 @@ class WrongHandler(EventHandler):
 class MissingEventHandler(EventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myeventname': self._handler_whatever,
         }
         super().__init__(handler_map=handler_map)
 
     @staticmethod
-    def required_events() -> List[str]:
-        return [
+    def required_events() -> Set[str]:
+        return {
             'no_handler_for_this',
             'myeventname',
-        ]
+        }
 
     def _handler_whatever(
         self, event: Dict, metadata: EventMetadata
@@ -89,16 +90,16 @@ class MissingEventHandler(EventHandler):
 class EventHandlerWithRequiredEvent(EventHandler):
 
     def __init__(self) -> None:
-        handler_map = {
+        handler_map: HandlerMap = {
             'myrequiredevent': self._handler_whatever,
         }
         super().__init__(handler_map=handler_map)
 
     @staticmethod
-    def required_events() -> List[str]:
-        return [
+    def required_events() -> Set[str]:
+        return {
             'myrequiredevent',
-        ]
+        }
 
     def _handler_whatever(
         self, event: Dict, metadata: EventMetadata
@@ -316,8 +316,8 @@ class TestProfileHandler(unittest.TestCase):
         cls.processor.process(input_events)
 
     def test_profiling(self) -> None:
-        handler = self.__class__.handler
-        expected_df = self.__class__.expected
+        handler = self.__class__.handler  # type: ignore
+        expected_df = self.__class__.expected  # type: ignore
         result_df = handler.data.times
         assert_frame_equal(result_df, expected_df)
 
@@ -19,7 +19,6 @@ import argparse
 import os
 import sys
 import time
-from typing import Optional
 
 from tracetools_analysis.conversion import ctf
 
@@ -50,7 +49,7 @@ def parse_args():
 def convert(
     trace_directory: str,
     output_file_name: str = DEFAULT_CONVERT_FILE_NAME,
-) -> Optional[int]:
+) -> int:
     """
     Convert trace directory to a file.
 
@@ -71,6 +70,7 @@ def convert(
     time_diff = time.time() - start_time
     print(f'converted {count} events in {time_diff_to_str(time_diff)}')
    print(f'output written to: {output_file_path}')
+    return 0
 
 
 def main():
@@ -28,4 +28,4 @@ class DataModel():
 
     def print_data(self) -> None:
         """Print the data model."""
-        return None
+        raise NotImplementedError
@@ -52,5 +52,5 @@ class MemoryUsageDataModel(DataModel):
     def print_data(self) -> None:
         print('==================MEMORY USAGE DATA MODEL==================')
         tail = 20
-        print(f'Memory difference (tail={tail}):\n{self.times.tail(tail).to_string()}')
+        print(f'Memory difference (tail={tail}):\n{self.memory_diff.tail(tail).to_string()}')
         print('===========================================================')
@@ -14,6 +14,8 @@
 
 """Module for profile data model."""
 
+from typing import Optional
+
 import pandas as pd
 
 from . import DataModel
@@ -45,7 +47,7 @@ class ProfileDataModel(DataModel):
         tid: int,
         depth: int,
         function_name: str,
-        parent_name: str,
+        parent_name: Optional[str],
         start_timestamp: int,
         duration: int,
         actual_duration: int,
@@ -19,6 +19,7 @@ import pickle
 import sys
 from typing import Dict
 from typing import List
+from typing import Optional
 from typing import Tuple
 
 from tracetools_read.trace import is_trace_directory
@@ -30,7 +31,7 @@ from ..convert import DEFAULT_CONVERT_FILE_NAME
 def inspect_input_path(
     input_path: str,
     force_conversion: bool = False,
-) -> Tuple[str, bool]:
+) -> Tuple[Optional[str], bool]:
     """
     Check input path for a converted file or a trace directory.
 
@@ -75,7 +76,7 @@ def inspect_input_path(
                 print(
                     f'cannot find either a trace directory or a converted file: {input_directory}',
                     file=sys.stderr)
-                return None, None
+                return None, False
     else:
         converted_file_path = input_path
         if force_conversion:
@@ -91,12 +92,13 @@ def inspect_input_path(
 def convert_if_needed(
     input_path: str,
     force_conversion: bool = False,
-) -> str:
+) -> Optional[str]:
     """
     Inspect input path and convert trace directory to file if necessary.
 
     :param input_path: the path to a converted file or trace directory
     :param force_conversion: whether to re-create converted file even if it is found
+    :return: the path to the converted file, or `None` if it failed
     """
     converted_file_path, create_converted_file = inspect_input_path(input_path, force_conversion)
 
@@ -130,6 +132,9 @@ def load_file(
     else:
         file_path = input_path
 
+    if file_path is None:
+        raise RuntimeError(f'could not use input path: {input_path}')
+
     events = []
     with open(os.path.expanduser(file_path), 'rb') as f:
         p = pickle.Unpickler(f)
@@ -19,7 +19,6 @@ import argparse
 import os
 import sys
 import time
-from typing import Optional
 
 from tracetools_analysis.loading import load_file
 from tracetools_analysis.processor import Processor
@@ -54,7 +53,7 @@ def process(
     input_path: str,
     force_conversion: bool = False,
     hide_results: bool = False,
-) -> Optional[int]:
+) -> int:
     """
     Process converted trace file.
 
@@ -77,6 +76,7 @@ def process(
     if not hide_results:
         processor.print_data()
     print(f'processed {len(events)} events in {time_diff_to_str(time_diff)}')
+    return 0
 
 
 def main():
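The loading and processing hunks above share one convention: a function that can fail now returns `Optional[...]`, and its caller checks for `None` explicitly before using the value (as `load_file()` does by raising `RuntimeError`). Below is a rough, self-contained sketch of that caller-side guard with hypothetical names (`find_converted_file`, `load`); it is not the package's actual code.

from typing import Optional, Tuple


def find_converted_file(input_path: str) -> Tuple[Optional[str], bool]:
    # Hypothetical stand-in for inspect_input_path(): (path or None, create flag).
    if input_path.endswith('.converted'):
        return input_path, False
    return None, False


def load(input_path: str) -> bytes:
    file_path, _ = find_converted_file(input_path)
    # Narrow Optional[str] to str before use so mypy accepts the open() call.
    if file_path is None:
        raise RuntimeError(f'could not use input path: {input_path}')
    with open(file_path, 'rb') as f:
        return f.read()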
@@ -16,9 +16,11 @@
 
 from collections import defaultdict
 import sys
+from types import ModuleType
 from typing import Callable
 from typing import Dict
 from typing import List
+from typing import Optional
 from typing import Set
 from typing import Type
 from typing import Union
@@ -80,8 +82,9 @@ class EventMetadata():
         return self._tid
 
 
-HandlerMap = Dict[str, Callable[[DictEvent, EventMetadata], None]]
-HandlerMultimap = Dict[str, List[Callable[[DictEvent, EventMetadata], None]]]
+HandlerMethod = Callable[[DictEvent, EventMetadata], None]
+HandlerMap = Dict[str, HandlerMethod]
+HandlerMultimap = Dict[str, List[HandlerMethod]]
 
 
 class Dependant():
@@ -92,6 +95,12 @@ class Dependant():
     Dependencies are type-related only.
     """
 
+    def __init__(
+        self,
+        **kwargs,
+    ) -> None:
+        pass
+
     @staticmethod
     def dependencies() -> List[Type['Dependant']]:
         """
@@ -116,7 +125,7 @@ class EventHandler(Dependant):
         self,
         *,
         handler_map: HandlerMap,
-        data_model: DataModel = None,
+        data_model: Optional[DataModel] = None,
         **kwargs,
     ) -> None:
         """
@@ -130,7 +139,7 @@ class EventHandler(Dependant):
         assert all(required_name in handler_map.keys() for required_name in self.required_events())
         self._handler_map = handler_map
         self._data_model = data_model
-        self._processor = None
+        self._processor: Optional[Processor] = None
 
     @property
     def handler_map(self) -> HandlerMap:
@@ -138,12 +147,12 @@ class EventHandler(Dependant):
         return self._handler_map
 
     @property
-    def data(self) -> Union[DataModel, None]:
+    def data(self) -> DataModel:
         """Get the data model."""
-        return self._data_model
+        return self._data_model  # type: ignore
 
     @property
-    def processor(self) -> 'Processor':
+    def processor(self) -> Optional['Processor']:
         return self._processor
 
     @staticmethod
@@ -154,7 +163,7 @@ class EventHandler(Dependant):
         Without these events, the EventHandler would be invalid/useless. Inheriting classes can
         decide not to declare that they require specific events.
         """
-        return {}
+        return set()
 
     def register_processor(
         self,
@@ -261,12 +270,11 @@ class DependencySolver():
                 solution,
             )
             # If an instance of this type was given initially, use it instead
-            new_instance = None
-            if dep_type in initial_map:
-                new_instance = initial_map.get(dep_type)
+            dep_type_instance = initial_map.get(dep_type, None)
+            if dep_type_instance is not None:
+                solution.append(dep_type_instance)
             else:
-                new_instance = dep_type(**self._kwargs)
-            solution.append(new_instance)
+                solution.append(dep_type(**self._kwargs))
             visited.add(dep_type)
 
 
@@ -311,7 +319,7 @@ class Processor():
         :param handlers: the list of primary `EventHandler`s
         :param kwargs: the parameters to pass on to new instances
         """
-        return DependencySolver(*handlers, **kwargs).solve()
+        return DependencySolver(*handlers, **kwargs).solve()  # type: ignore
 
     @staticmethod
     def _get_handler_maps(
@@ -323,10 +331,10 @@ class Processor():
         :param handlers: the list of handlers
         :return: the merged multimap
         """
-        handler_multimap = defaultdict(list)
+        handler_multimap: HandlerMultimap = defaultdict(list)
         for handler in handlers:
-            for event_name, handler in handler.handler_map.items():
-                handler_multimap[event_name].append(handler)
+            for event_name, handler_method in handler.handler_map.items():
+                handler_multimap[event_name].append(handler_method)
         return handler_multimap
 
     def _register_with_handlers(
@@ -491,13 +499,13 @@ class AutoProcessor():
 
     @staticmethod
     def _get_applicable_event_handler_classes(
-        event_names: List[str],
-        handler_classes: List[Type[EventHandler]],
+        event_names: Set[str],
+        handler_classes: Set[Type[EventHandler]],
     ) -> Set[Type[EventHandler]]:
         """
-        Get applicable EventHandler subclasses for a list of event names.
+        Get applicable EventHandler subclasses for a set of event names.
 
-        :param event_names: the list of event names
+        :param event_names: the set of event names
         :return: a list of EventHandler subclasses for which requirements are met
         """
         return {
@@ -542,14 +550,19 @@ class AutoProcessor():
     @staticmethod
     def _import_event_handler_submodules(
         name: str = __name__,
-        recursive=True,
-    ):
-        """Force import of EventHandler submodules."""
+        recursive: bool = True,
+    ) -> Dict[str, ModuleType]:
+        """
+        Force import of EventHandler submodules.
+
+        :param name: the base module name
+        :param recursive: `True` if importing recursively, `False` otherwise
+        """
         import importlib
         import pkgutil
         package = importlib.import_module(name)
         results = {}
-        for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
+        for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):  # type: ignore #1422
            full_name = package.__name__ + '.' + name
            results[full_name] = importlib.import_module(full_name)
            if recursive and is_pkg:
@@ -570,10 +583,10 @@ class ProcessingProgressDisplay():
         :param processing_elements: the list of elements doing processing
         """
         self.__info_string = '[' + ', '.join(processing_elements) + ']'
-        self.__total_work = None
-        self.__progress_count = None
-        self.__rolling_count = None
-        self.__work_display_period = None
+        self.__total_work: int = 0
+        self.__progress_count: int = 0
+        self.__rolling_count: int = 0
+        self.__work_display_period: int = 0
 
     def set_work_total(
         self,
@@ -21,6 +21,7 @@ from tracetools_read import get_field
 
 from . import EventHandler
 from . import EventMetadata
+from . import HandlerMap
 from ..data_model.cpu_time import CpuTimeDataModel
 
 
@@ -37,7 +38,7 @@ class CpuTimeHandler(EventHandler):
     ) -> None:
         """Create a CpuTimeHandler."""
         # Link event to handling method
-        handler_map = {
+        handler_map: HandlerMap = {
             'sched_switch':
                 self._handle_sched_switch,
         }
@@ -57,6 +58,10 @@ class CpuTimeHandler(EventHandler):
             'sched_switch',
         }
 
+    @property
+    def data(self) -> CpuTimeDataModel:
+        return super().data  # type: ignore
+
     def _handle_sched_switch(
         self, event: Dict, metadata: EventMetadata
     ) -> None:
@@ -21,6 +21,7 @@ from tracetools_read import get_field
 
 from . import EventHandler
 from . import EventMetadata
+from . import HandlerMap
 from ..data_model.memory_usage import MemoryUsageDataModel
 
 
@@ -38,6 +39,10 @@ class MemoryUsageHandler(EventHandler):
             **kwargs,
         )
 
+    @property
+    def data(self) -> MemoryUsageDataModel:
+        return super().data  # type: ignore
+
     def _update(
         self,
         timestamp: int,
@@ -72,7 +77,7 @@ class UserspaceMemoryUsageHandler(MemoryUsageHandler):
         **kwargs,
     ) -> None:
         # Link event to handling method
-        handler_map = {
+        handler_map: HandlerMap = {
             'lttng_ust_libc:malloc':
                 self._handle_malloc,
             'lttng_ust_libc:calloc':
@@ -196,7 +201,7 @@ class KernelMemoryUsageHandler(MemoryUsageHandler):
         **kwargs,
     ) -> None:
         # Link event to handling method
-        handler_map = {
+        handler_map: HandlerMap = {
             'kmem_mm_page_alloc':
                 self._handle_malloc,
             'kmem_mm_page_free':
@@ -24,7 +24,7 @@ from tracetools_read import get_field
 
 from . import EventHandler
 from . import EventMetadata
+from . import HandlerMap
 from ..data_model.profile import ProfileDataModel
 
 
@@ -56,7 +56,7 @@ class ProfileHandler(EventHandler):
 
         :param address_to_func: the mapping from function address (`int` or hex `str`) to name
         """
-        handler_map = {
+        handler_map: HandlerMap = {
             'lttng_ust_cyg_profile_fast:func_entry':
                 self._handle_function_entry,
             'lttng_ust_cyg_profile_fast:func_exit':
@@ -95,6 +95,10 @@ class ProfileHandler(EventHandler):
             'sched_switch',
         }
 
+    @property
+    def data(self) -> ProfileDataModel:
+        return super().data  # type: ignore
+
     @staticmethod
     def addr_to_int(addr: Union[int, str]) -> int:
         """Transform an address into an `int` if it's a hex `str`."""
@@ -106,20 +110,22 @@ class ProfileHandler(EventHandler):
         timestamp = metadata.timestamp
         # If function(s) currently running stop(s) executing
         prev_tid = get_field(event, 'prev_tid')
-        if prev_tid in self._current_funcs:
+        prev_info_list = self._current_funcs.get(prev_tid, None)
+        if prev_info_list is not None:
             # Increment durations using last start timestamp
-            for info in self._current_funcs.get(prev_tid):
+            for info in prev_info_list:
                 last_start = info[2]
                 total_duration = info[3]
                 total_duration += timestamp - last_start
-                info[2] = None
+                info[2] = -1
                 info[3] = total_duration
         # If stopped function(s) start(s) executing again
         next_tid = get_field(event, 'next_tid')
-        if next_tid in self._current_funcs:
+        next_info_list = self._current_funcs.get(next_tid, None)
+        if next_info_list is not None:
             # Set last start timestamp to now
-            for info in self._current_funcs.get(next_tid):
-                assert info[2] is None
+            for info in next_info_list:
+                assert info[2] == -1
                 info[2] = timestamp
 
     def _handle_function_entry(
@@ -153,9 +159,9 @@ class ProfileHandler(EventHandler):
         self.data.add_duration(
             tid,
             function_depth,
-            function_name,
-            parent_name,
-            start_timestamp,
+            function_name,  # type: ignore
+            parent_name,  # type: ignore
+            start_timestamp,  # type: ignore
             duration,
             actual_duration,
         )
@@ -16,11 +16,13 @@
 
 from typing import Dict
 from typing import Set
+from typing import Tuple
 
 from tracetools_read import get_field
 
 from . import EventHandler
 from . import EventMetadata
+from . import HandlerMap
 from ..data_model.ros2 import Ros2DataModel
 
 
@@ -37,7 +39,7 @@ class Ros2Handler(EventHandler):
     ) -> None:
         """Create a Ros2Handler."""
         # Link a ROS trace event to its corresponding handling method
-        handler_map = {
+        handler_map: HandlerMap = {
             'ros2:rcl_init':
                 self._handle_rcl_init,
             'ros2:rcl_node_init':
@@ -74,7 +76,7 @@ class Ros2Handler(EventHandler):
         )
 
         # Temporary buffers
-        self._callback_instances = {}
+        self._callback_instances: Dict[int, Tuple[Dict, EventMetadata]] = {}
 
     @staticmethod
     def required_events() -> Set[str]:
@@ -82,6 +84,10 @@ class Ros2Handler(EventHandler):
             'ros2:rcl_init',
         }
 
+    @property
+    def data(self) -> Ros2DataModel:
+        return super().data  # type: ignore
+
     def _handle_rcl_init(
         self, event: Dict, metadata: EventMetadata,
     ) -> None:
@@ -207,8 +213,9 @@ class Ros2Handler(EventHandler):
     ) -> None:
         # Fetch from dict
         callback_object = get_field(event, 'callback')
-        (event_start, metadata_start) = self._callback_instances.get(callback_object)
-        if event_start is not None and metadata_start is not None:
+        callback_instance_data = self._callback_instances.get(callback_object)
+        if callback_instance_data is not None:
+            (event_start, metadata_start) = callback_instance_data
             del self._callback_instances[callback_object]
             duration = metadata.timestamp - metadata_start.timestamp
             is_intra_process = get_field(event_start, 'is_intra_process', raise_if_not_found=False)
@@ -16,6 +16,7 @@
 
 from datetime import datetime as dt
 from typing import List
+from typing import Optional
 from typing import Union
 
 from pandas import DataFrame
@@ -43,7 +44,7 @@ class DataModelUtil():
         self.__data = data_object.data if isinstance(data_object, EventHandler) else data_object
 
     @property
-    def data(self) -> Union[DataModel, None]:
+    def data(self) -> Optional[DataModel]:
         return self.__data
 
     @staticmethod
@@ -37,6 +37,10 @@ class CpuTimeDataModelUtil(DataModelUtil):
         """
         super().__init__(data_object)
 
+    @property
+    def data(self) -> CpuTimeDataModel:
+        return super().data  # type: ignore
+
     def get_time_per_thread(self) -> DataFrame:
         """Get a DataFrame of total duration for each thread."""
         return self.data.times.loc[:, ['tid', 'duration']].groupby(by='tid').sum()
@@ -16,6 +16,8 @@
 
 from collections import defaultdict
 from typing import Dict
+from typing import List
+from typing import Optional
 from typing import Union
 
 from pandas import DataFrame
@@ -66,12 +68,13 @@ class MemoryUsageDataModelUtil(DataModelUtil):
         """
         suffixes = ['B', 'KB', 'MB', 'GB', 'TB']
         suffixIndex = 0
-        while size > 1024 and suffixIndex < 4:
+        mem_size = float(size)
+        while mem_size > 1024.0 and suffixIndex < 4:
             # Increment the index of the suffix
             suffixIndex += 1
             # Apply the division
-            size = size / 1024.0
-        return f'{size:.{precision}f} {suffixes[suffixIndex]}'
+            mem_size = mem_size / 1024.0
+        return f'{mem_size:.{precision}f} {suffixes[suffixIndex]}'
 
     def get_max_memory_usage_per_tid(self) -> DataFrame:
         """
@@ -79,50 +82,61 @@ class MemoryUsageDataModelUtil(DataModelUtil):
 
         :return dataframe with maximum memory usage (userspace & kernel) per tid
         """
-        if self.data_ust is not None:
-            ust_memory_usage_dfs = self.get_absolute_userspace_memory_usage_by_tid()
+        tids_ust = None
+        tids_kernel = None
+        ust_memory_usage_dfs = self.get_absolute_userspace_memory_usage_by_tid()
+        if ust_memory_usage_dfs is not None:
             tids_ust = set(ust_memory_usage_dfs.keys())
-        if self.data_kernel is not None:
-            kernel_memory_usage_dfs = self.get_absolute_kernel_memory_usage_by_tid()
+        kernel_memory_usage_dfs = self.get_absolute_kernel_memory_usage_by_tid()
+        if kernel_memory_usage_dfs is not None:
             tids_kernel = set(kernel_memory_usage_dfs.keys())
         # Use only the userspace tid values if available, otherwise use the kernel tid values
-        tids = tids_ust if self.data_ust is not None else tids_kernel
+        tids = tids_ust or tids_kernel
+        # Should not happen, since it is checked in __init__
+        if tids is None:
+            raise RuntimeError('no data')
         data = [
             [
                 tid,
                 self.format_size(ust_memory_usage_dfs[tid]['memory_usage'].max(), precision=1)
-                if self.data_ust is not None
+                if ust_memory_usage_dfs is not None
+                and ust_memory_usage_dfs.get(tid) is not None
                 else None,
                 self.format_size(kernel_memory_usage_dfs[tid]['memory_usage'].max(), precision=1)
-                if self.data_kernel is not None and ust_memory_usage_dfs.get(tid) is not None
+                if kernel_memory_usage_dfs is not None
+                and kernel_memory_usage_dfs.get(tid) is not None
                 else None,
             ]
             for tid in tids
         ]
         return DataFrame(data, columns=['tid', 'max_memory_usage_ust', 'max_memory_usage_kernel'])
 
-    def get_absolute_userspace_memory_usage_by_tid(self) -> Dict[int, DataFrame]:
+    def get_absolute_userspace_memory_usage_by_tid(self) -> Optional[Dict[int, DataFrame]]:
         """
         Get absolute userspace memory usage over time per tid.
 
         :return (tid -> DataFrame of absolute memory usage over time)
         """
+        if self.data_ust is None:
+            return None
         return self._get_absolute_memory_usage_by_tid(self.data_ust)
 
-    def get_absolute_kernel_memory_usage_by_tid(self) -> Dict[int, DataFrame]:
+    def get_absolute_kernel_memory_usage_by_tid(self) -> Optional[Dict[int, DataFrame]]:
        """
        Get absolute kernel memory usage over time per tid.
 
        :return (tid -> DataFrame of absolute memory usage over time)
        """
+        if self.data_kernel is None:
+            return None
         return self._get_absolute_memory_usage_by_tid(self.data_kernel)
 
     def _get_absolute_memory_usage_by_tid(
         self,
         data_model: MemoryUsageDataModel,
     ) -> Dict[int, DataFrame]:
-        previous = defaultdict(int)
-        data = defaultdict(list)
+        previous: Dict[int, int] = defaultdict(int)
+        data: Dict[int, List[Dict[str, int]]] = defaultdict(list)
         for index, row in data_model.memory_diff.iterrows():
             timestamp = row['timestamp']
             tid = int(row['tid'])
@@ -41,6 +41,10 @@ class ProfileDataModelUtil(DataModelUtil):
         """
         super().__init__(data_object)
 
+    @property
+    def data(self) -> ProfileDataModel:
+        return super().data  # type: ignore
+
     def with_tid(
         self,
         tid: int,
@@ -54,12 +58,12 @@ class ProfileDataModelUtil(DataModelUtil):
     def get_call_tree(
         self,
         tid: int,
-    ) -> Dict[str, List[str]]:
+    ) -> Dict[str, Set[str]]:
         depth_names = self.with_tid(tid)[
             ['depth', 'function_name', 'parent_name']
         ].drop_duplicates()
         # print(depth_names.to_string())
-        tree = defaultdict(set)
+        tree: Dict[str, Set[str]] = defaultdict(set)
         for _, row in depth_names.iterrows():
             depth = row['depth']
             name = row['function_name']
@@ -18,6 +18,7 @@
 from typing import Any
 from typing import List
 from typing import Mapping
+from typing import Optional
 from typing import Union
 
 from pandas import DataFrame
@@ -41,6 +42,10 @@ class Ros2DataModelUtil(DataModelUtil):
         """
         super().__init__(data_object)
 
+    @property
+    def data(self) -> Ros2DataModel:
+        return super().data  # type: ignore
+
     def _prettify(
         self,
         original: str,
@@ -140,7 +145,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_node_tid_from_name(
         self,
         node_name: str,
-    ) -> Union[int, None]:
+    ) -> Optional[int]:
         """
         Get the tid corresponding to a node.
 
@@ -157,7 +162,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_node_names_from_tid(
         self,
         tid: str,
-    ) -> Union[List[str], None]:
+    ) -> Optional[List[str]]:
         """
         Get the list of node names corresponding to a tid.
 
@@ -171,7 +176,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_callback_owner_info(
         self,
         callback_obj: int,
-    ) -> Union[str, None]:
+    ) -> Optional[str]:
         """
         Get information about the owner of a callback.
 
@@ -207,9 +212,9 @@ class Ros2DataModelUtil(DataModelUtil):
             type_name = 'Client'
             info = self.get_client_handle_info(reference)
 
-        if info is not None:
-            info = f'{type_name} -- {self.format_info_dict(info)}'
-        return info
+        if info is None:
+            return None
+        return f'{type_name} -- {self.format_info_dict(info)}'
 
     def get_timer_handle_info(
         self,
@@ -245,6 +250,8 @@ class Ros2DataModelUtil(DataModelUtil):
 
         node_handle = self.data.publishers.loc[publisher_handle, 'node_handle']
         node_handle_info = self.get_node_handle_info(node_handle)
+        if node_handle_info is None:
+            return None
         topic_name = self.data.publishers.loc[publisher_handle, 'topic_name']
         publisher_info = {'topic': topic_name}
         return {**node_handle_info, **publisher_info}
@@ -252,7 +259,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_subscription_reference_info(
         self,
         subscription_reference: int,
-    ) -> Union[Mapping[str, Any], None]:
+    ) -> Optional[Mapping[str, Any]]:
         """
         Get information about a subscription handle.
 
@@ -297,6 +304,8 @@ class Ros2DataModelUtil(DataModelUtil):
 
         node_handle = subscriptions_info.loc[subscription_reference, 'node_handle']
         node_handle_info = self.get_node_handle_info(node_handle)
+        if node_handle_info is None:
+            return None
         topic_name = subscriptions_info.loc[subscription_reference, 'topic_name']
         subscription_info = {'topic': topic_name}
         return {**node_handle_info, **subscription_info}
@@ -304,7 +313,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_service_handle_info(
         self,
         service_handle: int,
-    ) -> Union[Mapping[str, Any], None]:
+    ) -> Optional[Mapping[str, Any]]:
        """
        Get information about a service handle.
 
@@ -316,6 +325,8 @@ class Ros2DataModelUtil(DataModelUtil):
 
         node_handle = self.data.services.loc[service_handle, 'node_handle']
         node_handle_info = self.get_node_handle_info(node_handle)
+        if node_handle_info is None:
+            return None
         service_name = self.data.services.loc[service_handle, 'service_name']
         service_info = {'service': service_name}
         return {**node_handle_info, **service_info}
@@ -323,7 +334,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_client_handle_info(
         self,
         client_handle: int,
-    ) -> Union[Mapping[str, Any], None]:
+    ) -> Optional[Mapping[str, Any]]:
         """
         Get information about a client handle.
 
@@ -335,6 +346,8 @@ class Ros2DataModelUtil(DataModelUtil):
 
         node_handle = self.data.clients.loc[client_handle, 'node_handle']
         node_handle_info = self.get_node_handle_info(node_handle)
+        if node_handle_info is None:
+            return None
         service_name = self.data.clients.loc[client_handle, 'service_name']
         service_info = {'service': service_name}
         return {**node_handle_info, **service_info}
@@ -342,7 +355,7 @@ class Ros2DataModelUtil(DataModelUtil):
     def get_node_handle_info(
        self,
        node_handle: int,
-    ) -> Union[Mapping[str, Any], None]:
+    ) -> Optional[Mapping[str, Any]]:
         """
         Get information about a node handle.
 
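A recurring addition in the handler and util classes above is a property that narrows the base class's `data` return type for mypy: the subclass re-declares `data` with its concrete data-model type and silences the checker on the `super()` call. The following condensed sketch of that idea uses placeholder class names and a dummy attribute; it is illustrative only, not the package's actual classes.

from typing import Optional


class DataModel:
    pass


class CpuTimeModel(DataModel):
    times = 'placeholder'  # stand-in for the real DataFrame attribute


class BaseHandler:

    def __init__(self, data_model: Optional[DataModel] = None) -> None:
        self._data_model = data_model

    @property
    def data(self) -> DataModel:
        # The base class promises a DataModel even though it stores an Optional.
        return self._data_model  # type: ignore


class CpuTimeHandler(BaseHandler):

    def __init__(self) -> None:
        super().__init__(data_model=CpuTimeModel())

    @property
    def data(self) -> CpuTimeModel:
        # Narrow the return type so callers get attribute access without casts.
        return super().data  # type: ignore


# Usage: mypy now sees CpuTimeHandler().data as CpuTimeModel, so .times resolves.
print(CpuTimeHandler().data.times)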