Compare commits

113 commits

Commit SHA1s:

e18ddb62d5, d2bd9bae04, 6550e878bc, d8c352c082, eb2b8f6867, 0dd466ea23, 8ec5b5f4f9, 6d3a0f58bd,
943bf2011a, 62653d6ca0, 760dca9598, d139a0f11b, fada2d0fc2, d9250ce036, 41110872ad, 7100baf0f4,
d4ff095a1b, f1e1500862, 42ec45f5da, 9c853b27fc, a6cc9fe188, 08ede42192, 5e2d56e826, c9012108ea,
1dfb747b62, f3a619af7d, e8c5ba4f85, a4537af5d2, 398b1d2158, 47d0be0878, 93bc08a070, 9a7f8c8444,
fdfcfb727b, bffbe1b10e, 132c159764, 7f1a3eec80, 9c535d1261, cd84f45e6a, 87bf652760, 40deb51ba4,
92544235d8, 653d731d79, 3fb2c697e4, 92930fc9cb, 637c61f8a3, eff2b94faa, c5645a440c, b3828f016e,
7d7bf6be42, dd68fcd533, 06cc38dd68, e2975f921c, 1c3d8a539a, dc6a0214c9, fa68df0dac, 92d6dbd286,
274373c79c, 6aaee559e9, 7befd3fcaa, 8941f95b57, 8cbd0dc645, 280bd99e1d, f47b2b5a7e, 19bbbc2fee,
0ac1bfa80d, 18a8ff28b5, b181a68b0a, 5a6fb7441a, 34514d3610, d942a2ef55, ec6362ad89, d0c4a8b0db,
3e0acc9a95, f9ef14e058, 3ed8f0ed1d, e73296b34a, d77a16db5a, 3431b4b7b2, 44731dfadb, c874d59f74,
ee5258f4a9, c12fc7a5a6, fcdaff9931, 2242b8cbed, 2091bf0508, 8c80e4dffe, 673ba38482, 0b1c3619dd,
21f98cc0af, fd4f5e8140, 7bb3be8407, df5c4c9fd3, b1d81893b6, a510cd8b4c, f6b0940775, 233c977019,
0a51492ea7, ce7db8bcfe, ba69f41f94, 8c0a8d13b6, e33b1a2561, f47ec5af5c, 10310dbd8c, 4b9279c1db,
38da641ae4, dabfdf3e46, fe50e1d13d, 681d1b5460, 72402f38fa, 18e6c49f1c, 6863340afa, 40bf4abeda,
485ff6e31c
59 changed files with 2630 additions and 458 deletions
13  .github/workflows/mirror-rolling-to-master.yaml  (vendored, new file)

@@ -0,0 +1,13 @@
name: Mirror rolling to master

on:
  push:
    branches: [ rolling ]

jobs:
  mirror-to-master:
    runs-on: ubuntu-latest
    steps:
      - uses: zofrex/mirror-branch@v1
        with:
          target-branch: master
53  .github/workflows/test.yml  (vendored, new file)

@@ -0,0 +1,53 @@
name: Test
on:
  pull_request:
  push:
    branches:
      - rolling
  schedule:
    - cron: "0 5 * * *"
jobs:
  build-and-test:
    runs-on: ubuntu-latest
    container:
      image: ubuntu:24.04
    continue-on-error: ${{ matrix.build-type == 'binary' }}
    strategy:
      matrix:
        distro:
          - rolling
        build-type:
          - binary
          - source
    env:
      ROS2_REPOS_FILE_URL: 'https://raw.githubusercontent.com/ros2/ros2/${{ matrix.distro }}/ros2.repos'
    steps:
      - uses: actions/checkout@v3
      - uses: ros-tooling/setup-ros@master
        with:
          required-ros-distributions: ${{ matrix.build-type == 'binary' && matrix.distro || '' }}
          use-ros2-testing: true
      - uses: ros-tooling/action-ros-ci@master
        with:
          package-name: ros2trace_analysis tracetools_analysis
          target-ros2-distro: ${{ matrix.distro }}
          vcs-repo-file-url: ${{ matrix.build-type == 'source' && env.ROS2_REPOS_FILE_URL || '' }}
          colcon-defaults: |
            {
              "build": {
                "mixin": [
                  "coverage-pytest"
                ]
              },
              "test": {
                "mixin": [
                  "coverage-pytest"
                ],
                "executor": "sequential",
                "retest-until-pass": 2,
                "pytest-args": ["-m", "not xfail"]
              }
            }
      - uses: codecov/codecov-action@v3
        with:
          files: ros_ws/coveragepy/.coverage
@@ -1,55 +0,0 @@
variables:
  DOCKER_DRIVER: overlay2
  PACKAGES_LIST: tracetools_analysis ros2trace_analysis
  BASE_IMAGE_ID: registry.gitlab.com/micro-ros/ros_tracing/ci_base
  DISTRO: foxy
  ROS2TRACING_BRANCH: master

stages:
  - build_test
  - report

.global_artifacts: &global_artifacts
  artifacts:
    paths:
      - install
      - build/*/test_results/*/*.xunit.xml
      - build/*/pytest.xml
    reports:
      junit:
        - build/*/test_results/*/*.xunit.xml
        - build/*/pytest.xml

before_script:
  - . /root/ws/install/local_setup.sh
  - python3 get_branch.py --check
  - git clone https://gitlab.com/micro-ROS/ros_tracing/ros2_tracing.git --branch $(python3 get_branch.py)

build:
  stage: build_test
  image: $BASE_IMAGE_ID:$DISTRO
  script:
    - colcon build --symlink-install --event-handlers console_cohesion+ --packages-up-to $PACKAGES_LIST
    - colcon test --event-handlers console_cohesion+ --packages-select $PACKAGES_LIST
    - colcon test-result --all
  <<: *global_artifacts

coverage:
  stage: report
  image: $BASE_IMAGE_ID:$DISTRO
  script:
    - colcon build --symlink-install --event-handlers console_cohesion+ --packages-up-to $PACKAGES_LIST --mixin coverage-pytest --cmake-args -DBUILD_TESTING=ON --no-warn-unused-cli
    - colcon test --event-handlers console_cohesion+ --packages-select $PACKAGES_LIST --mixin coverage-pytest
    - colcon test-result --all
    - bash <(curl -s https://codecov.io/bash)
    - colcon coveragepy-result --packages-select $PACKAGES_LIST --verbose --coverage-report-args -m
  allow_failure: true
  <<: *global_artifacts

trigger_gen_docs:
  stage: report
  only:
    refs:
      - master
      - foxy
  trigger: micro-ROS/ros_tracing/tracetools_analysis-api
72  README.md

@@ -1,66 +1,82 @@
 # tracetools_analysis

-[](https://gitlab.com/micro-ROS/ros_tracing/tracetools_analysis/commits/master)
+<!-- [](https://github.com/ros-tracing/tracetools_analysis/actions/workflows/test.yml) -->
+[](https://codecov.io/gh/ros-tracing/tracetools_analysis)

-Analysis tools for [ROS 2 tracing](https://gitlab.com/micro-ROS/ros_tracing/ros2_tracing).
+Analysis tools for [`ros2_tracing`](https://github.com/ros2/ros2_tracing).
+
+**Note**: make sure to use the right branch, depending on the ROS 2 distro: [use `rolling` for Rolling, `humble` for Humble, etc.](https://docs.ros.org/en/rolling/The-ROS2-Project/Contributing/Developer-Guide.html)

 ## Trace analysis

-After generating a trace (see [`ros2_tracing`](https://gitlab.com/micro-ROS/ros_tracing/ros2_tracing#tracing)), we can analyze it to extract useful execution data.
+After generating a trace (see [`ros2_tracing`](https://github.com/ros2/ros2_tracing#tracing)), we can analyze it to extract useful execution data.

 ### Commands

-Since CTF traces (the output format of the [LTTng](https://lttng.org/) tracer) are very slow to read, we first convert them into a single file which can be read much faster.
+Then we can process a trace to create a data model which could be queried for analysis.

-```
-$ ros2 trace-analysis convert /path/to/trace/directory
-```
-
-Then we can process it to create a data model which could be queried for analysis.
-
-```
+```shell
 $ ros2 trace-analysis process /path/to/trace/directory
 ```

-### Jupyter
+Note that this simply outputs lightly-processed ROS 2 trace data which is split into a number of pandas `DataFrame`s.
+This can be used to quickly check the trace data.
+For real data processing/trace analysis, see [*Analysis*](#analysis).

-The last command will process and output the raw data models, but to actually display results, process and analyze using a Jupyter Notebook.
+Since CTF traces (the output format of the [LTTng](https://lttng.org/) tracer) are very slow to read, the trace is first converted into a single file which can be read much faster and can be re-used to run many analyses.
+This is done automatically, but if the trace changed after the file was generated, it can be re-generated using the `--force-conversion` option.
+Run with `--help` to see all options.
+
+### Analysis
+
+The command above will process and output raw data models.
+We need to actually analyze the data and display some results.
+We recommend doing this in a Jupyter Notebook, but you can do this in a normal Python file.

 ```shell
 $ jupyter notebook
 ```

-Then navigate to the [`analysis/`](./tracetools_analysis/analysis/) directory, and select one of the provided notebooks, or create your own!
+Navigate to the [`analysis/`](./tracetools_analysis/analysis/) directory, and select one of the provided notebooks, or create your own!

 For example:

 ```python
-from tracetools_analysis import loading
-from tracetools_analysis import processor
-from tracetools_analysis import utils
+from tracetools_analysis.loading import load_file
+from tracetools_analysis.processor import Processor
+from tracetools_analysis.processor.cpu_time import CpuTimeHandler
+from tracetools_analysis.processor.ros2 import Ros2Handler
+from tracetools_analysis.utils.cpu_time import CpuTimeDataModelUtil
+from tracetools_analysis.utils.ros2 import Ros2DataModelUtil

 # Load trace directory or converted trace file
-events = loading.load_file('/path/to/trace/or/converted/file')
+events = load_file('/path/to/trace/or/converted/file')

 # Process
-ros2_handler = processor.Ros2Handler()
-cpu_handler = processor.CpuTimeHandler()
+ros2_handler = Ros2Handler()
+cpu_handler = CpuTimeHandler()

-processor.Processor(ros2_handler, cpu_handler).process(events)
+Processor(ros2_handler, cpu_handler).process(events)

 # Use data model utils to extract information
-ros2_util = utils.ros2.Ros2DataModelUtil(ros2_handler.data)
-cpu_util = utils.cpu_time.CpuTimeDataModelUtil(cpu_handler.data)
+ros2_util = Ros2DataModelUtil(ros2_handler.data)
+cpu_util = CpuTimeDataModelUtil(cpu_handler.data)

-callback_durations = ros2_util.get_callback_durations()
+callback_symbols = ros2_util.get_callback_symbols()
+callback_object, callback_symbol = list(callback_symbols.items())[0]
+callback_durations = ros2_util.get_callback_durations(callback_object)
 time_per_thread = cpu_util.get_time_per_thread()
 # ...

-# Display, e.g. with bokeh or matplotlib
+# Display, e.g., with bokeh, matplotlib, print, etc.
+print(callback_symbol)
+print(callback_durations)
+
+print(time_per_thread)
 # ...
 ```

-Note: bokeh has to be installed manually, e.g. with `pip`:
+Note: bokeh has to be installed manually, e.g., with `pip`:

 ```shell
 $ pip3 install bokeh

@@ -68,7 +84,7 @@ $ pip3 install bokeh

 ## Design

-See the [`ros2_tracing` design document](https://gitlab.com/micro-ROS/ros_tracing/ros2_tracing/blob/master/doc/design_ros_2.md), especially the [*Goals and requirements*](https://gitlab.com/micro-ROS/ros_tracing/ros2_tracing/blob/master/doc/design_ros_2.md#goals-and-requirements) and [*Analysis*](https://gitlab.com/micro-ROS/ros_tracing/ros2_tracing/blob/master/doc/design_ros_2.md#analysis) sections.
+See the [`ros2_tracing` design document](https://github.com/ros2/ros2_tracing/blob/rolling/doc/design_ros_2.md), especially the [*Goals and requirements*](https://github.com/ros2/ros2_tracing/blob/rolling/doc/design_ros_2.md#goals-and-requirements) and [*Analysis*](https://github.com/ros2/ros2_tracing/blob/rolling/doc/design_ros_2.md#analysis) sections.

 ## Packages

@@ -80,4 +96,4 @@ Package containing a `ros2cli` extension to perform trace analysis.

 Package containing tools for analyzing trace data.

-See the [API documentation](https://micro-ros.gitlab.io/ros_tracing/tracetools_analysis-api/).
+See the [API documentation](https://docs.ros.org/en/rolling/p/tracetools_analysis/).
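To tie the updated README sections together, here is a minimal end-to-end sketch of tracing and then processing a trace from the command line. It is assembled from the commands shown in the README and in the new CLI test added later in this changeset; the session name and output path are illustrative, and `--force-conversion` is only needed if the trace changed after the converted file was generated.

```shell
# Record a trace (see ros2_tracing), run the system, then stop tracing
$ ros2 trace start my-session --path ~/.ros/tracing
$ ros2 trace stop my-session

# Process the trace; conversion to a single, faster-to-read file happens automatically
$ ros2 trace-analysis process ~/.ros/tracing/my-session

# Re-generate the converted file if the trace was modified afterwards
$ ros2 trace-analysis process --force-conversion ~/.ros/tracing/my-session
```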
2  codecov.yml  (new file)

@@ -0,0 +1,2 @@
fixes:
  - "/builds/ros-tracing/tracetools_analysis/::"
119  get_branch.py

@@ -1,119 +0,0 @@
# Copyright 2020 Christophe Bedard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Get ros2_tracing branch name from the last commit or a default value."""

import argparse
import os
import sys
from typing import List
from typing import Optional


ENV_DEFAULT_BRANCH = 'ROS2TRACING_BRANCH'
ENV_COMMIT_DESCRIPTION = 'CI_COMMIT_DESCRIPTION'
ROS2_TRACING_BRANCH_TRAILER_TOKEN = 'Ros2-tracing-branch'


def add_args(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        '-c', '--check',
        action='store_true',
        default=False,
        help='only process and print resulting branch in a verbose way',
    )


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description='Extract name of the (optional) ros2_tracing branch to be used for CI',
    )
    add_args(parser)
    return parser.parse_args()


def get_trailer_value(
    trailer_name: str,
    commit_description: str,
    check: bool = False,
) -> Optional[str]:
    # Get trailer line
    trailer_lines = [
        line for line in commit_description.split('\n') if trailer_name in line
    ]
    if len(trailer_lines) == 0:
        if check:
            print(f'could not find any trailer lines for: \'{trailer_name}\'')
        return None
    elif len(trailer_lines) > 1:
        if check:
            print(
                f'found more than one trailer lines for: \'{trailer_name}\' '
                '(will use the first one)'
            )

    # Extract value
    line = trailer_lines[0]
    if not (trailer_name + ':') in line:
        if check:
            print(f'could not find: \'{trailer_name}:\'')
        return None
    key_value = line.split(':')
    if len(key_value) != 2:
        if check:
            print(f'misformed trailer line: \'{key_value}\'')
        return None
    value = key_value[1].strip()
    if len(value) == 0:
        if check:
            print(f'misformed trailer value: \'{value}\'')
        return None

    return value


def main() -> int:
    args = parse_args()
    check = args.check

    # Get default value
    default_branch = os.environ.get(ENV_DEFAULT_BRANCH, None)
    if default_branch is None:
        if check:
            print(f'could not get environment variable: \'{ENV_DEFAULT_BRANCH}\'')
        return 1

    # Get commit description
    commit_description = os.environ.get(ENV_COMMIT_DESCRIPTION, None)
    if commit_description is None:
        if check:
            print(f'could not get environment variable: \'{ENV_COMMIT_DESCRIPTION}\'')
        return None

    # Get value
    branch = get_trailer_value(
        ROS2_TRACING_BRANCH_TRAILER_TOKEN,
        commit_description,
        check,
    )

    # Print value
    prefix = 'ros2_tracing branch: ' if check else ''
    print(prefix + (branch or default_branch))

    return 0


if __name__ == '__main__':
    sys.exit(main())
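The removed `get_branch.py` script selected which `ros2_tracing` branch the old GitLab CI would clone by reading a commit-message trailer, falling back to the `ROS2TRACING_BRANCH` variable. A rough sketch of how it was driven, with an illustrative branch name and the environment variables set by hand instead of by GitLab:

```shell
# Opt into a specific ros2_tracing branch via a commit trailer (branch name is illustrative)
$ git commit -m "Add new analysis" -m "Ros2-tracing-branch: my-feature-branch"

# In CI, the script printed the trailer value, or the default branch if no trailer was found
$ ROS2TRACING_BRANCH=master \
  CI_COMMIT_DESCRIPTION="Ros2-tracing-branch: my-feature-branch" \
  python3 get_branch.py
my-feature-branch
```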
3  pytest.ini  (new file)

@@ -0,0 +1,3 @@
[pytest]
junit_family=xunit2
@@ -2,6 +2,12 @@
 Changelog for package ros2trace_analysis
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

+3.0.0 (2022-01-21)
+------------------
+* Add 'process --convert-only' option
+* Deprecate 'convert' verb since it is just an implementation detail
+* Contributors: Christophe Bedard
+
 0.2.2 (2019-11-19)
 ------------------
 * Add flag for hiding processing results with the process verb
@@ -2,10 +2,13 @@
 <?xml-model href="http://download.ros.org/schema/package_format2.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
 <package format="2">
   <name>ros2trace_analysis</name>
-  <version>1.0.1</version>
+  <version>3.1.0</version>
   <description>The trace-analysis command for ROS 2 command line tools.</description>
   <maintainer email="bedard.christophe@gmail.com">Christophe Bedard</maintainer>
   <license>Apache 2.0</license>
+  <url type="website">https://index.ros.org/p/ros2trace_analysis/</url>
+  <url type="repository">https://github.com/ros-tracing/tracetools_analysis</url>
+  <url type="bugtracker">https://github.com/ros-tracing/tracetools_analysis/issues</url>
   <author email="christophe.bedard@apex.ai">Christophe Bedard</author>

   <depend>ros2cli</depend>
@@ -18,12 +18,14 @@ from tracetools_analysis.convert import convert


 class ConvertVerb(VerbExtension):
-    """Convert trace data to a file."""
+    """Convert trace data to a file. DEPRECATED: use the 'process' verb directly."""

     def add_arguments(self, parser, cli_name):
         add_args(parser)

     def main(self, *, args):
+        import warnings
+        warnings.warn("'convert' is deprecated, use 'process' directly instead", stacklevel=2)
         return convert(
             args.trace_directory,
             args.output_file_name,
@@ -18,7 +18,7 @@ from tracetools_analysis.process import process


 class ProcessVerb(VerbExtension):
-    """Process a file converted from a trace directory and output model data."""
+    """Process ROS 2 trace data and output model data."""

     def add_arguments(self, parser, cli_name):
         add_args(parser)

@@ -28,4 +28,5 @@ class ProcessVerb(VerbExtension):
             args.input_path,
             args.force_conversion,
             args.hide_results,
+            args.convert_only,
         )
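Together with the deprecation of the `convert` verb above, the intended command-line usage can be sketched as follows; this is inferred from the added `convert_only` argument and the changelog entries, not copied from released documentation.

```shell
# Still works, but now emits a deprecation warning
$ ros2 trace-analysis convert /path/to/trace/directory

# Equivalent going forward: convert without running the analysis step
$ ros2 trace-analysis process --convert-only /path/to/trace/directory
```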
@@ -5,7 +5,7 @@ package_name = 'ros2trace_analysis'

 setup(
     name=package_name,
-    version='1.0.1',
+    version='3.1.0',
     packages=find_packages(exclude=['test']),
     data_files=[
         ('share/' + package_name, ['package.xml']),

@@ -22,7 +22,7 @@ setup(
     ),
     author='Christophe Bedard',
     author_email='christophe.bedard@apex.ai',
-    url='https://gitlab.com/micro-ROS/ros_tracing/tracetools_analysis',
+    url='https://github.com/ros-tracing/tracetools_analysis',
     keywords=[],
     description='The trace-analysis command for ROS 2 command line tools.',
     long_description=(
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from ament_flake8.main import main
+from ament_flake8.main import main_with_errors
 import pytest


 @pytest.mark.flake8
 @pytest.mark.linter
 def test_flake8():
-    rc = main(argv=[])
-    assert rc == 0, 'Found errors'
+    rc, errors = main_with_errors(argv=[])
+    assert rc == 0, \
+        'Found %d code style errors / warnings:\n' % len(errors) + \
+        '\n'.join(errors)
4  test_ros2trace_analysis/.coveragerc  (new file)

@@ -0,0 +1,4 @@
[run]
omit =
    setup.py
    test/*
3  test_ros2trace_analysis/.gitignore  (vendored, new file)

@@ -0,0 +1,3 @@
*~
*.pyc
9  test_ros2trace_analysis/CHANGELOG.rst  (new file)

@@ -0,0 +1,9 @@
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Changelog for package test_ros2trace_analysis
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

3.1.0 (2024-06-15)
------------------
* Fix test_ros2trace_analysis package version (`#26 <https://github.com/ros-tracing/tracetools_analysis/issues/26>`_)
* Use tracepoint names from tracetools_trace and add tests (`#25 <https://github.com/ros-tracing/tracetools_analysis/issues/25>`_)
* Contributors: Christophe Bedard
32  test_ros2trace_analysis/package.xml  (new file)

@@ -0,0 +1,32 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
  <name>test_ros2trace_analysis</name>
  <version>3.1.0</version>
  <description>Tests for the ros2trace_analysis package.</description>
  <maintainer email="bedard.christophe@gmail.com">Christophe Bedard</maintainer>
  <license>Apache 2.0</license>
  <url type="website">https://index.ros.org/p/test_ros2trace_analysis/</url>
  <url type="repository">https://github.com/ros-tracing/tracetools_analysis</url>
  <url type="bugtracker">https://github.com/ros-tracing/tracetools_analysis/issues</url>
  <author email="bedard.christophe@gmail.com">Christophe Bedard</author>

  <test_depend>ament_copyright</test_depend>
  <test_depend>ament_flake8</test_depend>
  <test_depend>ament_mypy</test_depend>
  <test_depend>ament_pep257</test_depend>
  <test_depend>ament_xmllint</test_depend>
  <test_depend>launch</test_depend>
  <test_depend>launch_ros</test_depend>
  <test_depend>python3-pytest</test_depend>
  <test_depend>ros2run</test_depend>
  <test_depend>ros2trace</test_depend>
  <test_depend>ros2trace_analysis</test_depend>
  <test_depend>test_tracetools</test_depend>
  <test_depend>tracetools</test_depend>
  <test_depend>tracetools_trace</test_depend>

  <export>
    <build_type>ament_python</build_type>
  </export>
</package>
0  test_ros2trace_analysis/resource/test_ros2trace_analysis  (new file)
26  test_ros2trace_analysis/setup.py  (new file)

@@ -0,0 +1,26 @@
from setuptools import find_packages
from setuptools import setup

package_name = 'test_ros2trace_analysis'

setup(
    name=package_name,
    version='3.1.0',
    packages=find_packages(exclude=['test']),
    data_files=[
        ('share/' + package_name, ['package.xml']),
        ('share/ament_index/resource_index/packages',
            ['resource/' + package_name]),
    ],
    install_requires=['setuptools'],
    zip_safe=True,
    maintainer='Christophe Bedard',
    maintainer_email='bedard.christophe@gmail.com',
    author='Christophe Bedard',
    author_email='bedard.christophe@gmail.com',
    url='https://github.com/ros-tracing/tracetools_analysis',
    keywords=[],
    description='Tests for the ros2trace_analysis package.',
    license='Apache 2.0',
    tests_require=['pytest'],
)
23  test_ros2trace_analysis/test/test_copyright.py  (new file)

@@ -0,0 +1,23 @@
# Copyright 2017 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ament_copyright.main import main
import pytest


@pytest.mark.copyright
@pytest.mark.linter
def test_copyright():
    rc = main(argv=['.', 'test'])
    assert rc == 0, 'Found errors'
25  test_ros2trace_analysis/test/test_flake8.py  (new file)

@@ -0,0 +1,25 @@
# Copyright 2017 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ament_flake8.main import main_with_errors
import pytest


@pytest.mark.flake8
@pytest.mark.linter
def test_flake8():
    rc, errors = main_with_errors(argv=[])
    assert rc == 0, \
        'Found %d code style errors / warnings:\n' % len(errors) + \
        '\n'.join(errors)
22  test_ros2trace_analysis/test/test_mypy.py  (new file)

@@ -0,0 +1,22 @@
# Copyright 2019 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ament_mypy.main import main
import pytest


@pytest.mark.mypy
@pytest.mark.linter
def test_mypy():
    assert main(argv=[]) == 0, 'Found errors'
23  test_ros2trace_analysis/test/test_pep257.py  (new file)

@@ -0,0 +1,23 @@
# Copyright 2017 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ament_pep257.main import main
import pytest


@pytest.mark.linter
@pytest.mark.pep257
def test_pep257():
    rc = main(argv=[])
    assert rc == 0, 'Found code style errors / warnings'
@@ -0,0 +1,175 @@
# Copyright 2024 Apex.AI, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from pathlib import Path
import shutil
import subprocess
import tempfile
from typing import Dict
from typing import List
from typing import Optional
import unittest

from launch import LaunchDescription
from launch import LaunchService
from launch_ros.actions import Node
from tracetools_trace.tools.lttng import is_lttng_installed


def are_tracepoints_included() -> bool:
    """
    Check if tracing instrumentation is enabled and if tracepoints are included.

    :return: True if tracepoints are included, False otherwise
    """
    if not is_lttng_installed():
        return False
    process = subprocess.run(
        ['ros2', 'run', 'tracetools', 'status'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding='utf-8',
    )
    return 0 == process.returncode


@unittest.skipIf(not is_lttng_installed(minimum_version='2.9.0'), 'LTTng is required')
class TestROS2TraceAnalysisCLI(unittest.TestCase):

    def __init__(self, *args) -> None:
        super().__init__(
            *args,
        )

    def create_test_tmpdir(self, test_name: str) -> str:
        prefix = self.__class__.__name__ + '__' + test_name
        return tempfile.mkdtemp(prefix=prefix)

    def run_command(
        self,
        args: List[str],
        *,
        env: Optional[Dict[str, str]] = None,
    ) -> subprocess.Popen:
        print('=>running:', args)
        process_env = os.environ.copy()
        process_env['PYTHONUNBUFFERED'] = '1'
        if env:
            process_env.update(env)
        return subprocess.Popen(
            args,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding='utf-8',
            env=process_env,
        )

    def wait_and_print_command_output(
        self,
        process: subprocess.Popen,
    ) -> int:
        stdout, stderr = process.communicate()
        stdout = stdout.strip(' \r\n\t')
        stderr = stderr.strip(' \r\n\t')
        print('=>stdout:\n' + stdout)
        print('=>stderr:\n' + stderr)
        return process.wait()

    def run_command_and_wait(
        self,
        args: List[str],
        *,
        env: Optional[Dict[str, str]] = None,
    ) -> int:
        process = self.run_command(args, env=env)
        return self.wait_and_print_command_output(process)

    def run_nodes(self) -> None:
        nodes = [
            Node(
                package='test_tracetools',
                executable='test_ping',
                output='screen',
            ),
            Node(
                package='test_tracetools',
                executable='test_pong',
                output='screen',
            ),
        ]
        ld = LaunchDescription(nodes)
        ls = LaunchService()
        ls.include_launch_description(ld)
        exit_code = ls.run()
        self.assertEqual(0, exit_code)

    def test_process_bad_input_path(self) -> None:
        tmpdir = self.create_test_tmpdir('test_process_bad_input_path')

        # No input path
        ret = self.run_command_and_wait(['ros2', 'trace-analysis', 'process'])
        self.assertEqual(2, ret)

        # Does not exist
        ret = self.run_command_and_wait(['ros2', 'trace-analysis', 'process', ''])
        self.assertEqual(1, ret)
        fake_input = os.path.join(tmpdir, 'doesnt_exist')
        ret = self.run_command_and_wait(['ros2', 'trace-analysis', 'process', fake_input])
        self.assertEqual(1, ret)

        # Exists but empty
        empty_input = os.path.join(tmpdir, 'empty')
        os.mkdir(empty_input)
        ret = self.run_command_and_wait(['ros2', 'trace-analysis', 'process', empty_input])
        self.assertEqual(1, ret)

        # Exists but converted file empty
        empty_converted_file = os.path.join(empty_input, 'converted')
        Path(empty_converted_file).touch()
        ret = self.run_command_and_wait(['ros2', 'trace-analysis', 'process', empty_input])
        self.assertEqual(1, ret)

        shutil.rmtree(tmpdir)

    @unittest.skipIf(not are_tracepoints_included(), 'tracepoints are required')
    def test_process(self) -> None:
        tmpdir = self.create_test_tmpdir('test_process')
        session_name = 'test_process'

        # Run and trace nodes
        ret = self.run_command_and_wait(
            [
                'ros2', 'trace',
                'start', session_name,
                '--path', tmpdir,
            ],
        )
        self.assertEqual(0, ret)
        trace_dir = os.path.join(tmpdir, session_name)
        self.run_nodes()
        ret = self.run_command_and_wait(['ros2', 'trace', 'stop', session_name])
        self.assertEqual(0, ret)

        # Process trace
        ret = self.run_command_and_wait(['ros2', 'trace-analysis', 'process', trace_dir])
        self.assertEqual(0, ret)

        # Check that converted file exists and isn't empty
        converted_file = os.path.join(trace_dir, 'converted')
        self.assertTrue(os.path.isfile(converted_file))
        self.assertGreater(os.path.getsize(converted_file), 0)

        shutil.rmtree(tmpdir)
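One plausible way to build and run the new test package locally in a colcon workspace; the colcon invocations mirror the ones in the removed GitLab CI configuration above, and the exact flags are illustrative rather than prescribed by this changeset.

```shell
$ colcon build --packages-up-to test_ros2trace_analysis
$ colcon test --packages-select test_ros2trace_analysis
$ colcon test-result --all
```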
23  test_ros2trace_analysis/test/test_xmllint.py  (new file)

@@ -0,0 +1,23 @@
# Copyright 2019 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ament_xmllint.main import main
import pytest


@pytest.mark.linter
@pytest.mark.xmllint
def test_xmllint():
    rc = main(argv=[])
    assert rc == 0, 'Found errors'
@@ -2,6 +2,40 @@
 Changelog for package tracetools_analysis
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

+3.1.0 (2024-06-15)
+------------------
+* Use tracepoint names from tracetools_trace and add tests (`#25 <https://github.com/ros-tracing/tracetools_analysis/issues/25>`_)
+* Use underscores in setup.cfg (`#21 <https://github.com/ros-tracing/tracetools_analysis/issues/21>`_)
+* Skip TestDataModelUtil.test_convert_time_columns if pandas < 2.2.0 (`#20 <https://github.com/ros-tracing/tracetools_analysis/issues/20>`_)
+* Fix warnings when using mypy>=1.8.0 (`#16 <https://github.com/ros-tracing/tracetools_analysis/issues/16>`_)
+* Support traces with multiple callbacks for same pointer (`#13 <https://github.com/ros-tracing/tracetools_analysis/issues/13>`_) (`#15 <https://github.com/ros-tracing/tracetools_analysis/issues/15>`_)
+* Update path to ros2_tracing in notebooks (`#8 <https://github.com/ros-tracing/tracetools_analysis/issues/8>`_)
+* Refactored for compatibility with Bokeh 3.2.0 (`#7 <https://github.com/ros-tracing/tracetools_analysis/issues/7>`_)
+* Fix mypy errors (`#4 <https://github.com/ros-tracing/tracetools_analysis/issues/4>`_)
+* Contributors: Christophe Bedard, Oren Bell
+
+3.0.0 (2022-01-21)
+------------------
+* Update context_fields option name in profile example launch file
+* Fix both rcl and rmw subscriptions being added to the rcl dataframe
+* Support rmw pub/sub init and take instrumentation
+* Support publishing instrumentation
+* Change 'input_path' arg help message wording
+* Add 'process --convert-only' option
+* Deprecate 'convert' verb since it is just an implementation detail
+* Simplify jupyter notebooks and add way to use Debian packages
+* Contributors: Christophe Bedard
+
+2.0.0 (2021-03-31)
+------------------
+* Set callback_instances' timestamp & duration cols to datetime/timedelta
+* Improve performance by using lists of dicts as intermediate storage & converting to dataframes at the end
+* Update callback_duration notebook and pingpong sample data
+* Support instrumentation for linking a timer to a node
+* Disable kernel tracing for pingpong example launchfile
+* Support lifecycle node state transition instrumentation
+* Contributors: Christophe Bedard
+
 1.0.0 (2020-06-02)
 ------------------
 * Add sphinx documentation for tracetools_analysis
File diff suppressed because one or more lines are too long
167  tracetools_analysis/analysis/lifecycle_states.ipynb  (new file)

@@ -0,0 +1,167 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Lifecycle node states\n",
    "#\n",
    "# Get trace data using the provided launch file:\n",
    "# $ ros2 launch tracetools_analysis lifecycle_states.launch.py"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "path = '~/.ros/tracing/lifecycle-node-state/'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "# Add paths to tracetools_analysis and tracetools_read.\n",
    "# There are two options:\n",
    "# 1. from source, assuming a workspace with:\n",
    "#   src/tracetools_analysis/\n",
    "#   src/ros2/ros2_tracing/tracetools_read/\n",
    "sys.path.insert(0, '../')\n",
    "sys.path.insert(0, '../../../ros2/ros2_tracing/tracetools_read/')\n",
    "# 2. from Debian packages, setting the right ROS 2 distro:\n",
    "#ROS_DISTRO = 'rolling'\n",
    "#sys.path.insert(0, f'/opt/ros/{ROS_DISTRO}/lib/python3.8/site-packages')\n",
    "import datetime as dt\n",
    "\n",
    "from bokeh.palettes import Category10\n",
    "from bokeh.plotting import figure\n",
    "from bokeh.plotting import output_notebook\n",
    "from bokeh.io import show\n",
    "from bokeh.layouts import row\n",
    "from bokeh.models import ColumnDataSource\n",
    "from bokeh.models import DatetimeTickFormatter\n",
    "from bokeh.models import PrintfTickFormatter\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "\n",
    "from tracetools_analysis.loading import load_file\n",
    "from tracetools_analysis.processor.ros2 import Ros2Handler\n",
    "from tracetools_analysis.utils.ros2 import Ros2DataModelUtil"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Process\n",
    "events = load_file(path)\n",
    "handler = Ros2Handler.process(events)\n",
    "#handler.data.print_data()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "data_util = Ros2DataModelUtil(handler.data)\n",
    "\n",
    "state_intervals = data_util.get_lifecycle_node_state_intervals()\n",
    "for handle, states in state_intervals.items():\n",
    "    print(handle)\n",
    "    print(states.to_string())\n",
    "\n",
    "output_notebook()\n",
    "psize = 450"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Plot\n",
    "colors = Category10[10]\n",
    "\n",
    "lifecycle_node_names = {\n",
    "    handle: data_util.get_lifecycle_node_handle_info(handle)['lifecycle node'] for handle in state_intervals.keys()\n",
    "}\n",
    "states_labels = []\n",
    "start_times = []\n",
    "\n",
    "fig = figure(\n",
    "    y_range=list(lifecycle_node_names.values()),\n",
    "    title='Lifecycle states over time',\n",
    "    y_axis_label='node',\n",
    "    plot_width=psize*2, plot_height=psize,\n",
    ")\n",
    "\n",
    "for lifecycle_node_handle, states in state_intervals.items():\n",
    "    lifecycle_node_name = lifecycle_node_names[lifecycle_node_handle]\n",
    "\n",
    "    start_times.append(states['start_timestamp'].iloc[0])\n",
    "    for index, row in states.iterrows():\n",
    "        # TODO fix end\n",
    "        if index == max(states.index):\n",
    "            continue\n",
    "        start = row['start_timestamp']\n",
    "        end = row['end_timestamp']\n",
    "        state = row['state']\n",
    "        if state not in states_labels:\n",
    "            states_labels.append(state)\n",
    "        state_index = states_labels.index(state)\n",
    "        fig.line(\n",
    "            x=[start, end],\n",
    "            y=[lifecycle_node_name]*2,\n",
    "            line_width=10.0,\n",
    "            line_color=colors[state_index],\n",
    "            legend_label=state,\n",
    "        )\n",
    "\n",
    "fig.title.align = 'center'\n",
    "fig.xaxis[0].formatter = DatetimeTickFormatter(seconds=['%Ss'])\n",
    "fig.xaxis[0].axis_label = 'time (' + min(start_times).strftime('%Y-%m-%d %H:%M') + ')'\n",
    "show(fig)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
@@ -10,10 +10,7 @@
     "#\n",
     "# Get trace data using the provided launch file:\n",
     "# $ ros2 launch tracetools_analysis memory_usage.launch.py\n",
-    "# (wait a few seconds, then kill with Ctrl+C)\n",
-    "#\n",
-    "# (optional) convert trace data:\n",
-    "# $ ros2 trace-analysis convert ~/.ros/tracing/memory-usage"
+    "# (wait at least a few seconds, then kill with Ctrl+C)"
    ]
   },
   {

@@ -32,11 +29,16 @@
    "outputs": [],
    "source": [
     "import sys\n",
-    "# Assuming a workspace with:\n",
-    "#   src/tracetools_analysis/\n",
-    "#   src/micro-ROS/ros_tracing/ros2_tracing/tracetools_read/\n",
+    "# Add paths to tracetools_analysis and tracetools_read.\n",
+    "# There are two options:\n",
+    "# 1. from source, assuming a workspace with:\n",
+    "#   src/tracetools_analysis/\n",
+    "#   src/ros2/ros2_tracing/tracetools_read/\n",
     "sys.path.insert(0, '../')\n",
-    "sys.path.insert(0, '../../../micro-ROS/ros_tracing/ros2_tracing/tracetools_read/')\n",
+    "sys.path.insert(0, '../../../ros2/ros2_tracing/tracetools_read/')\n",
+    "# 2. from Debian packages, setting the right ROS 2 distro:\n",
+    "#ROS_DISTRO = 'rolling'\n",
+    "#sys.path.insert(0, f'/opt/ros/{ROS_DISTRO}/lib/python3.8/site-packages')\n",
     "import datetime as dt\n",
     "\n",
     "from bokeh.palettes import viridis\n",

@@ -151,7 +153,7 @@
   ],
   "metadata": {
    "kernelspec": {
-    "display_name": "Python 3",
+    "display_name": "Python 3 (ipykernel)",
     "language": "python",
     "name": "python3"
    },

@@ -165,7 +167,7 @@
     "name": "python",
     "nbconvert_exporter": "python",
     "pygments_lexer": "ipython3",
-    "version": "3.6.9"
+    "version": "3.10.6"
    }
   },
   "nbformat": 4,
Binary file not shown.

413  tracetools_analysis/analysis/testing.ipynb  (new file)
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
 About
 =====

-Tools for analyzing trace data from ROS 2 systems generated by the `ros2_tracing packages <https://index.ros.org/r/ros2_tracing/gitlab-micro-ROS-ros_tracing-ros2_tracing/>`_.
+Tools for analyzing trace data from ROS 2 systems generated by the `ros2_tracing packages <https://index.ros.org/r/ros2_tracing/>`_.
@@ -1,6 +1,8 @@
 tracetools_analysis
 ===================

+.. automodule:: tracetools_analysis
+
 loading
 #######

@ -39,9 +39,9 @@ copyright = '2019-2020, Robert Bosch GmbH & Christophe Bedard' # noqa
|
||||||
author = 'Robert Bosch GmbH, Christophe Bedard'
|
author = 'Robert Bosch GmbH, Christophe Bedard'
|
||||||
|
|
||||||
# The short X.Y version
|
# The short X.Y version
|
||||||
version = os.environ.get('SPHINX_VERSION_SHORT', '')
|
version = ''
|
||||||
# The full version, including alpha/beta/rc tags
|
# The full version, including alpha/beta/rc tags
|
||||||
release = os.environ.get('SPHINX_VERSION_FULL', '')
|
release = '1.0.1'
|
||||||
|
|
||||||
|
|
||||||
# -- General configuration ---------------------------------------------------
|
# -- General configuration ---------------------------------------------------
|
||||||
|
@ -100,7 +100,9 @@ html_theme = 'alabaster'
|
||||||
# further. For a list of options available for each theme, see the
|
# further. For a list of options available for each theme, see the
|
||||||
# documentation.
|
# documentation.
|
||||||
#
|
#
|
||||||
# html_theme_options = {}
|
html_theme_options = {
|
||||||
|
'sidebar_width': '260px',
|
||||||
|
}
|
||||||
|
|
||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 tracetools_analysis
 ===================

-tracetools_analysis provides tools for analyzing trace data from ROS 2 systems generated by the `ros2_tracing packages <https://index.ros.org/r/ros2_tracing/gitlab-micro-ROS-ros_tracing-ros2_tracing/>`_.
+tracetools_analysis provides tools for analyzing trace data from ROS 2 systems generated by the `ros2_tracing packages <https://index.ros.org/r/ros2_tracing/>`_.

 .. toctree::
    :maxdepth: 4
38  tracetools_analysis/launch/lifecycle_states.launch.py  (new file)

@@ -0,0 +1,38 @@
# Copyright 2020 Christophe Bedard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Example launch file for a lifecycle node state analysis."""

import launch
from launch_ros.actions import Node
from tracetools_launch.action import Trace


def generate_launch_description():
    return launch.LaunchDescription([
        Trace(
            session_name='lifecycle-node-state',
            events_kernel=[],
        ),
        Node(
            package='test_tracetools',
            executable='test_lifecycle_node',
            output='screen',
        ),
        Node(
            package='test_tracetools',
            executable='test_lifecycle_client',
            output='screen',
        ),
    ])
@@ -14,14 +14,14 @@
 
 """Example launch file for a memory_usage analysis."""
 
-from launch import LaunchDescription
+import launch
 from launch_ros.actions import Node
 from tracetools_launch.action import Trace
 from tracetools_trace.tools.names import DEFAULT_EVENTS_ROS
 
 
 def generate_launch_description():
-    return LaunchDescription([
+    return launch.LaunchDescription([
         Trace(
             session_name='memory-usage',
             events_ust=[
@@ -38,13 +38,13 @@ def generate_launch_description():
             ],
         ),
         Node(
-            package='tracetools_test',
+            package='test_tracetools',
             executable='test_ping',
             arguments=['do_more'],
             output='screen',
         ),
         Node(
-            package='tracetools_test',
+            package='test_tracetools',
             executable='test_pong',
             arguments=['do_more'],
             output='screen',
@@ -14,24 +14,25 @@
 
 """Example launch file for a callback duration analysis."""
 
-from launch import LaunchDescription
+import launch
 from launch_ros.actions import Node
 from tracetools_launch.action import Trace
 
 
 def generate_launch_description():
-    return LaunchDescription([
+    return launch.LaunchDescription([
         Trace(
             session_name='pingpong',
+            events_kernel=[],
         ),
         Node(
-            package='tracetools_test',
+            package='test_tracetools',
             executable='test_ping',
             arguments=['do_more'],
             output='screen',
         ),
         Node(
-            package='tracetools_test',
+            package='test_tracetools',
             executable='test_pong',
             arguments=['do_more'],
             output='screen',
@@ -14,7 +14,7 @@
 
 """Example launch file for a profiling analysis."""
 
-from launch import LaunchDescription
+import launch
 from launch_ros.actions import Node
 from tracetools_launch.action import Trace
 from tracetools_trace.tools.names import DEFAULT_CONTEXT
@@ -22,7 +22,7 @@ from tracetools_trace.tools.names import DEFAULT_EVENTS_ROS
 
 
 def generate_launch_description():
-    return LaunchDescription([
+    return launch.LaunchDescription([
         Trace(
             session_name='profile',
             events_ust=[
@@ -36,18 +36,19 @@ def generate_launch_description():
             events_kernel=[
                 'sched_switch',
             ],
-            context_names=[
-                'ip',
-            ] + DEFAULT_CONTEXT,
+            context_fields={
+                'kernel': DEFAULT_CONTEXT,
+                'userspace': DEFAULT_CONTEXT + ['ip'],
+            },
         ),
         Node(
-            package='tracetools_test',
+            package='test_tracetools',
             executable='test_ping',
             arguments=['do_more'],
             output='screen',
         ),
         Node(
-            package='tracetools_test',
+            package='test_tracetools',
             executable='test_pong',
             arguments=['do_more'],
             output='screen',
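Note on the change above: the flat `context_names` list is replaced by `context_fields`, which groups context fields per tracing domain ('kernel' and 'userspace'). A minimal sketch of the dictionary form, assuming only the `Trace` action API shown in this diff (the session name is made up for illustration):

    from tracetools_launch.action import Trace
    from tracetools_trace.tools.names import DEFAULT_CONTEXT

    # Hypothetical standalone example; only the 'kernel'/'userspace' keys come from this diff
    trace_action = Trace(
        session_name='example-profile',  # made-up name
        context_fields={
            'kernel': DEFAULT_CONTEXT,
            'userspace': DEFAULT_CONTEXT + ['ip'],
        },
    )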
@@ -2,15 +2,19 @@
 <?xml-model href="http://download.ros.org/schema/package_format2.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
 <package format="2">
   <name>tracetools_analysis</name>
-  <version>1.0.1</version>
+  <version>3.1.0</version>
   <description>Tools for analysing trace data.</description>
   <maintainer email="bedard.christophe@gmail.com">Christophe Bedard</maintainer>
   <maintainer email="ingo.luetkebohle@de.bosch.com">Ingo Lütkebohle</maintainer>
   <license>Apache 2.0</license>
+  <url type="website">https://index.ros.org/p/tracetools_analysis/</url>
+  <url type="repository">https://github.com/ros-tracing/tracetools_analysis</url>
+  <url type="bugtracker">https://github.com/ros-tracing/tracetools_analysis/issues</url>
   <author email="ingo.luetkebohle@de.bosch.com">Ingo Lütkebohle</author>
   <author email="fixed-term.christophe.bourquebedard@de.bosch.com">Christophe Bedard</author>
 
   <depend>tracetools_read</depend>
+  <depend>tracetools_trace</depend>
   <depend>python3-pandas</depend>
 
   <exec_depend>jupyter-notebook</exec_depend>
@@ -1,4 +1,4 @@
 [develop]
-script-dir=$base/lib/tracetools_analysis
+script_dir=$base/lib/tracetools_analysis
 [install]
-install-scripts=$base/lib/tracetools_analysis
+install_scripts=$base/lib/tracetools_analysis
@@ -7,7 +7,7 @@ package_name = 'tracetools_analysis'
 
 setup(
     name=package_name,
-    version='1.0.1',
+    version='3.1.0',
     packages=find_packages(exclude=['test']),
     data_files=[
        ('share/' + package_name, ['package.xml']),
@@ -32,7 +32,7 @@ setup(
        'fixed-term.christophe.bourquebedard@de.bosch.com, '
        'ingo.luetkebohle@de.bosch.com'
    ),
-    url='https://gitlab.com/micro-ROS/ros_tracing/tracetools_analysis',
+    url='https://github.com/ros-tracing/tracetools_analysis',
    keywords=[],
    description='Tools for analysing trace data.',
    long_description=(
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ament_flake8.main import main
+from ament_flake8.main import main_with_errors
 import pytest
 
 
 @pytest.mark.flake8
 @pytest.mark.linter
 def test_flake8():
-    rc = main(argv=[])
-    assert rc == 0, 'Found errors'
+    rc, errors = main_with_errors(argv=[])
+    assert rc == 0, \
+        'Found %d code style errors / warnings:\n' % len(errors) + \
+        '\n'.join(errors)
@@ -17,8 +17,10 @@ from datetime import timezone
 from typing import Dict
 import unittest
 
+from packaging.version import Version
+from pandas import __version__ as pandas_version
 from pandas import DataFrame
-from pandas.util.testing import assert_frame_equal
+from pandas.testing import assert_frame_equal
 
 from tracetools_analysis.data_model import DataModel
 from tracetools_analysis.processor import EventHandler
@@ -34,6 +36,10 @@ class TestDataModelUtil(unittest.TestCase):
             *args,
         )
 
+    @unittest.skipIf(
+        Version(pandas_version) < Version('2.2.0'),
+        'skip due to missing fix: pandas-dev/pandas#55812',
+    )
     def test_convert_time_columns(self) -> None:
         input_df = DataFrame(
             data=[
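Note: the test changes above and below track newer pandas. `assert_frame_equal` now lives in `pandas.testing` (the old `pandas.util.testing` location was deprecated and later removed), and the version gate uses `packaging.version.Version`. A small self-contained sketch of the updated imports, with made-up frame contents:

    from pandas import DataFrame
    from pandas.testing import assert_frame_equal

    # Made-up data, only to show the comparison call
    expected = DataFrame.from_dict([{'tid': 0, 'duration': 10}])
    actual = DataFrame.from_dict([{'tid': 0, 'duration': 10}])
    assert_frame_equal(expected, actual)  # raises AssertionError if they differ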
@@ -18,7 +18,7 @@ from typing import List
 import unittest
 
 from pandas import DataFrame
-from pandas.util.testing import assert_frame_equal
+from pandas.testing import assert_frame_equal
 
 from tracetools_analysis.processor import Processor
 from tracetools_analysis.processor.profile import ProfileHandler
@@ -286,17 +286,8 @@ class TestProfileHandler(unittest.TestCase):
 
     @staticmethod
     def build_expected_df(expected_data: List[Dict[str, Any]]) -> DataFrame:
-        # Make sure the columns are in the same order
-        expected_df = DataFrame(columns=[
-            'tid',
-            'depth',
-            'function_name',
-            'parent_name',
-            'start_timestamp',
-            'duration',
-            'actual_duration',
-        ])
-        return expected_df.append(expected_data, ignore_index=True)
+        # Columns should be in the same order
+        return DataFrame.from_dict(expected_data)
 
     @staticmethod
     def transform_fake_fields(events: List[DictEvent]) -> None:
@@ -16,6 +16,8 @@
 import unittest
 
 from tracetools_analysis import time_diff_to_str
+from tracetools_analysis.data_model.ros2 import Ros2DataModel
+from tracetools_analysis.utils.ros2 import Ros2DataModelUtil
 
 
 class TestUtils(unittest.TestCase):
@@ -31,3 +33,15 @@ class TestUtils(unittest.TestCase):
         self.assertEqual('1 m 10 s', time_diff_to_str(69.6969))
         self.assertEqual('6 m 10 s', time_diff_to_str(369.6969))
         self.assertEqual('2 m 0 s', time_diff_to_str(120.499999999))
+
+    def test_ros2_no_callbacks(self) -> None:
+        data_model = Ros2DataModel()
+        data_model.finalize()
+        util = Ros2DataModelUtil(data_model)
+        self.assertEqual({}, util.get_callback_symbols())
+
+    def test_ros2_no_lifecycle_transitions(self) -> None:
+        data_model = Ros2DataModel()
+        data_model.finalize()
+        util = Ros2DataModelUtil(data_model)
+        self.assertEqual({}, util.get_lifecycle_node_state_intervals())
@@ -41,7 +41,11 @@ def add_args(parser: argparse.ArgumentParser) -> None:
 
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(
-        description='Convert trace data to a file.')
+        description=(
+            'Convert trace data to a file. '
+            "DEPRECATED: use the 'process' verb directly."
+        ),
+    )
     add_args(parser)
     return parser.parse_args()
 
@@ -79,4 +83,6 @@ def main():
     trace_directory = args.trace_directory
     output_file_name = args.output_file_name
 
+    import warnings
+    warnings.warn("'convert' is deprecated, use 'process' directly instead", stacklevel=2)
     convert(trace_directory, output_file_name)
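Note: with 'convert' deprecated, the same result can be obtained through the process entry point. A sketch of the programmatic equivalent, assuming the `process()` signature added later in this changeset; the module path and trace directory are assumptions, not taken from the diff:

    from tracetools_analysis.process import process  # module path assumed

    exit_code = process(
        '~/tracing/example-session',  # made-up path
        convert_only=True,  # stop after the conversion step, like 'convert' did
    )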
@@ -1,4 +1,5 @@
 # Copyright 2019 Robert Bosch GmbH
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,17 +15,46 @@
 
 """Base data model module."""
 
+from typing import Any
+from typing import Dict
+from typing import List
+
+
+DataModelIntermediateStorage = List[Dict[str, Any]]
+
+
 class DataModel():
     """
     Container with pre-processed data for an analysis to use.
 
     Contains data for an analysis to use. This is a middleground between trace events data and the
-    output data of an analysis. It uses pandas `DataFrame` directly.
+    output data of an analysis.
+    It uses native/simple Python data structures (e.g. lists of dicts) during processing, but
+    converts them to pandas `DataFrame` at the end.
     """
 
     def __init__(self) -> None:
-        pass
+        self._finalized = False
+
+    def finalize(self) -> None:
+        """
+        Finalize the data model.
+
+        Call this once data is done being generated or added to the model.
+        Finalization tasks are up to the inheriting/concrete class.
+        """
+        # Avoid calling it twice for data models which might be shared
+        if not self._finalized:
+            self._finalized = True
+            self._finalize()
+
+    def _finalize(self) -> None:
+        """
+        Do the finalization.
+
+        Only called once.
+        """
+        raise NotImplementedError
 
     def print_data(self) -> None:
         """Print the data model."""
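Note: concrete data models now accumulate plain dicts and only build DataFrames in `_finalize()`. A minimal hypothetical subclass following that pattern; the `ThingDataModel`/`add_thing` names are made up and not part of this changeset:

    import pandas as pd

    from tracetools_analysis.data_model import DataModel, DataModelIntermediateStorage


    class ThingDataModel(DataModel):
        """Made-up data model, for illustration only."""

        def __init__(self) -> None:
            super().__init__()
            self._things: DataModelIntermediateStorage = []

        def add_thing(self, timestamp, name) -> None:
            # Cheap per-event append; no pandas work yet
            self._things.append({'timestamp': timestamp, 'name': name})

        def _finalize(self) -> None:
            # Called once, via finalize(), after all events are processed
            self.things = pd.DataFrame.from_dict(self._things)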
@@ -1,4 +1,5 @@
 # Copyright 2019 Robert Bosch GmbH
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,6 +18,7 @@
 import pandas as pd
 
 from . import DataModel
+from . import DataModelIntermediateStorage
 
 
 class CpuTimeDataModel(DataModel):
@@ -29,12 +31,7 @@ class CpuTimeDataModel(DataModel):
     def __init__(self) -> None:
         """Create a CpuTimeDataModel."""
         super().__init__()
-        self.times = pd.DataFrame(columns=[
-            'tid',
-            'start_timestamp',
-            'duration',
-            'cpu_id',
-        ])
+        self._times: DataModelIntermediateStorage = []
 
     def add_duration(
         self,
@@ -43,13 +40,15 @@ class CpuTimeDataModel(DataModel):
         duration: int,
         cpu_id: int,
     ) -> None:
-        data = {
+        self._times.append({
             'tid': tid,
             'start_timestamp': start_timestamp,
             'duration': duration,
             'cpu_id': cpu_id,
-        }
-        self.times = self.times.append(data, ignore_index=True)
+        })
+
+    def _finalize(self) -> None:
+        self.times = pd.DataFrame.from_dict(self._times)
 
     def print_data(self) -> None:
         print('====================CPU TIME DATA MODEL====================')
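Note on why the storage changed: appending to a DataFrame one row at a time creates a new frame on every call, and `DataFrame.append` was later removed from pandas entirely, so each model now collects dicts and builds the frame once in `_finalize()`. A rough, self-contained illustration; sizes and values are made up:

    import pandas as pd

    rows = [{'tid': i, 'duration': i * 10} for i in range(1000)]

    # Old pattern (per-row append, no longer available in pandas 2.x):
    # df = pd.DataFrame(columns=['tid', 'duration'])
    # for row in rows:
    #     df = df.append(row, ignore_index=True)

    # New pattern: accumulate plain dicts, build the DataFrame once
    df = pd.DataFrame.from_dict(rows)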
@@ -1,4 +1,5 @@
 # Copyright 2019 Apex.AI, Inc.
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,6 +18,7 @@
 import pandas as pd
 
 from . import DataModel
+from . import DataModelIntermediateStorage
 
 
 class MemoryUsageDataModel(DataModel):
@@ -30,11 +32,7 @@ class MemoryUsageDataModel(DataModel):
     def __init__(self) -> None:
         """Create a MemoryUsageDataModel."""
         super().__init__()
-        self.memory_diff = pd.DataFrame(columns=[
-            'timestamp',
-            'tid',
-            'memory_diff',
-        ])
+        self._memory_diff: DataModelIntermediateStorage = []
 
     def add_memory_difference(
         self,
@@ -42,12 +40,14 @@ class MemoryUsageDataModel(DataModel):
         tid: int,
         memory_diff: int,
     ) -> None:
-        data = {
+        self._memory_diff.append({
             'timestamp': timestamp,
             'tid': tid,
             'memory_diff': memory_diff,
-        }
-        self.memory_diff = self.memory_diff.append(data, ignore_index=True)
+        })
+
+    def _finalize(self) -> None:
+        self.memory_diff = pd.DataFrame.from_dict(self._memory_diff)
 
     def print_data(self) -> None:
         print('==================MEMORY USAGE DATA MODEL==================')
@@ -1,4 +1,5 @@
 # Copyright 2019 Robert Bosch GmbH
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -19,6 +20,7 @@ from typing import Optional
 import pandas as pd
 
 from . import DataModel
+from . import DataModelIntermediateStorage
 
 
 class ProfileDataModel(DataModel):
@@ -32,15 +34,7 @@ class ProfileDataModel(DataModel):
     def __init__(self) -> None:
         """Create a ProfileDataModel."""
         super().__init__()
-        self.times = pd.DataFrame(columns=[
-            'tid',
-            'depth',
-            'function_name',
-            'parent_name',
-            'start_timestamp',
-            'duration',
-            'actual_duration',
-        ])
+        self._times: DataModelIntermediateStorage = []
 
     def add_duration(
         self,
@@ -52,7 +46,7 @@ class ProfileDataModel(DataModel):
         duration: int,
         actual_duration: int,
     ) -> None:
-        data = {
+        self._times.append({
             'tid': tid,
             'depth': depth,
             'function_name': function_name,
@@ -60,8 +54,10 @@ class ProfileDataModel(DataModel):
             'start_timestamp': start_timestamp,
             'duration': duration,
             'actual_duration': actual_duration,
-        }
-        self.times = self.times.append(data, ignore_index=True)
+        })
+
+    def _finalize(self) -> None:
+        self.times = pd.DataFrame.from_dict(self._times)
 
     def print_data(self) -> None:
         print('====================PROFILE DATA MODEL====================')
@@ -1,4 +1,5 @@
 # Copyright 2019 Robert Bosch GmbH
+# Copyright 2020-2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,9 +15,11 @@
 
 """Module for ROS 2 data model."""
 
+import numpy as np
 import pandas as pd
 
 from . import DataModel
+from . import DataModelIntermediateStorage
 
 
 class Ros2DataModel(DataModel):
@@ -30,129 +33,296 @@ class Ros2DataModel(DataModel):
         """Create a Ros2DataModel."""
         super().__init__()
         # Objects (one-time events, usually when something is created)
-        self.contexts = pd.DataFrame(columns=['context_handle',
-                                              'timestamp',
-                                              'pid',
-                                              'version'])
-        self.contexts.set_index(['context_handle'], inplace=True, drop=True)
-        self.nodes = pd.DataFrame(columns=['node_handle',
-                                           'timestamp',
-                                           'tid',
-                                           'rmw_handle',
-                                           'name',
-                                           'namespace'])
-        self.nodes.set_index(['node_handle'], inplace=True, drop=True)
-        self.publishers = pd.DataFrame(columns=['publisher_handle',
-                                                'timestamp',
-                                                'node_handle',
-                                                'rmw_handle',
-                                                'topic_name',
-                                                'depth'])
-        self.publishers.set_index(['publisher_handle'], inplace=True, drop=True)
-        self.subscriptions = pd.DataFrame(columns=['subscription_handle',
-                                                   'timestamp',
-                                                   'node_handle',
-                                                   'rmw_handle',
-                                                   'topic_name',
-                                                   'depth'])
-        self.subscriptions.set_index(['subscription_handle'], inplace=True, drop=True)
-        self.subscription_objects = pd.DataFrame(columns=['subscription',
-                                                          'timestamp',
-                                                          'subscription_handle'])
-        self.subscription_objects.set_index(['subscription'], inplace=True, drop=True)
-        self.services = pd.DataFrame(columns=['service_handle',
-                                              'timestamp',
-                                              'node_handle',
-                                              'rmw_handle',
-                                              'service_name'])
-        self.services.set_index(['service_handle'], inplace=True, drop=True)
-        self.clients = pd.DataFrame(columns=['client_handle',
-                                             'timestamp',
-                                             'node_handle',
-                                             'rmw_handle',
-                                             'service_name'])
-        self.clients.set_index(['client_handle'], inplace=True, drop=True)
-        self.timers = pd.DataFrame(columns=['timer_handle',
-                                            'timestamp',
-                                            'period',
-                                            'tid'])
-        self.timers.set_index(['timer_handle'], inplace=True, drop=True)
-
-        self.callback_objects = pd.DataFrame(columns=['reference',
-                                                      'timestamp',
-                                                      'callback_object'])
-        self.callback_objects.set_index(['reference'], inplace=True, drop=True)
-        self.callback_symbols = pd.DataFrame(columns=['callback_object',
-                                                      'timestamp',
-                                                      'symbol'])
-        self.callback_symbols.set_index(['callback_object'], inplace=True, drop=True)
+        self._contexts: DataModelIntermediateStorage = []
+        self._nodes: DataModelIntermediateStorage = []
+        self._rmw_publishers: DataModelIntermediateStorage = []
+        self._rcl_publishers: DataModelIntermediateStorage = []
+        self._rmw_subscriptions: DataModelIntermediateStorage = []
+        self._rcl_subscriptions: DataModelIntermediateStorage = []
+        self._subscription_objects: DataModelIntermediateStorage = []
+        self._services: DataModelIntermediateStorage = []
+        self._clients: DataModelIntermediateStorage = []
+        self._timers: DataModelIntermediateStorage = []
+        self._timer_node_links: DataModelIntermediateStorage = []
+        self._callback_objects: DataModelIntermediateStorage = []
+        self._callback_symbols: DataModelIntermediateStorage = []
+        self._lifecycle_state_machines: DataModelIntermediateStorage = []
 
         # Events (multiple instances, may not have a meaningful index)
-        self.callback_instances = pd.DataFrame(columns=['callback_object',
-                                                        'timestamp',
-                                                        'duration',
-                                                        'intra_process'])
+        self._rclcpp_publish_instances: DataModelIntermediateStorage = []
+        self._rcl_publish_instances: DataModelIntermediateStorage = []
+        self._rmw_publish_instances: DataModelIntermediateStorage = []
+        self._rmw_take_instances: DataModelIntermediateStorage = []
+        self._rcl_take_instances: DataModelIntermediateStorage = []
+        self._rclcpp_take_instances: DataModelIntermediateStorage = []
+        self._callback_instances: DataModelIntermediateStorage = []
+        self._lifecycle_transitions: DataModelIntermediateStorage = []
 
     def add_context(
         self, context_handle, timestamp, pid, version
     ) -> None:
-        self.contexts.loc[context_handle] = [timestamp, pid, version]
+        self._contexts.append({
+            'context_handle': context_handle,
+            'timestamp': timestamp,
+            'pid': pid,
+            'version': version,
+        })
 
     def add_node(
         self, node_handle, timestamp, tid, rmw_handle, name, namespace
     ) -> None:
-        self.nodes.loc[node_handle] = [timestamp, tid, rmw_handle, name, namespace]
+        self._nodes.append({
+            'node_handle': node_handle,
+            'timestamp': timestamp,
+            'tid': tid,
+            'rmw_handle': rmw_handle,
+            'name': name,
+            'namespace': namespace,
+        })
 
-    def add_publisher(
+    def add_rmw_publisher(
+        self, handle, timestamp, gid,
+    ) -> None:
+        self._rmw_publishers.append({
+            'publisher_handle': handle,
+            'timestamp': timestamp,
+            'gid': gid,
+        })
+
+    def add_rcl_publisher(
         self, handle, timestamp, node_handle, rmw_handle, topic_name, depth
     ) -> None:
-        self.publishers.loc[handle] = [timestamp, node_handle, rmw_handle, topic_name, depth]
+        self._rcl_publishers.append({
+            'publisher_handle': handle,
+            'timestamp': timestamp,
+            'node_handle': node_handle,
+            'rmw_handle': rmw_handle,
+            'topic_name': topic_name,
+            'depth': depth,
+        })
+
+    def add_rclcpp_publish_instance(
+        self, timestamp, message,
+    ) -> None:
+        self._rclcpp_publish_instances.append({
+            'timestamp': timestamp,
+            'message': message,
+        })
+
+    def add_rcl_publish_instance(
+        self, publisher_handle, timestamp, message,
+    ) -> None:
+        self._rcl_publish_instances.append({
+            'publisher_handle': publisher_handle,
+            'timestamp': timestamp,
+            'message': message,
+        })
+
+    def add_rmw_publish_instance(
+        self, timestamp, message,
+    ) -> None:
+        self._rmw_publish_instances.append({
+            'timestamp': timestamp,
+            'message': message,
+        })
+
+    def add_rmw_subscription(
+        self, handle, timestamp, gid
+    ) -> None:
+        self._rmw_subscriptions.append({
+            'subscription_handle': handle,
+            'timestamp': timestamp,
+            'gid': gid,
+        })
 
     def add_rcl_subscription(
         self, handle, timestamp, node_handle, rmw_handle, topic_name, depth
     ) -> None:
-        self.subscriptions.loc[handle] = [timestamp, node_handle, rmw_handle, topic_name, depth]
+        self._rcl_subscriptions.append({
+            'subscription_handle': handle,
+            'timestamp': timestamp,
+            'node_handle': node_handle,
+            'rmw_handle': rmw_handle,
+            'topic_name': topic_name,
+            'depth': depth,
+        })
 
     def add_rclcpp_subscription(
         self, subscription_pointer, timestamp, subscription_handle
     ) -> None:
-        self.subscription_objects.loc[subscription_pointer] = [timestamp, subscription_handle]
+        self._subscription_objects.append({
+            'subscription': subscription_pointer,
+            'timestamp': timestamp,
+            'subscription_handle': subscription_handle,
+        })
 
     def add_service(
         self, handle, timestamp, node_handle, rmw_handle, service_name
     ) -> None:
-        self.services.loc[handle] = [timestamp, node_handle, rmw_handle, service_name]
+        self._services.append({
+            'service_handle': timestamp,
+            'timestamp': timestamp,
+            'node_handle': node_handle,
+            'rmw_handle': rmw_handle,
+            'service_name': service_name,
+        })
 
     def add_client(
         self, handle, timestamp, node_handle, rmw_handle, service_name
     ) -> None:
-        self.clients.loc[handle] = [timestamp, node_handle, rmw_handle, service_name]
+        self._clients.append({
+            'client_handle': handle,
+            'timestamp': timestamp,
+            'node_handle': node_handle,
+            'rmw_handle': rmw_handle,
+            'service_name': service_name,
+        })
 
     def add_timer(
         self, handle, timestamp, period, tid
     ) -> None:
-        self.timers.loc[handle] = [timestamp, period, tid]
+        self._timers.append({
+            'timer_handle': handle,
+            'timestamp': timestamp,
+            'period': period,
+            'tid': tid,
+        })
+
+    def add_timer_node_link(
+        self, handle, timestamp, node_handle
+    ) -> None:
+        self._timer_node_links.append({
+            'timer_handle': handle,
+            'timestamp': timestamp,
+            'node_handle': node_handle,
+        })
 
     def add_callback_object(
         self, reference, timestamp, callback_object
     ) -> None:
-        self.callback_objects.loc[reference] = [timestamp, callback_object]
+        self._callback_objects.append({
+            'reference': reference,
+            'timestamp': timestamp,
+            'callback_object': callback_object,
+        })
 
     def add_callback_symbol(
         self, callback_object, timestamp, symbol
    ) -> None:
-        self.callback_symbols.loc[callback_object] = [timestamp, symbol]
+        self._callback_symbols.append({
+            'callback_object': callback_object,
+            'timestamp': timestamp,
+            'symbol': symbol,
+        })
 
     def add_callback_instance(
         self, callback_object, timestamp, duration, intra_process
     ) -> None:
-        data = {
+        self._callback_instances.append({
             'callback_object': callback_object,
-            'timestamp': timestamp,
-            'duration': duration,
+            'timestamp': np.datetime64(timestamp, 'ns'),
+            'duration': np.timedelta64(duration, 'ns'),
             'intra_process': intra_process,
-        }
-        self.callback_instances = self.callback_instances.append(data, ignore_index=True)
+        })
+
+    def add_rmw_take_instance(
+        self, subscription_handle, timestamp, message, source_timestamp, taken
+    ) -> None:
+        self._rmw_take_instances.append({
+            'subscription_handle': subscription_handle,
+            'timestamp': timestamp,
+            'message': message,
+            'source_timestamp': source_timestamp,
+            'taken': taken,
+        })
+
+    def add_rcl_take_instance(
+        self, timestamp, message
+    ) -> None:
+        self._rcl_take_instances.append({
+            'timestamp': timestamp,
+            'message': message,
+        })
+
+    def add_rclcpp_take_instance(
+        self, timestamp, message
+    ) -> None:
+        self._rclcpp_take_instances.append({
+            'timestamp': timestamp,
+            'message': message,
+        })
+
+    def add_lifecycle_state_machine(
+        self, handle, node_handle
+    ) -> None:
+        self._lifecycle_state_machines.append({
+            'state_machine_handle': handle,
+            'node_handle': node_handle,
+        })
+
+    def add_lifecycle_state_transition(
+        self, state_machine_handle, start_label, goal_label, timestamp
+    ) -> None:
+        self._lifecycle_transitions.append({
+            'state_machine_handle': state_machine_handle,
+            'start_label': start_label,
+            'goal_label': goal_label,
+            'timestamp': timestamp,
+        })
+
+    def _finalize(self) -> None:
+        # Some of the lists of dicts might be empty, and setting
+        # the index for an empty dataframe leads to an error
+        self.contexts = pd.DataFrame.from_dict(self._contexts)
+        if self._contexts:
+            self.contexts.set_index('context_handle', inplace=True, drop=True)
+        self.nodes = pd.DataFrame.from_dict(self._nodes)
+        if self._nodes:
+            self.nodes.set_index('node_handle', inplace=True, drop=True)
+        self.rmw_publishers = pd.DataFrame.from_dict(self._rmw_publishers)
+        if self._rmw_publishers:
+            self.rmw_publishers.set_index('publisher_handle', inplace=True, drop=True)
+        self.rcl_publishers = pd.DataFrame.from_dict(self._rcl_publishers)
+        if self._rcl_publishers:
+            self.rcl_publishers.set_index('publisher_handle', inplace=True, drop=True)
+        self.rmw_subscriptions = pd.DataFrame.from_dict(self._rmw_subscriptions)
+        if self._rmw_subscriptions:
+            self.rmw_subscriptions.set_index('subscription_handle', inplace=True, drop=True)
+        self.rcl_subscriptions = pd.DataFrame.from_dict(self._rcl_subscriptions)
+        if self._rcl_subscriptions:
+            self.rcl_subscriptions.set_index('subscription_handle', inplace=True, drop=True)
+        self.subscription_objects = pd.DataFrame.from_dict(self._subscription_objects)
+        if self._subscription_objects:
+            self.subscription_objects.set_index('subscription', inplace=True, drop=True)
+        self.services = pd.DataFrame.from_dict(self._services)
+        if self._services:
+            self.services.set_index('service_handle', inplace=True, drop=True)
+        self.clients = pd.DataFrame.from_dict(self._clients)
+        if self._clients:
+            self.clients.set_index('client_handle', inplace=True, drop=True)
+        self.timers = pd.DataFrame.from_dict(self._timers)
+        if self._timers:
+            self.timers.set_index('timer_handle', inplace=True, drop=True)
+        self.timer_node_links = pd.DataFrame.from_dict(self._timer_node_links)
+        if self._timer_node_links:
+            self.timer_node_links.set_index('timer_handle', inplace=True, drop=True)
+        self.callback_objects = pd.DataFrame.from_dict(self._callback_objects)
+        if self._callback_objects:
+            self.callback_objects.set_index('reference', inplace=True, drop=True)
+        self.callback_symbols = pd.DataFrame.from_dict(self._callback_symbols)
+        if self._callback_symbols:
+            self.callback_symbols.set_index('callback_object', inplace=True, drop=True)
+        self.lifecycle_state_machines = pd.DataFrame.from_dict(self._lifecycle_state_machines)
+        if self._lifecycle_state_machines:
+            self.lifecycle_state_machines.set_index(
+                'state_machine_handle', inplace=True, drop=True)
+        self.rclcpp_publish_instances = pd.DataFrame.from_dict(self._rclcpp_publish_instances)
+        self.rcl_publish_instances = pd.DataFrame.from_dict(self._rcl_publish_instances)
+        self.rmw_publish_instances = pd.DataFrame.from_dict(self._rmw_publish_instances)
+        self.rmw_take_instances = pd.DataFrame.from_dict(self._rmw_take_instances)
+        self.rcl_take_instances = pd.DataFrame.from_dict(self._rcl_take_instances)
+        self.rclcpp_take_instances = pd.DataFrame.from_dict(self._rclcpp_take_instances)
+        self.callback_instances = pd.DataFrame.from_dict(self._callback_instances)
+        self.lifecycle_transitions = pd.DataFrame.from_dict(self._lifecycle_transitions)
 
     def print_data(self) -> None:
         print('====================ROS 2 DATA MODEL===================')
@@ -162,11 +332,17 @@ class Ros2DataModel(DataModel):
         print('Nodes:')
         print(self.nodes.to_string())
         print()
-        print('Publishers:')
-        print(self.publishers.to_string())
+        print('Publishers (rmw):')
+        print(self.rmw_publishers.to_string())
         print()
-        print('Subscriptions:')
-        print(self.subscriptions.to_string())
+        print('Publishers (rcl):')
+        print(self.rcl_publishers.to_string())
+        print()
+        print('Subscriptions (rmw):')
+        print(self.rmw_subscriptions.to_string())
+        print()
+        print('Subscriptions (rcl):')
+        print(self.rcl_subscriptions.to_string())
         print()
         print('Subscription objects:')
         print(self.subscription_objects.to_string())
@@ -180,6 +356,9 @@ class Ros2DataModel(DataModel):
         print('Timers:')
         print(self.timers.to_string())
         print()
+        print('Timer-node links:')
+        print(self.timer_node_links.to_string())
+        print()
         print('Callback objects:')
         print(self.callback_objects.to_string())
         print()
@@ -188,4 +367,28 @@ class Ros2DataModel(DataModel):
         print()
         print('Callback instances:')
         print(self.callback_instances.to_string())
+        print()
+        print('Publish instances (rclcpp):')
+        print(self.rclcpp_publish_instances.to_string())
+        print()
+        print('Publish instances (rcl):')
+        print(self.rcl_publish_instances.to_string())
+        print()
+        print('Publish instances (rmw):')
+        print(self.rmw_publish_instances.to_string())
+        print()
+        print('Take instances (rmw):')
+        print(self.rmw_take_instances.to_string())
+        print()
+        print('Take instances (rcl):')
+        print(self.rcl_take_instances.to_string())
+        print()
+        print('Take instances (rclcpp):')
+        print(self.rclcpp_take_instances.to_string())
+        print()
+        print('Lifecycle state machines:')
+        print(self.lifecycle_state_machines.to_string())
+        print()
+        print('Lifecycle transitions:')
+        print(self.lifecycle_transitions.to_string())
         print('==================================================')
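Note: after `finalize()`, each accumulated list becomes a pandas DataFrame attribute, most of them indexed by their handle column. A small hand-built illustration using only the add_* signatures shown above; all handle and timestamp values are made up:

    import pandas as pd

    from tracetools_analysis.data_model.ros2 import Ros2DataModel

    data = Ros2DataModel()
    data.add_node(1, 100, 42, 2, 'talker', '/')  # made-up handle/timestamp values
    data.add_callback_instance(3, 200, 1500, False)
    data.finalize()  # converts the accumulated dicts into DataFrames

    print(data.nodes)  # indexed by 'node_handle'
    # 'duration' is a timedelta64 after finalization, so pandas time comparisons work
    slow = data.callback_instances[
        data.callback_instances['duration'] > pd.Timedelta(microseconds=1)
    ]
    print(slow)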
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 # Copyright 2019 Robert Bosch GmbH
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -31,20 +32,29 @@ def add_args(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         'input_path',
         help='the path to a converted file to import and process, '
-             'or the path to a CTF directory to convert and process')
+             'or the path to a trace directory to convert and process')
     parser.add_argument(
         '-f', '--force-conversion', dest='force_conversion',
         action='store_true', default=False,
         help='re-convert trace directory even if converted file is found')
-    parser.add_argument(
+    command_group = parser.add_mutually_exclusive_group()
+    command_group.add_argument(
         '-s', '--hide-results', dest='hide_results',
         action='store_true', default=False,
         help='hide/suppress results from being printed')
+    command_group.add_argument(
+        '-c', '--convert-only', dest='convert_only',
+        action='store_true', default=False,
+        help=(
+            'only do the first step of converting the file, without processing it '
+            '(this should not be necessary, since conversion is done automatically and is mostly '
+            'just an implementation detail)'
+        ))
 
 
 def parse_args() -> argparse.Namespace:
-    parser = argparse.ArgumentParser(description='Process a file converted from a trace '
-                                                 'directory and output model data.')
+    parser = argparse.ArgumentParser(
+        description='Process ROS 2 trace data and output model data.')
     add_args(parser)
     return parser.parse_args()
 
@@ -53,13 +63,21 @@ def process(
     input_path: str,
     force_conversion: bool = False,
     hide_results: bool = False,
+    convert_only: bool = False,
 ) -> int:
     """
-    Process converted trace file.
+    Process ROS 2 trace data and output model data.
 
+    The trace data will be automatically converted into
+    an internal intermediate representation if needed.
+
     :param input_path: the path to a converted file or trace directory
     :param force_conversion: whether to re-creating converted file even if it is found
     :param hide_results: whether to hide results and not print them
+    :param convert_only: whether to only convert the file into our internal intermediate
+        representation, without processing it. This should usually not be necessary since
+        conversion is done automatically only when needed or when explicitly requested with
+        force_conversion; conversion is mostly just an implementation detail
     """
     input_path = os.path.expanduser(input_path)
     if not os.path.exists(input_path):
@@ -69,6 +87,11 @@ def process(
     start_time = time.time()
 
     events = load_file(input_path, do_convert_if_needed=True, force_conversion=force_conversion)
+
+    # Return now if we only need to convert the file
+    if convert_only:
+        return 0
+
     processor = Processor(Ros2Handler())
     processor.process(events)
 
@@ -86,4 +109,5 @@ def main():
         args.input_path,
         args.force_conversion,
         args.hide_results,
+        args.convert_only,
     )
@@ -1,4 +1,5 @@
 # Copyright 2019 Robert Bosch GmbH
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -40,9 +41,9 @@ class EventMetadata():
         event_name: str,
         timestamp: int,
         cpu_id: int,
-        procname: str = None,
-        pid: int = None,
-        tid: int = None,
+        procname: Optional[str] = None,
+        pid: Optional[int] = None,
+        tid: Optional[int] = None,
     ) -> None:
         """
         Create an EventMetadata.
@@ -196,6 +197,15 @@ class EventHandler(Dependant):
         processor.process(events)
         return handler_object
 
+    def finalize(self) -> None:
+        """
+        Finalize the event handler.
+
+        This should be called at the end, once all events have been processed.
+        """
+        if self._data_model:
+            self._data_model.finalize()
+
 
 class DependencySolver():
     """
@@ -417,6 +427,7 @@ class Processor():
             self._process_event(event)
             self._progress_display.did_work()
         self._progress_display.done(erase=erase_progress)
+        self._finalize_processing()
         self._processing_done = True
 
     def _process_event(self, event: DictEvent) -> None:
@@ -450,6 +461,11 @@ class Processor():
         metadata = EventMetadata(event_name, timestamp, cpu_id, procname, pid, tid)
         handler_function(event, metadata)
 
+    def _finalize_processing(self) -> None:
+        """Notify handlers that processing is done by calling corresponding method."""
+        for handler in self._expanded_handlers:
+            handler.finalize()
+
     def print_data(self) -> None:
         """Print processed data."""
         if self._processing_done:
@@ -495,7 +511,7 @@ class AutoProcessor():
         Get applicable EventHandler instances for a list of events.
 
         :param events: the list of events
-        :return the concrete EventHandler instances which are applicable
+        :return: the concrete EventHandler instances which are applicable
         """
         event_names = Processor.get_event_names(events)
         # Force import of all processor submodules (i.e. files) so that we can find all
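Note: `Processor.process()` now ends by calling each handler's `finalize()`, which in turn finalizes its data model. A sketch of the typical flow, based on the calls that appear elsewhere in this changeset; the trace path is made up, and `load_file` living in the `loading` module (with conversion defaults) is an assumption:

    from tracetools_analysis.loading import load_file  # module name assumed
    from tracetools_analysis.processor import Processor
    from tracetools_analysis.processor.ros2 import Ros2Handler

    events = load_file('~/tracing/example-session')  # made-up path
    processor = Processor(Ros2Handler())
    processor.process(events)  # finalizes the handler's data model at the end
    processor.print_data()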
@ -1,4 +1,5 @@
|
||||||
# Copyright 2019 Robert Bosch GmbH
|
# Copyright 2019 Robert Bosch GmbH
|
||||||
|
# Copyright 2020 Christophe Bedard
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
@ -19,6 +20,7 @@ from typing import Set
|
||||||
from typing import Tuple
|
from typing import Tuple
|
||||||
|
|
||||||
from tracetools_read import get_field
|
from tracetools_read import get_field
|
||||||
|
from tracetools_trace.tools import tracepoints as tp
|
||||||
|
|
||||||
from . import EventHandler
|
from . import EventHandler
|
||||||
from . import EventMetadata
|
from . import EventMetadata
|
||||||
|
@ -40,34 +42,56 @@ class Ros2Handler(EventHandler):
|
||||||
"""Create a Ros2Handler."""
|
"""Create a Ros2Handler."""
|
||||||
# Link a ROS trace event to its corresponding handling method
|
# Link a ROS trace event to its corresponding handling method
|
||||||
handler_map: HandlerMap = {
|
handler_map: HandlerMap = {
|
||||||
'ros2:rcl_init':
|
tp.rcl_init:
|
||||||
self._handle_rcl_init,
|
self._handle_rcl_init,
|
||||||
'ros2:rcl_node_init':
|
tp.rcl_node_init:
|
||||||
self._handle_rcl_node_init,
|
self._handle_rcl_node_init,
|
||||||
'ros2:rcl_publisher_init':
|
tp.rmw_publisher_init:
|
||||||
|
self._handle_rmw_publisher_init,
|
||||||
|
tp.rcl_publisher_init:
|
||||||
self._handle_rcl_publisher_init,
|
self._handle_rcl_publisher_init,
|
||||||
'ros2:rcl_subscription_init':
|
tp.rclcpp_publish:
|
||||||
|
self._handle_rclcpp_publish,
|
||||||
|
tp.rcl_publish:
|
||||||
|
self._handle_rcl_publish,
|
||||||
|
tp.rmw_publish:
|
||||||
|
self._handle_rmw_publish,
|
||||||
|
tp.rmw_subscription_init:
|
||||||
|
self._handle_rmw_subscription_init,
|
||||||
|
tp.rcl_subscription_init:
|
||||||
self._handle_rcl_subscription_init,
|
self._handle_rcl_subscription_init,
|
||||||
'ros2:rclcpp_subscription_init':
|
tp.rclcpp_subscription_init:
|
||||||
self._handle_rclcpp_subscription_init,
|
self._handle_rclcpp_subscription_init,
|
||||||
'ros2:rclcpp_subscription_callback_added':
|
tp.rclcpp_subscription_callback_added:
|
||||||
self._handle_rclcpp_subscription_callback_added,
|
self._handle_rclcpp_subscription_callback_added,
|
||||||
'ros2:rcl_service_init':
|
tp.rmw_take:
|
||||||
|
self._handle_rmw_take,
|
||||||
|
tp.rcl_take:
|
||||||
|
self._handle_rcl_take,
|
||||||
|
tp.rclcpp_take:
|
||||||
|
self._handle_rclcpp_take,
|
||||||
|
tp.rcl_service_init:
|
||||||
self._handle_rcl_service_init,
|
self._handle_rcl_service_init,
|
||||||
'ros2:rclcpp_service_callback_added':
|
tp.rclcpp_service_callback_added:
|
||||||
self._handle_rclcpp_service_callback_added,
|
self._handle_rclcpp_service_callback_added,
|
||||||
'ros2:rcl_client_init':
|
tp.rcl_client_init:
|
||||||
self._handle_rcl_client_init,
|
self._handle_rcl_client_init,
|
||||||
'ros2:rcl_timer_init':
|
tp.rcl_timer_init:
|
||||||
self._handle_rcl_timer_init,
|
self._handle_rcl_timer_init,
|
||||||
'ros2:rclcpp_timer_callback_added':
|
tp.rclcpp_timer_callback_added:
|
||||||
self._handle_rclcpp_timer_callback_added,
|
self._handle_rclcpp_timer_callback_added,
|
||||||
'ros2:rclcpp_callback_register':
|
tp.rclcpp_timer_link_node:
|
||||||
|
self._handle_rclcpp_timer_link_node,
|
||||||
|
tp.rclcpp_callback_register:
|
||||||
self._handle_rclcpp_callback_register,
|
self._handle_rclcpp_callback_register,
|
||||||
'ros2:callback_start':
|
tp.callback_start:
|
||||||
self._handle_callback_start,
|
self._handle_callback_start,
|
||||||
'ros2:callback_end':
|
tp.callback_end:
|
||||||
self._handle_callback_end,
|
self._handle_callback_end,
|
||||||
|
tp.rcl_lifecycle_state_machine_init:
|
||||||
|
self._handle_rcl_lifecycle_state_machine_init,
|
||||||
|
tp.rcl_lifecycle_transition:
|
||||||
|
self._handle_rcl_lifecycle_transition,
|
||||||
}
|
}
|
||||||
super().__init__(
|
super().__init__(
|
||||||
handler_map=handler_map,
|
handler_map=handler_map,
|
||||||
|
@@ -81,7 +105,7 @@ class Ros2Handler(EventHandler):
     @staticmethod
     def required_events() -> Set[str]:
         return {
-            'ros2:rcl_init',
+            tp.rcl_init,
         }

     @property
@@ -108,6 +132,14 @@ class Ros2Handler(EventHandler):
         namespace = get_field(event, 'namespace')
         self.data.add_node(handle, timestamp, tid, rmw_handle, name, namespace)

+    def _handle_rmw_publisher_init(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        handle = get_field(event, 'rmw_publisher_handle')
+        timestamp = metadata.timestamp
+        gid = get_field(event, 'gid')
+        self.data.add_rmw_publisher(handle, timestamp, gid)
+
     def _handle_rcl_publisher_init(
         self, event: Dict, metadata: EventMetadata,
     ) -> None:
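The new handler forwards the rmw publisher handle, creation timestamp, and DDS GID to the data model via add_rmw_publisher. That method's body is not part of this hunk; a minimal sketch of what such a collector could look like (hypothetical RmwPublisherCollector class, not the actual Ros2DataModel implementation) is:

import pandas as pd


class RmwPublisherCollector:
    """Hypothetical stand-in for the data model's rmw publisher storage."""

    def __init__(self) -> None:
        # One row per rmw publisher handle: creation timestamp and DDS GID
        self._rows = []

    def add_rmw_publisher(self, handle: int, timestamp: int, gid: int) -> None:
        self._rows.append(
            {'rmw_publisher_handle': handle, 'timestamp': timestamp, 'gid': gid})

    def to_dataframe(self) -> pd.DataFrame:
        # Index by handle, mirroring how other handle-keyed tables are exposed
        return pd.DataFrame(self._rows).set_index('rmw_publisher_handle')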
@@ -117,7 +149,37 @@ class Ros2Handler(EventHandler):
         rmw_handle = get_field(event, 'rmw_publisher_handle')
         topic_name = get_field(event, 'topic_name')
         depth = get_field(event, 'queue_depth')
-        self.data.add_publisher(handle, timestamp, node_handle, rmw_handle, topic_name, depth)
+        self.data.add_rcl_publisher(handle, timestamp, node_handle, rmw_handle, topic_name, depth)

+    def _handle_rclcpp_publish(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        timestamp = metadata.timestamp
+        message = get_field(event, 'message')
+        self.data.add_rclcpp_publish_instance(timestamp, message)
+
+    def _handle_rcl_publish(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        handle = get_field(event, 'publisher_handle')
+        timestamp = metadata.timestamp
+        message = get_field(event, 'message')
+        self.data.add_rcl_publish_instance(handle, timestamp, message)
+
+    def _handle_rmw_publish(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        timestamp = metadata.timestamp
+        message = get_field(event, 'message')
+        self.data.add_rmw_publish_instance(timestamp, message)
+
+    def _handle_rmw_subscription_init(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        handle = get_field(event, 'rmw_subscription_handle')
+        timestamp = metadata.timestamp
+        gid = get_field(event, 'gid')
+        self.data.add_rmw_subscription(handle, timestamp, gid)
+
     def _handle_rcl_subscription_init(
         self, event: Dict, metadata: EventMetadata,
@@ -148,6 +210,32 @@ class Ros2Handler(EventHandler):
         callback_object = get_field(event, 'callback')
         self.data.add_callback_object(subscription_pointer, timestamp, callback_object)

+    def _handle_rmw_take(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        subscription_handle = get_field(event, 'rmw_subscription_handle')
+        timestamp = metadata.timestamp
+        message = get_field(event, 'message')
+        source_timestamp = get_field(event, 'source_timestamp')
+        taken = bool(get_field(event, 'taken'))
+        self.data.add_rmw_take_instance(
+            subscription_handle, timestamp, message, source_timestamp, taken
+        )
+
+    def _handle_rcl_take(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        timestamp = metadata.timestamp
+        message = get_field(event, 'message')
+        self.data.add_rcl_take_instance(timestamp, message)
+
+    def _handle_rclcpp_take(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        timestamp = metadata.timestamp
+        message = get_field(event, 'message')
+        self.data.add_rclcpp_take_instance(timestamp, message)
+
     def _handle_rcl_service_init(
         self, event: Dict, metadata: EventMetadata,
     ) -> None:
@@ -193,6 +281,14 @@ class Ros2Handler(EventHandler):
         callback_object = get_field(event, 'callback')
         self.data.add_callback_object(handle, timestamp, callback_object)

+    def _handle_rclcpp_timer_link_node(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        handle = get_field(event, 'timer_handle')
+        timestamp = metadata.timestamp
+        node_handle = get_field(event, 'node_handle')
+        self.data.add_timer_node_link(handle, timestamp, node_handle)
+
     def _handle_rclcpp_callback_register(
         self, event: Dict, metadata: EventMetadata,
     ) -> None:
@@ -226,3 +322,19 @@ class Ros2Handler(EventHandler):
                 bool(is_intra_process))
         else:
             print(f'No matching callback start for callback object "{callback_object}"')
+
+    def _handle_rcl_lifecycle_state_machine_init(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        node_handle = get_field(event, 'node_handle')
+        state_machine = get_field(event, 'state_machine')
+        self.data.add_lifecycle_state_machine(state_machine, node_handle)
+
+    def _handle_rcl_lifecycle_transition(
+        self, event: Dict, metadata: EventMetadata,
+    ) -> None:
+        timestamp = metadata.timestamp
+        state_machine = get_field(event, 'state_machine')
+        start_label = get_field(event, 'start_label')
+        goal_label = get_field(event, 'goal_label')
+        self.data.add_lifecycle_state_transition(state_machine, start_label, goal_label, timestamp)
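Each handler receives the raw event dictionary plus its metadata, extracts only the fields it needs, and forwards them to the data model. A rough, self-contained illustration of that flow for the new lifecycle transition handler; the event values, the FakeMetadata tuple, and the simplified get_field are invented for the example (the real code uses tracetools_read.get_field and the EventMetadata class):

from typing import Any, Dict, NamedTuple


class FakeMetadata(NamedTuple):
    # Stand-in for EventMetadata: only the timestamp is used here
    timestamp: int


def get_field(event: Dict[str, Any], name: str) -> Any:
    # Simplified stand-in for tracetools_read.get_field
    return event[name]


def handle_rcl_lifecycle_transition(event: Dict[str, Any], metadata: FakeMetadata) -> None:
    state_machine = get_field(event, 'state_machine')
    start_label = get_field(event, 'start_label')
    goal_label = get_field(event, 'goal_label')
    print(f'{metadata.timestamp}: {state_machine:#x} {start_label} -> {goal_label}')


# Hypothetical event payload, mirroring the fields read by the real handler
handle_rcl_lifecycle_transition(
    {'state_machine': 0x7F00DEAD, 'start_label': 'unconfigured', 'goal_label': 'configuring'},
    FakeMetadata(timestamp=123456789),
)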
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import numpy as np
 import pandas as pd

 from tracetools_analysis.loading import load_file
@@ -48,7 +49,8 @@ def main():

     stat_data = []
     for ptr, name in du.get_callback_symbols().items():
-        durations = du.get_callback_durations(ptr)['duration']
+        # Convert to milliseconds to display it
+        durations = du.get_callback_durations(ptr)['duration'] * 1000 / np.timedelta64(1, 's')
         stat_data.append((
             durations.count(),
             durations.sum(),
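The duration column now comes back as np.timedelta64 values rather than pre-converted milliseconds, so the script converts explicitly: multiplying by 1000 and then dividing by np.timedelta64(1, 's') turns a timedelta into a float number of milliseconds. A small standalone check of that arithmetic, on toy values rather than trace data:

import numpy as np
import pandas as pd

# Three callback durations as timedeltas: 1.5 ms, 250 µs, 2 ms
durations = pd.Series(pd.to_timedelta([1_500_000, 250_000, 2_000_000], unit='ns'))

# timedelta * 1000 is still a timedelta; dividing by one second yields a float,
# so the result is the duration expressed in milliseconds
durations_ms = durations * 1000 / np.timedelta64(1, 's')
print(durations_ms.tolist())  # [1.5, 0.25, 2.0]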
@@ -57,5 +59,8 @@ def main():
             format_fn(name),
         ))

-    stat_df = pd.DataFrame(columns=['Count', 'Sum', 'Mean', 'Std', 'Name'], data=stat_data)
-    print(stat_df.sort_values(by='Sum', ascending=False).to_string())
+    stat_df = pd.DataFrame(
+        columns=['Count', 'Sum (ms)', 'Mean (ms)', 'Std', 'Name'],
+        data=stat_data,
+    )
+    print(stat_df.sort_values(by='Sum (ms)', ascending=False).to_string())
@@ -19,6 +19,7 @@ from typing import List
 from typing import Optional
 from typing import Union

+import numpy as np
 from pandas import DataFrame

 from ..data_model import DataModel
@@ -72,12 +73,12 @@ class DataModelUtil():
         # Convert from ns to ms
         if len(columns_ns_to_ms) > 0:
             df[columns_ns_to_ms] = df[columns_ns_to_ms].applymap(
-                lambda t: t / 1000000.0
+                lambda t: t / 1000000.0 if not np.isnan(t) else t
             )
         # Convert from ns to ms + ms to datetime, as UTC
         if len(columns_ns_to_datetime) > 0:
             df[columns_ns_to_datetime] = df[columns_ns_to_datetime].applymap(
-                lambda t: dt.utcfromtimestamp(t / 1000000000.0)
+                lambda t: dt.utcfromtimestamp(t / 1000000000.0) if not np.isnan(t) else t
             )
         return df

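The guard matters because datetime.utcfromtimestamp (and plain division) would fail or produce garbage for missing values; with the change, NaN entries pass through untouched. A quick illustration on a toy frame, with column names invented for the example:

from datetime import datetime as dt

import numpy as np
import pandas as pd

df = pd.DataFrame({'duration': [1_500_000.0, np.nan], 'timestamp': [1_000_000_000.0, np.nan]})

# ns -> ms, leaving NaN untouched
df[['duration']] = df[['duration']].applymap(lambda t: t / 1000000.0 if not np.isnan(t) else t)
# ns -> datetime (UTC), leaving NaN untouched
df[['timestamp']] = df[['timestamp']].applymap(
    lambda t: dt.utcfromtimestamp(t / 1000000000.0) if not np.isnan(t) else t)
print(df)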
@@ -1,5 +1,6 @@
 # Copyright 2019 Robert Bosch GmbH
 # Copyright 2019 Apex.AI, Inc.
+# Copyright 2021 Christophe Bedard
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,11 +17,15 @@
 """Module for ROS data model utils."""

 from typing import Any
+from typing import Dict
 from typing import List
 from typing import Mapping
 from typing import Optional
 from typing import Union

+import numpy as np
+import pandas as pd
+from pandas import concat
 from pandas import DataFrame

 from . import DataModelUtil
@@ -113,17 +118,114 @@ class Ros2DataModelUtil(DataModelUtil):
         callback_instances = self.data.callback_instances
         callback_symbols = self.data.callback_symbols

+        if callback_instances.empty:
+            return {}
+
         # Get a list of callback objects
         callback_objects = set(callback_instances['callback_object'])
         # Get their symbol
-        return {
-            obj: self._prettify(callback_symbols.loc[obj, 'symbol']) for obj in callback_objects
-        }
+        pretty_symbols = {}
+        for obj in callback_objects:
+            # There could be multiple callback symbols for the same callback object (pointer),
+            # e.g., if we create and destroy subscriptions dynamically
+            symbols = callback_symbols.loc[obj, 'symbol']
+            symbols = symbols if isinstance(symbols, pd.Series) else [symbols]
+            # In that case, just combine the symbols
+            pretty_symbols[obj] = ' and '.join(self._prettify(symbol) for symbol in symbols)
+        return pretty_symbols

     def get_tids(self) -> List[str]:
         """Get a list of thread ids corresponding to the nodes."""
         return self.data.nodes['tid'].unique().tolist()

+    def get_rcl_publish_instances(self, topic_name) -> Optional[DataFrame]:
+        """
+        Get rcl publish instances for all publishers with the given topic name.
+
+        :param topic_name: the topic name
+        :return: dataframe with [publisher handle, publish timestamp, message] columns,
+            or `None` if topic name not found
+        """
+        # We could have more than one publisher for the topic
+        publisher_handles = self.data.rcl_publishers.loc[
+            self.data.rcl_publishers['topic_name'] == topic_name
+        ].index.values.astype(int)
+        if len(publisher_handles) == 0:
+            return None
+        publish_instances = self.data.rcl_publish_instances.loc[
+            self.data.rcl_publish_instances['publisher_handle'].isin(publisher_handles)
+        ]
+        publish_instances.reset_index(drop=True, inplace=True)
+        self.convert_time_columns(publish_instances, [], ['timestamp'], True)
+        return publish_instances
+
+    def get_publish_instances(self) -> DataFrame:
+        """
+        Get all publish instances (rclcpp, rcl, rmw) in a single dataframe.
+
+        The rows are ordered by publish timestamp, so the order will usually be: rclcpp, rcl, rmw.
+        However, this does not apply to publications from internal publishers, i.e.,
+        publications that originate from below rclcpp (rcl or rmw).
+        TODO(christophebedard) find heuristic to exclude those?
+
+        :return: dataframe with [timestamp, message, layer 'rclcpp'|'rcl'|'rmw', publisher handle]
+            columns, ordered by timestamp,
+            and where the publisher handle is only set (non-zero) for 'rcl' publish instances
+        """
+        # Add publisher handle columns with zeros for dataframes that do not have this column,
+        # otherwise NaN is used and the publisher handle values for rcl are converted to float
+        rclcpp_instances = self.data.rclcpp_publish_instances.copy()
+        rclcpp_instances['layer'] = 'rclcpp'
+        rclcpp_instances['publisher_handle'] = 0
+        rcl_instances = self.data.rcl_publish_instances.copy()
+        rcl_instances['layer'] = 'rcl'
+        rmw_instances = self.data.rmw_publish_instances.copy()
+        rmw_instances['layer'] = 'rmw'
+        rmw_instances['publisher_handle'] = 0
+        publish_instances = concat([rclcpp_instances, rcl_instances, rmw_instances], axis=0)
+        publish_instances.sort_values('timestamp', inplace=True)
+        publish_instances.reset_index(drop=True, inplace=True)
+        self.convert_time_columns(publish_instances, [], ['timestamp'], True)
+        return publish_instances
+
+    def get_take_instances(self) -> DataFrame:
+        """
+        Get all take instances (rmw, rcl, rclcpp) in a single dataframe.
+
+        The rows are ordered by take timestamp, so the order will usually be: rmw, rcl, rclcpp.
+        However, this does not apply to takes from internal subscriptions, i.e.,
+        takes that originate from below rclcpp (rcl or rmw).
+        TODO(christophebedard) find heuristic to exclude those?
+
+        :return: dataframe with
+            [timestamp, message, source timestamp,
+            layer 'rmw'|'rcl'|'rclcpp', rmw subscription handle, taken]
+            columns, ordered by timestamp,
+            and where the rmw subscription handle, source timestamp, and taken flag are only set
+            (non-zero, non-False) for 'rmw' take instances
+        """
+        rmw_instances = self.data.rmw_take_instances.copy()
+        rmw_instances['layer'] = 'rmw'
+        rmw_instances.rename(
+            columns={'subscription_handle': 'rmw_subscription_handle'},
+            inplace=True,
+        )
+        rcl_instances = self.data.rcl_take_instances.copy()
+        rcl_instances['layer'] = 'rcl'
+        rcl_instances['rmw_subscription_handle'] = 0
+        rcl_instances['source_timestamp'] = 0
+        rcl_instances['taken'] = False
+        rclcpp_instances = self.data.rclcpp_take_instances.copy()
+        rclcpp_instances['layer'] = 'rclcpp'
+        rclcpp_instances['rmw_subscription_handle'] = 0
+        rclcpp_instances['source_timestamp'] = 0
+        rclcpp_instances['taken'] = False
+        take_instances = concat([rmw_instances, rcl_instances, rclcpp_instances], axis=0)
+        take_instances.sort_values('timestamp', inplace=True)
+        take_instances.reset_index(drop=True, inplace=True)
+        self.convert_time_columns(take_instances, [], ['timestamp', 'source_timestamp'], True)
+        return take_instances
+
     def get_callback_durations(
         self,
         callback_obj: int,
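The isinstance(symbols, pd.Series) check exists because indexing a dataframe by a non-unique index value returns a Series of all matching rows, while a unique value returns a scalar. A small demonstration of that pandas behaviour, using a toy symbol table rather than real trace data:

import pandas as pd

# Two callback symbols registered against the same callback object pointer
callback_symbols = pd.DataFrame(
    {'symbol': ['NodeA::on_msg(Msg)', 'NodeB::on_msg(Msg)']},
    index=[0xDEAD, 0xDEAD],
)

symbols = callback_symbols.loc[0xDEAD, 'symbol']
# Non-unique index value -> a Series; a unique value would be a plain string
symbols = symbols if isinstance(symbols, pd.Series) else [symbols]
print(' and '.join(str(s) for s in symbols))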
|
@ -132,15 +234,13 @@ class Ros2DataModelUtil(DataModelUtil):
|
||||||
Get durations of callback instances for a given callback object.
|
Get durations of callback instances for a given callback object.
|
||||||
|
|
||||||
:param callback_obj: the callback object value
|
:param callback_obj: the callback object value
|
||||||
:return: a dataframe containing the start timestamp (datetime)
|
:return: a dataframe containing the start timestamp (np.timestamp64)
|
||||||
and duration (ms) of all callback instances for that object
|
and duration (np.timedelta64) of all callback instances for that object
|
||||||
"""
|
"""
|
||||||
data = self.data.callback_instances.loc[
|
return self.data.callback_instances.loc[
|
||||||
self.data.callback_instances.loc[:, 'callback_object'] == callback_obj,
|
self.data.callback_instances.loc[:, 'callback_object'] == callback_obj,
|
||||||
['timestamp', 'duration']
|
['timestamp', 'duration']
|
||||||
]
|
]
|
||||||
# Time conversion
|
|
||||||
return self.convert_time_columns(data, ['duration'], ['timestamp'])
|
|
||||||
|
|
||||||
def get_node_tid_from_name(
|
def get_node_tid_from_name(
|
||||||
self,
|
self,
|
||||||
|
@@ -199,7 +299,7 @@ class Ros2DataModelUtil(DataModelUtil):
         if reference in self.data.timers.index:
             type_name = 'Timer'
             info = self.get_timer_handle_info(reference)
-        elif reference in self.data.publishers.index:
+        elif reference in self.data.rcl_publishers.index:
             type_name = 'Publisher'
             info = self.get_publisher_handle_info(reference)
         elif reference in self.data.subscription_objects.index:
@@ -214,7 +314,8 @@ class Ros2DataModelUtil(DataModelUtil):

         if info is None:
             return None
-        return f'{type_name} -- {self.format_info_dict(info)}'
+        info_str = self.format_info_dict(info, sep='\n')
+        return f'{type_name}\n{info_str}'

     def get_timer_handle_info(
         self,
@@ -226,14 +327,18 @@ class Ros2DataModelUtil(DataModelUtil):
         :param timer_handle: the timer handle value
         :return: a dictionary with name:value info, or `None` if it fails
         """
-        # TODO find a way to link a timer to a specific node
         if timer_handle not in self.data.timers.index:
             return None

+        node_handle = self.data.timer_node_links.loc[timer_handle, 'node_handle']
+        node_handle_info = self.get_node_handle_info(node_handle)
+        if node_handle_info is None:
+            return None
+
         tid = self.data.timers.loc[timer_handle, 'tid']
         period_ns = self.data.timers.loc[timer_handle, 'period']
         period_ms = period_ns / 1000000.0
-        return {'tid': tid, 'period': f'{period_ms:.0f} ms'}
+        return {**node_handle_info, 'tid': tid, 'period': f'{period_ms:.0f} ms'}

     def get_publisher_handle_info(
         self,
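The timer info now folds in the owning node's info, looked up through the new timer_node_links table, by merging dictionaries. A minimal illustration of that merge with toy values:

node_handle_info = {'node': 'my_node', 'tid': 1234}
period_ms = 5000000 / 1000000.0  # period recorded in ns -> ms

timer_info = {**node_handle_info, 'tid': 1234, 'period': f'{period_ms:.0f} ms'}
print(timer_info)  # {'node': 'my_node', 'tid': 1234, 'period': '5 ms'}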
@@ -245,14 +350,14 @@ class Ros2DataModelUtil(DataModelUtil):
         :param publisher_handle: the publisher handle value
         :return: a dictionary with name:value info, or `None` if it fails
         """
-        if publisher_handle not in self.data.publishers.index:
+        if publisher_handle not in self.data.rcl_publishers.index:
             return None

-        node_handle = self.data.publishers.loc[publisher_handle, 'node_handle']
+        node_handle = self.data.rcl_publishers.loc[publisher_handle, 'node_handle']
         node_handle_info = self.get_node_handle_info(node_handle)
         if node_handle_info is None:
             return None
-        topic_name = self.data.publishers.loc[publisher_handle, 'topic_name']
+        topic_name = self.data.rcl_publishers.loc[publisher_handle, 'topic_name']
         publisher_info = {'topic': topic_name}
         return {**node_handle_info, **publisher_info}

@@ -283,7 +388,7 @@ class Ros2DataModelUtil(DataModelUtil):
             columns=['timestamp'],
             axis=1,
         )
-        subscriptions_simple = self.data.subscriptions.drop(
+        subscriptions_simple = self.data.rcl_subscriptions.drop(
             columns=['timestamp', 'rmw_handle'],
             inplace=False,
         )
@@ -302,12 +407,27 @@ class Ros2DataModelUtil(DataModelUtil):
             right_index=True,
         )

-        node_handle = subscriptions_info.loc[subscription_reference, 'node_handle']
-        node_handle_info = self.get_node_handle_info(node_handle)
-        if node_handle_info is None:
-            return None
-        topic_name = subscriptions_info.loc[subscription_reference, 'topic_name']
+        # There could be multiple subscriptions for the same subscription object pointer, e.g., if
+        # we create and destroy subscriptions dynamically, so this subscription could belong to
+        # more than one node
+        # In that case, just combine the information
+        node_handles = subscriptions_info.loc[subscription_reference, 'node_handle']
+        node_handles = node_handles if isinstance(node_handles, pd.Series) else [node_handles]
+        topic_names = subscriptions_info.loc[subscription_reference, 'topic_name']
+        topic_names = topic_names if isinstance(topic_names, pd.Series) else [topic_names]
+        nodes_handle_info = []
+        for node_handle in node_handles:
+            node_handle_info = self.get_node_handle_info(node_handle)
+            if node_handle_info is None:
+                return None
+            nodes_handle_info.append(node_handle_info)
+        topic_name = ' and '.join(topic_names)
         subscription_info = {'topic': topic_name}
+        # Turn list of dicts into dict of combined values
+        node_handle_info = {
+            key: ' and '.join({str(info[key]) for info in nodes_handle_info})
+            for key in nodes_handle_info[0]
+        }
         return {**node_handle_info, **subscription_info}

     def get_service_handle_info(
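The final comprehension flattens a list of per-node info dicts into a single dict whose values are the distinct per-key values joined with ' and '. A standalone illustration with invented node names:

nodes_handle_info = [
    {'node': 'node_a', 'tid': 100},
    {'node': 'node_b', 'tid': 100},
]

combined = {
    # The set comprehension de-duplicates values before joining them
    key: ' and '.join({str(info[key]) for info in nodes_handle_info})
    for key in nodes_handle_info[0]
}
print(combined)  # e.g. {'node': 'node_a and node_b', 'tid': '100'}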
@@ -369,8 +489,87 @@ class Ros2DataModelUtil(DataModelUtil):
         tid = self.data.nodes.loc[node_handle, 'tid']
         return {'node': node_name, 'tid': tid}

+    def get_lifecycle_node_handle_info(
+        self,
+        lifecycle_node_handle: int,
+    ) -> Optional[Mapping[str, Any]]:
+        """
+        Get information about a lifecycle node handle.
+
+        :param lifecycle_node_handle: the lifecycle node handle value
+        :return: a dictionary with name:value info, or `None` if it fails
+        """
+        node_info = self.get_node_handle_info(lifecycle_node_handle)
+        if not node_info:
+            return None
+        # TODO(christophebedard) validate that it is an actual lifecycle node and not just a node
+        node_info['lifecycle node'] = node_info.pop('node')  # type: ignore
+        return node_info
+
+    def get_lifecycle_node_state_intervals(
+        self,
+    ) -> Dict[int, DataFrame]:
+        """
+        Get state intervals (start, end) for all lifecycle nodes.
+
+        The returned dictionary contains a dataframe for each lifecycle node handle:
+        (lifecycle node handle -> [state string, start timestamp, end timestamp])
+
+        In cases where there is no explicit timestamp (e.g. end of state),
+        `np.nan` is used instead.
+        The node creation timestamp is used as the start timestamp of the first state.
+        TODO(christophebedard) do the same with context shutdown for the last end time
+
+        :return: dictionary with a dataframe (with each row containing state interval information)
+            for each lifecycle node
+        """
+        lifecycle_transitions = self.data.lifecycle_transitions.copy()
+        if lifecycle_transitions.empty:
+            return {}
+
+        data = {}
+        state_machine_handles = set(lifecycle_transitions['state_machine_handle'])
+        for state_machine_handle in state_machine_handles:
+            transitions = lifecycle_transitions.loc[
+                lifecycle_transitions.loc[:, 'state_machine_handle'] == state_machine_handle,
+                ['start_label', 'goal_label', 'timestamp']
+            ]
+            # Get lifecycle node handle from state machine handle
+            lifecycle_node_handle = self.data.lifecycle_state_machines.loc[
+                state_machine_handle, 'node_handle'
+            ]
+
+            # Infer first start time from node creation timestamp
+            node_creation_timestamp = self.data.nodes.loc[lifecycle_node_handle, 'timestamp']
+
+            # Add initial and final timestamps
+            # Last state has an unknown end timestamp
+            first_state_label = transitions.loc[0, 'start_label']
+            last_state_label = transitions.loc[transitions.index[-1], 'goal_label']
+            transitions.loc[-1] = ['', first_state_label, node_creation_timestamp]
+            transitions.index = transitions.index + 1
+            transitions.sort_index(inplace=True)
+            transitions.loc[transitions.index.max() + 1] = [last_state_label, '', np.nan]
+
+            # Process transitions to get start/end timestamp of each instance of a state
+            end_timestamps = transitions[['timestamp']].shift(periods=-1)
+            end_timestamps.rename(
+                columns={end_timestamps.columns[0]: 'end_timestamp'}, inplace=True)
+            states = concat([transitions, end_timestamps], axis=1)
+            states.drop(['start_label'], axis=1, inplace=True)
+            states.rename(
+                columns={'goal_label': 'state', 'timestamp': 'start_timestamp'}, inplace=True)
+            states.drop(states.tail(1).index, inplace=True)
+
+            # Convert time columns
+            self.convert_time_columns(states, [], ['start_timestamp', 'end_timestamp'], True)
+
+            data[lifecycle_node_handle] = states
+        return data
+
     def format_info_dict(
         self,
         info_dict: Mapping[str, Any],
+        sep: str = ', ',
     ) -> str:
-        return ', '.join([f'{key}: {val}' for key, val in info_dict.items()])
+        return sep.join(f'{key}: {val}' for key, val in info_dict.items())
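The interval construction leans on shift(periods=-1): each transition's timestamp becomes the start of the state it enters, and the following transition's timestamp becomes that state's end (NaN for the last one). A self-contained sketch of that step on a toy transition table, with labels and timestamps invented for the example:

import numpy as np
import pandas as pd

transitions = pd.DataFrame({
    'start_label': ['', 'unconfigured', 'configuring'],
    'goal_label': ['unconfigured', 'configuring', 'inactive'],
    'timestamp': [100, 150, 400],
})
# The last state has no explicit end yet
transitions.loc[len(transitions)] = ['inactive', '', np.nan]

# Pair each state's start timestamp with the next transition's timestamp as its end
end_timestamps = transitions[['timestamp']].shift(periods=-1)
end_timestamps.rename(columns={'timestamp': 'end_timestamp'}, inplace=True)
states = pd.concat([transitions, end_timestamps], axis=1)
states.drop(['start_label'], axis=1, inplace=True)
states.rename(columns={'goal_label': 'state', 'timestamp': 'start_timestamp'}, inplace=True)
states.drop(states.tail(1).index, inplace=True)
print(states)
# state 'unconfigured' runs from 100 to 150, 'configuring' from 150 to 400,
# and 'inactive' starts at 400 with an unknown (NaN) end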