Fix lint errors in tracetools_analysis

This commit is contained in:
Christophe Bedard 2019-06-07 11:05:20 +02:00
parent c98d525d4b
commit e5b8d1782f
7 changed files with 20 additions and 11 deletions

View file

@ -1,6 +1,7 @@
# Event handler
import sys
from . import lttng_models

View file

@ -1,13 +1,17 @@
# Model objects for LTTng traces/events
def get_field(event, field_name, default=None):
    return event.get(field_name, default)
def get_name(event):
    return get_field(event, '_name')
class EventMetadata():
    """Container for event metadata."""
    def __init__(self, event_name, pid, tid, timestamp, procname):
        self._event_name = event_name
        self._pid = pid

View file

@ -1,8 +1,8 @@
# Process trace events and create ROS model
import sys
from .lttng_models import get_field
from .handler import EventHandler
from .lttng_models import get_field
def ros_process(events):
    """

View file

@ -1,7 +1,7 @@
# Convert processor object to pandas dataframe
import pandas as pd
from .ros_processor import RosProcessor
def callback_durations_to_df(ros_processor):
    callback_addresses = []

View file

@ -1,9 +1,9 @@
# CTF to pickle conversion
import babeltrace
from pickle import Pickler
import time
import babeltrace
# List of ignored CTF fields
_IGNORED_FIELDS = [
    'content_size', 'cpu_id', 'events_discarded', 'id', 'packet_size', 'packet_seq_num',
@ -11,6 +11,7 @@ _IGNORED_FIELDS = [
]
_DISCARD = 'events_discarded'
def ctf_to_pickle(trace_directory, target):
    """
    Load CTF trace and convert to a pickle file.

View file

@ -2,13 +2,15 @@
# Entrypoint/script to convert CTF trace data to a pickle file
# TODO
import sys
from pickle import Pickler
import sys
from tracetools_analysis.conversion import ctf
def main(argv=sys.argv):
    if len(argv) != 3:
print("usage: /trace/directory pickle_target_file") print('usage: /trace/directory pickle_target_file')
        exit(1)
    trace_directory = sys.argv[1]

View file

@ -1,16 +1,17 @@
#!/usr/bin/env python3
# Entrypoint/script to process events from a pickle file to build a ROS model
import sys
import pickle
import pandas as pd import sys
from tracetools_analysis.analysis import ros_processor, to_pandas
def main(argv=sys.argv):
    if len(argv) != 2:
        print('usage: pickle_file')
        exit(1)
    pickle_filename = sys.argv[1]
    with open(pickle_filename, 'rb') as f:
        events = _get_events_from_pickled_file(f)
@ -27,6 +28,6 @@ def _get_events_from_pickled_file(file):
    while True:
        try:
            events.append(p.load())
except EOFError as _: except EOFError:
            break  # we're done
    return events