Skip to content

Commit b0894b0

Browse files
committed
messages: use pyulog for events extraction
Added in PX4/pyulog#94. Also removes the file caching for the events from the log, as decompressing on the fly is fast enough.
1 parent 603f6f7 commit b0894b0

File tree

4 files changed

+26
-161
lines changed

4 files changed

+26
-161
lines changed

app/plot_app/config.py

-5
Original file line number | Diff line number | Diff line change
@@ -46,7 +46,6 @@
4646
__PARAMETERS_FILENAME = os.path.join(__CACHE_FILE_PATH, 'parameters.xml')
4747
__EVENTS_FILENAME = os.path.join(__CACHE_FILE_PATH, 'events.json.xz')
4848
__RELEASES_FILENAME = os.path.join(__CACHE_FILE_PATH, 'releases.json')
49-
__METADATA_CACHE_PATH = os.path.join(__CACHE_FILE_PATH, 'metadata')
5049

5150
__PRINT_TIMING = int(_conf.get('debug', 'print_timing'))
5251
__VERBOSE_OUTPUT = int(_conf.get('debug', 'verbose_output'))
@@ -126,10 +125,6 @@ def get_releases_filename():
126125
""" get configured releases file name """
127126
return __RELEASES_FILENAME
128127

129-
def get_metadata_cache_path():
130-
""" get configured metadata cache path """
131-
return __METADATA_CACHE_PATH
132-
133128
def get_parameters_filename():
134129
""" get configured parameters file name """
135130
return __PARAMETERS_FILENAME

app/plot_app/events.py

+22-152
Original file line number | Diff line number | Diff line change
@@ -1,169 +1,39 @@
11
""" Event parsing """
22
import json
33
import lzma
4-
import os
5-
import sys
6-
import random
4+
from typing import Optional, Any, List, Tuple
75

86
from helper import download_file_maybe
9-
from config import get_events_url, get_events_filename, get_metadata_cache_path
7+
from config import get_events_url, get_events_filename
108
from pyulog import ULog
9+
from pyulog.px4_events import PX4Events
1110

12-
#pylint: disable=wrong-import-position
13-
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'libevents/libs/python'))
14-
from libevents_parse.parser import Parser
11+
# pylint: disable=global-statement
12+
__event_parser: PX4Events = None # Keep the parser to cache the default event definitions
1513

16-
#pylint: disable=invalid-name,global-statement
1714

18-
class FileCache:
19-
""" Very simple file cache with a maximum number of files """
20-
21-
def __init__(self, path, max_num_files=1000):
22-
self._path = path
23-
self._max_num_files = max_num_files
24-
if not os.path.isdir(path):
25-
os.mkdir(path)
26-
27-
def access(self, file_name):
28-
""" check if a file exists in the cache """
29-
return os.path.isfile(os.path.join(self._path, file_name))
30-
31-
def insert(self, file_name, data):
32-
""" insert data (bytes) """
33-
# check cache size
34-
cache_files = os.listdir(self._path)
35-
if len(cache_files) >= self._max_num_files:
36-
# select a random file (could be improved to LRU)
37-
# (if flight review runs in multi-process mode, there's a minimal chance we delete a
38-
# file that's trying to be accessed by another instance)
39-
remove_index = random.randint(0, len(cache_files) - 1)
40-
os.remove(os.path.join(self._path, cache_files[remove_index]))
41-
42-
# write
43-
with open(os.path.join(self._path, file_name), 'wb') as cache_file:
44-
cache_file.write(data)
45-
46-
@property
47-
def path(self):
48-
""" Get configured path """
49-
return self._path
50-
51-
def get_event_definitions_from_log_file(ulog: ULog):
52-
"""
53-
Get the event definitions json file from the log file.
54-
:return: path to json file or None
55-
"""
56-
if 'metadata_events' in ulog.msg_info_multiple_dict and \
57-
'metadata_events_sha256' in ulog.msg_info_dict:
58-
file_hash = ulog.msg_info_dict['metadata_events_sha256']
59-
if len(file_hash) <= 64 and file_hash.isalnum():
60-
61-
file_cache = FileCache(get_metadata_cache_path())
62-
events_metadata_filename = 'events.' + file_hash + '.json'
63-
if not file_cache.access(events_metadata_filename):
64-
# insert into the cache
65-
metadata_events_bytes = b''.join(ulog.msg_info_multiple_dict['metadata_events'][0])
66-
metadata_events_json = lzma.decompress(metadata_events_bytes)
67-
file_cache.insert(events_metadata_filename, metadata_events_json)
68-
69-
return os.path.join(file_cache.path, events_metadata_filename)
70-
71-
return None
72-
73-
74-
__event_parser = None # fallback event parser, used if the log doesn't contain the event definitions
75-
def get_event_parser(ulog: ULog):
76-
""" get event parser instance or None on error """
77-
events_profile = 'dev'
78-
79-
event_definitions_json = get_event_definitions_from_log_file(ulog)
80-
if event_definitions_json is not None:
81-
with open(event_definitions_json, 'r', encoding="utf8") as json_file:
82-
p = Parser()
83-
p.load_definitions(json.load(json_file))
84-
p.set_profile(events_profile)
85-
return p
86-
87-
# No json definitions in the log -> use global definitions
88-
global __event_parser
89-
events_json_xz = get_events_filename()
90-
# check for cached file update
91-
downloaded = download_file_maybe(events_json_xz, get_events_url())
92-
if downloaded == 2 or (downloaded == 1 and __event_parser is None):
93-
# decompress
94-
with lzma.open(events_json_xz, 'rt') as json_file:
95-
p = Parser()
96-
p.load_definitions(json.load(json_file))
97-
p.set_profile(events_profile)
98-
__event_parser = p
99-
100-
return __event_parser
101-
102-
103-
def get_logged_events(ulog):
15+
def get_logged_events(ulog: ULog) -> List[Tuple[int, str, str]]:
10416
"""
10517
Get the events as list of messages
106-
:return: list of (timestamp, time str, log level str, message) tuples
18+
:return: list of (timestamp, log level str, message) tuples
10719
"""
10820

109-
try:
110-
event_parser = get_event_parser(ulog)
111-
except Exception as e:
112-
print('Failed to get event parser: {}'.format(e))
113-
return []
21+
def get_default_json_definitions(already_has_default_parser: bool) -> Optional[Any]:
22+
""" Retrieve the default json event definitions """
11423

115-
def event_log_level_str(log_level: int):
116-
return {0: 'EMERGENCY',
117-
1: 'ALERT',
118-
2: 'CRITICAL',
119-
3: 'ERROR',
120-
4: 'WARNING',
121-
5: 'NOTICE',
122-
6: 'INFO',
123-
7: 'DEBUG',
124-
8: 'PROTOCOL',
125-
9: 'DISABLED'}.get(log_level, 'UNKNOWN')
24+
events_json_xz = get_events_filename()
25+
# Check for cached file update
26+
downloaded = download_file_maybe(events_json_xz, get_events_url())
27+
if downloaded == 2 or (downloaded == 1 and not already_has_default_parser):
28+
# Decompress
29+
with lzma.open(events_json_xz, 'rt') as json_file:
30+
return json.load(json_file)
12631

127-
def time_str(t):
128-
m1, s1 = divmod(int(t/1e6), 60)
129-
h1, m1 = divmod(m1, 60)
130-
return "{:d}:{:02d}:{:02d}".format(h1, m1, s1)
32+
return None
13133

132-
# parse events
133-
messages = []
134-
try:
135-
events = ulog.get_dataset('event')
136-
all_ids = events.data['id']
137-
for event_idx, event_id in enumerate(all_ids):
138-
log_level = (events.data['log_levels'][event_idx] >> 4) & 0xf
139-
if log_level >= 8:
140-
continue
141-
args = []
142-
i = 0
143-
while True:
144-
arg_str = 'arguments[{}]'.format(i)
145-
if arg_str not in events.data:
146-
break
147-
arg = events.data[arg_str][event_idx]
148-
args.append(arg)
149-
i += 1
150-
log_level_str = event_log_level_str(log_level)
151-
t = events.data['timestamp'][event_idx]
152-
event = None
153-
if event_parser is not None:
154-
event = event_parser.parse(event_id, bytes(args))
155-
if event is None:
156-
messages.append((t, time_str(t), log_level_str, \
157-
'[Unknown event with ID {:}]'.format(event_id)))
158-
else:
159-
# only show default group
160-
if event.group() == "default":
161-
messages.append((t, time_str(t), log_level_str, event.message()))
162-
# we could expand this a bit for events:
163-
# - show the description too
164-
# - handle url's as link (currently it's shown as text, and all tags are escaped)
165-
except (KeyError, IndexError, ValueError) as error:
166-
# no events in log
167-
pass
34+
global __event_parser
35+
if __event_parser is None:
36+
__event_parser = PX4Events()
37+
__event_parser.set_default_json_definitions_cb(get_default_json_definitions)
16838

169-
return messages
39+
return __event_parser.get_logged_events(ulog)

app/plot_app/plotted_tables.py

+3-3
Original file line number | Diff line number | Diff line change
@@ -556,12 +556,12 @@ def time_str(t):
556556
# in addition to an event with the same message so we can ignore those
557557
if m.message[-1] == '\t':
558558
continue
559-
messages.append((m.timestamp, time_str(m.timestamp), m.log_level_str(), m.message))
559+
messages.append((m.timestamp, m.log_level_str(), m.message))
560560

561561
messages = sorted(messages, key=lambda m: m[0])
562562

563-
log_times, log_times_str, log_levels, log_messages = \
564-
zip(*messages) if len(messages) > 0 else ([],[],[],[])
563+
log_times, log_levels, log_messages = zip(*messages) if len(messages) > 0 else ([],[],[])
564+
log_times_str = [time_str(t) for t in log_times]
565565
log_data = {
566566
'times': log_times_str,
567567
'levels': log_levels,

app/requirements.txt

+1-1
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,7 @@ jinja2
33
jupyter
44
pyfftw
55
pylint
6-
pyulog>=0.9
6+
pyulog>=1.1
77
requests
88
scipy>=1.8.1
99
simplekml

0 commit comments

Comments (0)