@@ -1,169 +1,39 @@
 """ Event parsing """
 import json
 import lzma
-import os
-import sys
-import random
+from typing import Optional, Any, List, Tuple
 
 from helper import download_file_maybe
-from config import get_events_url, get_events_filename, get_metadata_cache_path
+from config import get_events_url, get_events_filename
 from pyulog import ULog
+from pyulog.px4_events import PX4Events
 
-#pylint: disable=wrong-import-position
-sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'libevents/libs/python'))
-from libevents_parse.parser import Parser
+# pylint: disable=global-statement
+__event_parser: PX4Events = None  # Keep the parser to cache the default event definitions
 
-#pylint: disable=invalid-name,global-statement
 
-class FileCache:
-    """ Very simple file cache with a maximum number of files """
-
-    def __init__(self, path, max_num_files=1000):
-        self._path = path
-        self._max_num_files = max_num_files
-        if not os.path.isdir(path):
-            os.mkdir(path)
-
-    def access(self, file_name):
-        """ check if a file exists in the cache """
-        return os.path.isfile(os.path.join(self._path, file_name))
-
-    def insert(self, file_name, data):
-        """ insert data (bytes) """
-        # check cache size
-        cache_files = os.listdir(self._path)
-        if len(cache_files) >= self._max_num_files:
-            # select a random file (could be improved to LRU)
-            # (if flight review runs in multi-process mode, there's a minimal chance we delete a
-            # file that's trying to be accessed by another instance)
-            remove_index = random.randint(0, len(cache_files) - 1)
-            os.remove(os.path.join(self._path, cache_files[remove_index]))
-
-        # write
-        with open(os.path.join(self._path, file_name), 'wb') as cache_file:
-            cache_file.write(data)
-
-    @property
-    def path(self):
-        """ Get configured path """
-        return self._path
-
-def get_event_definitions_from_log_file(ulog: ULog):
-    """
-    Get the event definitions json file from the log file.
-    :return: path to json file or None
-    """
-    if 'metadata_events' in ulog.msg_info_multiple_dict and \
-            'metadata_events_sha256' in ulog.msg_info_dict:
-        file_hash = ulog.msg_info_dict['metadata_events_sha256']
-        if len(file_hash) <= 64 and file_hash.isalnum():
-
-            file_cache = FileCache(get_metadata_cache_path())
-            events_metadata_filename = 'events.' + file_hash + '.json'
-            if not file_cache.access(events_metadata_filename):
-                # insert into the cache
-                metadata_events_bytes = b''.join(ulog.msg_info_multiple_dict['metadata_events'][0])
-                metadata_events_json = lzma.decompress(metadata_events_bytes)
-                file_cache.insert(events_metadata_filename, metadata_events_json)
-
-            return os.path.join(file_cache.path, events_metadata_filename)
-
-    return None
-
-
-__event_parser = None  # fallback event parser, used if the log doesn't contain the event definitions
-def get_event_parser(ulog: ULog):
-    """ get event parser instance or None on error """
-    events_profile = 'dev'
-
-    event_definitions_json = get_event_definitions_from_log_file(ulog)
-    if event_definitions_json is not None:
-        with open(event_definitions_json, 'r', encoding="utf8") as json_file:
-            p = Parser()
-            p.load_definitions(json.load(json_file))
-            p.set_profile(events_profile)
-            return p
-
-    # No json definitions in the log -> use global definitions
-    global __event_parser
-    events_json_xz = get_events_filename()
-    # check for cached file update
-    downloaded = download_file_maybe(events_json_xz, get_events_url())
-    if downloaded == 2 or (downloaded == 1 and __event_parser is None):
-        # decompress
-        with lzma.open(events_json_xz, 'rt') as json_file:
-            p = Parser()
-            p.load_definitions(json.load(json_file))
-            p.set_profile(events_profile)
-            __event_parser = p
-
-    return __event_parser
-
-
-def get_logged_events(ulog):
+def get_logged_events(ulog: ULog) -> List[Tuple[int, str, str]]:
     """
     Get the events as list of messages
-    :return: list of (timestamp, time str, log level str, message) tuples
+    :return: list of (timestamp, log level str, message) tuples
     """
 
-    try:
-        event_parser = get_event_parser(ulog)
-    except Exception as e:
-        print('Failed to get event parser: {}'.format(e))
-        return []
+    def get_default_json_definitions(already_has_default_parser: bool) -> Optional[Any]:
+        """ Retrieve the default json event definitions """
 
-    def event_log_level_str(log_level: int):
-        return {0: 'EMERGENCY',
-                1: 'ALERT',
-                2: 'CRITICAL',
-                3: 'ERROR',
-                4: 'WARNING',
-                5: 'NOTICE',
-                6: 'INFO',
-                7: 'DEBUG',
-                8: 'PROTOCOL',
-                9: 'DISABLED'}.get(log_level, 'UNKNOWN')
+        events_json_xz = get_events_filename()
+        # Check for cached file update
+        downloaded = download_file_maybe(events_json_xz, get_events_url())
+        if downloaded == 2 or (downloaded == 1 and not already_has_default_parser):
+            # Decompress
+            with lzma.open(events_json_xz, 'rt') as json_file:
+                return json.load(json_file)
 
-    def time_str(t):
-        m1, s1 = divmod(int(t/1e6), 60)
-        h1, m1 = divmod(m1, 60)
-        return "{:d}:{:02d}:{:02d}".format(h1, m1, s1)
+        return None
 
-    # parse events
-    messages = []
-    try:
-        events = ulog.get_dataset('event')
-        all_ids = events.data['id']
-        for event_idx, event_id in enumerate(all_ids):
-            log_level = (events.data['log_levels'][event_idx] >> 4) & 0xf
-            if log_level >= 8:
-                continue
-            args = []
-            i = 0
-            while True:
-                arg_str = 'arguments[{}]'.format(i)
-                if arg_str not in events.data:
-                    break
-                arg = events.data[arg_str][event_idx]
-                args.append(arg)
-                i += 1
-            log_level_str = event_log_level_str(log_level)
-            t = events.data['timestamp'][event_idx]
-            event = None
-            if event_parser is not None:
-                event = event_parser.parse(event_id, bytes(args))
-            if event is None:
-                messages.append((t, time_str(t), log_level_str, \
-                        '[Unknown event with ID {:}]'.format(event_id)))
-            else:
-                # only show default group
-                if event.group() == "default":
-                    messages.append((t, time_str(t), log_level_str, event.message()))
-            # we could expand this a bit for events:
-            # - show the description too
-            # - handle url's as link (currently it's shown as text, and all tags are escaped)
-    except (KeyError, IndexError, ValueError) as error:
-        # no events in log
-        pass
+    global __event_parser
+    if __event_parser is None:
+        __event_parser = PX4Events()
+        __event_parser.set_default_json_definitions_cb(get_default_json_definitions)
 
-    return messages
+    return __event_parser.get_logged_events(ulog)
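
For context, a minimal sketch of how the refactored function can be called after this change. The module name `events` and the log path `sample.ulg` are illustrative assumptions, not part of the commit; the tuple layout follows the updated docstring:

from pyulog import ULog

from events import get_logged_events  # assumed module name for the file in this diff

ulog = ULog('sample.ulg')  # illustrative log file path

# Each entry is (timestamp in microseconds, log level string, message)
for timestamp, level, message in get_logged_events(ulog):
    print(timestamp, level, message)

Keeping one module-level PX4Events instance means the default event definitions are fetched and parsed at most once per process: the callback returns the parsed JSON only when `downloaded == 2`, or when `downloaded == 1` and no default definitions have been loaded yet; otherwise it returns None and PX4Events keeps using its cached definitions.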