-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdriver.py
More file actions
118 lines (88 loc) · 3.82 KB
/
driver.py
File metadata and controls
118 lines (88 loc) · 3.82 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
import pickle
import os.path
from collections import defaultdict
from http import HTTPStatus
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from gardnr import constants, drivers
class GoogleSheets(drivers.Exporter):
    """Export GARDNR metric logs to a Google Sheets spreadsheet.

    One sheet (tab) per metric name; each log becomes an appended row of
    ``[ISO-8601 timestamp, value]``.  ``self.spreadsheet_id`` is read in
    :meth:`export` — presumably supplied by the driver configuration;
    TODO confirm against the gardnr driver framework.
    """

    # Image logs carry binary payloads that cannot be written to a cell.
    blacklist = [constants.IMAGE]

    def setup(self):
        """Authorize with Google and build the Sheets API service.

        Loads cached OAuth credentials from ``token.pickle`` when present,
        refreshing expired ones or running the installed-app flow when
        necessary, and writes the credentials back to disk for next time.
        """
        # If modifying these scopes, delete token.pickle so the user
        # re-authorizes with the new scope.
        SCOPES = ['https://www.googleapis.com/auth/spreadsheets']

        creds = None
        # token.pickle stores the user's access and refresh tokens from a
        # previous authorization flow.
        # NOTE(review): unpickling is only acceptable because this file is
        # produced locally below; never load a token.pickle from an
        # untrusted source.
        if os.path.exists('token.pickle'):
            with open('token.pickle', 'rb') as token:
                creds = pickle.load(token)

        # No (valid) credentials available: refresh if possible, otherwise
        # let the user log in via the local-server OAuth flow.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', SCOPES)
                creds = flow.run_local_server()
            # Save the credentials for the next run.
            with open('token.pickle', 'wb') as token:
                pickle.dump(creds, token)

        self.service = build('sheets', 'v4', credentials=creds)

    @staticmethod
    def _build_log_groups(logs):
        """Group logs by metric name.

        Returns a dict mapping metric name -> list of logs, preserving
        the input order within each group.
        """
        groups = defaultdict(list)
        for log in logs:
            groups[log.metric.name].append(log)
        return groups

    @staticmethod
    def _log_to_row(log):
        """Convert one log into a spreadsheet row.

        Bytes values are decoded as UTF-8 so they render as text.
        """
        value = log.value
        if isinstance(value, bytes):  # isinstance, not type(...) is bytes
            value = value.decode('utf-8')
        return [log.timestamp.isoformat(), value]

    def export(self, logs):
        """Append the given logs to the spreadsheet, one sheet per metric.

        Ensures a sheet exists for every metric, then appends the rows of
        each metric group in turn.

        Raises:
            RateLimitError: when the API answers 429; carries the logs
                that were not exported so the caller can retry them.
            googleapiclient.errors.HttpError: for any other API error.
        """
        groups = GoogleSheets._build_log_groups(logs)
        groups_exported = set()  # set: O(1) membership in the except path
        try:
            response = self.service.spreadsheets().get(
                spreadsheetId=self.spreadsheet_id
            ).execute()
            found_sheets = {sheet['properties']['title']
                            for sheet in response['sheets']}

            # Create a sheet (tab) for every metric that lacks one.
            missing_sheets = groups.keys() - found_sheets
            if missing_sheets:
                missing_sheet_requests = [
                    {'addSheet': {'properties': {'title': sheet}}}
                    for sheet in missing_sheets
                ]
                self.service.spreadsheets().batchUpdate(
                    spreadsheetId=self.spreadsheet_id,
                    body={'requests': missing_sheet_requests}
                ).execute()

            for metric_name, metric_logs in groups.items():
                values = [self._log_to_row(log) for log in metric_logs]
                self.service.spreadsheets().values().append(
                    spreadsheetId=self.spreadsheet_id,
                    range='{sheet}!A:A'.format(sheet=metric_name),
                    body={'values': values},
                    valueInputOption='USER_ENTERED'
                ).execute()
                groups_exported.add(metric_name)
        except HttpError as e:
            # Anything other than a rate-limit response propagates as-is.
            if int(e.resp['status']) != HTTPStatus.TOO_MANY_REQUESTS:
                raise
            # Collect every log that never made it out so the caller can
            # retry them later.
            failed_logs = [
                log
                for group, group_logs in groups.items()
                if group not in groups_exported
                for log in group_logs
            ]
            raise RateLimitError(
                'Being rate limited, cannot continue exporting',
                failed_logs
            ) from e
class RateLimitError(Exception):
    """Raised when the Sheets API rate limit halts an export.

    The logs that were not exported are kept on ``failed_logs`` so the
    caller can retry them once the rate limit clears.
    """

    def __init__(self, message, failed_logs):
        # Stash the unexported logs before delegating the message to
        # the base Exception.
        self.failed_logs = failed_logs
        super().__init__(message)