Skip to content

Commit aa65293

Browse files
committed
Fix some nits
1 parent fb308f7 commit aa65293

File tree

4 files changed

+16
-11
lines changed

4 files changed

+16
-11
lines changed

dftimewolf/cli/recipes/gcp_turbinia.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
# -*- coding: utf-8 -*-
22
"""Process a GCP persistent disk with Turbinia and send output to Timesketch.
33
4-
This processes a disk that is already in the project where Turbinia exists. If
4+
This processes a disk that is already in the project where Turbinia exists. If
55
you want to copy the disk from another project, use the gcp_turbinia_import
66
recipe.
77
"""

dftimewolf/cli/recipes/gcp_turbinia_import.py

+6-7
Original file line number | Diff line number | Diff line change
@@ -2,16 +2,16 @@
22
"""Imports a remote GCP persistent disk and sends to Turbinia and Timesketch.
33
44
This copies a disk from a remote GCP project and sends to Turbinia for
5-
processing and then sends those results to Timesketch. It will also start an
6-
analysis VM with the attached disk. If you want to process a disk already in
5+
processing and then sends those results to Timesketch. It will also start an
6+
analysis VM with the attached disk. If you want to process a disk already in
77
the same project as Turbinia you can use the gcp_turbinia recipe.
88
"""
99

1010
from __future__ import unicode_literals
1111
from datetime import datetime
1212

13-
_short_description = ('Imports a remote GCP persistent disk and sends to '
14-
'Turbinia and Timesketch.')
13+
_short_description = ('Imports a remote GCP persistent disk, processes it with '
14+
'Turbinia and sends results to Timesketch.')
1515

1616
contents = {
1717
'name': 'gcp_turbinia_import',
@@ -31,7 +31,7 @@
3131
}, {
3232
'name': 'TurbiniaProcessor',
3333
'args': {
34-
'disk_name': None,
34+
'disk_name': None, # Taken from GoogleCloudCollector's output
3535
'project': '@analysis_project_name',
3636
'zone': '@zone',
3737
},
@@ -50,7 +50,6 @@
5050
args = [
5151
('remote_project_name',
5252
'Name of the project containing the instance / disks to copy ', None),
53-
5453
('--zone', 'The GCP zone the disk to process (and Turbinia workers) are in',
5554
None),
5655
('--incident_id', 'Incident ID (used for Timesketch description)',
@@ -63,6 +62,6 @@
6362
('--all_disks', 'Copy all disks in the designated instance. '
6463
'Overrides disk_names if specified', False),
6564
('--analysis_project_name', 'Name of the project where the analysis VM will'
66-
' be created', 'turbinia-external-test'),
65+
' be created', None),
6766
('--boot_disk_size', 'The size of the analysis VM boot disk (in GB)', 50.0),
6867
]

dftimewolf/lib/collectors/gcloud.py

+1
Original file line number | Diff line number | Diff line change
@@ -112,6 +112,7 @@ def setup(self,
112112
zone)
113113

114114
try:
115+
# TODO: Make creating an analysis VM optional
115116
self.analysis_vm, _ = libcloudforensics.start_analysis_vm(
116117
self.analysis_project.project_id, analysis_vm_name, zone,
117118
boot_disk_size)

dftimewolf/lib/processors/turbinia.py

+8-3
Original file line number | Diff line number | Diff line change
@@ -2,6 +2,7 @@
22
"""Processes cloud artifacts using a remote Turbinia instance."""
33
from __future__ import unicode_literals
44
from __future__ import absolute_import
5+
from __future__ import print_function
56

67
import os
78
import tempfile
@@ -17,7 +18,7 @@
1718

1819

1920
class TurbiniaProcessor(BaseModule):
20-
"""Process cloud disks with remote Turbinia instance.
21+
"""Process cloud disks with a remote Turbinia instance.
2122
2223
Attributes:
2324
client: A TurbiniaClient object
@@ -52,6 +53,8 @@ def setup(self, disk_name, project, zone): # pylint: disable=arguments-differ
5253
project: The project containing the disk to process
5354
zone: The zone containing the disk to process
5455
"""
56+
# TODO: Consider the case when multiple disks are provided by the previous
57+
# module or by the CLI.
5558
if self.state.input and not disk_name:
5659
_, disk = self.state.input[0]
5760
disk_name = disk.name
@@ -81,6 +84,7 @@ def setup(self, disk_name, project, zone): # pylint: disable=arguments-differ
8184
self.client = turbinia_client.TurbiniaClient()
8285
except TurbiniaException as e:
8386
self.state.add_error(e, critical=True)
87+
return
8488

8589
def cleanup(self):
8690
pass
@@ -107,9 +111,9 @@ def process(self):
107111
task_data = self.client.get_task_data(
108112
instance=self.instance, project=self.project, region=self.region,
109113
request_id=request.request_id)
110-
print self.client.format_task_status(
114+
print(self.client.format_task_status(
111115
instance=self.instance, project=self.project, region=self.region,
112-
request_id=request.request_id, all_fields=True)
116+
request_id=request.request_id, all_fields=True))
113117
except TurbiniaException as e:
114118
self.state.add_error(e, critical=True)
115119
return
@@ -138,6 +142,7 @@ def process(self):
138142

139143
# For files remote in GCS we copy each plaso file back from GCS and then add
140144
# to output paths
145+
# TODO: Externalize fetching files from GCS buckets to a different module.
141146
for path in gs_paths:
142147
local_path = None
143148
try:

0 commit comments

Comments (0)