Skip to content

Commit 62edc61

Browse files
authored
Merge pull request #8 from sptramer/pep8
Run PEP8 on content
2 parents 7f92d43 + 3d3ed45 commit 62edc61

File tree

2 files changed

+56
-54
lines changed

2 files changed

+56
-54
lines changed

src/batch_python_tutorial_ffmpeg.py

Lines changed: 48 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
# unique to your accounts. These are used when constructing connection strings
2424
# for the Batch and Storage client objects.
2525

26+
2627
def query_yes_no(question, default="yes"):
2728
"""
2829
Prompts the user for yes/no input, displaying the specified question text.
@@ -92,11 +93,10 @@ def upload_file_to_container(block_blob_client, container_name, file_path):
9293
block_blob_client.create_blob_from_path(container_name,
9394
blob_name,
9495
file_path)
95-
96+
9697
# Obtain the SAS token for the container.
9798
sas_token = get_container_sas_token(block_blob_client,
98-
container_name, azureblob.BlobPermissions.READ)
99-
99+
container_name, azureblob.BlobPermissions.READ)
100100

101101
sas_url = block_blob_client.make_blob_url(container_name,
102102
blob_name,
@@ -105,6 +105,7 @@ def upload_file_to_container(block_blob_client, container_name, file_path):
105105
return batchmodels.ResourceFile(file_path=blob_name,
106106
http_url=sas_url)
107107

108+
108109
def get_container_sas_token(block_blob_client,
109110
container_name, blob_permissions):
110111
"""
@@ -130,9 +131,8 @@ def get_container_sas_token(block_blob_client,
130131
return container_sas_token
131132

132133

133-
134134
def get_container_sas_url(block_blob_client,
135-
container_name, blob_permissions):
135+
container_name, blob_permissions):
136136
"""
137137
Obtains a shared access signature URL that provides write access to the
138138
output container to which the tasks will upload their output.
@@ -146,10 +146,11 @@ def get_container_sas_url(block_blob_client,
146146
"""
147147
# Obtain the SAS token for the container.
148148
sas_token = get_container_sas_token(block_blob_client,
149-
container_name, azureblob.BlobPermissions.WRITE)
149+
container_name, azureblob.BlobPermissions.WRITE)
150150

151151
# Construct SAS URL for the container
152-
container_sas_url = "https://{}.blob.core.windows.net/{}?{}".format(config._STORAGE_ACCOUNT_NAME, container_name, sas_token)
152+
container_sas_url = "https://{}.blob.core.windows.net/{}?{}".format(
153+
config._STORAGE_ACCOUNT_NAME, container_name, sas_token)
153154

154155
return container_sas_url
155156

@@ -174,16 +175,16 @@ def create_pool(batch_service_client, pool_id):
174175

175176
# The start task installs ffmpeg on each node from an available repository, using
176177
# an administrator user identity.
177-
178+
178179
new_pool = batch.models.PoolAddParameter(
179180
id=pool_id,
180181
virtual_machine_configuration=batchmodels.VirtualMachineConfiguration(
181182
image_reference=batchmodels.ImageReference(
182-
publisher="Canonical",
183-
offer="UbuntuServer",
184-
sku="18.04-LTS",
185-
version="latest"
186-
),
183+
publisher="Canonical",
184+
offer="UbuntuServer",
185+
sku="18.04-LTS",
186+
version="latest"
187+
),
187188
node_agent_sku_id="batch.node.ubuntu 18.04"),
188189
vm_size=config._POOL_VM_SIZE,
189190
target_dedicated_nodes=config._DEDICATED_POOL_NODE_COUNT,
@@ -193,13 +194,14 @@ def create_pool(batch_service_client, pool_id):
193194
wait_for_success=True,
194195
user_identity=batchmodels.UserIdentity(
195196
auto_user=batchmodels.AutoUserSpecification(
196-
scope=batchmodels.AutoUserScope.pool,
197-
elevation_level=batchmodels.ElevationLevel.admin)),
198-
)
197+
scope=batchmodels.AutoUserScope.pool,
198+
elevation_level=batchmodels.ElevationLevel.admin)),
199+
)
199200
)
200201

201202
batch_service_client.pool.add(new_pool)
202203

204+
203205
def create_job(batch_service_client, job_id, pool_id):
204206
"""
205207
Creates a job with the specified ID, associated with the specified pool.
@@ -216,7 +218,8 @@ def create_job(batch_service_client, job_id, pool_id):
216218
pool_info=batch.models.PoolInformation(pool_id=pool_id))
217219

218220
batch_service_client.job.add(job)
219-
221+
222+
220223
def add_tasks(batch_service_client, job_id, input_files, output_container_sas_url):
221224
"""
222225
Adds a task for each input file in the collection to the specified job.
@@ -234,26 +237,26 @@ def add_tasks(batch_service_client, job_id, input_files, output_container_sas_ur
234237

235238
tasks = list()
236239

237-
for idx, input_file in enumerate(input_files):
238-
input_file_path=input_file.file_path
239-
output_file_path="".join((input_file_path).split('.')[:-1]) + '.mp3'
240-
command = "/bin/bash -c \"ffmpeg -i {} {} \"".format(input_file_path, output_file_path)
240+
for idx, input_file in enumerate(input_files):
241+
input_file_path = input_file.file_path
242+
output_file_path = "".join((input_file_path).split('.')[:-1]) + '.mp3'
243+
command = "/bin/bash -c \"ffmpeg -i {} {} \"".format(
244+
input_file_path, output_file_path)
241245
tasks.append(batch.models.TaskAddParameter(
242246
id='Task{}'.format(idx),
243247
command_line=command,
244248
resource_files=[input_file],
245249
output_files=[batchmodels.OutputFile(
246-
file_pattern=output_file_path,
247-
destination=batchmodels.OutputFileDestination(
248-
container=batchmodels.OutputFileBlobContainerDestination(
249-
container_url=output_container_sas_url)),
250-
upload_options=batchmodels.OutputFileUploadOptions(
251-
upload_condition=batchmodels.OutputFileUploadCondition.task_success))]
252-
)
253-
)
250+
file_pattern=output_file_path,
251+
destination=batchmodels.OutputFileDestination(
252+
container=batchmodels.OutputFileBlobContainerDestination(
253+
container_url=output_container_sas_url)),
254+
upload_options=batchmodels.OutputFileUploadOptions(
255+
upload_condition=batchmodels.OutputFileUploadCondition.task_success))]
256+
)
257+
)
254258
batch_service_client.task.add_collection(job_id, tasks)
255259

256-
257260

258261
def wait_for_tasks_to_complete(batch_service_client, job_id, timeout):
259262
"""
@@ -289,7 +292,6 @@ def wait_for_tasks_to_complete(batch_service_client, job_id, timeout):
289292
"timeout period of " + str(timeout))
290293

291294

292-
293295
if __name__ == '__main__':
294296

295297
start_time = datetime.datetime.now().replace(microsecond=0)
@@ -299,37 +301,37 @@ def wait_for_tasks_to_complete(batch_service_client, job_id, timeout):
299301
# Create the blob client, for use in obtaining references to
300302
# blob storage containers and uploading files to containers.
301303

302-
303304
blob_client = azureblob.BlockBlobService(
304305
account_name=config._STORAGE_ACCOUNT_NAME,
305306
account_key=config._STORAGE_ACCOUNT_KEY)
306307

307308
# Use the blob client to create the containers in Azure Storage if they
308309
# don't yet exist.
309-
310+
310311
input_container_name = 'input'
311312
output_container_name = 'output'
312313
blob_client.create_container(input_container_name, fail_on_exist=False)
313314
blob_client.create_container(output_container_name, fail_on_exist=False)
314315
print('Container [{}] created.'.format(input_container_name))
315316
print('Container [{}] created.'.format(output_container_name))
316317

317-
# Create a list of all MP4 files in the InputFiles directory.
318+
# Create a list of all MP4 files in the InputFiles directory.
318319
input_file_paths = []
319-
320-
for folder, subs, files in os.walk(os.path.join(sys.path[0],'InputFiles')):
320+
321+
for folder, subs, files in os.walk(os.path.join(sys.path[0], 'InputFiles')):
321322
for filename in files:
322323
if filename.endswith(".mp4"):
323-
input_file_paths.append(os.path.abspath(os.path.join(folder, filename)))
324+
input_file_paths.append(os.path.abspath(
325+
os.path.join(folder, filename)))
324326

325-
# Upload the input files. This is the collection of files that are to be processed by the tasks.
327+
# Upload the input files. This is the collection of files that are to be processed by the tasks.
326328
input_files = [
327329
upload_file_to_container(blob_client, input_container_name, file_path)
328330
for file_path in input_file_paths]
329331

330332
# Obtain a shared access signature URL that provides write access to the output
331333
# container to which the tasks will upload their output.
332-
334+
333335
output_container_sas_url = get_container_sas_url(
334336
blob_client,
335337
output_container_name,
@@ -348,30 +350,30 @@ def wait_for_tasks_to_complete(batch_service_client, job_id, timeout):
348350
# Create the pool that will contain the compute nodes that will execute the
349351
# tasks.
350352
create_pool(batch_client, config._POOL_ID)
351-
353+
352354
# Create the job that will run the tasks.
353355
create_job(batch_client, config._JOB_ID, config._POOL_ID)
354356

355-
# Add the tasks to the job. Pass the input files and a SAS URL
357+
# Add the tasks to the job. Pass the input files and a SAS URL
356358
# to the storage container for output files.
357-
add_tasks(batch_client, config._JOB_ID, input_files, output_container_sas_url)
359+
add_tasks(batch_client, config._JOB_ID,
360+
input_files, output_container_sas_url)
358361

359362
# Pause execution until tasks reach Completed state.
360363
wait_for_tasks_to_complete(batch_client,
361364
config._JOB_ID,
362365
datetime.timedelta(minutes=30))
363366

364367
print(" Success! All tasks reached the 'Completed' state within the "
365-
"specified timeout period.")
368+
"specified timeout period.")
366369

367370
except batchmodels.BatchErrorException as err:
368-
print_batch_exception(err)
369-
raise
371+
print_batch_exception(err)
372+
raise
370373

371374
# Delete input container in storage
372375
print('Deleting container [{}]...'.format(input_container_name))
373376
blob_client.delete_container(input_container_name)
374-
375377

376378
# Print out some timing info
377379
end_time = datetime.datetime.now().replace(microsecond=0)

src/config.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
1-
#-------------------------------------------------------------------------
2-
#
3-
# THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
4-
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES
1+
# -------------------------------------------------------------------------
2+
#
3+
# THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
4+
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES
55
# OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
6-
#----------------------------------------------------------------------------------
6+
# ----------------------------------------------------------------------------------
77
# The example companies, organizations, products, domain names,
88
# e-mail addresses, logos, people, places, and events depicted
99
# herein are fictitious. No association with any real company,
1010
# organization, product, domain name, email address, logo, person,
1111
# places, or events is intended or should be inferred.
12-
#--------------------------------------------------------------------------
12+
# --------------------------------------------------------------------------
1313

1414
# Global constant variables (Azure Storage account/Batch details)
1515

@@ -19,7 +19,7 @@
1919
# unique to your accounts. These are used when constructing connection strings
2020
# for the Batch and Storage client objects.
2121

22-
_BATCH_ACCOUNT_NAME =''
22+
_BATCH_ACCOUNT_NAME = ''
2323
_BATCH_ACCOUNT_KEY = ''
2424
_BATCH_ACCOUNT_URL = ''
2525
_STORAGE_ACCOUNT_NAME = ''
@@ -28,4 +28,4 @@
2828
_DEDICATED_POOL_NODE_COUNT = 0
2929
_LOW_PRIORITY_POOL_NODE_COUNT = 5
3030
_POOL_VM_SIZE = 'STANDARD_A1_v2'
31-
_JOB_ID = 'LinuxFfmpegJob'
31+
_JOB_ID = 'LinuxFfmpegJob'

0 commit comments

Comments
 (0)