-
Notifications
You must be signed in to change notification settings - Fork 12
/
Copy pathentrychecksumgenerator.py
153 lines (126 loc) · 6.58 KB
/
entrychecksumgenerator.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
"""
Copyright (C) 2017- enen92
This file is part of screensaver.atv4 - https://github.com/enen92/screensaver.atv4
SPDX-License-Identifier: GPL-2.0-only
See LICENSE for more information.
Note: This is a standalone script to update the offline video entries and
their checksums. Extra modes allow for Apple JSON download and simple
printing of the different locations available in the JSON.
"""
import hashlib
import json
import os
import sys
import tarfile
from urllib import request
# Path to the locally cached copy of Apple's entries.json feed.
apple_local_feed = os.path.join("resources", "entries.json")
# Scratch directory where videos are downloaded one at a time for checksumming.
tmp_folder = "tmpvideos"
# Apple's Aerials resources bundle (a tar containing entries.json, no gzip).
apple_resources_tar = "https://sylvan.apple.com/Aerials/resources-15.tar"
# Temporary on-disk name for the downloaded tar; removed after extraction.
local_tar = "resources.tar"
# Fetch the TAR file containing the latest entries.json and overwrite the local copy
def get_latest_entries_from_apple():
    """Download Apple's resources tar and refresh ./resources/entries.json.

    Side effects: overwrites resources/entries.json; the downloaded tar is
    always deleted, even if extraction fails (the original leaked both the
    open tar handle and the tar file on error).
    """
    print("Downloading the Apple Aerials resources.tar to disk")
    request.urlretrieve(apple_resources_tar, local_tar)
    try:
        # Context manager guarantees the archive handle is closed on any path.
        # https://www.tutorialspoint.com/How-are-files-extracted-from-a-tar-file-using-Python
        with tarfile.open(local_tar) as apple_tar:
            print("Extracting entries.json from resources.tar and placing in ./resources")
            apple_tar.extract("entries.json", "resources")
    finally:
        # Remove the temporary tar so repeated runs start clean.
        os.remove(local_tar)
def generate_entries_and_checksums():
    """Download every video referenced in entries.json and record MD5 checksums.

    Reads the local feed (``apple_local_feed``), downloads each available
    quality variant of each scene into ``tmp_folder`` one at a time, hashes
    it, deletes it, and finally writes resources/checksums.json plus summary
    statistics (total megabytes and file count per quality, locations seen).

    Fixes vs. the original:
    - MD5 is computed in 1 MiB chunks instead of reading whole multi-hundred-MB
      videos into memory at once.
    - The https->http downgrade now tests the URL *prefix* rather than a
      substring match anywhere in the URL.
    - Missing quality variants are detected with ``dict.get`` on the one lookup
      that can legitimately fail, instead of a broad ``except KeyError`` around
      the entire download/hash body (which could silently swallow unrelated
      KeyErrors).
    """
    with open(apple_local_feed) as feed_file:
        print("Starting checksum generator...")
        # Scratch directory for one-at-a-time video downloads.
        if not os.path.exists(tmp_folder):
            os.mkdir(tmp_folder)
        # filename -> md5 hexdigest, written to checksums.json at the end.
        checksums = {}
        # Within each scene, there may be: H264/HEVC, 1080p/4K, SDR/HDR.
        quality_keys = ("url-1080-H264", "url-1080-SDR", "url-1080-HDR",
                        "url-4K-SDR", "url-4K-HDR")
        # Per-quality running totals: size in megabytes and video count.
        quality_total_size_megabytes = dict.fromkeys(quality_keys, 0)
        quality_total_video_count = dict.fromkeys(quality_keys, 0)
        # Set so recurring scene names are deduped.
        locations = set()
        top_level = json.load(feed_file)
        # Top-level JSON has assets array, initialAssetCount, version.
        for block in top_level["assets"]:
            # Scene name lives in accessibilityLabel; the same scene may recur.
            current_scene = block["accessibilityLabel"]
            print("Processing videos for scene:", current_scene)
            locations.add(current_scene)
            for video_version in quality_keys:
                # Only this lookup may legitimately fail; handle it narrowly.
                asset_url = block.get(video_version)
                if asset_url is None:
                    print("Can't find URL for asset type:", video_version)
                    continue
                # Revert HTTPS to HTTP to avoid Apple's bad SSL cert.
                # NOTE: Old Apple URLs were HTTP, new URLs are HTTPS with a bad cert
                if asset_url.startswith("https://"):
                    asset_url = "http://" + asset_url[len("https://"):]
                print("Downloading video:", asset_url)
                # Local name/path derive from the last URL path segment.
                local_file_name = asset_url.split("/")[-1]
                local_file_path = os.path.join(tmp_folder, local_file_name)
                request.urlretrieve(asset_url, local_file_path)
                # Track size (decimal megabytes, matching the original) and count.
                quality_total_size_megabytes[video_version] += \
                    os.path.getsize(local_file_path) / 1000 / 1000
                quality_total_video_count[video_version] += 1
                # Hash in 1 MiB chunks to keep memory flat for large videos.
                digest = hashlib.md5()
                with open(local_file_path, "rb") as f:
                    for chunk in iter(lambda: f.read(1 << 20), b""):
                        digest.update(chunk)
                checksum = digest.hexdigest()
                checksums[local_file_name] = checksum
                # Delete the local copy as soon as it is hashed.
                os.remove(local_file_path)
                print("File processed. Checksum:", checksum)
        # All videos processed: remove the (now empty) temp directory.
        os.rmdir(tmp_folder)
        # Persist the checksum map next to entries.json.
        with open(os.path.join("resources", "checksums.json"), "w") as f:
            print("Writing checksums to disk")
            f.write(json.dumps(checksums))
        print("Total Megabytes of all video files, per quality:")
        print(quality_total_size_megabytes)
        print("Total count of all video files, per quality:")
        print(quality_total_video_count)
        print("Locations seen:")
        print(locations)
        print("Stopping checksum generator...")
def get_locations():
    """Print the sorted, deduplicated scene locations found in entries.json."""
    with open(apple_local_feed) as feed_file:
        feed = json.load(feed_file)
    # accessibilityLabel holds the scene name; a set comprehension dedupes
    # recurring scenes in one pass over the assets array.
    scenes = {asset["accessibilityLabel"] for asset in feed["assets"]}
    print("Locations seen:")
    print(sorted(scenes))
if __name__ == '__main__':
    if len(sys.argv) > 1:
        # Dispatch table: CLI option string -> action. An unrecognised
        # option falls through silently, matching the original behavior.
        actions = {
            "1": generate_entries_and_checksums,
            "2": get_latest_entries_from_apple,
            "3": get_locations,
        }
        chosen = actions.get(sys.argv[1])
        if chosen is not None:
            chosen()
    else:
        print("Please specify option:\n "
              "1) Update checksums based on existing entries.json \n "
              "2) Update entries.json from Apple \n "
              "3) Print all locations in entries.json")