
Commit 41de853

black formatting
1 parent a98c148 commit 41de853

File tree

2 files changed: +27 / -31 lines

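Both diffs below are mechanical changes from running the black code formatter: string literals are normalized to double quotes, spaces are added around operators and after commas, a multi-line parser.add_argument call is collapsed onto one line, and a space is inserted after the # in comments. As a minimal sketch (assuming black is installed and run with its default settings, which the commit itself does not state), the same rewrites can be reproduced with black's format_str API:

import black

# Two lines touched by this commit, shown before formatting.
src = (
    "#print(s3surl)\n"
    "url = 'https://data.caltech.edu/api/records/?size=5000'\n"
)

# black.Mode() holds the default formatting options (line length 88, etc.).
formatted = black.format_str(src, mode=black.Mode())
print(formatted)
# # print(s3surl)
# url = "https://data.caltech.edu/api/records/?size=5000"

Running black from the command line over the two files (black caltechdata_api/caltechdata_write.py get_geo.py) would produce the same edits shown in the hunks below.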

caltechdata_api/caltechdata_write.py

Lines changed: 4 additions & 4 deletions
@@ -17,8 +17,8 @@ def send_s3(filepath, token, production=False):
 
     c = session()
 
-    #print(s3surl)
-    #print(headers)
+    # print(s3surl)
+    # print(headers)
     response = c.get(s3surl, headers=headers)
     jresp = response.json()
     data = jresp["data"]
@@ -56,8 +56,8 @@ def send_s3(filepath, token, production=False):
     if response.text:
         raise Exception(response.text)
 
-    #print(chkurl + "/" + bucket + "/" + key + "/")
-    #print(headers)
+    # print(chkurl + "/" + bucket + "/" + key + "/")
+    # print(headers)
     response = c.get(chkurl + "/" + bucket + "/" + key + "/", headers=headers)
     print(response)
     md5 = response.json()["md5"]

get_geo.py

Lines changed: 23 additions & 27 deletions
@@ -7,51 +7,47 @@
     description="get_metadata queries the caltechDATA (Invenio 3) API\
     and returns DataCite-compatable metadata"
 )
-parser.add_argument(
-    "output",
-    help="Output file name",
-)
+parser.add_argument("output", help="Output file name")
 parser.add_argument("-keywords", nargs="*")
 
 args = parser.parse_args()
 
-url = 'https://data.caltech.edu/api/records/?size=5000'
+url = "https://data.caltech.edu/api/records/?size=5000"
 
-search = ''
+search = ""
 if args.keywords:
     for key in args.keywords:
-        if search == '':
+        if search == "":
             search = f'&q=subjects:"{key}"'
         else:
-            search = search+f'+"{key}"'
+            search = search + f'+"{key}"'
     url = url + search
 
 response = requests.get(url)
 hits = response.json()
 
-outfile = open(args.output,'w')
+outfile = open(args.output, "w")
 writer = csv.writer(outfile)
-writer.writerow(['wkt','name','year','doi'])
-
-for h in hits['hits']['hits']:
-    metadata = decustomize_schema(h['metadata'])
-    if 'geoLocations' in metadata:
-        doi = 'https://doi.org/'+metadata['identifier']['identifier']
-        title=metadata['titles'][0]['title'].split(':')[0]
-        geo = metadata['geoLocations']
-        year = metadata['publicationYear']
+writer.writerow(["wkt", "name", "year", "doi"])
+
+for h in hits["hits"]["hits"]:
+    metadata = decustomize_schema(h["metadata"])
+    if "geoLocations" in metadata:
+        doi = "https://doi.org/" + metadata["identifier"]["identifier"]
+        title = metadata["titles"][0]["title"].split(":")[0]
+        geo = metadata["geoLocations"]
+        year = metadata["publicationYear"]
         for g in geo:
-            if 'geoLocationBox' in g:
-                box = g['geoLocationBox']
+            if "geoLocationBox" in g:
+                box = g["geoLocationBox"]
                 p1 = f"{box['eastBoundLongitude']} {box['northBoundLatitude']}"
                 p2 = f"{box['westBoundLongitude']} {box['northBoundLatitude']}"
                 p3 = f"{box['westBoundLongitude']} {box['southBoundLatitude']}"
                 p4 = f"{box['eastBoundLongitude']} {box['southBoundLatitude']}"
-                wkt = f'POLYGON (({p1}, {p2}, {p3}, {p4}, {p1}))'
-                writer.writerow([wkt,title,year,doi])
-
-            if 'geoLocationPoint' in g:
-                point = g['geoLocationPoint']
-                wkt = f"POINT ({point['pointLongitude']} {point['pointLatitude']})"
-                writer.writerow([wkt,title,year,doi])
+                wkt = f"POLYGON (({p1}, {p2}, {p3}, {p4}, {p1}))"
+                writer.writerow([wkt, title, year, doi])
 
+            if "geoLocationPoint" in g:
+                point = g["geoLocationPoint"]
+                wkt = f"POINT ({point['pointLongitude']} {point['pointLatitude']})"
+                writer.writerow([wkt, title, year, doi])

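The reformatted loop in get_geo.py turns DataCite geoLocationBox and geoLocationPoint entries into WKT strings for the CSV output. A small self-contained sketch of that conversion, using the same field names as the diff but made-up coordinate values (sample_box and sample_point are hypothetical, purely for illustration):

# Hypothetical geoLocation entries; field names match the DataCite metadata
# used above, coordinate values are invented for the example.
sample_box = {
    "eastBoundLongitude": -117.8,
    "westBoundLongitude": -118.9,
    "northBoundLatitude": 34.8,
    "southBoundLatitude": 33.7,
}
sample_point = {"pointLongitude": -118.1, "pointLatitude": 34.1}

# Corners of the bounding box in the same order the script uses,
# with the ring closed by repeating p1 at the end.
p1 = f"{sample_box['eastBoundLongitude']} {sample_box['northBoundLatitude']}"
p2 = f"{sample_box['westBoundLongitude']} {sample_box['northBoundLatitude']}"
p3 = f"{sample_box['westBoundLongitude']} {sample_box['southBoundLatitude']}"
p4 = f"{sample_box['eastBoundLongitude']} {sample_box['southBoundLatitude']}"
print(f"POLYGON (({p1}, {p2}, {p3}, {p4}, {p1}))")
# POLYGON ((-117.8 34.8, -118.9 34.8, -118.9 33.7, -117.8 33.7, -117.8 34.8))
print(f"POINT ({sample_point['pointLongitude']} {sample_point['pointLatitude']})")
# POINT (-118.1 34.1)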