Skip to content
This repository was archived by the owner on Oct 7, 2022. It is now read-only.

Commit f9d0084

Browse files
committed
support url(s) in requirements.txt files
1 parent c828fb9 commit f9d0084

File tree

3 files changed

+62
-22
lines changed

3 files changed

+62
-22
lines changed

src/pypi2nix/cli.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ def main(nix_path,
158158

159159
click.echo('Extracting metadata ...')
160160

161-
packages_metadata = pypi2nix.stage2.main(wheels, cache_dir)
161+
packages_metadata = pypi2nix.stage2.main(wheels, requirements_files, cache_dir)
162162

163163
click.echo('Generating Nix expressions ...')
164164

src/pypi2nix/pip.nix

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,8 @@ in pkgs.stdenv.mkDerivation rec {
3636
unzip -qo $file
3737
done
3838
39+
PYTHONPATH=${wheelhouse_dir}:$PYTHONPATH pip freeze > ${project_tmp_dir}/requirements.txt
40+
3941
cp -Rf ${project_tmp_dir}/* ${cache_dir}
4042
'';
4143
}

src/pypi2nix/stage2.py

Lines changed: 59 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
import json
66
import os.path
77
import requests
8+
import tempfile
89

910
from pypi2nix.utils import TO_IGNORE, safe
1011

@@ -87,23 +88,7 @@ def download_file(url, filename, chunk_size=1024):
8788
fd.write(chunk)
8889

8990

90-
def process_wheel(cache_dir, wheel, index=INDEX_URL):
91-
"""
92-
"""
93-
94-
url = "{}/{}/json".format(index, wheel['name'])
95-
r = requests.get(url)
96-
r.raise_for_status() # TODO: handle this nicer
97-
wheel_data = r.json()
98-
99-
if not wheel_data.get('releases'):
100-
raise click.ClickException(
101-
"Unable to find releases for packge {name}".format(**wheel))
102-
103-
if not wheel_data['releases'].get(wheel['version']):
104-
raise click.ClickException(
105-
"Unable to find releases for package {name} of version "
106-
"{version}".format(**wheel))
91+
def find_release(cache_dir, wheel, wheel_data):
10792

10893
release = None
10994
for possible_release in wheel_data['releases'][wheel['version']]:
@@ -137,16 +122,68 @@ def process_wheel(cache_dir, wheel, index=INDEX_URL):
137122
"Unable to find source releases for package {name} of version "
138123
"{version}".format(**wheel))
139124

140-
if release:
141-
wheel.update(release)
125+
return release
126+
127+
128+
def process_wheel(cache_dir, wheel, sources, index=INDEX_URL):
    """Resolve the source release for *wheel* and merge it into its metadata.

    If the package name appears in *sources* (direct URLs collected from
    requirements files), the archive is downloaded and its sha256 digest is
    computed directly, since the package index knows nothing about it.
    Otherwise the release is looked up via the JSON API of *index*.

    :param cache_dir: cache directory, passed through to ``find_release``
    :param wheel: package metadata dict with at least ``name`` and ``version``
    :param sources: mapping of package name -> direct download URL
    :param index: base URL of the PyPI-compatible JSON index
    :return: *wheel*, updated in place with the chosen release's fields
    :raises click.ClickException: when no matching release can be found
    """

    if wheel['name'] in sources:
        release = dict()
        release['url'] = sources[wheel['name']]
        release['hash_type'] = 'sha256'

        r = requests.get(release['url'], stream=True)
        r.raise_for_status()  # TODO: handle this nicer

        # Hash the archive incrementally as it streams in; there is no
        # need to buffer the whole download in a temporary file first.
        # (Also avoids shadowing the `hash` builtin.)
        digest = hashlib.sha256()
        for chunk in r.iter_content(chunk_size=1024):
            digest.update(chunk)

        release['hash_value'] = digest.hexdigest()

    else:
        url = "{}/{}/json".format(index, wheel['name'])
        r = requests.get(url)
        r.raise_for_status()  # TODO: handle this nicer
        wheel_data = r.json()

        if not wheel_data.get('releases'):
            raise click.ClickException(
                "Unable to find releases for package {name}".format(**wheel))

        if not wheel_data['releases'].get(wheel['version']):
            raise click.ClickException(
                "Unable to find releases for package {name} of version "
                "{version}".format(**wheel))

        release = find_release(cache_dir, wheel, wheel_data)

    wheel.update(release)

    return wheel
144169

145170

146-
def main(wheels, cache_dir, index=INDEX_URL):
171+
def main(wheels, requirements_files, cache_dir, index=INDEX_URL):
147172
"""Extract packages metadata from wheels dist-info folders.
148173
"""
149174

175+
# get url's from requirements_files
176+
sources = dict()
177+
for requirements_file in requirements_files:
178+
with open(requirements_file) as f:
179+
lines = f.readlines()
180+
for line in lines:
181+
line = line.strip()
182+
if line.startswith('http://') or line.startswith('https://'):
183+
url, egg = line.split('#')
184+
name = egg.split('egg=')[1]
185+
sources[name] = url
186+
150187
metadata = []
151188
for wheel in wheels:
152189

@@ -156,6 +193,7 @@ def main(wheels, cache_dir, index=INDEX_URL):
156193
if not wheel_metadata:
157194
continue
158195

159-
metadata.append(process_wheel(cache_dir, wheel_metadata, index))
196+
metadata.append(
197+
process_wheel(cache_dir, wheel_metadata, sources, index))
160198

161199
return metadata

0 commit comments

Comments
 (0)