@@ -5,5 +5,6 @@
 import json
 import os.path
 import requests
+import tempfile
 
 from pypi2nix.utils import TO_IGNORE, safe
@@ -87,23 +88,7 @@ def download_file(url, filename, chunk_size=1024):
             fd.write(chunk)
 
 
-def process_wheel(cache_dir, wheel, index=INDEX_URL):
-    """
-    """
-
-    url = "{}/{}/json".format(index, wheel['name'])
-    r = requests.get(url)
-    r.raise_for_status()  # TODO: handle this nicer
-    wheel_data = r.json()
-
-    if not wheel_data.get('releases'):
-        raise click.ClickException(
-            "Unable to find releases for packge {name}".format(**wheel))
-
-    if not wheel_data['releases'].get(wheel['version']):
-        raise click.ClickException(
-            "Unable to find releases for package {name} of version "
-            "{version}".format(**wheel))
+def find_release(cache_dir, wheel, wheel_data):
 
     release = None
     for possible_release in wheel_data['releases'][wheel['version']]:
@@ -137,16 +122,68 @@ def process_wheel(cache_dir, wheel, index=INDEX_URL):
             "Unable to find source releases for package {name} of version "
             "{version}".format(**wheel))
 
-    if release:
-        wheel.update(release)
+    return release
+
+
+def process_wheel(cache_dir, wheel, sources, index=INDEX_URL):
+    """Add source release information (url and hash) to the wheel metadata.
+    """
+
+    if wheel['name'] in sources:
+        release = dict()
+        release['url'] = sources[wheel['name']]
+        release['hash_type'] = 'sha256'
+
+        r = requests.get(release['url'], stream=True)
+        r.raise_for_status()  # TODO: handle this nicer
+
+        chunk_size = 1024
+        with tempfile.TemporaryFile() as fd:
+            for chunk in r.iter_content(chunk_size):
+                fd.write(chunk)
+            fd.seek(0)
+            hash = hashlib.sha256(fd.read())
+
+        release['hash_value'] = hash.hexdigest()
+
+    else:
+        url = "{}/{}/json".format(index, wheel['name'])
+        r = requests.get(url)
+        r.raise_for_status()  # TODO: handle this nicer
+        wheel_data = r.json()
+
+        if not wheel_data.get('releases'):
+            raise click.ClickException(
+                "Unable to find releases for package {name}".format(**wheel))
+
+        if not wheel_data['releases'].get(wheel['version']):
+            raise click.ClickException(
+                "Unable to find releases for package {name} of version "
+                "{version}".format(**wheel))
+
+        release = find_release(cache_dir, wheel, wheel_data)
+
+    wheel.update(release)
 
     return wheel
 
 
-def main(wheels, cache_dir, index=INDEX_URL):
+def main(wheels, requirements_files, cache_dir, index=INDEX_URL):
     """Extract packages metadata from wheels dist-info folders.
     """
 
+    # get urls from requirements_files
+    sources = dict()
+    for requirements_file in requirements_files:
+        with open(requirements_file) as f:
+            lines = f.readlines()
+            for line in lines:
+                line = line.strip()
+                if line.startswith('http://') or line.startswith('https://'):
+                    url, egg = line.split('#')
+                    name = egg.split('egg=')[1]
+                    sources[name] = url
+
     metadata = []
     for wheel in wheels:
 
@@ -156,6 +193,7 @@ def main(wheels, cache_dir, index=INDEX_URL):
         if not wheel_metadata:
             continue
 
-        metadata.append(process_wheel(cache_dir, wheel_metadata, index))
+        metadata.append(
+            process_wheel(cache_dir, wheel_metadata, sources, index))
 
     return metadata
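
For reference, a standalone sketch of the two behaviours this change introduces: pulling pinned source URLs out of requirements lines via their #egg= fragment, and hashing a streamed download through a temporary file as the new sources branch of process_wheel() does. The helper names (parse_source_urls, sha256_of_url) and the example requirements line are illustrative only, not part of pypi2nix.

import hashlib
import tempfile

import requests


def parse_source_urls(lines):
    """Collect pinned source URLs keyed by egg name, mirroring the
    requirements parsing added to main()."""
    sources = dict()
    for line in lines:
        line = line.strip()
        if line.startswith('http://') or line.startswith('https://'):
            # e.g. "https://example.com/foo-1.0.tar.gz#egg=foo" (hypothetical)
            url, egg = line.split('#')
            name = egg.split('egg=')[1]
            sources[name] = url
    return sources


def sha256_of_url(url, chunk_size=1024):
    """Stream a download into a temporary file and return its sha256
    hex digest, as the sources branch of process_wheel() does."""
    r = requests.get(url, stream=True)
    r.raise_for_status()
    with tempfile.TemporaryFile() as fd:
        for chunk in r.iter_content(chunk_size):
            fd.write(chunk)
        fd.seek(0)
        return hashlib.sha256(fd.read()).hexdigest()


if __name__ == '__main__':
    # Hypothetical requirements line; a real run would then call
    # sha256_of_url(sources['foo']) to fill in the release hash.
    sources = parse_source_urls(['https://example.com/foo-1.0.tar.gz#egg=foo'])
    print(sources)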