comparison scripts/hvresources/uploader.py @ 549:ff0e9e827dcd prerelease
Updated uploader.py and build_pd_heavy for a more graceful failure
author | Giulio Moro <giuliomoro@yahoo.it> |
---|---|
date | Fri, 24 Jun 2016 12:42:48 +0100 |
parents | 67a746eea29e |
children |
548:3980b707634d | 549:ff0e9e827dcd |
---|---|
3 import argparse | 3 import argparse |
4 import datetime | 4 import datetime |
5 import getpass | 5 import getpass |
6 import json | 6 import json |
7 import os | 7 import os |
8 import requests | 8 import requests # http://docs.python-requests.org/en/master/api/#exceptions |
9 import shutil | 9 import shutil |
10 import stat | 10 import stat |
| 11 import sys |
11 import tempfile | 12 import tempfile |
12 import time | 13 import time |
13 import urlparse | 14 import urlparse |
14 import zipfile | 15 import zipfile |
15 | 16 |
23 red = "\033[91m" | 24 red = "\033[91m" |
24 bold = "\033[1m" | 25 bold = "\033[1m" |
25 underline = "\033[4m" | 26 underline = "\033[4m" |
26 end = "\033[0m" | 27 end = "\033[0m" |
27 | 28 |
| 29 class ErrorCodes(object): |
| 30 # NOTE(mhroth): this class could inherit from Enum, but we choose not to |
| 31 # so as not to require an additional dependency |
| 32 # http://www.tldp.org/LDP/abs/html/exitcodes.html |
| 33 # http://stackoverflow.com/questions/1101957/are-there-any-standard-exit-status-codes-in-linux |
| 34 CODE_OK = 0 # success! |
| 35 CODE_MAIN_NOT_FOUND = 3 # _main.pd not found |
| 36 CODE_HEAVY_COMPILE_ERRORS = 4 # heavy returned compiler errors |
| 37 CODE_UPLOAD_ASSET_TOO_LARGE = 5 # the size of the uploadable asset is too large |
| 38 CODE_RELEASE_NOT_AVAILABLE = 6 # the requested release is not available |
| 39 CODE_CONNECTION_ERROR = 7 # HTTPS connection could not be made to the server |
| 40 CODE_CONNECTION_TIMEOUT = 8 # HTTPS connection has timed out |
| 41 CODE_CONNECTION_400_500 = 9 # a 400 or 500 error has occurred |
| 42 CODE_EXCEPTION = 125 # a generic exception has occurred |
| 43 |
| 44 class UploaderException(Exception): |
| 45 def __init__(self, code, message=None, e=None): |
| 46 self.code = code |
| 47 self.message = message |
| 48 self.e = e |
| 49 |
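The ErrorCodes table added above is what makes the failure "graceful": every failure path now maps to a distinct process exit status (following the Linux convention of reserving 0 for success), so a calling script such as build_pd_heavy can branch on the result instead of parsing output. A minimal sketch of such a caller; the uploader invocation, paths, and flags here are hypothetical, for illustration only:

    import subprocess

    # hypothetical invocation of the uploader from a wrapper script
    status = subprocess.call(["python", "uploader.py", "my_patch/",
                              "-n", "user/patch", "-o", "out/"])
    if status == 0:
        print "upload succeeded"
    elif status == 4:       # CODE_HEAVY_COMPILE_ERRORS
        print "heavy reported compile errors in the patch"
    elif status in (7, 8):  # connection error / connection timeout
        print "network problem; try again later"
    else:
        print "uploader failed with exit code {0}".format(status)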
28 # the maximum file upload size of 1MB | 50 # the maximum file upload size of 1MB |
29 __HV_MAX_UPLOAD_SIZE = 1024*1024 | 51 __HV_MAX_UPLOAD_SIZE = 1 * 1024*1024 |
30 | 52 |
31 def __zip_dir(in_dir, zip_path, file_filter=None): | 53 def __zip_dir(in_dir, zip_path, file_filter=None): |
32 """Recursively zip an entire directory with an optional file filter | 54 """Recursively zip an entire directory with an optional file filter |
33 """ | 55 """ |
34 zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) | 56 zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) |
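The rest of __zip_dir is unchanged and therefore elided from this hunk. For context, a recursive zip with an extension filter would typically walk the tree and store paths relative to the input directory; a rough sketch (not necessarily the actual implementation) might be:

    import os, zipfile

    def zip_dir_sketch(in_dir, zip_path, file_filter=None):
        zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED)
        for dirpath, dirnames, filenames in os.walk(in_dir):
            for fname in filenames:
                # keep only files whose extension is in the filter, if one is given
                if file_filter is None or fname.rsplit(".", 1)[-1] in file_filter:
                    full_path = os.path.join(dirpath, fname)
                    # store paths relative to in_dir so the archive root is clean
                    zf.write(full_path, os.path.relpath(full_path, in_dir))
        zf.close()
        return zip_path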
60 nargs="+", | 82 nargs="+", |
61 default=["c"], | 83 default=["c"], |
62 help="List of generator outputs. Currently supported generators are " | 84 help="List of generator outputs. Currently supported generators are " |
63 "'c', 'js', 'pdext', 'pdext-osx', 'unity', 'unity-osx', " | 85 "'c', 'js', 'pdext', 'pdext-osx', 'unity', 'unity-osx', " |
64 "'unity-win-x86', 'unity-win-x86_64', 'wwise', 'wwise-win-x86_64', " | 86 "'unity-win-x86', 'unity-win-x86_64', 'wwise', 'wwise-win-x86_64', " |
65 "'vst2' ,'vst2-osx', and 'vst2-win-x86_64'.") | 87 "'vst2' ,'vst2-osx', 'vst2-win-x86_64', and 'vst2-win-x86'.") |
66 parser.add_argument( | 88 parser.add_argument( |
67 "-b", | 89 "-b", |
68 help="All files will be placed in the output directory, placed in their own subdirectory corresponding to the generator name.", | 90 help="All files will be placed in the output directory, placed in their own subdirectory corresponding to the generator name.", |
69 action="count") | 91 action="count") |
70 parser.add_argument( | 92 parser.add_argument( |
100 "-v", "--verbose", | 122 "-v", "--verbose", |
101 help="Show debugging information.", | 123 help="Show debugging information.", |
102 action="count") | 124 action="count") |
103 parser.add_argument( | 125 parser.add_argument( |
104 "-t", "--token", | 126 "-t", "--token", |
105 help="Use the specified token.", | 127 help="Use the specified token.") |
106 ) | |
107 args = parser.parse_args() | 128 args = parser.parse_args() |
108 | 129 |
109 domain = args.domain or "https://enzienaudio.com" | 130 try: |
110 | 131 # set default values |
111 post_data = {} | 132 domain = args.domain or "https://enzienaudio.com" |
112 | 133 exit_code = ErrorCodes.CODE_OK |
113 # token should be stored in ~/.heavy/token | 134 temp_dir = None |
114 token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token")) | 135 post_data = {} |
115 | 136 |
116 if args.token is not None: | 137 # token should be stored in ~/.heavy/token |
117 # check if token has been passed as a command line arg... | 138 token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token")) |
118 post_data["credentials"] = {"token": args.token} | 139 |
119 elif os.path.exists(token_path) and not args.z: | 140 if args.token is not None: |
120 # ...or if it is stored in the user's home directory | 141 # check if token has been passed as a command line arg... |
121 with open(token_path, "r") as f: | 142 post_data["credentials"] = {"token": args.token} |
122 post_data["credentials"] = {"token": f.read()} | 143 elif os.path.exists(token_path) and not args.z: |
123 else: | 144 # ...or if it is stored in the user's home directory |
124 # otherwise, get the username and password | 145 with open(token_path, "r") as f: |
125 post_data["credentials"] = { | 146 post_data["credentials"] = {"token": f.read()} |
126 "username": raw_input("Enter username: "), | 147 else: |
127 "password": getpass.getpass("Enter password: ") | 148 # otherwise, get the username and password |
128 } | 149 post_data["credentials"] = { |
129 | 150 "username": raw_input("Enter username: "), |
130 tick = time.time() | 151 "password": getpass.getpass("Enter password: ") |
131 | 152 } |
132 # parse the optional release argument | 153 |
133 if args.release: | 154 tick = time.time() |
134 try: | 155 |
| 156 # parse the optional release argument |
| 157 if args.release: |
135 # check the validity of the current release | 158 # check the validity of the current release |
136 releases_json = requests.get(urlparse.urljoin(domain, "/a/releases")).json() | 159 releases_json = requests.get(urlparse.urljoin(domain, "/a/releases")).json() |
137 if args.release in releases_json: | 160 if args.release in releases_json: |
138 today = datetime.datetime.now() | 161 today = datetime.datetime.now() |
139 valid_until = datetime.datetime.strptime(releases_json[args.release]["validUntil"], "%Y-%m-%d") | 162 valid_until = datetime.datetime.strptime(releases_json[args.release]["validUntil"], "%Y-%m-%d") |
153 args.release) | 176 args.release) |
154 for k,v in releases_json.items(): | 177 for k,v in releases_json.items(): |
155 print "* {0} ({1})".format( | 178 print "* {0} ({1})".format( |
156 k, | 179 k, |
157 v["releaseDate"]) | 180 v["releaseDate"]) |
158 return | 181 raise UploaderException(ErrorCodes.CODE_RELEASE_NOT_AVAILABLE) |
159 except: | 182 |
160 pass # if the /a/releases request fails for whatever reason, just move on | 183 post_data["release"] = args.release |
161 | 184 |
162 post_data["release"] = args.release | 185 # make a temporary directory |
163 | 186 temp_dir = tempfile.mkdtemp(prefix="lroyal-") |
164 # make a temporary directory | 187 |
165 temp_dir = tempfile.mkdtemp(prefix="lroyal-") | 188 # zip up the pd directory into the temporary directory |
166 | |
167 # zip up the pd directory into the temporary directory | |
168 try: | |
169 if not os.path.exists(os.path.join(args.input_dir, "_main.pd")): | 189 if not os.path.exists(os.path.join(args.input_dir, "_main.pd")): |
170 raise Exception("Root Pd directory does not contain a file named _main.pd.") | 190 raise UploaderException( |
| 191 ErrorCodes.CODE_MAIN_NOT_FOUND, |
| 192 "Root Pd directory does not contain a file named _main.pd.") |
171 zip_path = __zip_dir( | 193 zip_path = __zip_dir( |
172 args.input_dir, | 194 args.input_dir, |
173 os.path.join(temp_dir, "archive.zip"), | 195 os.path.join(temp_dir, "archive.zip"), |
174 file_filter={"pd"}) | 196 file_filter={"pd"}) |
175 if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE: | 197 if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE: |
176 raise Exception("The target directory, zipped, is {0} bytes. The maximum upload size is 1MB.".format( | 198 raise UploaderException( |
177 os.stat(zip_path).st_size)) | 199 ErrorCodes.CODE_UPLOAD_ASSET_TOO_LARGE, |
200 "The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.".format( | |
201 os.stat(zip_path).st_size)) | |
202 | |
203 post_data["name"] = args.name | |
204 | |
205 # the outputs to generate (always include c) | |
206 __SUPPORTED_GENERATOR_SET = { | |
207 "c", "js", | |
208 "pdext", "pdext-osx", | |
209 "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64", | |
210 "wwise", "wwise-win-x86_64", | |
211 "vst2", "vst2-osx", "vst2-win-x86_64", | |
212 } | |
213 post_data["gen"] = list(({"c"} | {s.lower() for s in set(args.gen)}) & __SUPPORTED_GENERATOR_SET) | |
214 | |
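The set expression at new line 213 normalises the requested generators: lower-case every name, force "c" into the set, and intersect with __SUPPORTED_GENERATOR_SET so that unknown names are silently dropped rather than sent to the server. A worked example, with the set inlined for self-containment:

    >>> __SUPPORTED_GENERATOR_SET = {"c", "js", "pdext", "pdext-osx",
    ...     "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64",
    ...     "wwise", "wwise-win-x86_64", "vst2", "vst2-osx", "vst2-win-x86_64"}
    >>> args_gen = ["JS", "vst2", "fooGen"]
    >>> sorted(({"c"} | {s.lower() for s in set(args_gen)}) & __SUPPORTED_GENERATOR_SET)
    ['c', 'js', 'vst2']    # 'foogen' is unsupported, so it is discarded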
| 215 # upload the job, get the response back |
| 216 # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary, |
| 217 # but we want to send a json encoded deep dictionary. So we do a bit of a hack. |
| 218 r = requests.post( |
| 219 urlparse.urljoin(domain, "/a/heavy"), |
| 220 data={"json":json.dumps(post_data)}, |
| 221 files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")}, |
| 222 verify=False if args.noverify else True) |
| 223 r.raise_for_status() |
| 224 |
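The NOTE at new lines 216-217 refers to a limitation of multipart form encoding: form fields are flat strings, so the nested post_data dictionary is serialised into a single JSON string under the "json" key and sent alongside the zip file. A self-contained sketch of the round trip (the values are hypothetical, and the server-side half is illustrative, not the actual enzienaudio.com code):

    import json

    post_data = {"credentials": {"token": "abc123"}, "name": "user/patch"}  # hypothetical values
    flat_form = {"json": json.dumps(post_data)}   # deep dict -> one flat multipart field
    recovered = json.loads(flat_form["json"])     # what the server can do to undo the hack
    assert recovered == post_data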
225 """ | |
226 { | |
227 "data": { | |
228 "compileTime": 0.05078411102294922, | |
229 "id": "mhroth/asdf/Edp2G", | |
230 "slug": "Edp2G", | |
231 "index": 3, | |
232 "links": { | |
233 "files": { | |
234 "linkage": [ | |
235 { | |
236 "id": "mhroth/asdf/Edp2G/c", | |
237 "type": "file" | |
238 } | |
239 ], | |
240 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files" | |
241 }, | |
242 "project": { | |
243 "linkage": { | |
244 "id": "mhroth/asdf", | |
245 "type": "project" | |
246 }, | |
247 "self": "https://enzienaudio.com/h/mhroth/asdf" | |
248 }, | |
249 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G", | |
250 "user": { | |
251 "linkage": { | |
252 "id": "mhroth", | |
253 "type": "user" | |
254 }, | |
255 "self": "https://enzienaudio.com/h/mhroth" | |
256 } | |
257 }, | |
258 "type": "job" | |
259 }, | |
260 "included": [ | |
261 { | |
262 "filename": "file.c.zip", | |
263 "generator": "c", | |
264 "id": "mhroth/asdf/Edp2G/c", | |
265 "links": { | |
266 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip" | |
267 }, | |
268 "mime": "application/zip", | |
269 "type": "file" | |
270 } | |
271 ], | |
272 "warnings": [ | |
273 {"details": "blah blah blah"} | |
274 ], | |
275 "meta": { | |
276 "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz" | |
277 } | |
278 } | |
279 """ | |
| 280 # decode the JSON API response |
| 281 reply_json = r.json() |
| 282 if args.verbose: |
| 283 print json.dumps( |
| 284 reply_json, |
| 285 sort_keys=True, |
| 286 indent=2, |
| 287 separators=(",", ": ")) |
| 288 |
| 289 # update the api token, if present |
290 if "token" in reply_json.get("meta",{}) and not args.x: | |
291 if args.token is not None: | |
292 if reply_json["meta"]["token"] != args.token: | |
293 print "WARNING: Token returned by API is not the same as the " | |
294 "token supplied at the command line. (old = %s, new = %s)".format( | |
295 args.token, | |
296 reply_json["meta"]["token"]) | |
| 297 else: |
| 298 if not os.path.exists(os.path.dirname(token_path)): |
| 299 # ensure that the .heavy directory exists |
| 300 os.makedirs(os.path.dirname(token_path)) |
| 301 with open(token_path, "w") as f: |
| 302 f.write(reply_json["meta"]["token"]) |
| 303 # force rw------- permissions on the file |
| 304 os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) |
| 305 |
| 306 # print any warnings |
| 307 for i,x in enumerate(reply_json.get("warnings",[])): |
| 308 print "{3}) {0}Warning:{1} {2}".format( |
| 309 Colours.yellow, Colours.end, x["detail"], i+1) |
| 310 |
| 311 # check for errors |
| 312 if len(reply_json.get("errors",[])) > 0: |
| 313 for i,x in enumerate(reply_json["errors"]): |
| 314 print "{3}) {0}Error:{1} {2}".format( |
| 315 Colours.red, Colours.end, x["detail"], i+1) |
| 316 raise UploaderException(ErrorCodes.CODE_HEAVY_COMPILE_ERRORS) |
| 317 |
| 318 # retrieve all requested files |
| 319 for i,g in enumerate(args.gen): |
| 320 file_url = __get_file_url_for_generator(reply_json, g) |
| 321 if file_url and (len(args.out) > i or args.b): |
| 322 r = requests.get( |
| 323 file_url, |
| 324 cookies={"token": reply_json["meta"]["token"]}, |
| 325 verify=False if args.noverify else True) |
| 326 r.raise_for_status() |
| 327 |
| 328 # write the reply to a temporary file |
| 329 c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g)) |
| 330 with open(c_zip_path, "wb") as f: |
| 331 f.write(r.content) |
| 332 |
| 333 # unzip the files to where they belong |
| 334 if args.b: |
| 335 target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g) |
| 336 else: |
| 337 target_dir = os.path.abspath(os.path.expanduser(args.out[i])) |
| 338 if not os.path.exists(target_dir): |
| 339 os.makedirs(target_dir) # ensure that the output directory exists |
| 340 __unzip(c_zip_path, target_dir) |
| 341 |
| 342 if g == "c" and args.y: |
| 343 keep_files = ("_{0}.h".format(args.name), "_{0}.c".format(args.name)) |
| 344 for f in os.listdir(target_dir): |
| 345 if not f.endswith(keep_files): |
| 346 os.remove(os.path.join(target_dir, f)); |
| 347 |
| 348 print "{0} files placed in {1}".format(g, target_dir) |
| 349 else: |
| 350 print "{0}Warning:{1} {2} files could not be retrieved.".format( |
| 351 Colours.yellow, Colours.end, |
| 352 g) |
| 353 |
| 354 print "Job URL:", reply_json["data"]["links"]["self"] |
| 355 print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick))) |
| 356 print "Heavy release:", reply_json.get("meta",{}).get("release", "default") |
| 357 except UploaderException as e: |
| 358 exit_code = e.code |
| 359 if e.message: |
| 360 print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e.message) |
| 361 except requests.ConnectTimeout as e: |
| 362 print "{0}Error:{1} Connection to server timed out. The server might be overloaded. Try again later?\n{2}".format(Colours.red, Colours.end, e) |
| 363 exit_code = ErrorCodes.CODE_CONNECTION_TIMEOUT |
| 364 except requests.ConnectionError as e: |
| 365 print "{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\n{2}".format(Colours.red, Colours.end, e) |
| 366 exit_code = ErrorCodes.CODE_CONNECTION_ERROR |
| 367 except requests.HTTPError as e: |
| 368 print "{0}Error:{1} An HTTP error has occurred.\n{2}".format(Colours.red, Colours.end, e) |
| 369 exit_code = ErrorCodes.CODE_CONNECTION_400_500 |
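One ordering subtlety in the handlers above: in requests, ConnectTimeout is a subclass of ConnectionError, so the ConnectTimeout clause has to come before the ConnectionError clause; otherwise it is unreachable and CODE_CONNECTION_TIMEOUT can never be returned. The rule in miniature:

    import requests

    try:
        raise requests.ConnectTimeout("simulated timeout")
    except requests.ConnectTimeout:
        print "caught as ConnectTimeout"    # reached: the subclass is listed first
    except requests.ConnectionError:
        print "caught as ConnectionError"   # would shadow the timeout if listed first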
178 except Exception as e: | 370 except Exception as e: |
| 371 exit_code = ErrorCodes.CODE_EXCEPTION |
179 print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e) | 372 print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e) |
180 shutil.rmtree(temp_dir) # clean up the temporary directory | |
181 return | |
182 | |
183 post_data["name"] = args.name | |
184 | |
185 # the outputs to generate (always include c) | |
186 __SUPPORTED_GENERATOR_SET = { | |
187 "c", "js", | |
188 "pdext", "pdext-osx", | |
189 "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64", | |
190 "wwise", "wwise-win-x86_64", | |
191 "vst2", "vst2-osx", "vst2-win-x86_64", | |
192 } | |
193 post_data["gen"] = list(({"c"} | {s.lower() for s in set(args.gen)}) & __SUPPORTED_GENERATOR_SET) | |
194 | |
195 # upload the job, get the response back | |
196 # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary, | |
197 # but we want to send a json encoded deep dictionary. So we do a bit of a hack. | |
198 r = requests.post( | |
199 urlparse.urljoin(domain, "/a/heavy"), | |
200 data={"json":json.dumps(post_data)}, | |
201 files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")}, | |
202 verify=False if args.noverify else True) | |
203 | |
204 if r.status_code != requests.codes.ok: | |
205 shutil.rmtree(temp_dir) # clean up the temporary directory | |
206 print "Getting a weird error? Get the latest uploader at https://enzienaudio.com/static/uploader.py" | 373 print "Getting a weird error? Get the latest uploader at https://enzienaudio.com/static/uploader.py" |
207 r.raise_for_status() # raise an exception | 374 finally: |
208 | 375 if temp_dir: |
209 # decode the JSON API response | 376 shutil.rmtree(temp_dir) # delete the temporary directory no matter what |
210 r_json = r.json() | 377 |
211 | 378 # exit and return the exit code |
212 """ | 379 sys.exit(exit_code) |
213 { | |
214 "data": { | |
215 "compileTime": 0.05078411102294922, | |
216 "id": "mhroth/asdf/Edp2G", | |
217 "slug": "Edp2G", | |
218 "index": 3, | |
219 "links": { | |
220 "files": { | |
221 "linkage": [ | |
222 { | |
223 "id": "mhroth/asdf/Edp2G/c", | |
224 "type": "file" | |
225 } | |
226 ], | |
227 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files" | |
228 }, | |
229 "project": { | |
230 "linkage": { | |
231 "id": "mhroth/asdf", | |
232 "type": "project" | |
233 }, | |
234 "self": "https://enzienaudio.com/h/mhroth/asdf" | |
235 }, | |
236 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G", | |
237 "user": { | |
238 "linkage": { | |
239 "id": "mhroth", | |
240 "type": "user" | |
241 }, | |
242 "self": "https://enzienaudio.com/h/mhroth" | |
243 } | |
244 }, | |
245 "type": "job" | |
246 }, | |
247 "included": [ | |
248 { | |
249 "filename": "file.c.zip", | |
250 "generator": "c", | |
251 "id": "mhroth/asdf/Edp2G/c", | |
252 "links": { | |
253 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip" | |
254 }, | |
255 "mime": "application/zip", | |
256 "type": "file" | |
257 } | |
258 ], | |
259 "warnings": [ | |
260 {"details": "blah blah blah"} | |
261 ], | |
262 "meta": { | |
263 "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz" | |
264 } | |
265 } | |
266 """ | |
267 reply_json = r.json() | |
268 if args.verbose: | |
269 print json.dumps( | |
270 reply_json, | |
271 sort_keys=True, | |
272 indent=2, | |
273 separators=(",", ": ")) | |
274 | |
275 # update the api token, if present | |
276 if "token" in reply_json.get("meta",{}) and not args.x: | |
277 if args.token is not None: | |
278 if reply_json["meta"]["token"] != args.token: | |
279 print "WARNING: Token returned by API is not the same as the " | |
280 "token supplied at the command line. (old = %s, new = %s)".format( | |
281 args.token, | |
282 reply_json["meta"]["token"]) | |
283 else: | |
284 if not os.path.exists(os.path.dirname(token_path)): | |
285 # ensure that the .heavy directory exists | |
286 os.makedirs(os.path.dirname(token_path)) | |
287 with open(token_path, "w") as f: | |
288 f.write(reply_json["meta"]["token"]) | |
289 # force rw------- permissions on the file | |
290 os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) | |
291 | |
292 # print any warnings | |
293 for i,x in enumerate(r_json.get("warnings",[])): | |
294 print "{3}) {0}Warning:{1} {2}".format( | |
295 Colours.yellow, Colours.end, x["detail"], i+1) | |
296 | |
297 # check for errors | |
298 if len(r_json.get("errors",[])) > 0: | |
299 shutil.rmtree(temp_dir) # clean up the temporary directory | |
300 for i,x in enumerate(r_json["errors"]): | |
301 print "{3}) {0}Error:{1} {2}".format( | |
302 Colours.red, Colours.end, x["detail"], i+1) | |
303 return | |
304 | |
305 # retrieve all requested files | |
306 for i,g in enumerate(args.gen): | |
307 file_url = __get_file_url_for_generator(reply_json, g) | |
308 if file_url is not None and (len(args.out) > i or args.b): | |
309 r = requests.get( | |
310 file_url, | |
311 cookies={"token": reply_json["meta"]["token"]}, | |
312 verify=False if args.noverify else True) | |
313 r.raise_for_status() | |
314 | |
315 # write the reply to a temporary file | |
316 c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g)) | |
317 with open(c_zip_path, "wb") as f: | |
318 f.write(r.content) | |
319 | |
320 # unzip the files to where they belong | |
321 if args.b: | |
322 target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g) | |
323 else: | |
324 target_dir = os.path.abspath(os.path.expanduser(args.out[i])) | |
325 if not os.path.exists(target_dir): | |
326 os.makedirs(target_dir) # ensure that the output directory exists | |
327 __unzip(c_zip_path, target_dir) | |
328 | |
329 if g == "c" and args.y: | |
330 keep_files = ("_{0}.h".format(args.name), "_{0}.c".format(args.name)) | |
331 for f in os.listdir(target_dir): | |
332 if not f.endswith(keep_files): | |
333 os.remove(os.path.join(target_dir, f)); | |
334 | |
335 print "{0} files placed in {1}".format(g, target_dir) | |
336 else: | |
337 print "{0}Warning:{1} {2} files could not be retrieved.".format( | |
338 Colours.yellow, Colours.end, | |
339 g) | |
340 | |
341 # delete the temporary directory | |
342 shutil.rmtree(temp_dir) | |
343 | |
344 print "Job URL:", reply_json["data"]["links"]["self"] | |
345 print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick))) | |
346 print "Heavy release:", reply_json.get("meta",{}).get("release", "default") | |
347 | 380 |
348 def __get_file_url_for_generator(json_api, g): | 381 def __get_file_url_for_generator(json_api, g): |
349 """Returns the file link for a specific generator. | 382 """Returns the file link for a specific generator. |
350 Returns None if no link could be found. | 383 Returns None if no link could be found. |
351 """ | 384 """ |