changeset 549:ff0e9e827dcd prerelease
Updated uploader.py and build_pd_heavy for a more graceful failure
| author | Giulio Moro <giuliomoro@yahoo.it> |
|---|---|
| date | Fri, 24 Jun 2016 12:42:48 +0100 |
| parents | 3980b707634d |
| children | 5d954690a7c1 |
| files | scripts/build_pd_heavy.sh scripts/hvresources/uploader.py |
| diffstat | 2 files changed, 247 insertions(+), 211 deletions(-) |
```diff
--- a/scripts/build_pd_heavy.sh	Fri Jun 24 04:49:40 2016 +0100
+++ b/scripts/build_pd_heavy.sh	Fri Jun 24 12:42:48 2016 +0100
@@ -126,28 +126,31 @@
 #TODO: get a reliable, exhaustive, up-to-date list.
 HEAVY_FILES='Heavy* Hv*'
 
+check_board_alive
 set_date
 reference_time_file="$projectpath"/
 
 uploadBuildRun(){
     if [ $NO_UPLOAD -eq 0 ]; then
         # remove old static files to avoid obsolete errors
-        # make sure the path is not empty, so avoiding to rm -rf / by mistake
-        [ -z $projectpath ] && { echo 'ERROR: $projectpath is empty.'; exit 0; }
+        # make sure the path is not empty, so avoiding to rm -rf / by mistake
+        [ -z $projectpath ] && { echo 'ERROR: $projectpath is empty.'; exit 0; }
         # use -rf to prevent warnings in case they do not exist
         for file in $HEAVY_FILES
-        do
-            rm -rf "$projectpath"/$file
-        done
+        do
+            rm -rf "$projectpath"/$file
+        done
+
+        echo "Invoking the online compiler..."
         # invoke the online compiler
         "$BELA_PYTHON27" $HVRESOURCES_DIR/uploader.py "$pdpath"/ -n $ENZIENAUDIO_COM_PATCH_NAME -g c -o "$projectpath" $RELEASE_STRING ||\
-            { echo "ERROR: an error occurred while executing the uploader.py script"; exit 1; }
+            { echo "ERROR: an error occurred while executing the uploader.py script"; exit $?; }
     fi;
 
     echo "";
 
     # Test that files have been retrieved from the online compiler.
-    # TODO: find a more reliable way of doing this. e.g.: have uploader.py fail with a non-zero error code.
+    # TODO: skip this now that uploader.py returns meaningful exit codes
    for file in $HEAVY_FILES; do
        ls "$projectpath"/$file >/dev/null 2>&1 || {
```
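A note on the `exit $?` idiom introduced above: in `cmd || { echo msg; exit $?; }`, the `$?` is expanded only when `exit` runs, i.e. after `echo` has already succeeded, so the block exits with `echo`'s status (normally 0) rather than the uploader's. A minimal sketch of a variant that preserves the uploader's exit status; it reuses the script's `$BELA_PYTHON27`, `$pdpath` and `$projectpath` variables purely for illustration and is not part of the changeset:

```sh
# Sketch only: save the failing command's status into a variable
# before any other command can overwrite $?.
"$BELA_PYTHON27" uploader.py "$pdpath"/ -g c -o "$projectpath" || {
    ret=$?  # capture uploader.py's exit code immediately
    echo "ERROR: an error occurred while executing the uploader.py script"
    exit $ret
}
```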
```diff
--- a/scripts/hvresources/uploader.py	Fri Jun 24 04:49:40 2016 +0100
+++ b/scripts/hvresources/uploader.py	Fri Jun 24 12:42:48 2016 +0100
@@ -5,9 +5,10 @@
 import getpass
 import json
 import os
-import requests
+import requests # http://docs.python-requests.org/en/master/api/#exceptions
 import shutil
 import stat
+import sys
 import tempfile
 import time
 import urlparse
@@ -25,8 +26,29 @@
     underline = "\033[4m"
     end = "\033[0m"
 
+class ErrorCodes(object):
+    # NOTE(mhroth): this class could inherit from Enum, but we choose not to,
+    # so as not to require an additional dependency
+    # http://www.tldp.org/LDP/abs/html/exitcodes.html
+    # http://stackoverflow.com/questions/1101957/are-there-any-standard-exit-status-codes-in-linux
+    CODE_OK = 0 # success!
+    CODE_MAIN_NOT_FOUND = 3 # _main.pd not found
+    CODE_HEAVY_COMPILE_ERRORS = 4 # heavy returned compiler errors
+    CODE_UPLOAD_ASSET_TOO_LARGE = 5 # the size of the uploadable asset is too large
+    CODE_RELEASE_NOT_AVAILABLE = 6 # the requested release is not available
+    CODE_CONNECTION_ERROR = 7 # HTTPS connection could not be made to the server
+    CODE_CONNECTION_TIMEOUT = 8 # HTTPS connection has timed out
+    CODE_CONNECTION_400_500 = 9 # a 400 or 500 error has occurred
+    CODE_EXCEPTION = 125 # a generic exception has occurred
+
+class UploaderException(Exception):
+    def __init__(self, code, message=None, e=None):
+        self.code = code
+        self.message = message
+        self.e = e
+
 # the maximum file upload size of 1MB
-__HV_MAX_UPLOAD_SIZE = 1024*1024
+__HV_MAX_UPLOAD_SIZE = 1 * 1024*1024
 
 def __zip_dir(in_dir, zip_path, file_filter=None):
     """Recursively zip an entire directory with an optional file filter
@@ -62,7 +84,7 @@
         help="List of generator outputs. Currently supported generators are "
             "'c', 'js', 'pdext', 'pdext-osx', 'unity', 'unity-osx', "
             "'unity-win-x86', 'unity-win-x86_64', 'wwise', 'wwise-win-x86_64', "
-            "'vst2' ,'vst2-osx', and 'vst2-win-x86_64'.")
+            "'vst2' ,'vst2-osx', 'vst2-win-x86_64', and 'vst2-win-x86'.")
     parser.add_argument(
         "-b",
         help="All files will be placed in the output directory, placed in their own subdirectory corresponding to the generator name.",
@@ -102,36 +124,37 @@
         action="count")
     parser.add_argument(
         "-t", "--token",
-        help="Use the specified token.",
-    )
+        help="Use the specified token.")
     args = parser.parse_args()
 
-    domain = args.domain or "https://enzienaudio.com"
+    try:
+        # set default values
+        domain = args.domain or "https://enzienaudio.com"
+        exit_code = ErrorCodes.CODE_OK
+        temp_dir = None
+        post_data = {}
 
-    post_data = {}
+        # token should be stored in ~/.heavy/token
+        token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
 
-    # token should be stored in ~/.heavy/token
-    token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
+        if args.token is not None:
+            # check if token has been passed as a command line arg...
+            post_data["credentials"] = {"token": args.token}
+        elif os.path.exists(token_path) and not args.z:
+            # ...or if it is stored in the user's home directory
+            with open(token_path, "r") as f:
+                post_data["credentials"] = {"token": f.read()}
+        else:
+            # otherwise, get the username and password
+            post_data["credentials"] = {
+                "username": raw_input("Enter username: "),
+                "password": getpass.getpass("Enter password: ")
+            }
 
-    if args.token is not None:
-        # check if token has been passed as a command line arg...
-        post_data["credentials"] = {"token": args.token}
-    elif os.path.exists(token_path) and not args.z:
-        # ...or if it is stored in the user's home directory
-        with open(token_path, "r") as f:
-            post_data["credentials"] = {"token": f.read()}
-    else:
-        # otherwise, get the username and password
-        post_data["credentials"] = {
-            "username": raw_input("Enter username: "),
-            "password": getpass.getpass("Enter password: ")
-        }
+        tick = time.time()
 
-    tick = time.time()
-
-    # parse the optional release argument
-    if args.release:
-        try:
+        # parse the optional release argument
+        if args.release:
             # check the validity of the current release
             releases_json = requests.get(urlparse.urljoin(domain, "/a/releases")).json()
             if args.release in releases_json:
@@ -155,195 +178,205 @@
                     print "* {0} ({1})".format(
                         k,
                         v["releaseDate"])
-                return
-        except:
-            pass # if the /a/releases request fails for whatever reason, just move on
+                raise UploaderException(ErrorCodes.CODE_RELEASE_NOT_AVAILABLE)
 
-    post_data["release"] = args.release
+        post_data["release"] = args.release
 
-    # make a temporary directory
-    temp_dir = tempfile.mkdtemp(prefix="lroyal-")
+        # make a temporary directory
+        temp_dir = tempfile.mkdtemp(prefix="lroyal-")
 
-    # zip up the pd directory into the temporary directory
-    try:
+        # zip up the pd directory into the temporary directory
         if not os.path.exists(os.path.join(args.input_dir, "_main.pd")):
-            raise Exception("Root Pd directory does not contain a file named _main.pd.")
+            raise UploaderException(
+                ErrorCodes.CODE_MAIN_NOT_FOUND,
+                "Root Pd directory does not contain a file named _main.pd.")
         zip_path = __zip_dir(
             args.input_dir,
             os.path.join(temp_dir, "archive.zip"),
             file_filter={"pd"})
         if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE:
-            raise Exception("The target directory, zipped, is {0} bytes. The maximum upload size is 1MB.".format(
-                os.stat(zip_path).st_size))
+            raise UploaderException(
+                ErrorCodes.CODE_UPLOAD_ASSET_TOO_LARGE,
+                "The target directory, zipped, is {0} bytes. The maximum upload size is 1MB.".format(
+                    os.stat(zip_path).st_size))
+
+        post_data["name"] = args.name
+
+        # the outputs to generate (always include c)
+        __SUPPORTED_GENERATOR_SET = {
+            "c", "js",
+            "pdext", "pdext-osx",
+            "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64",
+            "wwise", "wwise-win-x86_64",
+            "vst2", "vst2-osx", "vst2-win-x86_64",
+        }
+        post_data["gen"] = list(({"c"} | {s.lower() for s in set(args.gen)}) & __SUPPORTED_GENERATOR_SET)
+
+        # upload the job, get the response back
+        # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary,
+        # but we want to send a json encoded deep dictionary. So we do a bit of a hack.
+        r = requests.post(
+            urlparse.urljoin(domain, "/a/heavy"),
+            data={"json":json.dumps(post_data)},
+            files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")},
+            verify=False if args.noverify else True)
+        r.raise_for_status()
+
+        """
+        {
+            "data": {
+                "compileTime": 0.05078411102294922,
+                "id": "mhroth/asdf/Edp2G",
+                "slug": "Edp2G",
+                "index": 3,
+                "links": {
+                    "files": {
+                        "linkage": [
+                            {
+                                "id": "mhroth/asdf/Edp2G/c",
+                                "type": "file"
+                            }
+                        ],
+                        "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files"
+                    },
+                    "project": {
+                        "linkage": {
+                            "id": "mhroth/asdf",
+                            "type": "project"
+                        },
+                        "self": "https://enzienaudio.com/h/mhroth/asdf"
+                    },
+                    "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G",
+                    "user": {
+                        "linkage": {
+                            "id": "mhroth",
+                            "type": "user"
+                        },
+                        "self": "https://enzienaudio.com/h/mhroth"
+                    }
+                },
+                "type": "job"
+            },
+            "included": [
+                {
+                    "filename": "file.c.zip",
+                    "generator": "c",
+                    "id": "mhroth/asdf/Edp2G/c",
+                    "links": {
+                        "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip"
+                    },
+                    "mime": "application/zip",
+                    "type": "file"
+                }
+            ],
+            "warnings": [
+                {"details": "blah blah blah"}
+            ],
+            "meta": {
+                "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz"
+            }
+        }
+        """
+        # decode the JSON API response
+        reply_json = r.json()
+        if args.verbose:
+            print json.dumps(
+                reply_json,
+                sort_keys=True,
+                indent=2,
+                separators=(",", ": "))
+
+        # update the api token, if present
+        if "token" in reply_json.get("meta",{}) and not args.x:
+            if args.token is not None:
+                if reply_json["meta"]["token"] != args.token:
+                    print "WARNING: Token returned by API is not the same as the "
+                    "token supplied at the command line. (old = %s, new = %s)".format(
+                        args.token,
+                        reply_json["meta"]["token"])
+            else:
+                if not os.path.exists(os.path.dirname(token_path)):
+                    # ensure that the .heavy directory exists
+                    os.makedirs(os.path.dirname(token_path))
+                with open(token_path, "w") as f:
+                    f.write(reply_json["meta"]["token"])
+                # force rw------- permissions on the file
+                os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR)
+
+        # print any warnings
+        for i,x in enumerate(reply_json.get("warnings",[])):
+            print "{3}) {0}Warning:{1} {2}".format(
+                Colours.yellow, Colours.end, x["detail"], i+1)
+
+        # check for errors
+        if len(reply_json.get("errors",[])) > 0:
+            for i,x in enumerate(reply_json["errors"]):
+                print "{3}) {0}Error:{1} {2}".format(
+                    Colours.red, Colours.end, x["detail"], i+1)
+            raise UploaderException(ErrorCodes.CODE_HEAVY_COMPILE_ERRORS)
+
+        # retrieve all requested files
+        for i,g in enumerate(args.gen):
+            file_url = __get_file_url_for_generator(reply_json, g)
+            if file_url and (len(args.out) > i or args.b):
+                r = requests.get(
+                    file_url,
+                    cookies={"token": reply_json["meta"]["token"]},
+                    verify=False if args.noverify else True)
+                r.raise_for_status()
+
+                # write the reply to a temporary file
+                c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
+                with open(c_zip_path, "wb") as f:
+                    f.write(r.content)
+
+                # unzip the files to where they belong
+                if args.b:
+                    target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g)
+                else:
+                    target_dir = os.path.abspath(os.path.expanduser(args.out[i]))
+                if not os.path.exists(target_dir):
+                    os.makedirs(target_dir) # ensure that the output directory exists
+                __unzip(c_zip_path, target_dir)
+
+                if g == "c" and args.y:
+                    keep_files = ("_{0}.h".format(args.name), "_{0}.c".format(args.name))
+                    for f in os.listdir(target_dir):
+                        if not f.endswith(keep_files):
+                            os.remove(os.path.join(target_dir, f));
+
+                print "{0} files placed in {1}".format(g, target_dir)
+            else:
+                print "{0}Warning:{1} {2} files could not be retrieved.".format(
+                    Colours.yellow, Colours.end,
+                    g)
+
+        print "Job URL:", reply_json["data"]["links"]["self"]
+        print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))
+        print "Heavy release:", reply_json.get("meta",{}).get("release", "default")
+    except UploaderException as e:
+        exit_code = e.code
+        if e.message:
+            print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e.message)
+    except requests.ConnectionError as e:
+        print "{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\n{2}".format(Colours.red, Colours.end, e)
+        exit_code = ErrorCodes.CODE_CONNECTION_ERROR
+    except requests.ConnectTimeout as e:
+        print "{0}Error:{1} Connection to server timed out. The server might be overloaded. Try again later?\n{2}".format(Colours.red, Colours.end, e)
+        exit_code = ErrorCodes.CODE_CONNECTION_TIMEOUT
+    except requests.HTTPError as e:
+        print "{0}Error:{1} An HTTP error has occurred.\n{2}".format(Colours.red, Colours.end, e)
+        exit_code = ErrorCodes.CODE_CONNECTION_400_500
     except Exception as e:
+        exit_code = ErrorCodes.CODE_EXCEPTION
         print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e)
-        shutil.rmtree(temp_dir) # clean up the temporary directory
-        return
+        print "Getting a weird error? Get the latest uploader at https://enzienaudio.com/static/uploader.py"
+    finally:
+        if temp_dir:
+            shutil.rmtree(temp_dir) # delete the temporary directory no matter what
 
-    post_data["name"] = args.name
-
-    # the outputs to generate (always include c)
-    __SUPPORTED_GENERATOR_SET = {
-        "c", "js",
-        "pdext", "pdext-osx",
-        "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64",
-        "wwise", "wwise-win-x86_64",
-        "vst2", "vst2-osx", "vst2-win-x86_64",
-    }
-    post_data["gen"] = list(({"c"} | {s.lower() for s in set(args.gen)}) & __SUPPORTED_GENERATOR_SET)
-
-    # upload the job, get the response back
-    # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary,
-    # but we want to send a json encoded deep dictionary. So we do a bit of a hack.
-    r = requests.post(
-        urlparse.urljoin(domain, "/a/heavy"),
-        data={"json":json.dumps(post_data)},
-        files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")},
-        verify=False if args.noverify else True)
-
-    if r.status_code != requests.codes.ok:
-        shutil.rmtree(temp_dir) # clean up the temporary directory
-        print "Getting a weird error? Get the latest uploader at https://enzienaudio.com/static/uploader.py"
-        r.raise_for_status() # raise an exception
-
-    # decode the JSON API response
-    r_json = r.json()
-
-    """
-    {
-        "data": {
-            "compileTime": 0.05078411102294922,
-            "id": "mhroth/asdf/Edp2G",
-            "slug": "Edp2G",
-            "index": 3,
-            "links": {
-                "files": {
-                    "linkage": [
-                        {
-                            "id": "mhroth/asdf/Edp2G/c",
-                            "type": "file"
-                        }
-                    ],
-                    "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files"
-                },
-                "project": {
-                    "linkage": {
-                        "id": "mhroth/asdf",
-                        "type": "project"
-                    },
-                    "self": "https://enzienaudio.com/h/mhroth/asdf"
-                },
-                "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G",
-                "user": {
-                    "linkage": {
-                        "id": "mhroth",
-                        "type": "user"
-                    },
-                    "self": "https://enzienaudio.com/h/mhroth"
-                }
-            },
-            "type": "job"
-        },
-        "included": [
-            {
-                "filename": "file.c.zip",
-                "generator": "c",
-                "id": "mhroth/asdf/Edp2G/c",
-                "links": {
-                    "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip"
-                },
-                "mime": "application/zip",
-                "type": "file"
-            }
-        ],
-        "warnings": [
-            {"details": "blah blah blah"}
-        ],
-        "meta": {
-            "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz"
-        }
-    }
-    """
-    reply_json = r.json()
-    if args.verbose:
-        print json.dumps(
-            reply_json,
-            sort_keys=True,
-            indent=2,
-            separators=(",", ": "))
-
-    # update the api token, if present
-    if "token" in reply_json.get("meta",{}) and not args.x:
-        if args.token is not None:
-            if reply_json["meta"]["token"] != args.token:
-                print "WARNING: Token returned by API is not the same as the "
-                "token supplied at the command line. (old = %s, new = %s)".format(
-                    args.token,
-                    reply_json["meta"]["token"])
-        else:
-            if not os.path.exists(os.path.dirname(token_path)):
-                # ensure that the .heavy directory exists
-                os.makedirs(os.path.dirname(token_path))
-            with open(token_path, "w") as f:
-                f.write(reply_json["meta"]["token"])
-            # force rw------- permissions on the file
-            os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR)
-
-    # print any warnings
-    for i,x in enumerate(r_json.get("warnings",[])):
-        print "{3}) {0}Warning:{1} {2}".format(
-            Colours.yellow, Colours.end, x["detail"], i+1)
-
-    # check for errors
-    if len(r_json.get("errors",[])) > 0:
-        shutil.rmtree(temp_dir) # clean up the temporary directory
-        for i,x in enumerate(r_json["errors"]):
-            print "{3}) {0}Error:{1} {2}".format(
-                Colours.red, Colours.end, x["detail"], i+1)
-        return
-
-    # retrieve all requested files
-    for i,g in enumerate(args.gen):
-        file_url = __get_file_url_for_generator(reply_json, g)
-        if file_url is not None and (len(args.out) > i or args.b):
-            r = requests.get(
-                file_url,
-                cookies={"token": reply_json["meta"]["token"]},
-                verify=False if args.noverify else True)
-            r.raise_for_status()
-
-            # write the reply to a temporary file
-            c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
-            with open(c_zip_path, "wb") as f:
-                f.write(r.content)
-
-            # unzip the files to where they belong
-            if args.b:
-                target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g)
-            else:
-                target_dir = os.path.abspath(os.path.expanduser(args.out[i]))
-            if not os.path.exists(target_dir):
-                os.makedirs(target_dir) # ensure that the output directory exists
-            __unzip(c_zip_path, target_dir)
-
-            if g == "c" and args.y:
-                keep_files = ("_{0}.h".format(args.name), "_{0}.c".format(args.name))
-                for f in os.listdir(target_dir):
-                    if not f.endswith(keep_files):
-                        os.remove(os.path.join(target_dir, f));
-
-            print "{0} files placed in {1}".format(g, target_dir)
-        else:
-            print "{0}Warning:{1} {2} files could not be retrieved.".format(
-                Colours.yellow, Colours.end,
-                g)
-
-    # delete the temporary directory
-    shutil.rmtree(temp_dir)
-
-    print "Job URL:", reply_json["data"]["links"]["self"]
-    print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))
-    print "Heavy release:", reply_json.get("meta",{}).get("release", "default")
+    # exit and return the exit code
+    sys.exit(exit_code)
 
 def __get_file_url_for_generator(json_api, g):
     """Returns the file link for a specific generator.
```
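Since uploader.py now terminates with the documented `ErrorCodes` values instead of always returning, a calling script can branch on the exit status. A hypothetical caller-side sketch in the style of build_pd_heavy.sh; the messages, the `$BELA_PYTHON27` interpreter variable, and the path variables are illustrative only:

```sh
# Sketch only: map uploader.py's documented exit codes to diagnostics.
"$BELA_PYTHON27" uploader.py "$pdpath"/ -g c -o "$projectpath"
ret=$?
case $ret in
    0) ;;                                                   # CODE_OK
    3) echo "error: _main.pd not found in the Pd folder" ;; # CODE_MAIN_NOT_FOUND
    4) echo "error: Heavy reported compile errors" ;;       # CODE_HEAVY_COMPILE_ERRORS
    5) echo "error: zipped patch exceeds the 1MB limit" ;;  # CODE_UPLOAD_ASSET_TOO_LARGE
    6) echo "error: requested release is not available" ;;  # CODE_RELEASE_NOT_AVAILABLE
    7|8|9) echo "error: connection/HTTP failure ($ret)" ;;  # CODE_CONNECTION_*
    *) echo "error: uploader failed with code $ret" ;;      # CODE_EXCEPTION et al.
esac
exit $ret
```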