annotate scripts/hvresources/uploader.py @ 160:5bcf04234f80 heavy-updated

- added -std=c99 to Makefile for user-supplied C files (required for heavy files)
- changed heavy core render.cpp file to use latest API and removed all redundant functions (e.g. foleyDesigner/touchkey stuff)
- use build_pd.sh to compile and run pd files (-h for usage instructions)
author chnrx <chris.heinrichs@gmail.com>
date Thu, 05 Nov 2015 18:58:26 +0000
parents
children c768ed1055b0

#!/usr/bin/python

# Copyright 2015 Section6. All Rights Reserved.

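# Example invocation (the patch directory and name below are illustrative; the
# patch name must already exist on the Heavy website, see --name below):
#
#   python uploader.py ~/patches/my_patch -n my_patch -g c js -b -o ./build
#
# This uploads the directory containing _main.pd, requests the 'c' and 'js'
# generators, and unpacks each result into ./build/c and ./build/js.
# Note: this script targets Python 2 (urlparse, raw_input, print statements).
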
import argparse
import getpass
import json
import os
import requests
import shutil
import stat
import tempfile
import time
import urlparse
import zipfile
import sys

class Colours:
    purple = "\033[95m"
    cyan = "\033[96m"
    dark_cyan = "\033[36m"
    blue = "\033[94m"
    green = "\033[92m"
    yellow = "\033[93m"
    red = "\033[91m"
    bold = "\033[1m"
    underline = "\033[4m"
    end = "\033[0m"

def __zip_dir(in_dir, zip_path, file_filter=None):
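    """Recursively zip the contents of in_dir into a new archive at zip_path.
    If file_filter is given (e.g. {"pd"}), only files whose extension is in
    that set are included. Returns the path to the written archive.
    """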
    zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED)
    for subdir, dirs, files in os.walk(in_dir):
        for file in files:
            if (file_filter is None) or (len(file_filter) > 0 and file.lower().split(".")[-1] in file_filter):
                zf.write(
                    filename=os.path.join(subdir,file),
                    arcname=os.path.relpath(os.path.join(subdir,file), start=in_dir))
    zf.close() # close the archive so that the central directory is written
    return zip_path

def __unzip(zip_path, target_dir):
    """Unzip a file to a given directory. All destination files are overwritten.
    """
    zipfile.ZipFile(zip_path).extractall(target_dir)

def main():
    parser = argparse.ArgumentParser(
        description="Compiles a Pure Data file.")
    parser.add_argument(
        "input_dir",
        help="A directory containing _main.pd. The entire directory will be uploaded.")
    parser.add_argument(
        "-n", "--name",
        default="heavy",
        help="Patch name. If it doesn't exist, the uploader will fail. Make sure that it exists on the Heavy website.")
    parser.add_argument(
        "-g", "--gen",
        nargs="+",
        default=["c"],
        help="List of generator outputs. Currently supported generators are 'c' and 'js'.")
    parser.add_argument(
        "-b",
        help="All files will be placed in the output directory, each in its own subdirectory corresponding to the generator name.",
        action="count")
    parser.add_argument(
        "-o", "--out",
        nargs="+",
        default=["./"], # default to the current directory
        help="List of destination directories for retrieved files. Order should be the same as for --gen.")
    parser.add_argument(
        "-d", "--domain",
        default="https://enzienaudio.com",
        help="Domain. Default is https://enzienaudio.com.")
    parser.add_argument(
        "-x",
        help="Don't save the returned token.",
        action="count")
    parser.add_argument(
        "-z",
        help="Force the use of a password, regardless of saved token.",
        action="count")
    parser.add_argument(
        "--noverify",
        help="Don't verify the SSL connection. Generally a bad idea.",
        action="count")
    parser.add_argument(
        "-v", "--verbose",
        help="Show debugging information.",
        action="count")
    args = parser.parse_args()

    domain = args.domain or "https://enzienaudio.com"

    post_data = {}

    # token should be stored in ~/.heavy/token
    token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
    if os.path.exists(token_path) and not args.z:
        with open(token_path, "r") as f:
            post_data["credentials"] = {
                "token": f.read()
            }
    else:
        # otherwise, get the username and password
        post_data["credentials"] = {
            "username": raw_input("Enter username: "),
            "password": getpass.getpass("Enter password: ")
        }

    tick = time.time()

    # make a temporary directory
    temp_dir = tempfile.mkdtemp(prefix="lroyal-")

    # zip up the pd directory into the temporary directory
    try:
        if not os.path.exists(os.path.join(args.input_dir, "_main.pd")):
            raise Exception("Root Pd directory does not contain a file named _main.pd.")
        zip_path = __zip_dir(
            args.input_dir,
            os.path.join(temp_dir, "archive.zip"),
            file_filter={"pd"})
    except Exception as e:
        print e
        shutil.rmtree(temp_dir) # clean up the temporary directory
        return

    post_data["name"] = args.name

    # the outputs to generate (always include c)
    __SUPPORTED_GENERATOR_SET = {"c", "js"}
    post_data["gen"] = list(({"c"} | set(args.gen)) & __SUPPORTED_GENERATOR_SET)
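    # note: generator names outside the supported set are dropped from the
    # request here; the retrieval loop below will report them as not retrieved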

    # upload the job, get the response back
    # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary,
    # but we want to send a json encoded deep dictionary. So we do a bit of a hack.
    r = requests.post(
        urlparse.urljoin(domain, "/a/heavy"),
        data={"json":json.dumps(post_data)},
        files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")},
        verify=False if args.noverify else True)

    if r.status_code != requests.codes.ok:
        shutil.rmtree(temp_dir) # clean up the temporary directory
        r.raise_for_status() # raise an exception

    # decode the JSON API response
    r_json = r.json()

    """
    {
        "data": {
            "compileTime": 0.05078411102294922,
            "id": "mhroth/asdf/Edp2G",
            "slug": "Edp2G",
            "index": 3,
            "links": {
                "files": {
                    "linkage": [
                        {
                            "id": "mhroth/asdf/Edp2G/c",
                            "type": "file"
                        }
                    ],
                    "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files"
                },
                "project": {
                    "linkage": {
                        "id": "mhroth/asdf",
                        "type": "project"
                    },
                    "self": "https://enzienaudio.com/h/mhroth/asdf"
                },
                "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G",
                "user": {
                    "linkage": {
                        "id": "mhroth",
                        "type": "user"
                    },
                    "self": "https://enzienaudio.com/h/mhroth"
                }
            },
            "type": "job"
        },
        "included": [
            {
                "filename": "file.c.zip",
                "generator": "c",
                "id": "mhroth/asdf/Edp2G/c",
                "links": {
                    "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip"
                },
                "mime": "application/zip",
                "type": "file"
            }
        ],
        "warnings": [],
        "meta": {
            "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz"
        }
    }
    """
    reply_json = r.json()
    if args.verbose:
        print json.dumps(
            reply_json,
            sort_keys=True,
            indent=2,
            separators=(",", ": "))

    # update the api token, if present
    if "token" in reply_json.get("meta",{}) and not args.x:
        if not os.path.exists(os.path.dirname(token_path)):
            os.makedirs(os.path.dirname(token_path)) # ensure that the .heavy directory exists
        with open(token_path, "w") as f:
            f.write(reply_json["meta"]["token"])
        os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) # force rw------- permissions on the file

    # print any warnings
    for x in r_json["warnings"]:
        print "{0}Warning:{1} {2}".format(Colours.yellow, Colours.end, x["detail"])

    # check for errors
    if len(r_json.get("errors",[])) > 0:
        shutil.rmtree(temp_dir) # clean up the temporary directory
        for x in r_json["errors"]:
            print "{0}Error:{1} {2}".format(Colours.red, Colours.end, x["detail"])
        sys.exit(1)
        return

    # retrieve all requested files
    for i,g in enumerate(args.gen):
        file_url = __get_file_url_for_generator(reply_json, g)
        if file_url is not None and (len(args.out) > i or args.b):
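            # fetch the generated archive, sending the API token returned in
            # the job reply as a cookie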
            r = requests.get(
                file_url,
                cookies={"token": reply_json["meta"]["token"]},
                verify=False if args.noverify else True)
            r.raise_for_status()

            # write the reply to a temporary file
            c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
            with open(c_zip_path, "wb") as f:
                f.write(r.content)

            # unzip the files to where they belong
            if args.b:
                target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g)
            else:
                target_dir = os.path.abspath(os.path.expanduser(args.out[i]))
            if not os.path.exists(target_dir):
                os.makedirs(target_dir) # ensure that the output directory exists
            __unzip(c_zip_path, target_dir)

            print "{0} files placed in {1}".format(g, target_dir)
        else:
            print "{0}Warning:{1} {2} files could not be retrieved.".format(
                Colours.yellow, Colours.end,
                g)

    # delete the temporary directory
    shutil.rmtree(temp_dir)

    print "Job URL", reply_json["data"]["links"]["self"]
    print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))

    sys.exit(0)

def __get_file_url_for_generator(json_api, g):
    """Returns the file link for a specific generator.
    Returns None if no link could be found.
    """
    for i in json_api["included"]:
        if g == i["generator"]:
            return i["links"]["self"]
    return None # by default, return None


if __name__ == "__main__":
    main()