Mercurial > hg > beaglert
comparison scripts/hvresources/uploader.py @ 160:5bcf04234f80 heavy-updated
- added -std=c99 to Makefile for user-supplied C files (required for heavy files)
- changed heavy core render.cpp file to use latest API and removed all redundant functions (e.g. foleyDesigner/touchkey stuff)
- use build_pd.sh to compile and run pd files (-h for usage instructions)
author | chnrx <chris.heinrichs@gmail.com> |
---|---|
date | Thu, 05 Nov 2015 18:58:26 +0000 |
parents | |
children | c768ed1055b0 |
comparison
equal
deleted
inserted
replaced
159:1e7db6610600 | 160:5bcf04234f80 |
---|---|
1 #!/usr/bin/python | |
2 | |
3 # Copyright 2015 Section6. All Rights Reserved. | |
4 | |
5 import argparse | |
6 import getpass | |
7 import json | |
8 import os | |
9 import requests | |
10 import shutil | |
11 import stat | |
12 import tempfile | |
13 import time | |
14 import urlparse | |
15 import zipfile | |
16 import sys | |
17 | |
class Colours:
    """ANSI escape sequences used to colourize console output.

    Usage: "{0}text{1}".format(Colours.red, Colours.end) — always terminate
    with ``end`` to reset the terminal attributes.
    """
    # foreground colours
    purple = "\033[95m"
    cyan = "\033[96m"
    dark_cyan = "\033[36m"
    blue = "\033[94m"
    green = "\033[92m"
    yellow = "\033[93m"
    red = "\033[91m"
    # text attributes
    bold = "\033[1m"
    underline = "\033[4m"
    # reset all colours/attributes
    end = "\033[0m"
29 | |
def __zip_dir(in_dir, zip_path, file_filter=None):
    """Recursively zip the contents of in_dir into a new archive at zip_path.

    Archive member names are relative to in_dir.

    @param in_dir  Directory to archive.
    @param zip_path  Destination path of the zip file (overwritten if present).
    @param file_filter  Optional set of lowercase filename extensions (no dot).
        When given and non-empty, only files whose extension is in the set are
        added; when None, every file is added.
    @return zip_path, for convenience.
    """
    zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED)
    try:
        for subdir, dirs, files in os.walk(in_dir):
            for fname in files:  # renamed from "file" to avoid shadowing the builtin
                if (file_filter is None) or (len(file_filter) > 0 and fname.lower().split(".")[-1] in file_filter):
                    zf.write(
                        filename=os.path.join(subdir, fname),
                        arcname=os.path.relpath(os.path.join(subdir, fname), start=in_dir))
    finally:
        # close() writes the zip end-of-central-directory record; the original
        # never closed the archive, which can leave it truncated/corrupt.
        zf.close()
    return zip_path
39 | |
def __unzip(zip_path, target_dir):
    """Unzip a file to a given directory. All destination files are overwritten.

    The archive handle is closed explicitly (the original left it to the GC).
    """
    zf = zipfile.ZipFile(zip_path)
    try:
        zf.extractall(target_dir)
    finally:
        zf.close()
44 | |
45 def main(): | |
46 parser = argparse.ArgumentParser( | |
47 description="Compiles a Pure Data file.") | |
48 parser.add_argument( | |
49 "input_dir", | |
50 help="A directory containing _main.pd. The entire directory will be uploaded.") | |
51 parser.add_argument( | |
52 "-n", "--name", | |
53 default="heavy", | |
54 help="Patch name. If it doesn't exist, the uploader will fail. Make sure that it exists on the Heavy website.") | |
55 parser.add_argument( | |
56 "-g", "--gen", | |
57 nargs="+", | |
58 default=["c"], | |
59 help="List of generator outputs. Currently supported generators are 'c' and 'js'.") | |
60 parser.add_argument( | |
61 "-b", | |
62 help="All files will be placed in the output directory, placed in their own subdirectory corresonding to the generator name.", | |
63 action="count") | |
64 parser.add_argument( | |
65 "-o", "--out", | |
66 nargs="+", | |
67 default=["./"], # by default | |
68 help="List of destination directories for retrieved files. Order should be the same as for --gen.") | |
69 parser.add_argument( | |
70 "-d", "--domain", | |
71 default="https://enzienaudio.com", | |
72 help="Domain. Default is https://enzienaudio.com.") | |
73 parser.add_argument( | |
74 "-x", | |
75 help="Don't save the returned token.", | |
76 action="count") | |
77 parser.add_argument( | |
78 "-z", | |
79 help="Force the use of a password, regardless of saved token.", | |
80 action="count") | |
81 parser.add_argument( | |
82 "--noverify", | |
83 help="Don't verify the SSL connection. Generally a bad idea.", | |
84 action="count") | |
85 parser.add_argument( | |
86 "-v", "--verbose", | |
87 help="Show debugging information.", | |
88 action="count") | |
89 args = parser.parse_args() | |
90 | |
91 domain = args.domain or "https://enzienaudio.com" | |
92 | |
93 post_data = {} | |
94 | |
95 # token should be stored in ~/.heavy/token | |
96 token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token")) | |
97 if os.path.exists(token_path) and not args.z: | |
98 with open(token_path, "r") as f: | |
99 post_data["credentials"] = { | |
100 "token": f.read() | |
101 } | |
102 else: | |
103 # otherwise, get the username and password | |
104 post_data["credentials"] = { | |
105 "username": raw_input("Enter username: "), | |
106 "password": getpass.getpass("Enter password: ") | |
107 } | |
108 | |
109 tick = time.time() | |
110 | |
111 # make a temporary directory | |
112 temp_dir = tempfile.mkdtemp(prefix="lroyal-") | |
113 | |
114 # zip up the pd directory into the temporary directory | |
115 try: | |
116 if not os.path.exists(os.path.join(args.input_dir, "_main.pd")): | |
117 raise Exception("Root Pd directory does not contain a file named _main.pd.") | |
118 zip_path = __zip_dir( | |
119 args.input_dir, | |
120 os.path.join(temp_dir, "archive.zip"), | |
121 file_filter={"pd"}) | |
122 except Exception as e: | |
123 print e | |
124 shutil.rmtree(temp_dir) # clean up the temporary directory | |
125 return | |
126 | |
127 post_data["name"] = args.name | |
128 | |
129 # the outputs to generate (always include c) | |
130 __SUPPORTED_GENERATOR_SET = {"c", "js"} | |
131 post_data["gen"] = list(({"c"} | set(args.gen)) & __SUPPORTED_GENERATOR_SET) | |
132 | |
133 # upload the job, get the response back | |
134 # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary, | |
135 # but we want to send a json encoded deep dictionary. So we do a bit of a hack. | |
136 r = requests.post( | |
137 urlparse.urljoin(domain, "/a/heavy"), | |
138 data={"json":json.dumps(post_data)}, | |
139 files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")}, | |
140 verify=False if args.noverify else True) | |
141 | |
142 if r.status_code != requests.codes.ok: | |
143 shutil.rmtree(temp_dir) # clean up the temporary directory | |
144 r.raise_for_status() # raise an exception | |
145 | |
146 # decode the JSON API response | |
147 r_json = r.json() | |
148 | |
149 """ | |
150 { | |
151 "data": { | |
152 "compileTime": 0.05078411102294922, | |
153 "id": "mhroth/asdf/Edp2G", | |
154 "slug": "Edp2G", | |
155 "index": 3, | |
156 "links": { | |
157 "files": { | |
158 "linkage": [ | |
159 { | |
160 "id": "mhroth/asdf/Edp2G/c", | |
161 "type": "file" | |
162 } | |
163 ], | |
164 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files" | |
165 }, | |
166 "project": { | |
167 "linkage": { | |
168 "id": "mhroth/asdf", | |
169 "type": "project" | |
170 }, | |
171 "self": "https://enzienaudio.com/h/mhroth/asdf" | |
172 }, | |
173 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G", | |
174 "user": { | |
175 "linkage": { | |
176 "id": "mhroth", | |
177 "type": "user" | |
178 }, | |
179 "self": "https://enzienaudio.com/h/mhroth" | |
180 } | |
181 }, | |
182 "type": "job" | |
183 }, | |
184 "included": [ | |
185 { | |
186 "filename": "file.c.zip", | |
187 "generator": "c", | |
188 "id": "mhroth/asdf/Edp2G/c", | |
189 "links": { | |
190 "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip" | |
191 }, | |
192 "mime": "application/zip", | |
193 "type": "file" | |
194 } | |
195 ], | |
196 "warnings": [], | |
197 "meta": { | |
198 "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz" | |
199 } | |
200 } | |
201 """ | |
202 reply_json = r.json() | |
203 if args.verbose: | |
204 print json.dumps( | |
205 reply_json, | |
206 sort_keys=True, | |
207 indent=2, | |
208 separators=(",", ": ")) | |
209 | |
210 # update the api token, if present | |
211 if "token" in reply_json.get("meta",{}) and not args.x: | |
212 if not os.path.exists(os.path.dirname(token_path)): | |
213 os.makedirs(os.path.dirname(token_path)) # ensure that the .heavy directory exists | |
214 with open(token_path, "w") as f: | |
215 f.write(reply_json["meta"]["token"]) | |
216 os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) # force rw------- permissions on the file | |
217 | |
218 # print any warnings | |
219 for x in r_json["warnings"]: | |
220 print "{0}Warning:{1} {2}".format(Colours.yellow, Colours.end, x["detail"]) | |
221 | |
222 # check for errors | |
223 if len(r_json.get("errors",[])) > 0: | |
224 shutil.rmtree(temp_dir) # clean up the temporary directory | |
225 for x in r_json["errors"]: | |
226 print "{0}Error:{1} {2}".format(Colours.red, Colours.end, x["detail"]) | |
227 sys.exit(1) | |
228 return | |
229 | |
230 # retrieve all requested files | |
231 for i,g in enumerate(args.gen): | |
232 file_url = __get_file_url_for_generator(reply_json, g) | |
233 if file_url is not None and (len(args.out) > i or args.b): | |
234 r = requests.get( | |
235 file_url, | |
236 cookies={"token": reply_json["meta"]["token"]}, | |
237 verify=False if args.noverify else True) | |
238 r.raise_for_status() | |
239 | |
240 # write the reply to a temporary file | |
241 c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g)) | |
242 with open(c_zip_path, "wb") as f: | |
243 f.write(r.content) | |
244 | |
245 # unzip the files to where they belong | |
246 if args.b: | |
247 target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g) | |
248 else: | |
249 target_dir = os.path.abspath(os.path.expanduser(args.out[i])) | |
250 if not os.path.exists(target_dir): | |
251 os.makedirs(target_dir) # ensure that the output directory exists | |
252 __unzip(c_zip_path, target_dir) | |
253 | |
254 print "{0} files placed in {1}".format(g, target_dir) | |
255 else: | |
256 print "{0}Warning:{1} {2} files could not be retrieved.".format( | |
257 Colours.yellow, Colours.end, | |
258 g) | |
259 | |
260 # delete the temporary directory | |
261 shutil.rmtree(temp_dir) | |
262 | |
263 print "Job URL", reply_json["data"]["links"]["self"] | |
264 print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick))) | |
265 | |
266 sys.exit(0) | |
267 | |
def __get_file_url_for_generator(json_api, g):
    """Return the download URL of the first "included" file entry whose
    generator matches *g*, or None when the API response has no such entry.
    """
    matches = (
        entry["links"]["self"]
        for entry in json_api["included"]
        if entry["generator"] == g)
    return next(matches, None)
276 | |
277 | |
278 | |
# script entry point: run the uploader only when executed directly,
# not when imported as a module
if __name__ == "__main__":
    main()