changeset 550:5d954690a7c1 prerelease

merge
author Giulio Moro <giuliomoro@yahoo.it>
date Fri, 24 Jun 2016 14:55:12 +0100
parents a2096488a21a (current diff) ff0e9e827dcd (diff)
children c6ccaf53381a
files
diffstat 3 files changed, 382 insertions(+), 319 deletions(-) [+]
line wrap: on
line diff
--- a/Makefile	Fri Jun 24 14:12:22 2016 +0100
+++ b/Makefile	Fri Jun 24 14:55:12 2016 +0100
@@ -3,11 +3,23 @@
 # (c) 2016 Andrew McPherson, Victor Zappi, Giulio Moro, Liam Donovan
 # Centre for Digital Music, Queen Mary University of London
 
-# This Makefile is intended for use on the BeagleBone Black itself
-# and not for cross-compiling
-
+## This Makefile is intended for use on the BeagleBone Black itself #
+## and not for cross-compiling #
+## available command line options: #
+## EXAMPLE=             -- name of the folder in examples/ to be copied to projects/ and built
+## PROJECT=             -- name of the folder in projects/ to be built
+## CL=                  -- list of command line options to pass to the program when running
+## INCLUDES=            -- list of additional include paths to pass to the compiler
+## CPP_FLAGS=           -- list of additional flags passed to the C++ compiler
+## C_FLAGS=             -- list of additional flags passed to the C compiler
+## COMPILER=            -- compiler to use (clang or gcc)
+## LIBS=                -- libs to link in
+## AT=                  -- used instead of @ to silence the output. Defaults AT=@, use AT= for a very verbose output
+###
+##available targets: #
 .DEFAULT_GOAL := Bela
 
+AT?=@
 NO_PROJECT_TARGETS=help coreclean distclean stop nostartup idestart idestop idestartup idenostartup connect ideconnect update checkupdate updateunsafe
 NO_PROJECT_TARGETS_MESSAGE=PROJECT or EXAMPLE should be set for all targets except: $(NO_PROJECT_TARGETS)
 # list of targets that automatically activate the QUIET=true flag
@@ -15,10 +27,11 @@
 
 # Type `$ make help` to get a description of the functionalities of this Makefile.
 help: ## Show this help
-	@echo 'Usage: make [target] CL=[command line options] [PROJECT=[projectName] | EXAMPLE=[exampleName]]'
-	@printf "\n$(NO_PROJECT_TARGETS_MESSAGE)\n\n"
-	@echo 'Targets: (default: $(.DEFAULT_GOAL))'
-	@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e  's/^\(.*\): .*##\(.*\)/\1:#\2/' | column -t -c 2 -s '#'
+	$(AT) echo 'Usage: make [target] CL=[command line options] [PROJECT=[projectName] | EXAMPLE=[exampleName]]'
+	$(AT) printf "\n$(NO_PROJECT_TARGETS_MESSAGE)\n\n"
+	$(AT) echo 'Targets: (default: $(.DEFAULT_GOAL))'
+	$(AT) echo list: $(MAKEFILE_LIST)
+	$(AT) fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/^\(.*\): .*##\(.*\)/\1:#\2/' | sed -e 's/^\(.*\)= .* -- \(.*\)/\1=#\2/' | sed 's/^##//' | awk -F"#" '{ printf "%-18s %-1s\n", $$1, $$2}' 
 
 # PROJECT or EXAMPLE must be set for targets that are not included in NO_PROJECT_TARGETS
 ifeq (,$(filter $(NO_PROJECT_TARGETS),$(MAKECMDGOALS)))
@@ -68,7 +81,7 @@
 
 RM := rm -rf
 STATIC_LIBS := ./libprussdrv.a ./libNE10.a
-LIBS := -lrt -lnative -lxenomai -lsndfile
+override LIBS += -lrt -lnative -lxenomai -lsndfile
 
 # refresh library cache and check if libpd is there
 #TEST_LIBPD := $(shell ldconfig; ldconfig -p | grep "libpd\.so")  # safest but slower way of checking
@@ -78,9 +91,10 @@
 # if libpd is there, link it in
   LIBS += -lpd -lpthread_rt
 endif
+DEFAULT_CPP_FLAGS := -O3 -march=armv7-a -mtune=cortex-a8 -mfloat-abi=hard -mfpu=neon -ftree-vectorize 
+override CPP_FLAGS := $(DEFAULT_CPP_FLAGS) $(CPP_FLAGS)
+override C_FLAGS := $(DEFAULT_CPP_FLAGS) $(C_FLAGS)
 
-CPP_FLAGS := -O3 -march=armv7-a -mtune=cortex-a8 -mfloat-abi=hard -mfpu=neon -ftree-vectorize 
-C_FLAGS := $(CPP_FLAGS)
 
 ifndef COMPILER
 # check whether clang is installed
@@ -98,14 +112,17 @@
   CXX=clang++
   CPP_FLAGS += -DNDEBUG 
   C_FLAGS += -DNDEBUG
-else
-  CC=gcc
-  CXX=g++
-  CPP_FLAGS += --fast-math
-  C_FLAGS += --fast-math
+else 
+  ifeq ($(COMPILER), gcc)
+    CC=gcc
+    CXX=g++
+    CPP_FLAGS += --fast-math
+    C_FLAGS += --fast-math
+  endif
 endif
 
-INCLUDES := -I$(PROJECT_DIR) -I./include -I/usr/include/ne10 -I/usr/xenomai/include -I/usr/arm-linux-gnueabihf/include/xenomai/include -I/usr/arm-linux-gnueabihf/include/ne10
+DEFAULT_INCLUDES := -I$(PROJECT_DIR) -I./include -I/usr/include/ne10 -I/usr/xenomai/include -I/usr/arm-linux-gnueabihf/include/xenomai/include -I/usr/arm-linux-gnueabihf/include/ne10
+override INCLUDES += $(DEFAULT_INCLUDES)
 
 ASM_SRCS := $(wildcard $(PROJECT_DIR)/*.S)
 ASM_OBJS := $(addprefix $(PROJECT_DIR)/build/,$(notdir $(ASM_SRCS:.S=.o)))
@@ -164,43 +181,43 @@
 
 # Rule for Bela core C++ files
 build/core/%.o: ./core/%.cpp
-	@echo 'Building $(notdir $<)...'
-#	@echo 'Invoking: C++ Compiler $(CXX)'
-	@$(CXX) $(SYNTAX_FLAG) $(INCLUDES) $(CPP_FLAGS) -Wall -c -fmessage-length=0 -U_FORTIFY_SOURCE -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<"
-	@echo ' ...done'
-	@echo ' '
+	$(AT) echo 'Building $(notdir $<)...'
+#	$(AT) echo 'Invoking: C++ Compiler $(CXX)'
+	$(AT) $(CXX) $(SYNTAX_FLAG) $(INCLUDES) $(CPP_FLAGS) -Wall -c -fmessage-length=0 -U_FORTIFY_SOURCE -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<"
+	$(AT) echo ' ...done'
+	$(AT) echo ' '
 
 # Rule for Bela core ASM files
 build/core/%.o: ./core/%.S
-	@echo 'Building $(notdir $<)...'
-#	@echo 'Invoking: GCC Assembler'
-	@as  -o "$@" "$<"
-	@echo ' ...done'
-	@echo ' '
+	$(AT) echo 'Building $(notdir $<)...'
+#	$(AT) echo 'Invoking: GCC Assembler'
+	$(AT) as  -o "$@" "$<"
+	$(AT) echo ' ...done'
+	$(AT) echo ' '
 
 # Rule for user-supplied C++ files
 $(PROJECT_DIR)/build/%.o: $(PROJECT_DIR)/%.cpp
-	@echo 'Building $(notdir $<)...'
-#	@echo 'Invoking: C++ Compiler $(CXX)'
-	@$(CXX) $(SYNTAX_FLAG) $(INCLUDES) $(CPP_FLAGS) -Wall -c -fmessage-length=0 -U_FORTIFY_SOURCE -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<"
-	@echo ' ...done'
-	@echo ' '
+	$(AT) echo 'Building $(notdir $<)...'
+#	$(AT) echo 'Invoking: C++ Compiler $(CXX)'
+	$(AT) $(CXX) $(SYNTAX_FLAG) $(INCLUDES) $(CPP_FLAGS) -Wall -c -fmessage-length=0 -U_FORTIFY_SOURCE -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<"
+	$(AT) echo ' ...done'
+	$(AT) echo ' '
 
 # Rule for user-supplied C files
 $(PROJECT_DIR)/build/%.o: $(PROJECT_DIR)/%.c
-	@echo 'Building $(notdir $<)...'
-#	@echo 'Invoking: C Compiler $(CC)'
-	@$(CC) $(SYNTAX_FLAG) $(INCLUDES) $(C_FLAGS) -Wall -c -fmessage-length=0 -U_FORTIFY_SOURCE -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<" -std=c99 
-	@echo ' ...done'
-	@echo ' '
+	$(AT) echo 'Building $(notdir $<)...'
+#	$(AT) echo 'Invoking: C Compiler $(CC)'
+	$(AT) $(CC) $(SYNTAX_FLAG) $(INCLUDES) $(C_FLAGS) -Wall -c -fmessage-length=0 -U_FORTIFY_SOURCE -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<" -std=c99 
+	$(AT) echo ' ...done'
+	$(AT) echo ' '
 
 # Rule for user-supplied assembly files
 $(PROJECT_DIR)/build/%.o: $(PROJECT_DIR)/%.S
-	@echo 'Building $(notdir $<)...'
-#	@echo 'Invoking: GCC Assembler'
-	@as  -o "$@" "$<"
-	@echo ' ...done'
-	@echo ' '
+	$(AT) echo 'Building $(notdir $<)...'
+#	$(AT) echo 'Invoking: GCC Assembler'
+	$(AT) as  -o "$@" "$<"
+	$(AT) echo ' ...done'
+	$(AT) echo ' '
 
 # This is a nasty kludge: we want to be able to optionally link in a default
 # main file if the user hasn't supplied one. We check for the presence of the main()
@@ -210,15 +227,15 @@
 $(OUTPUT_FILE): $(CORE_ASM_OBJS) $(CORE_OBJS) $(PROJECT_OBJS) $(STATIC_LIBS) $(DEFAULT_MAIN_OBJS) $(DEFAULT_PD_OBJS)
 	$(eval DEFAULT_MAIN_CONDITIONAL :=\
 	    $(shell bash -c '[ `nm $(PROJECT_OBJS) 2>/dev/null | grep -w T | grep -w main | wc -l` == '0' ] && echo "$(DEFAULT_MAIN_OBJS)" || : '))
-	@#If there is a .pd file AND there is no "render" symbol then link in the $(DEFAULT_PD_OBJS) 
+	$(AT) #If there is a .pd file AND there is no "render" symbol then link in the $(DEFAULT_PD_OBJS) 
 	$(eval DEFAULT_PD_CONDITIONAL :=\
 	    $(shell bash -c '{ ls $(PROJECT_DIR)/*.pd &>/dev/null && [ `nm $(PROJECT_OBJS) 2>/dev/null | grep -w T | grep "render.*BelaContext" | wc -l` -eq 0 ]; } && echo '$(DEFAULT_PD_OBJS)' || : ' ))
-	@echo 'Linking...'
-	@$(CXX) $(SYNTAX_FLAG) -L/usr/xenomai/lib -L/usr/arm-linux-gnueabihf/lib -L/usr/arm-linux-gnueabihf/lib/xenomai -L/usr/lib/arm-linux-gnueabihf -pthread -Wpointer-arith -o "$(PROJECT_DIR)/$(PROJECT)" $(CORE_ASM_OBJS) $(CORE_OBJS) $(DEFAULT_MAIN_CONDITIONAL) $(DEFAULT_PD_CONDITIONAL) $(ASM_OBJS) $(C_OBJS) $(CPP_OBJS) $(STATIC_LIBS) $(LIBS)
-	@echo ' ...done'
+	$(AT) echo 'Linking...'
+	$(AT) $(CXX) $(SYNTAX_FLAG) $(LDFLAGS) -L/usr/xenomai/lib -L/usr/arm-linux-gnueabihf/lib -L/usr/arm-linux-gnueabihf/lib/xenomai -L/usr/lib/arm-linux-gnueabihf -pthread -Wpointer-arith -o "$(PROJECT_DIR)/$(PROJECT)" $(CORE_ASM_OBJS) $(CORE_OBJS) $(DEFAULT_MAIN_CONDITIONAL) $(DEFAULT_PD_CONDITIONAL) $(ASM_OBJS) $(C_OBJS) $(CPP_OBJS) $(STATIC_LIBS) $(LIBS)
+	$(AT) echo ' ...done'
 	
 # Other Targets:
-projectclean:## Remove the PROJECT's build objects & binary
+projectclean: ## Remove the PROJECT's build objects & binary
 	-$(RM) $(PROJECT_DIR)/build/* $(OUTPUT_FILE)
 	-@echo ' '	
 
@@ -229,8 +246,8 @@
 	-$(RM) build/core/*
 
 prompt:
-	@printf "Warning: you are about to DELETE the projects/ folder and its content. This operation cannot be undone. Continue? (y/N) "
-	@read REPLY; if [ $$REPLY !=  y ] && [ $$REPLY != Y ]; then echo "Aborting..."; exit 1; fi
+	$(AT) printf "Warning: you are about to DELETE the projects/ folder and its content. This operation cannot be undone. Continue? (y/N) "
+	$(AT) read REPLY; if [ $$REPLY !=  y ] && [ $$REPLY != Y ]; then echo "Aborting..."; exit 1; fi
 	
 distclean: ## Restores the Bela folder to a pristine state: remove all the projects source and the built objects, including the core Bela objects.
 distclean: prompt distcleannoprompt
@@ -239,70 +256,74 @@
 	-$(RM) build/source/* $(CORE_OBJS) $(CORE_CPP_DEPS) $(DEFAULT_MAIN_OBJS) $(DEFAULT_MAIN_CPP_DEPS) $(OUTPUT_FILE)
 	-@echo ' '
 
+$(warning C_FLAGS $(C_FLAGS))
+$(warning CPP_FLAGS $(CPP_FLAGS))
+$(warning LIBS $(LIBS))
+$(warning INCLUDES $(INCLUDES))
 runfg: run
 run: ## Run PROJECT in the foreground
 run: stop Bela
-	@echo "Running $(RUN_COMMAND)"
-	@sync& cd $(RUN_FROM) && $(RUN_COMMAND)
+	$(AT) echo "Running $(RUN_COMMAND)"
+	$(AT) sync& cd $(RUN_FROM) && $(RUN_COMMAND)
 runide: ## Run PROJECT for IDE (foreground, no buffering)
 runide: stop Bela
-	@sync& cd $(RUN_FROM) && $(RUN_IDE_COMMAND)
+	$(AT) sync& cd $(RUN_FROM) && $(RUN_IDE_COMMAND)
 runscreen: ## Run PROJECT in the background (detached screen)
 runscreen: stop $(OUTPUT_FILE)
-	@echo "Running $(RUN_COMMAND) in a screen"
-	@cd $(RUN_FROM) && screen -S $(SCREEN_NAME) -d -m $(RUN_COMMAND)
+	$(AT) echo "Running $(RUN_COMMAND) in a screen"
+	$(AT) cd $(RUN_FROM) && screen -S $(SCREEN_NAME) -d -m $(RUN_COMMAND)
 runscreenfg: ## Run PROJECT in a screen in the foreground (can detach with ctrl-a ctrl-d)
 runscreenfg: stop $(OUTPUT_FILE)
-	@echo "Running $(RUN_COMMAND) in a screen"
-	@cd $(RUN_FROM) && screen -S $(SCREEN_NAME) -m $(RUN_COMMAND)
+	$(AT) echo "Running $(RUN_COMMAND) in a screen"
+	$(AT) cd $(RUN_FROM) && screen -S $(SCREEN_NAME) -m $(RUN_COMMAND)
 
 STARTUP_COMMAND=printf "\#!/bin/sh\n\#\n\# This file is autogenerated by Bela. Do not edit!\n\necho Running Bela...\nscreen -S $(SCREEN_NAME) -d -m %s $(RUN_COMMAND) %s\n"
 nostartup: ## No Bela project runs at startup 
 nostartup:
-	@echo "Disabling Bela at startup..."
-	@printf "#!/bin/sh\n#\n\n# This file is autogenerated by Bela. Do not edit!\n\n# Run on startup disabled -- nothing to do here\n" > $(BELA_STARTUP_SCRIPT)
+	$(AT) echo "Disabling Bela at startup..."
+	$(AT) printf "#!/bin/sh\n#\n\n# This file is autogenerated by Bela. Do not edit!\n\n# Run on startup disabled -- nothing to do here\n" > $(BELA_STARTUP_SCRIPT)
 
 startuploop: ## Makes PROJECT run at startup and restarts it if it crashes
 startuploop: Bela
-	@echo "Enabling Bela at startup in a loop..."
-	@$(STARTUP_COMMAND) 'bash -c "while sleep 0.5 ; do echo Running Bela...;' '; done"' > $(BELA_STARTUP_SCRIPT)
+	$(AT) echo "Enabling Bela at startup in a loop..."
+	$(AT) $(STARTUP_COMMAND) 'bash -c "while sleep 0.5 ; do echo Running Bela...;' '; done"' > $(BELA_STARTUP_SCRIPT)
 
 startup: ## Makes PROJECT run at startup
 startup: Bela
-	@echo "Enabling Bela at startup..."
-	@$(STARTUP_COMMAND) > $(BELA_STARTUP_SCRIPT)
-	@chmod +x $(BELA_STARTUP_SCRIPT)
+	$(AT) echo "Enabling Bela at startup..."
+	$(AT) $(STARTUP_COMMAND) > $(BELA_STARTUP_SCRIPT)
+	$(AT) chmod +x $(BELA_STARTUP_SCRIPT)
 stop: ## Stops any Bela program that is currently running
 stop:
-	@PID=`grep $(BELA_AUDIO_THREAD_NAME) /proc/xenomai/stat | cut -d " " -f 5 | sed s/\s//g`; if [ -z $$PID ]; then [ $(QUIET) = true ] || echo "No process to kill"; else [  $(QUIET) = true  ] || echo "Killing old Bela process $$PID"; kill -2 $$PID; fi; screen -X -S $(SCREEN_NAME) quit > /dev/null; exit 0;
+	$(AT) PID=`grep $(BELA_AUDIO_THREAD_NAME) /proc/xenomai/stat | cut -d " " -f 5 | sed s/\s//g`; if [ -z $$PID ]; then [ $(QUIET) = true ] || echo "No process to kill"; else [  $(QUIET) = true  ] || echo "Killing old Bela process $$PID"; kill -2 $$PID; fi; screen -X -S $(SCREEN_NAME) quit > /dev/null; exit 0;
 
 connect: ## Connects to the running Bela program (if any), can detach with ctrl-a ctrl-d.
-	@screen -r -S $(SCREEN_NAME)
+	$(AT) screen -r -S $(SCREEN_NAME)
 	
 idestart: ## Starts the on-board IDE
 idestart: idestop
-	@printf "Starting IDE..."
-	@$(BELA_IDE_RUN_COMMAND)
-	@printf "done\n"
+	$(AT) printf "Starting IDE..."
+	$(AT) $(BELA_IDE_RUN_COMMAND)
+	$(AT) printf "done\n"
 
 idestop: ## Stops the on-board IDE
-	@printf "Stopping currently running IDE..."
-	@screen -X -S $(BELA_IDE_SCREEN_NAME) quit > /dev/null; exit 0;
-	@printf "done\n"
+	$(AT) printf "Stopping currently running IDE..."
+	$(AT) screen -X -S $(BELA_IDE_SCREEN_NAME) quit > /dev/null; exit 0;
+	$(AT) printf "done\n"
 
 BELA_IDE_STARTUP_COMMAND=printf '\#!/bin/sh\n\#\n\# This file is autogenerated by Bela. Do not edit!\n\necho Running the Bela IDE...\n$(BELA_IDE_RUN_COMMAND)\n' > $(BELA_IDE_STARTUP_SCRIPT)
 
 idestartup: ## Enables the IDE at startup
-	@echo "Enabling the IDE at startup"
-	@$(BELA_IDE_STARTUP_COMMAND)
-	@chmod +x $(BELA_IDE_STARTUP_SCRIPT)
+	$(AT) echo "Enabling the IDE at startup"
+	$(AT) $(BELA_IDE_STARTUP_COMMAND)
+	$(AT) chmod +x $(BELA_IDE_STARTUP_SCRIPT)
 
 idenostartup: ## Disables the IDE at startup
-	@echo "Disabling the IDE at startup"
-	@printf "#!/bin/sh\n#\n\n# This file is autogenerated by Bela. Do not edit!\n\n# The Bela IDE is disabled on startup.\n" > $(BELA_IDE_STARTUP_SCRIPT)
+	$(AT) echo "Disabling the IDE at startup"
+	$(AT) printf "#!/bin/sh\n#\n\n# This file is autogenerated by Bela. Do not edit!\n\n# The Bela IDE is disabled on startup.\n" > $(BELA_IDE_STARTUP_SCRIPT)
 
 ideconnect: ## Brings in the foreground the IDE that currently is running in a screen (if any), can detach with ctrl-a ctrl-d.
-	@screen -r -S $(BELA_IDE_SCREEN_NAME)
+	$(AT) screen -r -S $(BELA_IDE_SCREEN_NAME)
 
 BELA_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
 UPDATES_DIR?=/root/Bela/updates
@@ -314,54 +335,60 @@
 UPDATE_BELA_MV_BACKUP?=/tmp/belaMvBak
 
 updateclean: ## Cleans the $(UPDATES_DIR) folder
-	@[ -n $(UPDATE_DIR) ] && rm -rf $(UPDATE_DIR) && mkdir -p $(UPDATE_DIR)
+	$(AT) [ -n $(UPDATE_DIR) ] && rm -rf $(UPDATE_DIR) && mkdir -p $(UPDATE_DIR)
 
 checkupdate: ## Unzips the zip file in $(UPDATES_DIR) and checks that it contains a valid
-	@echo Validating archive...
-	@cd $(UPDATES_DIR) && COUNT=`ls -l *.zip | wc -l` && [ $$COUNT -eq 1 ] && rm -rf `ls | grep -v "\.zip$$"`
-	@#TODO: heuristics on available space. Use unzip -l and df
-	@echo uncompressed size: `unzip -l \`ls $(UPDATES_DIR)/*.zip\` | tail -n1 | awk '{print $$1}'`
-	@# Delete and re-create the temp directory (first, make sure it is not an empty string!)
-	@[ -n $(UPDATE_SOURCE_DIR_BASE) ] && rm -rf $(UPDATE_SOURCE_DIR_BASE) && mkdir -p $(UPDATE_SOURCE_DIR_BASE)
-	@echo Unzipping archive...
-	@cd $(UPDATE_SOURCE_DIR_BASE) && unzip -qq $(UPDATES_DIR)/*zip
+	$(AT) echo Validating archive...
+	$(AT) cd $(UPDATES_DIR) && COUNT=`ls -l *.zip | wc -l` && [ $$COUNT -eq 1 ] && rm -rf `ls | grep -v "\.zip$$"`
+	$(AT) #TODO: heuristics on available space. Use unzip -l and df
+	$(AT) echo uncompressed size: `unzip -l \`ls $(UPDATES_DIR)/*.zip\` | tail -n1 | awk '{print $$1}'`
+	$(AT) # Delete and re-create the temp directory (first, make sure it is not an empty string!)
+	$(AT) [ -n $(UPDATE_SOURCE_DIR_BASE) ] && rm -rf $(UPDATE_SOURCE_DIR_BASE) && mkdir -p $(UPDATE_SOURCE_DIR_BASE)
+	$(AT) echo Unzipping archive...
+	$(AT) cd $(UPDATE_SOURCE_DIR_BASE) && unzip -qq $(UPDATES_DIR)/*zip
 #TODO: this should not be needed. Remove comments.  Strip the top-level folder ( if there is only one )
 #@DIR=`ls -d $(UPDATE_SOURCE_DIR)` && COUNT=`echo $$DIR | wc -l` &&\
 	  [ $$COUNT -eq 1 ] && mv $(UPDATE_SOURCE_DIR)/* /tmp/supertemp && rm -rf $(UPDATE_SOURCE_DIR) && mv /tmp/supertemp $(UPDATE_SOURCE_DIR)
 	
-	@echo Validating unzipped archive...
-	@cd $(UPDATE_SOURCE_DIR) && FAIL=0 && for path in $(UPDATE_REQUIRED_PATHS); do `ls $$path >/dev/null 2>&1` || { FAIL=1; break; }; done;\
+	$(AT) echo Validating unzipped archive...
+	$(AT) cd $(UPDATE_SOURCE_DIR) && FAIL=0 && for path in $(UPDATE_REQUIRED_PATHS); do `ls $$path >/dev/null 2>&1` || { FAIL=1; break; }; done;\
 	  [ $$FAIL -eq 0 ] || { echo "$$path was not found in the zip archive. Maybe it is corrupted?"; exit 1; }
-	@echo 	...done
+	$(AT) echo 	...done
 UPDATE_LOG?=~/update.log
 LOG=>> $(UPDATE_LOG) 2>&1
 updateunsafe: ## Installs the update from $(UPDATES_DIR) in a more brick-friendly way
-	@echo > $(UPDATE_LOG)
+	$(AT) echo > $(UPDATE_LOG)
 	# Re-perform the check, just in case ...	
-	@cd $(UPDATE_SOURCE_DIR) && FAIL=0 && for path in $(UPDATE_REQUIRED_PATHS); do `ls $$path >/dev/null 2>&1` || { FAIL=1; break; }; done;\
+	$(AT) cd $(UPDATE_SOURCE_DIR) && FAIL=0 && for path in $(UPDATE_REQUIRED_PATHS); do `ls $$path >/dev/null 2>&1` || { FAIL=1; break; }; done;\
 	  [ $$FAIL -eq 0 ] || { echo "$$path was not found in the zip archive. Maybe it is corrupted?"; exit 1; }
-	@cd $(UPDATE_SOURCE_DIR)/scripts && BBB_ADDRESS=root@127.0.0.1 BBB_BELA_HOME=$(BELA_DIR) ./update_board -y --no-frills
-	@screen -S update-Bela -d -m bash -c "echo Restart the IDE $(LOG) &&\
+	$(AT) cd $(UPDATE_SOURCE_DIR)/scripts && BBB_ADDRESS=root@127.0.0.1 BBB_BELA_HOME=$(BELA_DIR) ./update_board -y --no-frills
+	$(AT) screen -S update-Bela -d -m bash -c "echo Restart the IDE $(LOG) &&\
 	  $(MAKE) --no-print-directory idestart $(LOG) && echo Update succesful $(LOG);" $(LOG)
 update: ## Installs the update from $(UPDATES_DIR)
 update: stop
-	@# Truncate the log file
-	@echo > $(UPDATE_LOG)
-	@echo Re-perform the check, just in case ... >> $(UPDATE_LOG)
-	@cd $(UPDATE_SOURCE_DIR) && FAIL=0 && for path in $(UPDATE_REQUIRED_PATHS); do `ls $$path >/dev/null 2>&1` || { FAIL=1; break; }; done;\
+	$(AT) # Truncate the log file
+	$(AT) echo > $(UPDATE_LOG)
+	$(AT) echo Re-perform the check, just in case ... >> $(UPDATE_LOG)
+	$(AT) cd $(UPDATE_SOURCE_DIR) && FAIL=0 && for path in $(UPDATE_REQUIRED_PATHS); do `ls $$path >/dev/null 2>&1` || { FAIL=1; break; }; done;\
 	  [ $$FAIL -eq 0 ] || { echo "$$path was not found in the zip archive. Maybe it is corrupted?"; exit 1; }
-	@[ -n $(UPDATE_BELA_PATCH) ] && mkdir -p $(UPDATE_BELA_PATCH)
-	@#TODO: this would allow to trim trailing slashes in case we want to be safer: a="`pwd`/" ; target=${a%/} ; echo $target
-	@$(MAKE) --no-print-directory coreclean
-	@echo Backing up $(BELA_DIR) to $(UPDATE_BELA_PATCH) ... | tee -a $(UPDATE_LOG)
-	@rsync -a --delete-during --exclude Documentation $(BELA_DIR)/ $(UPDATE_BELA_PATCH)
-	@echo Backing up $(BELA_DIR) to $(UPDATE_BELA_BACKUP) ... | tee -a $(UPDATE_LOG)
-	@[ -n $(UPDATE_BELA_BACKUP) ] && mkdir -p $(UPDATE_BELA_BACKUP)
-	@rsync -a --delete-during $(BELA_DIR)/ $(UPDATE_BELA_BACKUP)
-	@echo Running update script... | tee -a $(UPDATE_LOG)
-	@cd $(UPDATE_SOURCE_DIR)/scripts && BBB_ADDRESS=root@127.0.0.1 BBB_BELA_HOME=$(UPDATE_BELA_PATCH) ./update_board -y --no-frills
-	@echo Restoring directory structure... | tee -a $(UPDATE_LOG)
-	@screen -S update-Bela -d -m bash -c '\
+	$(AT) [ -n $(UPDATE_BELA_PATCH) ] && mkdir -p $(UPDATE_BELA_PATCH)
+	$(AT) #TODO: this would allow to trim trailing slashes in case we want to be safer: a="`pwd`/" ; target=${a%/} ; echo $target
+	$(AT) $(MAKE) --no-print-directory coreclean
+	$(AT) echo Backing up $(BELA_DIR) to $(UPDATE_BELA_PATCH) ... | tee -a $(UPDATE_LOG)
+	$(AT) rsync -a --delete-during --exclude Documentation $(BELA_DIR)/ $(UPDATE_BELA_PATCH)
+	$(AT) echo Backing up $(BELA_DIR) to $(UPDATE_BELA_BACKUP) ... | tee -a $(UPDATE_LOG)
+	$(AT) [ -n $(UPDATE_BELA_BACKUP) ] && mkdir -p $(UPDATE_BELA_BACKUP)
+	$(AT) rsync -a --delete-during $(BELA_DIR)/ $(UPDATE_BELA_BACKUP)
+	$(AT) echo Running update script... | tee -a $(UPDATE_LOG)
+	$(AT) cd $(UPDATE_SOURCE_DIR)/scripts && BBB_ADDRESS=root@127.0.0.1 BBB_BELA_HOME=$(UPDATE_BELA_PATCH) ./update_board -y --no-frills
+	$(AT) # If everything went ok, we now have the updated version of $(BELA_DIR) in $(UPDATE_BELA_PATCH) and a backup of $(BELA_DIR) in $(UPDATE_BELA_BACKUP)
+	$(AT) # So let's operate the magic swap. $(BELA_DIR) is moved to $(UPDATE_BELA_MV_BACKUP) and $(UPDATE_BELA_PATCH) is moved to $(BELA_DIR).
+	$(AT) # If something goes wrong at this stage, you can always find your old $(BELA_DIR) folder at $(UPDATE_BELA_BACKUP)
+	$(AT) # The fun part is that this Makefile is moved as well...
+	$(AT) # We are about to kill the IDE, so just in case you are running this from within the IDE, we run the remainder of this update in a screen.
+	$(AT) # Output will be logged to $(UPDATE_LOG)
+	$(AT) echo Restoring directory structure... | tee -a $(UPDATE_LOG)
+	$(AT) screen -S update-Bela -d -m bash -c '\
 	  [ -n $(UPDATE_BELA_MV_BACKUP) ] $(LOG) && rm -rf $(UPDATE_BELA_MV_BACKUP) $(LOG) &&\
 	  echo Kill the IDE $(LOG) && \
 	  $(MAKE) --no-print-directory idestop $(LOG) &&\
--- a/scripts/build_pd_heavy.sh	Fri Jun 24 14:12:22 2016 +0100
+++ b/scripts/build_pd_heavy.sh	Fri Jun 24 14:55:12 2016 +0100
@@ -126,28 +126,31 @@
 #TODO: get a reliable, exhaustive, up-to-date list.
 HEAVY_FILES='Heavy* Hv*'
 
+check_board_alive
 set_date
 reference_time_file="$projectpath"/
 
 uploadBuildRun(){
     if [ $NO_UPLOAD -eq 0 ]; then
         # remove old static files to avoid obsolete errors
-	# make sure the path is not empty, so avoiding to rm -rf / by mistake 
-	[ -z $projectpath ] && { echo 'ERROR: $projectpath is empty.'; exit 0; } 
+        # make sure the path is not empty, so avoiding to rm -rf / by mistake 
+        [ -z $projectpath ] && { echo 'ERROR: $projectpath is empty.'; exit 0; } 
         # use -rf to prevent warnings in case they do not exist
         for file in $HEAVY_FILES
-	do 
-	    rm -rf "$projectpath"/$file
-	done
+	    do 
+	        rm -rf "$projectpath"/$file
+	    done
+        
+		echo "Invoking the online compiler..."
         # invoke the online compiler
         "$BELA_PYTHON27" $HVRESOURCES_DIR/uploader.py "$pdpath"/ -n $ENZIENAUDIO_COM_PATCH_NAME -g c -o "$projectpath" $RELEASE_STRING ||\
-            { echo "ERROR: an error occurred while executing the uploader.py script"; exit 1; }
+            { echo "ERROR: an error occurred while executing the uploader.py script"; exit $?; }
     fi;
 
     echo "";
 
     # Test that files have been retrieved from the online compiler.
-    # TODO: find a more reliable way of doing this. e.g.: have uploader.py fail with a non-zero error code.
+	# TODO: skip this now that uploader.py returns meaningful exit codes 
     for file in $HEAVY_FILES;
     do
         ls "$projectpath"/$file >/dev/null 2>&1 || { 
--- a/scripts/hvresources/uploader.py	Fri Jun 24 14:12:22 2016 +0100
+++ b/scripts/hvresources/uploader.py	Fri Jun 24 14:55:12 2016 +0100
@@ -5,9 +5,10 @@
 import getpass
 import json
 import os
-import requests
+import requests # http://docs.python-requests.org/en/master/api/#exceptions
 import shutil
 import stat
+import sys
 import tempfile
 import time
 import urlparse
@@ -25,8 +26,29 @@
     underline = "\033[4m"
     end = "\033[0m"
 
+class ErrorCodes(object):
+    # NOTE(mhroth): this class could inherit from Enum, but we choose not to
+    # as to not require an additional dependency
+    # http://www.tldp.org/LDP/abs/html/exitcodes.html
+    # http://stackoverflow.com/questions/1101957/are-there-any-standard-exit-status-codes-in-linux
+    CODE_OK = 0 # success!
+    CODE_MAIN_NOT_FOUND = 3 # _main.pd not found
+    CODE_HEAVY_COMPILE_ERRORS = 4 # heavy returned compiler errors
+    CODE_UPLOAD_ASSET_TOO_LARGE = 5 # the size of the uploadable asset is too large
+    CODE_RELEASE_NOT_AVAILABLE = 6 # the requested release is not available
+    CODE_CONNECTION_ERROR = 7 # HTTPS connection could not be made to the server
+    CODE_CONNECTION_TIMEOUT = 8 # HTTPS connection has timed out
+    CODE_CONNECTION_400_500 = 9 # a 400 or 500 error has occured
+    CODE_EXCEPTION = 125 # a generic exception has occurred
+
+class UploaderException(Exception):
+    def __init__(self, code, message=None, e=None):
+        self.code = code
+        self.message = message
+        self.e = e
+
 # the maxmimum file upload size of 1MB
-__HV_MAX_UPLOAD_SIZE = 1024*1024
+__HV_MAX_UPLOAD_SIZE = 1 * 1024*1024
 
 def __zip_dir(in_dir, zip_path, file_filter=None):
     """Recursively zip an entire directory with an optional file filter
@@ -62,7 +84,7 @@
         help="List of generator outputs. Currently supported generators are "
             "'c', 'js', 'pdext', 'pdext-osx', 'unity', 'unity-osx', "
             "'unity-win-x86', 'unity-win-x86_64', 'wwise', 'wwise-win-x86_64', "
-            "'vst2' ,'vst2-osx', and 'vst2-win-x86_64'.")
+            "'vst2' ,'vst2-osx', 'vst2-win-x86_64', and 'vst2-win-x86'.")
     parser.add_argument(
         "-b",
         help="All files will be placed in the output directory, placed in their own subdirectory corresponding to the generator name.",
@@ -102,36 +124,37 @@
         action="count")
     parser.add_argument(
         "-t", "--token",
-        help="Use the specified token.",
-    )
+        help="Use the specified token.")
     args = parser.parse_args()
 
-    domain = args.domain or "https://enzienaudio.com"
+    try:
+        # set default values
+        domain = args.domain or "https://enzienaudio.com"
+        exit_code = ErrorCodes.CODE_OK
+        temp_dir = None
+        post_data = {}
 
-    post_data = {}
+        # token should be stored in ~/.heavy/token
+        token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
 
-    # token should be stored in ~/.heavy/token
-    token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
+        if args.token is not None:
+            # check if token has been passed as a command line arg...
+            post_data["credentials"] = {"token": args.token}
+        elif os.path.exists(token_path) and not args.z:
+            # ...or if it is stored in the user's home directory
+            with open(token_path, "r") as f:
+                post_data["credentials"] = {"token": f.read()}
+        else:
+            # otherwise, get the username and password
+            post_data["credentials"] = {
+                "username": raw_input("Enter username: "),
+                "password": getpass.getpass("Enter password: ")
+            }
 
-    if args.token is not None:
-        # check if token has been passed as a command line arg...
-        post_data["credentials"] = {"token": args.token}
-    elif os.path.exists(token_path) and not args.z:
-        # ...or if it is stored in the user's home directory
-        with open(token_path, "r") as f:
-            post_data["credentials"] = {"token": f.read()}
-    else:
-        # otherwise, get the username and password
-        post_data["credentials"] = {
-            "username": raw_input("Enter username: "),
-            "password": getpass.getpass("Enter password: ")
-        }
+        tick = time.time()
 
-    tick = time.time()
-
-    # parse the optional release argument
-    if args.release:
-        try:
+        # parse the optional release argument
+        if args.release:
             # check the validity of the current release
             releases_json = requests.get(urlparse.urljoin(domain, "/a/releases")).json()
             if args.release in releases_json:
@@ -155,195 +178,205 @@
                     print "* {0} ({1})".format(
                         k,
                         v["releaseDate"])
-                return
-        except:
-            pass # if the /a/releases request fails for whatever reason, just move on
+                raise UploaderException(ErrorCodes.CODE_RELEASE_NOT_AVAILABLE)
 
-        post_data["release"] = args.release
+            post_data["release"] = args.release
 
-    # make a temporary directory
-    temp_dir = tempfile.mkdtemp(prefix="lroyal-")
+        # make a temporary directory
+        temp_dir = tempfile.mkdtemp(prefix="lroyal-")
 
-    # zip up the pd directory into the temporary directory
-    try:
+        # zip up the pd directory into the temporary directory
         if not os.path.exists(os.path.join(args.input_dir, "_main.pd")):
-            raise Exception("Root Pd directory does not contain a file named _main.pd.")
+            raise UploaderException(
+                ErrorCodes.CODE_MAIN_NOT_FOUND,
+                "Root Pd directory does not contain a file named _main.pd.")
         zip_path = __zip_dir(
             args.input_dir,
             os.path.join(temp_dir, "archive.zip"),
             file_filter={"pd"})
         if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE:
-            raise Exception("The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.".format(
-                os.stat(zip_path).st_size))
+            raise UploaderException(
+                ErrorCodes.CODE_UPLOAD_ASSET_TOO_LARGE,
+                "The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.".format(
+                    os.stat(zip_path).st_size))
+
+        post_data["name"] = args.name
+
+        # the outputs to generate (always include c)
+        __SUPPORTED_GENERATOR_SET = {
+            "c", "js",
+            "pdext", "pdext-osx",
+            "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64",
+            "wwise", "wwise-win-x86_64",
+            "vst2", "vst2-osx", "vst2-win-x86_64",
+        }
+        post_data["gen"] = list(({"c"} | {s.lower() for s in set(args.gen)}) & __SUPPORTED_GENERATOR_SET)
+
+        # upload the job, get the response back
+        # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary,
+        # but we want to send a json encoded deep dictionary. So we do a bit of a hack.
+        r = requests.post(
+            urlparse.urljoin(domain, "/a/heavy"),
+            data={"json":json.dumps(post_data)},
+            files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")},
+            verify=False if args.noverify else True)
+        r.raise_for_status()
+
+        """
+        {
+          "data": {
+            "compileTime": 0.05078411102294922,
+            "id": "mhroth/asdf/Edp2G",
+            "slug": "Edp2G",
+            "index": 3,
+            "links": {
+              "files": {
+                "linkage": [
+                  {
+                    "id": "mhroth/asdf/Edp2G/c",
+                    "type": "file"
+                  }
+                ],
+                "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files"
+              },
+              "project": {
+                "linkage": {
+                  "id": "mhroth/asdf",
+                  "type": "project"
+                },
+                "self": "https://enzienaudio.com/h/mhroth/asdf"
+              },
+              "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G",
+              "user": {
+                "linkage": {
+                  "id": "mhroth",
+                  "type": "user"
+                },
+                "self": "https://enzienaudio.com/h/mhroth"
+              }
+            },
+            "type": "job"
+          },
+          "included": [
+            {
+              "filename": "file.c.zip",
+              "generator": "c",
+              "id": "mhroth/asdf/Edp2G/c",
+              "links": {
+                "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip"
+              },
+              "mime": "application/zip",
+              "type": "file"
+            }
+          ],
+          "warnings": [
+            {"details": "blah blah blah"}
+          ],
+          "meta": {
+            "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz"
+          }
+        }
+        """
+        # decode the JSON API response
+        reply_json = r.json()
+        if args.verbose:
+            print json.dumps(
+                reply_json,
+                sort_keys=True,
+                indent=2,
+                separators=(",", ": "))
+
+        # update the api token, if present
+        if "token" in reply_json.get("meta",{}) and not args.x:
+            if args.token is not None:
+                if reply_json["meta"]["token"] != args.token:
+                    print "WARNING: Token returned by API is not the same as the "
+                    "token supplied at the command line. (old = %s, new = %s)".format(
+                        args.token,
+                        reply_json["meta"]["token"])
+            else:
+                if not os.path.exists(os.path.dirname(token_path)):
+                    # ensure that the .heavy directory exists
+                    os.makedirs(os.path.dirname(token_path))
+                with open(token_path, "w") as f:
+                    f.write(reply_json["meta"]["token"])
+                # force rw------- permissions on the file
+                os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR)
+
+        # print any warnings
+        for i,x in enumerate(reply_json.get("warnings",[])):
+            print "{3}) {0}Warning:{1} {2}".format(
+                Colours.yellow, Colours.end, x["detail"], i+1)
+
+        # check for errors
+        if len(reply_json.get("errors",[])) > 0:
+            for i,x in enumerate(reply_json["errors"]):
+                print "{3}) {0}Error:{1} {2}".format(
+                    Colours.red, Colours.end, x["detail"], i+1)
+            raise UploaderException(ErrorCodes.CODE_HEAVY_COMPILE_ERRORS)
+
+        # retrieve all requested files
+        for i,g in enumerate(args.gen):
+            file_url = __get_file_url_for_generator(reply_json, g)
+            if file_url and (len(args.out) > i or args.b):
+                r = requests.get(
+                    file_url,
+                    cookies={"token": reply_json["meta"]["token"]},
+                    verify=False if args.noverify else True)
+                r.raise_for_status()
+
+                # write the reply to a temporary file
+                c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
+                with open(c_zip_path, "wb") as f:
+                    f.write(r.content)
+
+                # unzip the files to where they belong
+                if args.b:
+                    target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g)
+                else:
+                    target_dir = os.path.abspath(os.path.expanduser(args.out[i]))
+                if not os.path.exists(target_dir):
+                    os.makedirs(target_dir) # ensure that the output directory exists
+                __unzip(c_zip_path, target_dir)
+
+                if g == "c" and args.y:
+                    keep_files = ("_{0}.h".format(args.name), "_{0}.c".format(args.name))
+                    for f in os.listdir(target_dir):
+                        if not f.endswith(keep_files):
+                            os.remove(os.path.join(target_dir, f));
+
+                print "{0} files placed in {1}".format(g, target_dir)
+            else:
+                print "{0}Warning:{1} {2} files could not be retrieved.".format(
+                    Colours.yellow, Colours.end,
+                    g)
+
+            print "Job URL:", reply_json["data"]["links"]["self"]
+            print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))
+            print "Heavy release:", reply_json.get("meta",{}).get("release", "default")
+    except UploaderException as e:
+        exit_code = e.code
+        if e.message:
+            print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e.message)
+    except requests.ConnectionError as e:
+        print "{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\n{2}".format(Colours.red, Colours.end, e)
+        exit_code = ErrorCodes.CODE_CONNECTION_ERROR
+    except requests.ConnectTimeout as e:
+        print "{0}Error:{1} Connection to server timed out. The server might be overloaded. Try again later?\n{2}".format(Colours.red, Colours.end, e)
+        exit_code = ErrorCodes.CODE_CONNECTION_TIMEOUT
+    except requests.HTTPError as e:
+        print "{0}Error:{1} An HTTP error has occurred.\n{2}".format(Colours.red, Colours.end, e)
+        exit_code = ErrorCodes.CODE_CONNECTION_400_500
     except Exception as e:
+        exit_code = ErrorCodes.CODE_EXCEPTION
         print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e)
-        shutil.rmtree(temp_dir) # clean up the temporary directory
-        return
+        print "Getting a weird error? Get the latest uploader at https://enzienaudio.com/static/uploader.py"
+    finally:
+        if temp_dir:
+            shutil.rmtree(temp_dir) # delete the temporary directory no matter what
 
-    post_data["name"] = args.name
-
-    # the outputs to generate (always include c)
-    __SUPPORTED_GENERATOR_SET = {
-        "c", "js",
-        "pdext", "pdext-osx",
-        "unity", "unity-osx", "unity-win-x86", "unity-win-x86_64",
-        "wwise", "wwise-win-x86_64",
-        "vst2", "vst2-osx", "vst2-win-x86_64",
-    }
-    post_data["gen"] = list(({"c"} | {s.lower() for s in set(args.gen)}) & __SUPPORTED_GENERATOR_SET)
-
-    # upload the job, get the response back
-    # NOTE(mhroth): multipart-encoded file can only be sent as a flat dictionary,
-    # but we want to send a json encoded deep dictionary. So we do a bit of a hack.
-    r = requests.post(
-        urlparse.urljoin(domain, "/a/heavy"),
-        data={"json":json.dumps(post_data)},
-        files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")},
-        verify=False if args.noverify else True)
-
-    if r.status_code != requests.codes.ok:
-        shutil.rmtree(temp_dir) # clean up the temporary directory
-        print "Getting a weird error? Get the latest uploader at https://enzienaudio.com/static/uploader.py"
-        r.raise_for_status() # raise an exception
-
-    # decode the JSON API response
-    r_json = r.json()
-
-    """
-    {
-      "data": {
-        "compileTime": 0.05078411102294922,
-        "id": "mhroth/asdf/Edp2G",
-        "slug": "Edp2G",
-        "index": 3,
-        "links": {
-          "files": {
-            "linkage": [
-              {
-                "id": "mhroth/asdf/Edp2G/c",
-                "type": "file"
-              }
-            ],
-            "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/files"
-          },
-          "project": {
-            "linkage": {
-              "id": "mhroth/asdf",
-              "type": "project"
-            },
-            "self": "https://enzienaudio.com/h/mhroth/asdf"
-          },
-          "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G",
-          "user": {
-            "linkage": {
-              "id": "mhroth",
-              "type": "user"
-            },
-            "self": "https://enzienaudio.com/h/mhroth"
-          }
-        },
-        "type": "job"
-      },
-      "included": [
-        {
-          "filename": "file.c.zip",
-          "generator": "c",
-          "id": "mhroth/asdf/Edp2G/c",
-          "links": {
-            "self": "https://enzienaudio.com/h/mhroth/asdf/Edp2G/c/file.c.zip"
-          },
-          "mime": "application/zip",
-          "type": "file"
-        }
-      ],
-      "warnings": [
-        {"details": "blah blah blah"}
-      ],
-      "meta": {
-        "token": "11AS0qPRmjTUHEMSovPEvzjodnzB1xaz"
-      }
-    }
-    """
-    reply_json = r.json()
-    if args.verbose:
-        print json.dumps(
-            reply_json,
-            sort_keys=True,
-            indent=2,
-            separators=(",", ": "))
-
-    # update the api token, if present
-    if "token" in reply_json.get("meta",{}) and not args.x:
-        if args.token is not None:
-            if reply_json["meta"]["token"] != args.token:
-                print "WARNING: Token returned by API is not the same as the "
-                "token supplied at the command line. (old = %s, new = %s)".format(
-                    args.token,
-                    reply_json["meta"]["token"])
-        else:
-            if not os.path.exists(os.path.dirname(token_path)):
-                # ensure that the .heavy directory exists
-                os.makedirs(os.path.dirname(token_path))
-            with open(token_path, "w") as f:
-                f.write(reply_json["meta"]["token"])
-            # force rw------- permissions on the file
-            os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR)
-
-    # print any warnings
-    for i,x in enumerate(r_json.get("warnings",[])):
-        print "{3}) {0}Warning:{1} {2}".format(
-            Colours.yellow, Colours.end, x["detail"], i+1)
-
-    # check for errors
-    if len(r_json.get("errors",[])) > 0:
-        shutil.rmtree(temp_dir) # clean up the temporary directory
-        for i,x in enumerate(r_json["errors"]):
-            print "{3}) {0}Error:{1} {2}".format(
-                Colours.red, Colours.end, x["detail"], i+1)
-        return
-
-    # retrieve all requested files
-    for i,g in enumerate(args.gen):
-        file_url = __get_file_url_for_generator(reply_json, g)
-        if file_url is not None and (len(args.out) > i or args.b):
-            r = requests.get(
-                file_url,
-                cookies={"token": reply_json["meta"]["token"]},
-                verify=False if args.noverify else True)
-            r.raise_for_status()
-
-            # write the reply to a temporary file
-            c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
-            with open(c_zip_path, "wb") as f:
-                f.write(r.content)
-
-            # unzip the files to where they belong
-            if args.b:
-                target_dir = os.path.join(os.path.abspath(os.path.expanduser(args.out[0])), g)
-            else:
-                target_dir = os.path.abspath(os.path.expanduser(args.out[i]))
-            if not os.path.exists(target_dir):
-                os.makedirs(target_dir) # ensure that the output directory exists
-            __unzip(c_zip_path, target_dir)
-
-            if g == "c" and args.y:
-                keep_files = ("_{0}.h".format(args.name), "_{0}.c".format(args.name))
-                for f in os.listdir(target_dir):
-                    if not f.endswith(keep_files):
-                        os.remove(os.path.join(target_dir, f));
-
-            print "{0} files placed in {1}".format(g, target_dir)
-        else:
-            print "{0}Warning:{1} {2} files could not be retrieved.".format(
-                Colours.yellow, Colours.end,
-                g)
-
-    # delete the temporary directory
-    shutil.rmtree(temp_dir)
-
-    print "Job URL:", reply_json["data"]["links"]["self"]
-    print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))
-    print "Heavy release:", reply_json.get("meta",{}).get("release", "default")
+    # exit and return the exit code
+    sys.exit(exit_code)
 
 def __get_file_url_for_generator(json_api, g):
     """Returns the file link for a specific generator.