changeset 1618:fb03674bdde1

Merge from live branch
author Chris Cannam
date Thu, 22 Feb 2018 09:04:43 +0000
parents 4e21f91ad4ff (current diff) 5584bee75daa (diff)
children 3a510bf6a9bc
files extra/fast-export/Makefile extra/fast-export/svn-archive.c extra/fast-export/svn-fast-export.c extra/fast-export/svn-fast-export.py
diffstat 79 files changed, 1860 insertions(+), 684 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/.dockerignore	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,6 @@
+.dockerignore
+.hg
+.hgignore
+deploy/docker/Dockerfile
+deploy/docker/start.sh
+*.log
--- a/.hgignore	Thu Feb 04 08:47:09 2016 +0000
+++ b/.hgignore	Thu Feb 22 09:04:43 2018 +0000
@@ -1,5 +1,4 @@
 syntax: glob
-
 .project
 .loadpath
 .powrc
@@ -35,7 +34,13 @@
 *~
 public/themes/soundsoftware/stylesheets/fonts/*
 .bundle
-Gemfile.lock
 Gemfile.local
-
+*.interpolated
 re:^config\.ru$
+.vagrant
+*.orig
+*.pyc
+*-console.log
+postgres-dumpall
+*.gen
+deploy/test/output
--- a/Gemfile	Thu Feb 04 08:47:09 2016 +0000
+++ b/Gemfile	Thu Feb 22 09:04:43 2018 +0000
@@ -9,10 +9,6 @@
 gem "mime-types"
 gem "awesome_nested_set", "2.1.6"
 
-#cc -- CiteProc v1.0.0 broke our citations (CiteProc.process returns nil).
-# Until I've managed to work out what's up and fix that...
-gem "citeproc", "0.0.6"
-
 # Optional gem for LDAP authentication
 group :ldap do
   gem "net-ldap", "~> 0.3.1"
@@ -64,7 +60,11 @@
         gem "mysql", "~> 2.8.1", :platforms => [:mri, :mingw]
         gem "activerecord-jdbcmysql-adapter", :platforms => :jruby
       when /postgresql/
-        gem "pg", ">= 0.11.0", :platforms => [:mri, :mingw]
+        #!!! Lock (temporarily?) to 0.20 to avoid being spammed by
+        # deprecation errors from one of the other users of the gem
+        # outside our control
+#        gem "pg", ">= 0.11.0", :platforms => [:mri, :mingw]
+        gem "pg", "= 0.20.0", :platforms => [:mri, :mingw]
         gem "activerecord-jdbcpostgresql-adapter", :platforms => :jruby
       when /sqlite3/
         gem "sqlite3", :platforms => [:mri, :mingw]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Gemfile.lock	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,178 @@
+GEM
+  remote: https://rubygems.org/
+  specs:
+    actionmailer (3.2.22.5)
+      actionpack (= 3.2.22.5)
+      mail (~> 2.5.4)
+    actionpack (3.2.22.5)
+      activemodel (= 3.2.22.5)
+      activesupport (= 3.2.22.5)
+      builder (~> 3.0.0)
+      erubis (~> 2.7.0)
+      journey (~> 1.0.4)
+      rack (~> 1.4.5)
+      rack-cache (~> 1.2)
+      rack-test (~> 0.6.1)
+      sprockets (~> 2.2.1)
+    activemodel (3.2.22.5)
+      activesupport (= 3.2.22.5)
+      builder (~> 3.0.0)
+    activerecord (3.2.22.5)
+      activemodel (= 3.2.22.5)
+      activesupport (= 3.2.22.5)
+      arel (~> 3.0.2)
+      tzinfo (~> 0.3.29)
+    activeresource (3.2.22.5)
+      activemodel (= 3.2.22.5)
+      activesupport (= 3.2.22.5)
+    activesupport (3.2.22.5)
+      i18n (~> 0.6, >= 0.6.4)
+      multi_json (~> 1.0)
+    acts-as-taggable-on (2.3.3)
+      rails (~> 3.0)
+    arel (3.0.3)
+    awesome_nested_set (2.1.6)
+      activerecord (>= 3.0.0)
+    bibtex-ruby (4.4.4)
+      latex-decode (~> 0.0)
+    builder (3.0.0)
+    capybara (2.1.0)
+      mime-types (>= 1.16)
+      nokogiri (>= 1.3.3)
+      rack (>= 1.0.0)
+      rack-test (>= 0.5.4)
+      xpath (~> 2.0)
+    childprocess (0.7.1)
+      ffi (~> 1.0, >= 1.0.11)
+    citeproc (1.0.5)
+      namae (~> 0.8)
+    citeproc-ruby (1.1.7)
+      citeproc (>= 1.0.4, < 2.0)
+      csl (~> 1.4)
+    coderay (1.1.1)
+    csl (1.4.5)
+      namae (~> 0.7)
+    csl-styles (1.0.1.8)
+      csl (~> 1.0)
+    erubis (2.7.0)
+    fastercsv (1.5.5)
+    ffi (1.9.18)
+    hike (1.2.3)
+    i18n (0.8.6)
+    iconv (1.0.4)
+    journey (1.0.4)
+    jquery-rails (2.0.3)
+      railties (>= 3.1.0, < 5.0)
+      thor (~> 0.14)
+    json (1.8.6)
+    latex-decode (0.2.2)
+      unicode (~> 0.4)
+    mail (2.5.5)
+      mime-types (~> 1.16)
+      treetop (~> 1.4.8)
+    metaclass (0.0.4)
+    mime-types (1.25.1)
+    mini_portile2 (2.2.0)
+    mocha (1.0.0)
+      metaclass (~> 0.0.1)
+    multi_json (1.12.1)
+    namae (0.11.3)
+    net-ldap (0.3.1)
+    nokogiri (1.8.0)
+      mini_portile2 (~> 2.2.0)
+    pg (0.20.0)
+    polyglot (0.3.5)
+    rack (1.4.7)
+    rack-cache (1.7.0)
+      rack (>= 0.4)
+    rack-openid (1.4.2)
+      rack (>= 1.1.0)
+      ruby-openid (>= 2.1.8)
+    rack-ssl (1.3.4)
+      rack
+    rack-test (0.6.3)
+      rack (>= 1.0)
+    rails (3.2.22.5)
+      actionmailer (= 3.2.22.5)
+      actionpack (= 3.2.22.5)
+      activerecord (= 3.2.22.5)
+      activeresource (= 3.2.22.5)
+      activesupport (= 3.2.22.5)
+      bundler (~> 1.0)
+      railties (= 3.2.22.5)
+    railties (3.2.22.5)
+      actionpack (= 3.2.22.5)
+      activesupport (= 3.2.22.5)
+      rack-ssl (~> 1.3.2)
+      rake (>= 0.8.7)
+      rdoc (~> 3.4)
+      thor (>= 0.14.6, < 2.0)
+    rake (10.1.1)
+    rdoc (3.12.2)
+      json (~> 1.4)
+    redcarpet (2.3.0)
+    rmagick (2.16.0)
+    ruby-openid (2.3.0)
+    rubyzip (1.2.1)
+    selenium-webdriver (3.5.1)
+      childprocess (~> 0.5)
+      rubyzip (~> 1.0)
+    shoulda (3.3.2)
+      shoulda-context (~> 1.0.1)
+      shoulda-matchers (~> 1.4.1)
+    shoulda-context (1.0.2)
+    shoulda-matchers (1.4.1)
+      activesupport (>= 3.0.0)
+    sprockets (2.2.3)
+      hike (~> 1.2)
+      multi_json (~> 1.0)
+      rack (~> 1.0)
+      tilt (~> 1.1, != 1.3.0)
+    thor (0.19.4)
+    tilt (1.4.1)
+    treetop (1.4.15)
+      polyglot
+      polyglot (>= 0.3.1)
+    tzinfo (0.3.53)
+    unicode (0.4.4.4)
+    xpath (2.1.0)
+      nokogiri (~> 1.3)
+    yard (0.9.9)
+
+PLATFORMS
+  ruby
+
+DEPENDENCIES
+  activerecord-jdbc-adapter (~> 1.3.2)
+  activerecord-jdbcpostgresql-adapter
+  acts-as-taggable-on (= 2.3.3)
+  awesome_nested_set (= 2.1.6)
+  bibtex-ruby
+  builder (= 3.0.0)
+  capybara (~> 2.1.0)
+  citeproc
+  citeproc-ruby
+  coderay (~> 1.1.0)
+  csl
+  csl-styles
+  fastercsv (~> 1.5.0)
+  iconv
+  jquery-rails (~> 2.0.2)
+  mime-types
+  mocha (~> 1.0.0)
+  net-ldap (~> 0.3.1)
+  nokogiri
+  pg (= 0.20.0)
+  rack-openid
+  rails (~> 3.2.22)
+  rake (~> 10.1.1)
+  rdoc (>= 2.4.2)
+  redcarpet (~> 2.3.0)
+  rmagick (>= 2.0.0)
+  ruby-openid (~> 2.3.0)
+  selenium-webdriver
+  shoulda (~> 3.3.2)
+  yard
+
+BUNDLED WITH
+   1.15.4
--- a/app/models/mailer.rb	Thu Feb 04 08:47:09 2016 +0000
+++ b/app/models/mailer.rb	Thu Feb 22 09:04:43 2018 +0000
@@ -400,7 +400,8 @@
             'X-Auto-Response-Suppress' => 'OOF',
             'Auto-Submitted' => 'auto-generated',
             'From' => Setting.mail_from,
-            'List-Id' => "<#{Setting.mail_from.to_s.gsub('@', '.')}>"
+            'Reply-To' => Setting.mail_reply_to,
+            'List-Id' => "<#{Setting.mail_reply_to.to_s.gsub('@', '.')}>"
 
     # Removes the author from the recipients and cc
     # if the author does not want to receive notifications
--- a/app/views/projects/explore.html.erb	Thu Feb 04 08:47:09 2016 +0000
+++ b/app/views/projects/explore.html.erb	Thu Feb 22 09:04:43 2018 +0000
@@ -11,11 +11,12 @@
 <h2><%= l(:label_explore_projects) %></h2>
 
 <div class="threecolumnleft">
-  <div class="tags box">
-  <h3><%=l(:label_project_tags_all)%></h3>
-    <% cache(:action => 'explore', :action_suffix => 'tags') do %>
-    <%= render :partial => 'projects/tagcloud' %>
+  <div class="projects box">
+  <h3><%=l(:label_projects_busy)%></h3>
+    <% cache(:action => 'explore', :action_suffix => 'busy_projects') do %>
+    <%= render :partial => 'activities/busy' %>
     <% end %>
+    <%= link_to l(:label_overall_activity), { :controller => 'activities', :action => 'index' }, :class => 'more' %>
   </div>
 </div>
 
@@ -27,26 +28,6 @@
   </div>
 </div>
 
-<div class="threecolumnleft">
-  <div class="institutions box">
-  <h3><%=l(:label_institutions_busy)%></h3>
-  <% cache(:action => 'explore', :action_suffix => 'busy_institutions') do %>
-    <%= render :partial => 'activities/busy_institution' %>
-  <% end %>
-  <%= link_to l(:label_overall_activity), { :controller => 'activities', :action => 'index' }, :class => 'more' %>
-  </div>
-</div>
-
-<div class="threecolumnright">
-  <div class="projects box">
-  <h3><%=l(:label_projects_busy)%></h3>
-    <% cache(:action => 'explore', :action_suffix => 'busy_projects') do %>
-    <%= render :partial => 'activities/busy' %>
-    <% end %>
-    <%= link_to l(:label_overall_activity), { :controller => 'activities', :action => 'index' }, :class => 'more' %>
-  </div>
-</div>
-
 <div class="threecolumnmid">
   <div class="projects box">
   <h3><%=l(:label_projects_mature)%></h3>
@@ -57,4 +38,12 @@
   </div>
 </div>
 
+  <div class="tags box" style="clear:all">
+  <h3><%=l(:label_project_tags_all)%></h3>
+    <% cache(:action => 'explore', :action_suffix => 'tags') do %>
+    <%= render :partial => 'projects/tagcloud' %>
+    <% end %>
+  </div>
+
+
 <% html_title(l(:label_explore_projects)) -%>
--- a/app/views/repositories/_dir_list.html.erb	Thu Feb 04 08:47:09 2016 +0000
+++ b/app/views/repositories/_dir_list.html.erb	Thu Feb 22 09:04:43 2018 +0000
@@ -1,4 +1,4 @@
-<div class="autoscroll">
+<div class="autoscroll" id="browser-autoscroll">
 <table class="list entries" id="browser">
 <thead>
 <tr id="root">
--- a/config/settings.yml	Thu Feb 04 08:47:09 2016 +0000
+++ b/config/settings.yml	Thu Feb 22 09:04:43 2018 +0000
@@ -61,6 +61,8 @@
   default: '25,50,100'
 mail_from:
   default: redmine@example.net
+mail_reply_to:
+  default: info@soundsoftware.ac.uk
 bcc_recipients:
   default: 1
 plain_text_mail:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/README	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,56 @@
+
+Deploying the SoundSoftware site
+================================
+
+These scripts can be used for test or staging deployments reproducing
+much of the configuration of the live site. Currently it's assumed
+that you are providing a database dump to load -- there is no
+provisioning step to initialise a new database.
+
+
+You will need
+-------------
+
+Required:
+
+ * A database dump to load. This should be left in a file called
+   postgres-dumpall in the soundsoftware-site root
+
+ * The database password and /sys API key for the target site. (This
+   can be queried from the db: settings table, name "sys_api_key". You
+   can change it in the admin UI; grep API config/*.in to see the
+   files you'll need to update if you change it)
+
+ * The (copyrighted) web font files used in our deployment. Leave
+   these in /public/themes/soundsoftware/stylesheets/fonts/
+
+Optional (or required for proper deployments):
+
+ * HTTPS key/cert files
+
+
+Three ways to deploy
+--------------------
+
+ 1. Using Vagrant to set up a development VM: Run ./vagrant/start.sh
+
+ 2. Using Docker to set up a development container: Run ./docker/start.sh
+
+ 3. On a "real" VM or server:
+
+    * Ensure the soundsoftware-site repo is checked out at /code-to-deploy
+    * Run /code-to-deploy/deploy/any/run-provisioning.sh as root
+
+    But be very careful with this! You could screw up a dev box -- or
+    an existing live server! -- if you accidentally provision the site
+    directly onto it when you should have used Vagrant or a container.
+
+
+After deployment
+----------------
+
+There is a smoke test script at test/smoketest.sh which checks that
+the home page, a project page, a repo page etc can be retrieved. Some
+of the pages it tries to retrieve are dependent on their generating
+cron scripts having run at least once since the server was set up.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/any/prepare.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,75 @@
+#!/bin/bash
+
+# To be sourced into a container-specific start.sh file, not run
+# standalone
+
+usage() {
+    echo "Usage: $0 <database-password> <api-key> <api-httpauth-password>" 1>&2
+    exit 2
+}
+
+dbpass="$1"
+if [ -z "$dbpass" ]; then
+    usage
+fi
+
+apikey="$2"
+if [ -z "$apikey" ]; then
+    usage
+fi
+
+apipass="$3"
+if [ -z "$apipass" ]; then
+    usage
+fi
+
+set -eu -o pipefail
+
+rootdir="$mydir/../.."
+
+deploydir="$rootdir"/deploy
+if [ ! -d "$deploydir" ]; then
+    echo "ERROR: Unexpected repository layout - expected directory at $deploydir"
+    exit 2
+fi
+
+managerdir="$deploydir/vagrant"
+if [ ! -d "$managerdir" ]; then
+    echo "ERROR: Required directory $managerdir not found"
+    exit 2
+fi
+
+configdir="$deploydir/config"
+if [ ! -d "$configdir" ]; then
+    echo "ERROR: Required directory $configdir not found"
+    exit 2
+fi
+
+if [ ! -f "$rootdir/postgres-dumpall" ]; then
+    echo "ERROR: I expect to find a Postgres SQL multi-db dump file in $rootdir/postgres-dumpall. Create an empty file there if you don't want to load a database."
+    exit 2
+fi
+
+fontdir="$rootdir"/public/themes/soundsoftware/stylesheets/fonts
+if [ ! -f "$fontdir/24BC0E_0_0.woff" ]; then
+    echo "ERROR: I expect to find necessary webfonts in $fontdir"
+    exit 2
+fi
+
+#apischeme=http
+#apihost=localhost
+
+apischeme=https
+apihost=code.soundsoftware.ac.uk
+
+for f in "$configdir"/*.in "$rootdir"/extra/soundsoftware/extract-docs.sh ; do
+    out="$configdir"/$(basename "$f" .in).gen
+    cat "$f" | sed \
+                   -e 's/INSERT_DATABASE_PASSWORD_HERE/'"$dbpass"'/g' \
+                   -e 's/INSERT_API_KEY_HERE/'"$apikey"'/g' \
+                   -e 's/INSERT_API_SCHEME_HERE/'"$apischeme"'/g' \
+                   -e 's/INSERT_API_HOST_HERE/'"$apihost"'/g' \
+                   -e 's/INSERT_API_USER_HERE/user/g' \
+                   -e 's/INSERT_API_PASSWORD_HERE/'"$apipass"'/g' \
+                   > "$out"
+done
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/any/run-cron-scripts.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e
+
+cd /var/www/code
+
+for t in minutely hourly daily monthly; do
+    for s in deploy/config/cron.$t/[0-9]* ; do
+        name=$(basename $s)
+        actual="/etc/cron.$t/$name"
+        echo "Running cron script $actual..."
+        if "$actual"; then
+            echo "Cron script $actual ran successfully"
+        else
+            echo "Cron script $actual failed with error code $?"
+            exit 1
+        fi
+    done
+done
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/any/run-provisioning.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+mydir=$(dirname "$0")
+case "$mydir" in
+    /*) ;;
+    *) mydir=$(echo "$(pwd)/$mydir" | sed 's,/\./,/,g')
+esac
+
+if [ "$mydir" != "/code-to-deploy/deploy/any" ]; then
+    echo "ERROR: Expected repository to be at /code-to-deploy prior to provisioning"
+    echo "       (My directory is $mydir, expected /code-to-deploy/deploy/any)"
+    exit 2
+fi
+
+set -e
+
+. "$mydir"/prepare.sh
+
+for f in "$mydir"/../provision.d/[0-9]*.sh ; do
+    case "$f" in
+        *~) ;;
+        *) echo "Running provisioning script: $f"
+           /bin/bash "$f" </dev/null ;; # close stdin to avoid interactivity
+    esac
+done
+
+echo "All provisioning scripts complete"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/code-ssl.conf.in	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,148 @@
+
+# Apache config with SSL and admin auth stubbed in. You must provide
+# the key/cert and auth files.
+
+# Note this has been updated for Apache 2.4, which introduced a number
+# of (welcome) changes to access control directives.
+
+PerlLoadModule Apache::Authn::SoundSoftware
+
+<VirtualHost *:80>
+        ServerName code.soundsoftware.ac.uk
+        ServerAdmin chris.cannam@soundsoftware.ac.uk
+
+        DocumentRoot /var/www/code/public
+        PassengerRestartDir restart_files
+        PassengerHighPerformance on
+        PassengerMaxRequests 50000
+        PassengerStatThrottleRate 5
+	PassengerFriendlyErrorPages off
+        RailsSpawnMethod smart
+        ExpiresDefault "access plus 1 minute"
+
+        # Redirect all activity to secure site
+        Redirect seeother / "https://code.soundsoftware.ac.uk/"
+
+        <DirectoryMatch "^/.*/\.svn/">
+                Require all denied
+        </DirectoryMatch>
+
+        <DirectoryMatch "^/.*/\.hg/">
+                Require all denied
+        </DirectoryMatch>
+
+        <DirectoryMatch "^/.*/\.git/">
+                Require all denied
+        </DirectoryMatch>
+
+        <Directory /var/www/code/public>
+                Options -MultiViews
+	</Directory>
+
+	ErrorLog /var/log/apache2/code-error.log
+	CustomLog /var/log/apache2/code-access.log vhost_combined
+
+        LogLevel warn
+        ServerSignature Off
+</VirtualHost>
+
+<VirtualHost *:443>
+        ServerName code.soundsoftware.ac.uk
+        ServerAdmin chris.cannam@soundsoftware.ac.uk
+
+        SSLEngine on
+	SSLCertificateFile /etc/apache2/certs/code.soundsoftware.ac.uk.crt
+	SSLCertificateKeyFile /etc/apache2/certs/code.soundsoftware.ac.uk.key
+	SSLCertificateChainFile /etc/apache2/certs/code.soundsoftware.ac.uk.ca-bundle
+	SSLVerifyClient none
+	SSLProtocol all -SSLv2 -SSLv3
+	SSLCipherSuite ALL:!ADH:!EXPORT:!SSLv2:RC4+RSA:+HIGH:+MEDIUM:+LOW	
+
+        DocumentRoot /var/www/code/public
+        PassengerRestartDir restart_files
+        PassengerHighPerformance on
+        PassengerMaxRequests 50000
+        PassengerStatThrottleRate 5
+	PassengerStartTimeout 60
+	PassengerFriendlyErrorPages off
+        RailsSpawnMethod smart
+        ExpiresDefault "access plus 1 minute"
+
+        <Location /sys>
+		AuthType Basic
+		AuthUserFile "/etc/apache2/auth/user.htpasswd"
+		AuthName "code.soundsoftware.ac.uk"
+		Require user user
+	</Location>
+
+	<Location /admin>
+		AuthType Digest
+		AuthUserFile "/etc/apache2/auth/admin.htdigest"
+		AuthName "code.soundsoftware.ac.uk admin interface"
+		Require user admin
+	</Location>
+
+        <DirectoryMatch "^/.*/\.svn/">
+                Require all denied
+        </DirectoryMatch>
+
+        <DirectoryMatch "^/.*/\.hg/">
+                Require all denied
+        </DirectoryMatch>
+
+        <DirectoryMatch "^/.*/\.git/">
+                Require all denied
+        </DirectoryMatch>
+
+        <Directory /var/www/code/public>
+                Options -MultiViews
+	</Directory>
+
+        <Directory /var/www/code/public/themes/soundsoftware/stylesheets/fonts>
+		# Avoid other sites embedding our fonts
+		RewriteEngine on
+		RewriteCond %{HTTP_REFERER} !^$
+		RewriteCond %{HTTP_REFERER} !^http(s)?://code.soundsoftware.ac.uk/.*$ [NC]
+		RewriteRule \.(ttf|woff|eot|otf|svg|zip|gz|html|txt)$ - [F]
+	</Directory>
+
+	ScriptAlias /hg "/var/hg/index.cgi"
+
+	<Location /hg>
+               	AuthName "Mercurial"
+                AuthType Basic
+                Require valid-user
+		PerlAccessHandler Apache::Authn::SoundSoftware::access_handler
+      		PerlAuthenHandler Apache::Authn::SoundSoftware::authen_handler
+		PerlSetVar HTTPS "on"
+		SoundSoftwareDSN "dbi:Pg:database=code;host=localhost"
+    		SoundSoftwareDbUser "code"
+     		SoundSoftwareDbPass "INSERT_DATABASE_PASSWORD_HERE"
+		SoundSoftwareRepoPrefix "/var/hg/"
+                SoundSoftwareSslRequired "on"
+		Options +ExecCGI
+		AddHandler cgi-script .cgi
+		ExpiresDefault now
+        </Location>
+
+	Alias /git "/var/files/git-mirror"	
+
+	<Directory "/var/files/git-mirror">
+		Options -Indexes +FollowSymLinks
+                Require all granted
+	</Directory>
+	<Directory ~ "/var/files/git-mirror/.*\.workdir">
+                Require all denied
+	</Directory>
+	<Directory ~ "/var/files/git-mirror/__.*">
+                Require all denied
+	</Directory>
+
+	ErrorLog /var/log/apache2/code-error.log
+	CustomLog /var/log/apache2/code-access.log vhost_combined
+
+        LogLevel warn
+        ServerSignature Off
+        
+</VirtualHost>
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/code.conf.in	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,102 @@
+
+# A test Apache config. Lacks SSL, lacks a desirable extra layer of
+# authentication for admin interface paths. Do not deploy this.
+
+# Note this has been updated for Apache 2.4, which introduced a number
+# of (welcome) changes to access control directives.
+
+PerlLoadModule Apache::Authn::SoundSoftware
+
+<VirtualHost *:80>
+        ServerName code.soundsoftware.ac.uk
+        ServerAdmin chris.cannam@soundsoftware.ac.uk
+
+        DocumentRoot /var/www/code/public
+        PassengerRestartDir restart_files
+        PassengerHighPerformance on
+        PassengerMaxRequests 50000
+        PassengerStatThrottleRate 5
+	PassengerStartTimeout 60
+	PassengerFriendlyErrorPages on
+        RailsSpawnMethod smart
+        ExpiresDefault "access plus 1 minute"
+
+#        <Location /sys>
+#		AuthType Basic
+#		AuthUserFile "/etc/apache2/auth/user.htpasswd"
+#		AuthName "code.soundsoftware.ac.uk"
+#		Require user user
+#	</Location>
+
+#	<Location /admin>
+#		AuthType Digest
+#		AuthUserFile "/etc/apache2/auth/admin.htdigest"
+#		AuthName "code.soundsoftware.ac.uk admin interface"
+#		Require user admin
+#	</Location>
+
+        <DirectoryMatch "^/.*/\.svn/">
+                Require all denied
+        </DirectoryMatch>
+
+        <DirectoryMatch "^/.*/\.hg/">
+                Require all denied
+        </DirectoryMatch>
+
+        <DirectoryMatch "^/.*/\.git/">
+                Require all denied
+        </DirectoryMatch>
+
+        <Directory /var/www/code/public>
+                Options -MultiViews
+	</Directory>
+
+        <Directory /var/www/code/public/themes/soundsoftware/stylesheets/fonts>
+		# Avoid other sites embedding our fonts
+		RewriteEngine on
+		RewriteCond %{HTTP_REFERER} !^$
+		RewriteCond %{HTTP_REFERER} !^http(s)?://code.soundsoftware.ac.uk/.*$ [NC]
+		RewriteRule \.(ttf|woff|eot|otf|svg|zip|gz|html|txt)$ - [F]
+	</Directory>
+
+	ScriptAlias /hg "/var/hg/index.cgi"
+
+	<Location /hg>
+               	AuthName "Mercurial"
+                AuthType Basic
+                Require valid-user
+		PerlAccessHandler Apache::Authn::SoundSoftware::access_handler
+      		PerlAuthenHandler Apache::Authn::SoundSoftware::authen_handler
+		PerlSetVar HTTPS "off"
+		SoundSoftwareDSN "dbi:Pg:database=code;host=localhost"
+    		SoundSoftwareDbUser "code"
+     		SoundSoftwareDbPass "INSERT_DATABASE_PASSWORD_HERE"
+		SoundSoftwareRepoPrefix "/var/hg/"
+                #!!! "on" in production please!:
+                SoundSoftwareSslRequired "off"
+		Options +ExecCGI
+		AddHandler cgi-script .cgi
+		ExpiresDefault now
+        </Location>
+
+	Alias /git "/var/files/git-mirror"	
+
+	<Directory "/var/files/git-mirror">
+		Options -Indexes +FollowSymLinks
+                Require all granted
+	</Directory>
+	<Directory ~ "/var/files/git-mirror/.*\.workdir">
+                Require all denied
+	</Directory>
+	<Directory ~ "/var/files/git-mirror/__.*">
+                Require all denied
+	</Directory>
+
+	ErrorLog /var/log/apache2/code-error.log
+	CustomLog /var/log/apache2/code-access.log vhost_combined
+
+        LogLevel warn
+        ServerSignature Off
+        
+</VirtualHost>
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.daily/00-backup-db	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+outfile="/var/files/backups/postgres-dumpall-`date +%Y%m%d%H%M`"
+
+oldmask=`umask`
+umask 0277
+
+su postgres -c /usr/bin/pg_dumpall > "$outfile" && bzip2 "$outfile"
+
+umask "$oldmask"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.daily/10-extract-docs	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+cd /tmp
+
+/var/www/code/docgen/extract-docs.sh
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.daily/15-get-statistics	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+sudo -u code sh -c "cd /var/www/code ; ./script/rails runner -e production extra/soundsoftware/get-statistics.rb >> log/statistics.log"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.daily/20-check-end-of-external-repo-log	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+tail -2 /var/log/external-repos.log
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.hourly/00-drupal-cron	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+## No longer used - this site is now static
+
+# /usr/bin/wget -O - -q -t 1 http://www.soundsoftware.ac.uk/cron.php
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.hourly/10-redmine-fetch-changesets	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+sudo -u code sh -c "cd /var/www/code ; ./script/rails runner \"Repository.fetch_changesets\" -e production 2>&1 | grep -v 'Not trusting' | grep -v 'svn:' | grep -v 'working copy' | grep -v 'deprecated' | grep -v 'version_requirements'"
+exit 0
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.hourly/20-convert-external-repos	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+sudo -H -u www-data /var/www/code/reposman/run-external.sh
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.hourly/30-expire-explore-cache	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+rm -f /var/www/code/tmp/cache/*/*/views*explore*
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.hourly/40-export-git	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+logfile="/var/www/code/log/export-git.log"
+
+sudo -u code sh -c "cd /tmp ; /var/www/code/extra/soundsoftware/export-git.sh production /var/hg /var/files/git-mirror >> $logfile 2>&1"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.minutely/00-redmine-repositories	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+sudo -u www-data /var/www/code/reposman/run-reposman.sh
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/cron.monthly/00-backup-files	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+cd /
+for location in var/www etc/apache2 etc/cron.*; do
+	target="/var/files/backups/`echo $location | sed 's,/,_,g'`-`date +%Y%m%d%H%M`"
+	oldmask=`umask`
+	umask 0277
+	tar cjf "$target".tar.bz2 "$location"
+	umask "$oldmask"
+done
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/database.yml.in	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,7 @@
+production:
+  adapter: postgresql
+  database: code
+  host: localhost
+  username: code
+  password: "INSERT_DATABASE_PASSWORD_HERE"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/hgweb.config	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,6 @@
+[paths]
+/ = /var/hg/*
+
+[web]
+allow_archive = gz, zip, bz2
+allow_push = *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/index.cgi	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+#
+# An example CGI script to export multiple hgweb repos, edit as necessary
+
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
+# Uncomment to send python tracebacks to the browser if an error occurs:
+import cgitb
+cgitb.enable()
+
+# If you'd like to serve pages with UTF-8 instead of your default
+# locale charset, you can do so by uncommenting the following lines.
+# Note that this will cause your .hgrc files to be interpreted in
+# UTF-8 and all your repo files to be displayed using UTF-8.
+#
+import os
+os.environ["HGENCODING"] = "UTF-8"
+
+from mercurial.hgweb.hgwebdir_mod import hgwebdir
+import mercurial.hgweb.wsgicgi as wsgicgi
+
+# The config file looks like this.  You can have paths to individual
+# repos, collections of repos in a directory tree, or both.
+#
+# [paths]
+# virtual/path1 = /real/path1
+# virtual/path2 = /real/path2
+# virtual/root = /real/root/*
+# / = /real/root2/*
+# virtual/root2 = /real/root2/**
+#
+# [collections]
+# /prefix/to/strip/off = /root/of/tree/full/of/repos
+#
+# paths example: 
+#
+# * First two lines mount one repository into one virtual path, like
+# '/real/path1' into 'virtual/path1'.
+#
+# * The third entry mounts every mercurial repository found in '/real/root'
+# in 'virtual/root'. This format is preferred over the [collections] one,
+# since using absolute paths as configuration keys is not supported on every
+# platform (especially on Windows).
+#
+# * The fourth entry is a special case mounting all repositories in
+# /'real/root2' in the root of the virtual directory.
+#
+# * The fifth entry recursively finds all repositories under the real root,
+# and mounts them using their relative path (to given real root) under the
+# virtual root.
+#
+# collections example: say directory tree /foo contains repos /foo/bar,
+# /foo/quux/baz.  Give this config section:
+#   [collections]
+#   /foo = /foo
+# Then repos will list as bar and quux/baz.
+#
+# Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
+# or use a dictionary with entries like 'virtual/path': '/real/path'
+
+application = hgwebdir('hgweb.config')
+wsgicgi.launch(application)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/logrotate.conf	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,69 @@
+# see "man logrotate" for details
+# rotate log files weekly
+weekly
+
+# use the syslog group by default, since this is the owning group
+# of /var/log/syslog.
+su root syslog
+
+# keep 4 weeks worth of backlogs
+rotate 4
+
+# create new (empty) log files after rotating old ones
+create
+
+# uncomment this if you want your log files compressed
+#compress
+
+# packages drop log rotation information into this directory
+include /etc/logrotate.d
+
+# no packages own wtmp, or btmp -- we'll rotate them here
+/var/log/wtmp {
+    missingok
+    monthly
+    create 0664 root utmp
+    rotate 1
+}
+
+/var/log/btmp {
+    missingok
+    monthly
+    create 0660 root utmp
+    rotate 1
+}
+
+# system-specific logs may be configured here
+/var/www/code/log/*.log {
+	weekly
+	missingok
+	rotate 52
+	compress
+	delaycompress
+	create 640 code code
+	sharedscripts
+	postrotate
+		touch /var/www/code/restart_files/restart.txt
+	endscript
+}
+
+/var/log/reposman.log {
+        weekly
+        missingok
+        rotate 52
+        compress
+        delaycompress
+        create 640 www-data code
+        sharedscripts
+}
+
+/var/log/external-repos.log {
+        weekly
+        missingok
+        rotate 52
+        compress
+        delaycompress
+        create 640 www-data code
+        sharedscripts
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/passenger.conf	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,2 @@
+PassengerMaxPoolSize 60
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/passenger.load	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,3 @@
+LoadModule passenger_module /var/lib/gems/2.3.0/gems/passenger-4.0.60/buildout/apache2/mod_passenger.so
+PassengerRoot /var/lib/gems/2.3.0/gems/passenger-4.0.60
+PassengerDefaultRuby /usr/bin/ruby2.3
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/perl.conf	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,4 @@
+# Apache::DBI is supposed to be a transparent replacement for Perl DBI with
+# better performance when multiple connections are made with common DSN, user
+# and password
+PerlModule Apache::DBI
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/run-external.sh.in	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,18 @@
+#!/bin/bash
+logfile=/var/log/external-repos.log
+( 
+flock -s 200 
+echo >> $logfile
+date >> $logfile
+/var/www/code/reposman/convert-external-repos.rb \
+	-s /var/hg \
+	-r INSERT_API_SCHEME_HERE://INSERT_API_HOST_HERE/ \
+	-k INSERT_API_KEY_HERE \
+	-v \
+	--http-user=INSERT_API_USER_HERE \
+	--http-pass=INSERT_API_PASSWORD_HERE \
+        -c "/var/www/code/reposman/update-external-repo.sh" \
+	>> $logfile 2>&1
+date >> $logfile
+echo Done >> $logfile
+)200>>$logfile
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/run-reposman.sh.in	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,17 @@
+#!/bin/bash
+logfile=/var/log/reposman.log
+( 
+flock -s 200 
+echo >> $logfile
+/var/www/code/reposman/reposman-soundsoftware.rb \
+	-s /var/hg \
+	-r INSERT_API_SCHEME_HERE://INSERT_API_HOST_HERE/ \
+	-k INSERT_API_KEY_HERE \
+	--http-user=INSERT_API_USER_HERE \
+	--http-pass=INSERT_API_PASSWORD_HERE \
+	-o www-data \
+	-g code \
+	-c "/var/www/code/reposman/run-hginit.sh" \
+	--scm=Mercurial \
+	>> $logfile 2>&1
+)200>>$logfile
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/config/soundsoftware-static.conf	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,56 @@
+<VirtualHost *:80>
+        ServerName soundsoftware.ac.uk
+	ServerAlias www.soundsoftware.ac.uk
+        ServerAdmin chris.cannam@eecs.qmul.ac.uk
+
+        DocumentRoot /var/www/soundsoftware-static/soundsoftware.ac.uk
+
+	ErrorLog /var/log/apache2/soundsoftware-error.log
+	CustomLog /var/log/apache2/soundsoftware-access.log vhost_combined
+
+	<Directory /var/www/soundsoftware-static/soundsoftware.ac.uk>
+		RewriteEngine on
+		RewriteCond %{REQUEST_FILENAME} !-d
+		RewriteCond %{REQUEST_FILENAME}\.html -f
+		RewriteRule ^(.*)$ $1.html
+	</Directory>
+
+	<FilesMatch "^.*\.(install|inc)$">
+	     Order Deny,Allow
+	     deny from all
+	</FilesMatch>
+
+        <DirectoryMatch "\.(hg|svn|git)">
+                Order allow,deny
+                Deny from all
+                Satisfy All
+        </DirectoryMatch>
+
+	LogLevel warn
+	ServerSignature Off
+</VirtualHost>
+		
+<VirtualHost *:443>
+	# We don't serve SSL: redirect to the code site
+	ServerName soundsoftware.ac.uk
+        ServerAlias www.soundsoftware.ac.uk
+        ServerAdmin chris.cannam@eecs.qmul.ac.uk
+
+	SSLEngine on
+	SSLCertificateFile /etc/apache2/certs/code.soundsoftware.ac.uk.crt
+	SSLCertificateKeyFile /etc/apache2/certs/code.soundsoftware.ac.uk.key
+	SSLCertificateChainFile /etc/apache2/certs/code.soundsoftware.ac.uk.ca-bundle
+	SSLVerifyClient none
+	SSLCipherSuite ALL:!ADH:!EXPORT:!SSLv2:RC4+RSA:+HIGH:+MEDIUM:+LOW	
+
+        DocumentRoot /var/www/soundsoftware-static/soundsoftware.ac.uk
+
+	Redirect permanent / https://code.soundsoftware.ac.uk/
+
+        ErrorLog /var/log/apache2/code-error.log
+        CustomLog /var/log/apache2/code-access.log vhost_combined
+
+        LogLevel warn
+        ServerSignature Off
+</VirtualHost>
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/docker/Dockerfile.in	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,20 @@
+
+FROM ubuntu:16.04
+MAINTAINER Chris Cannam <cannam@all-day-breakfast.com>
+
+COPY . /var/www/code
+
+WORKDIR /var/www/code
+
+INSERT_PROVISIONING_HERE
+
+# Start Postgres and foregrounded Apache
+
+RUN echo "#!/bin/bash"                      > container-run.sh
+RUN echo "/etc/init.d/postgresql start"    >> container-run.sh
+RUN echo "apache2ctl -D FOREGROUND"        >> container-run.sh
+RUN chmod +x container-run.sh
+
+EXPOSE 80
+CMD ./container-run.sh
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/docker/Dockerfile.inline	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,139 @@
+
+# For documentation and experimental purposes only. As a
+# reconstruction of the machine image that runs this application,
+# there are lots of things missing here; but as a good Docker
+# configuration, it fails by mixing together rather a lot of concerns.
+
+FROM ubuntu:16.04
+MAINTAINER Chris Cannam <cannam@all-day-breakfast.com>
+
+RUN apt-get update && \
+    apt-get install -y \
+    apache2 \
+    apache2-dev \
+    apt-utils \
+    build-essential \
+    cron \
+    curl \
+    doxygen \
+    exim4 \
+    git \
+    graphviz \
+    imagemagick \
+    libapache-dbi-perl \
+    libapache2-mod-perl2 \
+    libapr1-dev \
+    libaprutil1-dev \
+    libauthen-simple-ldap-perl \
+    libcurl4-openssl-dev \
+    libdbd-pg-perl \
+    libpq-dev \
+    libmagickwand-dev \
+    libio-socket-ssl-perl \
+    logrotate \
+    mercurial \
+    postgresql \
+    rsync \
+    ruby \
+    ruby-dev \
+    sudo
+
+# Also used on the live site, for javadoc extraction, but this
+# would be by far the biggest package here: let's omit it while we're
+# not making use of it
+#   openjdk-9-jdk-headless
+
+RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+
+# Passenger gets installed through gem, not apt
+
+RUN gem install passenger -v 4.0.60 --no-rdoc --no-ri
+RUN passenger-install-apache2-module --languages=ruby
+
+
+# Copy across webapp, set up ownership
+
+COPY . /var/www/code
+
+RUN groupadd code
+RUN useradd -g code -G www-data code
+RUN chown -R code.www-data /var/www/code
+RUN find /var/www/code -type d -exec chmod g+s \{\} \;
+
+
+# Initialise /var/hg (in reality this would be mounted from somewhere)
+
+RUN mkdir -p /var/hg
+RUN chown code.www-data /var/hg
+RUN chmod g+s /var/hg
+COPY extra/soundsoftware/scripted-deploy/config/index.cgi /var/hg/
+COPY extra/soundsoftware/scripted-deploy/config/hgweb.config /var/hg/
+RUN chmod +x /var/hg/index.cgi
+
+
+# We're based in the code webapp directory from here on
+
+WORKDIR /var/www/code
+
+
+# Set up database config etc
+
+RUN cp extra/soundsoftware/scripted-deploy/config/database.yml.interpolated config/database.yml
+
+
+# Install Rails and dependencies (database.yml must be populated before this)
+
+RUN gem install bundler
+RUN bundle install
+
+
+# Initialise Redmine token (bundler must be installed before this)
+
+RUN bundle exec rake generate_secret_token
+
+
+# Import Postgres database from postgres-dumpall file
+
+RUN chown postgres postgres-dumpall
+RUN /etc/init.d/postgresql start && sudo -u postgres psql -f postgres-dumpall postgres
+RUN rm postgres-dumpall
+
+
+# Install Perl auth module for Hg access
+
+RUN mkdir -p /usr/local/lib/site_perl/Apache/Authn/
+RUN cp extra/soundsoftware/SoundSoftware.pm /usr/local/lib/site_perl/Apache/Authn/
+
+
+# Set up Apache config (todo: insert variables)
+
+RUN rm -f /etc/apache2/sites-enabled/000-default.conf
+
+RUN cp extra/soundsoftware/scripted-deploy/config/passenger.conf /etc/apache2/mods-available/
+RUN cp extra/soundsoftware/scripted-deploy/config/passenger.load /etc/apache2/mods-available/
+RUN cp extra/soundsoftware/scripted-deploy/config/perl.conf      /etc/apache2/mods-available/
+
+RUN ln -s ../mods-available/passenger.conf  /etc/apache2/mods-enabled/
+RUN ln -s ../mods-available/passenger.load  /etc/apache2/mods-enabled/
+RUN ln -s ../mods-available/perl.conf       /etc/apache2/mods-enabled/
+RUN ln -s ../mods-available/expires.load    /etc/apache2/mods-enabled/
+RUN ln -s ../mods-available/rewrite.load    /etc/apache2/mods-enabled/
+RUN ln -s ../mods-available/cgi.load        /etc/apache2/mods-enabled/
+
+RUN cp extra/soundsoftware/scripted-deploy/config/code.conf.interpolated /etc/apache2/sites-available/code.conf
+RUN ln -s ../sites-available/code.conf /etc/apache2/sites-enabled/10-code.conf
+
+RUN apache2ctl configtest
+
+
+# Start Postgres and foregrounded Apache
+
+RUN echo "#!/bin/bash"                      > container-run.sh
+RUN echo "/etc/init.d/postgresql start"    >> container-run.sh
+RUN echo "apache2ctl -D FOREGROUND"        >> container-run.sh
+RUN chmod +x container-run.sh
+
+EXPOSE 80
+CMD ./container-run.sh
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/docker/start.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+mydir=$(dirname "$0")
+. "$mydir"/../any/prepare.sh
+
+provisioning_commands=$(
+    for x in "$deploydir"/provision.d/[0-9]*.sh; do
+        echo "RUN /bin/bash /var/www/code/deploy/provision.d/$(basename $x)"
+    done | sed 's/$/\\n/' | fmt -2000 | sed 's/ RUN/RUN/g' )
+
+( echo
+  echo "### DO NOT EDIT THIS FILE - it is generated from Dockerfile.in"
+  echo
+) > "$managerdir/Dockerfile.gen"
+
+cat "$managerdir/Dockerfile.in" |
+    sed 's,INSERT_PROVISIONING_HERE,'"$provisioning_commands"',' >> \
+        "$managerdir/Dockerfile.gen"
+
+cd "$rootdir"
+
+dockertag="cannam/soundsoftware-site"
+
+sudo docker build -t "$dockertag" -f "deploy/docker/Dockerfile.gen" .
+sudo docker run -p 8080:80 -d "$dockertag"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/000-system-packages.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -e
+
+# Install necessary system packages. This assumes we are deploying on
+# Ubuntu 16.04.
+
+# We aim to make all of these provisioning scripts non-destructive if
+# run more than once. In this case, running the script again will
+# install any outstanding updates.
+
+apt-get update && \
+    apt-get dist-upgrade -y && \
+    apt-get install -y \
+            ack-grep \
+            apache2 \
+            apache2-dev \
+            apt-utils \
+            build-essential \
+            cron \
+            curl \
+            doxygen \
+            exim4 \
+            git \
+            graphviz \
+            imagemagick \
+            libapache-dbi-perl \
+            libapache2-mod-perl2 \
+            libapr1-dev \
+            libaprutil1-dev \
+            libauthen-simple-ldap-perl \
+            libcurl4-openssl-dev \
+            libdbd-pg-perl \
+            libpq-dev \
+            libmagickwand-dev \
+            libio-socket-ssl-perl \
+            logrotate \
+            lynx \
+            mercurial \
+            mercurial-git \
+            openjdk-9-jdk-headless \
+            postgresql \
+            rsync \
+            ruby \
+            ruby-dev \
+            sudo
+
+locale-gen en_US.UTF-8
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/010-passenger.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+set -e
+
+# Phusion Passenger as application server.
+# This gets installed through gem, not apt, and we ask for a specific
+# version (the last in the 4.0.x line).
+
+if [ ! -f /var/lib/gems/2.3.0/gems/passenger-4.0.60/buildout/apache2/mod_passenger.so ]; then
+    gem install passenger -v 4.0.60 --no-rdoc --no-ri
+    passenger-install-apache2-module --languages=ruby
+fi
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/020-users.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -e
+
+# The webapp directory is owned and run by the code user, in group
+# www-data. The repos and other things served directly are the other
+# way around -- owned by the www-data user, in group code.
+
+for user in code docgen ; do
+    if ! grep -q "^$user:" /etc/passwd ; then
+        groupadd "$user"
+        useradd -g "$user" -G www-data "$user"
+    fi
+done
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/030-webapp-dir.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+set -e
+
+# We might be running in one of two ways:
+#
+# 1. The code directory is already at /var/www/code, either because a
+# previous provisioning step has imported it there or because this
+# script has been run before -- in this situation all we do is
+# re-check the ownership and permissions. OR
+#
+# 2. The code directory has not yet been copied to /var/www/code, in
+# which case we expect to find it at /code-to-deploy, e.g. as a
+# Vagrant shared folder, and we copy it over from there. (We don't
+# deploy directly from shared folders as we might not be able to
+# manipulate ownership and permissions properly there.)
+
+if [ ! -d /var/www/code ]; then
+    if [ ! -d /code-to-deploy ]; then
+        echo "ERROR: Expected to find code tree at /var/www/code or /code-to-deploy: is the deployment script being invoked correctly?"
+        exit 2
+    fi
+    cp -a /code-to-deploy /var/www/code
+fi
+
+chown -R code.www-data /var/www/code
+chmod 755 /var/www/code
+find /var/www/code -type d -exec chmod g+s \{\} \;
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/040-hg-dir.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -e
+
+# In a real deployment, /var/hg is probably mounted from somewhere
+# else. But in an empty deployment we need to create it, and in both
+# cases we set up the config files with their current versions here.
+
+if [ ! -f /var/hg/index.cgi ]; then
+    mkdir -p /var/hg
+fi
+
+cp /var/www/code/deploy/config/index.cgi /var/hg/
+cp /var/www/code/deploy/config/hgweb.config /var/hg/
+
+chmod +x /var/hg/index.cgi
+
+chown -R www-data.code /var/hg
+find /var/hg -type d -exec chmod g+s \{\} \;
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/050-webapp-db.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -e
+
+# Copy across the database config file (the source file has presumably
+# been generated from a skeleton, earlier in provisioning)
+
+infile=/var/www/code/deploy/config/database.yml.gen
+outfile=/var/www/code/config/database.yml
+
+if [ ! -f "$outfile" ]; then
+    if [ ! -f "$infile" ]; then
+        echo "ERROR: Database config file $infile not found - has the database secret been interpolated from $infile.in correctly?"
+        exit 2
+    fi
+    cp "$infile" "$outfile"
+fi
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/060-bundler.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -e
+
+# Install Ruby gems for the web app.
+
+# We aim to make all of these provisioning scripts non-destructive if
+# run more than once. In this case, running the script again will
+# install any outstanding updates.
+
+cd /var/www/code
+gem install bundler
+bundle install
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/070-secret-token.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+set -e
+
+# Create a session token if it hasn't already been created.
+
+cd /var/www/code
+
+if [ ! -f config/initializers/secret_token.rb ]; then
+    bundle exec rake generate_secret_token
+fi
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/080-database-load.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+set -e
+
+# Start the database and if a dump file is found, load it. The dump
+# file is then deleted so that the db won't be overwritten on
+# subsequent runs. (The original repo contains no dump file, so it
+# should exist only if you have provided some data to load.)
+
+/etc/init.d/postgresql start
+
+dumpdir="/code-to-deploy"
+if [ ! -d "$dumpdir" ]; then
+    dumpdir=/var/www/code
+fi
+
+cd "$dumpdir"
+
+if [ -f postgres-dumpall ]; then
+    chmod ugo+r postgres-dumpall
+    sudo -u postgres psql -f postgres-dumpall postgres
+    rm postgres-dumpall
+fi
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/090-perl-auth-module.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -e
+
+# Install the Apache mod_perl module used for hg repo access control
+
+if [ ! -f /usr/local/lib/site_perl/Apache/Authn/SoundSoftware.pm ]; then
+    mkdir -p /usr/local/lib/site_perl/Apache/Authn/
+    cp /var/www/code/extra/soundsoftware/SoundSoftware.pm \
+       /usr/local/lib/site_perl/Apache/Authn/
+fi
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/100-apache-config.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+set -e
+
+# Install Apache config files and module loaders
+
+cd /var/www/code
+
+codeconf=/var/www/code/deploy/config/code.conf.gen
+codeconfssl=/var/www/code/deploy/config/code-ssl.conf.gen
+staticconf=/var/www/code/deploy/config/soundsoftware-static.conf
+
+if [ ! -f "$codeconf" ]; then
+    echo "ERROR: Apache config file $codeconf not found - has the database secret been interpolated from its input file correctly?"
+    exit 2
+fi
+
+if [ ! -f /etc/apache2/sites-enabled/10-code.conf ]; then
+    
+    rm -f /etc/apache2/sites-enabled/000-default.conf
+
+    cp deploy/config/passenger.conf /etc/apache2/mods-available/
+    cp deploy/config/passenger.load /etc/apache2/mods-available/
+    cp deploy/config/perl.conf      /etc/apache2/mods-available/
+
+    ln -s ../mods-available/passenger.conf   /etc/apache2/mods-enabled/
+    ln -s ../mods-available/passenger.load   /etc/apache2/mods-enabled/
+    ln -s ../mods-available/perl.conf        /etc/apache2/mods-enabled/
+    ln -s ../mods-available/expires.load     /etc/apache2/mods-enabled/
+    ln -s ../mods-available/rewrite.load     /etc/apache2/mods-enabled/
+    ln -s ../mods-available/cgi.load         /etc/apache2/mods-enabled/
+    ln -s ../mods-available/ssl.load         /etc/apache2/mods-enabled/
+    ln -s ../mods-available/auth_digest.load /etc/apache2/mods-enabled/
+
+    cp "$codeconf" /etc/apache2/sites-available/code.conf
+    cp "$codeconfssl" /etc/apache2/sites-available/code-ssl.conf
+    cp "$staticconf" /etc/apache2/sites-available/soundsoftware-static.conf
+    ln -s ../sites-available/code.conf /etc/apache2/sites-enabled/10-code.conf
+
+    apache2ctl configtest
+
+fi
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/110-hg-testdir.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -e
+
+# In case we are running without a properly mounted /var/hg directory,
+# check for the existence of one repo and, if absent, attempt to clone
+# it so that we have something we can serve for test purposes.
+
+if [ ! -d /var/hg/vamp-plugin-sdk ]; then
+    echo "Cloning vamp-plugin-sdk repo for testing..."
+    cd /var/hg
+    hg clone https://code.soundsoftware.ac.uk/hg/vamp-plugin-sdk
+    chown -R www-data.code vamp-plugin-sdk
+fi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/115-other-dirs.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+set -e
+
+# Initialise directories used as targets for cron activity (if they
+# don't already exist)
+
+# Reminder: the webapp directory is owned and run by the code user, in
+# group www-data. The repos and other things served directly are
+# usually the other way around -- owned by the www-data user, in group
+# code. I don't recall whether there is a good reason for this.
+
+for dir in \
+    /var/files/backups \
+    /var/doc \
+    /var/files/code \
+    /var/files/git-mirror ; do
+    if [ ! -d "$dir" ]; then
+        mkdir -p "$dir"
+        chown -R code.www-data "$dir"
+        chmod g+s "$dir"
+    fi
+done
+
+for dir in \
+    /var/mirror ; do
+    if [ ! -d "$dir" ]; then
+        mkdir -p "$dir"
+        chown -R www-data.code "$dir"
+        chmod g+s "$dir"
+    fi
+done
+
+if [ ! -e /var/www/code/files ]; then
+    ln -s /var/files/code /var/www/code/files
+fi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/120-docgen.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+set -e
+
+# Copy docgen scripts, including the generated scripts with
+# interpolated API key etc, to the directory they will be run from.
+
+# These are run from cron jobs to do the (currently daily) update of
+# extracted documentation from Doxygen, Javadoc, and MATLAB, and to
+# enable displaying them with the redmine_embedded plugin. (The API
+# key is needed to automatically switch on the embedded module for a
+# project the first time its docs are extracted.)
+
+cd /var/www/code
+
+mkdir -p docgen
+
+for file in \
+    doxysafe.pl \
+    extract-doxygen.sh \
+    extract-javadoc.sh \
+    extract-matlabdocs.sh \
+    matlab-docs.conf \
+    matlab-docs-credit.html \
+    matlab-docs.pl ; do
+    if [ ! -f docgen/"$file" ]; then
+        cp extra/soundsoftware/"$file" docgen/
+    fi
+done
+
+for file in \
+    extract-docs.sh ; do
+    if [ ! -f docgen/"$file" ]; then
+        cp deploy/config/"$file".gen docgen/"$file"
+    fi
+done
+
+chown code.www-data docgen/*
+chmod +x docgen/*.sh
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/130-reposman.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+set -e
+
+# Copy reposman (repository manager) scripts, including the generated
+# scripts with interpolated API key etc, to the directory they will be
+# run from.
+
+# There are two sets of scripts here:
+#
+# 1. The reposman script that plods through all the projects that have
+# repositories defined, creates those repositories on disc, and
+# registers their locations with the projects. This happens often,
+# currently every minute.
+#
+# 2. The external repo management script that plods through all the
+# projects that have external repositories defined, clones or updates
+# those external repos to their local locations, and if necessary
+# registers them with the projects. This happens less often, currently
+# every hour.
+
+cd /var/www/code
+
+mkdir -p reposman
+
+for file in \
+    convert-external-repos.rb \
+    reposman-soundsoftware.rb \
+    run-hginit.sh \
+    update-external-repo.sh ; do
+    if [ ! -f reposman/"$file" ]; then
+        cp extra/soundsoftware/"$file" reposman/
+    fi
+done
+
+for file in \
+    run-external.sh \
+    run-reposman.sh ; do
+    if [ ! -f reposman/"$file" ]; then
+        cp deploy/config/"$file".gen reposman/"$file"
+    fi
+done
+
+chown code.www-data reposman/*
+chmod +x reposman/*.sh
+chmod +x reposman/*.rb
+
+touch /var/log/reposman.log
+touch /var/log/external-repos.log
+chown www-data.code /var/log/reposman.log
+chown www-data.code /var/log/external-repos.log
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/140-cron.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+set -e
+
+# Copy cron scripts to the appropriate destinations
+
+cd /var/www/code
+
+if [ ! -d /etc/cron.minutely ]; then
+    mkdir -p /etc/cron.minutely
+    echo '*  *    * * *   root    test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.minutely )' >> /etc/crontab
+fi
+
+for t in minutely hourly daily monthly; do
+    for s in deploy/config/cron.$t/[0-9]* ; do
+        name=$(basename $s)
+        dest="/etc/cron.$t/$name"
+        if [ ! -f "$dest" ]; then
+            cp "$s" "$dest"
+            chmod +x "$dest"
+        fi
+    done
+done
+
+
+             
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/150-logrotate.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+cd /var/www/code
+cp deploy/config/logrotate.conf /etc/logrotate.conf
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/190-reminders.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+set -e
+
+# Print reminders of the things that we haven't covered in the deploy
+# scripts
+
+cat <<EOF
+
+*** APACHE SSL CONFIGURATION
+
+    The provisioning scripts set up a simple HTTP site only. Refer to
+    code-ssl.conf for an example HTTPS configuration (you will of
+    course need to provide the key/cert files).
+
+*** CRON SCRIPTS
+
+    A number of cron scripts have been installed. It might be no bad
+    thing to prime and test them by running them all once now. Some of
+    the services tested by the smoke test script (below) may depend on
+    their having run. Use deploy/any/run-cron-scripts.sh for this.
+
+*** SMOKE TEST
+
+    There is a smoke test script in the deploy/test directory. That
+    is, a quick automated acceptance test that checks that basic
+    services are returning successful HTTP codes. Consider running it
+    against this server from another host, i.e. not just localhost.
+
+*** EMAIL
+
+    Outgoing email is required for notifications, but has not been
+    configured as part of this provisioning setup. You'll need to set
+    up the server's outgoing mail support and also edit the application
+    email settings in config/configuration.yml.
+
+*** STATIC FRONT PAGE
+
+    We have set up only the code/repository site -- if you want a
+    separate front page, remember to configure that!
+
+EOF
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/provision.d/200-apache-start.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -e
+
+# Last action: check & start the webserver
+
+apache2ctl configtest
+
+apache2ctl restart
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/test/smoketest.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,95 @@
+#!/bin/bash
+
+# The big problem with this test script is that it needs the cron
+# scripts that generate some of this stuff to have been run at least
+# once
+
+usage() {
+    echo 1>&2
+    echo "Usage: $0 <uri-base>" 1>&2
+    echo 1>&2
+    echo "  e.g. $0 https://code.soundsoftware.ac.uk" 1>&2
+    echo "    or $0 http://localhost:8080" 1>&2
+    echo 1>&2
+    exit 2
+}
+
+uribase="$1"
+if [ -z "$uribase" ]; then
+    usage
+fi
+
+set -eu
+
+# A project known to exist, be public, and have a repository
+project_with_repo=vamp-plugin-sdk
+
+# A project known to exist, be public, and have embedded documentation
+project_with_docs=vamp-plugin-sdk
+
+# A project known to exist, be public, and have a bibliography
+project_with_biblio=sonic-visualiser
+
+# A project known not to exist
+nonexistent_project=nonexistent-project
+
+# A file for download known to exist
+file_for_download=/attachments/download/2210/vamp-plugin-sdk-2.7.1-binaries-osx.tar.gz
+
+tried=0
+succeeded=0
+
+mydir=$(dirname "$0")
+
+try() {
+    mkdir -p "$mydir/output"
+    origin=$(pwd)
+    cd "$mydir/output"
+    path="$1"
+    description="$2"
+    expected="$3"
+    url="$uribase$path"
+    echo
+    echo "Trying \"$description\" [$url]..."
+    echo
+    if wget "$url" ; then
+        echo "+++ Succeeded"
+        succeeded=$(($succeeded + 1))
+    else
+        returned="$?"
+        if [ "$returned" = "$expected" ]; then
+            echo "+++ Succeeded [returned expected code $expected]"
+            succeeded=$(($succeeded + 1))
+        else
+            echo "--- FAILED with return code $returned"
+        fi
+    fi
+    tried=$(($tried + 1))
+    cd "$origin"
+}
+
+assert() {
+    try "$1" "$2" 0
+}
+
+fail() {
+    try "$1" "$2" "$3"
+}
+
+assert "/" "Front page"
+assert "/projects/$project_with_repo" "Project page"
+assert "/projects/$project_with_biblio" "Project page with bibliography"
+assert "/projects/$project_with_repo/repository" "Repository page"
+assert "/hg/$project_with_repo" "Mercurial repo"
+assert "/projects/$project_with_docs/embedded" "Project documentation page (from docgen cron script)"
+assert "/git/$project_with_repo/info/refs" "Git repo mirror"
+assert "$file_for_download" "File for download"
+
+# we expect this to return an http auth requirement, not a 404 - the
+# value 6 is wget's return code for auth failure
+fail "/hg/$nonexistent_project" "Mercurial repo" 6
+
+echo
+echo "Passed $succeeded of $tried"
+echo
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/vagrant/Vagrantfile	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,9 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+Vagrant.configure("2") do |config|
+  config.vm.box = "ubuntu/xenial64"
+  config.vm.network "forwarded_port", guest: 80, host: 8080
+  config.vm.synced_folder "../..", "/code-to-deploy"
+  config.vm.provision :shell, path: "vagrant-provision.sh"
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/vagrant/start.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+mydir=$(dirname "$0")
+. "$mydir"/../any/prepare.sh
+
+cd "$managerdir"
+vagrant up
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy/vagrant/vagrant-provision.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+set -e
+
+for f in /code-to-deploy/deploy/provision.d/[0-9]*.sh ; do
+    case "$f" in
+        *~) ;;
+        *) echo "Running provisioning script: $f"
+           /bin/bash "$f";;
+    esac
+done
+
+echo "All provisioning scripts complete"
--- a/extra/fast-export/Makefile	Thu Feb 04 08:47:09 2016 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-SVN ?= /usr/local/svn
-APR_INCLUDES ?= /usr/include/apr-1.0
-CFLAGS += -I${APR_INCLUDES} -I${SVN}/include/subversion-1 -pipe -O2 -std=c99
-LDFLAGS += -L${SVN}/lib -lsvn_fs-1 -lsvn_repos-1
-
-all: svn-fast-export svn-archive
-
-svn-fast-export: svn-fast-export.c
-svn-archive: svn-archive.c
-
-.PHONY: clean
-
-clean:
-	rm -rf svn-fast-export svn-archive
--- a/extra/fast-export/hg-fast-export.py	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/fast-export/hg-fast-export.py	Thu Feb 22 09:04:43 2018 +0000
@@ -145,14 +145,23 @@
   if max>cfg_export_boundary:
     sys.stderr.write('Exported %d/%d files\n' % (count,max))
 
-def sanitize_name(name,what="branch"):
+def sanitize_name(name,what="branch", mapping={}):
   """Sanitize input roughly according to git-check-ref-format(1)"""
 
+  # NOTE: Do not update this transform to work around
+  # incompatibilities on your platform. If you change it and it starts
+  # modifying names which previously were not touched it will break
+  # preexisting setups which are doing incremental imports.
+  #
+  # Use the -B and -T options to mangle branch and tag names
+  # instead. If you have a source repository where this is too much
+  # work to do manually, write a tool that does it for you.
+
   def dot(name):
     if name[0] == '.': return '_'+name[1:]
     return name
 
-  n=name
+  n=mapping.get(name,name)
   p=re.compile('([[ ~^:?\\\\*]|\.\.)')
   n=p.sub('_', n)
   if n[-1] in ('/', '.'): n=n[:-1]+'_'
@@ -170,11 +179,11 @@
   return filename
 
 def export_commit(ui,repo,revision,old_marks,max,count,authors,
-                  branchesmap,sob,brmap,hgtags,notes,encoding='',fn_encoding=''):
+                  branchesmap,sob,brmap,hgtags,encoding='',fn_encoding=''):
   def get_branchname(name):
     if brmap.has_key(name):
       return brmap[name]
-    n=sanitize_name(branchesmap.get(name,name))
+    n=sanitize_name(name, "branch", branchesmap)
     brmap[name]=n
     return n
 
@@ -235,28 +244,34 @@
   export_file_contents(ctx,man,changed,hgtags,fn_encoding)
   wr()
 
-  count=checkpoint(count)
-  count=generate_note(user,time,timezone,revision,ctx,count,notes)
-  return count
+  return checkpoint(count)
 
-def generate_note(user,time,timezone,revision,ctx,count,notes):
-  if not notes:
-    return count
+def export_note(ui,repo,revision,count,authors,encoding,is_first):
+  (revnode,_,user,(time,timezone),_,_,_,_)=get_changeset(ui,repo,revision,authors,encoding)
+
+  parents = [p for p in repo.changelog.parentrevs(revision) if p >= 0]
+
   wr('commit refs/notes/hg')
   wr('committer %s %d %s' % (user,time,timezone))
   wr('data 0')
+  if is_first:
+    wr('from refs/notes/hg^0')
   wr('N inline :%d' % (revision+1))
-  hg_hash=ctx.hex()
+  hg_hash=repo.changectx(str(revision)).hex()
   wr('data %d' % (len(hg_hash)))
   wr_no_nl(hg_hash)
   wr()
   return checkpoint(count)
-  
+
+  wr('data %d' % (len(desc)+1)) # wtf?
+  wr(desc)
+  wr()
+
 def export_tags(ui,repo,old_marks,mapping_cache,count,authors,tagsmap):
   l=repo.tagslist()
   for tag,node in l:
     # Remap the branch name
-    tag=sanitize_name(tagsmap.get(tag,tag),"tag")
+    tag=sanitize_name(tag,"tag",tagsmap)
     # ignore latest revision
     if tag=='tip': continue
     # ignore tags to nodes that are missing (ie, 'in the future')
@@ -281,6 +296,7 @@
 def load_mapping(name, filename):
   cache={}
   if not os.path.exists(filename):
+    sys.stderr.write('Could not open mapping file [%s]\n' % (filename))
     return cache
   f=open(filename,'r')
   l=0
@@ -311,7 +327,7 @@
       break
   return tip
 
-def verify_heads(ui,repo,cache,force):
+def verify_heads(ui,repo,cache,force,branchesmap):
   branches={}
   for bn, heads in repo.branchmap().iteritems():
     branches[bn] = branchtip(repo, heads)
@@ -321,8 +337,9 @@
   # get list of hg's branches to verify, don't take all git has
   for _,_,b in l:
     b=get_branch(b)
-    sha1=get_git_sha1(b)
-    c=cache.get(b)
+    sanitized_name=sanitize_name(b,"branch",branchesmap)
+    sha1=get_git_sha1(sanitized_name)
+    c=cache.get(sanitized_name)
     if sha1!=c:
       sys.stderr.write('Error: Branch [%s] modified outside hg-fast-export:'
         '\n%s (repo) != %s (cache)\n' % (b,sha1,c))
@@ -343,6 +360,10 @@
 def hg2git(repourl,m,marksfile,mappingfile,headsfile,tipfile,
            authors={},branchesmap={},tagsmap={},
            sob=False,force=False,hgtags=False,notes=False,encoding='',fn_encoding=''):
+  def check_cache(filename, contents):
+    if len(contents) == 0:
+      sys.stderr.write('Warning: %s does not contain any data, this will probably make an incremental import fail\n' % filename)
+
   _max=int(m)
 
   old_marks=load_cache(marksfile,lambda s: int(s)-1)
@@ -350,9 +371,15 @@
   heads_cache=load_cache(headsfile)
   state_cache=load_cache(tipfile)
 
+  if len(state_cache) != 0:
+    for (name, data) in [(marksfile, old_marks),
+                         (mappingfile, mapping_cache),
+                         (headsfile, state_cache)]:
+      check_cache(name, data)
+
   ui,repo=setup_repo(repourl)
 
-  if not verify_heads(ui,repo,heads_cache,force):
+  if not verify_heads(ui,repo,heads_cache,force,branchesmap):
     return 1
 
   try:
@@ -374,7 +401,10 @@
   brmap={}
   for rev in range(min,max):
     c=export_commit(ui,repo,rev,old_marks,max,c,authors,branchesmap,
-                    sob,brmap,hgtags,notes,encoding,fn_encoding)
+                    sob,brmap,hgtags,encoding,fn_encoding)
+  if notes:
+    for rev in range(min,max):
+      c=export_note(ui,repo,rev,c,authors, encoding, rev == min and min != 0)
 
   state_cache['tip']=max
   state_cache['repo']=repourl
--- a/extra/fast-export/hg-fast-export.sh	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/fast-export/hg-fast-export.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -48,8 +48,16 @@
       echo "$LONG_USAGE"
       exit 0
 esac
-. "$(git --exec-path)/git-sh-setup"
-cd_to_toplevel
+
+IS_BARE=$(git rev-parse --is-bare-repository) \
+    || (echo "Could not find git repo" ; exit 1)
+if test "z$IS_BARE" != ztrue; then
+   # This is not a bare repo, cd to the toplevel
+   TOPLEVEL=$(git rev-parse --show-toplevel) \
+       || (echo "Could not find git repo toplevel" ; exit 1)
+   cd $TOPLEVEL || exit 1
+fi
+GIT_DIR=$(git rev-parse --git-dir) || (echo "Could not find git repo" ; exit 1)
 
 while case "$#" in 0) break ;; esac
 do
--- a/extra/fast-export/hg-reset.sh	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/fast-export/hg-reset.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -24,8 +24,15 @@
 	-r	Mercurial repository to use
 "
 
-. "$(git --exec-path)/git-sh-setup"
-cd_to_toplevel
+IS_BARE=$(git rev-parse --is-bare-repository) \
+    || (echo "Could not find git repo" ; exit 1)
+if test "z$IS_BARE" != ztrue; then
+   # This is not a bare repo, cd to the toplevel
+   TOPLEVEL=$(git rev-parse --show-toplevel) \
+       || (echo "Could not find git repo toplevel" ; exit 1)
+   cd $TOPLEVEL || exit 1
+fi
+GIT_DIR=$(git rev-parse --git-dir) || (echo "Could not find git repo" ; exit 1)
 
 while case "$#" in 0) break ;; esac
 do
--- a/extra/fast-export/hg2git.py	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/fast-export/hg2git.py	Thu Feb 22 09:04:43 2018 +0000
@@ -7,6 +7,7 @@
 import re
 import os
 import sys
+import subprocess
 
 # default git branch name
 cfg_master='master'
@@ -105,12 +106,10 @@
 def get_git_sha1(name,type='heads'):
   try:
     # use git-rev-parse to support packed refs
-    cmd="git rev-parse --verify refs/%s/%s 2>%s" % (type,name,os.devnull)
-    p=os.popen(cmd)
-    l=p.readline()
-    p.close()
+    ref="refs/%s/%s" % (type,name)
+    l=subprocess.check_output(["git", "rev-parse", "--verify", "--quiet", ref])
     if l == None or len(l) == 0:
       return None
     return l[0:40]
-  except IOError:
+  except subprocess.CalledProcessError:
     return None
--- a/extra/fast-export/svn-archive.c	Thu Feb 04 08:47:09 2016 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,240 +0,0 @@
-/*
- * svn-archive.c
- * ----------
- *  Walk through a given revision of a local Subversion repository and export 
- *  all of the contents as a tarfile.
- *
- * Author: Chris Lee <clee@kde.org>
- * License: MIT <http://www.opensource.org/licenses/mit-license.php>
- */
-
-#define _XOPEN_SOURCE
-#include <unistd.h>
-#include <string.h>
-#include <stdio.h>
-#include <time.h>
-
-#ifndef PATH_MAX
-#define PATH_MAX 4096
-#endif
-
-#include <apr_general.h>
-#include <apr_strings.h>
-#include <apr_getopt.h>
-#include <apr_lib.h>
-
-#include <svn_types.h>
-#include <svn_pools.h>
-#include <svn_repos.h>
-#include <svn_fs.h>
-
-#undef SVN_ERR
-#define SVN_ERR(expr) SVN_INT_ERR(expr)
-#define apr_sane_push(arr, contents) *(char **)apr_array_push(arr) = contents
-
-#define TRUNK "/trunk"
-
-static time_t archive_time;
-
-time_t get_epoch(char *svn_date)
-{
-    struct tm tm = {0};
-    char *date = malloc(strlen(svn_date) * sizeof(char *));
-    strncpy(date, svn_date, strlen(svn_date) - 8);
-    strptime(date, "%Y-%m-%dT%H:%M:%S", &tm);
-    free(date);
-    return mktime(&tm);
-}
-
-int tar_header(apr_pool_t *pool, char *path, char *node, size_t f_size)
-{
-    char          buf[512];
-    unsigned int  i, checksum;
-    svn_boolean_t is_dir;
-
-    memset(buf, 0, sizeof(buf));
-
-    if ((strlen(path) == 0) && (strlen(node) == 0)) {
-        return 0;
-    }
-
-    if (strlen(node) == 0) {
-        is_dir = 1;
-    } else {
-        is_dir = 0;
-    }
-
-    if (strlen(path) == 0) {
-        strncpy(buf, apr_psprintf(pool, "%s", node), 99);
-    } else if (strlen(path) + strlen(node) < 100) {
-        strncpy(buf, apr_psprintf(pool, "%s/%s", path+1, node), 99);
-    } else {
-        fprintf(stderr, "really long file path...\n");
-        strncpy(&buf[0], node, 99);
-        strncpy(&buf[345], path+1, 154);
-    }
-
-    strncpy(&buf[100], apr_psprintf(pool, "%07o", (is_dir ? 0755 : 0644)), 7);
-    strncpy(&buf[108], apr_psprintf(pool, "%07o", 1000), 7);
-    strncpy(&buf[116], apr_psprintf(pool, "%07o", 1000), 7);
-    strncpy(&buf[124], apr_psprintf(pool, "%011lo", f_size), 11);
-    strncpy(&buf[136], apr_psprintf(pool, "%011lo", archive_time), 11);
-    strncpy(&buf[156], (is_dir ? "5" : "0"), 1);
-    strncpy(&buf[257], "ustar  ", 8);
-    strncpy(&buf[265], "clee", 31);
-    strncpy(&buf[297], "clee", 31);
-    // strncpy(&buf[329], apr_psprintf(pool, "%07o", 0), 7);
-    // strncpy(&buf[337], apr_psprintf(pool, "%07o", 0), 7);
-
-    strncpy(&buf[148], "        ", 8);
-    checksum = 0;
-    for (i = 0; i < sizeof(buf); i++) {
-        checksum += buf[i];
-    }
-    strncpy(&buf[148], apr_psprintf(pool, "%07o", checksum & 0x1fffff), 7);
-
-    fwrite(buf, sizeof(char), sizeof(buf), stdout);
-
-    return 0;
-}
-
-int tar_footer()
-{
-    char block[1024];
-    memset(block, 0, sizeof(block));
-    fwrite(block, sizeof(char), sizeof(block), stdout);
-}
-
-int dump_blob(svn_fs_root_t *root, char *prefix, char *path, char *node, apr_pool_t *pool)
-{
-    char           *full_path, buf[512];
-    apr_size_t     len;
-    svn_stream_t   *stream;
-    svn_filesize_t stream_length;
-
-    full_path = apr_psprintf(pool, "%s%s/%s", prefix, path, node);
-
-    SVN_ERR(svn_fs_file_length(&stream_length, root, full_path, pool));
-    SVN_ERR(svn_fs_file_contents(&stream, root, full_path, pool));
-
-    tar_header(pool, path, node, stream_length);
-
-    do {
-        len = sizeof(buf);
-        memset(buf, '\0', sizeof(buf));
-        SVN_ERR(svn_stream_read(stream, buf, &len));
-        fwrite(buf, sizeof(char), sizeof(buf), stdout);
-    } while (len == sizeof(buf));
-
-    return 0;
-}
-
-int dump_tree(svn_fs_root_t *root, char *prefix, char *path, apr_pool_t *pool)
-{
-    const void       *key;
-    void             *val;
-    char             *node, *subpath, *full_path;
-
-    apr_pool_t       *subpool;
-    apr_hash_t       *dir_entries;
-    apr_hash_index_t *i;
-
-    svn_boolean_t    is_dir;
-
-    tar_header(pool, path, "", 0);
-
-    SVN_ERR(svn_fs_dir_entries(&dir_entries, root, apr_psprintf(pool, "%s/%s", prefix, path), pool));
-
-    subpool = svn_pool_create(pool);
-
-    for (i = apr_hash_first(pool, dir_entries); i; i = apr_hash_next(i)) {
-        svn_pool_clear(subpool);
-        apr_hash_this(i, &key, NULL, &val);
-        node = (char *)key;
-
-        subpath = apr_psprintf(subpool, "%s/%s", path, node);
-        full_path = apr_psprintf(subpool, "%s%s", prefix, subpath);
-
-        svn_fs_is_dir(&is_dir, root, full_path, subpool);
-
-        if (is_dir) {
-            dump_tree(root, prefix, subpath, subpool);
-        } else {
-            dump_blob(root, prefix, path, node, subpool);
-        }
-    }
-
-    svn_pool_destroy(subpool);
-
-    return 0;
-}
-
-int crawl_filesystem(char *repos_path, char *root_path, apr_pool_t *pool)
-{
-    char                 *path;
-
-    apr_hash_t           *props;
-    apr_hash_index_t     *i;
-
-    svn_repos_t          *repos;
-    svn_fs_t             *fs;
-    svn_string_t         *svndate;
-    svn_revnum_t         youngest_rev, export_rev;
-    svn_fs_root_t        *fs_root;
-
-    SVN_ERR(svn_fs_initialize(pool));
-    SVN_ERR(svn_repos_open(&repos, repos_path, pool));
-    if ((fs = svn_repos_fs(repos)) == NULL)
-      return -1;
-    SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
-
-    export_rev = youngest_rev;
-
-    SVN_ERR(svn_fs_revision_root(&fs_root, fs, export_rev, pool));
-    SVN_ERR(svn_fs_revision_proplist(&props, fs, export_rev, pool));
-
-    svndate = apr_hash_get(props, "svn:date", APR_HASH_KEY_STRING);
-    archive_time = get_epoch((char *)svndate->data);
-
-    fprintf(stderr, "Exporting archive of r%ld... \n", export_rev);
-
-    dump_tree(fs_root, root_path, "", pool);
-
-    tar_footer();
-
-    fprintf(stderr, "done!\n");
-
-    return 0;
-}
-
-int main(int argc, char *argv[])
-{
-    apr_pool_t           *pool;
-    apr_getopt_t         *options;
-
-    apr_getopt_option_t long_options[] = {
-        { "help",     'h', 0 },
-        { "prefix",   'p', 0 },
-        { "basename", 'b', 0 },
-        { "revision", 'r', 0 },
-        { NULL,       0,   0 }
-    };
-
-    if (argc < 2) {
-        fprintf(stderr, "usage: %s REPOS_PATH [prefix]\n", argv[0]);
-        return -1;
-    }
-
-    if (apr_initialize() != APR_SUCCESS) {
-        fprintf(stderr, "You lose at apr_initialize().\n");
-        return -1;
-    }
-
-    pool = svn_pool_create(NULL);
-
-    crawl_filesystem(argv[1], (argc == 3 ? argv[2] : TRUNK), pool);
-
-    apr_terminate();
-
-    return 0;
-}
--- a/extra/fast-export/svn-fast-export.c	Thu Feb 04 08:47:09 2016 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,187 +0,0 @@
-/*
- * svn-fast-export.c
- * ----------
- *  Walk through each revision of a local Subversion repository and export it
- *  in a stream that git-fast-import can consume.
- *
- * Author: Chris Lee <clee@kde.org>
- * License: MIT <http://www.opensource.org/licenses/mit-license.php>
- */
-
-#define _XOPEN_SOURCE
-#include <unistd.h>
-#include <string.h>
-#include <stdio.h>
-#include <time.h>
-
-#ifndef PATH_MAX
-#define PATH_MAX 4096
-#endif
-
-#include <apr_lib.h>
-#include <apr_getopt.h>
-#include <apr_general.h>
-
-#include <svn_fs.h>
-#include <svn_repos.h>
-#include <svn_pools.h>
-#include <svn_types.h>
-
-#undef SVN_ERR
-#define SVN_ERR(expr) SVN_INT_ERR(expr)
-#define apr_sane_push(arr, contents) *(char **)apr_array_push(arr) = contents
-
-#define TRUNK "/trunk/"
-
-time_t get_epoch(char *svn_date)
-{
-    struct tm tm = {0};
-    char *date = malloc(strlen(svn_date) * sizeof(char *));
-    strncpy(date, svn_date, strlen(svn_date) - 8);
-    strptime(date, "%Y-%m-%dT%H:%M:%S", &tm);
-    free(date);
-    return mktime(&tm);
-}
-
-int dump_blob(svn_fs_root_t *root, char *full_path, apr_pool_t *pool)
-{
-    apr_size_t     len;
-    svn_stream_t   *stream, *outstream;
-    svn_filesize_t stream_length;
-
-    SVN_ERR(svn_fs_file_length(&stream_length, root, full_path, pool));
-    SVN_ERR(svn_fs_file_contents(&stream, root, full_path, pool));
-
-    fprintf(stdout, "data %lu\n", stream_length);
-    fflush(stdout);
-
-    SVN_ERR(svn_stream_for_stdout(&outstream, pool));
-    SVN_ERR(svn_stream_copy(stream, outstream, pool));
-
-    fprintf(stdout, "\n");
-    fflush(stdout);
-
-    return 0;
-}
-
-int export_revision(svn_revnum_t rev, svn_fs_t *fs, apr_pool_t *pool)
-{
-    unsigned int         mark;
-    const void           *key;
-    void                 *val;
-    char                 *path, *file_change;
-    apr_pool_t           *revpool;
-    apr_hash_t           *changes, *props;
-    apr_hash_index_t     *i;
-    apr_array_header_t   *file_changes;
-    svn_string_t         *author, *committer, *svndate, *svnlog;
-    svn_boolean_t        is_dir;
-    svn_fs_root_t        *fs_root;
-    svn_fs_path_change_t *change;
-
-    fprintf(stderr, "Exporting revision %ld... ", rev);
-
-    SVN_ERR(svn_fs_revision_root(&fs_root, fs, rev, pool));
-    SVN_ERR(svn_fs_paths_changed(&changes, fs_root, pool));
-    SVN_ERR(svn_fs_revision_proplist(&props, fs, rev, pool));
-
-    revpool = svn_pool_create(pool);
-
-    file_changes = apr_array_make(pool, apr_hash_count(changes), sizeof(char *));
-    mark = 1;
-    for (i = apr_hash_first(pool, changes); i; i = apr_hash_next(i)) {
-        svn_pool_clear(revpool);
-        apr_hash_this(i, &key, NULL, &val);
-        path = (char *)key;
-        change = (svn_fs_path_change_t *)val;
-
-        SVN_ERR(svn_fs_is_dir(&is_dir, fs_root, path, revpool));
-
-        if (is_dir || strncmp(TRUNK, path, strlen(TRUNK))) {
-            continue;
-        }
-
-        if (change->change_kind == svn_fs_path_change_delete) {
-            apr_sane_push(file_changes, (char *)svn_string_createf(pool, "D %s", path + strlen(TRUNK))->data);
-        } else {
-            apr_sane_push(file_changes, (char *)svn_string_createf(pool, "M 644 :%u %s", mark, path + strlen(TRUNK))->data);
-            fprintf(stdout, "blob\nmark :%u\n", mark++);
-            dump_blob(fs_root, (char *)path, revpool);
-        }
-    }
-
-    if (file_changes->nelts == 0) {
-        fprintf(stderr, "skipping.\n");
-        svn_pool_destroy(revpool);
-        return 0;
-    }
-
-    author = apr_hash_get(props, "svn:author", APR_HASH_KEY_STRING);
-    if (svn_string_isempty(author))
-        author = svn_string_create("nobody", pool);
-    svndate = apr_hash_get(props, "svn:date", APR_HASH_KEY_STRING);
-    svnlog = apr_hash_get(props, "svn:log", APR_HASH_KEY_STRING);
-
-    fprintf(stdout, "commit refs/heads/master\n");
-    fprintf(stdout, "committer %s <%s@localhost> %ld -0000\n", author->data, author->data, get_epoch((char *)svndate->data));
-    fprintf(stdout, "data %d\n", svnlog->len);
-    fputs(svnlog->data, stdout);
-    fprintf(stdout, "\n");
-    fputs(apr_array_pstrcat(pool, file_changes, '\n'), stdout);
-    fprintf(stdout, "\n\n");
-    fflush(stdout);
-
-    svn_pool_destroy(revpool);
-
-    fprintf(stderr, "done!\n");
-
-    return 0;
-}
-
-int crawl_revisions(char *repos_path)
-{
-    apr_pool_t   *pool, *subpool;
-    svn_fs_t     *fs;
-    svn_repos_t  *repos;
-    svn_revnum_t youngest_rev, min_rev, max_rev, rev;
-
-    pool = svn_pool_create(NULL);
-
-    SVN_ERR(svn_fs_initialize(pool));
-    SVN_ERR(svn_repos_open(&repos, repos_path, pool));
-    if ((fs = svn_repos_fs(repos)) == NULL)
-        return -1;
-    SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
-
-    min_rev = 1;
-    max_rev = youngest_rev;
-
-    subpool = svn_pool_create(pool);
-    for (rev = min_rev; rev <= max_rev; rev++) {
-        svn_pool_clear(subpool);
-        export_revision(rev, fs, subpool);
-    }
-
-    svn_pool_destroy(pool);
-
-    return 0;
-}
-
-int main(int argc, char *argv[])
-{
-    if (argc != 2) {
-        fprintf(stderr, "usage: %s REPOS_PATH\n", argv[0]);
-        return -1;
-    }
-
-    if (apr_initialize() != APR_SUCCESS) {
-        fprintf(stderr, "You lose at apr_initialize().\n");
-        return -1;
-    }
-
-    crawl_revisions(argv[1]);
-
-    apr_terminate();
-
-    return 0;
-}
--- a/extra/fast-export/svn-fast-export.py	Thu Feb 04 08:47:09 2016 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,160 +0,0 @@
-#!/usr/bin/python
-#
-# svn-fast-export.py
-# ----------
-#  Walk through each revision of a local Subversion repository and export it
-#  in a stream that git-fast-import can consume.
-#
-# Author: Chris Lee <clee@kde.org>
-# License: MIT <http://www.opensource.org/licenses/mit-license.php>
-
-trunk_path = '/trunk/'
-branches_path = '/branches/'
-tags_path = '/tags/'
-
-first_rev = 1
-final_rev = 0
-
-import sys, os.path
-from optparse import OptionParser
-from time import mktime, strptime
-from svn.fs import svn_fs_file_length, svn_fs_file_contents, svn_fs_is_dir, svn_fs_revision_root, svn_fs_youngest_rev, svn_fs_revision_proplist, svn_fs_paths_changed
-from svn.core import svn_pool_create, svn_pool_clear, svn_pool_destroy, svn_stream_for_stdout, svn_stream_copy, svn_stream_close, run_app
-from svn.repos import svn_repos_open, svn_repos_fs
-
-ct_short = ['M', 'A', 'D', 'R', 'X']
-
-def dump_file_blob(root, full_path, pool):
-    stream_length = svn_fs_file_length(root, full_path, pool)
-    stream = svn_fs_file_contents(root, full_path, pool)
-    sys.stdout.write("data %s\n" % stream_length)
-    sys.stdout.flush()
-    ostream = svn_stream_for_stdout(pool)
-    svn_stream_copy(stream, ostream, pool)
-    svn_stream_close(ostream)
-    sys.stdout.write("\n")
-
-
-def export_revision(rev, repo, fs, pool):
-    sys.stderr.write("Exporting revision %s... " % rev)
-
-    revpool = svn_pool_create(pool)
-    svn_pool_clear(revpool)
-
-    # Open a root object representing the youngest (HEAD) revision.
-    root = svn_fs_revision_root(fs, rev, revpool)
-
-    # And the list of what changed in this revision.
-    changes = svn_fs_paths_changed(root, revpool)
-
-    i = 1
-    marks = {}
-    file_changes = []
-
-    for path, change_type in changes.iteritems():
-        c_t = ct_short[change_type.change_kind]
-        if svn_fs_is_dir(root, path, revpool):
-            continue
-
-        if not path.startswith(trunk_path):
-            # We don't handle branches. Or tags. Yet.
-            pass
-        else:
-            if c_t == 'D':
-                file_changes.append("D %s" % path.replace(trunk_path, ''))
-            else:
-                marks[i] = path.replace(trunk_path, '')
-                file_changes.append("M 644 :%s %s" % (i, marks[i]))
-                sys.stdout.write("blob\nmark :%s\n" % i)
-                dump_file_blob(root, path, revpool)
-                i += 1
-
-    # Get the commit author and message
-    props = svn_fs_revision_proplist(fs, rev, revpool)
-
-    # Do the recursive crawl.
-    if props.has_key('svn:author'):
-        author = "%s <%s@localhost>" % (props['svn:author'], props['svn:author'])
-    else:
-        author = 'nobody <nobody@localhost>'
-
-    if len(file_changes) == 0:
-        svn_pool_destroy(revpool)
-        sys.stderr.write("skipping.\n")
-        return
-
-    svndate = props['svn:date'][0:-8]
-    commit_time = mktime(strptime(svndate, '%Y-%m-%dT%H:%M:%S'))
-    sys.stdout.write("commit refs/heads/master\n")
-    sys.stdout.write("committer %s %s -0000\n" % (author, int(commit_time)))
-    sys.stdout.write("data %s\n" % len(props['svn:log']))
-    sys.stdout.write(props['svn:log'])
-    sys.stdout.write("\n")
-    sys.stdout.write('\n'.join(file_changes))
-    sys.stdout.write("\n\n")
-
-    svn_pool_destroy(revpool)
-
-    sys.stderr.write("done!\n")
-
-    #if rev % 1000 == 0:
-    #    sys.stderr.write("gc: %s objects\n" % len(gc.get_objects()))
-    #    sleep(5)
-
-
-def crawl_revisions(pool, repos_path):
-    """Open the repository at REPOS_PATH, and recursively crawl all its
-    revisions."""
-    global final_rev
-
-    # Open the repository at REPOS_PATH, and get a reference to its
-    # versioning filesystem.
-    repos_obj = svn_repos_open(repos_path, pool)
-    fs_obj = svn_repos_fs(repos_obj)
-
-    # Query the current youngest revision.
-    youngest_rev = svn_fs_youngest_rev(fs_obj, pool)
-
-
-    first_rev = 1
-    if final_rev == 0:
-        final_rev = youngest_rev
-    for rev in xrange(first_rev, final_rev + 1):
-        export_revision(rev, repos_obj, fs_obj, pool)
-
-
-if __name__ == '__main__':
-    usage = '%prog [options] REPOS_PATH'
-    parser = OptionParser()
-    parser.set_usage(usage)
-    parser.add_option('-f', '--final-rev', help='Final revision to import', 
-                      dest='final_rev', metavar='FINAL_REV', type='int')
-    parser.add_option('-t', '--trunk-path', help='Path in repo to /trunk',
-                      dest='trunk_path', metavar='TRUNK_PATH')
-    parser.add_option('-b', '--branches-path', help='Path in repo to /branches',
-                      dest='branches_path', metavar='BRANCHES_PATH')
-    parser.add_option('-T', '--tags-path', help='Path in repo to /tags',
-                      dest='tags_path', metavar='TAGS_PATH')
-    (options, args) = parser.parse_args()
-
-    if options.trunk_path != None:
-        trunk_path = options.trunk_path
-    if options.branches_path != None:
-        branches_path = options.branches_path
-    if options.tags_path != None:
-        tags_path = options.tags_path
-    if options.final_rev != None:
-        final_rev = options.final_rev
-
-    if len(args) != 1:
-        parser.print_help()
-        sys.exit(2)
-
-    # Canonicalize (enough for Subversion, at least) the repository path.
-    repos_path = os.path.normpath(args[0])
-    if repos_path == '.': 
-        repos_path = ''
-
-    # Call the app-wrapper, which takes care of APR initialization/shutdown
-    # and the creation and cleanup of our top-level memory pool.
-    run_app(crawl_revisions, repos_path)
--- a/extra/soundsoftware/SoundSoftware.pm	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/soundsoftware/SoundSoftware.pm	Thu Feb 22 09:04:43 2018 +0000
@@ -32,7 +32,8 @@
 Debian/ubuntu:
 
   apt-get install libapache-dbi-perl libapache2-mod-perl2 \
-    libdbd-mysql-perl libauthen-simple-ldap-perl libio-socket-ssl-perl
+    libdbd-mysql-perl libdbd-pg-perl libio-socket-ssl-perl \
+    libauthen-simple-ldap-perl
 
 Note that LDAP support is hardcoded "on" in this script (it is
 optional in the original Redmine.pm).
@@ -179,7 +180,7 @@
     my $method = $r->method;
 
     print STDERR "SoundSoftware.pm:$$: Method: $method, uri " . $r->uri . ", location " . $r->location . "\n";
-    print STDERR "SoundSoftware.pm:$$: Accept: " . $r->headers_in->{Accept} . "\n";
+#    print STDERR "SoundSoftware.pm:$$: Accept: " . $r->headers_in->{Accept} . "\n";
 
     my $dbh = connect_database($r);
     unless ($dbh) {
@@ -248,6 +249,16 @@
 	    # case we can decide for certain to accept in this function
 	    print STDERR "SoundSoftware.pm:$$: Method is read-only, no restriction here\n";
 	    $r->set_handlers(PerlAuthenHandler => [\&OK]);
+            if (!defined $r->user or $r->user eq '') {
+                # Apache 2.4+ requires auth module to set user if no
+                # auth was needed. Note that this actually tells
+                # apache that user has been identified, so authen
+                # handler will never be called (i.e. we must not do
+                # this unless we are actually approving the auth-free
+                # access). If we don't do this, we get a 500 error
+                # here after the set_handlers call above
+                $r->user('*anon*');
+            }
 	    return OK;
 	}
 
--- a/extra/soundsoftware/extract-docs.sh	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/soundsoftware/extract-docs.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -7,15 +7,17 @@
 
 hgdir="/var/hg"
 docdir="/var/doc"
-logfile="/var/www/test-cannam/log/extract-docs.log"
+logfile="/var/www/code/log/extract-docs.log"
 
-redgrp="redmine"
+redgrp="code"
 
-apikey=""
-apischeme="https"
-apihost=""
-apiuser=""
-apipass=""
+apikey="INSERT_API_KEY_HERE"
+apischeme="INSERT_API_SCHEME_HERE"
+apihost="INSERT_API_HOST_HERE"
+
+# HTTP auth username/password for /sys api calls
+apiuser="INSERT_API_USER_HERE"
+apipass="INSERT_API_PASSWORD_HERE"
 
 progdir=$(dirname $0)
 case "$progdir" in
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/extra/soundsoftware/run-hginit.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,3 @@
+#!/bin/sh
+location="$1"
+hg init "$location" && mkdir "$location/.hg/store/data"
--- a/extra/soundsoftware/update-external-repo.sh	Thu Feb 04 08:47:09 2016 +0000
+++ b/extra/soundsoftware/update-external-repo.sh	Thu Feb 22 09:04:43 2018 +0000
@@ -125,7 +125,7 @@
 	"$hg" init "$local_repo"
     fi
     if [ -d "$project_repo_mirror/.git" ]; then
-	( cd "$local_repo" && "$hg" --config extensions.hggit= pull "$project_repo_mirror" ) && echo "$remote_repo" > "$successfile"
+	( cd "$local_repo" && "$hg" --config extensions.git= pull "$project_repo_mirror" ) && echo "$remote_repo" > "$successfile"
     else 
 	( cd "$local_repo" && "$hg" pull "$project_repo_mirror" ) && echo "$remote_repo" > "$successfile"
     fi
--- a/lib/redmine/version.rb	Thu Feb 04 08:47:09 2016 +0000
+++ b/lib/redmine/version.rb	Thu Feb 22 09:04:43 2018 +0000
@@ -14,17 +14,18 @@
 
     # Retrieves the revision from the working copy
     def self.revision
-      if File.directory?(File.join(Rails.root, '.svn'))
-        begin
-          path = Redmine::Scm::Adapters::AbstractAdapter.shell_quote(Rails.root.to_s)
-          if `svn info --xml #{path}` =~ /revision="(\d+)"/
-            return $1.to_i
-          end
-        rescue
+      return 0
+   #   if File.directory?(File.join(Rails.root, '.svn'))
+   #     begin
+   #       path = Redmine::Scm::Adapters::AbstractAdapter.shell_quote(Rails.root.to_s)
+   #       if `svn info --xml #{path}` =~ /revision="(\d+)"/
+   #         return $1.to_i
+   #       end
+   #     rescue
           # Could not find the current revision
-        end
-      end
-      nil
+   #     end
+   #   end
+   #   nil
     end
 
     REVISION = self.revision
--- a/plugins/redmine_bibliography/Gemfile	Thu Feb 04 08:47:09 2016 +0000
+++ b/plugins/redmine_bibliography/Gemfile	Thu Feb 22 09:04:43 2018 +0000
@@ -1,3 +1,6 @@
 gem 'bibtex-ruby'
 gem 'nokogiri'
-gem 'citeproc-ruby'
\ No newline at end of file
+gem 'csl'
+gem 'csl-styles'
+gem 'citeproc'
+gem 'citeproc-ruby'
--- a/plugins/redmine_bibliography/app/models/publication.rb	Thu Feb 04 08:47:09 2016 +0000
+++ b/plugins/redmine_bibliography/app/models/publication.rb	Thu Feb 22 09:04:43 2018 +0000
@@ -101,7 +101,12 @@
     end
 
     if style == :ieee
-      CiteProc.process(bib.to_citeproc, :style => :ieee, :format => :html)
+      cite = bib.to_citeproc
+      cite_id = cite["id"]
+      cp = CiteProc::Processor.new style: 'ieee', format: 'html'
+      cp.import [cite]
+      texts = cp.render :bibliography, id: cite_id
+      texts[0]
     else
       bibtex = bib.to_s :include => :meta_content
       bibtex.strip!
--- a/plugins/redmine_bibliography/init.rb	Thu Feb 04 08:47:09 2016 +0000
+++ b/plugins/redmine_bibliography/init.rb	Thu Feb 22 09:04:43 2018 +0000
@@ -2,6 +2,8 @@
 
 require 'bibtex'
 require 'citeproc'
+require 'citeproc/ruby'
+require 'csl/styles'
 
 # Patches to the Redmine core.
 ActionDispatch::Callbacks.to_prepare do
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/plugins/redmine_embedded/Gemfile	Thu Feb 22 09:04:43 2018 +0000
@@ -0,0 +1,1 @@
+gem 'iconv'
--- a/public/stylesheets/application.css	Thu Feb 04 08:47:09 2016 +0000
+++ b/public/stylesheets/application.css	Thu Feb 22 09:04:43 2018 +0000
@@ -515,6 +515,8 @@
 
 p.pagination {margin-top:8px; font-size: 90%}
 
+#browser-autoscroll { clear: right }
+
 /***** Tabular forms ******/
 .tabular p{
   margin: 0;