pyspark/test_timeside_vamp_spark.py @ 0:e34cf1b6fe09 (tip)

author: Daniel Wolff
date:   Sat, 20 Feb 2016 18:14:24 +0100
# Part of DML (Digital Music Laboratory)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

#!/usr/local/spark-1.0.0-bin-hadoop2/bin/spark-submit
# -*- coding: utf-8 -*-
__author__ = "wolffd"
__date__ = "$11-Jul-2014 15:31:01$"

from pyspark import SparkConf, SparkContext
# @todo: timeside has to be packed for multi-pc usage
# (see the sketch after the SparkContext is created below)
from timeside_vamp import *
import os
from os import walk

# Running Python applications through ./bin/pyspark is deprecated as of Spark 1.0.
# Use ./bin/spark-submit <python file>
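# For example, with the Spark installation referenced by the shebang above
# (that install path is specific to this particular setup):
#
#   /usr/local/spark-1.0.0-bin-hadoop2/bin/spark-submit test_timeside_vamp_spark.py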


def main():
    print "PySpark Telemeta and Vamp Test"
    conf = (SparkConf()
            .setMaster("local")
            .setAppName("My app")
            .set("spark.executor.memory", "1g"))
    sc = SparkContext(conf=conf)
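    # The timeside_vamp module only exists on the driver here; on a real multi-machine
    # cluster it would also have to be shipped to the executors. A minimal sketch,
    # assuming timeside_vamp.py sits next to this script (the native TimeSide/Vamp
    # libraries would still need to be installed on every node):
    # sc.addPyFile("timeside_vamp.py")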
    # collect the paths of all .wav files in the TimeSide sample directory
    mypath = '../../TimeSide/tests/samples/'
    data = []
    for (dirpath, dirnames, filenames) in walk(mypath):
        for filename in filenames:
            if filename.endswith(".wav"):
                data.append(os.path.join(dirpath, filename))

    # define distributed dataset
    distData = sc.parallelize(data)
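    # sc.parallelize() also accepts an optional numSlices argument; with only a handful
    # of sample files, one partition per file would spread the work most evenly.
    # A hypothetical tuning, not part of the original test:
    # distData = sc.parallelize(data, len(data))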

    # define the map: lazily apply the Vamp transform to each file
    m1 = distData.map(lambda x: transform(wav_file=x))

    # process the first two files
    m1.take(2)
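    # take(2) is an action: it ships the lambda to the executors and returns only the
    # first two results to the driver. To run the transform over every sample file
    # instead, something like the following would do it:
    # results = m1.collect()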

if __name__ == "__main__":
    main()