# Part of DML (Digital Music Laboratory)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

#!/usr/local/spark-1.0.0-bin-hadoop2/bin/spark-submit
# -*- coding: utf-8 -*-
__author__="wolffd"
__date__ ="$11-Jul-2014 15:31:01$"

# How to run this?

# to start HDFS: /usr/local/hadoop/sbin/start-dfs.sh

# Running python applications through ./bin/pyspark is deprecated as of Spark 1.0.
# Use ./bin/spark-submit instead; note that options such as --py-files have to
# come before the application script, otherwise they are passed to the script
# as application arguments:
# spark-submit --py-files vamp_plugin_dml.py,timeside_vamp.py,decode_to_wav.py test_timeside_vamp_spark_charm.py

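# A fuller invocation, given here only as an assumed example (the spark-submit
# flags --master, --executor-memory and --py-files are standard; the concrete
# paths mirror the shebang line and the SparkConf settings used below):
# /usr/local/spark-1.0.0-bin-hadoop2/bin/spark-submit \
#     --master spark://0.0.0.0:7077 \
#     --executor-memory 1g \
#     --py-files vamp_plugin_dml.py,timeside_vamp.py,decode_to_wav.py \
#     test_timeside_vamp_spark_charm.py
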
#import pydoop.hdfs as hdfs
from pyspark import SparkConf, SparkContext
# @todo: timeside has to be packed for multi-pc usage
import os.path
import os
import sys
from os import walk
# NOTE: this is only for debugging purposes, we can
# now use a regular timeside installation, e.g. installed by
sys.path.append(os.getcwd() + '/../TimeSide/')

# mappers
from timeside_vamp import *
from decode_to_wav import *

def main():
    print "PySpark Telemeta and Vamp Test on CHARM"

    # configure the Spark Setup
    conf = (SparkConf()
            .setMaster("spark://0.0.0.0:7077")
            #.setMaster("local")
            .setAppName("CharmVamp")
            .set("spark.executor.memory", "1g"))
    sc = SparkContext(conf=conf)

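    # Minimal sketch, not part of the original script: the @todo above notes
    # that timeside has to be packed for multi-pc usage. One assumed option is
    # to ship the helper modules to the executors from the driver itself via
    # sc.addPyFile; the file names are taken from the spark-submit example in
    # the header.
    # sc.addPyFile("vamp_plugin_dml.py")
    # sc.addPyFile("timeside_vamp.py")
    # sc.addPyFile("decode_to_wav.py")
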
    # SMB share
    # mount.cifs //10.2.165.194/mirg /home/wolffd/wansteadshare -o username=dml,password=xxx,domain=ENTERPRISE")


    # uses local paths
    # get list of objects to process
    mypath = '/samples/'
    data = []
    for (dirpath, dirnames, filenames) in walk(mypath):
        for file in filenames:
            if file.endswith(".wav") or file.endswith(".flac"):
                data.append(os.path.join(dirpath, file))

    data = data[0:2]  # only process the first two files for this test
    # HDFS
    # note: for HDFS we need wrappers for VAMP and gstreamer :/
    # copy to hdfs (put in different file before)
    #hdfs.mkdir("test")
    #hdfs.chmod("test","o+rw")
    ##this copies the test wavs to hdfs
    #hdfs.put("samples/","test/")
    # get hdfs paths
    # data = []
    # filenames = hdfs.ls("hdfs://0.0.0.0:9000/user/hduser/test/samples")
    # print filenames
    # for file in filenames:
    #     if file.endswith(".wav") or file.endswith(".flac"):
    #         data.append(file)
    #
    # define distributed dataset
    # todo: can we do this with the wav data itself?
    distData = sc.parallelize(data)

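    # A sketch answering the todo above, under assumptions that go beyond this
    # script: newer Spark releases (later than the 1.0.0 build targeted here)
    # provide sc.binaryFiles, which loads the audio bytes themselves into an
    # RDD of (path, content) pairs; the mappers would then have to accept raw
    # bytes instead of file paths.
    # rawAudio = sc.binaryFiles("hdfs://0.0.0.0:9000/user/hduser/test/samples")
    # m0 = rawAudio.map(lambda (path, content): decode_to_wav(source=content))
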
    # define map that decodes to wav
    m0 = distData.map(lambda x: decode_to_wav(source=x))

    # define map that applies the vamp plugin
    m1 = m0.map(lambda x: transform(wav_file=x)).collect()
    print m1
    return m1
    #process 2
    #m1.take(2)

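    # Alternative output path, sketched as an assumption rather than part of
    # the original test: for larger batches the results could be written out
    # by the executors with RDD.saveAsTextFile instead of being collected on
    # the driver (the output path below is hypothetical).
    # m0.map(lambda x: transform(wav_file=x)).saveAsTextFile("hdfs://0.0.0.0:9000/user/hduser/test/vamp_output")
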
if __name__ == "__main__":
    main()