comparison notebooks/results_for_30_seconds.ipynb @ 71:04fc6e809a42 branch-tests

author mpanteli <m.x.panteli@gmail.com>
date Fri, 22 Sep 2017 18:03:41 +0100
parents 9b10b688c2ac
children 9e526f7c9715
65:9b10b688c2ac 71:04fc6e809a42
1 { 1 {
2 "cells": [ 2 "cells": [
3 { 3 {
4 "cell_type": "code", 4 "cell_type": "code",
5 "execution_count": 36, 5 "execution_count": 1,
6 "metadata": {}, 6 "metadata": {},
7 "outputs": [ 7 "outputs": [
8 { 8 {
9 "name": "stdout", 9 "name": "stderr",
10 "output_type": "stream", 10 "output_type": "stream",
11 "text": [ 11 "text": [
12 "The autoreload extension is already loaded. To reload it, use:\n", 12 "/homes/mp305/anaconda/lib/python2.7/site-packages/librosa/core/audio.py:33: UserWarning: Could not import scikits.samplerate. Falling back to scipy.signal\n",
13 " %reload_ext autoreload\n" 13 " warnings.warn('Could not import scikits.samplerate. '\n"
14 ] 14 ]
15 } 15 }
16 ], 16 ],
17 "source": [ 17 "source": [
18 "import numpy as np\n", 18 "import numpy as np\n",
85 "print np.array_equal(np.unique(testset[1]), np.unique(trainset[1]))" 85 "print np.array_equal(np.unique(testset[1]), np.unique(trainset[1]))"
86 ] 86 ]
87 }, 87 },
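The cell ending here (notebook line 85 in both columns) verifies that the train and test splits cover the same set of class labels. A minimal sketch of that check, assuming trainset and testset are tuples whose second element holds the labels, as the [1] indexing suggests:

    import numpy as np

    # Toy splits standing in for the pickled train/test data.
    trainset = (np.random.rand(6, 4),
                np.array(['class_a', 'class_b', 'class_c', 'class_a', 'class_b', 'class_c']))
    testset = (np.random.rand(3, 4),
               np.array(['class_b', 'class_c', 'class_a']))

    # True only if both splits contain exactly the same set of classes.
    print(np.array_equal(np.unique(testset[1]), np.unique(trainset[1])))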
88 { 88 {
89 "cell_type": "code", 89 "cell_type": "code",
90 "execution_count": 37, 90 "execution_count": 3,
91 "metadata": {}, 91 "metadata": {},
92 "outputs": [ 92 "outputs": [
93 { 93 {
94 "name": "stdout", 94 "name": "stdout",
95 "output_type": "stream", 95 "output_type": "stream",
121 " pickle.dump(testset, f)" 121 " pickle.dump(testset, f)"
122 ] 122 ]
123 }, 123 },
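The cell closed above pickles the test split (notebook line 121). A minimal round-trip sketch, with a hypothetical path standing in for /import/c4dm-04/mariap/test_data_melodia_8_30sec.pickle:

    import pickle
    import numpy as np

    testset = (np.random.rand(3, 4), np.array(['class_a', 'class_b', 'class_a']))
    path = '/tmp/test_data_melodia_8_30sec.pickle'  # hypothetical location
    with open(path, 'wb') as f:
        pickle.dump(testset, f)
    with open(path, 'rb') as f:
        restored = pickle.load(f)
    print(np.array_equal(testset[1], restored[1]))  # sanity check: labels survive the round trip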
124 { 124 {
125 "cell_type": "code", 125 "cell_type": "code",
126 "execution_count": 38, 126 "execution_count": 4,
127 "metadata": {}, 127 "metadata": {},
128 "outputs": [ 128 "outputs": [
129 { 129 {
130 "name": "stdout", 130 "name": "stdout",
131 "output_type": "stream", 131 "output_type": "stream",
132 "text": [ 132 "text": [
133 "['/import/c4dm-04/mariap/train_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/val_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/test_data_melodia_8_30sec.pickle'] ['/import/c4dm-04/mariap/lda_data_melodia_8_30sec_30sec.pickle', '/import/c4dm-04/mariap/pca_data_melodia_8_30sec_30sec.pickle', '/import/c4dm-04/mariap/nmf_data_melodia_8_30sec_30sec.pickle', '/import/c4dm-04/mariap/ssnmf_data_melodia_8_30sec_30sec.pickle', '/import/c4dm-04/mariap/na_data_melodia_8_30sec_30sec.pickle']\n" 133 "['/import/c4dm-04/mariap/train_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/val_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/test_data_melodia_8_30sec.pickle'] ['/import/c4dm-04/mariap/lda_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/pca_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/nmf_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/ssnmf_data_melodia_8_30sec.pickle', '/import/c4dm-04/mariap/na_data_melodia_8_30sec.pickle']\n"
134 ] 134 ]
135 } 135 }
136 ], 136 ],
137 "source": [ 137 "source": [
138 "mapper.INPUT_FILES = OUTPUT_FILES\n", 138 "mapper.INPUT_FILES = OUTPUT_FILES\n",
142 "print mapper.INPUT_FILES, mapper.OUTPUT_FILES" 142 "print mapper.INPUT_FILES, mapper.OUTPUT_FILES"
143 ] 143 ]
144 }, 144 },
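The change in this cell's output drops the duplicated '_30sec_30sec' suffix from the mapper OUTPUT_FILES. A hypothetical sketch (not the mapper module's actual code) of building those paths from a single suffix so it cannot be appended twice:

    import os

    BASE = '/import/c4dm-04/mariap'    # directory taken from the printed paths
    SUFFIX = '_melodia_8_30sec'        # appended exactly once
    METHODS = ['lda', 'pca', 'nmf', 'ssnmf', 'na']

    OUTPUT_FILES = [os.path.join(BASE, '%s_data%s.pickle' % (m, SUFFIX)) for m in METHODS]
    print(OUTPUT_FILES)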
145 { 145 {
146 "cell_type": "code", 146 "cell_type": "code",
147 "execution_count": 14, 147 "execution_count": null,
148 "metadata": {}, 148 "metadata": {},
149 "outputs": [ 149 "outputs": [
150 { 150 {
151 "name": "stdout", 151 "name": "stdout",
152 "output_type": "stream", 152 "output_type": "stream",
156 "mapping rhy\n", 156 "mapping rhy\n",
157 "training with PCA transform...\n", 157 "training with PCA transform...\n",
158 "variance explained 1.0\n", 158 "variance explained 1.0\n",
159 "138 400\n", 159 "138 400\n",
160 "training with PCA transform...\n", 160 "training with PCA transform...\n",
161 "variance explained 0.989999211296\n", 161 "variance explained 0.989994197011\n",
162 "training with LDA transform...\n" 162 "training with LDA transform...\n"
163 ] 163 ]
164 }, 164 },
165 { 165 {
166 "name": "stderr", 166 "name": "stderr",
167 "output_type": "stream", 167 "output_type": "stream",
168 "text": [ 168 "text": [
169 "/homes/mp305/anaconda/lib/python2.7/site-packages/sklearn/utils/validation.py:526: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().\n",
170 " y = column_or_1d(y, warn=True)\n",
169 "/homes/mp305/anaconda/lib/python2.7/site-packages/sklearn/discriminant_analysis.py:455: UserWarning: The priors do not sum to 1. Renormalizing\n", 171 "/homes/mp305/anaconda/lib/python2.7/site-packages/sklearn/discriminant_analysis.py:455: UserWarning: The priors do not sum to 1. Renormalizing\n",
170 " UserWarning)\n" 172 " UserWarning)\n"
171 ] 173 ]
172 }, 174 },
173 { 175 {
174 "name": "stdout", 176 "name": "stdout",
175 "output_type": "stream", 177 "output_type": "stream",
176 "text": [ 178 "text": [
177 "variance explained 1.0\n", 179 "variance explained 1.0\n",
180 "training with NMF transform...\n",
181 "reconstruction error 6.59195506061\n",
182 "training with SSNMF transform...\n",
183 "reconstruction error 25.0727210368\n",
178 "transform test data...\n", 184 "transform test data...\n",
179 "mapping mel\n", 185 "mapping mel\n",
180 "training with PCA transform...\n", 186 "training with PCA transform...\n",
181 "variance explained 1.0\n", 187 "variance explained 1.0\n",
182 "214 240\n", 188 "214 240\n",
183 "training with PCA transform...\n", 189 "training with PCA transform...\n",
184 "variance explained 0.990347897477\n", 190 "variance explained 0.990347897477\n",
185 "training with LDA transform...\n", 191 "training with LDA transform...\n",
186 "variance explained 1.0\n", 192 "variance explained 1.0\n",
187 "transform test data...\n", 193 "training with NMF transform...\n"
188 "mapping mfc\n",
189 "training with PCA transform...\n",
190 "variance explained 1.0\n",
191 "39 80\n",
192 "training with PCA transform...\n",
193 "variance explained 0.991458741216\n",
194 "training with LDA transform...\n",
195 "variance explained 0.942657629903\n",
196 "transform test data...\n",
197 "mapping chr\n",
198 "training with PCA transform...\n",
199 "variance explained 1.0\n",
200 "70 120\n",
201 "training with PCA transform...\n",
202 "variance explained 0.990503308525\n",
203 "training with LDA transform...\n",
204 "variance explained 0.954607427999\n",
205 "transform test data...\n"
206 ] 194 ]
207 } 195 }
208 ], 196 ],
209 "source": [ 197 "source": [
210 "print \"mapping...\"\n", 198 "print \"mapping...\"\n",
211 "_, _, ldadata_list, _, _, Y, Yaudio = mapper.lda_map_and_average_frames(min_variance=0.99)\n", 199 "#_, _, ldadata_list, _, _, Y, Yaudio = mapper.lda_map_and_average_frames(min_variance=0.99)\n",
212 "mapper.write_output([], [], ldadata_list, [], [], Y, Yaudio)" 200 "#mapper.write_output([], [], ldadata_list, [], [], Y, Yaudio)\n",
201 "data_list, pcadata_list, ldadata_list, nmfdata_list, ssnmfdata_list, classlabs, audiolabs = mapper.map_and_average_frames(min_variance=0.99)\n",
202 "mapper.write_output(data_list, pcadata_list, ldadata_list, nmfdata_list, ssnmfdata_list, classlabs, audiolabs)"
213 ] 203 ]
214 }, 204 },
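The cell ending here now calls mapper.map_and_average_frames(min_variance=0.99), which, per the printed output, fits PCA keeping 99% of the variance, then LDA, NMF and SSNMF for each feature group; the new stderr also shows sklearn's DataConversionWarning about a column-vector y. A hedged sketch of those transforms on random stand-in data (not the mapper implementation itself):

    import numpy as np
    from sklearn.decomposition import PCA, NMF
    from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

    X = np.random.rand(200, 80)                   # frames x feature dims, non-negative for NMF
    y = np.random.randint(0, 10, size=(200, 1))   # class labels as a column vector

    pca = PCA(n_components=0.99)                  # keep enough components for 99% variance
    X_pca = pca.fit_transform(X)
    print('variance explained %s' % pca.explained_variance_ratio_.sum())

    lda = LinearDiscriminantAnalysis()
    X_lda = lda.fit_transform(X, y.ravel())       # ravel() avoids the DataConversionWarning above

    nmf = NMF(n_components=20)
    W = nmf.fit_transform(X)
    print('reconstruction error %s' % nmf.reconstruction_err_)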
215 { 205 {
216 "cell_type": "code", 206 "cell_type": "code",
217 "execution_count": 29, 207 "execution_count": 29,