pycsalgos: comparison var/omp_app.py @ 55:020399d027b1
Changed directory structure - part 3
author | nikcleju
date | Wed, 14 Dec 2011 14:48:06 +0000
comparing 54:527b0f6a9ffc with 55:020399d027b1
1 """ | |
2 #=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=# | |
3 # Bob L. Sturm <bst@create.aau.dk> 20111018 | |
4 # Department of Architecture, Design and Media Technology | |
5 # Aalborg University Copenhagen | |
6 # Lautrupvang 15, 2750 Ballerup, Denmark | |
7 #=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=# | |
8 """ | |
9 | |
10 import numpy as np | |
11 from sklearn.utils import check_random_state | |
12 import time | |
13 | |
14 from omp_sk_bugfix import orthogonal_mp | |
15 from omp_QR import greed_omp_qr | |
16 from omp_QR import omp_qr | |
17 | |
18 """ | |
19 Run a problem suite involving sparse vectors in | |
20 ambientDimension dimensional space, with a resolution | |
21 in the phase plane of numGradations x numGradations, | |
22 and at each indeterminacy and sparsity pair run | |
23 numTrials independent trials. | |
24 | |
25 Outputs a text file denoting successes at each phase point. | |
26 For more on phase transitions, see: | |
27 D. L. Donoho and J. Tanner, "Precise undersampling theorems," | |
28 Proc. IEEE, vol. 98, no. 6, pp. 913-924, June 2010. | |
29 """ | |

def runProblemSuite(ambientDimension, numGradations, numTrials):

    idx = np.arange(ambientDimension)
    phaseDelta = np.linspace(0.05, 1, numGradations)
    phaseRho = np.linspace(0.05, 1, numGradations)
    success = np.zeros((numGradations, numGradations))

    # Nic: init timers
    t1all = 0
    t2all = 0
    t3all = 0

    deltaCounter = 0
    # delta is the number of measurements / ambient dimension
    for delta in phaseDelta[:17]:
        rhoCounter = 0
        for rho in phaseRho:
            print(deltaCounter, rhoCounter)
            numMeasurements = int(delta*ambientDimension)
            sparsity = int(rho*numMeasurements)
            # how do I set the following to be random each time?
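            # (One possible answer to the question above, left as a note rather
            #  than applied here: check_random_state(None) returns numpy's global
            #  RandomState, so each run would see fresh data, whereas the fixed
            #  seed 100 below makes every (delta, rho) cell reuse the same
            #  pseudo-random stream.)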
            generator = check_random_state(100)
            # create unit norm dictionary
            D = generator.randn(numMeasurements, ambientDimension)
            D /= np.sqrt(np.sum((D ** 2), axis=0))
            # compute Gramian (for efficiency)
            DTD = np.dot(D.T, D)

            successCounter = 0
            trial = numTrials
            while trial > 0:
                # generate sparse signal with a minimum non-zero value
                x = np.zeros((ambientDimension, 1))
                idx2 = idx
                generator.shuffle(idx2)
                idx3 = idx2[:sparsity]
                while np.min(np.abs(x[idx3, 0])) < 1e-10:
                    x[idx3, 0] = generator.randn(sparsity)
                # sense sparse signal
                y = np.dot(D, x)

                # Nic: Use sparsify OMP function (translated from Matlab)
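                # (Assumed semantics, following the sparsify toolbox convention:
                #  stopCrit 'M' stops after a fixed number of selected atoms,
                #  here stopTol = 2*sparsity, i.e. the same 2*sparsity budget
                #  given to the other two solvers below.)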
                ompopts = dict({'stopCrit': 'M', 'stopTol': 2*sparsity})
                starttime = time.time()  # start timer
                x_r2, errs, times = greed_omp_qr(y.squeeze().copy(), D.copy(), D.shape[1], ompopts)
                t2all = t2all + time.time() - starttime  # stop timer
                idx_r2 = np.nonzero(x_r2)[0]

                # run to two times the expected sparsity, or to tolerance
                # why? OMP can often retrieve the correct solution
                # when it is run for more than the expected sparsity
                #x_r, idx_r = omp_qr(y,D,DTD,2*sparsity,1e-5)
                # Nic: adjust tolerance to match the other function
                starttime = time.time()  # start timer
                x_r, idx_r = omp_qr(y.copy(), D.copy(), DTD.copy(), 2*sparsity, numMeasurements*1e-14/np.vdot(y, y))
                t1all = t1all + time.time() - starttime  # stop timer

                # Nic: test sklearn omp
                starttime = time.time()  # start timer
                x_r3 = orthogonal_mp(D.copy(), y.copy(), n_nonzero_coefs=2*sparsity, tol=numMeasurements*1e-14, precompute_gram=False, copy_X=True)
                idx_r3 = np.nonzero(x_r3)[0]
                t3all = t3all + time.time() - starttime  # stop timer

                # Nic: compare results
                print 'diff1 = ', np.linalg.norm(x_r.squeeze() - x_r2.squeeze())
                print 'diff2 = ', np.linalg.norm(x_r.squeeze() - x_r3.squeeze())
                print 'diff3 = ', np.linalg.norm(x_r2.squeeze() - x_r3.squeeze())
                print "Bob's total time = ", t1all
                print "Nic's total time = ", t2all
                print "Skl's total time = ", t3all
                if np.linalg.norm(x_r.squeeze() - x_r2.squeeze()) > 1e-10 or \
                   np.linalg.norm(x_r.squeeze() - x_r3.squeeze()) > 1e-10 or \
                   np.linalg.norm(x_r2.squeeze() - x_r3.squeeze()) > 1e-10:
                    print "STOP: Different results"
                    print "Bob's residual: ||y - D x_r ||_2 = ", np.linalg.norm(y.squeeze() - np.dot(D, x_r).squeeze())
                    print "Nic's residual: ||y - D x_r ||_2 = ", np.linalg.norm(y.squeeze() - np.dot(D, x_r2).squeeze())
                    print "Skl's residual: ||y - D x_r ||_2 = ", np.linalg.norm(y.squeeze() - np.dot(D, x_r3).squeeze())
                    raise ValueError("Different results")

                # debias to remove small entries
                for nn in idx_r:
                    if abs(x_r[nn]) < 1e-10:
                        x_r[nn] = 0

                # exact recovery condition using support
                #if sorted(np.flatnonzero(x_r)) == sorted(np.flatnonzero(x)):
                #    successCounter += 1
                # exact recovery condition using error in solution
                error = x - x_r
                """ the following is the exact recovery condition in: A. Maleki
                and D. L. Donoho, "Optimally tuned iterative reconstruction
                algorithms for compressed sensing," IEEE J. Selected Topics
                in Signal Process., vol. 4, pp. 330-341, Apr. 2010. """
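                # i.e. recovery is declared when the squared error ||x - x_r||^2
                # is at most 1e-4 of the signal energy ||x||^2, a reconstruction
                # SNR of at least 40 dB.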
                if np.vdot(error, error) < np.vdot(x, x)*1e-4:
                    successCounter += 1
                trial -= 1

            success[rhoCounter, deltaCounter] = successCounter
            if successCounter == 0:
                break

            rhoCounter += 1
        #np.savetxt('test.txt',success,fmt='#2.1d',delimiter=',')
        deltaCounter += 1

if __name__ == '__main__':
    print('Running problem suite')
    ambientDimension = 400
    numGradations = 30
    numTrials = 1

    #import cProfile
    #cProfile.run('runProblemSuite(ambientDimension,numGradations,numTrials)','profres')
    runProblemSuite(ambientDimension, numGradations, numTrials)
    print "Done"

    #import pstats
    #p = pstats.Stats('D:\Nic\Dev2\profres')
    #p.sort_stats('cumulative').print_stats(10)