EI328 Final Project Review (II)

  A further step of this project is to implement a Min-Max Modular Neural Network with parallel computing, using the Liblinear library as the base classifier.

  We use Java multi-threading to solve this problem and create a class called MinMax (./src/MinMax.java) to accomplish task 2 and task 3. The main thread (the static method main_thread, called from main) first performs the requisite initializations and starts the working threads (MinMax objects). It then decomposes the problem into sub-problems (randomly in task 2, or in light of prior knowledge in task 3) and puts them into a BlockingQueue called probBuf, while the working threads concurrently take sub-problems from probBuf, train Liblinear models on them, and put the models into another queue called modBuf. Once main_thread has provided all the sub-problems (plus a sentinel), it turns to modBuf, post-processes each model (adjusting its decision threshold) and puts the new models into a third queue called newModBuf. Each working thread releases a permit on the waitTask semaphore when it finishes training; after main_thread has acquired all of these permits, it releases the same number of permits on another semaphore, nextTask, which starts the testing phase. The working threads then repeatedly take models from newModBuf and revise minResult until newModBuf is drained. Finally, main_thread performs the "maximization" step (the MAX module) to complete the classification. A stripped-down sketch of this handshake is given below, followed by the full source of MinMax.java.
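
  The following stand-alone sketch is not part of the project code; the class name HandshakeSketch and the placeholder payload strings are invented for illustration. It shows only the BlockingQueue/sentinel/semaphore choreography described above, with the actual training and testing replaced by print statements.

import java.util.concurrent.*;

// A minimal sketch of the producer/consumer handshake used by MinMax.
// "Training" and "testing" are stand-ins (print statements), but the
// queue, sentinel and semaphore choreography is the same.
public class HandshakeSketch {
    static final int WORKERS = 3;
    static final String PROB_SENTINEL = "STOP_TRAINING";
    static final String MOD_SENTINEL  = "STOP_TESTING";

    static final BlockingQueue<String> probBuf   = new LinkedBlockingQueue<String>();
    static final BlockingQueue<String> newModBuf = new LinkedBlockingQueue<String>();
    static final Semaphore waitTask = new Semaphore(0);   // workers -> main: training done
    static final Semaphore nextTask = new Semaphore(0);   // main -> workers: start testing

    public static void main(String[] args) throws Exception {
        Thread[] workers = new Thread[WORKERS];
        for (int i = 0; i < WORKERS; i++) {
            workers[i] = new Thread(new Runnable() { public void run() { work(); } });
            workers[i].start();
        }
        // "distribute": enqueue the sub-problems, then a single sentinel.
        for (int i = 0; i < 10; i++) probBuf.put("subproblem-" + i);
        probBuf.put(PROB_SENTINEL);
        // In the real code setMins() would move thresholded models from modBuf
        // to newModBuf here; this sketch just pre-fills newModBuf directly.
        for (int i = 0; i < 10; i++) newModBuf.put("model-" + i);
        newModBuf.put(MOD_SENTINEL);

        waitTask.acquire(WORKERS);     // wait until every worker has finished "training"
        nextTask.release(WORKERS);     // allow every worker to start "testing"
        for (Thread w : workers) w.join();
        System.out.println("MAX combination would run here.");
    }

    static void work() {
        try {
            while (true) {             // "training" loop
                String sub = probBuf.take();
                if (sub.equals(PROB_SENTINEL)) { probBuf.put(sub); break; }   // re-insert for the other workers
                System.out.println(Thread.currentThread().getName() + " trains on " + sub);
            }
            waitTask.release();        // report: training finished
            nextTask.acquire();        // wait for permission to test
            while (true) {             // "testing" loop
                String model = newModBuf.take();
                if (model.equals(MOD_SENTINEL)) { newModBuf.put(model); break; }
                System.out.println(Thread.currentThread().getName() + " tests with " + model);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}

  The real MinMax class below follows the same pattern, with Subprob objects and trained Liblinear models flowing through the queues.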

/**
* Argument Format:
* <task # = 2 or 3> <number of threads> <min group size> <max group size> <group size step>
*                <min threshold> <max threshold> <threshold step>
**/

import de.bwaldvogel.liblinear.*;
import java.util.concurrent.atomic.*;
import java.util.concurrent.*;
import java.util.*;
import java.io.*;

class Subprob {
    private List<Integer> data;
    private Integer minIndex;

    public Subprob(int idx) {
        minIndex = new Integer(idx);
        data = new LinkedList<Integer>();
    }
    public void add(Integer item) {
        data.add(item);
    }
    public int size() {
        return data.size();
    }
    public Integer getItem(int idx) {
        if (idx<0||idx>=data.size()) {
            throw new RuntimeException("Subprob: IndexOutOfBound");
        }
        return data.get(idx);
    }
    public Integer getIndex() {
        return minIndex;
    }
}

class DataItem implements Comparable<DataItem> {
    private Integer index;
    private String info;

    public DataItem(int idx,String info) {
        index = new Integer(idx);
        this.info = info;
    }
    public int compareTo(DataItem other) {
        return info.compareTo(other.info);
    }
    public Integer getValue() {
        return index;
    }
}

public class MinMax extends AbstractTask {
    private static int NUM;
    private static int NUM_OF_THREAD;

    private static Semaphore nextTask;
    private static Semaphore waitTask;
    private static Semaphore timeMutex;
    private static Subprob probSentinel;
    private static Model modSentinel;

    private static BlockingQueue<Subprob> probBuf;
    private static BlockingQueue<Model> modBuf;
    private static BlockingQueue<Model> newModBuf;
    private static BlockingQueue<Integer> minIdx;

    private static AtomicInteger[][] minResult;

    static {
        try {
            init();
            nextTask = new Semaphore(0);
            waitTask = new Semaphore(0);
            timeMutex = new Semaphore(1);
            probSentinel = new Subprob(-1);
            modSentinel = new Model();
            probBuf = new LinkedBlockingQueue<Subprob>();
            modBuf = new LinkedBlockingQueue<Model>();
            newModBuf = new LinkedBlockingQueue<Model>();
            minIdx = new LinkedBlockingQueue<Integer>();
        } catch (Exception e) {
            System.err.println("STATIC Error: "+e);
        }
    }
    private static void separate(List<Integer> poslst,List<Integer> neglst,boolean prior) {
        // List the positive samples and negative samples:
        // Precondition: poslst and neglst are non-null empty lists
        // Postcondition: poslst and neglst are filled with indices of positive
        //            and negative training data respectively
        if (!prior) {
            for (int i=0;i<train.l;i++) {
                if (train.y[i]>0) {
                    poslst.add(new Integer(i));
                } else {
                    neglst.add(new Integer(i));
                }
            }
            Random rand = new Random();
            Collections.shuffle(poslst,rand);
            Collections.shuffle(neglst,rand);
        } else {
            try {
                System.out.println("Gathering Prior Knowledge ... ");
                BufferedReader pin = new BufferedReader(new FileReader("./data/train.txt"));
                List<DataItem> posData = new LinkedList<DataItem>();
                List<DataItem> negData = new LinkedList<DataItem>();
                for (int i=0;i<train.l;i++) {
                    String line = pin.readLine();
                    if (train.y[i]>0) {
                        posData.add(new DataItem(i,line.substring(0,3)));
                    } else {
                        negData.add(new DataItem(i,line.substring(0,3)));
                    }
                }
                pin.close();
                //System.in.read();
                Collections.sort(posData);
                Collections.sort(negData);
                for (int i=0;i<posData.size();i++) {
                    poslst.add(posData.get(i).getValue());
                }
                for (int i=0;i<negData.size();i++) {
                    neglst.add(negData.get(i).getValue());
                }
            } catch (Exception e) {
                System.err.println("SEPARATE Error: "+e);
            }
        }
    }
    private static void distribute(boolean prior) {
        List<Integer> poslst = new LinkedList<Integer>();
        List<Integer> neglst = new LinkedList<Integer>();
        separate(poslst,neglst,prior);
        // Group the positive samples and negative samples:
        int posGrpNum = (poslst.size()+NUM-1)/NUM;
        int negGrpNum = (neglst.size()+NUM-1)/NUM;
        int[] posGrps = new int[posGrpNum+1];
        int[] negGrps = new int[negGrpNum+1];
        for (int i=0;i<posGrpNum;i++) {
            posGrps[i+1] = posGrps[i]+(poslst.size()+i)/posGrpNum;
        }
        for (int i=0;i<negGrpNum;i++) {
            negGrps[i+1] = negGrps[i]+(neglst.size()+i)/negGrpNum;
        }
        try {    // Add tasks to the buffer:
            System.out.println("Distribute subproblems ... ");
            start = System.currentTimeMillis();        // start training
            for (int i=0;i<posGrpNum;i++) {
                for (int j=0;j<negGrpNum;j++) {
                    Subprob sub = new Subprob(i);
                    for (int k=posGrps[i];k<posGrps[i+1];k++) {
                        sub.add(poslst.get(k));
                    }
                    for (int k=negGrps[j];k<negGrps[j+1];k++) {
                        sub.add(neglst.get(k));
                    }
                    probBuf.put(sub);
                }
            }
        } catch (Exception e) {
            System.err.println("DISTRIBUTE Error 1: "+e);
        }
        try {    // Prepare for the MIN modules:
            minResult = new AtomicInteger[posGrpNum][test.l];
            for (int i=0;i<posGrpNum;i++) {
                for (int j=0;j<test.l;j++) {
                    minResult[i][j] = new AtomicInteger(1);
                }
            }
            probBuf.put(probSentinel);    // "STOP TRAINING" signal
        } catch (Exception e) {
            System.err.println("DISTRIBUTE Error 2: "+e);
        }
    }
    private static void setMins(double threshold) {
        int cnt = 0;
        Model model = null;
        try {
            System.out.println("Preparations for MIN modules ...");
            while (cnt<NUM_OF_THREAD) {
                model = modBuf.take();
                if (model==modSentinel) {
                    cnt++;
                    continue;
                }
                model = modModify(model,threshold);
                newModBuf.put(model);
            }
            newModBuf.put(modSentinel);    // "STOP TESTING" signal
        } catch (Exception e) {
            System.err.println("SETMINS Error: "+e);
        }
    }
    private static void main_thread(double threshold,boolean prior) {
        try {
            MinMax[] tsks = new MinMax[NUM_OF_THREAD];
            //distribute(prior);    start = System.currentTimeMillis();    // for task 5
            end = -1;
            for (int i=0;i<NUM_OF_THREAD;i++) {
                tsks[i] = new MinMax();
                tsks[i].start();
            }
            distribute(prior);
            setMins(threshold);
            for (int i=0;i<NUM_OF_THREAD;i++) {
                waitTask.acquire();                    // training finished
            }
            for (int i=0;i<NUM_OF_THREAD;i++) {
                nextTask.release();                    // testing enabled
            }
            start = System.currentTimeMillis();        // start testing
            for (int i=0;i<NUM_OF_THREAD;i++) {
                tsks[i].join();
            }
            int[] res = new int[test.l];
            Arrays.fill(res,-1);
            for (int i=0;i<minResult.length;i++) {
                for (int j=0;j<test.l;j++) {
                    if (minResult[i][j].get()>0) {
                        res[j] = 1;                    // MAX Module
                    }
                }
            }
            end = System.currentTimeMillis();        // finish testing
            printTime(end-start,false);
            stats(res);
        } catch (Exception e) {
            System.err.println("MAIN_THREAD Error: "+e);
        }
    }
    public static void main(String[] args) {
        try {
            out = new PrintWriter(new FileWriter("./result/task"+args[0]+"_result.out"));
            NUM_OF_THREAD = Integer.parseInt(args[1]);
            int numMin = Integer.parseInt(args[2]);
            int numMax = Integer.parseInt(args[3]);
            int numStep = Integer.parseInt(args[4]);
            double tmin = Double.parseDouble(args[5]);
            double tmax = Double.parseDouble(args[6]);
            double tstep = Double.parseDouble(args[7]);
            for (NUM=numMin;NUM<=numMax;NUM+=numStep) {
                for (double t=tmin;t<tmax+tstep/2;t+=tstep) {
                    print("Group Size = "+NUM+",\t");
                    print(NUM_OF_THREAD+" Threads,\t");
                    println("Threshold = "+t);
                    probBuf.clear();
                    modBuf.clear();
                    newModBuf.clear();
                    minIdx.clear();
                    if (args[0].equals("2")) {
                        main_thread(t,false);
                    } else {
                        main_thread(t,true);
                    }
                }
                rocWrtLine("_next_");
            }
            rocWrtLine("_exit_");
            out.close();
        } catch (Exception e) {
            System.err.println("MAIN Error: "+e);
        }
    }

    public void run() {
        try {
            train();
            waitTask.release();
            nextTask.acquire();
            test();
        } catch (Exception e) {
            System.err.println("RUN Error: "+e);
        }
    }
    private void train() {
        Subprob sub = null;
        try {
            while (true) {
                sub = probBuf.take();
                if (sub==probSentinel) {    // signal of termination
                    timeMutex.acquire();
                    if (end<0) {            // finish training
                        end = System.currentTimeMillis();
                        printTime(end-start,true);
                    }
                    timeMutex.release();
                    probBuf.put(sub);
                    break;
                }
                Problem prob = new Problem();
                prob.l = sub.size();
                prob.n = train.n;
                prob.x = new Feature[prob.l][];
                prob.y = new double[prob.l];
                prob.bias = 1;
                for (int i=0;i<sub.size();i++) {
                    int pos = sub.getItem(i).intValue();
                    prob.x[i] = train.x[pos];
                    prob.y[i] = train.y[pos];
                }
                modBuf.put(Linear.train(prob,param));
                minIdx.put(sub.getIndex());
            }
            modBuf.put(modSentinel);
        } catch (Exception e) {
            System.err.println("TRAIN Error: "+e);
        }
    }
    private void test() {
        Model model = null;
        int idx = -1;
        try {
            while (true) {
                model = newModBuf.take();
                if (model==modSentinel) {    // signal of termination
                    newModBuf.put(model);
                    break;
                }
                idx = minIdx.poll().intValue();
                for (int i=0;i<test.l;i++) {
                    if (Linear.predict(model,test.x[i])<0) {
                        minResult[idx][i].getAndSet(-1);    // MIN Modules
                    }
                }
            }
        } catch (Exception e) {
            System.err.println("TEST Error: "+e);
        }
    }
}

  The distribute method in the MinMax class above uses an even-distribution scheme. A parameter called NUM bounds the number of samples of each class in a training sub-problem, which keeps the positive and negative samples in a sub-problem balanced.

  At first, NUM will determine the number of positive groups and negative groups:

      $n_1=\lceil N_1/NUM \rceil$  and  $n_2=\lceil N_2/NUM \rceil$,

where $N_1$ and $N_2$ are the numbers of positive and negative samples respectively.

  Then, we distribute approximately equal numbers of samples to the $n_1$ positive groups and $n_2$ negative groups, using the following identity:

      $N=\sum_{i=0}^{n-1}\lfloor (N+i)/n \rfloor$

where we let $N=N_1,\ n=n_1$ and $N=N_2,\ n=n_2$ respectively.

  For either the positive class ($j=1$) or the negative class ($j=2$), the upper bound on the number of samples in a group is:

      $\lceil \frac{N_j}{\lceil N_j/NUM \rceil} \rceil < \frac{N_j}{\lceil N_j/NUM \rceil}+1 \leq NUM+1$

and the lower bound is:

      $\lfloor \frac{N_j}{\lceil N_j/NUM \rceil} \rfloor > \frac{N_j}{\lceil N_j/NUM \rceil}-1 > \left(\frac{1}{N_j}+\frac{1}{NUM}\right)^{-1}-1$

  Thus, supposing $N_j \leq N_{3-j}$, when $1 \ll NUM \ll N_j$ holds, the ratio of the two kinds of samples in a sub-problem is at least:

      $\frac{({N_j}^{-1}+NUM^{-1})^{-1}-1}{NUM+1} \approx \frac{NUM-1}{NUM+1} \approx 1$
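
  As a quick check of the identity and the balance claim above, the following stand-alone snippet (not part of the project; the class name GroupSizeCheck and the sample values of N and NUM are made up) reproduces the grouping rule used in distribute() and prints the resulting group sizes:

// Stand-alone check of the grouping rule used in distribute():
// group i of a class with N samples and n = ceil(N/NUM) groups
// receives floor((N+i)/n) samples.
public class GroupSizeCheck {
    public static void main(String[] args) {
        int N = 9497;        // number of samples of one class (placeholder value)
        int NUM = 1000;      // requested maximum group size (placeholder value)
        int n = (N + NUM - 1) / NUM;      // n = ceil(N/NUM) groups
        int[] bounds = new int[n + 1];    // prefix sums, exactly as in distribute()
        int min = Integer.MAX_VALUE, max = 0;
        for (int i = 0; i < n; i++) {
            int size = (N + i) / n;       // floor((N+i)/n)
            bounds[i + 1] = bounds[i] + size;
            min = Math.min(min, size);
            max = Math.max(max, size);
        }
        // bounds[n] == N by the identity above, and max - min <= 1.
        System.out.println("total = " + bounds[n] + ", smallest group = " + min + ", largest group = " + max);
    }
}

  The printed total always equals N, and the largest and smallest group sizes differ by at most one, which matches the bounds derived above.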

  In task 5, we are required to measure the time performance of this program when prior knowledge is used. We wrote two Python scripts to collect the data and plot line charts showing the training time and testing time for different values of NUM_OF_THREAD.

  This is ./src/timeHelp, which collects data from ./result/task3_result.out:

#! /usr/bin/python
import sys
fin = open("./result/task3_result.out","r")
fout = open("./result/task3_time.txt","a")
# number of threads
fout.write(sys.argv[1]+' ')
# training time
fin.readline()
tmp = fin.readline().split("\t")
tok = tmp[2].split('ms')
fout.write(tok[0]+' ')
# testing time
tmp = fin.readline().split('\t')
tok = tmp[2].split('ms')
fout.write(tok[0]+'\n')
fin.close()
fout.close()

  And this is the source code of ./src/timePlot, which plots the two line charts:

#! /usr/bin/python
import pylab as pl
import numpy as np
data = np.loadtxt("./result/task3_time.txt")
num = data[:,0]
ltrain = [1626]*len(num)    # constant reference line labelled 'LIBLINEAR' in the training-time legend
train = data[:,1]
ltest = [41]*len(num)       # constant reference line labelled 'LIBLINEAR' in the testing-time legend
test = data[:,2]
fg1 = pl.figure()
plot1, = pl.plot(num,ltrain)
plot2, = pl.plot(num,train)
pl.title('Training Time vs NUM_OF_THREAD')
pl.xlabel('NUM_OF_THREAD')
pl.ylabel('Training Time')
pl.legend((plot1,plot2),('LIBLINEAR','Min-Max Module'),'best',numpoints=1)
fg2 = pl.figure()
plot3, = pl.plot(num,ltest)
plot4, = pl.plot(num,test)
pl.title('Testing Time vs NUM_OF_THREAD')
pl.xlabel('NUM_OF_THREAD')
pl.ylabel('Testing Time')
pl.legend((plot3,plot4),('LIBLINEAR','Min-Max Module'),'best',numpoints=1)
pl.show()

  We wrote a shell script to run the programs above and obtain the two line charts:

#! /bin/bash
reset
make
for var in 1 2 3 4 5 6 7 8 9 10 15 20 25 30 35 40
do
    java -classpath ./bin/:./bin/liblinear-java-1.95.jar MinMax 3 $var 8000 8000 1000 -0.00 0.00 0.01
    python ./src/timeHelp $var
done

python ./src/timePlot

Original post: https://www.cnblogs.com/DevinZ/p/4495945.html