1 package com.android.cts.verifier.sensors;
2 
3 /*
4  * Copyright (C) 2014 The Android Open Source Project
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License");
7  * you may not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  *      http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS,
14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 import android.media.MediaCodec;
19 import android.media.MediaExtractor;
20 import android.media.MediaFormat;
21 import android.os.Debug;
22 import android.os.Environment;
23 import android.os.PowerManager;
24 import android.util.JsonWriter;
25 import android.util.Log;
26 
27 import org.opencv.core.Mat;
28 import org.opencv.core.CvType;
29 import org.opencv.core.MatOfDouble;
30 import org.opencv.core.MatOfFloat;
31 import org.opencv.core.MatOfPoint2f;
32 import org.opencv.core.MatOfPoint3f;
33 import org.opencv.core.Size;
34 import org.opencv.highgui.Highgui;
35 import org.opencv.imgproc.Imgproc;
36 import org.opencv.calib3d.Calib3d;
37 import org.opencv.core.Core;
38 
39 import org.json.JSONObject;
40 import org.json.JSONException;
41 
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Locale;
53 
54 import android.opengl.GLES20;
55 import javax.microedition.khronos.opengles.GL10;
56 
57 /**
58  *  This class does analysis on the recorded RVCVCXCheck data sets.
59  */
60 public class RVCVXCheckAnalyzer {
    // Logcat tag for all messages from this analyzer.
    private static final String TAG = "RVCVXAnalysis";
    // Local verbose / debug logging switches.
    private static final boolean LOCAL_LOGV = false;
    private static final boolean LOCAL_LOGD = true;
    // Directory holding the recorded data set (sensor.log, video.mp4, videometa.json, ...).
    private final String mPath;

    // When true, frames with a detected circle grid get debug markers drawn on them.
    private static final boolean OUTPUT_DEBUG_IMAGE = false;
    // Minimum fraction of video frames that must yield a valid vision solution.
    private static final double VALID_FRAME_THRESHOLD = 0.8;
    // NOTE(review): name has a typo ("THREASHOLD"); kept as-is since its uses may be
    // outside this chunk. Presumably a reprojection-error limit in pixels — confirm.
    private static final double REPROJECTION_THREASHOLD = 4.0;
    // Force re-running the computer-vision analysis even when cached vision_rpy.log exists.
    private static final boolean FORCE_CV_ANALYSIS  = false;
    // Enable Debug.startMethodTracing around the video analysis.
    private static final boolean TRACE_VIDEO_ANALYSIS = false;
    // Target frame rate after decimating high-fps video.
    private static final double DECIMATION_FPS_TARGET = 15.0;
72 
RVCVXCheckAnalyzer(String path)73     RVCVXCheckAnalyzer(String path)
74     {
75         mPath = path;
76     }
77 
78     /**
79      * A class that contains  the analysis results
80      *
81      */
82     class AnalyzeReport {
83         public boolean error=true;
84         public String reason = "incomplete";
85 
86         // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 })
87         // unit in rad
88         public double roll_rms_error;
89         public double pitch_rms_error;
90         public double yaw_rms_error;
91 
92         // roll pitch yaw max error
93         // unit in rad
94         public double roll_max_error;
95         public double pitch_max_error;
96         public double yaw_max_error;
97 
98         // optimal t delta between sensor and camera data set to make best match
99         public double optimal_delta_t;
100         // the associate yaw offset based on initial values
101         public double yaw_offset;
102 
103         public int n_of_frame;
104         public int n_of_valid_frame;
105 
106         // both data below are in [sec]
107         public double sensor_period_avg;
108         public double sensor_period_stdev;
109 
110         /**
111          * write Json format serialization to a file in case future processing need the data
112          */
writeToFile(File file)113         public void writeToFile(File file) {
114             try {
115                 writeJSONToStream(new FileOutputStream(file));
116             } catch (FileNotFoundException e) {
117                 e.printStackTrace();
118                 Log.e(TAG, "Cannot create analyze report file.");
119             }
120         }
121 
122         /**
123          * Get the JSON format serialization
124          *@return Json format serialization as String
125          */
126         @Override
toString()127         public String toString() {
128             ByteArrayOutputStream s = new ByteArrayOutputStream();
129             writeJSONToStream(s);
130             return new String(s.toByteArray(),  java.nio.charset.StandardCharsets.UTF_8);
131         }
132 
writeJSONToStream(OutputStream s)133         private void writeJSONToStream(OutputStream s) {
134             try{
135                 JsonWriter writer =
136                         new JsonWriter(
137                                 new OutputStreamWriter( s )
138                         );
139                 writer.beginObject();
140                 writer.setLenient(true);
141 
142                 writer.name("roll_rms_error").value(roll_rms_error);
143                 writer.name("pitch_rms_error").value(pitch_rms_error);
144                 writer.name("yaw_rms_error").value(yaw_rms_error);
145                 writer.name("roll_max_error").value(roll_max_error);
146                 writer.name("pitch_max_error").value(pitch_max_error);
147                 writer.name("yaw_max_error").value(yaw_max_error);
148                 writer.name("optimal_delta_t").value(optimal_delta_t);
149                 writer.name("yaw_offset").value(yaw_offset);
150                 writer.name("n_of_frame").value(n_of_frame);
151                 writer.name("n_of_valid_frame").value(n_of_valid_frame);
152                 writer.name("sensor_period_avg").value(sensor_period_avg);
153                 writer.name("sensor_period_stdev").value(sensor_period_stdev);
154 
155                 writer.endObject();
156 
157                 writer.close();
158             } catch (IOException e) {
159                 // do nothing
160                 Log.e(TAG, "Error in serialize analyze report to JSON");
161             } catch (IllegalArgumentException e) {
162                 e.printStackTrace();
163                 Log.e(TAG, "Invalid parameter to write into JSON format");
164             }
165         }
166     }
167 
168     /**
169      *  Process data set stored in the path specified in constructor
170      *  and return an analyze report to caller
171      *
172      *  @return An AnalyzeReport that contains detailed information about analysis
173      */
processDataSet()174     public AnalyzeReport processDataSet() {
175         int nframe;// number of frames in video
176         int nslog; // number of sensor log
177         int nvlog; // number of video generated log
178 
179 
180         AnalyzeReport report = new AnalyzeReport();
181 
182         ArrayList<AttitudeRec> srecs = new ArrayList<>();
183         ArrayList<AttitudeRec> vrecs = new ArrayList<>();
184         ArrayList<AttitudeRec> srecs2 = new ArrayList<>();
185 
186 
187         final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;
188 
189         if (use_solved) {
190             nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
191             nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
192         }else {
193             nframe = analyzeVideo(vrecs);
194             nvlog = vrecs.size();
195 
196             if (LOCAL_LOGV) {
197                 Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
198             }
199             if (nvlog <= 0 || nframe <= 0) {
200                 // invalid results
201                 report.reason = "Unable to to load recorded video.";
202                 return report;
203             }
204             if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
205                 // too many invalid frames
206                 Log.w(TAG, "Too many invalid frames, n valid frame = " + nvlog +
207                         ", n total frame = " + nframe);
208                 report.reason = "Too many invalid frames.";
209                 return report;
210             }
211 
212             fixFlippedAxis(vrecs);
213 
214             nslog = loadSensorLog(srecs);
215         }
216 
217         // Gradient descent will have faster performance than this simple search,
218         // but the performance is dominated by the vision part, so it is not very necessary.
219         double delta_t;
220         double min_rms = Double.MAX_VALUE;
221         double min_delta_t =0.;
222         double min_yaw_offset =0.;
223 
224         // pre-allocation
225         for (AttitudeRec i: vrecs) {
226             srecs2.add(new AttitudeRec(0,0,0,0));
227         }
228 
229         // find optimal offset
230         for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
231             double rms;
232             resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
233             rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
234             if (rms < min_rms) {
235                 min_rms = rms;
236                 min_delta_t = delta_t;
237                 min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
238             }
239         }
240         // sample at optimal offset
241         resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);
242 
243         if (!use_solved) {
244             dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
245             dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
246         }
247         dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
248         dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);
249 
250         // fill report fields
251         report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
252         report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
253         report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));
254 
255         report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
256         report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
257         report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);
258 
259         report.optimal_delta_t = min_delta_t;
260         report.yaw_offset = (min_yaw_offset);
261 
262         report.n_of_frame = nframe;
263         report.n_of_valid_frame = nvlog;
264 
265         double [] sensor_period_stat = calcSensorPeriodStat(srecs);
266         report.sensor_period_avg = sensor_period_stat[0];
267         report.sensor_period_stdev = sensor_period_stat[1];
268 
269         // output report to file and log in JSON format as well
270         report.writeToFile(new File(mPath, "report.json"));
271         if (LOCAL_LOGV)    Log.v(TAG, "Report in JSON:" + report.toString());
272 
273         report.reason = "Completed";
274         report.error = false;
275         return report;
276     }
277 
278     /**
279      * Generate pattern geometry like this one
280      * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
281      *
282      * @return Array of 3D points
283      */
asymmetricalCircleGrid(Size size)284     private MatOfPoint3f asymmetricalCircleGrid(Size size) {
285         final int cn = 3;
286 
287         int n = (int)(size.width * size.height);
288         float positions[] = new float[n * cn];
289         float unit=0.02f;
290         MatOfPoint3f grid = new MatOfPoint3f();
291 
292         for (int i = 0; i < size.height; i++) {
293             for (int j = 0; j < size.width * cn; j += cn) {
294                 positions[(int) (i * size.width * cn + j + 0)] =
295                         (2 * (j / cn) + i % 2) * (float) unit;
296                 positions[(int) (i * size.width * cn + j + 1)] =
297                         i * unit;
298                 positions[(int) (i * size.width * cn + j + 2)] = 0;
299             }
300         }
301         grid.create(n, 1, CvType.CV_32FC3);
302         grid.put(0, 0, positions);
303         return grid;
304     }
305 
306     /**
307      *  Create a camera intrinsic matrix using input parameters
308      *
309      *  The camera intrinsic matrix will be like:
310      *
311      *       +-                       -+
312      *       |  f   0    center.width  |
313      *   A = |  0   f    center.height |
314      *       |  0   0         1        |
315      *       +-                       -+
316      *
317      *  @return An approximated (not actually calibrated) camera matrix
318      */
cameraMatrix(float f, Size center)319     private static Mat cameraMatrix(float f, Size center) {
320         final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
321         Mat m = new Mat(3,3, CvType.CV_64F);
322         m.put(0, 0, data);
323         return m;
324     }
325 
326     /**
327      *  Attitude record in time roll pitch yaw format.
328      *
329      */
330     private class AttitudeRec {
331         public double time;
332         public double roll;
333         public double pitch;
334         public double yaw;
335 
336         // ctor
AttitudeRec(double atime, double aroll, double apitch, double ayaw)337         AttitudeRec(double atime, double aroll, double apitch, double ayaw) {
338             time = atime;
339             roll = aroll;
340             pitch = apitch;
341             yaw = ayaw;
342         }
343 
344         // ctor
AttitudeRec(double atime, double [] rpy)345         AttitudeRec(double atime, double [] rpy) {
346             time = atime;
347             roll = rpy[0];
348             pitch = rpy[1];
349             yaw = rpy[2];
350         }
351 
352         // copy value of another to this
assign(AttitudeRec rec)353         void assign(AttitudeRec rec) {
354             time = rec.time;
355             roll = rec.time;
356             pitch = rec.pitch;
357             yaw = rec.yaw;
358         }
359 
360         // copy roll-pitch-yaw value but leave the time specified by atime
assign(AttitudeRec rec, double atime)361         void assign(AttitudeRec rec, double atime) {
362             time = atime;
363             roll = rec.time;
364             pitch = rec.pitch;
365             yaw = rec.yaw;
366         }
367 
368         // set each field separately
set(double atime, double aroll, double apitch, double ayaw)369         void set(double atime, double aroll, double apitch, double ayaw) {
370             time = atime;
371             roll = aroll;
372             pitch = apitch;
373             yaw = ayaw;
374         }
375     }
376 
377 
378     /**
379      *  Load the sensor log in (time Roll-pitch-yaw) format to a ArrayList<AttitudeRec>
380      *
381      *  @return the number of sensor log items
382      */
loadSensorLog(ArrayList<AttitudeRec> recs)383     private int loadSensorLog(ArrayList<AttitudeRec> recs) {
384         //ArrayList<AttitudeRec> recs = new ArrayList<AttitudeRec>();
385         File csvFile = new File(mPath, "sensor.log");
386         BufferedReader br=null;
387         String line;
388 
389         // preallocate and reuse
390         double [] quat = new double[4];
391         double [] rpy = new double[3];
392 
393         double t0 = -1;
394 
395         try {
396             br = new BufferedReader(new FileReader(csvFile));
397             while ((line = br.readLine()) != null) {
398                 //space separator
399                 String[] items = line.split(" ");
400 
401                 if (items.length != 5) {
402                     recs.clear();
403                     return -1;
404                 }
405 
406                 quat[0] = Double.parseDouble(items[1]);
407                 quat[1] = Double.parseDouble(items[2]);
408                 quat[2] = Double.parseDouble(items[3]);
409                 quat[3] = Double.parseDouble(items[4]);
410 
411                 //
412                 quat2rpy(quat, rpy);
413 
414                 if (t0 < 0) {
415                     t0 = Long.parseLong(items[0])/1e9;
416                 }
417                 recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
418             }
419 
420         } catch (FileNotFoundException e) {
421             e.printStackTrace();
422             Log.e(TAG, "Cannot find sensor logging data");
423         } catch (IOException e) {
424             e.printStackTrace();
425             Log.e(TAG, "Cannot read sensor logging data");
426         } finally {
427             if (br != null) {
428                 try {
429                     br.close();
430                 } catch (IOException e) {
431                     e.printStackTrace();
432                 }
433             }
434         }
435 
436         return recs.size();
437     }
438 
439     /**
440      * Read video meta info
441      */
442     private class VideoMetaInfo {
443         public double fps;
444         public int frameWidth;
445         public int frameHeight;
446         public double fovWidth;
447         public double fovHeight;
448         public boolean valid = false;
449 
VideoMetaInfo(File file)450         VideoMetaInfo(File file) {
451 
452             BufferedReader br=null;
453             String line;
454             String content="";
455             try {
456                 br = new BufferedReader(new FileReader(file));
457                 while ((line = br.readLine()) != null) {
458                     content = content +line;
459                 }
460 
461             } catch (FileNotFoundException e) {
462                 e.printStackTrace();
463                 Log.e(TAG, "Cannot find video meta info file");
464             } catch (IOException e) {
465                 e.printStackTrace();
466                 Log.e(TAG, "Cannot read video meta info file");
467             } finally {
468                 if (br != null) {
469                     try {
470                         br.close();
471                     } catch (IOException e) {
472                         e.printStackTrace();
473                     }
474                 }
475             }
476 
477             if (content.isEmpty()) {
478                 return;
479             }
480 
481             try {
482                 JSONObject json = new JSONObject(content);
483                 frameWidth = json.getInt("width");
484                 frameHeight = json.getInt("height");
485                 fps = json.getDouble("frameRate");
486                 fovWidth = json.getDouble("fovW")*Math.PI/180.0;
487                 fovHeight = json.getDouble("fovH")*Math.PI/180.0;
488             } catch (JSONException e) {
489                 return;
490             }
491 
492             valid = true;
493 
494         }
495     }
496 
497 
498 
499     /**
500      * Debugging helper function, load ArrayList<AttitudeRec> from a file dumped out by
501      * dumpAttitudeRecs
502      */
loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs)503     private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
504         BufferedReader br=null;
505         String line;
506         double time;
507         double [] rpy = new double[3];
508 
509         try {
510             br = new BufferedReader(new FileReader(file));
511             while ((line = br.readLine()) != null) {
512                 //space separator
513                 String[] items = line.split(" ");
514 
515                 if (items.length != 4) {
516                     recs.clear();
517                     return -1;
518                 }
519 
520                 time = Double.parseDouble(items[0]);
521                 rpy[0] = Double.parseDouble(items[1]);
522                 rpy[1] = Double.parseDouble(items[2]);
523                 rpy[2] = Double.parseDouble(items[3]);
524 
525                 recs.add(new AttitudeRec(time, rpy));
526             }
527 
528         } catch (FileNotFoundException e) {
529             e.printStackTrace();
530             Log.e(TAG, "Cannot find AttitudeRecs file specified.");
531         } catch (IOException e) {
532             e.printStackTrace();
533             Log.e(TAG, "Read AttitudeRecs file failure");
534         } finally {
535             if (br != null) {
536                 try {
537                     br.close();
538                 } catch (IOException e) {
539                     e.printStackTrace();
540                 }
541             }
542         }
543 
544         return recs.size();
545     }
546     /**
547      * Debugging helper function, Dump an ArrayList<AttitudeRec> to a file
548      */
dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs)549     private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
550         OutputStreamWriter w=null;
551         try {
552             w = new OutputStreamWriter(new FileOutputStream(file));
553 
554             for (AttitudeRec r : recs) {
555                 w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw));
556             }
557             w.close();
558         } catch(FileNotFoundException e) {
559             e.printStackTrace();
560             Log.e(TAG, "Cannot create AttitudeRecs file.");
561         } catch (IOException e) {
562             Log.e(TAG, "Write AttitudeRecs file failure");
563         } finally {
564             if (w!=null) {
565                 try {
566                     w.close();
567                 } catch (IOException e) {
568                     e.printStackTrace();
569                 }
570             }
571         }
572     }
573 
574     /**
575      *  Read the sensor log in ArrayList<AttitudeRec> format and find out the sensor sample time
576      *  statistics: mean and standard deviation.
577      *
578      *  @return The returned value will be a double array with exact 2 items, first [0] will be
579      *  mean and the second [1]  will be the standard deviation.
580      *
581      */
calcSensorPeriodStat(ArrayList<AttitudeRec> srec)582     private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec)   {
583         double tp = srec.get(0).time;
584         int i;
585         double sum = 0.0;
586         double sumsq = 0.0;
587         for(i=1; i<srec.size(); ++i) {
588             double dt;
589             dt = srec.get(i).time - tp;
590             sum += dt;
591             sumsq += dt*dt;
592             tp += dt;
593         }
594         double [] ret = new double[2];
595         ret[0] = sum/srec.size();
596         ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]);
597         return ret;
598     }
599 
600     /**
601      * Flipping the axis as the image are flipped upside down in OpenGL frames
602      */
fixFlippedAxis(ArrayList<AttitudeRec> vrecs)603     private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs)   {
604         for (AttitudeRec i: vrecs) {
605             i.yaw = -i.yaw;
606         }
607     }
608 
609     /**
610      *  Calculate the maximum error on the specified axis between two time aligned (resampled)
611      *  ArrayList<AttitudeRec>. Yaw axis needs special treatment as 0 and 2pi error are same thing
612      *
613      * @param ra  one ArrayList of AttitudeRec
614      * @param rb  the other ArrayList of AttitudeRec
615      * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
616      * @return Maximum error
617      */
calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)618     private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)  {
619         // check if they are valid and comparable data
620         if (ra.size() != rb.size()) {
621             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
622         }
623         // check input parameter validity
624         if (axis<0 || axis > 2) {
625             throw new IllegalArgumentException("Invalid data axis.");
626         }
627 
628         int i;
629         double max = 0.0;
630         double diff = 0.0;
631         for(i=0; i<ra.size(); ++i) {
632             // make sure they are aligned data
633             if (ra.get(i).time != rb.get(i).time) {
634                 throw new IllegalArgumentException("Element "+i+
635                         " of two inputs has different time.");
636             }
637             switch(axis) {
638                 case 0:
639                     diff = ra.get(i).roll - rb.get(i).roll; // they always opposite of each other..
640                     break;
641                 case 1:
642                     diff = ra.get(i).pitch - rb.get(i).pitch;
643                     break;
644                 case 2:
645                     diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
646                             -Math.PI)-Math.PI;
647                     break;
648             }
649             diff = Math.abs(diff);
650             if (diff>max) {
651                 max = diff;
652             }
653         }
654         return max;
655     }
656 
657     /**
658      *  Calculate the RMS error on the specified axis between two time aligned (resampled)
659      *  ArrayList<AttitudeRec>. Yaw axis needs special treatment as 0 and 2pi error are same thing
660      *
661      * @param ra  one ArrayList of AttitudeRec
662      * @param rb  the other ArrayList of AttitudeRec
663      * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
664      * @return Mean square error
665      */
calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)666     private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
667         // check if they are valid and comparable data
668         if (ra.size() != rb.size()) {
669             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
670         }
671         // check input parameter validity
672         if (axis<0 || axis > 2) {
673             throw new IllegalArgumentException("Invalid data axis.");
674         }
675 
676         int i;
677         double sum = 0.0;
678         double diff = 0.0;
679         for(i=0; i<ra.size(); ++i) {
680             // check input data validity
681             if (ra.get(i).time != rb.get(i).time) {
682                 throw new IllegalArgumentException("Element "+i+
683                         " of two inputs has different time.");
684             }
685 
686             switch(axis) {
687                 case 0:
688                     diff = ra.get(i).roll - rb.get(i).roll;
689                     break;
690                 case 1:
691                     diff = ra.get(i).pitch - rb.get(i).pitch;
692                     break;
693                 case 2:
694                     diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))-
695                             Math.PI)-Math.PI;
696                     break;
697             }
698 
699             sum += diff*diff;
700         }
701         return sum/ra.size();
702     }
703 
704     /**
705      * Debugging helper function. Dump the error between two time aligned ArrayList<AttitudeRec>'s
706      *
707      * @param file File to write to
708      * @param ra  one ArrayList of AttitudeRec
709      * @param rb  the other ArrayList of AttitudeRec
710      */
dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb)711     private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
712         if (ra.size() != rb.size()) {
713             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
714         }
715 
716         int i;
717 
718         ArrayList<AttitudeRec> rerr = new ArrayList<>();
719         for(i=0; i<ra.size(); ++i) {
720             if (ra.get(i).time != rb.get(i).time) {
721                 throw new IllegalArgumentException("Element "+ i
722                         + " of two inputs has different time.");
723             }
724 
725             rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
726                     ra.get(i).pitch - rb.get(i).pitch,
727                     (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
728                             -Math.PI)-Math.PI)));
729 
730         }
731         dumpAttitudeRecs(file, rerr);
732     }
733 
734     /**
735      * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
736      *
737      * @param rec           the ArrayList of AttitudeRec to be sampled
738      * @param timebase      the other ArrayList of AttitudeRec that serves as time base
739      * @param delta_t       offset in time before resample
740      * @param yaw_offset    offset in yaw axis
741      * @param resampled     output ArrayList of AttitudeRec
742      */
743 
resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase, double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)744     private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
745             double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)    {
746         int i;
747         int j = -1;
748         for(i=0; i<timebase.size(); i++) {
749             double time = timebase.get(i).time + delta_t;
750 
751             while(j<rec.size()-1 && rec.get(j+1).time < time) j++;
752 
753             if (j == -1) {
754                 //use first
755                 resampled.get(i).assign(rec.get(0), timebase.get(i).time);
756             } else if (j == rec.size()-1) {
757                 // use last
758                 resampled.get(i).assign(rec.get(j), timebase.get(i).time);
759             } else {
760                 // do linear resample
761                 double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
762                 double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
763                 double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
764                 double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
765                 resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
766             }
767         }
768     }
769 
770     /**
771      * Analyze video frames using computer vision approach and generate a ArrayList<AttitudeRec>
772      *
773      * @param recs  output ArrayList of AttitudeRec
774      * @return total number of frame of the video
775      */
analyzeVideo(ArrayList<AttitudeRec> recs)776     private int analyzeVideo(ArrayList<AttitudeRec> recs) {
777         VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));
778 
779         int decimation = 1;
780         boolean use_timestamp = true;
781 
782         // roughly determine if decimation is necessary
783         if (meta.fps > DECIMATION_FPS_TARGET) {
784             decimation = (int)(meta.fps / DECIMATION_FPS_TARGET);
785             meta.fps /=decimation;
786         }
787 
788         VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
789                 new File(mPath, "video.mp4"), decimation);
790 
791 
792         Mat frame;
793         Mat gray = new Mat();
794         int i = -1;
795 
796         Size frameSize = videoDecoder.getSize();
797 
798         if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
799             // this is very unlikely
800             return -1;
801         }
802 
803         if (TRACE_VIDEO_ANALYSIS) {
804             Debug.startMethodTracing("cvprocess");
805         }
806 
807         Size patternSize = new Size(4,11);
808 
809         float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
810         Mat camMat = cameraMatrix(fc, new Size(frameSize.width/2, frameSize.height/2));
811         MatOfDouble coeff = new MatOfDouble(); // dummy
812 
813         MatOfPoint2f centers = new MatOfPoint2f();
814         MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
815         Mat rvec = new MatOfFloat();
816         Mat tvec = new MatOfFloat();
817 
818         MatOfPoint2f reprojCenters = new MatOfPoint2f();
819 
820         if (LOCAL_LOGV) {
821             Log.v(TAG, "Camera Mat = \n" + camMat.dump());
822         }
823 
824         long startTime = System.nanoTime();
825         long [] ts = new long[1];
826 
827         while ((frame = videoDecoder.getFrame(ts)) !=null) {
828             if (LOCAL_LOGV) {
829                 Log.v(TAG, "got a frame " + i);
830             }
831 
832             if (use_timestamp && ts[0] == -1) {
833                 use_timestamp = false;
834             }
835 
836             // has to be in front, as there are cases where execution
837             // will skip the later part of this while
838             i++;
839 
840             // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
841             Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);
842 
843             boolean foundPattern = Calib3d.findCirclesGridDefault(
844                     gray,  patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
845 
846             if (!foundPattern) {
847                 // skip to next frame
848                 continue;
849             }
850 
851             if (OUTPUT_DEBUG_IMAGE) {
852                 Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
853             }
854 
855             // figure out the extrinsic parameters using real ground truth 3D points and the pixel
856             // position of blobs found in findCircleGrid, an estimated camera matrix and
857             // no-distortion are assumed.
858             boolean foundSolution =
859                     Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
860                             false, Calib3d.CV_ITERATIVE);
861 
862             if (!foundSolution) {
863                 // skip to next frame
864                 if (LOCAL_LOGV) {
865                     Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
866                 }
867                 continue;
868             }
869 
870             // reproject points to for evaluation of result accuracy of solvePnP
871             Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);
872 
873             // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
874             double error = Core.norm(centers, reprojCenters, Core.NORM_L2);
875 
876             if (LOCAL_LOGV) {
877                 Log.v(TAG, "Found attitude, re-projection error = " + error);
878             }
879 
880             // if error is reasonable, add it into the results
881             if (error < REPROJECTION_THREASHOLD) {
882                 double [] rv = new double[3];
883                 double timestamp;
884 
885                 rvec.get(0,0, rv);
886                 if (use_timestamp) {
887                     timestamp = (double)ts[0] / 1e6;
888                 } else {
889                     timestamp = (double) i / meta.fps;
890                 }
891                 if (LOCAL_LOGV) Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
892                 recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
893             }
894 
895             if (OUTPUT_DEBUG_IMAGE) {
896                 Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
897                 Highgui.imwrite(Environment.getExternalStorageDirectory().getPath()
898                         + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
899             }
900         }
901 
902         if (LOCAL_LOGV) {
903             Log.v(TAG, "Finished decoding");
904         }
905 
906         if (TRACE_VIDEO_ANALYSIS) {
907             Debug.stopMethodTracing();
908         }
909 
910         if (LOCAL_LOGV) {
911             // time analysis
912             double totalTime = (System.nanoTime()-startTime)/1e9;
913             Log.i(TAG, "Total time: "+totalTime +"s, Per frame time: "+totalTime/i );
914         }
915         return i;
916     }
917 
918     /**
919      * OpenCV for Android have not support the VideoCapture from file
920      * This is a make shift solution before it is supported.
921      * One issue right now is that the glReadPixels is quite slow .. around 6.5ms for a 720p frame
922      */
923     private class VideoDecoderForOpenCV implements Runnable {
924         static final String TAG = "VideoDecoderForOpenCV";
925 
926         private MediaExtractor extractor=null;
927         private MediaCodec decoder=null;
928         private CtsMediaOutputSurface surface=null;
929 
930         private MatBuffer mMatBuffer;
931 
932         private final File mVideoFile;
933 
934         private boolean valid;
935         private Object setupSignal;
936 
937         private Thread mThread;
938         private int mDecimation;
939 
940         /**
941          * Constructor
942          * @param file video file
943          * @param decimation process every "decimation" number of frame
944          */
VideoDecoderForOpenCV(File file, int decimation)945         VideoDecoderForOpenCV(File file, int decimation) {
946             mVideoFile = file;
947             mDecimation = decimation;
948             valid = false;
949 
950             start();
951         }
952 
953         /**
954          * Constructor
955          * @param file video file
956          */
VideoDecoderForOpenCV(File file)957         VideoDecoderForOpenCV(File file)   {
958             this(file, 1);
959         }
960 
961         /**
962          * Test if video decoder is in valid states ready to output video.
963          * @return true of force.
964          */
isValid()965         public boolean isValid() {
966             return valid;
967         }
968 
start()969         private void start() {
970             setupSignal = new Object();
971             mThread = new Thread(this);
972             mThread.start();
973 
974             synchronized (setupSignal) {
975                 try {
976                     setupSignal.wait();
977                 } catch (InterruptedException e) {
978                     Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
979                 }
980             }
981         }
stop()982         private void stop() {
983             if (mThread != null) {
984                 mThread.interrupt();
985                 try {
986                     mThread.join();
987                 } catch (InterruptedException e) {
988                     Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
989                 }
990                 try {
991                     decoder.stop();
992                 }catch (IllegalStateException e) {
993                     Log.e(TAG, "Video decoder is not in a state that can be stopped");
994                 }
995             }
996             mThread = null;
997         }
998 
teardown()999         void teardown() {
1000             if (decoder!=null) {
1001                 decoder.release();
1002                 decoder = null;
1003             }
1004             if (surface!=null) {
1005                 surface.release();
1006                 surface = null;
1007             }
1008             if (extractor!=null) {
1009                 extractor.release();
1010                 extractor = null;
1011             }
1012         }
1013 
setup()1014         void setup() {
1015             int width=0, height=0;
1016 
1017             extractor = new MediaExtractor();
1018 
1019             try {
1020                 extractor.setDataSource(mVideoFile.getPath());
1021             } catch (IOException e) {
1022                 return;
1023             }
1024 
1025             for (int i = 0; i < extractor.getTrackCount(); i++) {
1026                 MediaFormat format = extractor.getTrackFormat(i);
1027                 String mime = format.getString(MediaFormat.KEY_MIME);
1028                 width = format.getInteger(MediaFormat.KEY_WIDTH);
1029                 height = format.getInteger(MediaFormat.KEY_HEIGHT);
1030 
1031                 if (mime.startsWith("video/")) {
1032                     extractor.selectTrack(i);
1033                     try {
1034                         decoder = MediaCodec.createDecoderByType(mime);
1035                     }catch (IOException e) {
1036                         continue;
1037                     }
1038                     // Decode to surface
1039                     //decoder.configure(format, surface, null, 0);
1040 
1041                     // Decode to offscreen surface
1042                     surface = new CtsMediaOutputSurface(width, height);
1043                     mMatBuffer = new MatBuffer(width, height);
1044 
1045                     decoder.configure(format, surface.getSurface(), null, 0);
1046                     break;
1047                 }
1048             }
1049 
1050             if (decoder == null) {
1051                 Log.e(TAG, "Can't find video info!");
1052                 return;
1053             }
1054             valid = true;
1055         }
1056 
1057         @Override
run()1058         public void run() {
1059             setup();
1060 
1061             synchronized (setupSignal) {
1062                 setupSignal.notify();
1063             }
1064 
1065             if (!valid) {
1066                 return;
1067             }
1068 
1069             decoder.start();
1070 
1071             ByteBuffer[] inputBuffers = decoder.getInputBuffers();
1072             ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
1073             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1074 
1075             boolean isEOS = false;
1076             long startMs = System.currentTimeMillis();
1077             long timeoutUs = 10000;
1078 
1079             int iframe = 0;
1080             long frameTimestamp = 0;
1081 
1082             while (!Thread.interrupted()) {
1083                 if (!isEOS) {
1084                     int inIndex = decoder.dequeueInputBuffer(10000);
1085                     if (inIndex >= 0) {
1086                         ByteBuffer buffer = inputBuffers[inIndex];
1087                         int sampleSize = extractor.readSampleData(buffer, 0);
1088                         if (sampleSize < 0) {
1089                             if (LOCAL_LOGD) {
1090                                 Log.d("VideoDecoderForOpenCV",
1091                                         "InputBuffer BUFFER_FLAG_END_OF_STREAM");
1092                             }
1093                             decoder.queueInputBuffer(inIndex, 0, 0, 0,
1094                                     MediaCodec.BUFFER_FLAG_END_OF_STREAM);
1095                             isEOS = true;
1096                         } else {
1097                             frameTimestamp = extractor.getSampleTime();
1098                             decoder.queueInputBuffer(inIndex, 0, sampleSize, frameTimestamp, 0);
1099                             if (LOCAL_LOGD) {
1100                                 Log.d(TAG, String.format("Frame %d sample time %f s",
1101                                             iframe, (double)frameTimestamp/1e6));
1102                             }
1103                             extractor.advance();
1104                         }
1105                     }
1106                 }
1107 
1108                 int outIndex = decoder.dequeueOutputBuffer(info, 10000);
1109                 MediaFormat outFormat;
1110                 switch (outIndex) {
1111                     case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
1112                         if (LOCAL_LOGD) {
1113                             Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
1114                         }
1115                         outputBuffers = decoder.getOutputBuffers();
1116                         break;
1117                     case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
1118                         outFormat = decoder.getOutputFormat();
1119                         if (LOCAL_LOGD) {
1120                             Log.d(TAG, "New format " + outFormat);
1121                         }
1122                         break;
1123                     case MediaCodec.INFO_TRY_AGAIN_LATER:
1124                         if (LOCAL_LOGD) {
1125                             Log.d(TAG, "dequeueOutputBuffer timed out!");
1126                         }
1127                         break;
1128                     default:
1129 
1130                         ByteBuffer buffer = outputBuffers[outIndex];
1131                         boolean doRender = (info.size != 0);
1132 
1133                         // As soon as we call releaseOutputBuffer, the buffer will be forwarded
1134                         // to SurfaceTexture to convert to a texture.  The API doesn't
1135                         // guarantee that the texture will be available before the call
1136                         // returns, so we need to wait for the onFrameAvailable callback to
1137                         // fire.  If we don't wait, we risk rendering from the previous frame.
1138                         decoder.releaseOutputBuffer(outIndex, doRender);
1139 
1140                         if (doRender) {
1141                             surface.awaitNewImage();
1142                             surface.drawImage();
1143                             if (LOCAL_LOGV) {
1144                                 Log.v(TAG, "Finish drawing a frame!");
1145                             }
1146                             if ((iframe++ % mDecimation) == 0) {
1147                                 //Send the frame for processing
1148                                 mMatBuffer.put(frameTimestamp);
1149                             }
1150                         }
1151                         break;
1152                 }
1153 
1154                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1155                     if (LOCAL_LOGD) {
1156                         Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
1157                     }
1158                     break;
1159                 }
1160             }
1161             mMatBuffer.invalidate();
1162 
1163             decoder.stop();
1164 
1165             teardown();
1166             mThread = null;
1167         }
1168 
1169 
1170         /**
1171          * Get next valid frame
1172          * @return Frame in OpenCV mat
1173          */
getFrame(long ts[])1174         public Mat getFrame(long ts[]) {
1175             return mMatBuffer.get(ts);
1176         }
1177 
1178         /**
1179          * Get the size of the frame
1180          * @return size of the frame
1181          */
getSize()1182         Size getSize() {
1183             return mMatBuffer.getSize();
1184         }
1185 
1186         /**
1187          * A synchronized buffer
1188          */
1189         class MatBuffer {
1190             private Mat mat;
1191             private byte[] bytes;
1192             private ByteBuffer buf;
1193             private long timestamp;
1194             private boolean full;
1195 
1196             private int mWidth, mHeight;
1197             private boolean mValid = false;
1198 
MatBuffer(int width, int height)1199             MatBuffer(int width, int height) {
1200                 mWidth = width;
1201                 mHeight = height;
1202 
1203                 mat = new Mat(height, width, CvType.CV_8UC4); //RGBA
1204                 buf = ByteBuffer.allocateDirect(width*height*4);
1205                 bytes = new byte[width*height*4];
1206                 timestamp = -1;
1207 
1208                 mValid = true;
1209                 full = false;
1210             }
1211 
invalidate()1212             public synchronized void invalidate() {
1213                 mValid = false;
1214                 notifyAll();
1215             }
1216 
get(long ts[])1217             public synchronized Mat get(long ts[]) {
1218 
1219                 if (!mValid) return null;
1220                 while (full == false) {
1221                     try {
1222                         wait();
1223                         if (!mValid) return null;
1224                     } catch (InterruptedException e) {
1225                         return null;
1226                     }
1227                 }
1228                 mat.put(0,0, bytes);
1229                 full = false;
1230                 notifyAll();
1231                 ts[0] = timestamp;
1232                 return mat;
1233             }
1234 
put(long ts)1235             public synchronized void put(long ts) {
1236                 while (full) {
1237                     try {
1238                         wait();
1239                     } catch (InterruptedException e) {
1240                         Log.e(TAG, "Interrupted when waiting for space in buffer");
1241                     }
1242                 }
1243                 GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA,
1244                         GL10.GL_UNSIGNED_BYTE, buf);
1245                 buf.get(bytes);
1246                 buf.rewind();
1247 
1248                 timestamp = ts;
1249                 full = true;
1250                 notifyAll();
1251             }
1252 
getSize()1253             public Size getSize() {
1254                 if (valid) {
1255                     return mat.size();
1256                 }
1257                 return new Size();
1258             }
1259         }
1260     }
1261 
1262 
1263     /* a small set of math functions */
quat2rpy( double [] q)1264     private static double [] quat2rpy( double [] q) {
1265         double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
1266                 Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
1267                 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
1268         return rpy;
1269     }
1270 
quat2rpy( double [] q, double[] rpy)1271     private static void quat2rpy( double [] q, double[] rpy) {
1272         rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2]));
1273         rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1]));
1274         rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]));
1275     }
1276 
quat2rpy(Mat quat)1277     private static Mat quat2rpy(Mat quat) {
1278         double [] q = new double[4];
1279         quat.get(0,0,q);
1280 
1281         double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
1282                 Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
1283                 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
1284 
1285         Mat rpym = new Mat(3,1, CvType.CV_64F);
1286         rpym.put(0,0, rpy);
1287         return rpym;
1288     }
1289 
rodr2quat( double [] r)1290     private static double [] rodr2quat( double [] r) {
1291         double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
1292         double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
1293                 Math.sin(t/2)*r[2]/t};
1294         return quat;
1295     }
1296 
rodr2quat( double [] r, double [] quat)1297     private static void rodr2quat( double [] r, double [] quat) {
1298         double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
1299         quat[0] = Math.cos(t/2);
1300         quat[1] = Math.sin(t/2)*r[0]/t;
1301         quat[2] = Math.sin(t/2)*r[1]/t;
1302         quat[3] = Math.sin(t/2)*r[2]/t;
1303     }
1304 
rodr2quat(Mat rodr)1305     private static Mat rodr2quat(Mat rodr) {
1306         double t = Core.norm(rodr);
1307         double [] r = new double[3];
1308         rodr.get(0,0,r);
1309 
1310         double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
1311                 Math.sin(t/2)*r[2]/t};
1312         Mat quatm = new Mat(4,1, CvType.CV_64F);
1313         quatm.put(0, 0, quat);
1314         return quatm;
1315     }
1316 
rodr2rpy( double [] r)1317     private static double [] rodr2rpy( double [] r) {
1318         return quat2rpy(rodr2quat(r));
1319     }
1320     //////////////////
1321 
1322 }
1323