package com.android.cts.verifier.sensors;

/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Debug;
import android.os.Environment;
import android.os.PowerManager;
import android.util.JsonWriter;
import android.util.Log;

import org.json.JSONException;
import org.json.JSONObject;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Locale;

import javax.microedition.khronos.opengles.GL10;
56 
/**
 *  This class does analysis on the recorded RVCVCXCheck data sets.
 */
public class RVCVXCheckAnalyzer {
    private static final String TAG = "RVCVXAnalysis";
    // local verbose / debug logging switches
    private static final boolean LOCAL_LOGV = false;
    private static final boolean LOCAL_LOGD = true;
    // directory that contains the recorded data set to analyze
    private final String mPath;

    // when true, dump annotated frames for debugging
    private static final boolean OUTPUT_DEBUG_IMAGE = false;
    // minimum fraction of frames in which the pattern must be detected
    private static final double VALID_FRAME_THRESHOLD = 0.8;
    // reprojection error threshold, as a ratio (used later in the vision analysis)
    private static final double REPROJECTION_THREASHOLD_RATIO = 0.008;
    // force the computer-vision pass even if a solved vision log already exists
    private static final boolean FORCE_CV_ANALYSIS  = false;
    // enable Debug.startMethodTracing around the vision pass
    private static final boolean TRACE_VIDEO_ANALYSIS = false;
    // decimate video frames down to roughly this frame rate before analysis
    private static final double DECIMATION_FPS_TARGET = 15.0;
    // minimum acceptable recording length, in seconds
    private static final double MIN_VIDEO_LENGTH_SEC = 10;
RVCVXCheckAnalyzer(String path)74     RVCVXCheckAnalyzer(String path)
75     {
76         mPath = path;
77     }
78 
79     /**
80      * A class that contains  the analysis results
81      *
82      */
83     class AnalyzeReport {
84         public boolean error=true;
85         public String reason = "incomplete";
86 
87         // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 })
88         // unit in rad
89         public double roll_rms_error;
90         public double pitch_rms_error;
91         public double yaw_rms_error;
92 
93         // roll pitch yaw max error
94         // unit in rad
95         public double roll_max_error;
96         public double pitch_max_error;
97         public double yaw_max_error;
98 
99         // optimal t delta between sensor and camera data set to make best match
100         public double optimal_delta_t;
101         // the associate yaw offset based on initial values
102         public double yaw_offset;
103 
104         public int n_of_frame;
105         public int n_of_valid_frame;
106 
107         // both data below are in [sec]
108         public double sensor_period_avg;
109         public double sensor_period_stdev;
110 
111         /**
112          * write Json format serialization to a file in case future processing need the data
113          */
writeToFile(File file)114         public void writeToFile(File file) {
115             try {
116                 writeJSONToStream(new FileOutputStream(file));
117             } catch (FileNotFoundException e) {
118                 e.printStackTrace();
119                 Log.e(TAG, "Cannot create analyze report file.");
120             }
121         }
122 
123         /**
124          * Get the JSON format serialization
125          *@return Json format serialization as String
126          */
127         @Override
toString()128         public String toString() {
129             ByteArrayOutputStream s = new ByteArrayOutputStream();
130             writeJSONToStream(s);
131             return new String(s.toByteArray(),  java.nio.charset.StandardCharsets.UTF_8);
132         }
133 
writeJSONToStream(OutputStream s)134         private void writeJSONToStream(OutputStream s) {
135             try{
136                 JsonWriter writer =
137                         new JsonWriter(
138                                 new OutputStreamWriter( s )
139                         );
140                 writer.beginObject();
141                 writer.setLenient(true);
142 
143                 writer.name("roll_rms_error").value(roll_rms_error);
144                 writer.name("pitch_rms_error").value(pitch_rms_error);
145                 writer.name("yaw_rms_error").value(yaw_rms_error);
146                 writer.name("roll_max_error").value(roll_max_error);
147                 writer.name("pitch_max_error").value(pitch_max_error);
148                 writer.name("yaw_max_error").value(yaw_max_error);
149                 writer.name("optimal_delta_t").value(optimal_delta_t);
150                 writer.name("yaw_offset").value(yaw_offset);
151                 writer.name("n_of_frame").value(n_of_frame);
152                 writer.name("n_of_valid_frame").value(n_of_valid_frame);
153                 writer.name("sensor_period_avg").value(sensor_period_avg);
154                 writer.name("sensor_period_stdev").value(sensor_period_stdev);
155 
156                 writer.endObject();
157 
158                 writer.close();
159             } catch (IOException e) {
160                 // do nothing
161                 Log.e(TAG, "Error in serialize analyze report to JSON");
162             } catch (IllegalArgumentException e) {
163                 e.printStackTrace();
164                 Log.e(TAG, "Invalid parameter to write into JSON format");
165             }
166         }
167     }
168 
169     /**
170      *  Process data set stored in the path specified in constructor
171      *  and return an analyze report to caller
172      *
173      *  @return An AnalyzeReport that contains detailed information about analysis
174      */
processDataSet()175     public AnalyzeReport processDataSet() {
176         int nframe;// number of frames in video
177         int nslog; // number of sensor log
178         int nvlog; // number of video generated log
179 
180 
181         AnalyzeReport report = new AnalyzeReport();
182 
183         ArrayList<AttitudeRec> srecs = new ArrayList<>();
184         ArrayList<AttitudeRec> vrecs = new ArrayList<>();
185         ArrayList<AttitudeRec> srecs2 = new ArrayList<>();
186 
187 
188         final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;
189 
190         if (use_solved) {
191             nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
192             nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
193         }else {
194             nframe = analyzeVideo(vrecs);
195             nvlog = vrecs.size();
196 
197             if (LOCAL_LOGV) {
198                 Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
199             }
200             if (nvlog <= 0 || nframe <= 0) {
201                 // invalid results
202                 report.reason = "Unable to to load recorded video.";
203                 return report;
204             }
205             if (nframe < MIN_VIDEO_LENGTH_SEC*VALID_FRAME_THRESHOLD) {
206                 // video is too short
207                 Log.w(TAG, "Video record is to short, n frame = " + nframe);
208                 report.reason = "Video too short.";
209                 return report;
210             }
211             if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
212                 // too many invalid frames
213                 Log.w(TAG, "Too many invalid frames, n valid frame = " + nvlog +
214                         ", n total frame = " + nframe);
215                 report.reason = "Too many invalid frames.";
216                 return report;
217             }
218 
219             fixFlippedAxis(vrecs);
220 
221             nslog = loadSensorLog(srecs);
222         }
223 
224         // Gradient descent will have faster performance than this simple search,
225         // but the performance is dominated by the vision part, so it is not very necessary.
226         double delta_t;
227         double min_rms = Double.MAX_VALUE;
228         double min_delta_t =0.;
229         double min_yaw_offset =0.;
230 
231         // pre-allocation
232         for (AttitudeRec i: vrecs) {
233             srecs2.add(new AttitudeRec(0,0,0,0));
234         }
235 
236         // find optimal offset
237         for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
238             double rms;
239             resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
240             rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
241             if (rms < min_rms) {
242                 min_rms = rms;
243                 min_delta_t = delta_t;
244                 min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
245             }
246         }
247         // sample at optimal offset
248         resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);
249 
250         if (!use_solved) {
251             dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
252             dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
253         }
254         dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
255         dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);
256 
257         // fill report fields
258         report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
259         report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
260         report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));
261 
262         report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
263         report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
264         report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);
265 
266         report.optimal_delta_t = min_delta_t;
267         report.yaw_offset = (min_yaw_offset);
268 
269         report.n_of_frame = nframe;
270         report.n_of_valid_frame = nvlog;
271 
272         double [] sensor_period_stat = calcSensorPeriodStat(srecs);
273         report.sensor_period_avg = sensor_period_stat[0];
274         report.sensor_period_stdev = sensor_period_stat[1];
275 
276         // output report to file and log in JSON format as well
277         report.writeToFile(new File(mPath, "report.json"));
278         if (LOCAL_LOGV)    Log.v(TAG, "Report in JSON:" + report.toString());
279 
280         report.reason = "Completed";
281         report.error = false;
282         return report;
283     }
284 
285     /**
286      * Generate pattern geometry like this one
287      * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
288      *
289      * @return Array of 3D points
290      */
asymmetricalCircleGrid(Size size)291     private MatOfPoint3f asymmetricalCircleGrid(Size size) {
292         final int cn = 3;
293 
294         int n = (int)(size.width * size.height);
295         float positions[] = new float[n * cn];
296         float unit=0.02f;
297         MatOfPoint3f grid = new MatOfPoint3f();
298 
299         for (int i = 0; i < size.height; i++) {
300             for (int j = 0; j < size.width * cn; j += cn) {
301                 positions[(int) (i * size.width * cn + j + 0)] =
302                         (2 * (j / cn) + i % 2) * (float) unit;
303                 positions[(int) (i * size.width * cn + j + 1)] =
304                         i * unit;
305                 positions[(int) (i * size.width * cn + j + 2)] = 0;
306             }
307         }
308         grid.create(n, 1, CvType.CV_32FC3);
309         grid.put(0, 0, positions);
310         return grid;
311     }
312 
313     /**
314      *  Create a camera intrinsic matrix using input parameters
315      *
316      *  The camera intrinsic matrix will be like:
317      *
318      *       +-                       -+
319      *       |  f   0    center.width  |
320      *   A = |  0   f    center.height |
321      *       |  0   0         1        |
322      *       +-                       -+
323      *
324      *  @return An approximated (not actually calibrated) camera matrix
325      */
cameraMatrix(float f, Size center)326     private static Mat cameraMatrix(float f, Size center) {
327         final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
328         Mat m = new Mat(3,3, CvType.CV_64F);
329         m.put(0, 0, data);
330         return m;
331     }
332 
333     /**
334      *  Attitude record in time roll pitch yaw format.
335      *
336      */
337     private class AttitudeRec {
338         public double time;
339         public double roll;
340         public double pitch;
341         public double yaw;
342 
343         // ctor
AttitudeRec(double atime, double aroll, double apitch, double ayaw)344         AttitudeRec(double atime, double aroll, double apitch, double ayaw) {
345             time = atime;
346             roll = aroll;
347             pitch = apitch;
348             yaw = ayaw;
349         }
350 
351         // ctor
AttitudeRec(double atime, double [] rpy)352         AttitudeRec(double atime, double [] rpy) {
353             time = atime;
354             roll = rpy[0];
355             pitch = rpy[1];
356             yaw = rpy[2];
357         }
358 
359         // copy value of another to this
assign(AttitudeRec rec)360         void assign(AttitudeRec rec) {
361             time = rec.time;
362             roll = rec.time;
363             pitch = rec.pitch;
364             yaw = rec.yaw;
365         }
366 
367         // copy roll-pitch-yaw value but leave the time specified by atime
assign(AttitudeRec rec, double atime)368         void assign(AttitudeRec rec, double atime) {
369             time = atime;
370             roll = rec.time;
371             pitch = rec.pitch;
372             yaw = rec.yaw;
373         }
374 
375         // set each field separately
set(double atime, double aroll, double apitch, double ayaw)376         void set(double atime, double aroll, double apitch, double ayaw) {
377             time = atime;
378             roll = aroll;
379             pitch = apitch;
380             yaw = ayaw;
381         }
382     }
383 
384 
385     /**
386      *  Load the sensor log in (time Roll-pitch-yaw) format to a ArrayList<AttitudeRec>
387      *
388      *  @return the number of sensor log items
389      */
loadSensorLog(ArrayList<AttitudeRec> recs)390     private int loadSensorLog(ArrayList<AttitudeRec> recs) {
391         //ArrayList<AttitudeRec> recs = new ArrayList<AttitudeRec>();
392         File csvFile = new File(mPath, "sensor.log");
393         BufferedReader br=null;
394         String line;
395 
396         // preallocate and reuse
397         double [] quat = new double[4];
398         double [] rpy = new double[3];
399 
400         double t0 = -1;
401 
402         try {
403             br = new BufferedReader(new FileReader(csvFile));
404             while ((line = br.readLine()) != null) {
405                 //space separator
406                 String[] items = line.split(" ");
407 
408                 if (items.length != 5) {
409                     recs.clear();
410                     return -1;
411                 }
412 
413                 quat[0] = Double.parseDouble(items[1]);
414                 quat[1] = Double.parseDouble(items[2]);
415                 quat[2] = Double.parseDouble(items[3]);
416                 quat[3] = Double.parseDouble(items[4]);
417 
418                 //
419                 quat2rpy(quat, rpy);
420 
421                 if (t0 < 0) {
422                     t0 = Long.parseLong(items[0])/1e9;
423                 }
424                 recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
425             }
426 
427         } catch (FileNotFoundException e) {
428             e.printStackTrace();
429             Log.e(TAG, "Cannot find sensor logging data");
430         } catch (IOException e) {
431             e.printStackTrace();
432             Log.e(TAG, "Cannot read sensor logging data");
433         } finally {
434             if (br != null) {
435                 try {
436                     br.close();
437                 } catch (IOException e) {
438                     e.printStackTrace();
439                 }
440             }
441         }
442 
443         return recs.size();
444     }
445 
446     /**
447      * Read video meta info
448      */
449     private class VideoMetaInfo {
450         public double fps;
451         public int frameWidth;
452         public int frameHeight;
453         public double fovWidth;
454         public double fovHeight;
455         public boolean valid = false;
456 
VideoMetaInfo(File file)457         VideoMetaInfo(File file) {
458 
459             BufferedReader br=null;
460             String line;
461             String content="";
462             try {
463                 br = new BufferedReader(new FileReader(file));
464                 while ((line = br.readLine()) != null) {
465                     content = content +line;
466                 }
467 
468             } catch (FileNotFoundException e) {
469                 e.printStackTrace();
470                 Log.e(TAG, "Cannot find video meta info file");
471             } catch (IOException e) {
472                 e.printStackTrace();
473                 Log.e(TAG, "Cannot read video meta info file");
474             } finally {
475                 if (br != null) {
476                     try {
477                         br.close();
478                     } catch (IOException e) {
479                         e.printStackTrace();
480                     }
481                 }
482             }
483 
484             if (content.isEmpty()) {
485                 return;
486             }
487 
488             try {
489                 JSONObject json = new JSONObject(content);
490                 frameWidth = json.getInt("width");
491                 frameHeight = json.getInt("height");
492                 fps = json.getDouble("frameRate");
493                 fovWidth = json.getDouble("fovW")*Math.PI/180.0;
494                 fovHeight = json.getDouble("fovH")*Math.PI/180.0;
495             } catch (JSONException e) {
496                 return;
497             }
498 
499             valid = true;
500 
501         }
502     }
503 
504 
505 
506     /**
507      * Debugging helper function, load ArrayList<AttitudeRec> from a file dumped out by
508      * dumpAttitudeRecs
509      */
loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs)510     private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
511         BufferedReader br=null;
512         String line;
513         double time;
514         double [] rpy = new double[3];
515 
516         try {
517             br = new BufferedReader(new FileReader(file));
518             while ((line = br.readLine()) != null) {
519                 //space separator
520                 String[] items = line.split(" ");
521 
522                 if (items.length != 4) {
523                     recs.clear();
524                     return -1;
525                 }
526 
527                 time = Double.parseDouble(items[0]);
528                 rpy[0] = Double.parseDouble(items[1]);
529                 rpy[1] = Double.parseDouble(items[2]);
530                 rpy[2] = Double.parseDouble(items[3]);
531 
532                 recs.add(new AttitudeRec(time, rpy));
533             }
534 
535         } catch (FileNotFoundException e) {
536             e.printStackTrace();
537             Log.e(TAG, "Cannot find AttitudeRecs file specified.");
538         } catch (IOException e) {
539             e.printStackTrace();
540             Log.e(TAG, "Read AttitudeRecs file failure");
541         } finally {
542             if (br != null) {
543                 try {
544                     br.close();
545                 } catch (IOException e) {
546                     e.printStackTrace();
547                 }
548             }
549         }
550 
551         return recs.size();
552     }
553     /**
554      * Debugging helper function, Dump an ArrayList<AttitudeRec> to a file
555      */
dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs)556     private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
557         OutputStreamWriter w=null;
558         try {
559             w = new OutputStreamWriter(new FileOutputStream(file));
560 
561             for (AttitudeRec r : recs) {
562                 w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw));
563             }
564             w.close();
565         } catch(FileNotFoundException e) {
566             e.printStackTrace();
567             Log.e(TAG, "Cannot create AttitudeRecs file.");
568         } catch (IOException e) {
569             Log.e(TAG, "Write AttitudeRecs file failure");
570         } finally {
571             if (w!=null) {
572                 try {
573                     w.close();
574                 } catch (IOException e) {
575                     e.printStackTrace();
576                 }
577             }
578         }
579     }
580 
581     /**
582      *  Read the sensor log in ArrayList<AttitudeRec> format and find out the sensor sample time
583      *  statistics: mean and standard deviation.
584      *
585      *  @return The returned value will be a double array with exact 2 items, first [0] will be
586      *  mean and the second [1]  will be the standard deviation.
587      *
588      */
calcSensorPeriodStat(ArrayList<AttitudeRec> srec)589     private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec)   {
590         double tp = srec.get(0).time;
591         int i;
592         double sum = 0.0;
593         double sumsq = 0.0;
594         for(i=1; i<srec.size(); ++i) {
595             double dt;
596             dt = srec.get(i).time - tp;
597             sum += dt;
598             sumsq += dt*dt;
599             tp += dt;
600         }
601         double [] ret = new double[2];
602         ret[0] = sum/srec.size();
603         ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]);
604         return ret;
605     }
606 
607     /**
608      * Flipping the axis as the image are flipped upside down in OpenGL frames
609      */
fixFlippedAxis(ArrayList<AttitudeRec> vrecs)610     private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs)   {
611         for (AttitudeRec i: vrecs) {
612             i.yaw = -i.yaw;
613         }
614     }
615 
616     /**
617      *  Calculate the maximum error on the specified axis between two time aligned (resampled)
618      *  ArrayList<AttitudeRec>. Yaw axis needs special treatment as 0 and 2pi error are same thing
619      *
620      * @param ra  one ArrayList of AttitudeRec
621      * @param rb  the other ArrayList of AttitudeRec
622      * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
623      * @return Maximum error
624      */
calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)625     private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)  {
626         // check if they are valid and comparable data
627         if (ra.size() != rb.size()) {
628             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
629         }
630         // check input parameter validity
631         if (axis<0 || axis > 2) {
632             throw new IllegalArgumentException("Invalid data axis.");
633         }
634 
635         int i;
636         double max = 0.0;
637         double diff = 0.0;
638         for(i=0; i<ra.size(); ++i) {
639             // make sure they are aligned data
640             if (ra.get(i).time != rb.get(i).time) {
641                 throw new IllegalArgumentException("Element "+i+
642                         " of two inputs has different time.");
643             }
644             switch(axis) {
645                 case 0:
646                     diff = ra.get(i).roll - rb.get(i).roll; // they always opposite of each other..
647                     break;
648                 case 1:
649                     diff = ra.get(i).pitch - rb.get(i).pitch;
650                     break;
651                 case 2:
652                     diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
653                             -Math.PI)-Math.PI;
654                     break;
655             }
656             diff = Math.abs(diff);
657             if (diff>max) {
658                 max = diff;
659             }
660         }
661         return max;
662     }
663 
664     /**
665      *  Calculate the RMS error on the specified axis between two time aligned (resampled)
666      *  ArrayList<AttitudeRec>. Yaw axis needs special treatment as 0 and 2pi error are same thing
667      *
668      * @param ra  one ArrayList of AttitudeRec
669      * @param rb  the other ArrayList of AttitudeRec
670      * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
671      * @return Mean square error
672      */
calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)673     private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
674         // check if they are valid and comparable data
675         if (ra.size() != rb.size()) {
676             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
677         }
678         // check input parameter validity
679         if (axis<0 || axis > 2) {
680             throw new IllegalArgumentException("Invalid data axis.");
681         }
682 
683         int i;
684         double sum = 0.0;
685         double diff = 0.0;
686         for(i=0; i<ra.size(); ++i) {
687             // check input data validity
688             if (ra.get(i).time != rb.get(i).time) {
689                 throw new IllegalArgumentException("Element "+i+
690                         " of two inputs has different time.");
691             }
692 
693             switch(axis) {
694                 case 0:
695                     diff = ra.get(i).roll - rb.get(i).roll;
696                     break;
697                 case 1:
698                     diff = ra.get(i).pitch - rb.get(i).pitch;
699                     break;
700                 case 2:
701                     diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))-
702                             Math.PI)-Math.PI;
703                     break;
704             }
705 
706             sum += diff*diff;
707         }
708         return sum/ra.size();
709     }
710 
711     /**
712      * Debugging helper function. Dump the error between two time aligned ArrayList<AttitudeRec>'s
713      *
714      * @param file File to write to
715      * @param ra  one ArrayList of AttitudeRec
716      * @param rb  the other ArrayList of AttitudeRec
717      */
dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb)718     private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
719         if (ra.size() != rb.size()) {
720             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
721         }
722 
723         int i;
724 
725         ArrayList<AttitudeRec> rerr = new ArrayList<>();
726         for(i=0; i<ra.size(); ++i) {
727             if (ra.get(i).time != rb.get(i).time) {
728                 throw new IllegalArgumentException("Element "+ i
729                         + " of two inputs has different time.");
730             }
731 
732             rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
733                     ra.get(i).pitch - rb.get(i).pitch,
734                     (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
735                             -Math.PI)-Math.PI)));
736 
737         }
738         dumpAttitudeRecs(file, rerr);
739     }
740 
    /**
     * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
     *
     * Performs a single forward merge pass: for each timebase entry, linearly interpolates
     * the two surrounding rec entries; before the first / after the last rec entry, the
     * boundary record is used as-is. Both lists are assumed sorted by ascending time —
     * TODO confirm with callers (the carried index below relies on it).
     *
     * @param rec           the ArrayList of AttitudeRec to be sampled
     * @param timebase      the other ArrayList of AttitudeRec that serves as time base
     * @param delta_t       offset in time before resample
     * @param yaw_offset    offset in yaw axis
     * @param resampled     output ArrayList of AttitudeRec (must be pre-sized to timebase)
     */

    private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
            double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)    {
        int i;
        // index of the last rec entry strictly before the current target time;
        // carried across iterations instead of rescanning from the start
        int j = -1;
        for(i=0; i<timebase.size(); i++) {
            double time = timebase.get(i).time + delta_t;

            // advance j until rec[j+1] is the first entry at or after 'time'
            while(j<rec.size()-1 && rec.get(j+1).time < time) j++;

            if (j == -1) {
                //use first
                resampled.get(i).assign(rec.get(0), timebase.get(i).time);
            } else if (j == rec.size()-1) {
                // use last
                resampled.get(i).assign(rec.get(j), timebase.get(i).time);
            } else {
                // do linear resample
                double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
                double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
                double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
                double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
                resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
            }
        }
    }
776 
    /**
     * Analyze video frames using a computer vision approach and generate an
     * ArrayList of AttitudeRec.
     *
     * For every decoded frame: detect the asymmetric circle-grid calibration
     * pattern, solve for the camera pose (solvePnP), and if the re-projection
     * error is acceptable, record the pose as a timestamped roll/pitch/yaw entry.
     *
     * @param recs  output ArrayList of AttitudeRec
     * @return index of the last decoded frame (frame count - 1), or -1 if the
     *         video frame size does not match the recorded metadata.
     *         NOTE(review): the original javadoc said "total number of frames",
     *         but the counter starts at -1 and is returned directly, so the value
     *         is count - 1 -- confirm what callers expect.
     */
    private int analyzeVideo(ArrayList<AttitudeRec> recs) {
        // recording metadata (fps, frame size, field of view) saved next to the video
        VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

        int decimation = 1;
        boolean use_timestamp = true;

        // roughly determine if decimation is necessary
        if (meta.fps > DECIMATION_FPS_TARGET) {
            decimation = (int)(meta.fps / DECIMATION_FPS_TARGET);
            meta.fps /=decimation;
        }

        VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
                new File(mPath, "video.mp4"), decimation);


        Mat frame;
        Mat gray = new Mat();
        int i = -1; // frame index; incremented once per decoded frame below

        Size frameSize = videoDecoder.getSize();

        if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
            // this is very unlikely
            return -1;
        }

        if (TRACE_VIDEO_ANALYSIS) {
            Debug.startMethodTracing("cvprocess");
        }

        // 4x11 asymmetric circle grid, the OpenCV calibration pattern
        Size patternSize = new Size(4,11);

        // estimated focal length in pixels derived from the recorded field of view;
        // assumes meta.fovWidth is in radians -- TODO confirm against VideoMetaInfo
        float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
        Mat camMat = cameraMatrix(fc, new Size(frameSize.width/2, frameSize.height/2));
        MatOfDouble coeff = new MatOfDouble(); // dummy

        MatOfPoint2f centers = new MatOfPoint2f();               // detected 2D blob centers
        MatOfPoint3f grid = asymmetricalCircleGrid(patternSize); // ground-truth 3D pattern points
        Mat rvec = new MatOfFloat(); // rotation (Rodrigues vector) output of solvePnP
        Mat tvec = new MatOfFloat(); // translation output of solvePnP

        MatOfPoint2f reprojCenters = new MatOfPoint2f();

        if (LOCAL_LOGV) {
            Log.v(TAG, "Camera Mat = \n" + camMat.dump());
        }

        long startTime = System.nanoTime();
        long [] ts = new long[1]; // receives each frame's timestamp (microseconds) from decoder

        while ((frame = videoDecoder.getFrame(ts)) !=null) {
            if (LOCAL_LOGV) {
                Log.v(TAG, "got a frame " + i);
            }

            // fall back to frame-index-based timing if the decoder reports no timestamp
            if (use_timestamp && ts[0] == -1) {
                use_timestamp = false;
            }

            // has to be in front, as there are cases where execution
            // will skip the later part of this while
            i++;

            // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
            Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

            boolean foundPattern = Calib3d.findCirclesGrid(
                    gray,  patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);

            if (!foundPattern) {
                // skip to next frame
                continue;
            }

            if (OUTPUT_DEBUG_IMAGE) {
                Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
            }

            // figure out the extrinsic parameters using real ground truth 3D points and the pixel
            // position of blobs found in findCircleGrid, an estimated camera matrix and
            // no-distortion are assumed.
            boolean foundSolution =
                    Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
                            false, Calib3d.CV_ITERATIVE);

            if (!foundSolution) {
                // skip to next frame
                if (LOCAL_LOGV) {
                    Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
                }
                continue;
            }

            // reproject points to for evaluation of result accuracy of solvePnP
            Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

            // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
            double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

            if (LOCAL_LOGV) {
                Log.v(TAG, "Found attitude, re-projection error = " + error);
            }

            // if error is reasonable, add it into the results. use ratio to frame height to avoid
            // discriminating higher definition videos
            if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
                double [] rv = new double[3];
                double timestamp;

                rvec.get(0,0, rv);
                if (use_timestamp) {
                    // sample time is in microseconds; convert to seconds
                    timestamp = (double)ts[0] / 1e6;
                } else {
                    // no usable timestamps: reconstruct time from frame index and fps
                    timestamp = (double) i / meta.fps;
                }
                if (LOCAL_LOGV) Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
                recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
            }

            if (OUTPUT_DEBUG_IMAGE) {
                Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
                Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath()
                        + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
            }
        }

        if (LOCAL_LOGV) {
            Log.v(TAG, "Finished decoding");
        }

        if (TRACE_VIDEO_ANALYSIS) {
            Debug.stopMethodTracing();
        }

        if (LOCAL_LOGV) {
            // time analysis
            double totalTime = (System.nanoTime()-startTime)/1e9;
            Log.i(TAG, "Total time: "+totalTime +"s, Per frame time: "+totalTime/i );
        }
        return i;
    }
925 
926     /**
927      * OpenCV for Android have not support the VideoCapture from file
928      * This is a make shift solution before it is supported.
929      * One issue right now is that the glReadPixels is quite slow .. around 6.5ms for a 720p frame
930      */
931     private class VideoDecoderForOpenCV implements Runnable {
932         static final String TAG = "VideoDecoderForOpenCV";
933 
934         private MediaExtractor extractor=null;
935         private MediaCodec decoder=null;
936         private CtsMediaOutputSurface surface=null;
937 
938         private MatBuffer mMatBuffer;
939 
940         private final File mVideoFile;
941 
942         private boolean valid;
943         private Object setupSignal;
944 
945         private Thread mThread;
946         private int mDecimation;
947 
948         /**
949          * Constructor
950          * @param file video file
951          * @param decimation process every "decimation" number of frame
952          */
VideoDecoderForOpenCV(File file, int decimation)953         VideoDecoderForOpenCV(File file, int decimation) {
954             mVideoFile = file;
955             mDecimation = decimation;
956             valid = false;
957 
958             start();
959         }
960 
961         /**
962          * Constructor
963          * @param file video file
964          */
VideoDecoderForOpenCV(File file)965         VideoDecoderForOpenCV(File file)   {
966             this(file, 1);
967         }
968 
969         /**
970          * Test if video decoder is in valid states ready to output video.
971          * @return true of force.
972          */
isValid()973         public boolean isValid() {
974             return valid;
975         }
976 
start()977         private void start() {
978             setupSignal = new Object();
979             mThread = new Thread(this);
980             mThread.start();
981 
982             synchronized (setupSignal) {
983                 try {
984                     setupSignal.wait();
985                 } catch (InterruptedException e) {
986                     Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
987                 }
988             }
989         }
stop()990         private void stop() {
991             if (mThread != null) {
992                 mThread.interrupt();
993                 try {
994                     mThread.join();
995                 } catch (InterruptedException e) {
996                     Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
997                 }
998                 try {
999                     decoder.stop();
1000                 }catch (IllegalStateException e) {
1001                     Log.e(TAG, "Video decoder is not in a state that can be stopped");
1002                 }
1003             }
1004             mThread = null;
1005         }
1006 
teardown()1007         void teardown() {
1008             if (decoder!=null) {
1009                 decoder.release();
1010                 decoder = null;
1011             }
1012             if (surface!=null) {
1013                 surface.release();
1014                 surface = null;
1015             }
1016             if (extractor!=null) {
1017                 extractor.release();
1018                 extractor = null;
1019             }
1020         }
1021 
setup()1022         void setup() {
1023             int width=0, height=0;
1024 
1025             extractor = new MediaExtractor();
1026 
1027             try {
1028                 extractor.setDataSource(mVideoFile.getPath());
1029             } catch (IOException e) {
1030                 return;
1031             }
1032 
1033             for (int i = 0; i < extractor.getTrackCount(); i++) {
1034                 MediaFormat format = extractor.getTrackFormat(i);
1035                 String mime = format.getString(MediaFormat.KEY_MIME);
1036                 width = format.getInteger(MediaFormat.KEY_WIDTH);
1037                 height = format.getInteger(MediaFormat.KEY_HEIGHT);
1038 
1039                 if (mime.startsWith("video/")) {
1040                     extractor.selectTrack(i);
1041                     try {
1042                         decoder = MediaCodec.createDecoderByType(mime);
1043                     }catch (IOException e) {
1044                         continue;
1045                     }
1046                     // Decode to surface
1047                     //decoder.configure(format, surface, null, 0);
1048 
1049                     // Decode to offscreen surface
1050                     surface = new CtsMediaOutputSurface(width, height);
1051                     mMatBuffer = new MatBuffer(width, height);
1052 
1053                     decoder.configure(format, surface.getSurface(), null, 0);
1054                     break;
1055                 }
1056             }
1057 
1058             if (decoder == null) {
1059                 Log.e(TAG, "Can't find video info!");
1060                 return;
1061             }
1062             valid = true;
1063         }
1064 
1065         @Override
run()1066         public void run() {
1067             setup();
1068 
1069             synchronized (setupSignal) {
1070                 setupSignal.notify();
1071             }
1072 
1073             if (!valid) {
1074                 return;
1075             }
1076 
1077             decoder.start();
1078 
1079             ByteBuffer[] inputBuffers = decoder.getInputBuffers();
1080             ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
1081             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1082 
1083             boolean isEOS = false;
1084             long startMs = System.currentTimeMillis();
1085             long timeoutUs = 10000;
1086 
1087             int iframe = 0;
1088             long frameTimestamp = 0;
1089 
1090             while (!Thread.interrupted()) {
1091                 if (!isEOS) {
1092                     int inIndex = decoder.dequeueInputBuffer(10000);
1093                     if (inIndex >= 0) {
1094                         ByteBuffer buffer = inputBuffers[inIndex];
1095                         int sampleSize = extractor.readSampleData(buffer, 0);
1096                         if (sampleSize < 0) {
1097                             if (LOCAL_LOGD) {
1098                                 Log.d("VideoDecoderForOpenCV",
1099                                         "InputBuffer BUFFER_FLAG_END_OF_STREAM");
1100                             }
1101                             decoder.queueInputBuffer(inIndex, 0, 0, 0,
1102                                     MediaCodec.BUFFER_FLAG_END_OF_STREAM);
1103                             isEOS = true;
1104                         } else {
1105                             frameTimestamp = extractor.getSampleTime();
1106                             decoder.queueInputBuffer(inIndex, 0, sampleSize, frameTimestamp, 0);
1107                             if (LOCAL_LOGD) {
1108                                 Log.d(TAG, String.format("Frame %d sample time %f s",
1109                                             iframe, (double)frameTimestamp/1e6));
1110                             }
1111                             extractor.advance();
1112                         }
1113                     }
1114                 }
1115 
1116                 int outIndex = decoder.dequeueOutputBuffer(info, 10000);
1117                 MediaFormat outFormat;
1118                 switch (outIndex) {
1119                     case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
1120                         if (LOCAL_LOGD) {
1121                             Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
1122                         }
1123                         outputBuffers = decoder.getOutputBuffers();
1124                         break;
1125                     case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
1126                         outFormat = decoder.getOutputFormat();
1127                         if (LOCAL_LOGD) {
1128                             Log.d(TAG, "New format " + outFormat);
1129                         }
1130                         break;
1131                     case MediaCodec.INFO_TRY_AGAIN_LATER:
1132                         if (LOCAL_LOGD) {
1133                             Log.d(TAG, "dequeueOutputBuffer timed out!");
1134                         }
1135                         break;
1136                     default:
1137 
1138                         ByteBuffer buffer = outputBuffers[outIndex];
1139                         boolean doRender = (info.size != 0);
1140 
1141                         // As soon as we call releaseOutputBuffer, the buffer will be forwarded
1142                         // to SurfaceTexture to convert to a texture.  The API doesn't
1143                         // guarantee that the texture will be available before the call
1144                         // returns, so we need to wait for the onFrameAvailable callback to
1145                         // fire.  If we don't wait, we risk rendering from the previous frame.
1146                         decoder.releaseOutputBuffer(outIndex, doRender);
1147 
1148                         if (doRender) {
1149                             surface.awaitNewImage();
1150                             surface.drawImage();
1151                             if (LOCAL_LOGV) {
1152                                 Log.v(TAG, "Finish drawing a frame!");
1153                             }
1154                             if ((iframe++ % mDecimation) == 0) {
1155                                 //Send the frame for processing
1156                                 mMatBuffer.put(frameTimestamp);
1157                             }
1158                         }
1159                         break;
1160                 }
1161 
1162                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1163                     if (LOCAL_LOGD) {
1164                         Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
1165                     }
1166                     break;
1167                 }
1168             }
1169             mMatBuffer.invalidate();
1170 
1171             decoder.stop();
1172 
1173             teardown();
1174             mThread = null;
1175         }
1176 
1177 
1178         /**
1179          * Get next valid frame
1180          * @return Frame in OpenCV mat
1181          */
getFrame(long ts[])1182         public Mat getFrame(long ts[]) {
1183             return mMatBuffer.get(ts);
1184         }
1185 
1186         /**
1187          * Get the size of the frame
1188          * @return size of the frame
1189          */
getSize()1190         Size getSize() {
1191             return mMatBuffer.getSize();
1192         }
1193 
1194         /**
1195          * A synchronized buffer
1196          */
1197         class MatBuffer {
1198             private Mat mat;
1199             private byte[] bytes;
1200             private ByteBuffer buf;
1201             private long timestamp;
1202             private boolean full;
1203 
1204             private int mWidth, mHeight;
1205             private boolean mValid = false;
1206 
MatBuffer(int width, int height)1207             MatBuffer(int width, int height) {
1208                 mWidth = width;
1209                 mHeight = height;
1210 
1211                 mat = new Mat(height, width, CvType.CV_8UC4); //RGBA
1212                 buf = ByteBuffer.allocateDirect(width*height*4);
1213                 bytes = new byte[width*height*4];
1214                 timestamp = -1;
1215 
1216                 mValid = true;
1217                 full = false;
1218             }
1219 
invalidate()1220             public synchronized void invalidate() {
1221                 mValid = false;
1222                 notifyAll();
1223             }
1224 
get(long ts[])1225             public synchronized Mat get(long ts[]) {
1226 
1227                 if (!mValid) return null;
1228                 while (full == false) {
1229                     try {
1230                         wait();
1231                         if (!mValid) return null;
1232                     } catch (InterruptedException e) {
1233                         return null;
1234                     }
1235                 }
1236                 mat.put(0,0, bytes);
1237                 full = false;
1238                 notifyAll();
1239                 ts[0] = timestamp;
1240                 return mat;
1241             }
1242 
put(long ts)1243             public synchronized void put(long ts) {
1244                 while (full) {
1245                     try {
1246                         wait();
1247                     } catch (InterruptedException e) {
1248                         Log.e(TAG, "Interrupted when waiting for space in buffer");
1249                     }
1250                 }
1251                 GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA,
1252                         GL10.GL_UNSIGNED_BYTE, buf);
1253                 buf.get(bytes);
1254                 buf.rewind();
1255 
1256                 timestamp = ts;
1257                 full = true;
1258                 notifyAll();
1259             }
1260 
getSize()1261             public Size getSize() {
1262                 if (valid) {
1263                     return mat.size();
1264                 }
1265                 return new Size();
1266             }
1267         }
1268     }
1269 
1270 
1271     /* a small set of math functions */
quat2rpy( double [] q)1272     private static double [] quat2rpy( double [] q) {
1273         double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
1274                 Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
1275                 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
1276         return rpy;
1277     }
1278 
quat2rpy( double [] q, double[] rpy)1279     private static void quat2rpy( double [] q, double[] rpy) {
1280         rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2]));
1281         rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1]));
1282         rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]));
1283     }
1284 
quat2rpy(Mat quat)1285     private static Mat quat2rpy(Mat quat) {
1286         double [] q = new double[4];
1287         quat.get(0,0,q);
1288 
1289         double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
1290                 Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
1291                 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
1292 
1293         Mat rpym = new Mat(3,1, CvType.CV_64F);
1294         rpym.put(0,0, rpy);
1295         return rpym;
1296     }
1297 
rodr2quat( double [] r)1298     private static double [] rodr2quat( double [] r) {
1299         double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
1300         double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
1301                 Math.sin(t/2)*r[2]/t};
1302         return quat;
1303     }
1304 
rodr2quat( double [] r, double [] quat)1305     private static void rodr2quat( double [] r, double [] quat) {
1306         double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
1307         quat[0] = Math.cos(t/2);
1308         quat[1] = Math.sin(t/2)*r[0]/t;
1309         quat[2] = Math.sin(t/2)*r[1]/t;
1310         quat[3] = Math.sin(t/2)*r[2]/t;
1311     }
1312 
rodr2quat(Mat rodr)1313     private static Mat rodr2quat(Mat rodr) {
1314         double t = Core.norm(rodr);
1315         double [] r = new double[3];
1316         rodr.get(0,0,r);
1317 
1318         double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
1319                 Math.sin(t/2)*r[2]/t};
1320         Mat quatm = new Mat(4,1, CvType.CV_64F);
1321         quatm.put(0, 0, quat);
1322         return quatm;
1323     }
1324 
rodr2rpy( double [] r)1325     private static double [] rodr2rpy( double [] r) {
1326         return quat2rpy(rodr2quat(r));
1327     }
1328     //////////////////
1329 
1330 }
1331