/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.server.accessibility.gestures;

import static com.android.server.accessibility.gestures.GestureUtils.MM_PER_CM;
import static com.android.server.accessibility.gestures.TouchExplorer.DEBUG;

import android.content.Context;
import android.graphics.PointF;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.util.Slog;
import android.util.TypedValue;
import android.view.MotionEvent;
import android.view.ViewConfiguration;

import java.util.ArrayList;

/**
 * This class is responsible for matching one-finger swipe gestures. Each instance matches one
 * swipe gesture. A swipe is specified as a series of one or more directions, e.g. left, or left
 * and up. At this time swipes with more than two directions are not supported.
 */
class Swipe extends GestureMatcher {

    // Direction constants.
    public static final int LEFT = 0;
    public static final int RIGHT = 1;
    public static final int UP = 2;
    public static final int DOWN = 3;

    // This is the calculated movement threshold used to track whether the user is still
    // moving their finger.
    private final float mGestureDetectionThresholdPixels;

    // Buffer for storing points for gesture detection.
    private final ArrayList<PointF> mStrokeBuffer = new ArrayList<>(100);

    // Constants for sampling motion event points.
    // We sample based on a minimum distance between points, primarily to improve accuracy by
    // reducing noisy minor changes in direction.
    private static final float MIN_CM_BETWEEN_SAMPLES = 0.25f;

    // Distance a finger must travel before we decide if it is a gesture or not.
    public static final int GESTURE_CONFIRM_CM = 1;

    // Time threshold used to determine if an interaction is a gesture or not.
    // If the first movement of 1cm takes longer than this value, we assume it's
    // a slow movement, and therefore not a gesture.
    //
    // This value was determined by measuring the time for the first 1cm
    // movement when gesturing, and touch exploring. Based on user testing,
    // all gestures started with the initial movement taking less than 100ms.
    // When touch exploring, the first movement almost always takes longer than
    // 200ms.
    public static final long MAX_TIME_TO_START_SWIPE_MS = 150 * GESTURE_CONFIRM_CM;

    // Time threshold used to determine if a gesture should be cancelled. If
    // the finger takes more than this time to move to the next sample point, the ongoing
    // gesture is cancelled.
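    // With GESTURE_CONFIRM_CM currently set to 1, this evaluates to 350 ms.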
    public static final long MAX_TIME_TO_CONTINUE_SWIPE_MS = 350 * GESTURE_CONFIRM_CM;

    private int[] mDirections;
    private float mBaseX;
    private float mBaseY;
    private long mBaseTime;
    private float mPreviousGestureX;
    private float mPreviousGestureY;
    private final float mMinPixelsBetweenSamplesX;
    private final float mMinPixelsBetweenSamplesY;

    // The minimum distance the finger must travel before we evaluate the initial direction of
    // the swipe. Anything less is still considered a touch.
    private int mTouchSlop;

    // Constant for separating gesture segments.
    private static final float ANGLE_THRESHOLD = 0.0f;

    Swipe(
            Context context,
            int direction,
            int gesture,
            GestureMatcher.StateChangeListener listener) {
        this(context, new int[] {direction}, gesture, listener);
    }

    Swipe(
            Context context,
            int direction1,
            int direction2,
            int gesture,
            GestureMatcher.StateChangeListener listener) {
        this(context, new int[] {direction1, direction2}, gesture, listener);
    }

    private Swipe(
            Context context,
            int[] directions,
            int gesture,
            GestureMatcher.StateChangeListener listener) {
        super(gesture, new Handler(context.getMainLooper()), listener);
        mDirections = directions;
        DisplayMetrics displayMetrics = context.getResources().getDisplayMetrics();
        mGestureDetectionThresholdPixels =
                TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_MM, MM_PER_CM, displayMetrics)
                        * GESTURE_CONFIRM_CM;
        // Calculate the minimum distance between sampled points, in pixels.
        final float pixelsPerCmX = displayMetrics.xdpi / 2.54f;
        final float pixelsPerCmY = displayMetrics.ydpi / 2.54f;
        mMinPixelsBetweenSamplesX = MIN_CM_BETWEEN_SAMPLES * pixelsPerCmX;
        mMinPixelsBetweenSamplesY = MIN_CM_BETWEEN_SAMPLES * pixelsPerCmY;
        mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
        clear();
    }

    @Override
    public void clear() {
        mBaseX = Float.NaN;
        mBaseY = Float.NaN;
        mBaseTime = 0;
        mPreviousGestureX = Float.NaN;
        mPreviousGestureY = Float.NaN;
        mStrokeBuffer.clear();
        super.clear();
    }

    @Override
    protected void onDown(MotionEvent event, MotionEvent rawEvent, int policyFlags) {
        if (Float.isNaN(mBaseX) && Float.isNaN(mBaseY)) {
            mBaseX = rawEvent.getX();
            mBaseY = rawEvent.getY();
            mBaseTime = rawEvent.getEventTime();
            mPreviousGestureX = mBaseX;
            mPreviousGestureY = mBaseY;
        }
        // Otherwise do nothing because this event doesn't make sense in the middle of a gesture.
    }

    @Override
    protected void onMove(MotionEvent event, MotionEvent rawEvent, int policyFlags) {
        final float x = rawEvent.getX();
        final float y = rawEvent.getY();
        final long time = rawEvent.getEventTime();
        final float dX = Math.abs(x - mPreviousGestureX);
        final float dY = Math.abs(y - mPreviousGestureY);
        final double moveDelta = Math.hypot(Math.abs(x - mBaseX), Math.abs(y - mBaseY));
        final long timeDelta = time - mBaseTime;
        if (DEBUG) {
            Slog.d(
                    getGestureName(),
                    "moveDelta:"
                            + Double.toString(moveDelta)
                            + " mGestureDetectionThreshold: "
                            + Float.toString(mGestureDetectionThresholdPixels));
        }
        if (getState() == STATE_CLEAR) {
            if (moveDelta < mTouchSlop) {
                // This still counts as a touch, not a swipe.
                return;
            } else if (mStrokeBuffer.size() == 0) {
                // First, make sure the pointer is going in the right direction.
                int direction = toDirection(x - mBaseX, y - mBaseY);
                if (direction != mDirections[0]) {
                    cancelGesture(event, rawEvent, policyFlags);
                    return;
                }
                // This is confirmed to be some kind of swipe so start tracking points.
                mStrokeBuffer.add(new PointF(mBaseX, mBaseY));
            }
        }
        if (moveDelta > mGestureDetectionThresholdPixels) {
            // This is a gesture, not touch exploration.
            mBaseX = x;
            mBaseY = y;
            mBaseTime = time;
            startGesture(event, rawEvent, policyFlags);
        } else if (getState() == STATE_CLEAR) {
            if (timeDelta > MAX_TIME_TO_START_SWIPE_MS) {
                // The user isn't moving fast enough.
                cancelGesture(event, rawEvent, policyFlags);
                return;
            }
        } else if (getState() == STATE_GESTURE_STARTED) {
            if (timeDelta > MAX_TIME_TO_CONTINUE_SWIPE_MS) {
                cancelGesture(event, rawEvent, policyFlags);
                return;
            }
        }
        if (dX >= mMinPixelsBetweenSamplesX || dY >= mMinPixelsBetweenSamplesY) {
            // At this point gesture detection has started and we are sampling points.
            mPreviousGestureX = x;
            mPreviousGestureY = y;
            mStrokeBuffer.add(new PointF(x, y));
        }
    }

    @Override
    protected void onUp(MotionEvent event, MotionEvent rawEvent, int policyFlags) {
        if (getState() != STATE_GESTURE_STARTED) {
            cancelGesture(event, rawEvent, policyFlags);
            return;
        }

        final float x = rawEvent.getX();
        final float y = rawEvent.getY();
        final float dX = Math.abs(x - mPreviousGestureX);
        final float dY = Math.abs(y - mPreviousGestureY);
        if (dX >= mMinPixelsBetweenSamplesX || dY >= mMinPixelsBetweenSamplesY) {
            mStrokeBuffer.add(new PointF(x, y));
        }
        recognizeGesture(event, rawEvent, policyFlags);
    }

    @Override
    protected void onPointerDown(MotionEvent event, MotionEvent rawEvent, int policyFlags) {
        cancelGesture(event, rawEvent, policyFlags);
    }

    @Override
    protected void onPointerUp(MotionEvent event, MotionEvent rawEvent, int policyFlags) {
        cancelGesture(event, rawEvent, policyFlags);
    }

    /**
     * Looks at the sequence of motions in mStrokeBuffer, classifies the gesture, then calls
     * Listener callbacks for success or failure.
     *
     * @param event The motion event to pass to the listener callbacks.
     * @param policyFlags Policy flags for the event.
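     * @param rawEvent The raw motion event, passed to the listener callbacks alongside event.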
     */
    private void recognizeGesture(MotionEvent event, MotionEvent rawEvent, int policyFlags) {
        if (mStrokeBuffer.size() < 2) {
            cancelGesture(event, rawEvent, policyFlags);
            return;
        }

        // Look at mStrokeBuffer and extract 2 line segments, delimited by near-perpendicular
        // direction change.
        // Method: for each sampled motion event, check the angle of the most recent motion vector
        // versus the preceding motion vector, and segment the line if the angle is about
        // 90 degrees.

        ArrayList<PointF> path = new ArrayList<>();
        PointF lastDelimiter = mStrokeBuffer.get(0);
        path.add(lastDelimiter);

        float dX = 0; // Sum of unit vectors from last delimiter to each following point
        float dY = 0;
        int count = 0; // Number of points since last delimiter
        float length = 0; // Vector length from delimiter to most recent point

        PointF next = null;
        for (int i = 1; i < mStrokeBuffer.size(); ++i) {
            next = mStrokeBuffer.get(i);
            if (count > 0) {
                // Average of unit vectors from delimiter to following points
                float currentDX = dX / count;
                float currentDY = dY / count;

                // newDelimiter is a possible new delimiter, based on a vector with length from
                // the last delimiter to the previous point, but in the direction of the average
                // unit vector from delimiter to previous points.
                // Using the averaged vector has the effect of "squaring off the curve",
                // creating a sharper angle between the last motion and the preceding motion from
                // the delimiter. In turn, this sharper angle achieves the splitting threshold
                // even in a gentle curve.
                PointF newDelimiter =
                        new PointF(
                                length * currentDX + lastDelimiter.x,
                                length * currentDY + lastDelimiter.y);

                // Unit vector from newDelimiter to the most recent point
                float nextDX = next.x - newDelimiter.x;
                float nextDY = next.y - newDelimiter.y;
                float nextLength = (float) Math.sqrt(nextDX * nextDX + nextDY * nextDY);
                nextDX = nextDX / nextLength;
                nextDY = nextDY / nextLength;

                // Compare the initial motion direction to the most recent motion direction,
                // and segment the line if direction has changed by about 90 degrees.
                float dot = currentDX * nextDX + currentDY * nextDY;
                if (dot < ANGLE_THRESHOLD) {
                    path.add(newDelimiter);
                    lastDelimiter = newDelimiter;
                    dX = 0;
                    dY = 0;
                    count = 0;
                }
            }

            // Vector from last delimiter to most recent point
            float currentDX = next.x - lastDelimiter.x;
            float currentDY = next.y - lastDelimiter.y;
            length = (float) Math.sqrt(currentDX * currentDX + currentDY * currentDY);

            // Increment sum of unit vectors from delimiter to each following point
            count = count + 1;
            dX = dX + currentDX / length;
            dY = dY + currentDY / length;
        }

        path.add(next);
        if (DEBUG) {
            Slog.d(getGestureName(), "path=" + path.toString());
        }
        // Classify line segments, and call Listener callbacks.
        recognizeGesturePath(event, rawEvent, policyFlags, path);
    }

    /**
     * Classifies a pair of line segments, by direction. Calls Listener callbacks for success or
     * failure.
     *
     * @param event The motion event to pass to the listener's onGestureCanceled method.
     * @param policyFlags Policy flags for the event.
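     * @param rawEvent The raw motion event, passed to the listener callbacks alongside event.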
     * @param path A sequence of motion line segments derived from motion points in mStrokeBuffer.
     */
    private void recognizeGesturePath(
            MotionEvent event, MotionEvent rawEvent, int policyFlags, ArrayList<PointF> path) {

        final int displayId = event.getDisplayId();
        if (path.size() != mDirections.length + 1) {
            cancelGesture(event, rawEvent, policyFlags);
            return;
        }
        for (int i = 0; i < path.size() - 1; ++i) {
            PointF start = path.get(i);
            PointF end = path.get(i + 1);

            float dX = end.x - start.x;
            float dY = end.y - start.y;
            int direction = toDirection(dX, dY);
            if (direction != mDirections[i]) {
                if (DEBUG) {
                    Slog.d(
                            getGestureName(),
                            "Found direction "
                                    + directionToString(direction)
                                    + " when expecting "
                                    + directionToString(mDirections[i]));
                }
                cancelGesture(event, rawEvent, policyFlags);
                return;
            }
        }
        if (DEBUG) {
            Slog.d(getGestureName(), "Completed.");
        }
        completeGesture(event, rawEvent, policyFlags);
    }

    private static int toDirection(float dX, float dY) {
        if (Math.abs(dX) > Math.abs(dY)) {
            // Horizontal
            return (dX < 0) ? LEFT : RIGHT;
        } else {
            // Vertical
            return (dY < 0) ? UP : DOWN;
        }
    }

    public static String directionToString(int direction) {
        switch (direction) {
            case LEFT:
                return "left";
            case RIGHT:
                return "right";
            case UP:
                return "up";
            case DOWN:
                return "down";
            default:
                return "Unknown Direction";
        }
    }

    @Override
    protected String getGestureName() {
        StringBuilder builder = new StringBuilder();
        builder.append("Swipe ").append(directionToString(mDirections[0]));
        for (int i = 1; i < mDirections.length; ++i) {
            builder.append(" and ").append(directionToString(mDirections[i]));
        }
        return builder.toString();
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder(super.toString());
        if (getState() != STATE_GESTURE_CANCELED) {
            builder.append(", mBaseX: ")
                    .append(mBaseX)
                    .append(", mBaseY: ")
                    .append(mBaseY)
                    .append(", mGestureDetectionThreshold:")
                    .append(mGestureDetectionThresholdPixels)
                    .append(", mMinPixelsBetweenSamplesX:")
                    .append(mMinPixelsBetweenSamplesX)
                    .append(", mMinPixelsBetweenSamplesY:")
                    .append(mMinPixelsBetweenSamplesY);
        }
        return builder.toString();
    }
}