Searched refs:frames (Results 1 – 15 of 15) sorted by relevance

/development/tools/winscope/src/trace/
frame_map.ts:102 getEntriesRange(frames: FramesRange): EntriesRange | undefined {
103 frames = this.clampFramesRangeToFitBounds(frames);
104 if (frames.start >= frames.end) {
109 frames.start,
112 frames.end - 1,
169 private clampFramesRangeToFitBounds(frames: FramesRange): FramesRange {
171 start: Math.max(frames.start, 0),
172 end: Math.min(frames.end, this.lengthFrames),
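
The frame_map.ts hits above clamp an incoming frames range to the map's bounds and treat an inverted or empty range as unresolvable. A minimal standalone sketch of that pattern (the FramesRange shape and the lengthFrames bound are assumptions inferred from the snippet, not the real Winscope types):

    // Assumed shape, mirroring the {start, end} usage in the snippet above;
    // `end` is treated as exclusive.
    interface FramesRange {
      start: number;
      end: number;
    }

    // Clamp a requested range into [0, lengthFrames); a clamped range with
    // start >= end is empty, so no entries can be resolved for it.
    function clampFramesRange(
      frames: FramesRange,
      lengthFrames: number,
    ): FramesRange | undefined {
      const clamped = {
        start: Math.max(frames.start, 0),
        end: Math.min(frames.end, lengthFrames),
      };
      return clamped.start >= clamped.end ? undefined : clamped;
    }
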
trace_position.ts:32 const frames = entry.getFramesRange(); constant
33 frame = frames && frames.start < frames.end ? frames.start : undefined;
trace.ts:176 return this.getEntryInternal(index, (index, timestamp, frames) => {
182 frames,
195 return this.getEntryInternal(index, (index, timestamp, frames) => {
201 frames,
349 const frames = this.frameMap?.getFramesRange(entries);
350 return this.createSlice(entries, frames);
376 const frames = this.frameMap?.getFramesRange(entries);
377 return this.createSlice(entries, frames);
385 const frames: FramesRange = {
389 const entries = this.frameMap!.getEntriesRange(frames);
[all …]
frame_mapper.ts:148 let frames = vsyncIdToFrames.get(vsyncId);
149 if (!frames) {
150 frames = {start: Number.MAX_VALUE, end: Number.MIN_VALUE};
152 frames.start = Math.min(frames.start, srcFrames.start);
153 frames.end = Math.max(frames.end, srcFrames.end);
154 vsyncIdToFrames.set(vsyncId, frames);
159 const frames = vsyncIdToFrames.get(vsyncId); constant
160 if (frames === undefined) {
163 frameMapBuilder.setFrames(dstEntry.getIndex(), frames);
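
The frame_mapper.ts hits show a per-vsyncId frames range being widened by taking the minimum of the starts and the maximum of the ends. A simplified sketch of that accumulation step (the bigint key type, the function wrapper, and its name are assumptions; the sentinel values and the min/max widening mirror the snippet above):

    // Same assumed {start, end} shape as in the earlier sketch.
    interface FramesRange {
      start: number;
      end: number;
    }

    // Widen the range recorded for a vsync id so it also covers srcFrames.
    function accumulateFrames(
      vsyncIdToFrames: Map<bigint, FramesRange>,
      vsyncId: bigint,
      srcFrames: FramesRange,
    ): void {
      let frames = vsyncIdToFrames.get(vsyncId);
      if (!frames) {
        // Start from a sentinel range that any real range will replace
        // (values mirror the snippet above).
        frames = {start: Number.MAX_VALUE, end: Number.MIN_VALUE};
      }
      frames.start = Math.min(frames.start, srcFrames.start);
      frames.end = Math.max(frames.end, srcFrames.end);
      vsyncIdToFrames.set(vsyncId, frames);
    }
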
/development/tools/winscope/src/test/unit/
traces_utils.ts:44 const frames = new Map<AbsoluteFrameIndex, Map<TraceType, Array<{}>>>(); constant
47 frames.set(index, new Map<TraceType, Array<{}>>());
49 assertDefined(frames.get(index)).set(
58 return frames;
trace_utils.ts:39 const frames = new Map<AbsoluteFrameIndex, T[]>(); constant
41 frames.set(index, await TraceUtils.extractEntries(frame));
44 return frames;
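
Both test helpers above build a Map keyed by AbsoluteFrameIndex. A generic grouping sketch of that shape (the input record and the groupByFrame name are hypothetical; the real helpers iterate a trace and await per-frame extraction):

    // Assumed alias; in the test utils this indexes frames across all traces.
    type AbsoluteFrameIndex = number;

    // Group entries by the absolute frame index they belong to.
    function groupByFrame<T>(
      entries: Array<{frame: AbsoluteFrameIndex; entry: T}>,
    ): Map<AbsoluteFrameIndex, T[]> {
      const frames = new Map<AbsoluteFrameIndex, T[]>();
      for (const {frame, entry} of entries) {
        const bucket = frames.get(frame) ?? [];
        bucket.push(entry);
        frames.set(frame, bucket);
      }
      return frames;
    }
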
/development/samples/browseable/MediaEffects/
_index.jd:9 These APIs let you apply effects to image frames represented as OpenGL ES 2.0 textures.
10 Image frames can be images loaded from disk, frames from the device's camera, or other
/development/tools/bugreport/src/com/android/bugreport/stacks/
ThreadSnapshot.java:38 public ArrayList<StackFrameSnapshot> frames = new ArrayList<StackFrameSnapshot>(); field in ThreadSnapshot
76 N = that.frames.size(); in ThreadSnapshot()
78 this.frames.add(that.frames.get(i).clone()); in ThreadSnapshot()
ThreadSnapshotParser.java:182 result.frames.add(frame); in parse()
190 result.frames.add(frame); in parse()
198 result.frames.add(frame); in parse()
203 result.frames.add(frame); in parse()
214 result.frames.add(frame); in parse()
223 result.frames.add(frame); in parse()
281 result.frames.add(frame); in parse()
288 result.frames.add(frame); in parse()
307 for (StackFrameSnapshot frame: result.frames) { in parse()
/development/tools/bugreport/src/com/android/bugreport/inspector/
Inspector.java:203 for (StackFrameSnapshot frame: thread.frames) { in combineLocks()
239 final int N = thread.frames.size(); in markOutgoingBinderThread()
242 frame = thread.frames.get(i); in markOutgoingBinderThread()
262 frame = thread.frames.get(i); in markOutgoingBinderThread()
278 frame = thread.frames.get(i); in markOutgoingBinderThread()
296 final int N = thread.frames.size(); in markIncomingBinderThread()
299 frame = thread.frames.get(i); in markIncomingBinderThread()
319 frame = thread.frames.get(i); in markIncomingBinderThread()
333 frame = thread.frames.get(i); in markIncomingBinderThread()
505 final ArrayList<StackFrameSnapshot> frames = thread.frames; in matchesJavaStack() local
[all …]
/development/tools/winscope/src/viewers/common/
abstract_presenter_input_method.ts:296 const frames = imeEntry.getFramesRange(); constant
297 if (!frames || frames.start === frames.end) {
301 const frame = frames.start;
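
As in the trace_position.ts hit earlier, the presenter only uses frames.start after checking that the range exists and is non-empty. A tiny sketch of that guard (the FramesRange shape is assumed as before):

    interface FramesRange {
      start: number;
      end: number;
    }

    // Return the first frame of a range, or undefined when the range is
    // missing, empty (start === end), or inverted.
    function firstFrameOrUndefined(
      frames: FramesRange | undefined,
    ): number | undefined {
      return frames && frames.start < frames.end ? frames.start : undefined;
    }
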
/development/samples/browseable/BasicMediaDecoder/
_index.jd:8 This activity uses a TextureView to render the frames of a video decoded using the
/development/tools/bugreport/src/com/android/bugreport/html/
Renderer.java:294 N = thread.frames.size(); in makeThreadSnapshotHdf()
297 thread.frames.get(i)); in makeThreadSnapshotHdf()
/development/tools/winscope/protos/surfaceflinger/udc/
layers.proto:86 // The number of frames available.
/development/samples/VirtualDeviceManager/
README.md:22 physical client device by sending audio and frames of the virtual displays,
26 * **Client**: installed on the client device. It receives the audio and frames