Searched refs:streams (Results 1 – 25 of 31) sorted by relevance

/frameworks/av/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/
volumes.pfw
4 /Policy/policy/streams/voice_call/applicable_volume_profile/volume_profile = voice_call
5 /Policy/policy/streams/system/applicable_volume_profile/volume_profile = system
6 /Policy/policy/streams/ring/applicable_volume_profile/volume_profile = ring
7 /Policy/policy/streams/music/applicable_volume_profile/volume_profile = music
8 /Policy/policy/streams/alarm/applicable_volume_profile/volume_profile = alarm
9 /Policy/policy/streams/notification/applicable_volume_profile/volume_profile = notification
10 /Policy/policy/streams/bluetooth_sco/applicable_volume_profile/volume_profile = bluetooth_sco
11 …/Policy/policy/streams/enforced_audible/applicable_volume_profile/volume_profile = enforced_audible
12 /Policy/policy/streams/tts/applicable_volume_profile/volume_profile = tts
13 /Policy/policy/streams/accessibility/applicable_volume_profile/volume_profile = accessibility
[all …]
/frameworks/av/services/audiopolicy/common/include/
policy.h
172 static inline bool hasStream(const android::StreamTypeVector &streams, in hasStream() argument
175 return std::find(begin(streams), end(streams), streamType) != end(streams); in hasStream()
183 static inline bool hasVoiceStream(const android::StreamTypeVector &streams) in hasVoiceStream() argument
185 return hasStream(streams, AUDIO_STREAM_VOICE_CALL); in hasVoiceStream()
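The policy.h hit above is a small std::find-based membership helper over a vector of stream types. Below is a minimal, self-contained sketch of the same pattern; StreamType, StreamTypeVector and the STREAM_* constants are stand-ins for the AOSP types (audio_stream_type_t, android::StreamTypeVector), not the real definitions.

    #include <algorithm>
    #include <vector>

    // Stand-in types; the real code uses audio_stream_type_t and
    // android::StreamTypeVector.
    enum StreamType { STREAM_VOICE_CALL, STREAM_MUSIC, STREAM_ALARM };
    using StreamTypeVector = std::vector<StreamType>;

    // Membership test: linear scan with std::find, as in policy.h.
    static inline bool hasStream(const StreamTypeVector &streams, StreamType streamType) {
        return std::find(std::begin(streams), std::end(streams), streamType)
                != std::end(streams);
    }

    // Convenience wrapper, mirroring hasVoiceStream().
    static inline bool hasVoiceStream(const StreamTypeVector &streams) {
        return hasStream(streams, STREAM_VOICE_CALL);
    }

    int main() {
        StreamTypeVector streams = {STREAM_MUSIC, STREAM_ALARM};
        return hasVoiceStream(streams) ? 1 : 0;  // exits 0: no voice stream present
    }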
/frameworks/av/services/audiopolicy/engine/common/src/
ProductStrategy.cpp
97 StreamTypeVector streams; in getSupportedStreams() local
99 if (std::find(begin(streams), end(streams), supportedAttr.mStream) == end(streams) && in getSupportedStreams()
101 streams.push_back(supportedAttr.mStream); in getSupportedStreams()
104 return streams; in getSupportedStreams()
/frameworks/av/include/media/
AudioVolumeGroup.h
34 const StreamTypeVector &streams) : in AudioVolumeGroup() argument
35 mName(name), mGroupId(group), mAudioAttributes(attributes), mStreams(streams) {} in AudioVolumeGroup()
/frameworks/av/media/libaudioclient/include/media/
AudioVolumeGroup.h
34 const StreamTypeVector &streams) : in AudioVolumeGroup() argument
35 mName(name), mGroupId(group), mAudioAttributes(attributes), mStreams(streams) {} in AudioVolumeGroup()
/frameworks/av/services/audiopolicy/common/managerdefinitions/src/
AudioOutputDescriptor.cpp
398 StreamTypeVector streams = streamTypes; in setVolume() local
402 if (streams.empty()) { in setVolume()
403 streams.push_back(AUDIO_STREAM_MUSIC); in setVolume()
417 for (const auto &stream : streams) { in setVolume()
437 if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) { in setVolume()
440 for (const auto &stream : streams) { in setVolume()
634 VolumeSource volumeSource, const StreamTypeVector &streams, in setVolume() argument
640 AudioOutputDescriptor::setVolume(volumeDb, volumeSource, streams, device, delayMs, force); in setVolume()
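The AudioOutputDescriptor.cpp hits show setVolume() copying the caller's stream-type list and falling back to the music stream when that list is empty before iterating it. A rough sketch of just that fallback pattern; the types and the applyVolume() name are illustrative, and the real method also deals with devices, delays and the Bluetooth SCO special case.

    #include <vector>

    // Stand-in types, as in the previous sketch.
    enum StreamType { STREAM_MUSIC, STREAM_ALARM, STREAM_BLUETOOTH_SCO };
    using StreamTypeVector = std::vector<StreamType>;

    void applyVolume(float volumeDb, const StreamTypeVector &streamTypes) {
        StreamTypeVector streams = streamTypes;  // work on a local copy
        if (streams.empty()) {
            streams.push_back(STREAM_MUSIC);     // default to the music stream
        }
        for (const auto &stream : streams) {
            // Per-stream volume application would go here.
            (void)stream;
            (void)volumeDb;
        }
    }

    int main() {
        applyVolume(-12.0f, {});              // empty list: falls back to music
        applyVolume(-12.0f, {STREAM_ALARM});  // explicit list used as-is
        return 0;
    }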
/frameworks/base/media/java/android/media/
AudioManagerInternal.java
62 int getRingerModeAffectedStreams(int streams); in getRingerModeAffectedStreams() argument
/frameworks/av/services/camera/libcameraservice/device3/
Camera3Device.cpp
344 std::vector<wp<Camera3StreamInterface>> streams; in disconnectImpl() local
378 streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0)); in disconnectImpl()
380 streams.push_back(mOutputStreams[i]); in disconnectImpl()
383 streams.push_back(mInputStream); in disconnectImpl()
425 for (auto& weakStream : streams) { in disconnectImpl()
921 camera_metadata_ro_entry streams = request.find(ANDROID_REQUEST_OUTPUT_STREAMS); in convertToRequestList() local
924 for (size_t i = 0; i < streams.count; i++) { in convertToRequestList()
925 surfaceMap[streams.data.i32[i]].push_back(0); in convertToRequestList()
2649 camera_metadata_entry_t streams = in createCaptureRequest() local
2651 if (streams.count == 0) { in createCaptureRequest()
[all …]
/frameworks/av/services/audiopolicy/common/managerdefinitions/include/
AudioOutputDescriptor.h
160 VolumeSource volumeSource, const StreamTypeVector &streams,
336 VolumeSource volumeSource, const StreamTypeVector &streams,
410 VolumeSource volumeSource, const StreamTypeVector &streams,
/frameworks/av/media/libeffects/config/src/
EffectsConfig.cpp
225 bool parseStream(const XMLElement& xmlStream, Effects& effects, std::vector<Stream>* streams) { in parseStream() argument
250 streams->push_back(std::move(stream)); in parseStream()
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/src/google/protobuf/io/
zero_copy_stream_impl.cc
353 ZeroCopyInputStream* const streams[], int count) in ConcatenatingInputStream() argument
354 : streams_(streams), stream_count_(count), bytes_retired_(0) { in ConcatenatingInputStream()
zero_copy_stream_impl.h
308 ConcatenatingInputStream(ZeroCopyInputStream* const streams[], int count);
zero_copy_stream_unittest.cc
942 ZeroCopyInputStream* streams[] = in TEST_F() local
946 ConcatenatingInputStream input(streams, GOOGLE_ARRAYSIZE(streams)); in TEST_F()
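The protobuf hits above are the definition, declaration and test of ConcatenatingInputStream, which presents several ZeroCopyInputStream objects as one contiguous input stream. A small usage sketch; the buffer contents and variable names are illustrative only.

    #include <cstring>
    #include <google/protobuf/io/zero_copy_stream_impl.h>       // ConcatenatingInputStream
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>  // ArrayInputStream

    using google::protobuf::io::ArrayInputStream;
    using google::protobuf::io::ConcatenatingInputStream;
    using google::protobuf::io::ZeroCopyInputStream;

    int main() {
        const char part1[] = "Hello, ";
        const char part2[] = "streams!";

        ArrayInputStream first(part1, static_cast<int>(strlen(part1)));
        ArrayInputStream second(part2, static_cast<int>(strlen(part2)));

        // Same constructor shape as the zero_copy_stream_impl.h hit:
        // an array of ZeroCopyInputStream pointers plus a count.
        ZeroCopyInputStream* streams[] = { &first, &second };
        ConcatenatingInputStream input(streams, 2);

        // Consume the concatenated stream chunk by chunk.
        const void* data;
        int size;
        while (input.Next(&data, &size)) {
            // 'data' points at up to 'size' bytes from whichever underlying
            // stream is currently being read.
        }
        return 0;
    }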
/frameworks/hardware/interfaces/cameraservice/device/2.0/
types.hal
320 * cases when they would require create more streams than the limits the
340 * buffer streams simultaneously. The ICameraDevice may be able to share the buffers used
342 * footprint. The client must only set the same set id for the streams that are not
358 * Note: this must only be used when using deferred streams. Otherwise, it
366 * Note: this must only be used when using deferred streams. Otherwise, it
/frameworks/av/services/audioflinger/
PatchPanel.cpp
711 if (module.second.streams.count(stream)) { in getDownstreamSoftwarePatches()
734 mInsertedModules[audioHwDevice->handle()].streams.insert(stream); in notifyStreamOpened()
741 module.second.streams.erase(stream); in notifyStreamClosed()
795 if (!module.second.streams.empty() || !module.second.sw_patches.empty()) { in dump()
801 for (const auto& stream : module.second.streams) { in dump()
PatchPanel.h
217 std::set<audio_io_handle_t> streams; member
/frameworks/base/services/core/java/com/android/server/notification/
ZenModeHelper.java
1248 public int getRingerModeAffectedStreams(int streams) { in getRingerModeAffectedStreams() argument
1251 streams |= (1 << AudioSystem.STREAM_RING) | in getRingerModeAffectedStreams()
1257 streams |= (1 << AudioSystem.STREAM_ALARM) | in getRingerModeAffectedStreams()
1260 streams &= ~((1 << AudioSystem.STREAM_ALARM) | in getRingerModeAffectedStreams()
1268 streams &= ~(1 << AudioSystem.STREAM_SYSTEM); in getRingerModeAffectedStreams()
1270 streams |= (1 << AudioSystem.STREAM_SYSTEM); in getRingerModeAffectedStreams()
1272 return streams; in getRingerModeAffectedStreams()
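The ZenModeHelper.java hit builds a bitmask of stream types affected by ringer mode by setting and clearing individual per-stream bits. The C++ sketch below reproduces only that bit manipulation; the stream bit positions and the two boolean parameters are placeholders for illustration, not the real AudioSystem.STREAM_* constants or zen-mode state.

    #include <cstdio>

    // Illustrative stream bit positions; see AudioSystem for the real constants.
    enum Stream { STREAM_SYSTEM = 1, STREAM_RING = 2, STREAM_ALARM = 4, STREAM_NOTIFICATION = 5 };

    int getRingerModeAffectedStreams(int streams, bool muteAlarms, bool muteSystem) {
        // Ring and notification streams always follow ringer mode.
        streams |= (1 << STREAM_RING) | (1 << STREAM_NOTIFICATION);

        // Include or exclude alarms depending on policy.
        if (muteAlarms) {
            streams |= (1 << STREAM_ALARM);
        } else {
            streams &= ~(1 << STREAM_ALARM);
        }

        // Same idea for the system stream.
        if (muteSystem) {
            streams |= (1 << STREAM_SYSTEM);
        } else {
            streams &= ~(1 << STREAM_SYSTEM);
        }
        return streams;
    }

    int main() {
        std::printf("mask=0x%x\n",
                    getRingerModeAffectedStreams(0, /*muteAlarms=*/true, /*muteSystem=*/false));
        return 0;
    }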
/frameworks/base/tests/AccessoryDisplay/
README
36 an encoder and streams the output to the sink over USB. Then
/frameworks/base/core/proto/android/providers/settings/
system.proto
228 // Determines which streams are affected by ringer mode changes. The stream
232 // Which streams are affected by mute. The stream type's bit should be set
/frameworks/base/core/java/android/content/
Intent.java
10881 final ArrayList<Uri> streams = getParcelableArrayListExtra(EXTRA_STREAM); in fixUris() local
10882 if (streams != null) { in fixUris()
10884 for (int i = 0; i < streams.size(); i++) { in fixUris()
10885 newStreams.add(maybeAddUserId(streams.get(i), contentUserHint)); in fixUris()
10961 final ArrayList<Uri> streams = getParcelableArrayListExtra(EXTRA_STREAM); in migrateExtraStreamToClipData() local
10965 if (streams != null) { in migrateExtraStreamToClipData()
10966 num = streams.size(); in migrateExtraStreamToClipData()
10985 makeClipItem(streams, texts, htmlTexts, 0)); in migrateExtraStreamToClipData()
10988 clipData.addItem(makeClipItem(streams, texts, htmlTexts, i)); in migrateExtraStreamToClipData()
11037 private static ClipData.Item makeClipItem(ArrayList<Uri> streams, ArrayList<CharSequence> texts, in makeClipItem() argument
[all …]
/frameworks/av/services/camera/libcameraservice/api2/
CameraDeviceClient.cpp
656 streamConfiguration.streams.resize(streamCount); in isSessionConfigurationSupported()
659 streamConfiguration.streams[streamIdx++] = {{/*streamId*/0, in isSessionConfigurationSupported()
699 &streamConfiguration.streams[streamIdx++]); in isSessionConfigurationSupported()
746 streamConfiguration.streams.resize(streamCount); in isSessionConfigurationSupported()
752 physicalCameraId, &streamConfiguration.streams[streamIdx++]); in isSessionConfigurationSupported()
757 physicalCameraId, &streamConfiguration.streams[streamIdx++]); in isSessionConfigurationSupported()
/frameworks/av/services/audiopolicy/engine/config/src/
EngineConfig.cpp
468 StreamVector streams = {}; in deserialize() local
503 for (const auto &stream : streams) { in deserialize()
/frameworks/base/core/proto/android/stats/mediametrics/
mediametrics.proto
160 * Track Media Extractor (pulling video/audio streams out of containers) usage
/frameworks/av/services/audiopolicy/service/
AudioPolicyEffects.cpp
900 auto loadProcessingChain = [](auto& processingChain, auto& streams) { in loadAudioEffectXmlConfig() argument
907 streams.add(stream.type, effectDescs.release()); in loadAudioEffectXmlConfig()
/frameworks/av/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Settings/
device_for_product_strategy_accessibility.pfw
8 # Other case are handled programmatically has involving activity of streams.
