upload android base code part4
This commit is contained in:
parent
b9e30e05b1
commit
78ea2404cd
23455 changed files with 5250148 additions and 0 deletions
110
android/hardware/interfaces/audio/2.0/Android.bp
Normal file
110
android/hardware/interfaces/audio/2.0/Android.bp
Normal file
|
@ -0,0 +1,110 @@
|
|||
// This file is autogenerated by hidl-gen. Do not edit manually.
|
||||
|
||||
filegroup {
|
||||
name: "android.hardware.audio@2.0_hal",
|
||||
srcs: [
|
||||
"types.hal",
|
||||
"IDevice.hal",
|
||||
"IDevicesFactory.hal",
|
||||
"IPrimaryDevice.hal",
|
||||
"IStream.hal",
|
||||
"IStreamIn.hal",
|
||||
"IStreamOut.hal",
|
||||
"IStreamOutCallback.hal",
|
||||
],
|
||||
}
|
||||
|
||||
genrule {
|
||||
name: "android.hardware.audio@2.0_genc++",
|
||||
tools: ["hidl-gen"],
|
||||
cmd: "$(location hidl-gen) -o $(genDir) -Lc++-sources -randroid.hardware:hardware/interfaces -randroid.hidl:system/libhidl/transport android.hardware.audio@2.0",
|
||||
srcs: [
|
||||
":android.hardware.audio@2.0_hal",
|
||||
],
|
||||
out: [
|
||||
"android/hardware/audio/2.0/types.cpp",
|
||||
"android/hardware/audio/2.0/DeviceAll.cpp",
|
||||
"android/hardware/audio/2.0/DevicesFactoryAll.cpp",
|
||||
"android/hardware/audio/2.0/PrimaryDeviceAll.cpp",
|
||||
"android/hardware/audio/2.0/StreamAll.cpp",
|
||||
"android/hardware/audio/2.0/StreamInAll.cpp",
|
||||
"android/hardware/audio/2.0/StreamOutAll.cpp",
|
||||
"android/hardware/audio/2.0/StreamOutCallbackAll.cpp",
|
||||
],
|
||||
}
|
||||
|
||||
genrule {
|
||||
name: "android.hardware.audio@2.0_genc++_headers",
|
||||
tools: ["hidl-gen"],
|
||||
cmd: "$(location hidl-gen) -o $(genDir) -Lc++-headers -randroid.hardware:hardware/interfaces -randroid.hidl:system/libhidl/transport android.hardware.audio@2.0",
|
||||
srcs: [
|
||||
":android.hardware.audio@2.0_hal",
|
||||
],
|
||||
out: [
|
||||
"android/hardware/audio/2.0/types.h",
|
||||
"android/hardware/audio/2.0/hwtypes.h",
|
||||
"android/hardware/audio/2.0/IDevice.h",
|
||||
"android/hardware/audio/2.0/IHwDevice.h",
|
||||
"android/hardware/audio/2.0/BnHwDevice.h",
|
||||
"android/hardware/audio/2.0/BpHwDevice.h",
|
||||
"android/hardware/audio/2.0/BsDevice.h",
|
||||
"android/hardware/audio/2.0/IDevicesFactory.h",
|
||||
"android/hardware/audio/2.0/IHwDevicesFactory.h",
|
||||
"android/hardware/audio/2.0/BnHwDevicesFactory.h",
|
||||
"android/hardware/audio/2.0/BpHwDevicesFactory.h",
|
||||
"android/hardware/audio/2.0/BsDevicesFactory.h",
|
||||
"android/hardware/audio/2.0/IPrimaryDevice.h",
|
||||
"android/hardware/audio/2.0/IHwPrimaryDevice.h",
|
||||
"android/hardware/audio/2.0/BnHwPrimaryDevice.h",
|
||||
"android/hardware/audio/2.0/BpHwPrimaryDevice.h",
|
||||
"android/hardware/audio/2.0/BsPrimaryDevice.h",
|
||||
"android/hardware/audio/2.0/IStream.h",
|
||||
"android/hardware/audio/2.0/IHwStream.h",
|
||||
"android/hardware/audio/2.0/BnHwStream.h",
|
||||
"android/hardware/audio/2.0/BpHwStream.h",
|
||||
"android/hardware/audio/2.0/BsStream.h",
|
||||
"android/hardware/audio/2.0/IStreamIn.h",
|
||||
"android/hardware/audio/2.0/IHwStreamIn.h",
|
||||
"android/hardware/audio/2.0/BnHwStreamIn.h",
|
||||
"android/hardware/audio/2.0/BpHwStreamIn.h",
|
||||
"android/hardware/audio/2.0/BsStreamIn.h",
|
||||
"android/hardware/audio/2.0/IStreamOut.h",
|
||||
"android/hardware/audio/2.0/IHwStreamOut.h",
|
||||
"android/hardware/audio/2.0/BnHwStreamOut.h",
|
||||
"android/hardware/audio/2.0/BpHwStreamOut.h",
|
||||
"android/hardware/audio/2.0/BsStreamOut.h",
|
||||
"android/hardware/audio/2.0/IStreamOutCallback.h",
|
||||
"android/hardware/audio/2.0/IHwStreamOutCallback.h",
|
||||
"android/hardware/audio/2.0/BnHwStreamOutCallback.h",
|
||||
"android/hardware/audio/2.0/BpHwStreamOutCallback.h",
|
||||
"android/hardware/audio/2.0/BsStreamOutCallback.h",
|
||||
],
|
||||
}
|
||||
|
||||
cc_library {
|
||||
name: "android.hardware.audio@2.0",
|
||||
defaults: ["hidl-module-defaults"],
|
||||
generated_sources: ["android.hardware.audio@2.0_genc++"],
|
||||
generated_headers: ["android.hardware.audio@2.0_genc++_headers"],
|
||||
export_generated_headers: ["android.hardware.audio@2.0_genc++_headers"],
|
||||
vendor_available: true,
|
||||
vndk: {
|
||||
enabled: true,
|
||||
},
|
||||
shared_libs: [
|
||||
"libhidlbase",
|
||||
"libhidltransport",
|
||||
"libhwbinder",
|
||||
"liblog",
|
||||
"libutils",
|
||||
"libcutils",
|
||||
"android.hardware.audio.common@2.0",
|
||||
],
|
||||
export_shared_lib_headers: [
|
||||
"libhidlbase",
|
||||
"libhidltransport",
|
||||
"libhwbinder",
|
||||
"libutils",
|
||||
"android.hardware.audio.common@2.0",
|
||||
],
|
||||
}
|
46
android/hardware/interfaces/audio/2.0/Android.mk
Normal file
46
android/hardware/interfaces/audio/2.0/Android.mk
Normal file
|
@ -0,0 +1,46 @@
|
|||
# This file is autogenerated by hidl-gen. Do not edit manually.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
################################################################################
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
LOCAL_MODULE := android.hardware.audio-V2.0-java-constants
|
||||
LOCAL_MODULE_CLASS := JAVA_LIBRARIES
|
||||
|
||||
intermediates := $(call local-generated-sources-dir, COMMON)
|
||||
|
||||
HIDL := $(HOST_OUT_EXECUTABLES)/hidl-gen$(HOST_EXECUTABLE_SUFFIX)
|
||||
#
|
||||
GEN := $(intermediates)/android/hardware/audio/V2_0/Constants.java
|
||||
$(GEN): $(HIDL)
|
||||
$(GEN): $(LOCAL_PATH)/types.hal
|
||||
$(GEN): $(LOCAL_PATH)/IDevice.hal
|
||||
$(GEN): $(LOCAL_PATH)/IDevicesFactory.hal
|
||||
$(GEN): $(LOCAL_PATH)/IPrimaryDevice.hal
|
||||
$(GEN): $(LOCAL_PATH)/IStream.hal
|
||||
$(GEN): $(LOCAL_PATH)/IStreamIn.hal
|
||||
$(GEN): $(LOCAL_PATH)/IStreamOut.hal
|
||||
$(GEN): $(LOCAL_PATH)/IStreamOutCallback.hal
|
||||
|
||||
$(GEN): PRIVATE_HIDL := $(HIDL)
|
||||
$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates)
|
||||
$(GEN): PRIVATE_CUSTOM_TOOL = \
|
||||
$(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \
|
||||
-Ljava-constants \
|
||||
-randroid.hardware:hardware/interfaces \
|
||||
-randroid.hidl:system/libhidl/transport \
|
||||
android.hardware.audio@2.0
|
||||
|
||||
$(GEN):
|
||||
$(transform-generated-source)
|
||||
LOCAL_GENERATED_SOURCES += $(GEN)
|
||||
# Avoid dependency cycle of framework.jar -> this-library -> framework.jar
|
||||
LOCAL_NO_STANDARD_LIBRARIES := true
|
||||
LOCAL_JAVA_LIBRARIES := core-oj
|
||||
|
||||
include $(BUILD_STATIC_JAVA_LIBRARY)
|
||||
|
||||
|
||||
|
||||
include $(call all-makefiles-under,$(LOCAL_PATH))
|
246
android/hardware/interfaces/audio/2.0/IDevice.hal
Normal file
246
android/hardware/interfaces/audio/2.0/IDevice.hal
Normal file
|
@ -0,0 +1,246 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
import android.hardware.audio.common@2.0;
|
||||
import IStreamIn;
|
||||
import IStreamOut;
|
||||
|
||||
interface IDevice {
|
||||
typedef android.hardware.audio@2.0::Result Result;
|
||||
|
||||
/**
|
||||
* Returns whether the audio hardware interface has been initialized.
|
||||
*
|
||||
* @return retval OK on success, NOT_INITIALIZED on failure.
|
||||
*/
|
||||
initCheck() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Sets the audio volume for all audio activities other than voice call. If
|
||||
* NOT_SUPPORTED is returned, the software mixer will emulate this
|
||||
* capability.
|
||||
*
|
||||
* @param volume 1.0f means unity, 0.0f is zero.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setMasterVolume(float volume) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Get the current master volume value for the HAL, if the HAL supports
|
||||
* master volume control. For example, AudioFlinger will query this value
|
||||
* from the primary audio HAL when the service starts and use the value for
|
||||
* setting the initial master volume across all HALs. HALs which do not
|
||||
* support this method must return NOT_SUPPORTED in 'retval'.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return volume 1.0f means unity, 0.0f is zero.
|
||||
*/
|
||||
getMasterVolume() generates (Result retval, float volume);
|
||||
|
||||
/**
|
||||
* Sets microphone muting state.
|
||||
*
|
||||
* @param mute whether microphone is muted.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setMicMute(bool mute) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Gets whether microphone is muted.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return mute whether microphone is muted.
|
||||
*/
|
||||
getMicMute() generates (Result retval, bool mute);
|
||||
|
||||
/**
|
||||
* Set the audio mute status for all audio activities. If the return value
|
||||
* is NOT_SUPPORTED, the software mixer will emulate this capability.
|
||||
*
|
||||
* @param mute whether audio is muted.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setMasterMute(bool mute) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Get the current master mute status for the HAL, if the HAL supports
|
||||
* master mute control. AudioFlinger will query this value from the primary
|
||||
* audio HAL when the service starts and use the value for setting the
|
||||
* initial master mute across all HALs. HAL must indicate that the feature
|
||||
* is not supported by returning NOT_SUPPORTED status.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return mute whether audio is muted.
|
||||
*/
|
||||
getMasterMute() generates (Result retval, bool mute);
|
||||
|
||||
/**
|
||||
* Returns audio input buffer size according to parameters passed or
|
||||
* INVALID_ARGUMENTS if one of the parameters is not supported.
|
||||
*
|
||||
* @param config audio configuration.
|
||||
* @return retval operation completion status.
|
||||
* @return bufferSize input buffer size in bytes.
|
||||
*/
|
||||
getInputBufferSize(AudioConfig config)
|
||||
generates (Result retval, uint64_t bufferSize);
|
||||
|
||||
/**
|
||||
* This method creates and opens the audio hardware output stream.
|
||||
* If the stream can not be opened with the proposed audio config,
|
||||
* HAL must provide suggested values for the audio config.
|
||||
*
|
||||
* @param ioHandle handle assigned by AudioFlinger.
|
||||
* @param device device type and (if needed) address.
|
||||
* @param config stream configuration.
|
||||
* @param flags additional flags.
|
||||
* @return retval operation completion status.
|
||||
* @return outStream created output stream.
|
||||
* @return suggestedConfig in case of invalid parameters, suggested config.
|
||||
*/
|
||||
openOutputStream(
|
||||
AudioIoHandle ioHandle,
|
||||
DeviceAddress device,
|
||||
AudioConfig config,
|
||||
AudioOutputFlag flags) generates (
|
||||
Result retval,
|
||||
IStreamOut outStream,
|
||||
AudioConfig suggestedConfig);
|
||||
|
||||
/**
|
||||
* This method creates and opens the audio hardware input stream.
|
||||
* If the stream can not be opened with the proposed audio config,
|
||||
* HAL must provide suggested values for the audio config.
|
||||
*
|
||||
* @param ioHandle handle assigned by AudioFlinger.
|
||||
* @param device device type and (if needed) address.
|
||||
* @param config stream configuration.
|
||||
* @param flags additional flags.
|
||||
* @param source source specification.
|
||||
* @return retval operation completion status.
|
||||
* @return inStream in case of success, created input stream.
|
||||
* @return suggestedConfig in case of invalid parameters, suggested config.
|
||||
*/
|
||||
openInputStream(
|
||||
AudioIoHandle ioHandle,
|
||||
DeviceAddress device,
|
||||
AudioConfig config,
|
||||
AudioInputFlag flags,
|
||||
AudioSource source) generates (
|
||||
Result retval,
|
||||
IStreamIn inStream,
|
||||
AudioConfig suggestedConfig);
|
||||
|
||||
/**
|
||||
* Returns whether HAL supports audio patches.
|
||||
*
|
||||
* @return supports true if audio patches are supported.
|
||||
*/
|
||||
supportsAudioPatches() generates (bool supports);
|
||||
|
||||
/**
|
||||
* Creates an audio patch between several source and sink ports. The handle
|
||||
* is allocated by the HAL and must be unique for this audio HAL module.
|
||||
*
|
||||
* @param sources patch sources.
|
||||
* @param sinks patch sinks.
|
||||
* @return retval operation completion status.
|
||||
* @return patch created patch handle.
|
||||
*/
|
||||
createAudioPatch(vec<AudioPortConfig> sources, vec<AudioPortConfig> sinks)
|
||||
generates (Result retval, AudioPatchHandle patch);
|
||||
|
||||
/**
|
||||
* Release an audio patch.
|
||||
*
|
||||
* @param patch patch handle.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
releaseAudioPatch(AudioPatchHandle patch) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Returns the list of supported attributes for a given audio port.
|
||||
*
|
||||
* As input, 'port' contains the information (type, role, address etc...)
|
||||
* needed by the HAL to identify the port.
|
||||
*
|
||||
* As output, 'resultPort' contains possible attributes (sampling rates,
|
||||
* formats, channel masks, gain controllers...) for this port.
|
||||
*
|
||||
* @param port port identifier.
|
||||
* @return retval operation completion status.
|
||||
* @return resultPort port descriptor with all parameters filled up.
|
||||
*/
|
||||
getAudioPort(AudioPort port)
|
||||
generates (Result retval, AudioPort resultPort);
|
||||
|
||||
/**
|
||||
* Set audio port configuration.
|
||||
*
|
||||
* @param config audio port configuration.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setAudioPortConfig(AudioPortConfig config) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Gets the HW synchronization source of the device. Calling this method is
|
||||
* equivalent to getting AUDIO_PARAMETER_HW_AV_SYNC on the legacy HAL.
|
||||
*
|
||||
* @return hwAvSync HW synchronization source
|
||||
*/
|
||||
getHwAvSync() generates (AudioHwSync hwAvSync);
|
||||
|
||||
/**
|
||||
* Sets whether the screen is on. Calling this method is equivalent to
|
||||
* setting AUDIO_PARAMETER_KEY_SCREEN_STATE on the legacy HAL.
|
||||
*
|
||||
* @param turnedOn whether the screen is turned on.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setScreenState(bool turnedOn) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Generic method for retrieving vendor-specific parameter values.
|
||||
* The framework does not interpret the parameters, they are passed
|
||||
* in an opaque manner between a vendor application and HAL.
|
||||
*
|
||||
* @param keys parameter keys.
|
||||
* @return retval operation completion status.
|
||||
* @return parameters parameter key value pairs.
|
||||
*/
|
||||
getParameters(vec<string> keys)
|
||||
generates (Result retval, vec<ParameterValue> parameters);
|
||||
|
||||
/**
|
||||
* Generic method for setting vendor-specific parameter values.
|
||||
* The framework does not interpret the parameters, they are passed
|
||||
* in an opaque manner between a vendor application and HAL.
|
||||
*
|
||||
* @param parameters parameter key value pairs.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setParameters(vec<ParameterValue> parameters) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Dumps information about the stream into the provided file descriptor.
|
||||
* This is used for the dumpsys facility.
|
||||
*
|
||||
* @param fd dump file descriptor.
|
||||
*/
|
||||
debugDump(handle fd);
|
||||
};
|
45
android/hardware/interfaces/audio/2.0/IDevicesFactory.hal
Normal file
45
android/hardware/interfaces/audio/2.0/IDevicesFactory.hal
Normal file
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
import android.hardware.audio.common@2.0;
|
||||
import IDevice;
|
||||
|
||||
interface IDevicesFactory {
|
||||
typedef android.hardware.audio@2.0::Result Result;
|
||||
|
||||
enum Device : int32_t {
|
||||
PRIMARY,
|
||||
A2DP,
|
||||
USB,
|
||||
R_SUBMIX,
|
||||
STUB
|
||||
};
|
||||
|
||||
/**
|
||||
* Opens an audio device. To close the device, it is necessary to release
|
||||
* references to the returned device object.
|
||||
*
|
||||
* @param device device type.
|
||||
* @return retval operation completion status. Returns INVALID_ARGUMENTS
|
||||
* if there is no corresponding hardware module found,
|
||||
* NOT_INITIALIZED if an error occured while opening the hardware
|
||||
* module.
|
||||
* @return result the interface for the created device.
|
||||
*/
|
||||
openDevice(Device device) generates (Result retval, IDevice result);
|
||||
};
|
123
android/hardware/interfaces/audio/2.0/IPrimaryDevice.hal
Normal file
123
android/hardware/interfaces/audio/2.0/IPrimaryDevice.hal
Normal file
|
@ -0,0 +1,123 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
import android.hardware.audio.common@2.0;
|
||||
import IDevice;
|
||||
|
||||
interface IPrimaryDevice extends IDevice {
|
||||
typedef android.hardware.audio@2.0::Result Result;
|
||||
|
||||
/**
|
||||
* Sets the audio volume of a voice call.
|
||||
*
|
||||
* @param volume 1.0f means unity, 0.0f is zero.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setVoiceVolume(float volume) generates (Result retval);
|
||||
|
||||
/**
|
||||
* This method is used to notify the HAL about audio mode changes.
|
||||
*
|
||||
* @param mode new mode.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setMode(AudioMode mode) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Gets whether BT SCO Noise Reduction and Echo Cancellation are enabled.
|
||||
* Calling this method is equivalent to getting AUDIO_PARAMETER_KEY_BT_NREC
|
||||
* on the legacy HAL.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return enabled whether BT SCO NR + EC are enabled.
|
||||
*/
|
||||
getBtScoNrecEnabled() generates (Result retval, bool enabled);
|
||||
|
||||
/**
|
||||
* Sets whether BT SCO Noise Reduction and Echo Cancellation are enabled.
|
||||
* Calling this method is equivalent to setting AUDIO_PARAMETER_KEY_BT_NREC
|
||||
* on the legacy HAL.
|
||||
*
|
||||
* @param enabled whether BT SCO NR + EC are enabled.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setBtScoNrecEnabled(bool enabled) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Gets whether BT SCO Wideband mode is enabled. Calling this method is
|
||||
* equivalent to getting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return enabled whether BT Wideband is enabled.
|
||||
*/
|
||||
getBtScoWidebandEnabled() generates (Result retval, bool enabled);
|
||||
|
||||
/**
|
||||
* Sets whether BT SCO Wideband mode is enabled. Calling this method is
|
||||
* equivalent to setting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL.
|
||||
*
|
||||
* @param enabled whether BT Wideband is enabled.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setBtScoWidebandEnabled(bool enabled) generates (Result retval);
|
||||
|
||||
enum TtyMode : int32_t {
|
||||
OFF,
|
||||
VCO,
|
||||
HCO,
|
||||
FULL
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets current TTY mode selection. Calling this method is equivalent to
|
||||
* getting AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return mode TTY mode.
|
||||
*/
|
||||
getTtyMode() generates (Result retval, TtyMode mode);
|
||||
|
||||
/**
|
||||
* Sets current TTY mode. Calling this method is equivalent to setting
|
||||
* AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL.
|
||||
*
|
||||
* @param mode TTY mode.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setTtyMode(TtyMode mode) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Gets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is
|
||||
* enabled. Calling this method is equivalent to getting
|
||||
* AUDIO_PARAMETER_KEY_HAC on the legacy HAL.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return enabled whether HAC mode is enabled.
|
||||
*/
|
||||
getHacEnabled() generates (Result retval, bool enabled);
|
||||
|
||||
/**
|
||||
* Sets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is
|
||||
* enabled. Calling this method is equivalent to setting
|
||||
* AUDIO_PARAMETER_KEY_HAC on the legacy HAL.
|
||||
*
|
||||
* @param enabled whether HAC mode is enabled.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setHacEnabled(bool enabled) generates (Result retval);
|
||||
};
|
294
android/hardware/interfaces/audio/2.0/IStream.hal
Normal file
294
android/hardware/interfaces/audio/2.0/IStream.hal
Normal file
|
@ -0,0 +1,294 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
import android.hardware.audio.common@2.0;
|
||||
import android.hardware.audio.effect@2.0::IEffect;
|
||||
|
||||
interface IStream {
|
||||
typedef android.hardware.audio@2.0::Result Result;
|
||||
|
||||
/**
|
||||
* Return the frame size (number of bytes per sample).
|
||||
*
|
||||
* @return frameSize frame size in bytes.
|
||||
*/
|
||||
getFrameSize() generates (uint64_t frameSize);
|
||||
|
||||
/**
|
||||
* Return the frame count of the buffer. Calling this method is equivalent
|
||||
* to getting AUDIO_PARAMETER_STREAM_FRAME_COUNT on the legacy HAL.
|
||||
*
|
||||
* @return count frame count.
|
||||
*/
|
||||
getFrameCount() generates (uint64_t count);
|
||||
|
||||
/**
|
||||
* Return the size of input/output buffer in bytes for this stream.
|
||||
* It must be a multiple of the frame size.
|
||||
*
|
||||
* @return buffer buffer size in bytes.
|
||||
*/
|
||||
getBufferSize() generates (uint64_t bufferSize);
|
||||
|
||||
/**
|
||||
* Return the sampling rate in Hz.
|
||||
*
|
||||
* @return sampleRateHz sample rate in Hz.
|
||||
*/
|
||||
getSampleRate() generates (uint32_t sampleRateHz);
|
||||
|
||||
/**
|
||||
* Return supported sampling rates of the stream. Calling this method is
|
||||
* equivalent to getting AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES on the
|
||||
* legacy HAL.
|
||||
*
|
||||
* @return sampleRateHz supported sample rates.
|
||||
*/
|
||||
getSupportedSampleRates() generates (vec<uint32_t> sampleRates);
|
||||
|
||||
/**
|
||||
* Sets the sampling rate of the stream. Calling this method is equivalent
|
||||
* to setting AUDIO_PARAMETER_STREAM_SAMPLING_RATE on the legacy HAL.
|
||||
*
|
||||
* @param sampleRateHz sample rate in Hz.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setSampleRate(uint32_t sampleRateHz) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Return the channel mask of the stream.
|
||||
*
|
||||
* @return mask channel mask.
|
||||
*/
|
||||
getChannelMask() generates (AudioChannelMask mask);
|
||||
|
||||
/**
|
||||
* Return supported channel masks of the stream. Calling this method is
|
||||
* equivalent to getting AUDIO_PARAMETER_STREAM_SUP_CHANNELS on the legacy
|
||||
* HAL.
|
||||
*
|
||||
* @return masks supported audio masks.
|
||||
*/
|
||||
getSupportedChannelMasks() generates (vec<AudioChannelMask> masks);
|
||||
|
||||
/**
|
||||
* Sets the channel mask of the stream. Calling this method is equivalent to
|
||||
* setting AUDIO_PARAMETER_STREAM_CHANNELS on the legacy HAL.
|
||||
*
|
||||
* @param format audio format.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setChannelMask(AudioChannelMask mask) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Return the audio format of the stream.
|
||||
*
|
||||
* @return format audio format.
|
||||
*/
|
||||
getFormat() generates (AudioFormat format);
|
||||
|
||||
/**
|
||||
* Return supported audio formats of the stream. Calling this method is
|
||||
* equivalent to getting AUDIO_PARAMETER_STREAM_SUP_FORMATS on the legacy
|
||||
* HAL.
|
||||
*
|
||||
* @return formats supported audio formats.
|
||||
*/
|
||||
getSupportedFormats() generates (vec<AudioFormat> formats);
|
||||
|
||||
/**
|
||||
* Sets the audio format of the stream. Calling this method is equivalent to
|
||||
* setting AUDIO_PARAMETER_STREAM_FORMAT on the legacy HAL.
|
||||
*
|
||||
* @param format audio format.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setFormat(AudioFormat format) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Convenience method for retrieving several stream parameters in
|
||||
* one transaction.
|
||||
*
|
||||
* @return sampleRateHz sample rate in Hz.
|
||||
* @return mask channel mask.
|
||||
* @return format audio format.
|
||||
*/
|
||||
getAudioProperties() generates (
|
||||
uint32_t sampleRateHz, AudioChannelMask mask, AudioFormat format);
|
||||
|
||||
/**
|
||||
* Applies audio effect to the stream.
|
||||
*
|
||||
* @param effectId effect ID (obtained from IEffectsFactory.createEffect) of
|
||||
* the effect to apply.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
addEffect(uint64_t effectId) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Stops application of the effect to the stream.
|
||||
*
|
||||
* @param effectId effect ID (obtained from IEffectsFactory.createEffect) of
|
||||
* the effect to remove.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
removeEffect(uint64_t effectId) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Put the audio hardware input/output into standby mode.
|
||||
* Driver must exit from standby mode at the next I/O operation.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
standby() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Return the set of device(s) which this stream is connected to.
|
||||
*
|
||||
* @return device set of device(s) which this stream is connected to.
|
||||
*/
|
||||
getDevice() generates (AudioDevice device);
|
||||
|
||||
/**
|
||||
* Connects the stream to the device.
|
||||
*
|
||||
* This method must only be used for HALs that do not support
|
||||
* 'IDevice.createAudioPatch' method. Calling this method is
|
||||
* equivalent to setting AUDIO_PARAMETER_STREAM_ROUTING in the legacy HAL
|
||||
* interface.
|
||||
*
|
||||
* @param address device to connect the stream to.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setDevice(DeviceAddress address) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Notifies the stream about device connection state. Calling this method is
|
||||
* equivalent to setting AUDIO_PARAMETER_DEVICE_[DIS]CONNECT on the legacy
|
||||
* HAL.
|
||||
*
|
||||
* @param address audio device specification.
|
||||
* @param connected whether the device is connected.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setConnectedState(DeviceAddress address, bool connected)
|
||||
generates (Result retval);
|
||||
|
||||
/**
|
||||
* Sets the HW synchronization source. Calling this method is equivalent to
|
||||
* setting AUDIO_PARAMETER_STREAM_HW_AV_SYNC on the legacy HAL.
|
||||
*
|
||||
* @param hwAvSync HW synchronization source
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setHwAvSync(AudioHwSync hwAvSync) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Generic method for retrieving vendor-specific parameter values.
|
||||
* The framework does not interpret the parameters, they are passed
|
||||
* in an opaque manner between a vendor application and HAL.
|
||||
*
|
||||
* @param keys parameter keys.
|
||||
* @return retval operation completion status.
|
||||
* @return parameters parameter key value pairs.
|
||||
*/
|
||||
getParameters(vec<string> keys)
|
||||
generates (Result retval, vec<ParameterValue> parameters);
|
||||
|
||||
/**
|
||||
* Generic method for setting vendor-specific parameter values.
|
||||
* The framework does not interpret the parameters, they are passed
|
||||
* in an opaque manner between a vendor application and HAL.
|
||||
*
|
||||
* @param parameters parameter key value pairs.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setParameters(vec<ParameterValue> parameters) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Dumps information about the stream into the provided file descriptor.
|
||||
* This is used for the dumpsys facility.
|
||||
*
|
||||
* @param fd dump file descriptor.
|
||||
*/
|
||||
debugDump(handle fd);
|
||||
|
||||
/**
|
||||
* Called by the framework to start a stream operating in mmap mode.
|
||||
* createMmapBuffer() must be called before calling start().
|
||||
* Function only implemented by streams operating in mmap mode.
|
||||
*
|
||||
* @return retval OK in case the success.
|
||||
* NOT_SUPPORTED on non mmap mode streams
|
||||
* INVALID_STATE if called out of sequence
|
||||
*/
|
||||
start() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Called by the framework to stop a stream operating in mmap mode.
|
||||
* Function only implemented by streams operating in mmap mode.
|
||||
*
|
||||
* @return retval OK in case the succes.
|
||||
* NOT_SUPPORTED on non mmap mode streams
|
||||
* INVALID_STATE if called out of sequence
|
||||
*/
|
||||
stop() generates (Result retval) ;
|
||||
|
||||
/**
|
||||
* Called by the framework to retrieve information on the mmap buffer used for audio
|
||||
* samples transfer.
|
||||
* Function only implemented by streams operating in mmap mode.
|
||||
*
|
||||
* @param minSizeFrames minimum buffer size requested. The actual buffer
|
||||
* size returned in struct MmapBufferInfo can be larger.
|
||||
* @return retval OK in case the success.
|
||||
* NOT_SUPPORTED on non mmap mode streams
|
||||
* NOT_INITIALIZED in case of memory allocation error
|
||||
* INVALID_ARGUMENTS if the requested buffer size is too large
|
||||
* INVALID_STATE if called out of sequence
|
||||
* @return info a MmapBufferInfo struct containing information on the MMMAP buffer created.
|
||||
*/
|
||||
createMmapBuffer(int32_t minSizeFrames)
|
||||
generates (Result retval, MmapBufferInfo info);
|
||||
|
||||
/**
|
||||
* Called by the framework to read current read/write position in the mmap buffer
|
||||
* with associated time stamp.
|
||||
* Function only implemented by streams operating in mmap mode.
|
||||
*
|
||||
* @return retval OK in case the success.
|
||||
* NOT_SUPPORTED on non mmap mode streams
|
||||
* INVALID_STATE if called out of sequence
|
||||
* @return position a MmapPosition struct containing current HW read/write position in frames
|
||||
* with associated time stamp.
|
||||
*/
|
||||
getMmapPosition()
|
||||
generates (Result retval, MmapPosition position);
|
||||
|
||||
/**
|
||||
* Called by the framework to deinitialize the stream and free up
|
||||
* all the currently allocated resources. It is recommended to close
|
||||
* the stream on the client side as soon as it is becomes unused.
|
||||
*
|
||||
* @return retval OK in case the success.
|
||||
* NOT_SUPPORTED if called on IStream instead of input or
|
||||
* output stream interface.
|
||||
* INVALID_STATE if the stream was already closed.
|
||||
*/
|
||||
close() generates (Result retval);
|
||||
};
|
151
android/hardware/interfaces/audio/2.0/IStreamIn.hal
Normal file
151
android/hardware/interfaces/audio/2.0/IStreamIn.hal
Normal file
|
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
import android.hardware.audio.common@2.0;
|
||||
import IStream;
|
||||
|
||||
interface IStreamIn extends IStream {
|
||||
typedef android.hardware.audio@2.0::Result Result;
|
||||
|
||||
/**
|
||||
* Returns the source descriptor of the input stream. Calling this method is
|
||||
* equivalent to getting AUDIO_PARAMETER_STREAM_INPUT_SOURCE on the legacy
|
||||
* HAL.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return source audio source.
|
||||
*/
|
||||
getAudioSource() generates (Result retval, AudioSource source);
|
||||
|
||||
/**
|
||||
* Set the input gain for the audio driver.
|
||||
*
|
||||
* @param gain 1.0f is unity, 0.0f is zero.
|
||||
* @result retval operation completion status.
|
||||
*/
|
||||
setGain(float gain) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Commands that can be executed on the driver reader thread.
|
||||
*/
|
||||
enum ReadCommand : int32_t {
|
||||
READ,
|
||||
GET_CAPTURE_POSITION
|
||||
};
|
||||
|
||||
/**
|
||||
* Data structure passed to the driver for executing commands
|
||||
* on the driver reader thread.
|
||||
*/
|
||||
struct ReadParameters {
|
||||
ReadCommand command; // discriminator
|
||||
union Params {
|
||||
uint64_t read; // READ command, amount of bytes to read, >= 0.
|
||||
// No parameters for GET_CAPTURE_POSITION.
|
||||
} params;
|
||||
};
|
||||
|
||||
/**
|
||||
* Data structure passed back to the client via status message queue
|
||||
* of 'read' operation.
|
||||
*
|
||||
* Possible values of 'retval' field:
|
||||
* - OK, read operation was successful;
|
||||
* - INVALID_ARGUMENTS, stream was not configured properly;
|
||||
* - INVALID_STATE, stream is in a state that doesn't allow reads.
|
||||
*/
|
||||
struct ReadStatus {
|
||||
Result retval;
|
||||
ReadCommand replyTo; // discriminator
|
||||
union Reply {
|
||||
uint64_t read; // READ command, amount of bytes read, >= 0.
|
||||
struct CapturePosition { // same as generated by getCapturePosition.
|
||||
uint64_t frames;
|
||||
uint64_t time;
|
||||
} capturePosition;
|
||||
} reply;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set up required transports for receiving audio buffers from the driver.
|
||||
*
|
||||
* The transport consists of three message queues:
|
||||
* -- command queue is used to instruct the reader thread what operation
|
||||
* to perform;
|
||||
* -- data queue is used for passing audio data from the driver
|
||||
* to the client;
|
||||
* -- status queue is used for reporting operation status
|
||||
* (e.g. amount of bytes actually read or error code).
|
||||
*
|
||||
* The driver operates on a dedicated thread. The client must ensure that
|
||||
* the thread is given an appropriate priority and assigned to correct
|
||||
* scheduler and cgroup. For this purpose, the method returns identifiers
|
||||
* of the driver thread.
|
||||
*
|
||||
* @param frameSize the size of a single frame, in bytes.
|
||||
* @param framesCount the number of frames in a buffer.
|
||||
* @param threadPriority priority of the driver thread.
|
||||
* @return retval OK if both message queues were created successfully.
|
||||
* INVALID_STATE if the method was already called.
|
||||
* INVALID_ARGUMENTS if there was a problem setting up
|
||||
* the queues.
|
||||
* @return commandMQ a message queue used for passing commands.
|
||||
* @return dataMQ a message queue used for passing audio data in the format
|
||||
* specified at the stream opening.
|
||||
* @return statusMQ a message queue used for passing status from the driver
|
||||
* using ReadStatus structures.
|
||||
* @return threadInfo identifiers of the driver's dedicated thread.
|
||||
*/
|
||||
prepareForReading(uint32_t frameSize, uint32_t framesCount)
|
||||
generates (
|
||||
Result retval,
|
||||
fmq_sync<ReadParameters> commandMQ,
|
||||
fmq_sync<uint8_t> dataMQ,
|
||||
fmq_sync<ReadStatus> statusMQ,
|
||||
ThreadInfo threadInfo);
|
||||
|
||||
/**
|
||||
* Return the amount of input frames lost in the audio driver since the last
|
||||
* call of this function.
|
||||
*
|
||||
* Audio driver is expected to reset the value to 0 and restart counting
|
||||
* upon returning the current value by this function call. Such loss
|
||||
* typically occurs when the user space process is blocked longer than the
|
||||
* capacity of audio driver buffers.
|
||||
*
|
||||
* @return framesLost the number of input audio frames lost.
|
||||
*/
|
||||
getInputFramesLost() generates (uint32_t framesLost);
|
||||
|
||||
/**
|
||||
* Return a recent count of the number of audio frames received and the
|
||||
* clock time associated with that frame count.
|
||||
*
|
||||
* @return retval INVALID_STATE if the device is not ready/available,
|
||||
* NOT_SUPPORTED if the command is not supported,
|
||||
* OK otherwise.
|
||||
* @return frames the total frame count received. This must be as early in
|
||||
* the capture pipeline as possible. In general, frames
|
||||
* must be non-negative and must not go "backwards".
|
||||
* @return time is the clock monotonic time when frames was measured. In
|
||||
* general, time must be a positive quantity and must not
|
||||
* go "backwards".
|
||||
*/
|
||||
getCapturePosition()
|
||||
generates (Result retval, uint64_t frames, uint64_t time);
|
||||
};
|
253
android/hardware/interfaces/audio/2.0/IStreamOut.hal
Normal file
253
android/hardware/interfaces/audio/2.0/IStreamOut.hal
Normal file
|
@ -0,0 +1,253 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
import android.hardware.audio.common@2.0;
|
||||
import IStream;
|
||||
import IStreamOutCallback;
|
||||
|
||||
interface IStreamOut extends IStream {
|
||||
typedef android.hardware.audio@2.0::Result Result;
|
||||
|
||||
/**
|
||||
* Return the audio hardware driver estimated latency in milliseconds.
|
||||
*
|
||||
* @return latencyMs latency in milliseconds.
|
||||
*/
|
||||
getLatency() generates (uint32_t latencyMs);
|
||||
|
||||
/**
|
||||
* This method is used in situations where audio mixing is done in the
|
||||
* hardware. This method serves as a direct interface with hardware,
|
||||
* allowing to directly set the volume as apposed to via the framework.
|
||||
* This method might produce multiple PCM outputs or hardware accelerated
|
||||
* codecs, such as MP3 or AAC.
|
||||
*
|
||||
* @param left left channel attenuation, 1.0f is unity, 0.0f is zero.
|
||||
* @param right right channel attenuation, 1.0f is unity, 0.0f is zero.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setVolume(float left, float right) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Commands that can be executed on the driver writer thread.
|
||||
*/
|
||||
enum WriteCommand : int32_t {
|
||||
WRITE,
|
||||
GET_PRESENTATION_POSITION,
|
||||
GET_LATENCY
|
||||
};
|
||||
|
||||
/**
|
||||
* Data structure passed back to the client via status message queue
|
||||
* of 'write' operation.
|
||||
*
|
||||
* Possible values of 'retval' field:
|
||||
* - OK, write operation was successful;
|
||||
* - INVALID_ARGUMENTS, stream was not configured properly;
|
||||
* - INVALID_STATE, stream is in a state that doesn't allow writes;
|
||||
* - INVALID_OPERATION, retrieving presentation position isn't supported.
|
||||
*/
|
||||
struct WriteStatus {
|
||||
Result retval;
|
||||
WriteCommand replyTo; // discriminator
|
||||
union Reply {
|
||||
uint64_t written; // WRITE command, amount of bytes written, >= 0.
|
||||
struct PresentationPosition { // same as generated by
|
||||
uint64_t frames; // getPresentationPosition.
|
||||
TimeSpec timeStamp;
|
||||
} presentationPosition;
|
||||
uint32_t latencyMs; // Same as generated by getLatency.
|
||||
} reply;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set up required transports for passing audio buffers to the driver.
|
||||
*
|
||||
* The transport consists of three message queues:
|
||||
* -- command queue is used to instruct the writer thread what operation
|
||||
* to perform;
|
||||
* -- data queue is used for passing audio data from the client
|
||||
* to the driver;
|
||||
* -- status queue is used for reporting operation status
|
||||
* (e.g. amount of bytes actually written or error code).
|
||||
*
|
||||
* The driver operates on a dedicated thread. The client must ensure that
|
||||
* the thread is given an appropriate priority and assigned to correct
|
||||
* scheduler and cgroup. For this purpose, the method returns identifiers
|
||||
* of the driver thread.
|
||||
*
|
||||
* @param frameSize the size of a single frame, in bytes.
|
||||
* @param framesCount the number of frames in a buffer.
|
||||
* @return retval OK if both message queues were created successfully.
|
||||
* INVALID_STATE if the method was already called.
|
||||
* INVALID_ARGUMENTS if there was a problem setting up
|
||||
* the queues.
|
||||
* @return commandMQ a message queue used for passing commands.
|
||||
* @return dataMQ a message queue used for passing audio data in the format
|
||||
* specified at the stream opening.
|
||||
* @return statusMQ a message queue used for passing status from the driver
|
||||
* using WriteStatus structures.
|
||||
* @return threadInfo identifiers of the driver's dedicated thread.
|
||||
*/
|
||||
prepareForWriting(uint32_t frameSize, uint32_t framesCount)
|
||||
generates (
|
||||
Result retval,
|
||||
fmq_sync<WriteCommand> commandMQ,
|
||||
fmq_sync<uint8_t> dataMQ,
|
||||
fmq_sync<WriteStatus> statusMQ,
|
||||
ThreadInfo threadInfo);
|
||||
|
||||
/**
|
||||
* Return the number of audio frames written by the audio DSP to DAC since
|
||||
* the output has exited standby.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return dspFrames number of audio frames written.
|
||||
*/
|
||||
getRenderPosition() generates (Result retval, uint32_t dspFrames);
|
||||
|
||||
/**
|
||||
* Get the local time at which the next write to the audio driver will be
|
||||
* presented. The units are microseconds, where the epoch is decided by the
|
||||
* local audio HAL.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return timestampUs time of the next write.
|
||||
*/
|
||||
getNextWriteTimestamp() generates (Result retval, int64_t timestampUs);
|
||||
|
||||
/**
|
||||
* Set the callback interface for notifying completion of non-blocking
|
||||
* write and drain.
|
||||
*
|
||||
* Calling this function implies that all future 'write' and 'drain'
|
||||
* must be non-blocking and use the callback to signal completion.
|
||||
*
|
||||
* 'clearCallback' method needs to be called in order to release the local
|
||||
* callback proxy on the server side and thus dereference the callback
|
||||
* implementation on the client side.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
setCallback(IStreamOutCallback callback) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Clears the callback previously set via 'setCallback' method.
|
||||
*
|
||||
* Warning: failure to call this method results in callback implementation
|
||||
* on the client side being held until the HAL server termination.
|
||||
*
|
||||
* @return retval operation completion status: OK or NOT_SUPPORTED.
|
||||
*/
|
||||
clearCallback() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Returns whether HAL supports pausing and resuming of streams.
|
||||
*
|
||||
* @return supportsPause true if pausing is supported.
|
||||
* @return supportsResume true if resume is supported.
|
||||
*/
|
||||
supportsPauseAndResume()
|
||||
generates (bool supportsPause, bool supportsResume);
|
||||
|
||||
/**
|
||||
* Notifies to the audio driver to stop playback however the queued buffers
|
||||
* are retained by the hardware. Useful for implementing pause/resume. Empty
|
||||
* implementation if not supported however must be implemented for hardware
|
||||
* with non-trivial latency. In the pause state, some audio hardware may
|
||||
* still be using power. Client code may consider calling 'suspend' after a
|
||||
* timeout to prevent that excess power usage.
|
||||
*
|
||||
* Implementation of this function is mandatory for offloaded playback.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
pause() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Notifies to the audio driver to resume playback following a pause.
|
||||
* Returns error INVALID_STATE if called without matching pause.
|
||||
*
|
||||
* Implementation of this function is mandatory for offloaded playback.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
resume() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Returns whether HAL supports draining of streams.
|
||||
*
|
||||
* @return supports true if draining is supported.
|
||||
*/
|
||||
supportsDrain() generates (bool supports);
|
||||
|
||||
/**
|
||||
* Requests notification when data buffered by the driver/hardware has been
|
||||
* played. If 'setCallback' has previously been called to enable
|
||||
* non-blocking mode, then 'drain' must not block, instead it must return
|
||||
* quickly and completion of the drain is notified through the callback. If
|
||||
* 'setCallback' has not been called, then 'drain' must block until
|
||||
* completion.
|
||||
*
|
||||
* If 'type' is 'ALL', the drain completes when all previously written data
|
||||
* has been played.
|
||||
*
|
||||
* If 'type' is 'EARLY_NOTIFY', the drain completes shortly before all data
|
||||
* for the current track has played to allow time for the framework to
|
||||
* perform a gapless track switch.
|
||||
*
|
||||
* Drain must return immediately on 'stop' and 'flush' calls.
|
||||
*
|
||||
* Implementation of this function is mandatory for offloaded playback.
|
||||
*
|
||||
* @param type type of drain.
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
drain(AudioDrain type) generates (Result retval);
|
||||
|
||||
/**
|
||||
* Notifies to the audio driver to flush the queued data. Stream must
|
||||
* already be paused before calling 'flush'.
|
||||
*
|
||||
* Implementation of this function is mandatory for offloaded playback.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
*/
|
||||
flush() generates (Result retval);
|
||||
|
||||
/**
|
||||
* Return a recent count of the number of audio frames presented to an
|
||||
* external observer. This excludes frames which have been written but are
|
||||
* still in the pipeline. The count is not reset to zero when output enters
|
||||
* standby. Also returns the value of CLOCK_MONOTONIC as of this
|
||||
* presentation count. The returned count is expected to be 'recent', but
|
||||
* does not need to be the most recent possible value. However, the
|
||||
* associated time must correspond to whatever count is returned.
|
||||
*
|
||||
* Example: assume that N+M frames have been presented, where M is a 'small'
|
||||
* number. Then it is permissible to return N instead of N+M, and the
|
||||
* timestamp must correspond to N rather than N+M. The terms 'recent' and
|
||||
* 'small' are not defined. They reflect the quality of the implementation.
|
||||
*
|
||||
* @return retval operation completion status.
|
||||
* @return frames count of presented audio frames.
|
||||
* @return timeStamp associated clock time.
|
||||
*/
|
||||
getPresentationPosition()
|
||||
generates (Result retval, uint64_t frames, TimeSpec timeStamp);
|
||||
};
|
37
android/hardware/interfaces/audio/2.0/IStreamOutCallback.hal
Normal file
37
android/hardware/interfaces/audio/2.0/IStreamOutCallback.hal
Normal file
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.hardware.audio@2.0;
|
||||
|
||||
/**
|
||||
* Asynchronous write callback interface.
|
||||
*/
|
||||
interface IStreamOutCallback {
|
||||
/**
|
||||
* Non blocking write completed.
|
||||
*/
|
||||
oneway onWriteReady();
|
||||
|
||||
/**
|
||||
* Drain completed.
|
||||
*/
|
||||
oneway onDrainReady();
|
||||
|
||||
/**
|
||||
* Stream hit an error.
|
||||
*/
|
||||
oneway onError();
|
||||
};
|
|
@ -0,0 +1,531 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Copyright (C) 2017 The Android Open Source Project
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<!-- TODO: define a targetNamespace. Note that it will break retrocompatibility -->
|
||||
<xs:schema version="2.0"
|
||||
elementFormDefault="qualified"
|
||||
attributeFormDefault="unqualified"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
<!-- List the config versions supported by audio policy. -->
|
||||
<xs:simpleType name="version">
|
||||
<xs:restriction base="xs:decimal">
|
||||
<xs:enumeration value="1.0"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:simpleType name="halVersion">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
Version of the interface the hal implements.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
<xs:restriction base="xs:decimal">
|
||||
<!-- List of HAL versions supported by the framework. -->
|
||||
<xs:enumeration value="2.0"/>
|
||||
<xs:enumeration value="3.0"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:element name="audioPolicyConfiguration">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="globalConfiguration" type="globalConfiguration"/>
|
||||
<xs:element name="modules" type="modules" maxOccurs="unbounded"/>
|
||||
<xs:element name="volumes" type="volumes" maxOccurs="unbounded"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="version" type="version"/>
|
||||
</xs:complexType>
|
||||
<xs:key name="moduleNameKey">
|
||||
<xs:selector xpath="modules/module"/>
|
||||
<xs:field xpath="@name"/>
|
||||
</xs:key>
|
||||
<xs:key name="devicePortNameGlobalKey">
|
||||
<xs:selector xpath="modules/module/devicePorts/devicePort"/>
|
||||
<xs:field xpath="@tagName"/>
|
||||
</xs:key>
|
||||
<xs:unique name="volumeTargetUniqueness">
|
||||
<xs:selector xpath="volumes/volume"/>
|
||||
<xs:field xpath="@stream"/>
|
||||
<xs:field xpath="@deviceCategory"/>
|
||||
</xs:unique>
|
||||
<xs:key name="volumeCurveNameKey">
|
||||
<xs:selector xpath="volumes/reference"/>
|
||||
<xs:field xpath="@name"/>
|
||||
</xs:key>
|
||||
<xs:keyref name="volumeCurveRef" refer="volumeCurveNameKey">
|
||||
<xs:selector xpath="volumes/volume"/>
|
||||
<xs:field xpath="@ref"/>
|
||||
</xs:keyref>
|
||||
</xs:element>
|
||||
<xs:complexType name="globalConfiguration">
|
||||
<xs:attribute name="speaker_drc_enabled" type="xs:boolean" use="required"/>
|
||||
</xs:complexType>
|
||||
<!-- Enum values of IDevicesFactory::Device
|
||||
TODO: generate from hidl to avoid manual sync. -->
|
||||
<xs:simpleType name="halName">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="primary"/>
|
||||
<xs:enumeration value="a2dp"/>
|
||||
<xs:enumeration value="usb"/>
|
||||
<xs:enumeration value="r_submix"/>
|
||||
<xs:enumeration value="codec_offload"/>
|
||||
<xs:enumeration value="stub"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:complexType name="modules">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
There should be one section per audio HW module present on the platform.
|
||||
Each <module/> contains two mandatory tags: “halVersion” and “name”.
|
||||
The module "name" is the same as in previous .conf file.
|
||||
Each module must contain the following sections:
|
||||
- <devicePorts/>: a list of device descriptors for all
|
||||
input and output devices accessible via this module.
|
||||
This contains both permanently attached devices and removable devices.
|
||||
- <mixPorts/>: listing all output and input streams exposed by the audio HAL
|
||||
- <routes/>: list of possible connections between input
|
||||
and output devices or between stream and devices.
|
||||
A <route/> is defined by a set of 3 attributes:
|
||||
-"type": mux|mix means all sources are mutual exclusive (mux) or can be mixed (mix)
|
||||
-"sink": the sink involved in this route
|
||||
-"sources": all the sources than can be connected to the sink via this route
|
||||
- <attachedDevices/>: permanently attached devices.
|
||||
The attachedDevices section is a list of device names.
|
||||
Their names correspond to device names defined in the "devicePorts" section.
|
||||
- <defaultOutputDevice/> is the device to be used when no policy rule applies;
a sketch of a full <module/> section follows this annotation.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
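<!-- Illustrative sketch only, not part of the schema: a minimal <module> section laid
     out as described in the annotation above. The port and route names ("Speaker",
     "Built-in Mic", "primary output") are hypothetical placeholders, not values
     mandated by this schema.
<module name="primary" halVersion="3.0">
    <attachedDevices>
        <item>Speaker</item>
        <item>Built-in Mic</item>
    </attachedDevices>
    <defaultOutputDevice>Speaker</defaultOutputDevice>
    <mixPorts>
        <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
            <profile format="AUDIO_FORMAT_PCM_16_BIT"
                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
        </mixPort>
    </mixPorts>
    <devicePorts>
        <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink"/>
        <devicePort tagName="Built-in Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source"/>
    </devicePorts>
    <routes>
        <route type="mix" sink="Speaker" sources="primary output"/>
    </routes>
</module>
-->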
|
||||
<xs:sequence>
|
||||
<xs:element name="module" maxOccurs="unbounded">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="attachedDevices" type="attachedDevices" minOccurs="0">
|
||||
<xs:unique name="attachedDevicesUniqueness">
|
||||
<xs:selector xpath="item"/>
|
||||
<xs:field xpath="."/>
|
||||
</xs:unique>
|
||||
</xs:element>
|
||||
<xs:element name="defaultOutputDevice" type="xs:token" minOccurs="0"/>
|
||||
<xs:element name="mixPorts" type="mixPorts" minOccurs="0"/>
|
||||
<xs:element name="devicePorts" type="devicePorts" minOccurs="0"/>
|
||||
<xs:element name="routes" type="routes" minOccurs="0"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="name" type="halName" use="required"/>
|
||||
<xs:attribute name="halVersion" type="halVersion" use="required"/>
|
||||
</xs:complexType>
|
||||
<xs:unique name="mixPortNameUniqueness">
|
||||
<xs:selector xpath="mixPorts/mixPort"/>
|
||||
<xs:field xpath="@name"/>
|
||||
</xs:unique>
|
||||
<!-- Although this key constraint is redundant with devicePortNameGlobalKey,
|
||||
the set is used to constrain defaultOutputDevice and attachedDevice
|
||||
to reference a devicePort of the same module. -->
|
||||
<xs:key name="devicePortNameKey">
|
||||
<xs:selector xpath="devicePorts/devicePort"/>
|
||||
<xs:field xpath="@tagName"/>
|
||||
</xs:key>
|
||||
<xs:keyref name="defaultOutputDeviceRef" refer="devicePortNameKey">
|
||||
<xs:selector xpath="defaultOutputDevice"/>
|
||||
<xs:field xpath="."/>
|
||||
</xs:keyref>
|
||||
<xs:keyref name="attachedDeviceRef" refer="devicePortNameKey">
|
||||
<xs:selector xpath="attachedDevices/item"/>
|
||||
<xs:field xpath="."/>
|
||||
</xs:keyref>
|
||||
<!-- The following 3 constraints try to make sure each sink port
|
||||
is referenced in one and only one route. -->
|
||||
<xs:key name="routeSinkKey">
|
||||
<!-- predicate [@type='sink'] does not work in xsd 1.0 -->
|
||||
<xs:selector xpath="devicePorts/devicePort|mixPorts/mixPort"/>
|
||||
<xs:field xpath="@tagName|@name"/>
|
||||
</xs:key>
|
||||
<xs:keyref name="routeSinkRef" refer="routeSinkKey">
|
||||
<xs:selector xpath="routes/route"/>
|
||||
<xs:field xpath="@sink"/>
|
||||
</xs:keyref>
|
||||
<xs:unique name="routeUniqueness">
|
||||
<xs:selector xpath="routes/route"/>
|
||||
<xs:field xpath="@sink"/>
|
||||
</xs:unique>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="attachedDevices">
|
||||
<xs:sequence>
|
||||
<xs:element name="item" type="xs:token" minOccurs="0" maxOccurs="unbounded"/>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<!-- TODO: separate values by space for better xsd validations. -->
|
||||
<xs:simpleType name="audioInOutFlags">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
"|" separated list of audio_output_flags_t or audio_input_flags_t.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:pattern value="|[_A-Z]+(\|[_A-Z]+)*"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
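<!-- Illustrative sketch only, not part of the schema: the flags attribute of a mixPort
     (defined further below) takes this "|" separated form. The mixPort name is a
     hypothetical placeholder.
<mixPort name="fast output" role="source"
         flags="AUDIO_OUTPUT_FLAG_FAST|AUDIO_OUTPUT_FLAG_RAW"/>
-->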
|
||||
<xs:simpleType name="role">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="sink"/>
|
||||
<xs:enumeration value="source"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:complexType name="mixPorts">
|
||||
<xs:sequence>
|
||||
<xs:element name="mixPort" minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
|
||||
<xs:element name="gains" type="gains" minOccurs="0"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||
<xs:attribute name="role" type="role" use="required"/>
|
||||
<xs:attribute name="flags" type="audioInOutFlags"/>
|
||||
</xs:complexType>
|
||||
<xs:unique name="mixPortProfileUniqueness">
|
||||
<xs:selector xpath="profile"/>
|
||||
<xs:field xpath="format"/>
|
||||
<xs:field xpath="samplingRate"/>
|
||||
<xs:field xpath="channelMasks"/>
|
||||
</xs:unique>
|
||||
<xs:unique name="mixPortGainUniqueness">
|
||||
<xs:selector xpath="gains/gain"/>
|
||||
<xs:field xpath="@name"/>
|
||||
</xs:unique>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<!-- Enum values of audio_device_t in audio.h
|
||||
TODO: generate from hidl to avoid manual sync.
|
||||
TODO: separate source and sink in the xml for better xsd validations. -->
|
||||
<xs:simpleType name="audioDevice">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="AUDIO_DEVICE_NONE"/>
|
||||
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_EARPIECE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER_SAFE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADPHONE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_ALL_SCO"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_ALL_A2DP"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_AUX_DIGITAL"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_HDMI"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_USB_ACCESSORY"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_USB_DEVICE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_ALL_USB"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_TELEPHONY_TX"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_LINE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_HDMI_ARC"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_SPDIF"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_FM"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_AUX_LINE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_IP"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_BUS"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_PROXY"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_USB_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_DEFAULT"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_OUT_STUB"/>
|
||||
|
||||
<!-- Due to the XML format, IN types cannot be separated from OUT types -->
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_COMMUNICATION"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_AMBIENT"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_BUILTIN_MIC"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_ALL_SCO"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_WIRED_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_AUX_DIGITAL"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_HDMI"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_TELEPHONY_RX"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_VOICE_CALL"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_BACK_MIC"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_REMOTE_SUBMIX"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_USB_ACCESSORY"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_USB_DEVICE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_ALL_USB"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_FM_TUNER"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_TV_TUNER"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_LINE"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_SPDIF"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_A2DP"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_LOOPBACK"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_IP"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_BUS"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_PROXY"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_USB_HEADSET"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_DEFAULT"/>
|
||||
<xs:enumeration value="AUDIO_DEVICE_IN_STUB"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<!-- Enum values of audio_format_t in audio.h
|
||||
TODO: generate from hidl to avoid manual sync. -->
|
||||
<xs:simpleType name="audioFormat">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="AUDIO_FORMAT_PCM_16_BIT" />
|
||||
<xs:enumeration value="AUDIO_FORMAT_PCM_8_BIT"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_PCM_32_BIT"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_PCM_8_24_BIT"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_PCM_FLOAT"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_PCM_24_BIT_PACKED"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_MP3"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AMR_NB"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AMR_WB"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_MAIN"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_LC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_SSR"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_LTP"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_HE_V1"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_SCALABLE"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ERLC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_LD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_HE_V2"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ELD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_MAIN"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_SSR"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LTP"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_HE_V1"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_SCALABLE"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_ERLC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_HE_V2"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_ELD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_VORBIS"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_HE_AAC_V1"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_HE_AAC_V2"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_OPUS"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AC3"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_E_AC3"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_DTS"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_DTS_HD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_IEC61937"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_DOLBY_TRUEHD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_EVRC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_EVRCB"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_EVRCWB"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_EVRCNW"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADIF"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_WMA"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_WMA_PRO"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AMR_WB_PLUS"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_MP2"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_QCELP"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_DSD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_FLAC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_ALAC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_APE"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_SBC"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_APTX"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_APTX_HD"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_AC4"/>
|
||||
<xs:enumeration value="AUDIO_FORMAT_LDAC"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<!-- TODO: Change to a space separated list to xsd enforce correctness. -->
|
||||
<xs:simpleType name="samplingRates">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:pattern value="[0-9]+(,[0-9]+)*"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<!-- TODO: Change to a space separated list to xsd enforce correctness. -->
|
||||
<xs:simpleType name="channelMask">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
Comma (",") separated list of channel flags
|
||||
from audio_channel_mask_t.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:pattern value="[_A-Z][_A-Z0-9]*(,[_A-Z][_A-Z0-9]*)*"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
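<!-- Illustrative sketch only, not part of the schema: a profile combining the
     comma-separated samplingRates and channelMasks forms described above.
<profile format="AUDIO_FORMAT_PCM_16_BIT"
         samplingRates="44100,48000"
         channelMasks="AUDIO_CHANNEL_OUT_STEREO,AUDIO_CHANNEL_OUT_MONO"/>
-->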
|
||||
<xs:complexType name="profile">
|
||||
<xs:attribute name="name" type="xs:token" use="optional"/>
|
||||
<xs:attribute name="format" type="audioFormat" use="optional"/>
|
||||
<xs:attribute name="samplingRates" type="samplingRates" use="optional"/>
|
||||
<xs:attribute name="channelMasks" type="channelMask" use="optional"/>
|
||||
</xs:complexType>
|
||||
<xs:simpleType name="gainMode">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="AUDIO_GAIN_MODE_JOINT"/>
|
||||
<xs:enumeration value="AUDIO_GAIN_MODE_CHANNELS"/>
|
||||
<xs:enumeration value="AUDIO_GAIN_MODE_RAMP"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:complexType name="gains">
|
||||
<xs:sequence>
|
||||
<xs:element name="gain" minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:complexType>
|
||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||
<xs:attribute name="mode" type="gainMode" use="required"/>
|
||||
<xs:attribute name="channel_mask" type="channelMask" use="optional"/>
|
||||
<xs:attribute name="minValueMB" type="xs:int" use="optional"/>
|
||||
<xs:attribute name="maxValueMB" type="xs:int" use="optional"/>
|
||||
<xs:attribute name="defaultValueMB" type="xs:int" use="optional"/>
|
||||
<xs:attribute name="stepValueMB" type="xs:int" use="optional"/>
|
||||
<xs:attribute name="minRampMs" type="xs:int" use="optional"/>
|
||||
<xs:attribute name="maxRampMs" type="xs:int" use="optional"/>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="devicePorts">
|
||||
<xs:sequence>
|
||||
<xs:element name="devicePort" minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
|
||||
<xs:element name="gains" type="gains" minOccurs="0"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="tagName" type="xs:token" use="required"/>
|
||||
<xs:attribute name="type" type="audioDevice" use="required"/>
|
||||
<xs:attribute name="role" type="role" use="required"/>
|
||||
<xs:attribute name="address" type="xs:string" use="optional"/>
|
||||
</xs:complexType>
|
||||
<xs:unique name="devicePortProfileUniqueness">
|
||||
<xs:selector xpath="profile"/>
|
||||
<xs:field xpath="format"/>
|
||||
<xs:field xpath="samplingRate"/>
|
||||
<xs:field xpath="channelMasks"/>
|
||||
</xs:unique>
|
||||
<xs:unique name="devicePortGainUniqueness">
|
||||
<xs:selector xpath="gains/gain"/>
|
||||
<xs:field xpath="@name"/>
|
||||
</xs:unique>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<xs:simpleType name="mixType">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="mix"/>
|
||||
<xs:enumeration value="mux"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:complexType name="routes">
|
||||
<xs:sequence>
|
||||
<xs:element name="route" minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
List all available sources for a given sink.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
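<!-- Illustrative sketch only, not part of the schema: one route per sink, whose
     "sources" attribute lists every port that can reach that sink. The port names
     are hypothetical placeholders.
<route type="mix" sink="Speaker" sources="primary output,deep buffer"/>
-->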
|
||||
<xs:complexType>
|
||||
<xs:attribute name="type" type="mixType" use="required"/>
|
||||
<xs:attribute name="sink" type="xs:string" use="required"/>
|
||||
<xs:attribute name="sources" type="xs:string" use="required"/>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="volumes">
|
||||
<xs:sequence>
|
||||
<xs:element name="volume" type="volume" minOccurs="0" maxOccurs="unbounded"/>
|
||||
<xs:element name="reference" type="reference" minOccurs="0" maxOccurs="unbounded">
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<!-- TODO: Always require a ref for better xsd validations.
|
||||
Currently a volume could have no points nor ref
|
||||
as it can not be forbidden by xsd 1.0.-->
|
||||
<xs:simpleType name="volumePoint">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
Comma-separated pair of numbers.
|
||||
The first one is the framework level (between 0 and 100).
|
||||
The second one is the volume to send to the HAL.
|
||||
The framework will interpolate volumes not specified.
|
||||
There MUST be at least 2 points specified.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
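<!-- Illustrative sketch only, not part of the schema: index 0 maps to -9600 mB (-96 dB),
     index 50 to -2400 mB, index 100 to 0 mB; the framework interpolates the indices in
     between.
<point>0,-9600</point>
<point>50,-2400</point>
<point>100,0</point>
-->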
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:pattern value="([0-9]{1,2}|100),-?[0-9]+"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<!-- Enum values of audio_stream_type_t in audio-base.h
|
||||
TODO: generate from hidl to avoid manual sync. -->
|
||||
<xs:simpleType name="stream">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="AUDIO_STREAM_VOICE_CALL"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_SYSTEM"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_RING"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_MUSIC"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_ALARM"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_NOTIFICATION"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_BLUETOOTH_SCO"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_ENFORCED_AUDIBLE"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_DTMF"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_TTS"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_ACCESSIBILITY"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_REROUTING"/>
|
||||
<xs:enumeration value="AUDIO_STREAM_PATCH"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<!-- Enum values of device_category from Volume.h.
|
||||
TODO: generate from hidl to avoid manual sync. -->
|
||||
<xs:simpleType name="deviceCategory">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="DEVICE_CATEGORY_HEADSET"/>
|
||||
<xs:enumeration value="DEVICE_CATEGORY_SPEAKER"/>
|
||||
<xs:enumeration value="DEVICE_CATEGORY_EARPIECE"/>
|
||||
<xs:enumeration value="DEVICE_CATEGORY_EXT_MEDIA"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:complexType name="volume">
|
||||
<xs:annotation>
|
||||
<xs:documentation xml:lang="en">
|
||||
Volume section defines a volume curve for a given use case and device category.
|
||||
It contains a list of points of this curve expressing the attenuation in millibels
|
||||
for a given volume index from 0 to 100.
|
||||
<volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_SPEAKER">
|
||||
<point>0,-9600</point>
|
||||
<point>100,0</point>
|
||||
</volume>
|
||||
|
||||
It may also reference a reference/@name to avoid duplicating curves.
|
||||
<volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_SPEAKER"
|
||||
ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
|
||||
<reference name="DEFAULT_MEDIA_VOLUME_CURVE">
|
||||
<point>0,-9600</point>
|
||||
<point>100,0</point>
|
||||
</reference>
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
<xs:sequence>
|
||||
<xs:element name="point" type="volumePoint" minOccurs="0" maxOccurs="unbounded"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="stream" type="stream"/>
|
||||
<xs:attribute name="deviceCategory" type="deviceCategory"/>
|
||||
<xs:attribute name="ref" type="xs:token" use="optional"/>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="reference">
|
||||
<xs:sequence>
|
||||
<xs:element name="point" type="volumePoint" minOccurs="2" maxOccurs="unbounded"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||
</xs:complexType>
|
||||
</xs:schema>
|
87
android/hardware/interfaces/audio/2.0/default/Android.mk
Normal file
87
android/hardware/interfaces/audio/2.0/default/Android.mk
Normal file
|
@ -0,0 +1,87 @@
|
|||
#
|
||||
# Copyright (C) 2016 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
LOCAL_MODULE := android.hardware.audio@2.0-impl
|
||||
LOCAL_MODULE_RELATIVE_PATH := hw
|
||||
LOCAL_PROPRIETARY_MODULE := true
|
||||
LOCAL_SRC_FILES := \
|
||||
Conversions.cpp \
|
||||
Device.cpp \
|
||||
DevicesFactory.cpp \
|
||||
ParametersUtil.cpp \
|
||||
PrimaryDevice.cpp \
|
||||
Stream.cpp \
|
||||
StreamIn.cpp \
|
||||
StreamOut.cpp \
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := \
|
||||
libbase \
|
||||
libcutils \
|
||||
libfmq \
|
||||
libhardware \
|
||||
libhidlbase \
|
||||
libhidltransport \
|
||||
liblog \
|
||||
libutils \
|
||||
android.hardware.audio@2.0 \
|
||||
android.hardware.audio.common@2.0 \
|
||||
android.hardware.audio.common@2.0-util \
|
||||
|
||||
LOCAL_HEADER_LIBRARIES := \
|
||||
libaudioclient_headers \
|
||||
libaudio_system_headers \
|
||||
libhardware_headers \
|
||||
libmedia_headers \
|
||||
|
||||
LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
|
||||
#
|
||||
# Service
|
||||
#
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
LOCAL_MODULE := android.hardware.audio@2.0-service
|
||||
LOCAL_INIT_RC := android.hardware.audio@2.0-service.rc
|
||||
LOCAL_MODULE_RELATIVE_PATH := hw
|
||||
LOCAL_PROPRIETARY_MODULE := true
|
||||
LOCAL_SRC_FILES := \
|
||||
service.cpp
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := \
|
||||
libhidlbase \
|
||||
libhidltransport \
|
||||
liblog \
|
||||
libutils \
|
||||
libhardware \
|
||||
android.hardware.audio@2.0 \
|
||||
android.hardware.audio.common@2.0 \
|
||||
android.hardware.audio.effect@2.0 \
|
||||
android.hardware.soundtrigger@2.0 \
|
||||
android.hardware.broadcastradio@1.0 \
|
||||
android.hardware.broadcastradio@1.1
|
||||
|
||||
ifeq ($(strip $(AUDIOSERVER_MULTILIB)),)
|
||||
LOCAL_MULTILIB := 32
|
||||
else
|
||||
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
|
||||
endif
|
||||
|
||||
include $(BUILD_EXECUTABLE)
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
#include "Conversions.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
std::string deviceAddressToHal(const DeviceAddress& address) {
|
||||
// HAL assumes that the address is NUL-terminated.
|
||||
char halAddress[AUDIO_DEVICE_MAX_ADDRESS_LEN];
|
||||
memset(halAddress, 0, sizeof(halAddress));
|
||||
uint32_t halDevice = static_cast<uint32_t>(address.device);
|
||||
const bool isInput = (halDevice & AUDIO_DEVICE_BIT_IN) != 0;
|
||||
if (isInput) halDevice &= ~AUDIO_DEVICE_BIT_IN;
|
||||
if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_A2DP) != 0)
|
||||
|| (isInput && (halDevice & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) != 0)) {
|
||||
snprintf(halAddress, sizeof(halAddress),
|
||||
"%02X:%02X:%02X:%02X:%02X:%02X",
|
||||
address.address.mac[0], address.address.mac[1], address.address.mac[2],
|
||||
address.address.mac[3], address.address.mac[4], address.address.mac[5]);
|
||||
} else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_IP) != 0)
|
||||
|| (isInput && (halDevice & AUDIO_DEVICE_IN_IP) != 0)) {
|
||||
snprintf(halAddress, sizeof(halAddress),
|
||||
"%d.%d.%d.%d",
|
||||
address.address.ipv4[0], address.address.ipv4[1],
|
||||
address.address.ipv4[2], address.address.ipv4[3]);
|
||||
} else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_USB) != 0)
|
||||
|| (isInput && (halDevice & AUDIO_DEVICE_IN_ALL_USB) != 0)) {
|
||||
snprintf(halAddress, sizeof(halAddress),
|
||||
"card=%d;device=%d",
|
||||
address.address.alsa.card, address.address.alsa.device);
|
||||
} else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_BUS) != 0)
|
||||
|| (isInput && (halDevice & AUDIO_DEVICE_IN_BUS) != 0)) {
|
||||
snprintf(halAddress, sizeof(halAddress),
|
||||
"%s", address.busAddress.c_str());
|
||||
} else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_REMOTE_SUBMIX) != 0)
|
||||
|| (isInput && (halDevice & AUDIO_DEVICE_IN_REMOTE_SUBMIX) != 0)) {
|
||||
snprintf(halAddress, sizeof(halAddress),
|
||||
"%s", address.rSubmixAddress.c_str());
|
||||
}
|
||||
return halAddress;
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
41
android/hardware/interfaces/audio/2.0/default/Conversions.h
Normal file
41
android/hardware/interfaces/audio/2.0/default/Conversions.h
Normal file
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef android_hardware_audio_V2_0_Conversions_H_
|
||||
#define android_hardware_audio_V2_0_Conversions_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <android/hardware/audio/2.0/types.h>
|
||||
#include <system/audio.h>
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::V2_0::DeviceAddress;
|
||||
|
||||
std::string deviceAddressToHal(const DeviceAddress& address);
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
||||
|
||||
#endif // android_hardware_audio_V2_0_Conversions_H_
|
319
android/hardware/interfaces/audio/2.0/default/Device.cpp
Normal file
319
android/hardware/interfaces/audio/2.0/default/Device.cpp
Normal file
|
@ -0,0 +1,319 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_TAG "DeviceHAL"
|
||||
//#define LOG_NDEBUG 0
|
||||
|
||||
#include <algorithm>
|
||||
#include <memory.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <android/log.h>
|
||||
|
||||
#include "Conversions.h"
|
||||
#include "Device.h"
|
||||
#include "HidlUtils.h"
|
||||
#include "StreamIn.h"
|
||||
#include "StreamOut.h"
|
||||
#include "Util.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
Device::Device(audio_hw_device_t* device)
|
||||
: mDevice(device) {
|
||||
}
|
||||
|
||||
Device::~Device() {
|
||||
int status = audio_hw_device_close(mDevice);
|
||||
ALOGW_IF(status, "Error closing audio hw device %p: %s", mDevice,
|
||||
strerror(-status));
|
||||
mDevice = nullptr;
|
||||
}
|
||||
|
||||
Result Device::analyzeStatus(const char* funcName, int status) {
|
||||
if (status != 0) {
|
||||
ALOGW("Device %p %s: %s", mDevice, funcName, strerror(-status));
|
||||
}
|
||||
switch (status) {
|
||||
case 0:
|
||||
return Result::OK;
|
||||
case -EINVAL:
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
case -ENODATA:
|
||||
return Result::INVALID_STATE;
|
||||
case -ENODEV:
|
||||
return Result::NOT_INITIALIZED;
|
||||
case -ENOSYS:
|
||||
return Result::NOT_SUPPORTED;
|
||||
default:
|
||||
return Result::INVALID_STATE;
|
||||
}
|
||||
}
|
||||
|
||||
void Device::closeInputStream(audio_stream_in_t* stream) {
|
||||
mDevice->close_input_stream(mDevice, stream);
|
||||
}
|
||||
|
||||
void Device::closeOutputStream(audio_stream_out_t* stream) {
|
||||
mDevice->close_output_stream(mDevice, stream);
|
||||
}
|
||||
|
||||
char* Device::halGetParameters(const char* keys) {
|
||||
return mDevice->get_parameters(mDevice, keys);
|
||||
}
|
||||
|
||||
int Device::halSetParameters(const char* keysAndValues) {
|
||||
return mDevice->set_parameters(mDevice, keysAndValues);
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IDevice follow.
|
||||
Return<Result> Device::initCheck() {
|
||||
return analyzeStatus("init_check", mDevice->init_check(mDevice));
|
||||
}
|
||||
|
||||
Return<Result> Device::setMasterVolume(float volume) {
|
||||
if (mDevice->set_master_volume == NULL) {
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
if (!isGainNormalized(volume)) {
|
||||
ALOGW("Can not set a master volume (%f) outside [0,1]", volume);
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
}
|
||||
return analyzeStatus("set_master_volume",
|
||||
mDevice->set_master_volume(mDevice, volume));
|
||||
}
|
||||
|
||||
Return<void> Device::getMasterVolume(getMasterVolume_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
float volume = 0;
|
||||
if (mDevice->get_master_volume != NULL) {
|
||||
retval = analyzeStatus("get_master_volume",
|
||||
mDevice->get_master_volume(mDevice, &volume));
|
||||
}
|
||||
_hidl_cb(retval, volume);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Device::setMicMute(bool mute) {
|
||||
return analyzeStatus("set_mic_mute", mDevice->set_mic_mute(mDevice, mute));
|
||||
}
|
||||
|
||||
Return<void> Device::getMicMute(getMicMute_cb _hidl_cb) {
|
||||
bool mute = false;
|
||||
Result retval =
|
||||
analyzeStatus("get_mic_mute", mDevice->get_mic_mute(mDevice, &mute));
|
||||
_hidl_cb(retval, mute);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Device::setMasterMute(bool mute) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
if (mDevice->set_master_mute != NULL) {
|
||||
retval = analyzeStatus("set_master_mute",
|
||||
mDevice->set_master_mute(mDevice, mute));
|
||||
}
|
||||
return retval;
|
||||
}
|
||||
|
||||
Return<void> Device::getMasterMute(getMasterMute_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
bool mute = false;
|
||||
if (mDevice->get_master_mute != NULL) {
|
||||
retval = analyzeStatus("get_master_mute",
|
||||
mDevice->get_master_mute(mDevice, &mute));
|
||||
}
|
||||
_hidl_cb(retval, mute);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<void> Device::getInputBufferSize(const AudioConfig& config,
|
||||
getInputBufferSize_cb _hidl_cb) {
|
||||
audio_config_t halConfig;
|
||||
HidlUtils::audioConfigToHal(config, &halConfig);
|
||||
size_t halBufferSize = mDevice->get_input_buffer_size(mDevice, &halConfig);
|
||||
Result retval(Result::INVALID_ARGUMENTS);
|
||||
uint64_t bufferSize = 0;
|
||||
if (halBufferSize != 0) {
|
||||
retval = Result::OK;
|
||||
bufferSize = halBufferSize;
|
||||
}
|
||||
_hidl_cb(retval, bufferSize);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<void> Device::openOutputStream(int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioOutputFlag flags,
|
||||
openOutputStream_cb _hidl_cb) {
|
||||
audio_config_t halConfig;
|
||||
HidlUtils::audioConfigToHal(config, &halConfig);
|
||||
audio_stream_out_t* halStream;
|
||||
ALOGV(
|
||||
"open_output_stream handle: %d devices: %x flags: %#x "
|
||||
"srate: %d format %#x channels %x address %s",
|
||||
ioHandle, static_cast<audio_devices_t>(device.device),
|
||||
static_cast<audio_output_flags_t>(flags), halConfig.sample_rate,
|
||||
halConfig.format, halConfig.channel_mask,
|
||||
deviceAddressToHal(device).c_str());
|
||||
int status = mDevice->open_output_stream(
|
||||
mDevice, ioHandle, static_cast<audio_devices_t>(device.device),
|
||||
static_cast<audio_output_flags_t>(flags), &halConfig, &halStream,
|
||||
deviceAddressToHal(device).c_str());
|
||||
ALOGV("open_output_stream status %d stream %p", status, halStream);
|
||||
sp<IStreamOut> streamOut;
|
||||
if (status == OK) {
|
||||
streamOut = new StreamOut(this, halStream);
|
||||
}
|
||||
AudioConfig suggestedConfig;
|
||||
HidlUtils::audioConfigFromHal(halConfig, &suggestedConfig);
|
||||
_hidl_cb(analyzeStatus("open_output_stream", status), streamOut,
|
||||
suggestedConfig);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<void> Device::openInputStream(int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioInputFlag flags, AudioSource source,
|
||||
openInputStream_cb _hidl_cb) {
|
||||
audio_config_t halConfig;
|
||||
HidlUtils::audioConfigToHal(config, &halConfig);
|
||||
audio_stream_in_t* halStream;
|
||||
ALOGV(
|
||||
"open_input_stream handle: %d devices: %x flags: %#x "
|
||||
"srate: %d format %#x channels %x address %s source %d",
|
||||
ioHandle, static_cast<audio_devices_t>(device.device),
|
||||
static_cast<audio_input_flags_t>(flags), halConfig.sample_rate,
|
||||
halConfig.format, halConfig.channel_mask,
|
||||
deviceAddressToHal(device).c_str(),
|
||||
static_cast<audio_source_t>(source));
|
||||
int status = mDevice->open_input_stream(
|
||||
mDevice, ioHandle, static_cast<audio_devices_t>(device.device),
|
||||
&halConfig, &halStream, static_cast<audio_input_flags_t>(flags),
|
||||
deviceAddressToHal(device).c_str(),
|
||||
static_cast<audio_source_t>(source));
|
||||
ALOGV("open_input_stream status %d stream %p", status, halStream);
|
||||
sp<IStreamIn> streamIn;
|
||||
if (status == OK) {
|
||||
streamIn = new StreamIn(this, halStream);
|
||||
}
|
||||
AudioConfig suggestedConfig;
|
||||
HidlUtils::audioConfigFromHal(halConfig, &suggestedConfig);
|
||||
_hidl_cb(analyzeStatus("open_input_stream", status), streamIn,
|
||||
suggestedConfig);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<bool> Device::supportsAudioPatches() {
|
||||
return version() >= AUDIO_DEVICE_API_VERSION_3_0;
|
||||
}
|
||||
|
||||
Return<void> Device::createAudioPatch(const hidl_vec<AudioPortConfig>& sources,
|
||||
const hidl_vec<AudioPortConfig>& sinks,
|
||||
createAudioPatch_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
AudioPatchHandle patch = 0;
|
||||
if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
|
||||
std::unique_ptr<audio_port_config[]> halSources(
|
||||
HidlUtils::audioPortConfigsToHal(sources));
|
||||
std::unique_ptr<audio_port_config[]> halSinks(
|
||||
HidlUtils::audioPortConfigsToHal(sinks));
|
||||
audio_patch_handle_t halPatch = AUDIO_PATCH_HANDLE_NONE;
|
||||
retval = analyzeStatus(
|
||||
"create_audio_patch",
|
||||
mDevice->create_audio_patch(mDevice, sources.size(), &halSources[0],
|
||||
sinks.size(), &halSinks[0], &halPatch));
|
||||
if (retval == Result::OK) {
|
||||
patch = static_cast<AudioPatchHandle>(halPatch);
|
||||
}
|
||||
}
|
||||
_hidl_cb(retval, patch);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Device::releaseAudioPatch(int32_t patch) {
|
||||
if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
|
||||
return analyzeStatus(
|
||||
"release_audio_patch",
|
||||
mDevice->release_audio_patch(
|
||||
mDevice, static_cast<audio_patch_handle_t>(patch)));
|
||||
}
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<void> Device::getAudioPort(const AudioPort& port,
|
||||
getAudioPort_cb _hidl_cb) {
|
||||
audio_port halPort;
|
||||
HidlUtils::audioPortToHal(port, &halPort);
|
||||
Result retval = analyzeStatus("get_audio_port",
|
||||
mDevice->get_audio_port(mDevice, &halPort));
|
||||
AudioPort resultPort = port;
|
||||
if (retval == Result::OK) {
|
||||
HidlUtils::audioPortFromHal(halPort, &resultPort);
|
||||
}
|
||||
_hidl_cb(retval, resultPort);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Device::setAudioPortConfig(const AudioPortConfig& config) {
|
||||
if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
|
||||
struct audio_port_config halPortConfig;
|
||||
HidlUtils::audioPortConfigToHal(config, &halPortConfig);
|
||||
return analyzeStatus(
|
||||
"set_audio_port_config",
|
||||
mDevice->set_audio_port_config(mDevice, &halPortConfig));
|
||||
}
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<AudioHwSync> Device::getHwAvSync() {
|
||||
int halHwAvSync;
|
||||
Result retval = getParam(AudioParameter::keyHwAvSync, &halHwAvSync);
|
||||
return retval == Result::OK ? halHwAvSync : AUDIO_HW_SYNC_INVALID;
|
||||
}
|
||||
|
||||
Return<Result> Device::setScreenState(bool turnedOn) {
|
||||
return setParam(AudioParameter::keyScreenState, turnedOn);
|
||||
}
|
||||
|
||||
Return<void> Device::getParameters(const hidl_vec<hidl_string>& keys,
|
||||
getParameters_cb _hidl_cb) {
|
||||
getParametersImpl(keys, _hidl_cb);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Device::setParameters(
|
||||
const hidl_vec<ParameterValue>& parameters) {
|
||||
return setParametersImpl(parameters);
|
||||
}
|
||||
|
||||
Return<void> Device::debugDump(const hidl_handle& fd) {
|
||||
if (fd.getNativeHandle() != nullptr && fd->numFds == 1) {
|
||||
analyzeStatus("dump", mDevice->dump(mDevice, fd->data[0]));
|
||||
}
|
||||
return Void();
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
123
android/hardware/interfaces/audio/2.0/default/Device.h
Normal file
123
android/hardware/interfaces/audio/2.0/default/Device.h
Normal file
|
@ -0,0 +1,123 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H
|
||||
#define ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <media/AudioParameter.h>
|
||||
#include <hardware/audio.h>
|
||||
|
||||
#include <android/hardware/audio/2.0/IDevice.h>
|
||||
#include <hidl/Status.h>
|
||||
|
||||
#include <hidl/MQDescriptor.h>
|
||||
|
||||
#include "ParametersUtil.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::common::V2_0::AudioConfig;
|
||||
using ::android::hardware::audio::common::V2_0::AudioHwSync;
|
||||
using ::android::hardware::audio::common::V2_0::AudioInputFlag;
|
||||
using ::android::hardware::audio::common::V2_0::AudioOutputFlag;
|
||||
using ::android::hardware::audio::common::V2_0::AudioPatchHandle;
|
||||
using ::android::hardware::audio::common::V2_0::AudioPort;
|
||||
using ::android::hardware::audio::common::V2_0::AudioPortConfig;
|
||||
using ::android::hardware::audio::common::V2_0::AudioSource;
|
||||
using ::android::hardware::audio::V2_0::DeviceAddress;
|
||||
using ::android::hardware::audio::V2_0::IDevice;
|
||||
using ::android::hardware::audio::V2_0::IStreamIn;
|
||||
using ::android::hardware::audio::V2_0::IStreamOut;
|
||||
using ::android::hardware::audio::V2_0::ParameterValue;
|
||||
using ::android::hardware::audio::V2_0::Result;
|
||||
using ::android::hardware::Return;
|
||||
using ::android::hardware::Void;
|
||||
using ::android::hardware::hidl_vec;
|
||||
using ::android::hardware::hidl_string;
|
||||
using ::android::sp;
|
||||
|
||||
struct Device : public IDevice, public ParametersUtil {
|
||||
explicit Device(audio_hw_device_t* device);
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IDevice follow.
|
||||
Return<Result> initCheck() override;
|
||||
Return<Result> setMasterVolume(float volume) override;
|
||||
Return<void> getMasterVolume(getMasterVolume_cb _hidl_cb) override;
|
||||
Return<Result> setMicMute(bool mute) override;
|
||||
Return<void> getMicMute(getMicMute_cb _hidl_cb) override;
|
||||
Return<Result> setMasterMute(bool mute) override;
|
||||
Return<void> getMasterMute(getMasterMute_cb _hidl_cb) override;
|
||||
Return<void> getInputBufferSize(
|
||||
const AudioConfig& config, getInputBufferSize_cb _hidl_cb) override;
|
||||
Return<void> openOutputStream(
|
||||
int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioOutputFlag flags,
|
||||
openOutputStream_cb _hidl_cb) override;
|
||||
Return<void> openInputStream(
|
||||
int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioInputFlag flags,
|
||||
AudioSource source,
|
||||
openInputStream_cb _hidl_cb) override;
|
||||
Return<bool> supportsAudioPatches() override;
|
||||
Return<void> createAudioPatch(
|
||||
const hidl_vec<AudioPortConfig>& sources,
|
||||
const hidl_vec<AudioPortConfig>& sinks,
|
||||
createAudioPatch_cb _hidl_cb) override;
|
||||
Return<Result> releaseAudioPatch(int32_t patch) override;
|
||||
Return<void> getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) override;
|
||||
Return<Result> setAudioPortConfig(const AudioPortConfig& config) override;
|
||||
Return<AudioHwSync> getHwAvSync() override;
|
||||
Return<Result> setScreenState(bool turnedOn) override;
|
||||
Return<void> getParameters(
|
||||
const hidl_vec<hidl_string>& keys, getParameters_cb _hidl_cb) override;
|
||||
Return<Result> setParameters(const hidl_vec<ParameterValue>& parameters) override;
|
||||
Return<void> debugDump(const hidl_handle& fd) override;
|
||||
|
||||
// Utility methods for extending interfaces.
|
||||
Result analyzeStatus(const char* funcName, int status);
|
||||
void closeInputStream(audio_stream_in_t* stream);
|
||||
void closeOutputStream(audio_stream_out_t* stream);
|
||||
audio_hw_device_t* device() const { return mDevice; }
|
||||
|
||||
private:
|
||||
audio_hw_device_t *mDevice;
|
||||
|
||||
virtual ~Device();
|
||||
|
||||
// Methods from ParametersUtil.
|
||||
char* halGetParameters(const char* keys) override;
|
||||
int halSetParameters(const char* keysAndValues) override;
|
||||
|
||||
uint32_t version() const { return mDevice->common.version; }
|
||||
};
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
||||
|
||||
#endif // ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H
|
108
android/hardware/interfaces/audio/2.0/default/DevicesFactory.cpp
Normal file
108
android/hardware/interfaces/audio/2.0/default/DevicesFactory.cpp
Normal file
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_TAG "DevicesFactoryHAL"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include <android/log.h>
|
||||
|
||||
#include "Device.h"
|
||||
#include "DevicesFactory.h"
|
||||
#include "PrimaryDevice.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
// static
|
||||
const char* DevicesFactory::deviceToString(IDevicesFactory::Device device) {
|
||||
switch (device) {
|
||||
case IDevicesFactory::Device::PRIMARY: return AUDIO_HARDWARE_MODULE_ID_PRIMARY;
|
||||
case IDevicesFactory::Device::A2DP: return AUDIO_HARDWARE_MODULE_ID_A2DP;
|
||||
case IDevicesFactory::Device::USB: return AUDIO_HARDWARE_MODULE_ID_USB;
|
||||
case IDevicesFactory::Device::R_SUBMIX: return AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX;
|
||||
case IDevicesFactory::Device::STUB: return AUDIO_HARDWARE_MODULE_ID_STUB;
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// static
|
||||
int DevicesFactory::loadAudioInterface(const char *if_name, audio_hw_device_t **dev)
|
||||
{
|
||||
const hw_module_t *mod;
|
||||
int rc;
|
||||
|
||||
rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
|
||||
if (rc) {
|
||||
ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__,
|
||||
AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
|
||||
goto out;
|
||||
}
|
||||
rc = audio_hw_device_open(mod, dev);
|
||||
if (rc) {
|
||||
ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__,
|
||||
AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
|
||||
goto out;
|
||||
}
|
||||
if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
|
||||
ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
|
||||
rc = -EINVAL;
|
||||
audio_hw_device_close(*dev);
|
||||
goto out;
|
||||
}
|
||||
return OK;
|
||||
|
||||
out:
|
||||
*dev = NULL;
|
||||
return rc;
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IDevicesFactory follow.
|
||||
Return<void> DevicesFactory::openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) {
|
||||
audio_hw_device_t *halDevice;
|
||||
Result retval(Result::INVALID_ARGUMENTS);
|
||||
sp<IDevice> result;
|
||||
const char* moduleName = deviceToString(device);
|
||||
if (moduleName != nullptr) {
|
||||
int halStatus = loadAudioInterface(moduleName, &halDevice);
|
||||
if (halStatus == OK) {
|
||||
if (device == IDevicesFactory::Device::PRIMARY) {
|
||||
result = new PrimaryDevice(halDevice);
|
||||
} else {
|
||||
result = new ::android::hardware::audio::V2_0::implementation::
|
||||
Device(halDevice);
|
||||
}
|
||||
retval = Result::OK;
|
||||
} else if (halStatus == -EINVAL) {
|
||||
retval = Result::NOT_INITIALIZED;
|
||||
}
|
||||
}
|
||||
_hidl_cb(retval, result);
|
||||
return Void();
|
||||
}
|
||||
|
||||
IDevicesFactory* HIDL_FETCH_IDevicesFactory(const char* /* name */) {
|
||||
return new DevicesFactory();
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
|
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H
|
||||
#define ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H
|
||||
|
||||
#include <hardware/audio.h>
|
||||
|
||||
#include <android/hardware/audio/2.0/IDevicesFactory.h>
|
||||
#include <hidl/Status.h>
|
||||
|
||||
#include <hidl/MQDescriptor.h>
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::V2_0::IDevice;
|
||||
using ::android::hardware::audio::V2_0::IDevicesFactory;
|
||||
using ::android::hardware::audio::V2_0::Result;
|
||||
using ::android::hardware::Return;
|
||||
using ::android::hardware::Void;
|
||||
using ::android::hardware::hidl_vec;
|
||||
using ::android::hardware::hidl_string;
|
||||
using ::android::sp;
|
||||
|
||||
struct DevicesFactory : public IDevicesFactory {
|
||||
// Methods from ::android::hardware::audio::V2_0::IDevicesFactory follow.
|
||||
Return<void> openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) override;
|
||||
|
||||
private:
|
||||
static const char* deviceToString(IDevicesFactory::Device device);
|
||||
static int loadAudioInterface(const char *if_name, audio_hw_device_t **dev);
|
||||
|
||||
};
|
||||
|
||||
extern "C" IDevicesFactory* HIDL_FETCH_IDevicesFactory(const char* name);
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
||||
|
||||
#endif // ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H
|
3
android/hardware/interfaces/audio/2.0/default/OWNERS
Normal file
3
android/hardware/interfaces/audio/2.0/default/OWNERS
Normal file
|
@ -0,0 +1,3 @@
|
|||
elaurent@google.com
|
||||
krocard@google.com
|
||||
mnaganov@google.com
|
156
android/hardware/interfaces/audio/2.0/default/ParametersUtil.cpp
Normal file
156
android/hardware/interfaces/audio/2.0/default/ParametersUtil.cpp
Normal file
|
@ -0,0 +1,156 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include "ParametersUtil.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
// Static free function rather than a private method, to avoid leaking the status_t dependency
|
||||
static Result getHalStatusToResult(status_t status) {
|
||||
switch (status) {
|
||||
case OK:
|
||||
return Result::OK;
|
||||
case BAD_VALUE: // Nothing was returned, probably because the HAL does
|
||||
// not handle it
|
||||
return Result::NOT_SUPPORTED;
|
||||
case INVALID_OPERATION: // Conversion from string to the requested type
|
||||
// failed
|
||||
return Result::INVALID_ARGUMENTS;
|
        default:  // Should not happen
            ALOGW("Unexpected status returned by getParam: %u", status);
            return Result::INVALID_ARGUMENTS;
    }
}

Result ParametersUtil::getParam(const char* name, bool* value) {
    String8 halValue;
    Result retval = getParam(name, &halValue);
    *value = false;
    if (retval == Result::OK) {
        if (halValue.empty()) {
            return Result::NOT_SUPPORTED;
        }
        *value = !(halValue == AudioParameter::valueOff);
    }
    return retval;
}

Result ParametersUtil::getParam(const char* name, int* value) {
    const String8 halName(name);
    AudioParameter keys;
    keys.addKey(halName);
    std::unique_ptr<AudioParameter> params = getParams(keys);
    return getHalStatusToResult(params->getInt(halName, *value));
}

Result ParametersUtil::getParam(const char* name, String8* value) {
    const String8 halName(name);
    AudioParameter keys;
    keys.addKey(halName);
    std::unique_ptr<AudioParameter> params = getParams(keys);
    return getHalStatusToResult(params->get(halName, *value));
}

void ParametersUtil::getParametersImpl(
        const hidl_vec<hidl_string>& keys,
        std::function<void(Result retval, const hidl_vec<ParameterValue>& parameters)> cb) {
    AudioParameter halKeys;
    for (size_t i = 0; i < keys.size(); ++i) {
        halKeys.addKey(String8(keys[i].c_str()));
    }
    std::unique_ptr<AudioParameter> halValues = getParams(halKeys);
    Result retval =
            (keys.size() == 0 || halValues->size() != 0) ? Result::OK : Result::NOT_SUPPORTED;
    hidl_vec<ParameterValue> result;
    result.resize(halValues->size());
    String8 halKey, halValue;
    for (size_t i = 0; i < halValues->size(); ++i) {
        status_t status = halValues->getAt(i, halKey, halValue);
        if (status != OK) {
            result.resize(0);
            retval = getHalStatusToResult(status);
            break;
        }
        result[i].key = halKey.string();
        result[i].value = halValue.string();
    }
    cb(retval, result);
}

std::unique_ptr<AudioParameter> ParametersUtil::getParams(const AudioParameter& keys) {
    String8 paramsAndValues;
    char* halValues = halGetParameters(keys.keysToString().string());
    if (halValues != NULL) {
        paramsAndValues.setTo(halValues);
        free(halValues);
    } else {
        paramsAndValues.clear();
    }
    return std::unique_ptr<AudioParameter>(new AudioParameter(paramsAndValues));
}

Result ParametersUtil::setParam(const char* name, bool value) {
    AudioParameter param;
    param.add(String8(name),
              String8(value ? AudioParameter::valueOn : AudioParameter::valueOff));
    return setParams(param);
}

Result ParametersUtil::setParam(const char* name, int value) {
    AudioParameter param;
    param.addInt(String8(name), value);
    return setParams(param);
}

Result ParametersUtil::setParam(const char* name, const char* value) {
    AudioParameter param;
    param.add(String8(name), String8(value));
    return setParams(param);
}

Result ParametersUtil::setParametersImpl(const hidl_vec<ParameterValue>& parameters) {
    AudioParameter params;
    for (size_t i = 0; i < parameters.size(); ++i) {
        params.add(String8(parameters[i].key.c_str()),
                   String8(parameters[i].value.c_str()));
    }
    return setParams(params);
}

Result ParametersUtil::setParams(const AudioParameter& param) {
    int halStatus = halSetParameters(param.toString().string());
    if (halStatus == OK)
        return Result::OK;
    else if (halStatus == -ENOSYS)
        return Result::INVALID_STATE;
    else
        return Result::INVALID_ARGUMENTS;
}

}  // namespace implementation
}  // namespace V2_0
}  // namespace audio
}  // namespace hardware
}  // namespace android
|
@ -0,0 +1,66 @@
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef android_hardware_audio_V2_0_ParametersUtil_H_
#define android_hardware_audio_V2_0_ParametersUtil_H_

#include <functional>
#include <memory>

#include <android/hardware/audio/2.0/types.h>
#include <hidl/HidlSupport.h>
#include <media/AudioParameter.h>

namespace android {
namespace hardware {
namespace audio {
namespace V2_0 {
namespace implementation {

using ::android::hardware::audio::V2_0::ParameterValue;
using ::android::hardware::audio::V2_0::Result;
using ::android::hardware::hidl_string;
using ::android::hardware::hidl_vec;

class ParametersUtil {
  public:
    Result getParam(const char* name, bool* value);
    Result getParam(const char* name, int* value);
    Result getParam(const char* name, String8* value);
    void getParametersImpl(
            const hidl_vec<hidl_string>& keys,
            std::function<void(Result retval, const hidl_vec<ParameterValue>& parameters)> cb);
    std::unique_ptr<AudioParameter> getParams(const AudioParameter& keys);
    Result setParam(const char* name, bool value);
    Result setParam(const char* name, int value);
    Result setParam(const char* name, const char* value);
    Result setParametersImpl(const hidl_vec<ParameterValue>& parameters);
    Result setParams(const AudioParameter& param);

  protected:
    virtual ~ParametersUtil() {}

    virtual char* halGetParameters(const char* keys) = 0;
    virtual int halSetParameters(const char* keysAndValues) = 0;
};

}  // namespace implementation
}  // namespace V2_0
}  // namespace audio
}  // namespace hardware
}  // namespace android

#endif  // android_hardware_audio_V2_0_ParametersUtil_H_
|
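ParametersUtil funnels every HIDL get/set through the two pure-virtual hooks declared above, using the legacy AudioParameter "key1=value1;key2=value2" string format on the wire. A minimal sketch of a subclass bridging those hooks to a legacy HAL device follows; the class name DeviceParameters is illustrative only, while Stream.cpp later in this change wires the same hooks to audio_stream_t in exactly this way.

// Illustrative sketch only (not part of this change): bridging ParametersUtil's
// pure-virtual hooks to a legacy audio_hw_device_t.
#include <hardware/audio.h>
#include "ParametersUtil.h"

class DeviceParameters
        : public android::hardware::audio::V2_0::implementation::ParametersUtil {
  public:
    explicit DeviceParameters(audio_hw_device_t* device) : mDevice(device) {}

  protected:
    // ParametersUtil::getParams() copies and then free()s the returned buffer,
    // so the HAL's malloc'ed string can be handed back as-is.
    char* halGetParameters(const char* keys) override {
        return mDevice->get_parameters(mDevice, keys);
    }
    int halSetParameters(const char* keysAndValues) override {
        return mDevice->set_parameters(mDevice, keysAndValues);
    }

  private:
    audio_hw_device_t* mDevice;
};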
210
android/hardware/interfaces/audio/2.0/default/PrimaryDevice.cpp
Normal file
|
@ -0,0 +1,210 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_TAG "PrimaryDeviceHAL"
|
||||
|
||||
#include "PrimaryDevice.h"
|
||||
#include "Util.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
PrimaryDevice::PrimaryDevice(audio_hw_device_t* device)
|
||||
: mDevice(new Device(device)) {
|
||||
}
|
||||
|
||||
PrimaryDevice::~PrimaryDevice() {}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IDevice follow.
|
||||
Return<Result> PrimaryDevice::initCheck() {
|
||||
return mDevice->initCheck();
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setMasterVolume(float volume) {
|
||||
return mDevice->setMasterVolume(volume);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getMasterVolume(getMasterVolume_cb _hidl_cb) {
|
||||
return mDevice->getMasterVolume(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setMicMute(bool mute) {
|
||||
return mDevice->setMicMute(mute);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getMicMute(getMicMute_cb _hidl_cb) {
|
||||
return mDevice->getMicMute(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setMasterMute(bool mute) {
|
||||
return mDevice->setMasterMute(mute);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getMasterMute(getMasterMute_cb _hidl_cb) {
|
||||
return mDevice->getMasterMute(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getInputBufferSize(const AudioConfig& config,
|
||||
getInputBufferSize_cb _hidl_cb) {
|
||||
return mDevice->getInputBufferSize(config, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::openOutputStream(int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioOutputFlag flags,
|
||||
openOutputStream_cb _hidl_cb) {
|
||||
return mDevice->openOutputStream(ioHandle, device, config, flags, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::openInputStream(
|
||||
int32_t ioHandle, const DeviceAddress& device, const AudioConfig& config,
|
||||
AudioInputFlag flags, AudioSource source, openInputStream_cb _hidl_cb) {
|
||||
return mDevice->openInputStream(ioHandle, device, config, flags, source,
|
||||
_hidl_cb);
|
||||
}
|
||||
|
||||
Return<bool> PrimaryDevice::supportsAudioPatches() {
|
||||
return mDevice->supportsAudioPatches();
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::createAudioPatch(
|
||||
const hidl_vec<AudioPortConfig>& sources,
|
||||
const hidl_vec<AudioPortConfig>& sinks, createAudioPatch_cb _hidl_cb) {
|
||||
return mDevice->createAudioPatch(sources, sinks, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::releaseAudioPatch(int32_t patch) {
|
||||
return mDevice->releaseAudioPatch(patch);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getAudioPort(const AudioPort& port,
|
||||
getAudioPort_cb _hidl_cb) {
|
||||
return mDevice->getAudioPort(port, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setAudioPortConfig(
|
||||
const AudioPortConfig& config) {
|
||||
return mDevice->setAudioPortConfig(config);
|
||||
}
|
||||
|
||||
Return<AudioHwSync> PrimaryDevice::getHwAvSync() {
|
||||
return mDevice->getHwAvSync();
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setScreenState(bool turnedOn) {
|
||||
return mDevice->setScreenState(turnedOn);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getParameters(const hidl_vec<hidl_string>& keys,
|
||||
getParameters_cb _hidl_cb) {
|
||||
return mDevice->getParameters(keys, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setParameters(
|
||||
const hidl_vec<ParameterValue>& parameters) {
|
||||
return mDevice->setParameters(parameters);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::debugDump(const hidl_handle& fd) {
|
||||
return mDevice->debugDump(fd);
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IPrimaryDevice follow.
|
||||
Return<Result> PrimaryDevice::setVoiceVolume(float volume) {
|
||||
if (!isGainNormalized(volume)) {
|
||||
ALOGW("Can not set a voice volume (%f) outside [0,1]", volume);
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
}
|
||||
return mDevice->analyzeStatus(
|
||||
"set_voice_volume",
|
||||
mDevice->device()->set_voice_volume(mDevice->device(), volume));
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setMode(AudioMode mode) {
|
||||
// INVALID, CURRENT, CNT, MAX are reserved for internal use.
|
||||
// TODO: remove the values from the HIDL interface
|
||||
switch (mode) {
|
||||
case AudioMode::NORMAL:
|
||||
case AudioMode::RINGTONE:
|
||||
case AudioMode::IN_CALL:
|
||||
case AudioMode::IN_COMMUNICATION:
|
||||
break; // Valid values
|
||||
default:
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
};
|
||||
|
||||
return mDevice->analyzeStatus(
|
||||
"set_mode", mDevice->device()->set_mode(
|
||||
mDevice->device(), static_cast<audio_mode_t>(mode)));
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getBtScoNrecEnabled(
|
||||
getBtScoNrecEnabled_cb _hidl_cb) {
|
||||
bool enabled;
|
||||
Result retval = mDevice->getParam(AudioParameter::keyBtNrec, &enabled);
|
||||
_hidl_cb(retval, enabled);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setBtScoNrecEnabled(bool enabled) {
|
||||
return mDevice->setParam(AudioParameter::keyBtNrec, enabled);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getBtScoWidebandEnabled(
|
||||
getBtScoWidebandEnabled_cb _hidl_cb) {
|
||||
bool enabled;
|
||||
Result retval = mDevice->getParam(AUDIO_PARAMETER_KEY_BT_SCO_WB, &enabled);
|
||||
_hidl_cb(retval, enabled);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setBtScoWidebandEnabled(bool enabled) {
|
||||
return mDevice->setParam(AUDIO_PARAMETER_KEY_BT_SCO_WB, enabled);
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getTtyMode(getTtyMode_cb _hidl_cb) {
|
||||
int halMode;
|
||||
Result retval = mDevice->getParam(AUDIO_PARAMETER_KEY_TTY_MODE, &halMode);
|
||||
TtyMode mode = retval == Result::OK ? TtyMode(halMode) : TtyMode::OFF;
|
||||
_hidl_cb(retval, mode);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setTtyMode(IPrimaryDevice::TtyMode mode) {
|
||||
return mDevice->setParam(AUDIO_PARAMETER_KEY_TTY_MODE,
|
||||
static_cast<int>(mode));
|
||||
}
|
||||
|
||||
Return<void> PrimaryDevice::getHacEnabled(getHacEnabled_cb _hidl_cb) {
|
||||
bool enabled;
|
||||
Result retval = mDevice->getParam(AUDIO_PARAMETER_KEY_HAC, &enabled);
|
||||
_hidl_cb(retval, enabled);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> PrimaryDevice::setHacEnabled(bool enabled) {
|
||||
return mDevice->setParam(AUDIO_PARAMETER_KEY_HAC, enabled);
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
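PrimaryDevice forwards all IDevice calls to the Device instance it owns and only implements the telephony-oriented IPrimaryDevice extras itself. A hedged client-side sketch of reaching it through the 2.0 factory follows; it assumes the IDevicesFactory::openDevice(Device::PRIMARY, ...) signature of this interface version and omits transport-error handling.

// Hypothetical client-side usage sketch (not part of this change).
#include <android/hardware/audio/2.0/IDevicesFactory.h>
#include <android/hardware/audio/2.0/IPrimaryDevice.h>
#include <android/hardware/audio/common/2.0/types.h>

using ::android::sp;
using namespace ::android::hardware::audio::V2_0;
using ::android::hardware::audio::common::V2_0::AudioMode;

bool setInCallMode() {
    sp<IDevicesFactory> factory = IDevicesFactory::getService();
    if (factory == nullptr) return false;
    Result retval = Result::NOT_INITIALIZED;
    sp<IDevice> device;
    factory->openDevice(IDevicesFactory::Device::PRIMARY,
                        [&](Result r, const sp<IDevice>& d) { retval = r; device = d; });
    if (retval != Result::OK || device == nullptr) return false;
    // castFrom() succeeds here because the factory instantiates PrimaryDevice.
    sp<IPrimaryDevice> primary = IPrimaryDevice::castFrom(device).withDefault(nullptr);
    if (primary == nullptr) return false;
    Result res = primary->setMode(AudioMode::IN_CALL);
    return res == Result::OK;
}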
118
android/hardware/interfaces/audio/2.0/default/PrimaryDevice.h
Normal file
|
@ -0,0 +1,118 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H
|
||||
#define ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H
|
||||
|
||||
#include <android/hardware/audio/2.0/IPrimaryDevice.h>
|
||||
#include <hidl/Status.h>
|
||||
|
||||
#include <hidl/MQDescriptor.h>
|
||||
|
||||
#include "Device.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::common::V2_0::AudioConfig;
|
||||
using ::android::hardware::audio::common::V2_0::AudioInputFlag;
|
||||
using ::android::hardware::audio::common::V2_0::AudioMode;
|
||||
using ::android::hardware::audio::common::V2_0::AudioOutputFlag;
|
||||
using ::android::hardware::audio::common::V2_0::AudioPort;
|
||||
using ::android::hardware::audio::common::V2_0::AudioPortConfig;
|
||||
using ::android::hardware::audio::common::V2_0::AudioSource;
|
||||
using ::android::hardware::audio::V2_0::DeviceAddress;
|
||||
using ::android::hardware::audio::V2_0::IDevice;
|
||||
using ::android::hardware::audio::V2_0::IPrimaryDevice;
|
||||
using ::android::hardware::audio::V2_0::IStreamIn;
|
||||
using ::android::hardware::audio::V2_0::IStreamOut;
|
||||
using ::android::hardware::audio::V2_0::ParameterValue;
|
||||
using ::android::hardware::audio::V2_0::Result;
|
||||
using ::android::hardware::Return;
|
||||
using ::android::hardware::Void;
|
||||
using ::android::hardware::hidl_vec;
|
||||
using ::android::hardware::hidl_string;
|
||||
using ::android::sp;
|
||||
|
||||
struct PrimaryDevice : public IPrimaryDevice {
|
||||
explicit PrimaryDevice(audio_hw_device_t* device);
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IDevice follow.
|
||||
Return<Result> initCheck() override;
|
||||
Return<Result> setMasterVolume(float volume) override;
|
||||
Return<void> getMasterVolume(getMasterVolume_cb _hidl_cb) override;
|
||||
Return<Result> setMicMute(bool mute) override;
|
||||
Return<void> getMicMute(getMicMute_cb _hidl_cb) override;
|
||||
Return<Result> setMasterMute(bool mute) override;
|
||||
Return<void> getMasterMute(getMasterMute_cb _hidl_cb) override;
|
||||
Return<void> getInputBufferSize(
|
||||
const AudioConfig& config, getInputBufferSize_cb _hidl_cb) override;
|
||||
Return<void> openOutputStream(
|
||||
int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioOutputFlag flags,
|
||||
openOutputStream_cb _hidl_cb) override;
|
||||
Return<void> openInputStream(
|
||||
int32_t ioHandle,
|
||||
const DeviceAddress& device,
|
||||
const AudioConfig& config,
|
||||
AudioInputFlag flags,
|
||||
AudioSource source,
|
||||
openInputStream_cb _hidl_cb) override;
|
||||
Return<bool> supportsAudioPatches() override;
|
||||
Return<void> createAudioPatch(
|
||||
const hidl_vec<AudioPortConfig>& sources,
|
||||
const hidl_vec<AudioPortConfig>& sinks,
|
||||
createAudioPatch_cb _hidl_cb) override;
|
||||
Return<Result> releaseAudioPatch(int32_t patch) override;
|
||||
Return<void> getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) override;
|
||||
Return<Result> setAudioPortConfig(const AudioPortConfig& config) override;
|
||||
Return<AudioHwSync> getHwAvSync() override;
|
||||
Return<Result> setScreenState(bool turnedOn) override;
|
||||
Return<void> getParameters(
|
||||
const hidl_vec<hidl_string>& keys, getParameters_cb _hidl_cb) override;
|
||||
Return<Result> setParameters(const hidl_vec<ParameterValue>& parameters) override;
|
||||
Return<void> debugDump(const hidl_handle& fd) override;
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IPrimaryDevice follow.
|
||||
Return<Result> setVoiceVolume(float volume) override;
|
||||
Return<Result> setMode(AudioMode mode) override;
|
||||
Return<void> getBtScoNrecEnabled(getBtScoNrecEnabled_cb _hidl_cb) override;
|
||||
Return<Result> setBtScoNrecEnabled(bool enabled) override;
|
||||
Return<void> getBtScoWidebandEnabled(getBtScoWidebandEnabled_cb _hidl_cb) override;
|
||||
Return<Result> setBtScoWidebandEnabled(bool enabled) override;
|
||||
Return<void> getTtyMode(getTtyMode_cb _hidl_cb) override;
|
||||
Return<Result> setTtyMode(IPrimaryDevice::TtyMode mode) override;
|
||||
Return<void> getHacEnabled(getHacEnabled_cb _hidl_cb) override;
|
||||
Return<Result> setHacEnabled(bool enabled) override;
|
||||
|
||||
private:
|
||||
sp<Device> mDevice;
|
||||
|
||||
virtual ~PrimaryDevice();
|
||||
};
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
||||
|
||||
#endif // ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H
|
278
android/hardware/interfaces/audio/2.0/default/Stream.cpp
Normal file
|
@ -0,0 +1,278 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include <inttypes.h>
|
||||
|
||||
#define LOG_TAG "StreamHAL"
|
||||
|
||||
#include <hardware/audio.h>
|
||||
#include <hardware/audio_effect.h>
|
||||
#include <media/TypeConverter.h>
|
||||
#include <android/log.h>
|
||||
#include <utils/SortedVector.h>
|
||||
#include <utils/Vector.h>
|
||||
|
||||
#include "Conversions.h"
|
||||
#include "EffectMap.h"
|
||||
#include "Stream.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
Stream::Stream(audio_stream_t* stream)
|
||||
: mStream(stream) {
|
||||
}
|
||||
|
||||
Stream::~Stream() {
|
||||
mStream = nullptr;
|
||||
}
|
||||
|
||||
// static
|
||||
Result Stream::analyzeStatus(const char* funcName, int status) {
|
||||
static const std::vector<int> empty;
|
||||
return analyzeStatus(funcName, status, empty);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
inline bool element_in(T e, const std::vector<T>& v) {
|
||||
return std::find(v.begin(), v.end(), e) != v.end();
|
||||
}
|
||||
|
||||
// static
|
||||
Result Stream::analyzeStatus(const char* funcName, int status,
|
||||
const std::vector<int>& ignoreErrors) {
|
||||
if (status != 0 && (ignoreErrors.empty() || !element_in(-status, ignoreErrors))) {
|
||||
ALOGW("Error from HAL stream in function %s: %s", funcName, strerror(-status));
|
||||
}
|
||||
switch (status) {
|
||||
case 0: return Result::OK;
|
||||
case -EINVAL: return Result::INVALID_ARGUMENTS;
|
||||
case -ENODATA: return Result::INVALID_STATE;
|
||||
case -ENODEV: return Result::NOT_INITIALIZED;
|
||||
case -ENOSYS: return Result::NOT_SUPPORTED;
|
||||
default: return Result::INVALID_STATE;
|
||||
}
|
||||
}
|
||||
|
||||
char* Stream::halGetParameters(const char* keys) {
|
||||
return mStream->get_parameters(mStream, keys);
|
||||
}
|
||||
|
||||
int Stream::halSetParameters(const char* keysAndValues) {
|
||||
return mStream->set_parameters(mStream, keysAndValues);
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStream follow.
|
||||
Return<uint64_t> Stream::getFrameSize() {
|
||||
// Needs to be implemented by interface subclasses. But can't be declared as pure virtual,
|
||||
// since interface subclass implementations do not inherit from this class.
|
||||
LOG_ALWAYS_FATAL("Stream::getFrameSize is pure abstract");
|
||||
return uint64_t {};
|
||||
}
|
||||
|
||||
Return<uint64_t> Stream::getFrameCount() {
|
||||
int halFrameCount;
|
||||
Result retval = getParam(AudioParameter::keyFrameCount, &halFrameCount);
|
||||
return retval == Result::OK ? halFrameCount : 0;
|
||||
}
|
||||
|
||||
Return<uint64_t> Stream::getBufferSize() {
|
||||
return mStream->get_buffer_size(mStream);
|
||||
}
|
||||
|
||||
Return<uint32_t> Stream::getSampleRate() {
|
||||
return mStream->get_sample_rate(mStream);
|
||||
}
|
||||
|
||||
Return<void> Stream::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) {
|
||||
String8 halListValue;
|
||||
Result result = getParam(AudioParameter::keyStreamSupportedSamplingRates, &halListValue);
|
||||
hidl_vec<uint32_t> sampleRates;
|
||||
SortedVector<uint32_t> halSampleRates;
|
||||
if (result == Result::OK) {
|
||||
halSampleRates = samplingRatesFromString(
|
||||
halListValue.string(), AudioParameter::valueListSeparator);
|
||||
sampleRates.setToExternal(halSampleRates.editArray(), halSampleRates.size());
|
||||
}
|
||||
_hidl_cb(sampleRates);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::setSampleRate(uint32_t sampleRateHz) {
|
||||
return setParam(AudioParameter::keySamplingRate, static_cast<int>(sampleRateHz));
|
||||
}
|
||||
|
||||
Return<AudioChannelMask> Stream::getChannelMask() {
|
||||
return AudioChannelMask(mStream->get_channels(mStream));
|
||||
}
|
||||
|
||||
Return<void> Stream::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) {
|
||||
String8 halListValue;
|
||||
Result result = getParam(AudioParameter::keyStreamSupportedChannels, &halListValue);
|
||||
hidl_vec<AudioChannelMask> channelMasks;
|
||||
SortedVector<audio_channel_mask_t> halChannelMasks;
|
||||
if (result == Result::OK) {
|
||||
halChannelMasks = channelMasksFromString(
|
||||
halListValue.string(), AudioParameter::valueListSeparator);
|
||||
channelMasks.resize(halChannelMasks.size());
|
||||
for (size_t i = 0; i < halChannelMasks.size(); ++i) {
|
||||
channelMasks[i] = AudioChannelMask(halChannelMasks[i]);
|
||||
}
|
||||
}
|
||||
_hidl_cb(channelMasks);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::setChannelMask(AudioChannelMask mask) {
|
||||
return setParam(AudioParameter::keyChannels, static_cast<int>(mask));
|
||||
}
|
||||
|
||||
Return<AudioFormat> Stream::getFormat() {
|
||||
return AudioFormat(mStream->get_format(mStream));
|
||||
}
|
||||
|
||||
Return<void> Stream::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
|
||||
String8 halListValue;
|
||||
Result result = getParam(AudioParameter::keyStreamSupportedFormats, &halListValue);
|
||||
hidl_vec<AudioFormat> formats;
|
||||
Vector<audio_format_t> halFormats;
|
||||
if (result == Result::OK) {
|
||||
halFormats = formatsFromString(halListValue.string(), AudioParameter::valueListSeparator);
|
||||
formats.resize(halFormats.size());
|
||||
for (size_t i = 0; i < halFormats.size(); ++i) {
|
||||
formats[i] = AudioFormat(halFormats[i]);
|
||||
}
|
||||
}
|
||||
_hidl_cb(formats);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::setFormat(AudioFormat format) {
|
||||
return setParam(AudioParameter::keyFormat, static_cast<int>(format));
|
||||
}
|
||||
|
||||
Return<void> Stream::getAudioProperties(getAudioProperties_cb _hidl_cb) {
|
||||
uint32_t halSampleRate = mStream->get_sample_rate(mStream);
|
||||
audio_channel_mask_t halMask = mStream->get_channels(mStream);
|
||||
audio_format_t halFormat = mStream->get_format(mStream);
|
||||
_hidl_cb(halSampleRate, AudioChannelMask(halMask), AudioFormat(halFormat));
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::addEffect(uint64_t effectId) {
|
||||
effect_handle_t halEffect = EffectMap::getInstance().get(effectId);
|
||||
if (halEffect != NULL) {
|
||||
return analyzeStatus("add_audio_effect", mStream->add_audio_effect(mStream, halEffect));
|
||||
} else {
|
||||
ALOGW("Invalid effect ID passed from client: %" PRIu64, effectId);
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
}
|
||||
}
|
||||
|
||||
Return<Result> Stream::removeEffect(uint64_t effectId) {
|
||||
effect_handle_t halEffect = EffectMap::getInstance().get(effectId);
|
||||
if (halEffect != NULL) {
|
||||
return analyzeStatus(
|
||||
"remove_audio_effect", mStream->remove_audio_effect(mStream, halEffect));
|
||||
} else {
|
||||
ALOGW("Invalid effect ID passed from client: %" PRIu64, effectId);
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
}
|
||||
}
|
||||
|
||||
Return<Result> Stream::standby() {
|
||||
return analyzeStatus("standby", mStream->standby(mStream));
|
||||
}
|
||||
|
||||
Return<AudioDevice> Stream::getDevice() {
|
||||
int device;
|
||||
Result retval = getParam(AudioParameter::keyRouting, &device);
|
||||
return retval == Result::OK ? static_cast<AudioDevice>(device) : AudioDevice::NONE;
|
||||
}
|
||||
|
||||
Return<Result> Stream::setDevice(const DeviceAddress& address) {
|
||||
char* halDeviceAddress =
|
||||
audio_device_address_to_parameter(
|
||||
static_cast<audio_devices_t>(address.device),
|
||||
deviceAddressToHal(address).c_str());
|
||||
AudioParameter params((String8(halDeviceAddress)));
|
||||
free(halDeviceAddress);
|
||||
params.addInt(
|
||||
String8(AudioParameter::keyRouting), static_cast<audio_devices_t>(address.device));
|
||||
return setParams(params);
|
||||
}
|
||||
|
||||
Return<Result> Stream::setConnectedState(const DeviceAddress& address, bool connected) {
|
||||
return setParam(
|
||||
connected ? AudioParameter::keyStreamConnect : AudioParameter::keyStreamDisconnect,
|
||||
deviceAddressToHal(address).c_str());
|
||||
}
|
||||
|
||||
Return<Result> Stream::setHwAvSync(uint32_t hwAvSync) {
|
||||
return setParam(AudioParameter::keyStreamHwAvSync, static_cast<int>(hwAvSync));
|
||||
}
|
||||
|
||||
Return<void> Stream::getParameters(const hidl_vec<hidl_string>& keys, getParameters_cb _hidl_cb) {
|
||||
getParametersImpl(keys, _hidl_cb);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::setParameters(const hidl_vec<ParameterValue>& parameters) {
|
||||
return setParametersImpl(parameters);
|
||||
}
|
||||
|
||||
Return<void> Stream::debugDump(const hidl_handle& fd) {
|
||||
if (fd.getNativeHandle() != nullptr && fd->numFds == 1) {
|
||||
analyzeStatus("dump", mStream->dump(mStream, fd->data[0]));
|
||||
}
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::start() {
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<Result> Stream::stop() {
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<void> Stream::createMmapBuffer(int32_t minSizeFrames __unused,
|
||||
createMmapBuffer_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
MmapBufferInfo info;
|
||||
_hidl_cb(retval, info);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<void> Stream::getMmapPosition(getMmapPosition_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
MmapPosition position;
|
||||
_hidl_cb(retval, position);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> Stream::close() {
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
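Stream::getSupportedSampleRates() above is a pure key/value round-trip: the stream is queried with AudioParameter::keyStreamSupportedSamplingRates and the reply is split on AudioParameter::valueListSeparator ('|') by samplingRatesFromString(). A hypothetical legacy-HAL get_parameters reply illustrating the shape the parser expects; the literal key spelling below is an assumption, the framework's AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES definition is authoritative.

// Illustrative legacy-HAL reply only; real HALs answer through their own
// audio_stream_t::get_parameters implementation.
#include <stdlib.h>
#include <string.h>
#include <hardware/audio.h>

static char* example_get_parameters(const struct audio_stream* stream, const char* keys) {
    (void)stream;
    if (strstr(keys, "sup_sampling_rates") != NULL) {
        // ParametersUtil::getParams() takes ownership and free()s the result,
        // so return a heap copy.
        return strdup("sup_sampling_rates=44100|48000");
    }
    return strdup("");
}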
189
android/hardware/interfaces/audio/2.0/default/Stream.h
Normal file
|
@ -0,0 +1,189 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H
|
||||
#define ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include <android/hardware/audio/2.0/IStream.h>
|
||||
#include <hardware/audio.h>
|
||||
#include <hidl/Status.h>
|
||||
|
||||
#include <hidl/MQDescriptor.h>
|
||||
|
||||
#include "ParametersUtil.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::common::V2_0::AudioChannelMask;
|
||||
using ::android::hardware::audio::common::V2_0::AudioDevice;
|
||||
using ::android::hardware::audio::common::V2_0::AudioFormat;
|
||||
using ::android::hardware::audio::V2_0::DeviceAddress;
|
||||
using ::android::hardware::audio::V2_0::IStream;
|
||||
using ::android::hardware::audio::V2_0::ParameterValue;
|
||||
using ::android::hardware::audio::V2_0::Result;
|
||||
using ::android::hardware::Return;
|
||||
using ::android::hardware::Void;
|
||||
using ::android::hardware::hidl_vec;
|
||||
using ::android::hardware::hidl_string;
|
||||
using ::android::sp;
|
||||
|
||||
struct Stream : public IStream, public ParametersUtil {
|
||||
explicit Stream(audio_stream_t* stream);
|
||||
|
||||
/** 1GiB is the maximum buffer size the HAL client is allowed to request.
|
||||
* This value has been chosen to be under SIZE_MAX and still big enough
|
||||
* for all audio use cases.
|
||||
* Keep private for 2.0, put in .hal in 2.1
|
||||
*/
|
||||
static constexpr uint32_t MAX_BUFFER_SIZE = 2 << 30 /* == 1GiB */;
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStream follow.
|
||||
Return<uint64_t> getFrameSize() override;
|
||||
Return<uint64_t> getFrameCount() override;
|
||||
Return<uint64_t> getBufferSize() override;
|
||||
Return<uint32_t> getSampleRate() override;
|
||||
Return<void> getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override;
|
||||
Return<Result> setSampleRate(uint32_t sampleRateHz) override;
|
||||
Return<AudioChannelMask> getChannelMask() override;
|
||||
Return<void> getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override;
|
||||
Return<Result> setChannelMask(AudioChannelMask mask) override;
|
||||
Return<AudioFormat> getFormat() override;
|
||||
Return<void> getSupportedFormats(getSupportedFormats_cb _hidl_cb) override;
|
||||
Return<Result> setFormat(AudioFormat format) override;
|
||||
Return<void> getAudioProperties(getAudioProperties_cb _hidl_cb) override;
|
||||
Return<Result> addEffect(uint64_t effectId) override;
|
||||
Return<Result> removeEffect(uint64_t effectId) override;
|
||||
Return<Result> standby() override;
|
||||
Return<AudioDevice> getDevice() override;
|
||||
Return<Result> setDevice(const DeviceAddress& address) override;
|
||||
Return<Result> setConnectedState(const DeviceAddress& address, bool connected) override;
|
||||
Return<Result> setHwAvSync(uint32_t hwAvSync) override;
|
||||
Return<void> getParameters(
|
||||
const hidl_vec<hidl_string>& keys, getParameters_cb _hidl_cb) override;
|
||||
Return<Result> setParameters(const hidl_vec<ParameterValue>& parameters) override;
|
||||
Return<void> debugDump(const hidl_handle& fd) override;
|
||||
Return<Result> start() override;
|
||||
Return<Result> stop() override;
|
||||
Return<void> createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override;
|
||||
Return<void> getMmapPosition(getMmapPosition_cb _hidl_cb) override;
|
||||
Return<Result> close() override;
|
||||
|
||||
// Utility methods for extending interfaces.
|
||||
static Result analyzeStatus(const char* funcName, int status);
|
||||
static Result analyzeStatus(const char* funcName, int status,
|
||||
const std::vector<int>& ignoreErrors);
|
||||
|
||||
private:
|
||||
audio_stream_t *mStream;
|
||||
|
||||
virtual ~Stream();
|
||||
|
||||
// Methods from ParametersUtil.
|
||||
char* halGetParameters(const char* keys) override;
|
||||
int halSetParameters(const char* keysAndValues) override;
|
||||
};
|
||||
|
||||
|
||||
template <typename T>
|
||||
struct StreamMmap : public RefBase {
|
||||
explicit StreamMmap(T* stream) : mStream(stream) {}
|
||||
|
||||
Return<Result> start();
|
||||
Return<Result> stop();
|
||||
Return<void> createMmapBuffer(
|
||||
int32_t minSizeFrames, size_t frameSize, IStream::createMmapBuffer_cb _hidl_cb);
|
||||
Return<void> getMmapPosition(IStream::getMmapPosition_cb _hidl_cb);
|
||||
|
||||
private:
|
||||
StreamMmap() {}
|
||||
|
||||
T *mStream;
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
Return<Result> StreamMmap<T>::start() {
|
||||
if (mStream->start == NULL) return Result::NOT_SUPPORTED;
|
||||
int result = mStream->start(mStream);
|
||||
return Stream::analyzeStatus("start", result);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Return<Result> StreamMmap<T>::stop() {
|
||||
if (mStream->stop == NULL) return Result::NOT_SUPPORTED;
|
||||
int result = mStream->stop(mStream);
|
||||
return Stream::analyzeStatus("stop", result);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Return<void> StreamMmap<T>::createMmapBuffer(int32_t minSizeFrames, size_t frameSize,
|
||||
IStream::createMmapBuffer_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
MmapBufferInfo info;
|
||||
native_handle_t* hidlHandle = nullptr;
|
||||
|
||||
if (mStream->create_mmap_buffer != NULL) {
|
||||
struct audio_mmap_buffer_info halInfo;
|
||||
retval = Stream::analyzeStatus(
|
||||
"create_mmap_buffer",
|
||||
mStream->create_mmap_buffer(mStream, minSizeFrames, &halInfo));
|
||||
if (retval == Result::OK) {
|
||||
hidlHandle = native_handle_create(1, 0);
|
||||
hidlHandle->data[0] = halInfo.shared_memory_fd;
|
||||
info.sharedMemory = hidl_memory("audio_buffer", hidlHandle,
|
||||
frameSize * halInfo.buffer_size_frames);
|
||||
info.bufferSizeFrames = halInfo.buffer_size_frames;
|
||||
info.burstSizeFrames = halInfo.burst_size_frames;
|
||||
}
|
||||
}
|
||||
_hidl_cb(retval, info);
|
||||
if (hidlHandle != nullptr) {
|
||||
native_handle_delete(hidlHandle);
|
||||
}
|
||||
return Void();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Return<void> StreamMmap<T>::getMmapPosition(IStream::getMmapPosition_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
MmapPosition position;
|
||||
|
||||
if (mStream->get_mmap_position != NULL) {
|
||||
struct audio_mmap_position halPosition;
|
||||
retval = Stream::analyzeStatus(
|
||||
"get_mmap_position",
|
||||
mStream->get_mmap_position(mStream, &halPosition));
|
||||
if (retval == Result::OK) {
|
||||
position.timeNanoseconds = halPosition.time_nanoseconds;
|
||||
position.positionFrames = halPosition.position_frames;
|
||||
}
|
||||
}
|
||||
_hidl_cb(retval, position);
|
||||
return Void();
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
||||
|
||||
#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H
|
442
android/hardware/interfaces/audio/2.0/default/StreamIn.cpp
Normal file
|
@ -0,0 +1,442 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_TAG "StreamInHAL"
|
||||
//#define LOG_NDEBUG 0
|
||||
#define ATRACE_TAG ATRACE_TAG_AUDIO
|
||||
|
||||
#include <android/log.h>
|
||||
#include <hardware/audio.h>
|
||||
#include <utils/Trace.h>
|
||||
#include <memory>
|
||||
|
||||
#include "StreamIn.h"
|
||||
#include "Util.h"
|
||||
|
||||
using ::android::hardware::audio::V2_0::MessageQueueFlagBits;
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::common::V2_0::ThreadInfo;
|
||||
|
||||
namespace {
|
||||
|
||||
class ReadThread : public Thread {
|
||||
public:
|
||||
// ReadThread's lifespan never exceeds StreamIn's lifespan.
|
||||
ReadThread(std::atomic<bool>* stop, audio_stream_in_t* stream,
|
||||
StreamIn::CommandMQ* commandMQ, StreamIn::DataMQ* dataMQ,
|
||||
StreamIn::StatusMQ* statusMQ, EventFlag* efGroup)
|
||||
: Thread(false /*canCallJava*/),
|
||||
mStop(stop),
|
||||
mStream(stream),
|
||||
mCommandMQ(commandMQ),
|
||||
mDataMQ(dataMQ),
|
||||
mStatusMQ(statusMQ),
|
||||
mEfGroup(efGroup),
|
||||
mBuffer(nullptr) {}
|
||||
bool init() {
|
||||
mBuffer.reset(new (std::nothrow) uint8_t[mDataMQ->getQuantumCount()]);
|
||||
return mBuffer != nullptr;
|
||||
}
|
||||
virtual ~ReadThread() {}
|
||||
|
||||
private:
|
||||
std::atomic<bool>* mStop;
|
||||
audio_stream_in_t* mStream;
|
||||
StreamIn::CommandMQ* mCommandMQ;
|
||||
StreamIn::DataMQ* mDataMQ;
|
||||
StreamIn::StatusMQ* mStatusMQ;
|
||||
EventFlag* mEfGroup;
|
||||
std::unique_ptr<uint8_t[]> mBuffer;
|
||||
IStreamIn::ReadParameters mParameters;
|
||||
IStreamIn::ReadStatus mStatus;
|
||||
|
||||
bool threadLoop() override;
|
||||
|
||||
void doGetCapturePosition();
|
||||
void doRead();
|
||||
};
|
||||
|
||||
void ReadThread::doRead() {
|
||||
size_t availableToWrite = mDataMQ->availableToWrite();
|
||||
size_t requestedToRead = mParameters.params.read;
|
||||
if (requestedToRead > availableToWrite) {
|
||||
ALOGW(
|
||||
"truncating read data from %d to %d due to insufficient data queue "
|
||||
"space",
|
||||
(int32_t)requestedToRead, (int32_t)availableToWrite);
|
||||
requestedToRead = availableToWrite;
|
||||
}
|
||||
ssize_t readResult = mStream->read(mStream, &mBuffer[0], requestedToRead);
|
||||
mStatus.retval = Result::OK;
|
||||
uint64_t read = 0;
|
||||
if (readResult >= 0) {
|
||||
mStatus.reply.read = readResult;
|
||||
if (!mDataMQ->write(&mBuffer[0], readResult)) {
|
||||
ALOGW("data message queue write failed");
|
||||
}
|
||||
} else {
|
||||
mStatus.retval = Stream::analyzeStatus("read", readResult);
|
||||
}
|
||||
}
|
||||
|
||||
void ReadThread::doGetCapturePosition() {
|
||||
mStatus.retval = StreamIn::getCapturePositionImpl(
|
||||
mStream, &mStatus.reply.capturePosition.frames,
|
||||
&mStatus.reply.capturePosition.time);
|
||||
}
|
||||
|
||||
bool ReadThread::threadLoop() {
|
||||
// This implementation doesn't return control back to the Thread until it
|
||||
// decides to stop,
|
||||
// as the Thread uses mutexes, and this can lead to priority inversion.
|
||||
while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) {
|
||||
uint32_t efState = 0;
|
||||
mEfGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL),
|
||||
&efState);
|
||||
if (!(efState &
|
||||
static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL))) {
|
||||
continue; // Nothing to do.
|
||||
}
|
||||
if (!mCommandMQ->read(&mParameters)) {
|
||||
continue; // Nothing to do.
|
||||
}
|
||||
mStatus.replyTo = mParameters.command;
|
||||
switch (mParameters.command) {
|
||||
case IStreamIn::ReadCommand::READ:
|
||||
doRead();
|
||||
break;
|
||||
case IStreamIn::ReadCommand::GET_CAPTURE_POSITION:
|
||||
doGetCapturePosition();
|
||||
break;
|
||||
default:
|
||||
ALOGE("Unknown read thread command code %d",
|
||||
mParameters.command);
|
||||
mStatus.retval = Result::NOT_SUPPORTED;
|
||||
break;
|
||||
}
|
||||
if (!mStatusMQ->write(&mStatus)) {
|
||||
ALOGW("status message queue write failed");
|
||||
}
|
||||
mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
StreamIn::StreamIn(const sp<Device>& device, audio_stream_in_t* stream)
|
||||
: mIsClosed(false),
|
||||
mDevice(device),
|
||||
mStream(stream),
|
||||
mStreamCommon(new Stream(&stream->common)),
|
||||
mStreamMmap(new StreamMmap<audio_stream_in_t>(stream)),
|
||||
mEfGroup(nullptr),
|
||||
mStopReadThread(false) {}
|
||||
|
||||
StreamIn::~StreamIn() {
|
||||
ATRACE_CALL();
|
||||
close();
|
||||
if (mReadThread.get()) {
|
||||
ATRACE_NAME("mReadThread->join");
|
||||
status_t status = mReadThread->join();
|
||||
ALOGE_IF(status, "read thread exit error: %s", strerror(-status));
|
||||
}
|
||||
if (mEfGroup) {
|
||||
status_t status = EventFlag::deleteEventFlag(&mEfGroup);
|
||||
ALOGE_IF(status, "read MQ event flag deletion error: %s",
|
||||
strerror(-status));
|
||||
}
|
||||
mDevice->closeInputStream(mStream);
|
||||
mStream = nullptr;
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStream follow.
|
||||
Return<uint64_t> StreamIn::getFrameSize() {
|
||||
return audio_stream_in_frame_size(mStream);
|
||||
}
|
||||
|
||||
Return<uint64_t> StreamIn::getFrameCount() {
|
||||
return mStreamCommon->getFrameCount();
|
||||
}
|
||||
|
||||
Return<uint64_t> StreamIn::getBufferSize() {
|
||||
return mStreamCommon->getBufferSize();
|
||||
}
|
||||
|
||||
Return<uint32_t> StreamIn::getSampleRate() {
|
||||
return mStreamCommon->getSampleRate();
|
||||
}
|
||||
|
||||
Return<void> StreamIn::getSupportedSampleRates(
|
||||
getSupportedSampleRates_cb _hidl_cb) {
|
||||
return mStreamCommon->getSupportedSampleRates(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setSampleRate(uint32_t sampleRateHz) {
|
||||
return mStreamCommon->setSampleRate(sampleRateHz);
|
||||
}
|
||||
|
||||
Return<AudioChannelMask> StreamIn::getChannelMask() {
|
||||
return mStreamCommon->getChannelMask();
|
||||
}
|
||||
|
||||
Return<void> StreamIn::getSupportedChannelMasks(
|
||||
getSupportedChannelMasks_cb _hidl_cb) {
|
||||
return mStreamCommon->getSupportedChannelMasks(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setChannelMask(AudioChannelMask mask) {
|
||||
return mStreamCommon->setChannelMask(mask);
|
||||
}
|
||||
|
||||
Return<AudioFormat> StreamIn::getFormat() {
|
||||
return mStreamCommon->getFormat();
|
||||
}
|
||||
|
||||
Return<void> StreamIn::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
|
||||
return mStreamCommon->getSupportedFormats(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setFormat(AudioFormat format) {
|
||||
return mStreamCommon->setFormat(format);
|
||||
}
|
||||
|
||||
Return<void> StreamIn::getAudioProperties(getAudioProperties_cb _hidl_cb) {
|
||||
return mStreamCommon->getAudioProperties(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::addEffect(uint64_t effectId) {
|
||||
return mStreamCommon->addEffect(effectId);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::removeEffect(uint64_t effectId) {
|
||||
return mStreamCommon->removeEffect(effectId);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::standby() {
|
||||
return mStreamCommon->standby();
|
||||
}
|
||||
|
||||
Return<AudioDevice> StreamIn::getDevice() {
|
||||
return mStreamCommon->getDevice();
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setDevice(const DeviceAddress& address) {
|
||||
return mStreamCommon->setDevice(address);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setConnectedState(const DeviceAddress& address,
|
||||
bool connected) {
|
||||
return mStreamCommon->setConnectedState(address, connected);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setHwAvSync(uint32_t hwAvSync) {
|
||||
return mStreamCommon->setHwAvSync(hwAvSync);
|
||||
}
|
||||
|
||||
Return<void> StreamIn::getParameters(const hidl_vec<hidl_string>& keys,
|
||||
getParameters_cb _hidl_cb) {
|
||||
return mStreamCommon->getParameters(keys, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setParameters(
|
||||
const hidl_vec<ParameterValue>& parameters) {
|
||||
return mStreamCommon->setParameters(parameters);
|
||||
}
|
||||
|
||||
Return<void> StreamIn::debugDump(const hidl_handle& fd) {
|
||||
return mStreamCommon->debugDump(fd);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::start() {
|
||||
return mStreamMmap->start();
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::stop() {
|
||||
return mStreamMmap->stop();
|
||||
}
|
||||
|
||||
Return<void> StreamIn::createMmapBuffer(int32_t minSizeFrames,
|
||||
createMmapBuffer_cb _hidl_cb) {
|
||||
return mStreamMmap->createMmapBuffer(
|
||||
minSizeFrames, audio_stream_in_frame_size(mStream), _hidl_cb);
|
||||
}
|
||||
|
||||
Return<void> StreamIn::getMmapPosition(getMmapPosition_cb _hidl_cb) {
|
||||
return mStreamMmap->getMmapPosition(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::close() {
|
||||
if (mIsClosed) return Result::INVALID_STATE;
|
||||
mIsClosed = true;
|
||||
if (mReadThread.get()) {
|
||||
mStopReadThread.store(true, std::memory_order_release);
|
||||
}
|
||||
if (mEfGroup) {
|
||||
mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
|
||||
}
|
||||
return Result::OK;
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStreamIn follow.
|
||||
Return<void> StreamIn::getAudioSource(getAudioSource_cb _hidl_cb) {
|
||||
int halSource;
|
||||
Result retval =
|
||||
mStreamCommon->getParam(AudioParameter::keyInputSource, &halSource);
|
||||
AudioSource source(AudioSource::DEFAULT);
|
||||
if (retval == Result::OK) {
|
||||
source = AudioSource(halSource);
|
||||
}
|
||||
_hidl_cb(retval, source);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> StreamIn::setGain(float gain) {
|
||||
if (!isGainNormalized(gain)) {
|
||||
ALOGW("Can not set a stream input gain (%f) outside [0,1]", gain);
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
}
|
||||
return Stream::analyzeStatus("set_gain", mStream->set_gain(mStream, gain));
|
||||
}
|
||||
|
||||
Return<void> StreamIn::prepareForReading(uint32_t frameSize,
|
||||
uint32_t framesCount,
|
||||
prepareForReading_cb _hidl_cb) {
|
||||
status_t status;
|
||||
ThreadInfo threadInfo = {0, 0};
|
||||
|
||||
// Wrap the _hidl_cb to return an error
|
||||
auto sendError = [this, &threadInfo, &_hidl_cb](Result result) {
|
||||
_hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(),
|
||||
StatusMQ::Descriptor(), threadInfo);
|
||||
|
||||
};
|
||||
|
||||
// Create message queues.
|
||||
if (mDataMQ) {
|
||||
ALOGE("the client attempts to call prepareForReading twice");
|
||||
sendError(Result::INVALID_STATE);
|
||||
return Void();
|
||||
}
|
||||
std::unique_ptr<CommandMQ> tempCommandMQ(new CommandMQ(1));
|
||||
|
||||
// Check frameSize and framesCount
|
||||
if (frameSize == 0 || framesCount == 0) {
|
||||
ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize,
|
||||
framesCount);
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
|
||||
if (frameSize > Stream::MAX_BUFFER_SIZE / framesCount) {
|
||||
ALOGE("Buffer too big: %u*%u bytes > MAX_BUFFER_SIZE (%u)", frameSize, framesCount,
|
||||
Stream::MAX_BUFFER_SIZE);
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
std::unique_ptr<DataMQ> tempDataMQ(
|
||||
new DataMQ(frameSize * framesCount, true /* EventFlag */));
|
||||
|
||||
std::unique_ptr<StatusMQ> tempStatusMQ(new StatusMQ(1));
|
||||
if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() ||
|
||||
!tempStatusMQ->isValid()) {
|
||||
ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid");
|
||||
ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid");
|
||||
ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid");
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
EventFlag* tempRawEfGroup{};
|
||||
status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(),
|
||||
&tempRawEfGroup);
|
||||
std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempElfGroup(
|
||||
tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); });
|
||||
if (status != OK || !tempElfGroup) {
|
||||
ALOGE("failed creating event flag for data MQ: %s", strerror(-status));
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
|
||||
// Create and launch the thread.
|
||||
auto tempReadThread = std::make_unique<ReadThread>(
|
||||
&mStopReadThread, mStream, tempCommandMQ.get(), tempDataMQ.get(),
|
||||
tempStatusMQ.get(), tempElfGroup.get());
|
||||
if (!tempReadThread->init()) {
|
||||
ALOGW("failed to start reader thread: %s", strerror(-status));
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
status = tempReadThread->run("reader", PRIORITY_URGENT_AUDIO);
|
||||
if (status != OK) {
|
||||
ALOGW("failed to start reader thread: %s", strerror(-status));
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
|
||||
mCommandMQ = std::move(tempCommandMQ);
|
||||
mDataMQ = std::move(tempDataMQ);
|
||||
mStatusMQ = std::move(tempStatusMQ);
|
||||
mReadThread = tempReadThread.release();
|
||||
mEfGroup = tempElfGroup.release();
|
||||
threadInfo.pid = getpid();
|
||||
threadInfo.tid = mReadThread->getTid();
|
||||
_hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(),
|
||||
*mStatusMQ->getDesc(), threadInfo);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<uint32_t> StreamIn::getInputFramesLost() {
|
||||
return mStream->get_input_frames_lost(mStream);
|
||||
}
|
||||
|
||||
// static
Result StreamIn::getCapturePositionImpl(audio_stream_in_t* stream,
                                        uint64_t* frames, uint64_t* time) {
    // The HAL may have a stub function that always returns ENOSYS; don't
    // spam the log in this case.
    static const std::vector<int> ignoredErrors{ENOSYS};
    Result retval(Result::NOT_SUPPORTED);
    if (stream->get_capture_position == NULL) return retval;
    int64_t halFrames, halTime;
    retval = Stream::analyzeStatus(
            "get_capture_position",
            stream->get_capture_position(stream, &halFrames, &halTime),
            ignoredErrors);
    if (retval == Result::OK) {
        *frames = halFrames;
        *time = halTime;
    }
    return retval;
}
|
||||
|
||||
Return<void> StreamIn::getCapturePosition(getCapturePosition_cb _hidl_cb) {
|
||||
uint64_t frames = 0, time = 0;
|
||||
Result retval = getCapturePositionImpl(mStream, &frames, &time);
|
||||
_hidl_cb(retval, frames, time);
|
||||
return Void();
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
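The ReadThread above serves a simple message-queue protocol: the client posts a ReadParameters command, raises NOT_FULL, waits on NOT_EMPTY, then collects the ReadStatus and the captured bytes. A rough client-side counterpart is sketched below; it assumes the queues were rebuilt from the descriptors returned by prepareForReading() and skips timeouts and error handling.

// Rough client-side sketch of one read over the queues set up by
// IStreamIn::prepareForReading(); illustration only, not part of this change.
bool readOnce(StreamIn::CommandMQ* commandMQ, StreamIn::DataMQ* dataMQ,
              StreamIn::StatusMQ* statusMQ, EventFlag* efGroup,
              uint8_t* buffer, size_t bytes, size_t* bytesRead) {
    IStreamIn::ReadParameters params;
    params.command = IStreamIn::ReadCommand::READ;
    params.params.read = bytes;
    if (!commandMQ->write(&params)) return false;
    efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));

    uint32_t efState = 0;
    efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState);
    IStreamIn::ReadStatus status;
    if (!statusMQ->read(&status) || status.retval != Result::OK) return false;
    *bytesRead = static_cast<size_t>(status.reply.read);
    return dataMQ->read(buffer, static_cast<size_t>(status.reply.read));
}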
125
android/hardware/interfaces/audio/2.0/default/StreamIn.h
Normal file
|
@ -0,0 +1,125 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H
|
||||
#define ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H
|
||||
|
||||
#include <atomic>
|
||||
#include <memory>
|
||||
|
||||
#include <android/hardware/audio/2.0/IStreamIn.h>
|
||||
#include <hidl/MQDescriptor.h>
|
||||
#include <fmq/EventFlag.h>
|
||||
#include <fmq/MessageQueue.h>
|
||||
#include <hidl/Status.h>
|
||||
#include <utils/Thread.h>
|
||||
|
||||
#include "Device.h"
|
||||
#include "Stream.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::common::V2_0::AudioChannelMask;
|
||||
using ::android::hardware::audio::common::V2_0::AudioDevice;
|
||||
using ::android::hardware::audio::common::V2_0::AudioFormat;
|
||||
using ::android::hardware::audio::common::V2_0::AudioSource;
|
||||
using ::android::hardware::audio::V2_0::DeviceAddress;
|
||||
using ::android::hardware::audio::V2_0::IStream;
|
||||
using ::android::hardware::audio::V2_0::IStreamIn;
|
||||
using ::android::hardware::audio::V2_0::ParameterValue;
|
||||
using ::android::hardware::audio::V2_0::Result;
|
||||
using ::android::hardware::Return;
|
||||
using ::android::hardware::Void;
|
||||
using ::android::hardware::hidl_vec;
|
||||
using ::android::hardware::hidl_string;
|
||||
using ::android::sp;
|
||||
|
||||
struct StreamIn : public IStreamIn {
|
||||
typedef MessageQueue<ReadParameters, kSynchronizedReadWrite> CommandMQ;
|
||||
typedef MessageQueue<uint8_t, kSynchronizedReadWrite> DataMQ;
|
||||
typedef MessageQueue<ReadStatus, kSynchronizedReadWrite> StatusMQ;
|
||||
|
||||
StreamIn(const sp<Device>& device, audio_stream_in_t* stream);
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStream follow.
|
||||
Return<uint64_t> getFrameSize() override;
|
||||
Return<uint64_t> getFrameCount() override;
|
||||
Return<uint64_t> getBufferSize() override;
|
||||
Return<uint32_t> getSampleRate() override;
|
||||
Return<void> getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override;
|
||||
Return<Result> setSampleRate(uint32_t sampleRateHz) override;
|
||||
Return<AudioChannelMask> getChannelMask() override;
|
||||
Return<void> getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override;
|
||||
Return<Result> setChannelMask(AudioChannelMask mask) override;
|
||||
Return<AudioFormat> getFormat() override;
|
||||
Return<void> getSupportedFormats(getSupportedFormats_cb _hidl_cb) override;
|
||||
Return<Result> setFormat(AudioFormat format) override;
|
||||
Return<void> getAudioProperties(getAudioProperties_cb _hidl_cb) override;
|
||||
Return<Result> addEffect(uint64_t effectId) override;
|
||||
Return<Result> removeEffect(uint64_t effectId) override;
|
||||
Return<Result> standby() override;
|
||||
Return<AudioDevice> getDevice() override;
|
||||
Return<Result> setDevice(const DeviceAddress& address) override;
|
||||
Return<Result> setConnectedState(const DeviceAddress& address, bool connected) override;
|
||||
Return<Result> setHwAvSync(uint32_t hwAvSync) override;
|
||||
Return<void> getParameters(
|
||||
const hidl_vec<hidl_string>& keys, getParameters_cb _hidl_cb) override;
|
||||
Return<Result> setParameters(const hidl_vec<ParameterValue>& parameters) override;
|
||||
Return<void> debugDump(const hidl_handle& fd) override;
|
||||
Return<Result> close() override;
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStreamIn follow.
|
||||
Return<void> getAudioSource(getAudioSource_cb _hidl_cb) override;
|
||||
Return<Result> setGain(float gain) override;
|
||||
Return<void> prepareForReading(
|
||||
uint32_t frameSize, uint32_t framesCount, prepareForReading_cb _hidl_cb) override;
|
||||
Return<uint32_t> getInputFramesLost() override;
|
||||
Return<void> getCapturePosition(getCapturePosition_cb _hidl_cb) override;
|
||||
Return<Result> start() override;
|
||||
Return<Result> stop() override;
|
||||
Return<void> createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override;
|
||||
Return<void> getMmapPosition(getMmapPosition_cb _hidl_cb) override;
|
||||
|
||||
static Result getCapturePositionImpl(
|
||||
audio_stream_in_t *stream, uint64_t *frames, uint64_t *time);
|
||||
|
||||
private:
|
||||
bool mIsClosed;
|
||||
const sp<Device> mDevice;
|
||||
audio_stream_in_t *mStream;
|
||||
const sp<Stream> mStreamCommon;
|
||||
const sp<StreamMmap<audio_stream_in_t>> mStreamMmap;
|
||||
std::unique_ptr<CommandMQ> mCommandMQ;
|
||||
std::unique_ptr<DataMQ> mDataMQ;
|
||||
std::unique_ptr<StatusMQ> mStatusMQ;
|
||||
EventFlag* mEfGroup;
|
||||
std::atomic<bool> mStopReadThread;
|
||||
sp<Thread> mReadThread;
|
||||
|
||||
virtual ~StreamIn();
|
||||
};
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
||||
|
||||
#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H
|
534
android/hardware/interfaces/audio/2.0/default/StreamOut.cpp
Normal file
|
@ -0,0 +1,534 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_TAG "StreamOutHAL"
|
||||
//#define LOG_NDEBUG 0
|
||||
#define ATRACE_TAG ATRACE_TAG_AUDIO
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <android/log.h>
|
||||
#include <hardware/audio.h>
|
||||
#include <utils/Trace.h>
|
||||
|
||||
#include "StreamOut.h"
|
||||
#include "Util.h"
|
||||
|
||||
namespace android {
|
||||
namespace hardware {
|
||||
namespace audio {
|
||||
namespace V2_0 {
|
||||
namespace implementation {
|
||||
|
||||
using ::android::hardware::audio::common::V2_0::ThreadInfo;
|
||||
|
||||
namespace {
|
||||
|
||||
class WriteThread : public Thread {
|
||||
public:
|
||||
// WriteThread's lifespan never exceeds StreamOut's lifespan.
|
||||
WriteThread(std::atomic<bool>* stop, audio_stream_out_t* stream,
|
||||
StreamOut::CommandMQ* commandMQ, StreamOut::DataMQ* dataMQ,
|
||||
StreamOut::StatusMQ* statusMQ, EventFlag* efGroup)
|
||||
: Thread(false /*canCallJava*/),
|
||||
mStop(stop),
|
||||
mStream(stream),
|
||||
mCommandMQ(commandMQ),
|
||||
mDataMQ(dataMQ),
|
||||
mStatusMQ(statusMQ),
|
||||
mEfGroup(efGroup),
|
||||
mBuffer(nullptr) {}
|
||||
bool init() {
|
||||
mBuffer.reset(new (std::nothrow) uint8_t[mDataMQ->getQuantumCount()]);
|
||||
return mBuffer != nullptr;
|
||||
}
|
||||
virtual ~WriteThread() {}
|
||||
|
||||
private:
|
||||
std::atomic<bool>* mStop;
|
||||
audio_stream_out_t* mStream;
|
||||
StreamOut::CommandMQ* mCommandMQ;
|
||||
StreamOut::DataMQ* mDataMQ;
|
||||
StreamOut::StatusMQ* mStatusMQ;
|
||||
EventFlag* mEfGroup;
|
||||
std::unique_ptr<uint8_t[]> mBuffer;
|
||||
IStreamOut::WriteStatus mStatus;
|
||||
|
||||
bool threadLoop() override;
|
||||
|
||||
void doGetLatency();
|
||||
void doGetPresentationPosition();
|
||||
void doWrite();
|
||||
};
|
||||
|
||||
void WriteThread::doWrite() {
|
||||
const size_t availToRead = mDataMQ->availableToRead();
|
||||
mStatus.retval = Result::OK;
|
||||
mStatus.reply.written = 0;
|
||||
if (mDataMQ->read(&mBuffer[0], availToRead)) {
|
||||
ssize_t writeResult = mStream->write(mStream, &mBuffer[0], availToRead);
|
||||
if (writeResult >= 0) {
|
||||
mStatus.reply.written = writeResult;
|
||||
} else {
|
||||
mStatus.retval = Stream::analyzeStatus("write", writeResult);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void WriteThread::doGetPresentationPosition() {
|
||||
mStatus.retval = StreamOut::getPresentationPositionImpl(
|
||||
mStream, &mStatus.reply.presentationPosition.frames,
|
||||
&mStatus.reply.presentationPosition.timeStamp);
|
||||
}
|
||||
|
||||
void WriteThread::doGetLatency() {
|
||||
mStatus.retval = Result::OK;
|
||||
mStatus.reply.latencyMs = mStream->get_latency(mStream);
|
||||
}
|
||||
|
||||
bool WriteThread::threadLoop() {
|
||||
// This implementation doesn't return control back to the Thread until it
|
||||
// decides to stop,
|
||||
// as the Thread uses mutexes, and this can lead to priority inversion.
|
||||
while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) {
|
||||
uint32_t efState = 0;
|
||||
mEfGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY),
|
||||
&efState);
|
||||
if (!(efState &
|
||||
static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY))) {
|
||||
continue; // Nothing to do.
|
||||
}
|
||||
if (!mCommandMQ->read(&mStatus.replyTo)) {
|
||||
continue; // Nothing to do.
|
||||
}
|
||||
switch (mStatus.replyTo) {
|
||||
case IStreamOut::WriteCommand::WRITE:
|
||||
doWrite();
|
||||
break;
|
||||
case IStreamOut::WriteCommand::GET_PRESENTATION_POSITION:
|
||||
doGetPresentationPosition();
|
||||
break;
|
||||
case IStreamOut::WriteCommand::GET_LATENCY:
|
||||
doGetLatency();
|
||||
break;
|
||||
default:
|
||||
ALOGE("Unknown write thread command code %d", mStatus.replyTo);
|
||||
mStatus.retval = Result::NOT_SUPPORTED;
|
||||
break;
|
||||
}
|
||||
if (!mStatusMQ->write(&mStatus)) {
|
||||
ALOGE("status message queue write failed");
|
||||
}
|
||||
mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
StreamOut::StreamOut(const sp<Device>& device, audio_stream_out_t* stream)
|
||||
: mIsClosed(false),
|
||||
mDevice(device),
|
||||
mStream(stream),
|
||||
mStreamCommon(new Stream(&stream->common)),
|
||||
mStreamMmap(new StreamMmap<audio_stream_out_t>(stream)),
|
||||
mEfGroup(nullptr),
|
||||
mStopWriteThread(false) {}
|
||||
|
||||
StreamOut::~StreamOut() {
|
||||
ATRACE_CALL();
|
||||
close();
|
||||
if (mWriteThread.get()) {
|
||||
ATRACE_NAME("mWriteThread->join");
|
||||
status_t status = mWriteThread->join();
|
||||
ALOGE_IF(status, "write thread exit error: %s", strerror(-status));
|
||||
}
|
||||
if (mEfGroup) {
|
||||
status_t status = EventFlag::deleteEventFlag(&mEfGroup);
|
||||
ALOGE_IF(status, "write MQ event flag deletion error: %s",
|
||||
strerror(-status));
|
||||
}
|
||||
mCallback.clear();
|
||||
mDevice->closeOutputStream(mStream);
|
||||
mStream = nullptr;
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStream follow.
|
||||
Return<uint64_t> StreamOut::getFrameSize() {
|
||||
return audio_stream_out_frame_size(mStream);
|
||||
}
|
||||
|
||||
Return<uint64_t> StreamOut::getFrameCount() {
|
||||
return mStreamCommon->getFrameCount();
|
||||
}
|
||||
|
||||
Return<uint64_t> StreamOut::getBufferSize() {
|
||||
return mStreamCommon->getBufferSize();
|
||||
}
|
||||
|
||||
Return<uint32_t> StreamOut::getSampleRate() {
|
||||
return mStreamCommon->getSampleRate();
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getSupportedSampleRates(
|
||||
getSupportedSampleRates_cb _hidl_cb) {
|
||||
return mStreamCommon->getSupportedSampleRates(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setSampleRate(uint32_t sampleRateHz) {
|
||||
return mStreamCommon->setSampleRate(sampleRateHz);
|
||||
}
|
||||
|
||||
Return<AudioChannelMask> StreamOut::getChannelMask() {
|
||||
return mStreamCommon->getChannelMask();
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getSupportedChannelMasks(
|
||||
getSupportedChannelMasks_cb _hidl_cb) {
|
||||
return mStreamCommon->getSupportedChannelMasks(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setChannelMask(AudioChannelMask mask) {
|
||||
return mStreamCommon->setChannelMask(mask);
|
||||
}
|
||||
|
||||
Return<AudioFormat> StreamOut::getFormat() {
|
||||
return mStreamCommon->getFormat();
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
|
||||
return mStreamCommon->getSupportedFormats(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setFormat(AudioFormat format) {
|
||||
return mStreamCommon->setFormat(format);
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getAudioProperties(getAudioProperties_cb _hidl_cb) {
|
||||
return mStreamCommon->getAudioProperties(_hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::addEffect(uint64_t effectId) {
|
||||
return mStreamCommon->addEffect(effectId);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::removeEffect(uint64_t effectId) {
|
||||
return mStreamCommon->removeEffect(effectId);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::standby() {
|
||||
return mStreamCommon->standby();
|
||||
}
|
||||
|
||||
Return<AudioDevice> StreamOut::getDevice() {
|
||||
return mStreamCommon->getDevice();
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setDevice(const DeviceAddress& address) {
|
||||
return mStreamCommon->setDevice(address);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setConnectedState(const DeviceAddress& address,
|
||||
bool connected) {
|
||||
return mStreamCommon->setConnectedState(address, connected);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setHwAvSync(uint32_t hwAvSync) {
|
||||
return mStreamCommon->setHwAvSync(hwAvSync);
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getParameters(const hidl_vec<hidl_string>& keys,
|
||||
getParameters_cb _hidl_cb) {
|
||||
return mStreamCommon->getParameters(keys, _hidl_cb);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setParameters(
|
||||
const hidl_vec<ParameterValue>& parameters) {
|
||||
return mStreamCommon->setParameters(parameters);
|
||||
}
|
||||
|
||||
Return<void> StreamOut::debugDump(const hidl_handle& fd) {
|
||||
return mStreamCommon->debugDump(fd);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::close() {
|
||||
if (mIsClosed) return Result::INVALID_STATE;
|
||||
mIsClosed = true;
|
||||
if (mWriteThread.get()) {
|
||||
mStopWriteThread.store(true, std::memory_order_release);
|
||||
}
|
||||
if (mEfGroup) {
|
||||
mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
|
||||
}
|
||||
return Result::OK;
|
||||
}
|
||||
|
||||
// Methods from ::android::hardware::audio::V2_0::IStreamOut follow.
|
||||
Return<uint32_t> StreamOut::getLatency() {
|
||||
return mStream->get_latency(mStream);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setVolume(float left, float right) {
|
||||
if (mStream->set_volume == NULL) {
|
||||
return Result::NOT_SUPPORTED;
|
||||
}
|
||||
if (!isGainNormalized(left)) {
|
||||
ALOGW("Can not set a stream output volume {%f, %f} outside [0,1]", left,
|
||||
right);
|
||||
return Result::INVALID_ARGUMENTS;
|
||||
}
|
||||
return Stream::analyzeStatus("set_volume",
|
||||
mStream->set_volume(mStream, left, right));
|
||||
}
|
||||
|
||||
Return<void> StreamOut::prepareForWriting(uint32_t frameSize,
|
||||
uint32_t framesCount,
|
||||
prepareForWriting_cb _hidl_cb) {
|
||||
status_t status;
|
||||
ThreadInfo threadInfo = {0, 0};
|
||||
|
||||
// Wrap the _hidl_cb to return an error
|
||||
auto sendError = [this, &threadInfo, &_hidl_cb](Result result) {
|
||||
_hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(),
|
||||
StatusMQ::Descriptor(), threadInfo);
|
||||
|
||||
};
|
||||
|
||||
// Create message queues.
|
||||
if (mDataMQ) {
|
||||
ALOGE("the client attempts to call prepareForWriting twice");
|
||||
sendError(Result::INVALID_STATE);
|
||||
return Void();
|
||||
}
|
||||
std::unique_ptr<CommandMQ> tempCommandMQ(new CommandMQ(1));
|
||||
|
||||
// Check frameSize and framesCount
|
||||
if (frameSize == 0 || framesCount == 0) {
|
||||
ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize,
|
||||
framesCount);
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
if (frameSize > Stream::MAX_BUFFER_SIZE / framesCount) {
|
||||
ALOGE("Buffer too big: %u*%u bytes > MAX_BUFFER_SIZE (%u)", frameSize, framesCount,
|
||||
Stream::MAX_BUFFER_SIZE);
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
std::unique_ptr<DataMQ> tempDataMQ(
|
||||
new DataMQ(frameSize * framesCount, true /* EventFlag */));
|
||||
|
||||
std::unique_ptr<StatusMQ> tempStatusMQ(new StatusMQ(1));
|
||||
if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() ||
|
||||
!tempStatusMQ->isValid()) {
|
||||
ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid");
|
||||
ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid");
|
||||
ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid");
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
EventFlag* tempRawEfGroup{};
|
||||
status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(),
|
||||
&tempRawEfGroup);
|
||||
std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempElfGroup(
|
||||
tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); });
|
||||
if (status != OK || !tempElfGroup) {
|
||||
ALOGE("failed creating event flag for data MQ: %s", strerror(-status));
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
|
||||
// Create and launch the thread.
|
||||
auto tempWriteThread = std::make_unique<WriteThread>(
|
||||
&mStopWriteThread, mStream, tempCommandMQ.get(), tempDataMQ.get(),
|
||||
tempStatusMQ.get(), tempElfGroup.get());
|
||||
if (!tempWriteThread->init()) {
|
||||
ALOGW("failed to start writer thread: %s", strerror(-status));
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
status = tempWriteThread->run("writer", PRIORITY_URGENT_AUDIO);
|
||||
if (status != OK) {
|
||||
ALOGW("failed to start writer thread: %s", strerror(-status));
|
||||
sendError(Result::INVALID_ARGUMENTS);
|
||||
return Void();
|
||||
}
|
||||
|
||||
mCommandMQ = std::move(tempCommandMQ);
|
||||
mDataMQ = std::move(tempDataMQ);
|
||||
mStatusMQ = std::move(tempStatusMQ);
|
||||
mWriteThread = tempWriteThread.release();
|
||||
mEfGroup = tempElfGroup.release();
|
||||
threadInfo.pid = getpid();
|
||||
threadInfo.tid = mWriteThread->getTid();
|
||||
_hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(),
|
||||
*mStatusMQ->getDesc(), threadInfo);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getRenderPosition(getRenderPosition_cb _hidl_cb) {
|
||||
uint32_t halDspFrames;
|
||||
Result retval = Stream::analyzeStatus(
|
||||
"get_render_position",
|
||||
mStream->get_render_position(mStream, &halDspFrames));
|
||||
_hidl_cb(retval, halDspFrames);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getNextWriteTimestamp(
|
||||
getNextWriteTimestamp_cb _hidl_cb) {
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
int64_t timestampUs = 0;
|
||||
if (mStream->get_next_write_timestamp != NULL) {
|
||||
retval = Stream::analyzeStatus(
|
||||
"get_next_write_timestamp",
|
||||
mStream->get_next_write_timestamp(mStream, ×tampUs));
|
||||
}
|
||||
_hidl_cb(retval, timestampUs);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::setCallback(const sp<IStreamOutCallback>& callback) {
|
||||
if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
|
||||
int result = mStream->set_callback(mStream, StreamOut::asyncCallback, this);
|
||||
if (result == 0) {
|
||||
mCallback = callback;
|
||||
}
|
||||
return Stream::analyzeStatus("set_callback", result);
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::clearCallback() {
|
||||
if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
|
||||
mCallback.clear();
|
||||
return Result::OK;
|
||||
}
|
||||
|
||||
// static
|
||||
int StreamOut::asyncCallback(stream_callback_event_t event, void*,
|
||||
void* cookie) {
|
||||
wp<StreamOut> weakSelf(reinterpret_cast<StreamOut*>(cookie));
|
||||
sp<StreamOut> self = weakSelf.promote();
|
||||
if (self == nullptr || self->mCallback == nullptr) return 0;
|
||||
ALOGV("asyncCallback() event %d", event);
|
||||
switch (event) {
|
||||
case STREAM_CBK_EVENT_WRITE_READY:
|
||||
self->mCallback->onWriteReady();
|
||||
break;
|
||||
case STREAM_CBK_EVENT_DRAIN_READY:
|
||||
self->mCallback->onDrainReady();
|
||||
break;
|
||||
case STREAM_CBK_EVENT_ERROR:
|
||||
self->mCallback->onError();
|
||||
break;
|
||||
default:
|
||||
ALOGW("asyncCallback() unknown event %d", event);
|
||||
break;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
Return<void> StreamOut::supportsPauseAndResume(
|
||||
supportsPauseAndResume_cb _hidl_cb) {
|
||||
_hidl_cb(mStream->pause != NULL, mStream->resume != NULL);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::pause() {
|
||||
return mStream->pause != NULL
|
||||
? Stream::analyzeStatus("pause", mStream->pause(mStream))
|
||||
: Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::resume() {
|
||||
return mStream->resume != NULL
|
||||
? Stream::analyzeStatus("resume", mStream->resume(mStream))
|
||||
: Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<bool> StreamOut::supportsDrain() {
|
||||
return mStream->drain != NULL;
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::drain(AudioDrain type) {
|
||||
return mStream->drain != NULL
|
||||
? Stream::analyzeStatus(
|
||||
"drain",
|
||||
mStream->drain(mStream,
|
||||
static_cast<audio_drain_type_t>(type)))
|
||||
: Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::flush() {
|
||||
return mStream->flush != NULL
|
||||
? Stream::analyzeStatus("flush", mStream->flush(mStream))
|
||||
: Result::NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
// static
|
||||
Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream,
|
||||
uint64_t* frames,
|
||||
TimeSpec* timeStamp) {
|
||||
// Don't logspam on EINVAL--it's normal for get_presentation_position
|
||||
// to return it sometimes. EAGAIN may be returned by A2DP audio HAL
|
||||
// implementation. ENODATA can also be reported while the writer is
|
||||
// continuously querying it, but the stream has been stopped.
|
||||
static const std::vector<int> ignoredErrors{EINVAL, EAGAIN, ENODATA};
|
||||
Result retval(Result::NOT_SUPPORTED);
|
||||
if (stream->get_presentation_position == NULL) return retval;
|
||||
struct timespec halTimeStamp;
|
||||
retval = Stream::analyzeStatus("get_presentation_position",
|
||||
stream->get_presentation_position(stream, frames, &halTimeStamp),
|
||||
ignoredErrors);
|
||||
if (retval == Result::OK) {
|
||||
timeStamp->tvSec = halTimeStamp.tv_sec;
|
||||
timeStamp->tvNSec = halTimeStamp.tv_nsec;
|
||||
}
|
||||
return retval;
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getPresentationPosition(
|
||||
getPresentationPosition_cb _hidl_cb) {
|
||||
uint64_t frames = 0;
|
||||
TimeSpec timeStamp = {0, 0};
|
||||
Result retval = getPresentationPositionImpl(mStream, &frames, &timeStamp);
|
||||
_hidl_cb(retval, frames, timeStamp);
|
||||
return Void();
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::start() {
|
||||
return mStreamMmap->start();
|
||||
}
|
||||
|
||||
Return<Result> StreamOut::stop() {
|
||||
return mStreamMmap->stop();
|
||||
}
|
||||
|
||||
Return<void> StreamOut::createMmapBuffer(int32_t minSizeFrames,
|
||||
createMmapBuffer_cb _hidl_cb) {
|
||||
return mStreamMmap->createMmapBuffer(
|
||||
minSizeFrames, audio_stream_out_frame_size(mStream), _hidl_cb);
|
||||
}
|
||||
|
||||
Return<void> StreamOut::getMmapPosition(getMmapPosition_cb _hidl_cb) {
|
||||
return mStreamMmap->getMmapPosition(_hidl_cb);
|
||||
}
|
||||
|
||||
} // namespace implementation
|
||||
} // namespace V2_0
|
||||
} // namespace audio
|
||||
} // namespace hardware
|
||||
} // namespace android
|
139
android/hardware/interfaces/audio/2.0/default/StreamOut.h
Normal file
@ -0,0 +1,139 @@
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H
#define ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H

#include <atomic>
#include <memory>

#include <android/hardware/audio/2.0/IStreamOut.h>
#include <hidl/MQDescriptor.h>
#include <hidl/Status.h>
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
#include <utils/Thread.h>

#include "Device.h"
#include "Stream.h"

namespace android {
namespace hardware {
namespace audio {
namespace V2_0 {
namespace implementation {

using ::android::hardware::audio::common::V2_0::AudioChannelMask;
using ::android::hardware::audio::common::V2_0::AudioDevice;
using ::android::hardware::audio::common::V2_0::AudioFormat;
using ::android::hardware::audio::V2_0::AudioDrain;
using ::android::hardware::audio::V2_0::DeviceAddress;
using ::android::hardware::audio::V2_0::IStream;
using ::android::hardware::audio::V2_0::IStreamOut;
using ::android::hardware::audio::V2_0::IStreamOutCallback;
using ::android::hardware::audio::V2_0::ParameterValue;
using ::android::hardware::audio::V2_0::Result;
using ::android::hardware::audio::V2_0::TimeSpec;
using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::hidl_vec;
using ::android::hardware::hidl_string;
using ::android::sp;

struct StreamOut : public IStreamOut {
    typedef MessageQueue<WriteCommand, kSynchronizedReadWrite> CommandMQ;
    typedef MessageQueue<uint8_t, kSynchronizedReadWrite> DataMQ;
    typedef MessageQueue<WriteStatus, kSynchronizedReadWrite> StatusMQ;

    StreamOut(const sp<Device>& device, audio_stream_out_t* stream);

    // Methods from ::android::hardware::audio::V2_0::IStream follow.
    Return<uint64_t> getFrameSize() override;
    Return<uint64_t> getFrameCount() override;
    Return<uint64_t> getBufferSize() override;
    Return<uint32_t> getSampleRate() override;
    Return<void> getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override;
    Return<Result> setSampleRate(uint32_t sampleRateHz) override;
    Return<AudioChannelMask> getChannelMask() override;
    Return<void> getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override;
    Return<Result> setChannelMask(AudioChannelMask mask) override;
    Return<AudioFormat> getFormat() override;
    Return<void> getSupportedFormats(getSupportedFormats_cb _hidl_cb) override;
    Return<Result> setFormat(AudioFormat format) override;
    Return<void> getAudioProperties(getAudioProperties_cb _hidl_cb) override;
    Return<Result> addEffect(uint64_t effectId) override;
    Return<Result> removeEffect(uint64_t effectId) override;
    Return<Result> standby() override;
    Return<AudioDevice> getDevice() override;
    Return<Result> setDevice(const DeviceAddress& address) override;
    Return<Result> setConnectedState(const DeviceAddress& address, bool connected) override;
    Return<Result> setHwAvSync(uint32_t hwAvSync) override;
    Return<void> getParameters(
        const hidl_vec<hidl_string>& keys, getParameters_cb _hidl_cb) override;
    Return<Result> setParameters(const hidl_vec<ParameterValue>& parameters) override;
    Return<void> debugDump(const hidl_handle& fd) override;
    Return<Result> close() override;

    // Methods from ::android::hardware::audio::V2_0::IStreamOut follow.
    Return<uint32_t> getLatency() override;
    Return<Result> setVolume(float left, float right) override;
    Return<void> prepareForWriting(
        uint32_t frameSize, uint32_t framesCount, prepareForWriting_cb _hidl_cb) override;
    Return<void> getRenderPosition(getRenderPosition_cb _hidl_cb) override;
    Return<void> getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) override;
    Return<Result> setCallback(const sp<IStreamOutCallback>& callback) override;
    Return<Result> clearCallback() override;
    Return<void> supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) override;
    Return<Result> pause() override;
    Return<Result> resume() override;
    Return<bool> supportsDrain() override;
    Return<Result> drain(AudioDrain type) override;
    Return<Result> flush() override;
    Return<void> getPresentationPosition(getPresentationPosition_cb _hidl_cb) override;
    Return<Result> start() override;
    Return<Result> stop() override;
    Return<void> createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override;
    Return<void> getMmapPosition(getMmapPosition_cb _hidl_cb) override;

    static Result getPresentationPositionImpl(
        audio_stream_out_t *stream, uint64_t *frames, TimeSpec *timeStamp);

  private:
    bool mIsClosed;
    const sp<Device> mDevice;
    audio_stream_out_t *mStream;
    const sp<Stream> mStreamCommon;
    const sp<StreamMmap<audio_stream_out_t>> mStreamMmap;
    sp<IStreamOutCallback> mCallback;
    std::unique_ptr<CommandMQ> mCommandMQ;
    std::unique_ptr<DataMQ> mDataMQ;
    std::unique_ptr<StatusMQ> mStatusMQ;
    EventFlag* mEfGroup;
    std::atomic<bool> mStopWriteThread;
    sp<Thread> mWriteThread;

    virtual ~StreamOut();

    static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
};

}  // namespace implementation
}  // namespace V2_0
}  // namespace audio
}  // namespace hardware
}  // namespace android

#endif  // ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H
37
android/hardware/interfaces/audio/2.0/default/Util.h
Normal file
@ -0,0 +1,37 @@
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H
#define ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H

namespace android {
namespace hardware {
namespace audio {
namespace V2_0 {
namespace implementation {

/** @return true if gain is between 0 and 1 included. */
constexpr bool isGainNormalized(float gain) {
    return gain >= 0.0 && gain <= 1.0;
}

}  // namespace implementation
}  // namespace V2_0
}  // namespace audio
}  // namespace hardware
}  // namespace android

#endif  // ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H
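Editor's note: because isGainNormalized() above is constexpr, its range check can be exercised both at compile time and at runtime. The following is a minimal, hypothetical usage sketch (not part of this commit) showing how a caller such as StreamOut::setVolume can rely on it; the main() driver and its variable names are illustrative only.

// Hypothetical usage sketch, not part of this commit.
#include <cassert>
#include "Util.h"

using ::android::hardware::audio::V2_0::implementation::isGainNormalized;

// Compile-time checks of the constexpr helper.
static_assert(isGainNormalized(0.0f), "0.0 is a valid normalized gain");
static_assert(isGainNormalized(1.0f), "1.0 is a valid normalized gain");
static_assert(!isGainNormalized(1.5f), "values above 1.0 must be rejected");

int main() {
    float requested = 0.25f;  // e.g. a volume value received from a client
    assert(isGainNormalized(requested));  // runtime guard, as in setVolume()
    return 0;
}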
@ -0,0 +1,11 @@
service audio-hal-2-0 /vendor/bin/hw/android.hardware.audio@2.0-service
    class hal
    user audioserver
    # media gid needed for /dev/fm (radio) and for /data/misc/media (tee)
    group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct
    ioprio rt 4
    writepid /dev/cpuset/foreground/tasks /dev/stune/foreground/tasks
    # audioflinger restarts itself when it loses connection with the hal
    # and its .rc file has an "onrestart restart audio-hal" rule, thus
    # an additional auto-restart from the init process isn't needed.
    oneshot
48
android/hardware/interfaces/audio/2.0/default/service.cpp
Normal file
@ -0,0 +1,48 @@
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "audiohalservice"

#include <hidl/HidlTransportSupport.h>
#include <hidl/LegacySupport.h>
#include <android/hardware/audio/2.0/IDevicesFactory.h>
#include <android/hardware/audio/effect/2.0/IEffectsFactory.h>
#include <android/hardware/soundtrigger/2.0/ISoundTriggerHw.h>

using android::hardware::configureRpcThreadpool;
using android::hardware::joinRpcThreadpool;
using android::hardware::registerPassthroughServiceImplementation;

using android::hardware::audio::effect::V2_0::IEffectsFactory;
using android::hardware::audio::V2_0::IDevicesFactory;
using android::hardware::soundtrigger::V2_0::ISoundTriggerHw;

using android::OK;

int main(int /* argc */, char* /* argv */ []) {
    configureRpcThreadpool(16, true /*callerWillJoin*/);
    android::status_t status;
    status = registerPassthroughServiceImplementation<IDevicesFactory>();
    LOG_ALWAYS_FATAL_IF(status != OK, "Error while registering audio service: %d", status);
    status = registerPassthroughServiceImplementation<IEffectsFactory>();
    LOG_ALWAYS_FATAL_IF(status != OK, "Error while registering audio effects service: %d", status);
    // Soundtrigger might not be present.
    status = registerPassthroughServiceImplementation<ISoundTriggerHw>();
    ALOGE_IF(status != OK, "Error while registering soundtrigger service: %d", status);
    joinRpcThreadpool();
    return status;
}
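Editor's note: for reference, a client process would typically reach the passthrough factory registered above through the HIDL proxy that hidl-gen generates for IDevicesFactory. The following is a minimal client-side sketch, not part of this commit; it assumes only the standard generated getService() lookup of the "default" instance.

// Hypothetical client-side sketch, not part of this commit.
#include <android/hardware/audio/2.0/IDevicesFactory.h>
#include <utils/StrongPointer.h>

using ::android::sp;
using ::android::hardware::audio::V2_0::IDevicesFactory;

int main() {
    // getService() is generated by hidl-gen for every HIDL interface;
    // with no argument it looks up the "default" service instance.
    sp<IDevicesFactory> factory = IDevicesFactory::getService();
    if (factory == nullptr) {
        // The audio HAL service is not registered (e.g. it failed to start).
        return 1;
    }
    // From here the factory can open IDevice instances such as the primary
    // device; see IDevicesFactory.hal for the openDevice() contract.
    return 0;
}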
102
android/hardware/interfaces/audio/2.0/types.hal
Normal file
@ -0,0 +1,102 @@
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@2.0;

import android.hardware.audio.common@2.0;

enum Result : int32_t {
    OK,
    NOT_INITIALIZED,
    INVALID_ARGUMENTS,
    INVALID_STATE,
    NOT_SUPPORTED
};

@export(name="audio_drain_type_t", value_prefix="AUDIO_DRAIN_")
enum AudioDrain : int32_t {
    /** drain() returns when all data has been played. */
    ALL,
    /**
     * drain() returns a short time before all data from the current track has
     * been played to give time for gapless track switch.
     */
    EARLY_NOTIFY
};

/**
 * A substitute for POSIX timespec.
 */
struct TimeSpec {
    uint64_t tvSec;   // seconds
    uint64_t tvNSec;  // nanoseconds
};

/**
 * IEEE 802 MAC address.
 */
typedef uint8_t[6] MacAddress;

struct ParameterValue {
    string key;
    string value;
};

/**
 * Specifies a device in case when several devices of the same type
 * can be connected (e.g. BT A2DP, USB).
 */
struct DeviceAddress {
    AudioDevice device;  // discriminator
    union Address {
        MacAddress mac;   // used for BLUETOOTH_A2DP_*
        uint8_t[4] ipv4;  // used for IP
        struct Alsa {
            int32_t card;
            int32_t device;
        } alsa;           // used for USB_*
    } address;
    string busAddress;       // used for BUS
    string rSubmixAddress;   // used for REMOTE_SUBMIX
};

/**
 * Mmap buffer descriptor returned by IStream.createMmapBuffer().
 * Used by streams opened in mmap mode.
 */
struct MmapBufferInfo {
    memory  sharedMemory;      // mmap memory buffer
    int32_t bufferSizeFrames;  // total buffer size in frames
    int32_t burstSizeFrames;   // transfer size granularity in frames
};

/**
 * Mmap buffer read/write position returned by IStream.getMmapPosition().
 * Used by streams opened in mmap mode.
 */
struct MmapPosition {
    int64_t timeNanoseconds;  // time stamp in ns, CLOCK_MONOTONIC
    int32_t positionFrames;   // increasing 32 bit frame count reset when IStream.stop() is called
};

/**
 * The message queue flags used to synchronize reads and writes from
 * the message queues used by StreamIn and StreamOut.
 */
enum MessageQueueFlagBits : uint32_t {
    NOT_EMPTY = 1 << 0,
    NOT_FULL = 1 << 1
};
@ -0,0 +1,33 @@
//
// Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

cc_test {
    name: "VtsHalAudioV2_0TargetTest",
    defaults: ["VtsHalTargetTestDefaults"],
    srcs: [
        "AudioPrimaryHidlHalTest.cpp",
        "ValidateAudioConfiguration.cpp",
    ],
    static_libs: [
        "android.hardware.audio.common.test.utility",
        "android.hardware.audio@2.0",
        "android.hardware.audio.common@2.0",
        "libxml2",
    ],
    shared_libs: [
        "libicuuc",
    ],
}
File diff suppressed because it is too large
@ -0,0 +1,34 @@
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <string>
#include <unistd.h>

#include "utility/ValidateXml.h"

TEST(CheckConfig, audioPolicyConfigurationValidation) {
    const char* configName = "audio_policy_configuration.xml";
    const char* possibleConfigLocations[] = {"/odm/etc", "/vendor/etc", "/system/etc"};
    const char* configSchemaPath = "/data/local/tmp/audio_policy_configuration.xsd";

    for (std::string folder : possibleConfigLocations) {
        const auto configPath = folder + '/' + configName;
        if (access(configPath.c_str(), R_OK) == 0) {
            ASSERT_VALID_XML(configPath.c_str(), configSchemaPath);
            return;  // The framework does not read past the first config file found
        }
    }
}