upload android base code part6

August 2018-08-08 17:48:24 +08:00
parent 421e214c7d
commit 4e516ec6ed
35396 changed files with 9188716 additions and 0 deletions

@@ -0,0 +1,14 @@
#ifndef HW_EMULATOR_CAMERA_ALIGNMENT_H
#define HW_EMULATOR_CAMERA_ALIGNMENT_H
namespace android {
// Rounds |value| up to the nearest multiple of |alignment|, leaving it
// unchanged if it is already aligned. |alignment| has to be a power of 2.
inline int align(int value, int alignment) {
return (value + alignment - 1) & (~(alignment - 1));
}
} // namespace android
#endif // HW_EMULATOR_CAMERA_ALIGNMENT_H
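
A quick illustration, not part of this commit (the function name is invented): align() is what the YV12 converters later in this commit use to compute their 16-byte-aligned strides.

#include "Alignment.h"
// Hypothetical example: align(321, 16) == 336, the next multiple of 16, and
// the chroma stride align(336 / 2, 16) comes out to 176.
static int ExampleYV12Strides(int width) {
    const int y_stride = android::align(width, 16);          // 321 -> 336
    const int uv_stride = android::align(y_stride / 2, 16);  // 168 -> 176
    return y_stride + uv_stride;
}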

@@ -0,0 +1,123 @@
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ifneq ($(filter generic_x86 generic_x86_64 generic generic_arm64 generic_mips generic_mips64, $(TARGET_DEVICE)),)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Emulator camera module########################################################
emulator_camera_module_relative_path := hw
emulator_camera_cflags := -fno-short-enums -DQEMU_HARDWARE
emulator_camera_cflags += -Wno-unused-parameter -Wno-missing-field-initializers
emulator_camera_clang_flags := -Wno-c++11-narrowing
emulator_camera_shared_libraries := \
libbinder \
libexif \
liblog \
libutils \
libcutils \
libui \
libdl \
libjpeg \
libcamera_metadata \
libhardware
emulator_camera_static_libraries := \
android.hardware.camera.common@1.0-helper \
libyuv_static
emulator_camera_c_includes := external/libjpeg-turbo \
external/libexif \
external/libyuv/files/include \
frameworks/native/include/media/hardware \
$(LOCAL_PATH)/../include \
$(LOCAL_PATH)/../../goldfish-opengl/system/OpenglSystemCommon \
$(call include-path-for, camera)
emulator_camera_src := \
EmulatedCameraHal.cpp \
EmulatedCameraFactory.cpp \
EmulatedCameraHotplugThread.cpp \
EmulatedBaseCamera.cpp \
EmulatedCamera.cpp \
EmulatedCameraDevice.cpp \
EmulatedQemuCamera.cpp \
EmulatedQemuCameraDevice.cpp \
EmulatedFakeCamera.cpp \
EmulatedFakeCameraDevice.cpp \
Converters.cpp \
PreviewWindow.cpp \
CallbackNotifier.cpp \
QemuClient.cpp \
JpegCompressor.cpp \
EmulatedCamera2.cpp \
EmulatedFakeCamera2.cpp \
EmulatedQemuCamera2.cpp \
fake-pipeline2/Scene.cpp \
fake-pipeline2/Sensor.cpp \
fake-pipeline2/JpegCompressor.cpp \
EmulatedCamera3.cpp \
EmulatedFakeCamera3.cpp \
Exif.cpp \
Thumbnail.cpp \
WorkerThread.cpp \
# Emulated camera - goldfish / vbox_x86 build###################################
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_RELATIVE_PATH := ${emulator_camera_module_relative_path}
LOCAL_CFLAGS := ${emulator_camera_cflags}
LOCAL_CLANG_CFLAGS += ${emulator_camera_clang_flags}
LOCAL_SHARED_LIBRARIES := ${emulator_camera_shared_libraries}
LOCAL_STATIC_LIBRARIES := ${emulator_camera_static_libraries}
LOCAL_C_INCLUDES += ${emulator_camera_c_includes}
LOCAL_SRC_FILES := ${emulator_camera_src}
ifeq ($(TARGET_BOARD_PLATFORM),brilloemulator)
LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM)
else ifeq ($(TARGET_PRODUCT),vbox_x86)
LOCAL_MODULE := camera.vbox_x86
else
LOCAL_MODULE := camera.goldfish
endif
include $(BUILD_SHARED_LIBRARY)
# Emulator camera - ranchu build################################################
include ${CLEAR_VARS}
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_RELATIVE_PATH := ${emulator_camera_module_relative_path}
LOCAL_CFLAGS := ${emulator_camera_cflags}
LOCAL_CLANG_CFLAGS += ${emulator_camera_clang_flags}
LOCAL_SHARED_LIBRARIES := ${emulator_camera_shared_libraries}
LOCAL_STATIC_LIBRARIES := ${emulator_camera_static_libraries}
LOCAL_C_INCLUDES += ${emulator_camera_c_includes}
LOCAL_SRC_FILES := ${emulator_camera_src}
LOCAL_MODULE := camera.ranchu
include $(BUILD_SHARED_LIBRARY)
# Build all subdirectories #####################################################
include $(call all-makefiles-under,$(LOCAL_PATH))
endif
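
For context on the module names above: the framework loads a camera HAL by name via hw_get_module(), which resolves camera.<ro.hardware>.so, so the goldfish / ranchu / vbox_x86 suffixes must match the emulator's ro.hardware values. A minimal hypothetical loading sketch follows (not part of this commit; loadEmulatorCameraHal is an invented name):

#include <hardware/camera_common.h>
#include <hardware/hardware.h>
// Hypothetical sketch: how a client would locate one of the modules above.
static const camera_module_t* loadEmulatorCameraHal() {
    const hw_module_t* module = NULL;
    // CAMERA_HARDWARE_MODULE_ID is "camera"; the suffix comes from ro.hardware,
    // e.g. camera.goldfish.so or camera.ranchu.so under /vendor/lib/hw.
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &module) != 0) {
        return NULL;  // module missing or failed to load
    }
    return reinterpret_cast<const camera_module_t*>(module);
}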

@@ -0,0 +1,354 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class CallbackNotifier that manages callbacks set
* via the set_callbacks, enable_msg_type, and disable_msg_type camera HAL APIs.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_CallbackNotifier"
#include <cutils/log.h>
#include <MetadataBufferType.h>
#include "EmulatedCameraDevice.h"
#undef min
#undef max
#include "CallbackNotifier.h"
#include "Exif.h"
#include "JpegCompressor.h"
#include "Thumbnail.h"
namespace android {
/* String representation of camera messages. */
static const char* lCameraMessages[] =
{
"CAMERA_MSG_ERROR",
"CAMERA_MSG_SHUTTER",
"CAMERA_MSG_FOCUS",
"CAMERA_MSG_ZOOM",
"CAMERA_MSG_PREVIEW_FRAME",
"CAMERA_MSG_VIDEO_FRAME",
"CAMERA_MSG_POSTVIEW_FRAME",
"CAMERA_MSG_RAW_IMAGE",
"CAMERA_MSG_COMPRESSED_IMAGE",
"CAMERA_MSG_RAW_IMAGE_NOTIFY",
"CAMERA_MSG_PREVIEW_METADATA"
};
static const int lCameraMessagesNum = sizeof(lCameraMessages) / sizeof(char*);
/* Builds an array of strings for the given set of messages.
* Param:
* msg - Messages to get strings for.
* strings - Array where the strings are saved.
* max - Maximum number of entries in the array.
* Return:
* Number of strings saved into the 'strings' array.
*/
static int GetMessageStrings(uint32_t msg, const char** strings, int max)
{
int index = 0;
int out = 0;
while (msg != 0 && out < max && index < lCameraMessagesNum) {
while ((msg & 0x1) == 0 && index < lCameraMessagesNum) {
msg >>= 1;
index++;
}
if ((msg & 0x1) != 0 && index < lCameraMessagesNum) {
strings[out] = lCameraMessages[index];
out++;
msg >>= 1;
index++;
}
}
return out;
}
/* Logs messages, enabled by the mask. */
static void PrintMessages(uint32_t msg)
{
const char* strs[lCameraMessagesNum];
const int translated = GetMessageStrings(msg, strs, lCameraMessagesNum);
for (int n = 0; n < translated; n++) {
ALOGV(" %s", strs[n]);
}
}
CallbackNotifier::CallbackNotifier()
: mNotifyCB(NULL),
mDataCB(NULL),
mDataCBTimestamp(NULL),
mGetMemoryCB(NULL),
mCBOpaque(NULL),
mLastFrameTimestamp(0),
mFrameRefreshFreq(0),
mMessageEnabler(0),
mJpegQuality(90),
mVideoRecEnabled(false),
mTakingPicture(false)
{
}
CallbackNotifier::~CallbackNotifier()
{
}
/****************************************************************************
* Camera API
***************************************************************************/
void CallbackNotifier::setCallbacks(camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void* user)
{
ALOGV("%s: %p, %p, %p, %p (%p)",
__FUNCTION__, notify_cb, data_cb, data_cb_timestamp, get_memory, user);
Mutex::Autolock locker(&mObjectLock);
mNotifyCB = notify_cb;
mDataCB = data_cb;
mDataCBTimestamp = data_cb_timestamp;
mGetMemoryCB = get_memory;
mCBOpaque = user;
}
void CallbackNotifier::enableMessage(uint msg_type)
{
ALOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
PrintMessages(msg_type);
Mutex::Autolock locker(&mObjectLock);
mMessageEnabler |= msg_type;
ALOGV("**** Currently enabled messages:");
PrintMessages(mMessageEnabler);
}
void CallbackNotifier::disableMessage(uint msg_type)
{
ALOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
PrintMessages(msg_type);
Mutex::Autolock locker(&mObjectLock);
mMessageEnabler &= ~msg_type;
ALOGV("**** Currently enabled messages:");
PrintMessages(mMessageEnabler);
}
status_t CallbackNotifier::enableVideoRecording(int fps)
{
ALOGV("%s: FPS = %d", __FUNCTION__, fps);
Mutex::Autolock locker(&mObjectLock);
mVideoRecEnabled = true;
mLastFrameTimestamp = 0;
mFrameRefreshFreq = 1000000000LL / fps;
return NO_ERROR;
}
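// Editor's note (illustrative, not part of this commit): at 30 fps the line
// above computes mFrameRefreshFreq = 1000000000LL / 30 = 33333333 ns
// (~33.3 ms), so isNewVideoFrameTime() near the end of this file accepts at
// most ~30 frames per second for the encoder path.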
void CallbackNotifier::disableVideoRecording()
{
ALOGV("%s:", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
mVideoRecEnabled = false;
mLastFrameTimestamp = 0;
mFrameRefreshFreq = 0;
}
void CallbackNotifier::releaseRecordingFrame(const void* opaque)
{
List<camera_memory_t*>::iterator it = mCameraMemoryTs.begin();
for( ; it != mCameraMemoryTs.end(); ++it ) {
if ( (*it)->data == opaque ) {
(*it)->release( *it );
mCameraMemoryTs.erase(it);
break;
}
}
}
void CallbackNotifier::autoFocusComplete() {
// Even though we don't support auto-focus we are expected to send a fake
// success message according to the documentation.
// https://developer.android.com/reference/android/hardware/Camera.AutoFocusCallback.html
mNotifyCB(CAMERA_MSG_FOCUS, true, 0, mCBOpaque);
}
status_t CallbackNotifier::storeMetaDataInBuffers(bool enable)
{
// Return an error if metadata is requested, otherwise silently agree.
return enable ? INVALID_OPERATION : NO_ERROR;
}
/****************************************************************************
* Public API
***************************************************************************/
void CallbackNotifier::cleanupCBNotifier()
{
Mutex::Autolock locker(&mObjectLock);
mMessageEnabler = 0;
mNotifyCB = NULL;
mDataCB = NULL;
mDataCBTimestamp = NULL;
mGetMemoryCB = NULL;
mCBOpaque = NULL;
mLastFrameTimestamp = 0;
mFrameRefreshFreq = 0;
mJpegQuality = 90;
mVideoRecEnabled = false;
mTakingPicture = false;
}
void CallbackNotifier::onNextFrameAvailable(nsecs_t timestamp,
EmulatedCameraDevice* camera_dev)
{
if (isMessageEnabled(CAMERA_MSG_VIDEO_FRAME) && isVideoRecordingEnabled() &&
isNewVideoFrameTime(timestamp)) {
// This is the path for video frames, the format used here is not
// exposed to external users so it can be whatever the camera and the
// encoder can agree upon. The emulator system images use software
// encoders that expect a YUV420 format but the camera parameter
// constants cannot represent this. The closest we have is YV12 which is
// YVU420. So we produce YV12 frames so that we can serve those through
// the preview callback below and then we convert from YV12 to YUV420
// here. This is a pretty cheap conversion in most cases since we have
// to copy the frame here anyway. In the best (and most common) cases
// the conversion is just copying the U and V parts of the frame in
// different order. A slightly more expensive case is when the YV12
// frame has padding to ensure that rows are aligned to 16-byte
* boundaries. The YUV420 format expected by the encoders does not have
* this alignment, so it has to be removed. This way the encoder gets the
// format it expects and the preview callback (or data callback) below
// gets the format that is configured in camera parameters.
const size_t frameSize = camera_dev->getVideoFrameBufferSize();
camera_memory_t* cam_buff = mGetMemoryCB(-1, frameSize, 1, mCBOpaque);
if (NULL != cam_buff && NULL != cam_buff->data) {
camera_dev->getCurrentFrame(cam_buff->data, V4L2_PIX_FMT_YUV420);
mDataCBTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME,
cam_buff, 0, mCBOpaque);
mCameraMemoryTs.push_back( cam_buff );
} else {
ALOGE("%s: Memory failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
}
}
if (isMessageEnabled(CAMERA_MSG_PREVIEW_FRAME)) {
camera_memory_t* cam_buff =
mGetMemoryCB(-1, camera_dev->getFrameBufferSize(), 1, mCBOpaque);
if (NULL != cam_buff && NULL != cam_buff->data) {
camera_dev->getCurrentFrame(cam_buff->data,
camera_dev->getOriginalPixelFormat());
mDataCB(CAMERA_MSG_PREVIEW_FRAME, cam_buff, 0, NULL, mCBOpaque);
cam_buff->release(cam_buff);
} else {
ALOGE("%s: Memory failure in CAMERA_MSG_PREVIEW_FRAME", __FUNCTION__);
}
}
if (mTakingPicture) {
/* This happens just once. */
mTakingPicture = false;
/* The sequence of callbacks during picture taking is:
* - CAMERA_MSG_SHUTTER
* - CAMERA_MSG_RAW_IMAGE_NOTIFY
* - CAMERA_MSG_COMPRESSED_IMAGE
*/
if (isMessageEnabled(CAMERA_MSG_SHUTTER)) {
mNotifyCB(CAMERA_MSG_SHUTTER, 0, 0, mCBOpaque);
}
if (isMessageEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
mNotifyCB(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCBOpaque);
}
if (isMessageEnabled(CAMERA_MSG_COMPRESSED_IMAGE)) {
// Create EXIF data from the camera parameters, this includes things
// like EXIF default fields, a timestamp and GPS information.
ExifData* exifData = createExifData(mCameraParameters);
// Hold the frame lock while accessing the current frame to prevent
// concurrent modifications. Then create our JPEG from that frame.
EmulatedCameraDevice::FrameLock lock(*camera_dev);
const void* frame = camera_dev->getCurrentFrame();
// Create a thumbnail and place the pointer and size in the EXIF
// data structure. This transfers ownership to the EXIF data and
// the memory will be deallocated in the freeExifData call below.
int width = camera_dev->getFrameWidth();
int height = camera_dev->getFrameHeight();
int thumbWidth = mCameraParameters.getInt(
CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
int thumbHeight = mCameraParameters.getInt(
CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
if (thumbWidth > 0 && thumbHeight > 0) {
if (!createThumbnail(static_cast<const unsigned char*>(frame),
width, height, thumbWidth, thumbHeight,
mJpegQuality, exifData)) {
// Not really a fatal error; we'll just keep going
ALOGE("%s: Failed to create thumbnail for image",
__FUNCTION__);
}
}
/* Compress the frame to JPEG. Note that when taking pictures, we
* have requested the camera device to provide us with NV21 frames. */
NV21JpegCompressor compressor;
status_t res = compressor.compressRawImage(frame, width, height,
mJpegQuality, exifData);
if (res == NO_ERROR) {
camera_memory_t* jpeg_buff =
mGetMemoryCB(-1, compressor.getCompressedSize(), 1, mCBOpaque);
if (NULL != jpeg_buff && NULL != jpeg_buff->data) {
compressor.getCompressedImage(jpeg_buff->data);
mDataCB(CAMERA_MSG_COMPRESSED_IMAGE, jpeg_buff, 0, NULL, mCBOpaque);
jpeg_buff->release(jpeg_buff);
} else {
ALOGE("%s: Memory failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
}
} else {
ALOGE("%s: Compression failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
}
// The EXIF data has been consumed; free it
freeExifData(exifData);
}
}
}
void CallbackNotifier::onCameraDeviceError(int err)
{
if (isMessageEnabled(CAMERA_MSG_ERROR) && mNotifyCB != NULL) {
mNotifyCB(CAMERA_MSG_ERROR, err, 0, mCBOpaque);
}
}
/****************************************************************************
* Private API
***************************************************************************/
bool CallbackNotifier::isNewVideoFrameTime(nsecs_t timestamp)
{
Mutex::Autolock locker(&mObjectLock);
if ((timestamp - mLastFrameTimestamp) >= mFrameRefreshFreq) {
mLastFrameTimestamp = timestamp;
return true;
}
return false;
}
}; /* namespace android */

@@ -0,0 +1,255 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_CALLBACK_NOTIFIER_H
#define HW_EMULATOR_CAMERA_CALLBACK_NOTIFIER_H
/*
* Contains declaration of a class CallbackNotifier that manages callbacks set
* via the set_callbacks, enable_msg_type, and disable_msg_type camera HAL APIs.
*/
#include <utils/List.h>
#include <CameraParameters.h>
using ::android::hardware::camera::common::V1_0::helper::CameraParameters;
using ::android::hardware::camera::common::V1_0::helper::Size;
namespace android {
class EmulatedCameraDevice;
class FrameProducer;
/* Manages callbacks set via set_callbacks, enable_msg_type, and disable_msg_type
* camera HAL APIs.
*
* Objects of this class are contained in EmulatedCamera objects, and handle
* relevant camera API callbacks.
* Locking considerations: callbacks registered in this class must not be
* invoked while a lock is held, since a re-entrant call back into this class
* would deadlock.
*/
class CallbackNotifier {
public:
/* Constructs CallbackNotifier instance. */
CallbackNotifier();
/* Destructs CallbackNotifier instance. */
~CallbackNotifier();
/****************************************************************************
* Camera API
***************************************************************************/
public:
/* Actual handler for camera_device_ops_t::set_callbacks callback.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::set_callbacks callback.
*/
void setCallbacks(camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void* user);
/* Actual handler for camera_device_ops_t::enable_msg_type callback.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::enable_msg_type callback.
*/
void enableMessage(uint msg_type);
/* Actual handler for camera_device_ops_t::disable_msg_type callback.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::disable_msg_type callback.
*/
void disableMessage(uint msg_type);
/* Actual handler for camera_device_ops_t::store_meta_data_in_buffers
* callback. This method is called by the containing emulated camera object
* when it is handling the camera_device_ops_t::store_meta_data_in_buffers
* callback.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
status_t storeMetaDataInBuffers(bool enable);
/* Enables video recording.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::start_recording callback.
* Param:
* fps - Video frame frequency. This parameter determines when a frame
* received via onNextFrameAvailable call will be pushed through the
* callback.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
status_t enableVideoRecording(int fps);
/* Disables video recording.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::stop_recording callback.
*/
void disableVideoRecording();
/* Releases a video frame that was previously sent to the framework.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::release_recording_frame callback.
*/
void releaseRecordingFrame(const void* opaque);
/* Send a message to the notify callback that auto-focus has completed.
* This method is called from the containing emulated camera object when it
* has received confirmation from the camera device that auto-focusing is
* completed.
*/
void autoFocusComplete();
/* Actual handler for camera_device_ops_t::msg_type_enabled callback.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::msg_type_enabled callback.
* Note: this method doesn't grab a lock while checking message status, since
* upon exit the status would be undefined anyway. So, grab a lock before
* calling this method if you care about persisting a defined message status.
* Return:
* 0 if message is disabled, or non-zero value, if message is enabled.
*/
inline int isMessageEnabled(uint msg_type)
{
return mMessageEnabler & msg_type;
}
/* Checks if video recording is enabled.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::recording_enabled callback.
* Note: this method doesn't grab a lock while checking video recording status,
* since upon exit the status would be undefined anyway. So, grab a lock
* before calling this method if you care about persisting a defined video
* recording status.
* Return:
* true if video recording is enabled, or false if it is disabled.
*/
inline bool isVideoRecordingEnabled() const
{
return mVideoRecEnabled;
}
/****************************************************************************
* Public API
***************************************************************************/
public:
/* Resets the callback notifier. */
void cleanupCBNotifier();
/* Next frame is available in the camera device.
* This is a notification callback that is invoked by the camera device when
* a new frame is available. The captured frame is available through the
* |camera_dev| object.
* Note that most likely this method is called in the context of a worker
* thread that the camera device has created for frame capturing.
* Param:
* timestamp - Frame's timestamp.
* camera_dev - Camera device instance that delivered the frame.
*/
void onNextFrameAvailable(nsecs_t timestamp,
EmulatedCameraDevice* camera_dev);
/* Entry point for notifications that occur in camera device.
* Param:
* err - CAMERA_ERROR_XXX error code.
*/
void onCameraDeviceError(int err);
/* Sets or resets the taking-picture state.
* This state controls whether or not to notify the framework about compressed
* image, shutter, and other picture-related events.
*/
void setTakingPicture(bool taking)
{
mTakingPicture = taking;
}
/* Sets JPEG quality used to compress frame during picture taking. */
void setJpegQuality(int jpeg_quality)
{
mJpegQuality = jpeg_quality;
}
/* Sets the camera parameters that will be used to populate exif data in the
* picture.
*/
void setCameraParameters(CameraParameters cameraParameters)
{
mCameraParameters = cameraParameters;
}
/****************************************************************************
* Private API
***************************************************************************/
protected:
/* Checks if it's time to push a new video frame.
* Note that this method must be called while object is locked.
* Param:
* timestamp - Timestamp for the new frame. */
bool isNewVideoFrameTime(nsecs_t timestamp);
/****************************************************************************
* Data members
***************************************************************************/
protected:
/* Locks this instance for data change. */
Mutex mObjectLock;
/*
* Callbacks, registered in set_callbacks.
*/
camera_notify_callback mNotifyCB;
camera_data_callback mDataCB;
camera_data_timestamp_callback mDataCBTimestamp;
camera_request_memory mGetMemoryCB;
void* mCBOpaque;
/* video frame queue for the CameraHeapMemory destruction */
List<camera_memory_t*> mCameraMemoryTs;
/* Timestamp when the last frame was delivered to the framework. */
nsecs_t mLastFrameTimestamp;
/* Video frame period, in nanoseconds (1000000000 / fps). */
nsecs_t mFrameRefreshFreq;
/* Message enabler. */
uint32_t mMessageEnabler;
/* JPEG quality used to compress frame during picture taking. */
int mJpegQuality;
/* Camera parameters used for EXIF data in picture */
CameraParameters mCameraParameters;
/* Video recording status. */
bool mVideoRecEnabled;
/* Picture taking status. */
bool mTakingPicture;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_CALLBACK_NOTIFIER_H */

@@ -0,0 +1,202 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of framebuffer conversion routines.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Converter"
#include <cutils/log.h>
#include "Converters.h"
#include "Alignment.h"
namespace android {
static void _YUV420SToRGB565(const uint8_t* Y,
const uint8_t* U,
const uint8_t* V,
int dUV,
uint16_t* rgb,
int width,
int height,
int y_stride,
int uv_stride)
{
const uint8_t* Y_pos = Y;
const uint8_t* U_pos = U;
const uint8_t* V_pos = V;
for (int y = 0; y < height; y++) {
Y = Y_pos + y_stride * y;
U = U_pos + uv_stride * (y / 2);
V = V_pos + uv_stride * (y / 2);
for (int x = 0; x < width; x += 2, U += dUV, V += dUV) {
const uint8_t nU = *U;
const uint8_t nV = *V;
*rgb = YUVToRGB565(*Y, nU, nV);
Y++; rgb++;
*rgb = YUVToRGB565(*Y, nU, nV);
Y++; rgb++;
}
}
}
static void _YUV420SToRGB32(const uint8_t* Y,
const uint8_t* U,
const uint8_t* V,
int dUV,
uint32_t* rgb,
int width,
int height,
int y_stride,
int uv_stride)
{
const uint8_t* Y_pos = Y;
const uint8_t* U_pos = U;
const uint8_t* V_pos = V;
for (int y = 0; y < height; y++) {
Y = Y_pos + y_stride * y;
U = U_pos + uv_stride * (y / 2);
V = V_pos + uv_stride * (y / 2);
for (int x = 0; x < width; x += 2, U += dUV, V += dUV) {
const uint8_t nU = *U;
const uint8_t nV = *V;
*rgb = YUVToRGB32(*Y, nU, nV);
Y++; rgb++;
*rgb = YUVToRGB32(*Y, nU, nV);
Y++; rgb++;
}
}
}
/* The YV12 and YU12 formats require that the row strides are aligned to 16 byte
* boundaries as per the format specification at:
* https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
*
* This means that we can't just use the width or assume that pixels are
* tightly packed; we have to calculate aligned strides and use them to find
* the next row.
*/
void YV12ToRGB565(const void* yv12, void* rgb, int width, int height)
{
// See note above about alignment
const int y_stride = align(width, 16);
const int uv_stride = align(y_stride / 2, 16);
const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
const uint8_t* U = Y + y_stride * height;
const uint8_t* V = U + uv_stride * (height / 2);
_YUV420SToRGB565(Y, U, V, 1, reinterpret_cast<uint16_t*>(rgb),
width, height, y_stride, uv_stride);
}
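// Editor's note: a hypothetical helper (not part of this commit; the name is
// invented) showing the total YV12 buffer size implied by the strides above.
// It mirrors the plane offsets used in YV12ToRGB565().
static size_t ExampleYV12BufferSize(int width, int height) {
    const int y_stride = align(width, 16);
    const int uv_stride = align(y_stride / 2, 16);
    // Full-height Y plane followed by two half-height chroma planes (V, U).
    return static_cast<size_t>(y_stride) * height +
           2 * static_cast<size_t>(uv_stride) * (height / 2);
}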
void YV12ToRGB32(const void* yv12, void* rgb, int width, int height)
{
// See note above about alignment
const int y_stride = align(width, 16);
const int uv_stride = align(y_stride / 2, 16);
const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
const uint8_t* V = Y + y_stride * height;
const uint8_t* U = V + uv_stride * (height / 2);
_YUV420SToRGB32(Y, U, V, 1, reinterpret_cast<uint32_t*>(rgb), width, height,
y_stride, uv_stride);
}
void YU12ToRGB32(const void* yu12, void* rgb, int width, int height)
{
// See note above about alignment
const int y_stride = align(width, 16);
const int uv_stride = align(y_stride / 2, 16);
const uint8_t* Y = reinterpret_cast<const uint8_t*>(yu12);
const uint8_t* U = Y + y_stride * height;
const uint8_t* V = U + uv_stride * (height / 2);
_YUV420SToRGB32(Y, U, V, 1, reinterpret_cast<uint32_t*>(rgb), width, height,
y_stride, uv_stride);
}
/* Common converter for YUV 4:2:0 interleaved to RGB565.
* Y, U, and V point to the Y, U, and V planes; the U and V values are interleaved.
*/
static void _NVXXToRGB565(const uint8_t* Y,
const uint8_t* U,
const uint8_t* V,
uint16_t* rgb,
int width,
int height)
{
// The UV stride for NV21 and NV12 is the same as the width because the
// U and V values are interleaved, making each row twice as wide even though
// each value covers a two pixel wide area. These formats do not require any
// kind of alignment.
int y_stride = width;
int uv_stride = width;
_YUV420SToRGB565(Y, U, V, 2, rgb, width, height, y_stride, uv_stride);
}
/* Common converter for YUV 4:2:0 interleaved to RGB32.
* Y, U, and V point to the Y, U, and V planes; the U and V values are interleaved.
*/
static void _NVXXToRGB32(const uint8_t* Y,
const uint8_t* U,
const uint8_t* V,
uint32_t* rgb,
int width,
int height)
{
// The UV stride for NV21 and NV12 is the same as the width because the
// U and V values are interleaved, making each row twice as wide even though
// each value covers a two pixel wide area. These formats do not require any
// kind of alignment.
int y_stride = width;
int uv_stride = width;
_YUV420SToRGB32(Y, U, V, 2, rgb, width, height, y_stride, uv_stride);
}
void NV12ToRGB565(const void* nv12, void* rgb, int width, int height)
{
const int pix_total = width * height;
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv12);
_NVXXToRGB565(y, y + pix_total, y + pix_total + 1,
reinterpret_cast<uint16_t*>(rgb), width, height);
}
void NV12ToRGB32(const void* nv12, void* rgb, int width, int height)
{
const int pix_total = width * height;
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv12);
_NVXXToRGB32(y, y + pix_total, y + pix_total + 1,
reinterpret_cast<uint32_t*>(rgb), width, height);
}
void NV21ToRGB565(const void* nv21, void* rgb, int width, int height)
{
const int pix_total = width * height;
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv21);
_NVXXToRGB565(y, y + pix_total + 1, y + pix_total,
reinterpret_cast<uint16_t*>(rgb), width, height);
}
void NV21ToRGB32(const void* nv21, void* rgb, int width, int height)
{
const int pix_total = width * height;
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv21);
_NVXXToRGB32(y, y + pix_total + 1, y + pix_total,
reinterpret_cast<uint32_t*>(rgb), width, height);
}
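// Editor's note (layout summary, added for illustration, not part of this
// commit): both NV formats store a full Y plane first, then one interleaved
// chroma plane; they differ only in chroma byte order (wh = width * height):
//   NV12: | Y .. wh bytes .. | U0 V0 U1 V1 ... |  => U = base + wh, V = U + 1
//   NV21: | Y .. wh bytes .. | V0 U0 V1 U1 ... |  => V = base + wh, U = V + 1
// This is why the wrappers above pass dUV == 2 into _YUV420SToRGB565/32().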
}; /* namespace android */

@@ -0,0 +1,314 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_CONVERTERS_H
#define HW_EMULATOR_CAMERA_CONVERTERS_H
#include <endian.h>
#ifndef __BYTE_ORDER
#error "could not determine byte order"
#endif
/*
* Contains declaration of framebuffer conversion routines.
*
* NOTE: RGB and big/little endian considerations. Wherever in this code RGB
* pixels are represented as a WORD or DWORD, the color order inside the
* WORD / DWORD matches the one that would occur if that WORD / DWORD had been
* read from the typecast framebuffer:
*
* const uint32_t rgb = *reinterpret_cast<const uint32_t*>(framebuffer);
*
* So, if this code runs on a little endian CPU, the red color in 'rgb' would
* be masked as 0x000000ff and the blue color as 0x00ff0000, while on a big
* endian CPU the red color would be masked as 0xff000000 and the blue color
* as 0x0000ff00.
*/
namespace android {
/*
* RGB565 color masks
*/
#if __BYTE_ORDER == __LITTLE_ENDIAN
static const uint16_t kRed5 = 0x001f;
static const uint16_t kGreen6 = 0x07e0;
static const uint16_t kBlue5 = 0xf800;
#else // __BYTE_ORDER
static const uint16_t kRed5 = 0xf800;
static const uint16_t kGreen6 = 0x07e0;
static const uint16_t kBlue5 = 0x001f;
#endif // __BYTE_ORDER
static const uint32_t kBlack16 = 0x0000;
static const uint32_t kWhite16 = kRed5 | kGreen6 | kBlue5;
/*
* RGB32 color masks
*/
#if __BYTE_ORDER == __LITTLE_ENDIAN
static const uint32_t kRed8 = 0x000000ff;
static const uint32_t kGreen8 = 0x0000ff00;
static const uint32_t kBlue8 = 0x00ff0000;
#else // __BYTE_ORDER
static const uint32_t kRed8 = 0x00ff0000;
static const uint32_t kGreen8 = 0x0000ff00;
static const uint32_t kBlue8 = 0x000000ff;
#endif // __BYTE_ORDER
static const uint32_t kBlack32 = 0x00000000;
static const uint32_t kWhite32 = kRed8 | kGreen8 | kBlue8;
/*
* Extracting, and saving color bytes from / to WORD / DWORD RGB.
*/
#if __BYTE_ORDER == __LITTLE_ENDIAN
/* Extract red, green, and blue bytes from RGB565 word. */
#define R16(rgb) static_cast<uint8_t>((rgb) & kRed5)
#define G16(rgb) static_cast<uint8_t>(((rgb) & kGreen6) >> 5)
#define B16(rgb) static_cast<uint8_t>(((rgb) & kBlue5) >> 11)
/* Make 8 bits red, green, and blue, extracted from RGB565 word. */
#define R16_32(rgb) static_cast<uint8_t>((((rgb) & kRed5) << 3) | (((rgb) & kRed5) >> 2))
#define G16_32(rgb) static_cast<uint8_t>((((rgb) & kGreen6) >> 3) | (((rgb) & kGreen6) >> 9))
#define B16_32(rgb) static_cast<uint8_t>((((rgb) & kBlue5) >> 8) | (((rgb) & kBlue5) >> 14))
/* Extract red, green, and blue bytes from RGB32 dword. */
#define R32(rgb) static_cast<uint8_t>((rgb) & kRed8)
#define G32(rgb) static_cast<uint8_t>((((rgb) & kGreen8) >> 8) & 0xff)
#define B32(rgb) static_cast<uint8_t>((((rgb) & kBlue8) >> 16) & 0xff)
/* Build RGB565 word from red, green, and blue bytes. */
#define RGB565(r, g, b) static_cast<uint16_t>((((static_cast<uint16_t>(b) << 6) | (g)) << 5) | (r))
/* Build RGB32 dword from red, green, and blue bytes. */
#define RGB32(r, g, b) static_cast<uint32_t>((((static_cast<uint32_t>(b) << 8) | (g)) << 8) | (r))
#else // __BYTE_ORDER
/* Extract red, green, and blue bytes from RGB565 word. */
#define R16(rgb) static_cast<uint8_t>(((rgb) & kRed5) >> 11)
#define G16(rgb) static_cast<uint8_t>(((rgb) & kGreen6) >> 5)
#define B16(rgb) static_cast<uint8_t>((rgb) & kBlue5)
/* Make 8 bits red, green, and blue, extracted from RGB565 word. */
#define R16_32(rgb) static_cast<uint8_t>((((rgb) & kRed5) >> 8) | (((rgb) & kRed5) >> 14))
#define G16_32(rgb) static_cast<uint8_t>((((rgb) & kGreen6) >> 3) | (((rgb) & kGreen6) >> 9))
#define B16_32(rgb) static_cast<uint8_t>((((rgb) & kBlue5) << 3) | (((rgb) & kBlue5) >> 2))
/* Extract red, green, and blue bytes from RGB32 dword. */
#define R32(rgb) static_cast<uint8_t>(((rgb) & kRed8) >> 16)
#define G32(rgb) static_cast<uint8_t>(((rgb) & kGreen8) >> 8)
#define B32(rgb) static_cast<uint8_t>((rgb) & kBlue8)
/* Build RGB565 word from red, green, and blue bytes. */
#define RGB565(r, g, b) static_cast<uint16_t>((((static_cast<uint16_t>(r) << 6) | g) << 5) | b)
/* Build RGB32 dword from red, green, and blue bytes. */
#define RGB32(r, g, b) static_cast<uint32_t>((((static_cast<uint32_t>(r) << 8) | g) << 8) | b)
#endif // __BYTE_ORDER
/* A union that simplifies breaking a 32-bit RGB value into separate R, G, and B colors.
*/
typedef union RGB32_t {
uint32_t color;
struct {
#if __BYTE_ORDER == __LITTLE_ENDIAN
uint8_t r; uint8_t g; uint8_t b; uint8_t a;
#else // __BYTE_ORDER
uint8_t a; uint8_t b; uint8_t g; uint8_t r;
#endif // __BYTE_ORDER
};
} RGB32_t;
/* Clips a value to the unsigned 0-255 range, treating negative values as zero.
*/
static __inline__ int
clamp(int x)
{
if (x > 255) return 255;
if (x < 0) return 0;
return x;
}
/********************************************************************************
* Basics of RGB -> YUV conversion
*******************************************************************************/
/*
* RGB -> YUV conversion macros
*/
#define RGB2Y(r, g, b) (uint8_t)(((66 * (r) + 129 * (g) + 25 * (b) + 128) >> 8) + 16)
#define RGB2U(r, g, b) (uint8_t)(((-38 * (r) - 74 * (g) + 112 * (b) + 128) >> 8) + 128)
#define RGB2V(r, g, b) (uint8_t)(((112 * (r) - 94 * (g) - 18 * (b) + 128) >> 8) + 128)
/* Converts R8 G8 B8 color to YUV. */
static __inline__ void
R8G8B8ToYUV(uint8_t r, uint8_t g, uint8_t b, uint8_t* y, uint8_t* u, uint8_t* v)
{
*y = RGB2Y((int)r, (int)g, (int)b);
*u = RGB2U((int)r, (int)g, (int)b);
*v = RGB2V((int)r, (int)g, (int)b);
}
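// Editor's note (worked example, not part of this commit): the fixed-point
// coefficients above follow a widely used BT.601 integer approximation, so
// R8G8B8ToYUV(255, 255, 255, &y, &u, &v) yields studio-swing white:
//   Y = ((66*255 + 129*255 + 25*255 + 128) >> 8) + 16 = (56228 >> 8) + 16 = 235
//   U = ((-38*255 - 74*255 + 112*255 + 128) >> 8) + 128 = 0 + 128 = 128
//   V = ((112*255 - 94*255 - 18*255 + 128) >> 8) + 128 = 0 + 128 = 128
// i.e. full-range RGB white maps to Y=235 with neutral chroma.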
/* Converts RGB565 color to YUV. */
static __inline__ void
RGB565ToYUV(uint16_t rgb, uint8_t* y, uint8_t* u, uint8_t* v)
{
R8G8B8ToYUV(R16_32(rgb), G16_32(rgb), B16_32(rgb), y, u, v);
}
/* Converts RGB32 color to YUV. */
static __inline__ void
RGB32ToYUV(uint32_t rgb, uint8_t* y, uint8_t* u, uint8_t* v)
{
RGB32_t rgb_c;
rgb_c.color = rgb;
R8G8B8ToYUV(rgb_c.r, rgb_c.g, rgb_c.b, y, u, v);
}
/********************************************************************************
* Basics of YUV -> RGB conversion.
* Note that since the guest uses RGB only for the preview window, and the
* RGB format used there is RGB565, we can limit YUV -> RGB conversions to
* RGB565 only.
*******************************************************************************/
/*
* YUV -> RGB conversion macros
*/
/* "Optimized" macros that take specialy prepared Y, U, and V values:
* C = Y - 16
* D = U - 128
* E = V - 128
*/
#define YUV2RO(C, D, E) clamp((298 * (C) + 409 * (E) + 128) >> 8)
#define YUV2GO(C, D, E) clamp((298 * (C) - 100 * (D) - 208 * (E) + 128) >> 8)
#define YUV2BO(C, D, E) clamp((298 * (C) + 516 * (D) + 128) >> 8)
/*
* Main macros that take the original Y, U, and V values
*/
#define YUV2R(y, u, v) clamp((298 * ((y)-16) + 409 * ((v)-128) + 128) >> 8)
#define YUV2G(y, u, v) clamp((298 * ((y)-16) - 100 * ((u)-128) - 208 * ((v)-128) + 128) >> 8)
#define YUV2B(y, u, v) clamp((298 * ((y)-16) + 516 * ((u)-128) + 128) >> 8)
/* Converts YUV color to RGB565. */
static __inline__ uint16_t
YUVToRGB565(int y, int u, int v)
{
/* Calculate C, D, and E values for the optimized macro. */
y -= 16; u -= 128; v -= 128;
const uint16_t r = (YUV2RO(y,u,v) >> 3) & 0x1f;
const uint16_t g = (YUV2GO(y,u,v) >> 2) & 0x3f;
const uint16_t b = (YUV2BO(y,u,v) >> 3) & 0x1f;
return RGB565(r, g, b);
}
/* Converts YUV color to RGB32. */
static __inline__ uint32_t
YUVToRGB32(int y, int u, int v)
{
/* Calculate C, D, and E values for the optimized macro. */
y -= 16; u -= 128; v -= 128;
RGB32_t rgb;
rgb.r = YUV2RO(y,u,v) & 0xff;
rgb.g = YUV2GO(y,u,v) & 0xff;
rgb.b = YUV2BO(y,u,v) & 0xff;
return rgb.color;
}
/* YUV pixel descriptor. */
struct YUVPixel {
uint8_t Y;
uint8_t U;
uint8_t V;
inline YUVPixel()
: Y(0), U(0), V(0)
{
}
inline explicit YUVPixel(uint16_t rgb565)
{
RGB565ToYUV(rgb565, &Y, &U, &V);
}
inline explicit YUVPixel(uint32_t rgb32)
{
RGB32ToYUV(rgb32, &Y, &U, &V);
}
inline void get(uint8_t* pY, uint8_t* pU, uint8_t* pV) const
{
*pY = Y; *pU = U; *pV = V;
}
};
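// Editor's note (hypothetical usage, not part of this commit):
//   YUVPixel white(kWhite32);   // RGB32 white -> Y/U/V
//   uint8_t y, u, v;
//   white.get(&y, &u, &v);      // y == 235, u == v == 128 (neutral chroma)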
/* Converts a YV12 framebuffer to an RGB565 framebuffer.
* Param:
* yv12 - YV12 framebuffer.
* rgb - RGB565 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void YV12ToRGB565(const void* yv12, void* rgb, int width, int height);
/* Converts a YV12 framebuffer to an RGB32 framebuffer.
* Param:
* yv12 - YV12 framebuffer.
* rgb - RGB32 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void YV12ToRGB32(const void* yv12, void* rgb, int width, int height);
/* Converts a YU12 framebuffer to an RGB32 framebuffer.
* Param:
* yu12 - YU12 framebuffer.
* rgb - RGB32 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void YU12ToRGB32(const void* yu12, void* rgb, int width, int height);
/* Converts an NV12 framebuffer to an RGB565 framebuffer.
* Param:
* nv12 - NV12 framebuffer.
* rgb - RGB565 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void NV12ToRGB565(const void* nv12, void* rgb, int width, int height);
/* Converts an NV12 framebuffer to an RGB32 framebuffer.
* Param:
* nv12 - NV12 framebuffer.
* rgb - RGB32 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void NV12ToRGB32(const void* nv12, void* rgb, int width, int height);
/* Converts an NV21 framebuffer to an RGB565 framebuffer.
* Param:
* nv21 - NV21 framebuffer.
* rgb - RGB565 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void NV21ToRGB565(const void* nv21, void* rgb, int width, int height);
/* Converts an NV21 framebuffer to an RGB32 framebuffer.
* Param:
* nv21 - NV21 framebuffer.
* rgb - RGB32 framebuffer.
* width, height - Dimensions for both framebuffers.
*/
void NV21ToRGB32(const void* nv21, void* rgb, int width, int height);
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_CONVERTERS_H */

@@ -0,0 +1,89 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedBaseCamera that encapsulates
* functionality common to all emulated camera device versions ("fake",
* "webcam", "video file", "cam2.0" etc.). Instances of this class (for each
* emulated camera) are created during the construction of the
* EmulatedCameraFactory instance. This class serves as an entry point for all
* camera API calls that are common across all versions of the
* camera_device_t/camera_module_t structures.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_BaseCamera"
#include <cutils/log.h>
#include "EmulatedBaseCamera.h"
namespace android {
EmulatedBaseCamera::EmulatedBaseCamera(int cameraId,
uint32_t cameraVersion,
struct hw_device_t* device,
struct hw_module_t* module)
: mCameraInfo(NULL),
mCameraID(cameraId),
mCameraDeviceVersion(cameraVersion)
{
/*
* Initialize camera_device descriptor for this object.
*/
/* Common header */
device->tag = HARDWARE_DEVICE_TAG;
device->version = cameraVersion;
device->module = module;
device->close = NULL; // Must be filled in by child implementation
}
EmulatedBaseCamera::~EmulatedBaseCamera()
{
}
status_t EmulatedBaseCamera::getCameraInfo(struct camera_info* info)
{
ALOGV("%s", __FUNCTION__);
info->device_version = mCameraDeviceVersion;
if (mCameraDeviceVersion >= HARDWARE_DEVICE_API_VERSION(2, 0)) {
info->static_camera_characteristics = mCameraInfo;
} else {
// Pre-camera2 HALs have no static characteristics; this is an intentionally
// invalid poison value that callers must never dereference.
info->static_camera_characteristics = (camera_metadata_t*)0xcafef00d;
}
return NO_ERROR;
}
status_t EmulatedBaseCamera::plugCamera() {
ALOGE("%s: not supported", __FUNCTION__);
return INVALID_OPERATION;
}
status_t EmulatedBaseCamera::unplugCamera() {
ALOGE("%s: not supported", __FUNCTION__);
return INVALID_OPERATION;
}
camera_device_status_t EmulatedBaseCamera::getHotplugStatus() {
return CAMERA_DEVICE_STATUS_PRESENT;
}
} /* namespace android */

@@ -0,0 +1,117 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_BASE_CAMERA_H
#define HW_EMULATOR_CAMERA_EMULATED_BASE_CAMERA_H
#include <hardware/camera_common.h>
#include <utils/Errors.h>
namespace android {
/*
* Contains declaration of a class EmulatedBaseCamera that encapsulates
* functionality common to all emulated camera device versions ("fake",
* "webcam", "video file", etc.). Instances of this class (for each emulated
* camera) are created during the construction of the EmulatedCameraFactory
* instance. This class serves as an entry point for all camera API calls that
* are common across all versions of the camera_device_t/camera_module_t
* structures.
*/
class EmulatedBaseCamera {
public:
EmulatedBaseCamera(int cameraId,
uint32_t cameraVersion,
struct hw_device_t* device,
struct hw_module_t* module);
virtual ~EmulatedBaseCamera();
/****************************************************************************
* Public API
***************************************************************************/
public:
/* Initializes EmulatedCamera instance.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
virtual status_t Initialize() = 0;
/****************************************************************************
* Camera API implementation
***************************************************************************/
public:
/* Creates connection to the emulated camera device.
* This method is called in response to hw_module_methods_t::open callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t connectCamera(hw_device_t** device) = 0;
/* Plug the connection for the emulated camera. Until it's plugged in
* calls to connectCamera should fail with -ENODEV.
*/
virtual status_t plugCamera();
/* Unplug the connection from underneath the emulated camera.
* This is similar to closing the camera, except that
* all function calls into the camera device will return
* -EPIPE errors until the camera is reopened.
*/
virtual status_t unplugCamera();
virtual camera_device_status_t getHotplugStatus();
/* Closes connection to the emulated camera.
* This method is called in response to camera_device::close callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t closeCamera() = 0;
/* Gets camera information.
* This method is called in response to camera_module_t::get_camera_info
* callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t getCameraInfo(struct camera_info* info) = 0;
/****************************************************************************
* Data members
***************************************************************************/
protected:
/* Fixed camera information for camera2 devices. Must be valid to access if
* mCameraDeviceVersion is >= HARDWARE_DEVICE_API_VERSION(2,0) */
camera_metadata_t *mCameraInfo;
/* Zero-based ID assigned to this camera. */
int mCameraID;
private:
/* Version of the camera device HAL implemented by this camera */
int mCameraDeviceVersion;
};
} /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_BASE_CAMERA_H */
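
To make the pure-virtual surface above concrete, here is a minimal hypothetical subclass sketch (not part of this commit; StubCamera and its behavior are invented for illustration):

#include <errno.h>
#include "EmulatedBaseCamera.h"
// Hypothetical subclass, for illustration only.
class StubCamera : public android::EmulatedBaseCamera {
public:
    StubCamera(int id, hw_device_t* dev, hw_module_t* mod)
        : EmulatedBaseCamera(id, HARDWARE_DEVICE_API_VERSION(1, 0), dev, mod) {}
    android::status_t Initialize() override { return android::NO_ERROR; }
    android::status_t connectCamera(hw_device_t** device) override {
        *device = NULL;   // a real camera hands back its hw_device_t here
        return -ENODEV;   // stub: nothing to connect
    }
    android::status_t closeCamera() override { return android::NO_ERROR; }
    android::status_t getCameraInfo(struct camera_info* info) override {
        // Reuse the shared implementation from the base class.
        return android::EmulatedBaseCamera::getCameraInfo(info);
    }
};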

File diff suppressed because it is too large.

@@ -0,0 +1,412 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_H
/*
* Contains declaration of a class EmulatedCamera that encapsulates
* functionality common to all version 1.0 emulated camera devices ("fake",
* "webcam", "video file", etc.). Instances of this class (for each emulated
* camera) are created during the construction of the EmulatedCameraFactory
* instance. This class serves as an entry point for all camera API calls
* defined by the camera_device_ops_t API.
*/
#include <CameraParameters.h>
#include "EmulatedBaseCamera.h"
#include "EmulatedCameraDevice.h"
#include "PreviewWindow.h"
#include "CallbackNotifier.h"
using ::android::hardware::camera::common::V1_0::helper::CameraParameters;
using ::android::hardware::camera::common::V1_0::helper::Size;
namespace android {
/* Encapsulates functionality common to all version 1.0 emulated camera devices
* ("fake", "webcam", "file stream", etc.).
*
* Note that EmulatedCameraFactory instantiates an object of this class just
* once, when the EmulatedCameraFactory instance gets constructed. Connection
* to / disconnection from the actual camera device is handled by calls to the
* connectCamera() and closeCamera() methods of this class, which are invoked
* in response to the hw_module_methods_t::open and camera_device::close
* callbacks.
*/
class EmulatedCamera : public camera_device, public EmulatedBaseCamera {
public:
/* Constructs EmulatedCamera instance.
* Param:
* cameraId - Zero based camera identifier, which is an index of the camera
* instance in camera factory's array.
* module - Emulated camera HAL module descriptor.
*/
EmulatedCamera(int cameraId,
struct hw_module_t* module);
/* Destructs EmulatedCamera instance. */
virtual ~EmulatedCamera();
/****************************************************************************
* Abstract API
***************************************************************************/
public:
/* Gets emulated camera device used by this instance of the emulated camera.
*/
virtual EmulatedCameraDevice* getCameraDevice() = 0;
/****************************************************************************
* Public API
***************************************************************************/
public:
/** Override of base class method */
virtual status_t Initialize();
/* Next frame is available in the camera device.
* This is a notification callback that is invoked by the camera device when
* a new frame is available. The captured frame is available through
* the |camera_dev| object. Remember to create an
* EmulatedCameraDevice::FrameLock instance to lock the frame before
* accessing it.
* Note that most likely this method is called in the context of a worker
* thread that the camera device has created for frame capturing.
* Param:
* timestamp - Frame's timestamp.
* camera_dev - Camera device instance that delivered the frame.
*/
virtual void onNextFrameAvailable(nsecs_t timestamp,
EmulatedCameraDevice* camera_dev);
/* Entry point for notifications that occur in camera device.
* Param:
* err - CAMERA_ERROR_XXX error code.
*/
virtual void onCameraDeviceError(int err);
/* Signal to the callback notifier that a picture is being taken. */
void setTakingPicture(bool takingPicture);
/****************************************************************************
* Camera API implementation
***************************************************************************/
public:
/** Override of base class method */
virtual status_t connectCamera(hw_device_t** device);
/** Override of base class method */
virtual status_t closeCamera();
/** Override of base class method */
virtual status_t getCameraInfo(struct camera_info* info);
/****************************************************************************
* Camera API implementation.
* These methods are called from the camera API callback routines.
***************************************************************************/
public:
/* Signal that a requested auto-focus has completed. This will be called
* from the camera device's worker thread. */
void autoFocusComplete();
protected:
/* Actual handler for camera_device_ops_t::set_preview_window callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t setPreviewWindow(struct preview_stream_ops *window);
/* Actual handler for camera_device_ops_t::set_callbacks callback.
* NOTE: When this method is called the object is locked.
*/
virtual void setCallbacks(camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void* user);
/* Actual handler for camera_device_ops_t::enable_msg_type callback.
* NOTE: When this method is called the object is locked.
*/
virtual void enableMsgType(int32_t msg_type);
/* Actual handler for camera_device_ops_t::disable_msg_type callback.
* NOTE: When this method is called the object is locked.
*/
virtual void disableMsgType(int32_t msg_type);
/* Actual handler for camera_device_ops_t::msg_type_enabled callback.
* NOTE: When this method is called the object is locked.
* Return:
* 0 if message(s) is (are) disabled, != 0 if enabled.
*/
virtual int isMsgTypeEnabled(int32_t msg_type);
/* Actual handler for camera_device_ops_t::start_preview callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t startPreview();
/* Actual handler for camera_device_ops_t::stop_preview callback.
* NOTE: When this method is called the object is locked.
*/
virtual void stopPreview();
/* Actual handler for camera_device_ops_t::preview_enabled callback.
* NOTE: When this method is called the object is locked.
* Return:
* 0 if preview is disabled, != 0 if enabled.
*/
virtual int isPreviewEnabled();
/* Actual handler for camera_device_ops_t::store_meta_data_in_buffers callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t storeMetaDataInBuffers(int enable);
/* Actual handler for camera_device_ops_t::start_recording callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t startRecording();
/* Actual handler for camera_device_ops_t::stop_recording callback.
* NOTE: When this method is called the object is locked.
*/
virtual void stopRecording();
/* Actual handler for camera_device_ops_t::recording_enabled callback.
* NOTE: When this method is called the object is locked.
* Return:
* 0 if recording is disabled, != 0 if enabled.
*/
virtual int isRecordingEnabled();
/* Actual handler for camera_device_ops_t::release_recording_frame callback.
* NOTE: When this method is called the object is locked.
*/
virtual void releaseRecordingFrame(const void* opaque);
/* Actual handler for camera_device_ops_t::auto_focus callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t setAutoFocus();
/* Actual handler for camera_device_ops_t::cancel_auto_focus callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t cancelAutoFocus();
/* Actual handler for camera_device_ops_t::take_picture callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t takePicture();
/* Actual handler for camera_device_ops_t::cancel_picture callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t cancelPicture();
/* Actual handler for camera_device_ops_t::set_parameters callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t setParameters(const char* parms);
/* Actual handler for camera_device_ops_t::get_parameters callback.
* NOTE: When this method is called the object is locked.
* Return:
* Flattened parameters string. The caller will free the buffer allocated
* for the string by calling camera_device_ops_t::put_parameters callback.
*/
virtual char* getParameters();
/* Actual handler for camera_device_ops_t::put_parameters callback.
* Called to free the string returned from camera_device_ops_t::get_parameters
* callback. There is nothing more to it: the name of the callback is just
* misleading.
* NOTE: When this method is called the object is locked.
*/
virtual void putParameters(char* params);
/* Actual handler for camera_device_ops_t::send_command callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
/* Actual handler for camera_device_ops_t::release callback.
* NOTE: When this method is called the object is locked.
*/
virtual void releaseCamera();
/* Actual handler for camera_device_ops_t::dump callback.
* NOTE: When this method is called the object is locked.
* Note that failures in this method are reported as negative EXXX statuses.
*/
virtual status_t dumpCamera(int fd);
/****************************************************************************
* Preview management.
***************************************************************************/
protected:
/* Starts preview.
* Note that when this method is called mPreviewWindow may be NULL,
* indicating that the framework intends to start displaying video
* frames, but has not created the preview window yet.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
virtual status_t doStartPreview();
/* Stops preview.
* This method reverts doStartPreview().
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
virtual status_t doStopPreview();
/****************************************************************************
* Private API.
***************************************************************************/
protected:
/* Cleans up camera when released. */
virtual status_t cleanupCamera();
private:
status_t getConfiguredPixelFormat(uint32_t* pixelFormat) const;
status_t getConfiguredFrameSize(int* width, int* height) const;
/****************************************************************************
* Camera API callbacks as defined by camera_device_ops structure.
* See hardware/libhardware/include/hardware/camera.h for information on
* each of these callbacks. Implemented in this class, these callbacks simply
* dispatch the call into an instance of EmulatedCamera class defined by the
* 'camera_device' parameter.
***************************************************************************/
private:
static int set_preview_window(struct camera_device* dev,
struct preview_stream_ops* window);
static void set_callbacks(struct camera_device* dev,
camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void* user);
static void enable_msg_type(struct camera_device* dev, int32_t msg_type);
static void disable_msg_type(struct camera_device* dev, int32_t msg_type);
static int msg_type_enabled(struct camera_device* dev, int32_t msg_type);
static int start_preview(struct camera_device* dev);
static void stop_preview(struct camera_device* dev);
static int preview_enabled(struct camera_device* dev);
static int store_meta_data_in_buffers(struct camera_device* dev, int enable);
static int start_recording(struct camera_device* dev);
static void stop_recording(struct camera_device* dev);
static int recording_enabled(struct camera_device* dev);
static void release_recording_frame(struct camera_device* dev,
const void* opaque);
static int auto_focus(struct camera_device* dev);
static int cancel_auto_focus(struct camera_device* dev);
static int take_picture(struct camera_device* dev);
static int cancel_picture(struct camera_device* dev);
static int set_parameters(struct camera_device* dev, const char* parms);
static char* get_parameters(struct camera_device* dev);
static void put_parameters(struct camera_device* dev, char* params);
static int send_command(struct camera_device* dev,
int32_t cmd,
int32_t arg1,
int32_t arg2);
static void release(struct camera_device* dev);
static int dump(struct camera_device* dev, int fd);
static int close(struct hw_device_t* device);
/****************************************************************************
* Data members
***************************************************************************/
protected:
/* Locks this instance for parameters, state, etc. change. */
Mutex mObjectLock;
/* Camera parameters. */
CameraParameters mParameters;
/* Preview window. */
PreviewWindow mPreviewWindow;
/* Callback notifier. */
CallbackNotifier mCallbackNotifier;
private:
/* Registered callbacks implementing camera API. */
static camera_device_ops_t mDeviceOps;
/****************************************************************************
* Common keys
***************************************************************************/
public:
static const char FACING_KEY[];
static const char ORIENTATION_KEY[];
static const char RECORDING_HINT_KEY[];
/****************************************************************************
* Common string values
***************************************************************************/
/* Possible values for FACING_KEY */
static const char FACING_BACK[];
static const char FACING_FRONT[];
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_H */

View file

@ -0,0 +1,410 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedCamera2 that encapsulates
* functionality common to all version 2.0 emulated camera devices. Instances
* of this class (for each emulated camera) are created during the construction
* of the EmulatedCameraFactory instance. This class serves as an entry point
* for all camera API calls that are defined by the camera2_device_ops_t API.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera2_Camera"
#include <cutils/log.h>
#include "EmulatedCamera2.h"
#include "system/camera_metadata.h"
namespace android {
/* Constructs EmulatedCamera2 instance.
* Param:
* cameraId - Zero based camera identifier, which is an index of the camera
* instance in camera factory's array.
* module - Emulated camera HAL module descriptor.
*/
EmulatedCamera2::EmulatedCamera2(int cameraId,
struct hw_module_t* module):
EmulatedBaseCamera(cameraId,
CAMERA_DEVICE_API_VERSION_2_0,
&common,
module)
{
common.close = EmulatedCamera2::close;
ops = &sDeviceOps;
priv = this;
mNotifyCb = NULL;
mRequestQueueSrc = NULL;
mFrameQueueDst = NULL;
mVendorTagOps.get_camera_vendor_section_name =
EmulatedCamera2::get_camera_vendor_section_name;
mVendorTagOps.get_camera_vendor_tag_name =
EmulatedCamera2::get_camera_vendor_tag_name;
mVendorTagOps.get_camera_vendor_tag_type =
EmulatedCamera2::get_camera_vendor_tag_type;
mVendorTagOps.parent = this;
mStatusPresent = true;
}
/* Destructs EmulatedCamera2 instance. */
EmulatedCamera2::~EmulatedCamera2() {
}
/****************************************************************************
* Abstract API
***************************************************************************/
/****************************************************************************
* Public API
***************************************************************************/
status_t EmulatedCamera2::Initialize() {
return NO_ERROR;
}
/****************************************************************************
* Camera API implementation
***************************************************************************/
status_t EmulatedCamera2::connectCamera(hw_device_t** device) {
*device = &common;
return NO_ERROR;
}
status_t EmulatedCamera2::closeCamera() {
return NO_ERROR;
}
status_t EmulatedCamera2::getCameraInfo(struct camera_info* info) {
return EmulatedBaseCamera::getCameraInfo(info);
}
/****************************************************************************
* Camera Device API implementation.
* These methods are called from the camera API callback routines.
***************************************************************************/
/** Request input queue notification */
int EmulatedCamera2::requestQueueNotify() {
return INVALID_OPERATION;
}
/** Count of requests in flight */
int EmulatedCamera2::getInProgressCount() {
return INVALID_OPERATION;
}
/** Cancel all captures in flight */
int EmulatedCamera2::flushCapturesInProgress() {
return INVALID_OPERATION;
}
/** Construct a default request for a given use case */
int EmulatedCamera2::constructDefaultRequest(
int request_template,
camera_metadata_t **request) {
return INVALID_OPERATION;
}
/** Output stream creation and management */
int EmulatedCamera2::allocateStream(
uint32_t width,
uint32_t height,
int format,
const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id,
uint32_t *format_actual,
uint32_t *usage,
uint32_t *max_buffers) {
return INVALID_OPERATION;
}
int EmulatedCamera2::registerStreamBuffers(
uint32_t stream_id,
int num_buffers,
buffer_handle_t *buffers) {
return INVALID_OPERATION;
}
int EmulatedCamera2::releaseStream(uint32_t stream_id) {
return INVALID_OPERATION;
}
/** Reprocessing input stream management */
int EmulatedCamera2::allocateReprocessStream(
uint32_t width,
uint32_t height,
uint32_t format,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id,
uint32_t *consumer_usage,
uint32_t *max_buffers) {
return INVALID_OPERATION;
}
int EmulatedCamera2::allocateReprocessStreamFromStream(
uint32_t output_stream_id,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id) {
return INVALID_OPERATION;
}
int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
return INVALID_OPERATION;
}
/** 3A triggering */
int EmulatedCamera2::triggerAction(uint32_t trigger_id,
int ext1, int ext2) {
return INVALID_OPERATION;
}
/** Custom tag query methods */
const char* EmulatedCamera2::getVendorSectionName(uint32_t tag) {
return NULL;
}
const char* EmulatedCamera2::getVendorTagName(uint32_t tag) {
return NULL;
}
int EmulatedCamera2::getVendorTagType(uint32_t tag) {
return -1;
}
/** Debug methods */
int EmulatedCamera2::dump(int fd) {
return INVALID_OPERATION;
}
/****************************************************************************
* Private API.
***************************************************************************/
/****************************************************************************
* Camera API callbacks as defined by camera2_device_ops structure. See
* hardware/libhardware/include/hardware/camera2.h for information on each
* of these callbacks. Implemented in this class, these callbacks simply
* dispatch the call into an instance of EmulatedCamera2 class defined by the
* 'camera_device2' parameter, or set a member value in the same.
***************************************************************************/
EmulatedCamera2* getInstance(const camera2_device_t *d) {
const EmulatedCamera2* cec = static_cast<const EmulatedCamera2*>(d);
return const_cast<EmulatedCamera2*>(cec);
}
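/* Note on the cast above: EmulatedCamera2 publicly derives from
 * camera2_device, so the camera2_device_t pointer handed to each callback
 * below can be safely downcast back to the owning EmulatedCamera2 instance.
 */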
int EmulatedCamera2::set_request_queue_src_ops(const camera2_device_t *d,
const camera2_request_queue_src_ops *queue_src_ops) {
EmulatedCamera2* ec = getInstance(d);
ec->mRequestQueueSrc = queue_src_ops;
return NO_ERROR;
}
int EmulatedCamera2::notify_request_queue_not_empty(const camera2_device_t *d) {
EmulatedCamera2* ec = getInstance(d);
return ec->requestQueueNotify();
}
int EmulatedCamera2::set_frame_queue_dst_ops(const camera2_device_t *d,
const camera2_frame_queue_dst_ops *queue_dst_ops) {
EmulatedCamera2* ec = getInstance(d);
ec->mFrameQueueDst = queue_dst_ops;
return NO_ERROR;
}
int EmulatedCamera2::get_in_progress_count(const camera2_device_t *d) {
EmulatedCamera2* ec = getInstance(d);
return ec->getInProgressCount();
}
int EmulatedCamera2::flush_captures_in_progress(const camera2_device_t *d) {
EmulatedCamera2* ec = getInstance(d);
return ec->flushCapturesInProgress();
}
int EmulatedCamera2::construct_default_request(const camera2_device_t *d,
int request_template,
camera_metadata_t **request) {
EmulatedCamera2* ec = getInstance(d);
return ec->constructDefaultRequest(request_template, request);
}
int EmulatedCamera2::allocate_stream(const camera2_device_t *d,
uint32_t width,
uint32_t height,
int format,
const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id,
uint32_t *format_actual,
uint32_t *usage,
uint32_t *max_buffers) {
EmulatedCamera2* ec = getInstance(d);
return ec->allocateStream(width, height, format, stream_ops,
stream_id, format_actual, usage, max_buffers);
}
int EmulatedCamera2::register_stream_buffers(const camera2_device_t *d,
uint32_t stream_id,
int num_buffers,
buffer_handle_t *buffers) {
EmulatedCamera2* ec = getInstance(d);
return ec->registerStreamBuffers(stream_id,
num_buffers,
buffers);
}
int EmulatedCamera2::release_stream(const camera2_device_t *d,
uint32_t stream_id) {
EmulatedCamera2* ec = getInstance(d);
return ec->releaseStream(stream_id);
}
int EmulatedCamera2::allocate_reprocess_stream(const camera2_device_t *d,
uint32_t width,
uint32_t height,
uint32_t format,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id,
uint32_t *consumer_usage,
uint32_t *max_buffers) {
EmulatedCamera2* ec = getInstance(d);
return ec->allocateReprocessStream(width, height, format,
reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
}
int EmulatedCamera2::allocate_reprocess_stream_from_stream(
const camera2_device_t *d,
uint32_t output_stream_id,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id) {
EmulatedCamera2* ec = getInstance(d);
return ec->allocateReprocessStreamFromStream(output_stream_id,
reprocess_stream_ops, stream_id);
}
int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
uint32_t stream_id) {
EmulatedCamera2* ec = getInstance(d);
return ec->releaseReprocessStream(stream_id);
}
int EmulatedCamera2::trigger_action(const camera2_device_t *d,
uint32_t trigger_id,
int ext1,
int ext2) {
EmulatedCamera2* ec = getInstance(d);
return ec->triggerAction(trigger_id, ext1, ext2);
}
int EmulatedCamera2::set_notify_callback(const camera2_device_t *d,
camera2_notify_callback notify_cb, void* user) {
EmulatedCamera2* ec = getInstance(d);
Mutex::Autolock l(ec->mMutex);
ec->mNotifyCb = notify_cb;
ec->mNotifyUserPtr = user;
return NO_ERROR;
}
int EmulatedCamera2::get_metadata_vendor_tag_ops(const camera2_device_t *d,
vendor_tag_query_ops_t **ops) {
EmulatedCamera2* ec = getInstance(d);
*ops = static_cast<vendor_tag_query_ops_t*>(
&ec->mVendorTagOps);
return NO_ERROR;
}
const char* EmulatedCamera2::get_camera_vendor_section_name(
const vendor_tag_query_ops_t *v,
uint32_t tag) {
EmulatedCamera2* ec = static_cast<const TagOps*>(v)->parent;
return ec->getVendorSectionName(tag);
}
const char* EmulatedCamera2::get_camera_vendor_tag_name(
const vendor_tag_query_ops_t *v,
uint32_t tag) {
EmulatedCamera2* ec = static_cast<const TagOps*>(v)->parent;
return ec->getVendorTagName(tag);
}
int EmulatedCamera2::get_camera_vendor_tag_type(
const vendor_tag_query_ops_t *v,
uint32_t tag) {
EmulatedCamera2* ec = static_cast<const TagOps*>(v)->parent;
return ec->getVendorTagType(tag);
}
int EmulatedCamera2::dump(const camera2_device_t *d, int fd) {
EmulatedCamera2* ec = getInstance(d);
return ec->dump(fd);
}
int EmulatedCamera2::close(struct hw_device_t* device) {
EmulatedCamera2* ec =
static_cast<EmulatedCamera2*>(
reinterpret_cast<camera2_device_t*>(device) );
if (ec == NULL) {
ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
return -EINVAL;
}
return ec->closeCamera();
}
void EmulatedCamera2::sendNotification(int32_t msgType,
int32_t ext1, int32_t ext2, int32_t ext3) {
camera2_notify_callback notifyCb;
{
Mutex::Autolock l(mMutex);
notifyCb = mNotifyCb;
}
if (notifyCb != NULL) {
notifyCb(msgType, ext1, ext2, ext3, mNotifyUserPtr);
}
}
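/* Note: the callback pointer is copied under mMutex and invoked outside the
 * lock, so a notification in flight cannot deadlock against a concurrent
 * set_notify_callback() call updating mNotifyCb. */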
camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
EmulatedCamera2::set_request_queue_src_ops,
EmulatedCamera2::notify_request_queue_not_empty,
EmulatedCamera2::set_frame_queue_dst_ops,
EmulatedCamera2::get_in_progress_count,
EmulatedCamera2::flush_captures_in_progress,
EmulatedCamera2::construct_default_request,
EmulatedCamera2::allocate_stream,
EmulatedCamera2::register_stream_buffers,
EmulatedCamera2::release_stream,
EmulatedCamera2::allocate_reprocess_stream,
EmulatedCamera2::allocate_reprocess_stream_from_stream,
EmulatedCamera2::release_reprocess_stream,
EmulatedCamera2::trigger_action,
EmulatedCamera2::set_notify_callback,
EmulatedCamera2::get_metadata_vendor_tag_ops,
EmulatedCamera2::dump
};
}; /* namespace android */

View file

@ -0,0 +1,274 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA2_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA2_H
/*
* Contains declaration of a class EmulatedCamera2 that encapsulates
* functionality common to all version 2.0 emulated camera devices. Instances
* of this class (for each emulated camera) are created during the construction
* of the EmulatedCameraFactory instance. This class serves as an entry point
* for all camera API calls that are defined by the camera2_device_ops_t API.
*/
#include "hardware/camera2.h"
#include "system/camera_metadata.h"
#include "EmulatedBaseCamera.h"
#include <utils/Thread.h>
#include <utils/Mutex.h>
namespace android {
/* Encapsulates functionality common to all version 2.0 emulated camera devices
*
* Note that EmulatedCameraFactory instantiates an object of this class just
* once,
* when EmulatedCameraFactory instance gets constructed. Connection to /
* disconnection from the actual camera device is handled by calls to
* connectDevice(), and closeCamera() methods of this class that are invoked in
* response to hw_module_methods_t::open, and camera_device::close callbacks.
*/
class EmulatedCamera2 : public camera2_device, public EmulatedBaseCamera {
public:
/* Constructs EmulatedCamera2 instance.
* Param:
* cameraId - Zero based camera identifier, which is an index of the camera
* instance in camera factory's array.
* module - Emulated camera HAL module descriptor.
*/
EmulatedCamera2(int cameraId,
struct hw_module_t* module);
/* Destructs EmulatedCamera2 instance. */
virtual ~EmulatedCamera2();
/****************************************************************************
* Abstract API
***************************************************************************/
public:
/****************************************************************************
* Public API
***************************************************************************/
public:
virtual status_t Initialize();
/****************************************************************************
* Camera module API and generic hardware device API implementation
***************************************************************************/
public:
virtual status_t connectCamera(hw_device_t** device);
virtual status_t closeCamera();
virtual status_t getCameraInfo(struct camera_info* info) = 0;
/****************************************************************************
* Camera API implementation.
* These methods are called from the camera API callback routines.
***************************************************************************/
protected:
/** Request input queue notification */
virtual int requestQueueNotify();
/** Count of requests in flight */
virtual int getInProgressCount();
/** Cancel all captures in flight */
virtual int flushCapturesInProgress();
virtual int constructDefaultRequest(
int request_template,
camera_metadata_t **request);
/** Output stream creation and management */
virtual int allocateStream(
uint32_t width,
uint32_t height,
int format,
const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id,
uint32_t *format_actual,
uint32_t *usage,
uint32_t *max_buffers);
virtual int registerStreamBuffers(
uint32_t stream_id,
int num_buffers,
buffer_handle_t *buffers);
virtual int releaseStream(uint32_t stream_id);
/** Input stream creation and management */
virtual int allocateReprocessStream(
uint32_t width,
uint32_t height,
uint32_t format,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id,
uint32_t *consumer_usage,
uint32_t *max_buffers);
virtual int allocateReprocessStreamFromStream(
uint32_t output_stream_id,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id);
virtual int releaseReprocessStream(uint32_t stream_id);
/** 3A action triggering */
virtual int triggerAction(uint32_t trigger_id,
int32_t ext1, int32_t ext2);
/** Custom tag definitions */
virtual const char* getVendorSectionName(uint32_t tag);
virtual const char* getVendorTagName(uint32_t tag);
virtual int getVendorTagType(uint32_t tag);
/** Debug methods */
virtual int dump(int fd);
/****************************************************************************
* Camera API callbacks as defined by camera2_device_ops structure. See
* hardware/libhardware/include/hardware/camera2.h for information on each
* of these callbacks. Implemented in this class, these callbacks simply
* dispatch the call into an instance of EmulatedCamera2 class defined in
* the 'camera_device2' parameter.
***************************************************************************/
private:
/** Input request queue */
static int set_request_queue_src_ops(const camera2_device_t *,
const camera2_request_queue_src_ops *queue_src_ops);
static int notify_request_queue_not_empty(const camera2_device_t *);
/** Output frame queue */
static int set_frame_queue_dst_ops(const camera2_device_t *,
const camera2_frame_queue_dst_ops *queue_dst_ops);
/** In-progress request management */
static int get_in_progress_count(const camera2_device_t *);
static int flush_captures_in_progress(const camera2_device_t *);
/** Request template creation */
static int construct_default_request(const camera2_device_t *,
int request_template,
camera_metadata_t **request);
/** Stream management */
static int allocate_stream(const camera2_device_t *,
uint32_t width,
uint32_t height,
int format,
const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id,
uint32_t *format_actual,
uint32_t *usage,
uint32_t *max_buffers);
static int register_stream_buffers(const camera2_device_t *,
uint32_t stream_id,
int num_buffers,
buffer_handle_t *buffers);
static int release_stream(const camera2_device_t *,
uint32_t stream_id);
static int allocate_reprocess_stream(const camera2_device_t *,
uint32_t width,
uint32_t height,
uint32_t format,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id,
uint32_t *consumer_usage,
uint32_t *max_buffers);
static int allocate_reprocess_stream_from_stream(const camera2_device_t *,
uint32_t output_stream_id,
const camera2_stream_in_ops_t *reprocess_stream_ops,
uint32_t *stream_id);
static int release_reprocess_stream(const camera2_device_t *,
uint32_t stream_id);
/** 3A triggers */
static int trigger_action(const camera2_device_t *,
uint32_t trigger_id,
int ext1,
int ext2);
/** Notifications to application */
static int set_notify_callback(const camera2_device_t *,
camera2_notify_callback notify_cb,
void *user);
/** Vendor metadata registration */
static int get_metadata_vendor_tag_ops(const camera2_device_t *,
vendor_tag_query_ops_t **ops);
// for get_metadata_vendor_tag_ops
static const char* get_camera_vendor_section_name(
const vendor_tag_query_ops_t *,
uint32_t tag);
static const char* get_camera_vendor_tag_name(
const vendor_tag_query_ops_t *,
uint32_t tag);
static int get_camera_vendor_tag_type(
const vendor_tag_query_ops_t *,
uint32_t tag);
static int dump(const camera2_device_t *, int fd);
/** For hw_device_t ops */
static int close(struct hw_device_t* device);
/****************************************************************************
* Data members shared with implementations
***************************************************************************/
protected:
/** Mutex for calls through camera2 device interface */
Mutex mMutex;
bool mStatusPresent;
const camera2_request_queue_src_ops *mRequestQueueSrc;
const camera2_frame_queue_dst_ops *mFrameQueueDst;
struct TagOps : public vendor_tag_query_ops {
EmulatedCamera2 *parent;
};
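// The static vendor tag callbacks receive the base vendor_tag_query_ops
// pointer and recover the owning camera instance through this 'parent'
// field (see get_camera_vendor_section_name and friends in the .cpp file).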
TagOps mVendorTagOps;
void sendNotification(int32_t msgType,
int32_t ext1, int32_t ext2, int32_t ext3);
/****************************************************************************
* Data members
***************************************************************************/
private:
static camera2_device_ops_t sDeviceOps;
camera2_notify_callback mNotifyCb;
void* mNotifyUserPtr;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA2_H */

View file

@ -0,0 +1,271 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Contains implementation of a class EmulatedCamera3 that encapsulates
* functionality common to all version 3.0 emulated camera devices. Instances
* of this class (for each emulated camera) are created during the construction
* of the EmulatedCameraFactory instance. This class serves as an entry point
* for all camera API calls that are defined by the camera3_device_ops_t API.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera3_Camera"
#include <cutils/log.h>
#include "EmulatedCamera3.h"
#include "system/camera_metadata.h"
namespace android {
/**
* Constructs EmulatedCamera3 instance.
* Param:
* cameraId - Zero based camera identifier, which is an index of the camera
* instance in camera factory's array.
* module - Emulated camera HAL module descriptor.
*/
EmulatedCamera3::EmulatedCamera3(int cameraId,
struct hw_module_t* module):
EmulatedBaseCamera(cameraId,
CAMERA_DEVICE_API_VERSION_3_3,
&common,
module),
mStatus(STATUS_ERROR)
{
common.close = EmulatedCamera3::close;
ops = &sDeviceOps;
mCallbackOps = NULL;
}
/* Destructs EmulatedCamera3 instance. */
EmulatedCamera3::~EmulatedCamera3() {
}
/****************************************************************************
* Abstract API
***************************************************************************/
/****************************************************************************
* Public API
***************************************************************************/
status_t EmulatedCamera3::Initialize() {
ALOGV("%s", __FUNCTION__);
mStatus = STATUS_CLOSED;
return NO_ERROR;
}
/****************************************************************************
* Camera API implementation
***************************************************************************/
status_t EmulatedCamera3::connectCamera(hw_device_t** device) {
ALOGV("%s", __FUNCTION__);
if (device == NULL) return BAD_VALUE;
if (mStatus != STATUS_CLOSED) {
ALOGE("%s: Trying to open a camera in state %d!",
__FUNCTION__, mStatus);
return INVALID_OPERATION;
}
*device = &common;
mStatus = STATUS_OPEN;
return NO_ERROR;
}
status_t EmulatedCamera3::closeCamera() {
mStatus = STATUS_CLOSED;
return NO_ERROR;
}
status_t EmulatedCamera3::getCameraInfo(struct camera_info* info) {
return EmulatedBaseCamera::getCameraInfo(info);
}
/****************************************************************************
* Camera Device API implementation.
* These methods are called from the camera API callback routines.
***************************************************************************/
status_t EmulatedCamera3::initializeDevice(
const camera3_callback_ops *callbackOps) {
if (callbackOps == NULL) {
ALOGE("%s: NULL callback ops provided to HAL!",
__FUNCTION__);
return BAD_VALUE;
}
if (mStatus != STATUS_OPEN) {
ALOGE("%s: Trying to initialize a camera in state %d!",
__FUNCTION__, mStatus);
return INVALID_OPERATION;
}
mCallbackOps = callbackOps;
mStatus = STATUS_READY;
return NO_ERROR;
}
status_t EmulatedCamera3::configureStreams(
camera3_stream_configuration *streamList) {
ALOGE("%s: Not implemented", __FUNCTION__);
return INVALID_OPERATION;
}
status_t EmulatedCamera3::registerStreamBuffers(
const camera3_stream_buffer_set *bufferSet) {
ALOGE("%s: Not implemented", __FUNCTION__);
return INVALID_OPERATION;
}
const camera_metadata_t* EmulatedCamera3::constructDefaultRequestSettings(
int type) {
ALOGE("%s: Not implemented", __FUNCTION__);
return NULL;
}
status_t EmulatedCamera3::processCaptureRequest(
camera3_capture_request *request) {
ALOGE("%s: Not implemented", __FUNCTION__);
return INVALID_OPERATION;
}
status_t EmulatedCamera3::flush() {
ALOGE("%s: Not implemented", __FUNCTION__);
return INVALID_OPERATION;
}
/** Debug methods */
void EmulatedCamera3::dump(int fd) {
ALOGE("%s: Not implemented", __FUNCTION__);
return;
}
/****************************************************************************
* Protected API. Callbacks to the framework.
***************************************************************************/
void EmulatedCamera3::sendCaptureResult(camera3_capture_result_t *result) {
mCallbackOps->process_capture_result(mCallbackOps, result);
}
void EmulatedCamera3::sendNotify(camera3_notify_msg_t *msg) {
mCallbackOps->notify(mCallbackOps, msg);
}
/****************************************************************************
* Private API.
***************************************************************************/
/****************************************************************************
* Camera API callbacks as defined by camera3_device_ops structure. See
* hardware/libhardware/include/hardware/camera3.h for information on each
* of these callbacks. Implemented in this class, these callbacks simply
* dispatch the call into an instance of EmulatedCamera3 class defined by the
* 'camera_device3' parameter, or set a member value in the same.
***************************************************************************/
EmulatedCamera3* getInstance(const camera3_device_t *d) {
const EmulatedCamera3* cec = static_cast<const EmulatedCamera3*>(d);
return const_cast<EmulatedCamera3*>(cec);
}
int EmulatedCamera3::initialize(const struct camera3_device *d,
const camera3_callback_ops_t *callback_ops) {
EmulatedCamera3* ec = getInstance(d);
return ec->initializeDevice(callback_ops);
}
int EmulatedCamera3::configure_streams(const struct camera3_device *d,
camera3_stream_configuration_t *stream_list) {
EmulatedCamera3* ec = getInstance(d);
return ec->configureStreams(stream_list);
}
int EmulatedCamera3::register_stream_buffers(
const struct camera3_device *d,
const camera3_stream_buffer_set_t *buffer_set) {
EmulatedCamera3* ec = getInstance(d);
return ec->registerStreamBuffers(buffer_set);
}
int EmulatedCamera3::process_capture_request(
const struct camera3_device *d,
camera3_capture_request_t *request) {
EmulatedCamera3* ec = getInstance(d);
return ec->processCaptureRequest(request);
}
const camera_metadata_t* EmulatedCamera3::construct_default_request_settings(
const camera3_device_t *d, int type) {
EmulatedCamera3* ec = getInstance(d);
return ec->constructDefaultRequestSettings(type);
}
void EmulatedCamera3::dump(const camera3_device_t *d, int fd) {
EmulatedCamera3* ec = getInstance(d);
ec->dump(fd);
}
int EmulatedCamera3::flush(const camera3_device_t *d) {
EmulatedCamera3* ec = getInstance(d);
return ec->flush();
}
int EmulatedCamera3::close(struct hw_device_t* device) {
EmulatedCamera3* ec =
static_cast<EmulatedCamera3*>(
reinterpret_cast<camera3_device_t*>(device) );
if (ec == NULL) {
ALOGE("%s: Unexpected NULL camera3 device", __FUNCTION__);
return BAD_VALUE;
}
return ec->closeCamera();
}
camera3_device_ops_t EmulatedCamera3::sDeviceOps = {
EmulatedCamera3::initialize,
EmulatedCamera3::configure_streams,
/* DEPRECATED: register_stream_buffers */ nullptr,
EmulatedCamera3::construct_default_request_settings,
EmulatedCamera3::process_capture_request,
/* DEPRECATED: get_metadata_vendor_tag_ops */ nullptr,
EmulatedCamera3::dump,
EmulatedCamera3::flush
};
const char* EmulatedCamera3::sAvailableCapabilitiesStrings[NUM_CAPABILITIES] = {
"BACKWARD_COMPATIBLE",
"MANUAL_SENSOR",
"MANUAL_POST_PROCESSING",
"RAW",
"PRIVATE_REPROCESSING",
"READ_SENSOR_SETTINGS",
"BURST_CAPTURE",
"YUV_REPROCESSING",
"DEPTH_OUTPUT",
"CONSTRAINED_HIGH_SPEED_VIDEO",
"FULL_LEVEL"
};
}; /* namespace android */

View file

@ -0,0 +1,203 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H
/**
* Contains declaration of a class EmulatedCamera3 that encapsulates
* functionality common to all version 3.0 emulated camera devices. Instances
* of this class (for each emulated camera) are created during the construction
* of the EmulatedCameraFactory instance. This class serves as an entry point
* for all camera API calls that are defined by the camera3_device_ops_t API.
*/
#include "hardware/camera3.h"
#include "system/camera_metadata.h"
#include "EmulatedBaseCamera.h"
namespace android {
/**
* Encapsulates functionality common to all version 3.0 emulated camera devices
*
* Note that EmulatedCameraFactory instantiates an object of this class just
* once, when EmulatedCameraFactory instance gets constructed. Connection to /
* disconnection from the actual camera device is handled by calls to
* connectDevice(), and closeCamera() methods of this class that are invoked in
* response to hw_module_methods_t::open, and camera_device::close callbacks.
*/
class EmulatedCamera3 : public camera3_device, public EmulatedBaseCamera {
public:
/* Constructs EmulatedCamera3 instance.
* Param:
* cameraId - Zero based camera identifier, which is an index of the camera
* instance in camera factory's array.
* module - Emulated camera HAL module descriptor.
*/
EmulatedCamera3(int cameraId,
struct hw_module_t* module);
/* Destructs EmulatedCamera3 instance. */
virtual ~EmulatedCamera3();
/* List of all defined capabilities plus useful HW levels */
enum AvailableCapabilities {
BACKWARD_COMPATIBLE,
MANUAL_SENSOR,
MANUAL_POST_PROCESSING,
RAW,
PRIVATE_REPROCESSING,
READ_SENSOR_SETTINGS,
BURST_CAPTURE,
YUV_REPROCESSING,
DEPTH_OUTPUT,
CONSTRAINED_HIGH_SPEED_VIDEO,
// Levels
FULL_LEVEL,
NUM_CAPABILITIES
};
// Char strings for above enum, with size NUM_CAPABILITIES
static const char *sAvailableCapabilitiesStrings[];
/****************************************************************************
* Abstract API
***************************************************************************/
public:
/****************************************************************************
* Public API
***************************************************************************/
public:
virtual status_t Initialize();
/****************************************************************************
* Camera module API and generic hardware device API implementation
***************************************************************************/
public:
virtual status_t connectCamera(hw_device_t** device);
virtual status_t closeCamera();
virtual status_t getCameraInfo(struct camera_info* info);
/****************************************************************************
* Camera API implementation.
* These methods are called from the camera API callback routines.
***************************************************************************/
protected:
virtual status_t initializeDevice(
const camera3_callback_ops *callbackOps);
virtual status_t configureStreams(
camera3_stream_configuration *streamList);
virtual status_t registerStreamBuffers(
const camera3_stream_buffer_set *bufferSet);
virtual const camera_metadata_t* constructDefaultRequestSettings(
int type);
virtual status_t processCaptureRequest(camera3_capture_request *request);
virtual status_t flush();
/** Debug methods */
virtual void dump(int fd);
/****************************************************************************
* Camera API callbacks as defined by camera3_device_ops structure. See
* hardware/libhardware/include/hardware/camera3.h for information on each
* of these callbacks. Implemented in this class, these callbacks simply
* dispatch the call into an instance of EmulatedCamera3 class defined in
* the 'camera_device3' parameter.
***************************************************************************/
private:
/** Startup */
static int initialize(const struct camera3_device *,
const camera3_callback_ops_t *callback_ops);
/** Stream configuration and buffer registration */
static int configure_streams(const struct camera3_device *,
camera3_stream_configuration_t *stream_list);
static int register_stream_buffers(const struct camera3_device *,
const camera3_stream_buffer_set_t *buffer_set);
/** Template request settings provision */
static const camera_metadata_t* construct_default_request_settings(
const struct camera3_device *, int type);
/** Submission of capture requests to HAL */
static int process_capture_request(const struct camera3_device *,
camera3_capture_request_t *request);
static void dump(const camera3_device_t *, int fd);
static int flush(const camera3_device_t *);
/** For hw_device_t ops */
static int close(struct hw_device_t* device);
/****************************************************************************
* Data members shared with implementations
***************************************************************************/
protected:
enum {
// State at construction time, and after a device operation error
STATUS_ERROR = 0,
// State after startup-time init and after device instance close
STATUS_CLOSED,
// State after being opened, before device instance init
STATUS_OPEN,
// State after device instance initialization
STATUS_READY,
// State while actively capturing data
STATUS_ACTIVE
} mStatus;
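// Typical lifecycle, as driven by the methods above (a sketch):
//   STATUS_ERROR  --Initialize()-------> STATUS_CLOSED
//   STATUS_CLOSED --connectCamera()----> STATUS_OPEN
//   STATUS_OPEN   --initializeDevice()-> STATUS_READY
//   STATUS_READY  --capture requests---> STATUS_ACTIVE
// closeCamera() returns the device to STATUS_CLOSED from any state.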
/**
* Callbacks back to the framework
*/
void sendCaptureResult(camera3_capture_result_t *result);
void sendNotify(camera3_notify_msg_t *msg);
/****************************************************************************
* Data members
***************************************************************************/
private:
static camera3_device_ops_t sDeviceOps;
const camera3_callback_ops_t *mCallbackOps;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H */

View file

@ -0,0 +1,60 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_COMMON_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_COMMON_H
/*
* Contains common declarations that are used across the camera emulation.
*/
#include <linux/videodev2.h>
#include <hardware/camera.h>
/* A helper class that tracks a routine execution.
* Basically, it dumps an entry message in its constructor, and an exit message
* in its destructor. Use the LOGRE() macro (declared below) to create instances
* of this class at the beginning of the tracked routines / methods.
*/
class HWERoutineTracker {
public:
/* Constructor that prints an "entry" trace message. */
explicit HWERoutineTracker(const char* name)
: mName(name) {
ALOGV("Entering %s", mName);
}
/* Destructor that prints a "leave" trace message. */
~HWERoutineTracker() {
ALOGV("Leaving %s", mName);
}
private:
/* Stores the routine name. */
const char* mName;
};
/* Logs an execution of a routine / method. */
#define LOGRE() HWERoutineTracker hwertracker_##__LINE__(__FUNCTION__)
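/* Example (a minimal sketch with a hypothetical method): placing LOGRE() at
 * the top of a routine logs paired entry/exit messages around its execution:
 *
 *   void SomeCamera::processFrame() {
 *       LOGRE();  // ALOGV "Entering processFrame" here,
 *                 // "Leaving processFrame" when the scope exits
 *       ...
 *   }
 */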
/*
* min / max macros
*/
#define min(a,b) (((a) < (b)) ? (a) : (b))
#define max(a,b) (((a) > (b)) ? (a) : (b))
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_COMMON_H */

View file

@ -0,0 +1,676 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of an abstract class EmulatedCameraDevice that defines
* functionality expected from an emulated physical camera device:
* - Obtaining and setting camera parameters
* - Capturing frames
* - Streaming video
* - etc.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Device"
#include <cutils/log.h>
#include <sys/select.h>
#include <cmath>
#include "Alignment.h"
#include "EmulatedCamera.h"
#include "EmulatedCameraDevice.h"
#undef min
#undef max
#include <algorithm>
namespace android {
const float GAMMA_CORRECTION = 2.2f;
EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
: mObjectLock(),
mCameraHAL(camera_hal),
mExposureCompensation(1.0f),
mWhiteBalanceScale(NULL),
mSupportedWhiteBalanceScale(),
mState(ECDS_CONSTRUCTED),
mTriggerAutoFocus(false)
{
}
EmulatedCameraDevice::~EmulatedCameraDevice()
{
ALOGV("EmulatedCameraDevice destructor");
for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
delete[] mSupportedWhiteBalanceScale.valueAt(i);
}
}
}
/****************************************************************************
* Emulated camera device public API
***************************************************************************/
status_t EmulatedCameraDevice::Initialize()
{
if (isInitialized()) {
ALOGW("%s: Emulated camera device is already initialized: mState = %d",
__FUNCTION__, mState);
return NO_ERROR;
}
mState = ECDS_INITIALIZED;
return NO_ERROR;
}
status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
{
ALOGV("%s", __FUNCTION__);
if (!isStarted()) {
ALOGE("%s: Device is not started", __FUNCTION__);
return EINVAL;
}
/* Frames will be delivered from the thread routine. */
const status_t res = startWorkerThread(one_burst);
ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
return res;
}
status_t EmulatedCameraDevice::stopDeliveringFrames()
{
ALOGV("%s", __FUNCTION__);
if (!isStarted()) {
ALOGW("%s: Device is not started", __FUNCTION__);
return NO_ERROR;
}
const status_t res = stopWorkerThread();
ALOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
return res;
}
status_t EmulatedCameraDevice::setPreviewFrameRate(int framesPerSecond) {
if (framesPerSecond <= 0) {
return EINVAL;
}
mFramesPerSecond = framesPerSecond;
return NO_ERROR;
}
void EmulatedCameraDevice::setExposureCompensation(const float ev) {
ALOGV("%s", __FUNCTION__);
if (!isStarted()) {
ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
}
mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
ALOGV("New exposure compensation is %f", mExposureCompensation);
}
void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
const float r_scale,
const float b_scale) {
ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
float* value = new float[3];
value[0] = r_scale; value[1] = 1.0f; value[2] = b_scale;
mSupportedWhiteBalanceScale.add(String8(mode), value);
}
void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
ALOGV("%s with white balance %s", __FUNCTION__, mode);
mWhiteBalanceScale =
mSupportedWhiteBalanceScale.valueFor(String8(mode));
}
/* Computes the pixel value after adjusting the white balance to the current
 * one. The inputs are the y, u, v channels of the pixel, and the adjusted
 * values are stored in place. The adjustment is done in RGB space.
 */
void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y,
uint8_t& u,
uint8_t& v) const {
float r_scale = mWhiteBalanceScale[0];
float b_scale = mWhiteBalanceScale[2];
int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
int g = YUV2G(y, u, v);
int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;
y = RGB2Y(r, g, b);
u = RGB2U(r, g, b);
v = RGB2V(r, g, b);
}
void EmulatedCameraDevice::checkAutoFocusTrigger() {
// The expected value is a reference so we need it to be a variable
bool expectedTrigger = true;
if (mTriggerAutoFocus.compare_exchange_strong(expectedTrigger, false)) {
// If the compare exchange returns true then the value was the expected
// 'true' and was successfully set to 'false'. So that means it's time
// to trigger an auto-focus event and that we have disabled that trigger
// so it won't happen until another request is received.
mCameraHAL->autoFocusComplete();
}
}
status_t EmulatedCameraDevice::getCurrentFrameImpl(const uint8_t* source,
uint8_t* dest,
uint32_t pixelFormat) const {
if (pixelFormat == mPixelFormat) {
memcpy(dest, source, mFrameBufferSize);
return NO_ERROR;
} else if (pixelFormat == V4L2_PIX_FMT_YUV420 &&
mPixelFormat == V4L2_PIX_FMT_YVU420) {
// Convert from YV12 to YUV420 without alignment
const int ySize = mYStride * mFrameHeight;
const int uvSize = mUVStride * (mFrameHeight / 2);
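// YV12 stores the planes as [Y][V][U] while YU12 expects [Y][U][V], so
// besides stripping any stride padding, the two chroma planes below are
// copied in swapped order.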
if (mYStride == mFrameWidth) {
// Copy Y straight up
memcpy(dest, source, ySize);
} else {
// Strip alignment
for (int y = 0; y < mFrameHeight; ++y) {
memcpy(dest + y * mFrameWidth,
source + y * mYStride,
mFrameWidth);
}
}
if (mUVStride == mFrameWidth / 2) {
// Swap U and V
memcpy(dest + ySize, source + ySize + uvSize, uvSize);
memcpy(dest + ySize + uvSize, source + ySize, uvSize);
} else {
// Strip alignment
uint8_t* uvDest = dest + mFrameWidth * mFrameHeight;
const uint8_t* uvSource = source + ySize + uvSize;
for (int i = 0; i < 2; ++i) {
for (int y = 0; y < mFrameHeight / 2; ++y) {
memcpy(uvDest + y * (mFrameWidth / 2),
uvSource + y * mUVStride,
mFrameWidth / 2);
}
uvDest += (mFrameHeight / 2) * (mFrameWidth / 2);
uvSource -= uvSize;
}
}
return NO_ERROR;
}
ALOGE("%s: Invalid pixel format conversion [%.4s to %.4s] requested",
__FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat),
reinterpret_cast<const char*>(&pixelFormat));
return EINVAL;
}
status_t EmulatedCameraDevice::getCurrentFrame(void* buffer,
uint32_t pixelFormat)
{
if (!isStarted()) {
ALOGE("%s: Device is not started", __FUNCTION__);
return EINVAL;
}
if (buffer == nullptr) {
ALOGE("%s: Invalid buffer provided", __FUNCTION__);
return EINVAL;
}
FrameLock lock(*this);
const void* source = mCameraThread->getPrimaryBuffer();
if (source == nullptr) {
ALOGE("%s: No framebuffer", __FUNCTION__);
return EINVAL;
}
return getCurrentFrameImpl(reinterpret_cast<const uint8_t*>(source),
reinterpret_cast<uint8_t*>(buffer),
pixelFormat);
}
status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
{
if (!isStarted()) {
ALOGE("%s: Device is not started", __FUNCTION__);
return EINVAL;
}
if (buffer == nullptr) {
ALOGE("%s: Invalid buffer provided", __FUNCTION__);
return EINVAL;
}
FrameLock lock(*this);
const void* currentFrame = mCameraThread->getPrimaryBuffer();
if (currentFrame == nullptr) {
ALOGE("%s: No framebuffer", __FUNCTION__);
return EINVAL;
}
/* In emulation the framebuffer is never RGB. */
switch (mPixelFormat) {
case V4L2_PIX_FMT_YVU420:
YV12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
return NO_ERROR;
case V4L2_PIX_FMT_YUV420:
YU12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
return NO_ERROR;
case V4L2_PIX_FMT_NV21:
NV21ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
return NO_ERROR;
case V4L2_PIX_FMT_NV12:
NV12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
return NO_ERROR;
default:
ALOGE("%s: Unknown pixel format %.4s",
__FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat));
return EINVAL;
}
}
const void* EmulatedCameraDevice::getCurrentFrame() {
if (mCameraThread.get()) {
return mCameraThread->getPrimaryBuffer();
}
return nullptr;
}
EmulatedCameraDevice::FrameLock::FrameLock(EmulatedCameraDevice& cameraDevice)
: mCameraDevice(cameraDevice) {
mCameraDevice.lockCurrentFrame();
}
EmulatedCameraDevice::FrameLock::~FrameLock() {
mCameraDevice.unlockCurrentFrame();
}
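/* Usage sketch for FrameLock (this mirrors getCurrentFrame() above):
 * construction locks the current frame and destruction unlocks it, so a
 * scope like
 *   FrameLock lock(*this);
 *   const void* frame = mCameraThread->getPrimaryBuffer();
 * reads the primary buffer without racing the producer's buffer swap. */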
status_t EmulatedCameraDevice::setAutoFocus() {
mTriggerAutoFocus = true;
return NO_ERROR;
}
status_t EmulatedCameraDevice::cancelAutoFocus() {
mTriggerAutoFocus = false;
return NO_ERROR;
}
bool EmulatedCameraDevice::requestRestart(int width, int height,
uint32_t pixelFormat,
bool takingPicture, bool oneBurst) {
if (mCameraThread.get() == nullptr) {
ALOGE("%s: No thread alive to perform the restart, is preview on?",
__FUNCTION__);
return false;
}
mCameraThread->requestRestart(width, height, pixelFormat,
takingPicture, oneBurst);
return true;
}
/****************************************************************************
* Emulated camera device private API
***************************************************************************/
status_t EmulatedCameraDevice::commonStartDevice(int width,
int height,
uint32_t pix_fmt)
{
/* Validate pixel format, and calculate framebuffer size at the same time. */
switch (pix_fmt) {
case V4L2_PIX_FMT_YVU420:
case V4L2_PIX_FMT_YUV420:
// For these pixel formats the strides have to be aligned to 16 byte
// boundaries as per the format specification
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
mYStride = align(width, 16);
mUVStride = align(mYStride / 2, 16);
// The second term should use half the height, but since there are
// two planes the multiplication by two cancels that out
mFrameBufferSize = mYStride * height + mUVStride * height;
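// A worked example for a hypothetical 640x480 YV12 frame:
//   mYStride  = align(640, 16) = 640
//   mUVStride = align(320, 16) = 320
//   mFrameBufferSize = 640*480 + 320*480 = 460800 bytes,
// i.e. the expected 1.5 bytes per pixel.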
break;
case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_NV12:
mYStride = width;
// Because of interleaving the UV stride is the same as the Y stride
// since it covers two pixels, one U and one V.
mUVStride = mYStride;
// Since the U/V stride covers both U and V we don't multiply by two
mFrameBufferSize = mYStride * height + mUVStride * (height / 2);
break;
default:
ALOGE("%s: Unknown pixel format %.4s",
__FUNCTION__, reinterpret_cast<const char*>(&pix_fmt));
return EINVAL;
}
/* Cache framebuffer info. */
mFrameWidth = width;
mFrameHeight = height;
mPixelFormat = pix_fmt;
mTotalPixels = width * height;
/* Allocate framebuffer. */
mFrameBuffers[0].resize(mFrameBufferSize);
mFrameBuffers[1].resize(mFrameBufferSize);
ALOGV("%s: Allocated %zu bytes for %d pixels in %.4s[%dx%d] frame",
__FUNCTION__, mFrameBufferSize, mTotalPixels,
reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
return NO_ERROR;
}
void EmulatedCameraDevice::commonStopDevice()
{
mFrameWidth = mFrameHeight = mTotalPixels = 0;
mPixelFormat = 0;
mFrameBuffers[0].clear();
mFrameBuffers[1].clear();
// No need to keep all that memory allocated if the camera isn't running
mFrameBuffers[0].shrink_to_fit();
mFrameBuffers[1].shrink_to_fit();
}
/****************************************************************************
* Worker thread management.
***************************************************************************/
status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
{
ALOGV("%s", __FUNCTION__);
if (!isInitialized()) {
ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
return EINVAL;
}
mCameraThread = new CameraThread(this, staticProduceFrame, this);
if (mCameraThread == NULL) {
ALOGE("%s: Unable to instantiate CameraThread object", __FUNCTION__);
return ENOMEM;
}
status_t res = mCameraThread->startThread(one_burst);
if (res != NO_ERROR) {
ALOGE("%s: Unable to start CameraThread: %s",
__FUNCTION__, strerror(res));
return res;
}
return res;
}
status_t EmulatedCameraDevice::stopWorkerThread()
{
ALOGV("%s", __FUNCTION__);
if (!isInitialized()) {
ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
return EINVAL;
}
status_t res = mCameraThread->stopThread();
if (res != NO_ERROR) {
ALOGE("%s: Unable to stop CameraThread", __FUNCTION__);
return res;
}
res = mCameraThread->joinThread();
if (res != NO_ERROR) {
ALOGE("%s: Unable to join CameraThread", __FUNCTION__);
return res;
}
// Destroy the thread as well
mCameraThread.clear();
return res;
}
EmulatedCameraDevice::CameraThread::CameraThread(EmulatedCameraDevice* dev,
ProduceFrameFunc producer,
void* producerOpaque)
: WorkerThread("Camera_CameraThread", dev, dev->mCameraHAL),
mCurFrameTimestamp(0),
mProducerFunc(producer),
mProducerOpaque(producerOpaque),
mRestartWidth(0),
mRestartHeight(0),
mRestartPixelFormat(0),
mRestartOneBurst(false),
mRestartTakingPicture(false),
mRestartRequested(false) {
}
const void* EmulatedCameraDevice::CameraThread::getPrimaryBuffer() const {
if (mFrameProducer.get()) {
return mFrameProducer->getPrimaryBuffer();
}
return nullptr;
}
void EmulatedCameraDevice::CameraThread::lockPrimaryBuffer() {
mFrameProducer->lockPrimaryBuffer();
}
void EmulatedCameraDevice::CameraThread::unlockPrimaryBuffer() {
mFrameProducer->unlockPrimaryBuffer();
}
bool
EmulatedCameraDevice::CameraThread::waitForFrameOrTimeout(nsecs_t timeout) {
// Keep waiting until the frame producer indicates that a frame is available
// This does introduce some unnecessary latency to the first frame delivery
// but avoids a lot of thread synchronization.
do {
// Wait on the run condition for at most |timeout| nanoseconds; the
// condition is signaled when the thread is asked to stop running.
Mutex::Autolock lock(mRunningMutex);
mRunningCondition.waitRelative(mRunningMutex, timeout);
if (!mRunning) {
ALOGV("%s: CameraThread has been terminated.", __FUNCTION__);
return false;
}
// Set a short timeout in case there is no frame available and we are
// going to loop. This way we ensure a sleep but keep a decent latency
timeout = milliseconds(5);
} while (!mFrameProducer->hasFrame());
return true;
}
bool EmulatedCameraDevice::CameraThread::inWorkerThread() {
/* Wait till FPS timeout expires, or thread exit message is received. */
nsecs_t wakeAt =
mCurFrameTimestamp + 1000000000.0 / mCameraDevice->mFramesPerSecond;
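// e.g. at 30 frames per second this paces delivery to one frame every
// ~33 ms, measured from the previous frame's delivery timestamp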
nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
nsecs_t timeout = std::max<nsecs_t>(0, wakeAt - now);
if (!waitForFrameOrTimeout(timeout)) {
return false;
}
/* Check if a restart was requested and, if so, apply the requested changes */
if (!checkRestartRequest()) {
return false;
}
/* Check if an auto-focus event needs to be triggered */
mCameraDevice->checkAutoFocusTrigger();
mCurFrameTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
mCameraHAL->onNextFrameAvailable(mCurFrameTimestamp, mCameraDevice);
return true;
}
status_t EmulatedCameraDevice::CameraThread::onThreadStart() {
void* primaryBuffer = mCameraDevice->getPrimaryBuffer();
void* secondaryBuffer = mCameraDevice->getSecondaryBuffer();
mFrameProducer = new FrameProducer(mCameraDevice,
mProducerFunc, mProducerOpaque,
primaryBuffer, secondaryBuffer);
if (mFrameProducer.get() == nullptr) {
ALOGE("%s: Could not instantiate FrameProducer object", __FUNCTION__);
return ENOMEM;
}
return mFrameProducer->startThread(mOneBurst);
}
void EmulatedCameraDevice::CameraThread::onThreadExit() {
if (mFrameProducer.get()) {
if (mFrameProducer->stopThread() == NO_ERROR) {
mFrameProducer->joinThread();
mFrameProducer.clear();
}
}
}
EmulatedCameraDevice::CameraThread::FrameProducer::FrameProducer(
EmulatedCameraDevice* dev,
ProduceFrameFunc producer,
void* opaque,
void* primaryBuffer,
void* secondaryBuffer)
: WorkerThread("Camera_FrameProducer", dev, dev->mCameraHAL),
mProducer(producer),
mOpaque(opaque),
mPrimaryBuffer(primaryBuffer),
mSecondaryBuffer(secondaryBuffer),
mLastFrame(0),
mHasFrame(false) {
}
const void*
EmulatedCameraDevice::CameraThread::FrameProducer::getPrimaryBuffer() const {
return mPrimaryBuffer;
}
void EmulatedCameraDevice::CameraThread::FrameProducer::lockPrimaryBuffer() {
mBufferMutex.lock();
}
void EmulatedCameraDevice::CameraThread::FrameProducer::unlockPrimaryBuffer() {
mBufferMutex.unlock();
}
void EmulatedCameraDevice::CameraThread::requestRestart(int width,
int height,
uint32_t pixelFormat,
bool takingPicture,
bool oneBurst) {
Mutex::Autolock lock(mRequestMutex);
mRestartWidth = width;
mRestartHeight = height;
mRestartPixelFormat = pixelFormat;
mRestartTakingPicture = takingPicture;
mRestartOneBurst = oneBurst;
mRestartRequested = true;
}
bool EmulatedCameraDevice::CameraThread::FrameProducer::hasFrame() const {
return mHasFrame;
}
bool EmulatedCameraDevice::CameraThread::checkRestartRequest() {
Mutex::Autolock lock(mRequestMutex);
if (mRestartRequested) {
mRestartRequested = false;
status_t res = mFrameProducer->stopThread();
if (res != NO_ERROR) {
ALOGE("%s: Could not stop frame producer thread", __FUNCTION__);
mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
return false;
}
res = mFrameProducer->joinThread();
if (res != NO_ERROR) {
ALOGE("%s: Could not join frame producer thread", __FUNCTION__);
mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
return false;
}
mFrameProducer.clear();
res = mCameraDevice->stopDevice();
if (res != NO_ERROR) {
ALOGE("%s: Could not stop device", __FUNCTION__);
mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
return false;
}
res = mCameraDevice->startDevice(mRestartWidth,
mRestartHeight,
mRestartPixelFormat);
if (res != NO_ERROR) {
ALOGE("%s: Could not start device", __FUNCTION__);
mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
return false;
}
if (mRestartTakingPicture) {
mCameraHAL->setTakingPicture(true);
}
mOneBurst = mRestartOneBurst;
 // Pretend this is a thread start; it performs the remaining setup
if (onThreadStart() != NO_ERROR) {
mCameraDevice->stopDevice();
mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
return false;
}
// Now wait for the frame producer to start producing before we proceed
return waitForFrameOrTimeout(0);
}
return true;
}
bool EmulatedCameraDevice::CameraThread::FrameProducer::inWorkerThread() {
nsecs_t nextFrame =
mLastFrame + 1000000000 / mCameraDevice->mFramesPerSecond;
nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
nsecs_t timeout = std::max<nsecs_t>(0, nextFrame - now);
{
Mutex::Autolock lock(mRunningMutex);
mRunningCondition.waitRelative(mRunningMutex, timeout);
if (!mRunning) {
ALOGV("%s: FrameProducer has been terminated.", __FUNCTION__);
return false;
}
}
// Produce one frame and place it in the secondary buffer
mLastFrame = systemTime(SYSTEM_TIME_MONOTONIC);
if (!mProducer(mOpaque, mSecondaryBuffer)) {
ALOGE("FrameProducer could not produce frame, exiting thread");
mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
return false;
}
{
// Switch buffers now that the secondary buffer is ready
Mutex::Autolock lock(mBufferMutex);
std::swap(mPrimaryBuffer, mSecondaryBuffer);
}
mHasFrame = true;
return true;
}
void EmulatedCameraDevice::lockCurrentFrame() {
mCameraThread->lockPrimaryBuffer();
}
void EmulatedCameraDevice::unlockCurrentFrame() {
mCameraThread->unlockPrimaryBuffer();
}
}; /* namespace android */

View file

@ -0,0 +1,656 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H
/*
* Contains declaration of an abstract class EmulatedCameraDevice that defines
* functionality expected from an emulated physical camera device:
* - Obtaining and setting camera device parameters
* - Capturing frames
* - Streaming video
* - etc.
*/
#include <utils/threads.h>
#include <utils/KeyedVector.h>
#include <utils/String8.h>
#include "EmulatedCameraCommon.h"
#include "Converters.h"
#include "WorkerThread.h"
#undef min
#undef max
#include <vector>
namespace android {
class EmulatedCamera;
/* Encapsulates an abstract class EmulatedCameraDevice that defines
* functionality expected from an emulated physical camera device:
* - Obtaining and setting camera device parameters
* - Capturing frames
* - Streaming video
* - etc.
*/
class EmulatedCameraDevice {
public:
/* Constructs EmulatedCameraDevice instance.
* Param:
* camera_hal - Emulated camera that implements the camera HAL API, and
* manages (contains) this object.
*/
explicit EmulatedCameraDevice(EmulatedCamera* camera_hal);
/* Destructs EmulatedCameraDevice instance. */
virtual ~EmulatedCameraDevice();
/***************************************************************************
* Emulated camera device abstract interface
**************************************************************************/
public:
/* Connects to the camera device.
* This method must be called on an initialized instance of this class.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t connectDevice() = 0;
/* Disconnects from the camera device.
* Return:
* NO_ERROR on success, or an appropriate error status. If this method is
 * called for an already disconnected or uninitialized instance of this class,
* a successful status must be returned from this method. If this method is
* called for an instance that is in the "started" state, this method must
* return a failure.
*/
virtual status_t disconnectDevice() = 0;
/* Starts the camera device.
* This method tells the camera device to start capturing frames of the given
* dimensions for the given pixel format. Note that this method doesn't start
* the delivery of the captured frames to the emulated camera. Call
* startDeliveringFrames method to start delivering frames. This method must
* be called on a connected instance of this class. If it is called on a
* disconnected instance, this method must return a failure.
* Param:
* width, height - Frame dimensions to use when capturing video frames.
* pix_fmt - Pixel format to use when capturing video frames.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t startDevice(int width, int height, uint32_t pix_fmt) = 0;
/* Stops the camera device.
* This method tells the camera device to stop capturing frames. Note that
* this method doesn't stop delivering frames to the emulated camera. Always
* call stopDeliveringFrames prior to calling this method.
* Return:
* NO_ERROR on success, or an appropriate error status. If this method is
* called for an object that is not capturing frames, or is disconnected,
* or is uninitialized, a successful status must be returned from this
* method.
*/
virtual status_t stopDevice() = 0;
/***************************************************************************
* Emulated camera device public API
**************************************************************************/
public:
/* Initializes EmulatedCameraDevice instance.
* Derived classes should override this method in order to cache static
* properties of the physical device (list of supported pixel formats, frame
 * sizes, etc.). If this method is called on an already initialized instance,
* it must return a successful status.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t Initialize();
/* Initializes the white balance modes parameters.
* The parameters are passed by each individual derived camera API to
* represent that different camera manufacturers may have different
 * preferences on the white balance parameters. The green channel in the RGB
 * color space is fixed to keep the luminance reasonably constant.
*
* Param:
* mode the text describing the current white balance mode
* r_scale the scale factor for the R channel in RGB space
* b_scale the scale factor for the B channel in RGB space.
*/
void initializeWhiteBalanceModes(const char* mode,
const float r_scale,
const float b_scale);
/* Starts delivering frames captured from the camera device.
* This method will start the worker thread that would be pulling frames from
* the camera device, and will deliver the pulled frames back to the emulated
* camera via onNextFrameAvailable callback. This method must be called on a
* connected instance of this class with a started camera device. If it is
 * called on a disconnected instance, or the camera device has not been started,
* this method must return a failure.
* Param:
* one_burst - Controls how many frames should be delivered. If this
* parameter is 'true', only one captured frame will be delivered to the
* emulated camera. If this parameter is 'false', frames will keep
* coming until stopDeliveringFrames method is called. Typically, this
* parameter is set to 'true' only in order to obtain a single frame
* that will be used as a "picture" in takePicture method of the
* emulated camera.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t startDeliveringFrames(bool one_burst);
/* Stops delivering frames captured from the camera device.
* This method will stop the worker thread started by startDeliveringFrames.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t stopDeliveringFrames();
/* Set the preview frame rate.
* Indicates the rate at which the camera should provide preview frames in
* frames per second. */
status_t setPreviewFrameRate(int framesPerSecond);
/* Sets the exposure compensation for the camera device.
*/
void setExposureCompensation(const float ev);
/* Sets the white balance mode for the device.
*/
void setWhiteBalanceMode(const char* mode);
/* Gets current framebuffer in a selected format
* This method must be called on a connected instance of this class with a
* started camera device. If it is called on a disconnected instance, or
 * the camera device has not been started, this method must return a failure.
* Note that this method should be called only after at least one frame has
* been captured and delivered. Otherwise it will return garbage in the
* preview frame buffer. Typically, this method should be called from
 * onNextFrameAvailable callback. The method can perform some basic pixel
 * format conversions. If a requested conversion is not supported the method
 * will fail. Note that this does NOT require
* that the current frame be locked using a FrameLock object.
*
* Param:
* buffer - Buffer, large enough to contain the entire frame.
* pixelFormat - The pixel format to convert to, use
* getOriginalPixelFormat() to get the configured pixel
* format (if using this no conversion will be needed)
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t getCurrentFrame(void* buffer, uint32_t pixelFormat);
/* Gets current framebuffer, converted into preview frame format.
* This method must be called on a connected instance of this class with a
* started camera device. If it is called on a disconnected instance, or
 * the camera device has not been started, this method must return a failure.
* Note that this method should be called only after at least one frame has
* been captured and delivered. Otherwise it will return garbage in the
* preview frame buffer. Typically, this method should be called from
* onNextFrameAvailable callback. Note that this does NOT require that the
* current frame be locked using a FrameLock object.
* Param:
* buffer - Buffer, large enough to contain the entire preview frame.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t getCurrentPreviewFrame(void* buffer);
/* Gets a pointer to the current frame buffer in its raw format.
* This method must be called on a connected instance of this class with a
* started camera device. If it is called on a disconnected instance, or
 * the camera device has not been started, this method must return NULL.
* This method should only be called when the frame lock is held through
* a FrameLock object. Otherwise the contents of the frame might change
* unexpectedly or its memory could be deallocated leading to a crash.
* Return:
* A pointer to the current frame buffer on success, NULL otherwise.
*/
virtual const void* getCurrentFrame();
class FrameLock {
public:
FrameLock(EmulatedCameraDevice& cameraDevice);
~FrameLock();
private:
EmulatedCameraDevice& mCameraDevice;
};
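 /* Illustrative usage sketch (not part of the original sources): hold the
  * frame lock for the duration of raw frame access:
  *
  *   {
  *       EmulatedCameraDevice::FrameLock lock(*device);
  *       const void* frame = device->getCurrentFrame();
  *       // ... read the frame while the lock is held ...
  *   }   // the lock is released when 'lock' goes out of scope
  */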
/* Gets width of the frame obtained from the physical device.
* Return:
 * Width of the frame obtained from the physical device. Note that the value
 * returned from this method is valid only if the camera device has been
* started.
*/
inline int getFrameWidth() const
{
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mFrameWidth;
}
/* Gets height of the frame obtained from the physical device.
* Return:
 * Height of the frame obtained from the physical device. Note that the value
 * returned from this method is valid only if the camera device has been
* started.
*/
inline int getFrameHeight() const
{
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mFrameHeight;
}
/* Gets byte size of the current frame buffer.
* Return:
 * Byte size of the frame buffer. Note that the value returned from this
 * method is valid only if the camera device has been started.
*/
inline size_t getFrameBufferSize() const
{
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mFrameBufferSize;
}
/* Get number of bytes required to store current video frame buffer. Note
* that this can be different from getFrameBufferSize depending on the pixel
* format and resolution. The video frames use a pixel format that is
* suitable for the encoding pipeline and this may have different alignment
* requirements than the pixel format used for regular frames.
*/
inline size_t getVideoFrameBufferSize() const
{
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
// Currently the video format is always YUV 420 without any kind of
// alignment. So each pixel uses 12 bits, and then we divide by 8 to get
// the size in bytes. If additional pixel formats are supported this
// should be updated to take the selected video format into
// consideration.
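        // Worked example: a 640x480 frame takes 640 * 480 * 12 / 8 = 460800
        // bytes in this YUV 420 layout.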
return (mFrameWidth * mFrameHeight * 12) / 8;
}
/* Gets number of pixels in the current frame buffer.
* Return:
 * Number of pixels in the frame buffer. Note that the value returned from
 * this method is valid only if the camera device has been started.
*/
inline int getPixelNum() const
{
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mTotalPixels;
}
/* Gets pixel format of the frame that camera device streams to this class.
* Throughout camera framework, there are three different forms of pixel
* format representation:
* - Original format, as reported by the actual camera device. Values for
* this format are declared in bionic/libc/kernel/common/linux/videodev2.h
* - String representation as defined in CameraParameters::PIXEL_FORMAT_XXX
* strings in frameworks/base/include/camera/CameraParameters.h
* - HAL_PIXEL_FORMAT_XXX format, as defined in system/core/include/system/graphics.h
* Since emulated camera device gets its data from the actual device, it gets
* pixel format in the original form. And that's the pixel format
* representation that will be returned from this method. HAL components will
* need to translate value returned from this method to the appropriate form.
 * This method must be called only on a started instance of this class, since
 * it's applicable only when the camera device is ready to stream frames.
* Return:
 * Current framebuffer's pixel format. Note that the value returned from
 * this method is valid only if the camera device has been started.
*/
inline uint32_t getOriginalPixelFormat() const
{
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mPixelFormat;
}
/*
* State checkers.
*/
inline bool isInitialized() const {
return mState != ECDS_CONSTRUCTED;
}
inline bool isConnected() const {
 /* Instance is connected when its status is either "connected" or
  * "started". */
return mState == ECDS_CONNECTED || mState == ECDS_STARTED;
}
inline bool isStarted() const {
return mState == ECDS_STARTED;
}
 /* Enable auto-focus for the camera; this is only possible between calls to
* startPreview and stopPreview, i.e. when preview frames are being
* delivered. This will eventually trigger a callback to the camera HAL
* saying auto-focus completed.
*/
virtual status_t setAutoFocus();
/* Cancel auto-focus if it's enabled.
*/
virtual status_t cancelAutoFocus();
/* Request an asynchronous camera restart with new image parameters. The
* restart will be performed on the same thread that delivers frames,
* ensuring that all callbacks are done from the same thread.
 * Return:
 * false if the thread request cannot be honored because no thread is
 * running or some other error occurred.
*/
bool requestRestart(int width, int height, uint32_t pixelFormat,
bool takingPicture, bool oneBurst);
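 /* Illustrative call with hypothetical values: request a switch to a
  * 320x240 YUV420 stream without blocking the caller:
  *   device->requestRestart(320, 240, V4L2_PIX_FMT_YUV420,
  *                          false, false);  // takingPicture, oneBurst
  */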
/****************************************************************************
* Emulated camera device private API
***************************************************************************/
protected:
/* Performs common validation and calculation of startDevice parameters.
* Param:
* width, height, pix_fmt - Parameters passed to the startDevice method.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t commonStartDevice(int width, int height, uint32_t pix_fmt);
/* Performs common cleanup on stopDevice.
* This method will undo what commonStartDevice had done.
*/
virtual void commonStopDevice();
 /** Computes a luminance value after taking the exposure compensation
  * value into account.
*
* Param:
* inputY - The input luminance value.
* Return:
* The luminance value after adjusting the exposure compensation.
*/
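 /* Worked example, assuming clamp() saturates to [0, 255]: an inputY of 100
  * with mExposureCompensation == 1.5f yields 150, while an inputY of 200
  * would clamp to 255. */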
inline uint8_t changeExposure(const uint8_t& inputY) const {
return static_cast<uint8_t>(clamp(static_cast<float>(inputY) *
mExposureCompensation));
}
/** Computes the pixel value in YUV space after adjusting to the current
* white balance mode.
*/
void changeWhiteBalance(uint8_t& y, uint8_t& u, uint8_t& v) const;
/* Check if there is a pending auto-focus trigger and send a notification
* if there is. This should be called from the worker thread loop if the
* camera device wishes to use the default behavior of immediately sending
* an auto-focus completion event on request. Otherwise the device should
* implement its own auto-focus behavior. */
void checkAutoFocusTrigger();
/* Implementation for getCurrentFrame that includes pixel format conversion
* if needed. This allows subclasses to easily use this method instead of
* having to reimplement the conversion all over.
*/
status_t getCurrentFrameImpl(const uint8_t* source, uint8_t* dest,
uint32_t pixelFormat) const;
/****************************************************************************
* Worker thread management.
 * Typically when an emulated camera device starts capturing frames from the
 * actual device, it does that in a worker thread created in StartCapturing,
 * and terminated in StopCapturing. Since this is such a typical scenario,
 * it makes sense to encapsulate worker thread management in the base class
* for all emulated camera devices.
***************************************************************************/
protected:
/* Starts the worker thread.
* Typically, the worker thread is started from the startDeliveringFrames
* method of this class.
* Param:
* one_burst - Controls how many times thread loop should run. If this
 * parameter is 'true', thread routine will run only once. If this
 * parameter is 'false', thread routine will run until the
 * stopWorkerThread method is called. See startDeliveringFrames for
* more info.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t startWorkerThread(bool one_burst);
/* Stop the worker thread.
* Note that this method will always wait for the worker thread to
* terminate. Typically, the worker thread is stopped from the
* stopDeliveringFrames method of this class.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t stopWorkerThread();
/* Produce a camera frame and place it in buffer. The buffer is one of
* the two buffers provided to mFrameProducer during construction along with
* a pointer to this method. The method is expected to know what size frames
* it provided to the producer thread. Returning false indicates an
* unrecoverable error that will stop the frame production thread. */
virtual bool produceFrame(void* buffer) = 0;
/* Get the primary buffer to use when constructing the FrameProducer. */
virtual void* getPrimaryBuffer() {
return mFrameBuffers[0].data();
}
 /* Get the secondary buffer to use when constructing the FrameProducer. */
virtual void* getSecondaryBuffer() {
return mFrameBuffers[1].data();
}
 /* A class that encapsulates the asynchronous behavior of a camera. This
* includes asynchronous production (through another thread), frame delivery
* as well as asynchronous state changes that have to be synchronized with
* frame production and delivery but can't be blocking the camera HAL. */
class CameraThread : public WorkerThread {
public:
typedef bool (*ProduceFrameFunc)(void* opaque, void* destinationBuffer);
CameraThread(EmulatedCameraDevice* cameraDevice,
ProduceFrameFunc producer,
void* producerOpaque);
/* Access the primary buffer of the frame producer, this is the frame
* that is currently not being written to. The buffer will only have
* valid contents if hasFrame() returns true. Note that accessing this
* without first having created a Lock can lead to contents changing
* without notice. */
const void* getPrimaryBuffer() const;
/* Lock and unlock the primary buffer */
void lockPrimaryBuffer();
void unlockPrimaryBuffer();
void requestRestart(int width, int height, uint32_t pixelFormat,
bool takingPicture, bool oneBurst);
private:
bool checkRestartRequest();
bool waitForFrameOrTimeout(nsecs_t timeout);
bool inWorkerThread() override;
status_t onThreadStart() override;
void onThreadExit() override;
/* A class with a thread that will call a function at a specified
* interval to produce frames. This is done in a double-buffered fashion
* to make sure that one of the frames can be delivered without risk of
* overwriting its contents. Access to the primary buffer, the one NOT
* being drawn to, should be protected with the lock methods provided or
* the guarantee of not overwriting the contents does not hold.
*/
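  /* In steady state the producer writes into mSecondaryBuffer and then
   * swaps it with mPrimaryBuffer under mBufferMutex, so a reader holding
   * the buffer lock always observes a fully written frame. */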
class FrameProducer : public WorkerThread {
public:
FrameProducer(EmulatedCameraDevice* cameraDevice,
ProduceFrameFunc producer, void* opaque,
void* primaryBuffer, void* secondaryBuffer);
/* Indicates if the producer has produced at least one frame. */
bool hasFrame() const;
const void* getPrimaryBuffer() const;
void lockPrimaryBuffer();
void unlockPrimaryBuffer();
protected:
bool inWorkerThread() override;
ProduceFrameFunc mProducer;
void* mOpaque;
void* mPrimaryBuffer;
void* mSecondaryBuffer;
nsecs_t mLastFrame;
mutable Mutex mBufferMutex;
std::atomic<bool> mHasFrame;
};
nsecs_t mCurFrameTimestamp;
/* Worker thread that will produce frames for the camera thread */
sp<FrameProducer> mFrameProducer;
ProduceFrameFunc mProducerFunc;
void* mProducerOpaque;
Mutex mRequestMutex;
int mRestartWidth;
int mRestartHeight;
uint32_t mRestartPixelFormat;
bool mRestartOneBurst;
bool mRestartTakingPicture;
bool mRestartRequested;
};
/****************************************************************************
* Data members
***************************************************************************/
protected:
/* Locks this instance for parameters, state, etc. change. */
Mutex mObjectLock;
/* A camera thread that is used in frame production, delivery and handling
* of asynchronous restarts. Internally the process of generating and
* delivering frames is split up into two threads. This way frames can
* always be delivered on time even if they cannot be produced fast enough
* to keep up with the expected frame rate. It also increases performance on
 * multi-core systems. If the producer cannot keep up, the last frame will
 * simply be delivered again. */
sp<CameraThread> mCameraThread;
/* Emulated camera object containing this instance. */
EmulatedCamera* mCameraHAL;
/* Framebuffers containing the frame being drawn to and the frame being
* delivered. This is used by the double buffering producer thread and
* the consumer thread will copy frames from one of these buffers to
* mCurrentFrame to avoid being stalled by frame production. */
std::vector<uint8_t> mFrameBuffers[2];
/*
* Framebuffer properties.
*/
/* Byte size of the framebuffer. */
size_t mFrameBufferSize;
 /* Original pixel format (one of the V4L2_PIX_FMT_XXX values, as defined in
  * bionic/libc/kernel/common/linux/videodev2.h). */
uint32_t mPixelFormat;
/* Frame width */
int mFrameWidth;
/* Frame height */
int mFrameHeight;
/* The number of frames per second that the camera should deliver */
int mFramesPerSecond;
/* Defines byte distance between the start of each Y row */
int mYStride;
/* Defines byte distance between the start of each U/V row. For formats with
* separate U and V planes this is the distance between rows in each plane.
* For formats with interleaved U and V components this is the distance
* between rows in the interleaved plane, meaning that it's the stride over
* the combined U and V components. */
int mUVStride;
/* Total number of pixels */
int mTotalPixels;
/* Exposure compensation value */
float mExposureCompensation;
float* mWhiteBalanceScale;
DefaultKeyedVector<String8, float*> mSupportedWhiteBalanceScale;
/* Defines possible states of the emulated camera device object.
*/
enum EmulatedCameraDeviceState {
/* Object has been constructed. */
ECDS_CONSTRUCTED,
/* Object has been initialized. */
ECDS_INITIALIZED,
/* Object has been connected to the physical device. */
ECDS_CONNECTED,
/* Camera device has been started. */
ECDS_STARTED,
};
/* Object state. */
EmulatedCameraDeviceState mState;
private:
/* Lock the current frame so that it can safely be accessed using
* getCurrentFrame. Prefer using a FrameLock object on the stack instead
* to ensure that the lock is always unlocked properly.
*/
void lockCurrentFrame();
/* Unlock the current frame after locking it. Prefer using a FrameLock
* object instead.
*/
void unlockCurrentFrame();
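 /* Trampoline that adapts the C-style ProduceFrameFunc callback signature
  * to the virtual produceFrame method; 'opaque' is the EmulatedCameraDevice
  * instance. */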
static bool staticProduceFrame(void* opaque, void* buffer) {
auto cameraDevice = reinterpret_cast<EmulatedCameraDevice*>(opaque);
return cameraDevice->produceFrame(buffer);
}
/* A flag indicating if an auto-focus completion event should be sent the
 * next time the worker thread runs. This implies that the auto-focus
 * completion event can only be delivered while preview frames are being
 * delivered.
* This is also a requirement specified in the documentation where a request
* to perform auto-focusing is only valid between calls to startPreview and
* stopPreview.
* https://developer.android.com/reference/android/hardware/Camera.html#autoFocus(android.hardware.Camera.AutoFocusCallback)
*/
std::atomic<bool> mTriggerAutoFocus;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H */

View file

@ -0,0 +1,564 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedCameraFactory that manages cameras
* available for emulation.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Factory"
#include <cutils/log.h>
#include <cutils/properties.h>
#include "EmulatedQemuCamera.h"
#include "EmulatedFakeCamera.h"
#include "EmulatedFakeCamera2.h"
#include "EmulatedFakeCamera3.h"
#include "EmulatedCameraHotplugThread.h"
#include "EmulatedCameraFactory.h"
extern camera_module_t HAL_MODULE_INFO_SYM;
/* A global instance of EmulatedCameraFactory is statically instantiated and
* initialized when camera emulation HAL is loaded.
*/
android::EmulatedCameraFactory gEmulatedCameraFactory;
namespace android {
EmulatedCameraFactory::EmulatedCameraFactory()
: mQemuClient(),
mEmulatedCameras(NULL),
mEmulatedCameraNum(0),
mFakeCameraNum(0),
mConstructedOK(false),
mCallbacks(NULL)
{
status_t res;
/* Connect to the factory service in the emulator, and create Qemu cameras. */
if (mQemuClient.connectClient(NULL) == NO_ERROR) {
/* Connection has succeeded. Create emulated cameras for each camera
* device, reported by the service. */
createQemuCameras();
}
waitForQemuSfFakeCameraPropertyAvailable();
if (isBackFakeCameraEmulationOn()) {
/* Camera ID. */
const int camera_id = mEmulatedCameraNum;
/* Use fake camera to emulate back-facing camera. */
mEmulatedCameraNum++;
 /* Make sure that array is allocated (in case there were no 'qemu'
  * cameras created). Note that we preallocate the array so it may contain
* two fake cameras: one facing back, and another facing front. */
if (mEmulatedCameras == NULL) {
mEmulatedCameras = new EmulatedBaseCamera*[mEmulatedCameraNum + 1];
if (mEmulatedCameras == NULL) {
ALOGE("%s: Unable to allocate emulated camera array for %d entries",
__FUNCTION__, mEmulatedCameraNum);
return;
}
memset(mEmulatedCameras, 0,
(mEmulatedCameraNum + 1) * sizeof(EmulatedBaseCamera*));
}
/* Create, and initialize the fake camera */
switch (getBackCameraHalVersion()) {
case 1:
mEmulatedCameras[camera_id] =
new EmulatedFakeCamera(camera_id, true,
&HAL_MODULE_INFO_SYM.common);
break;
case 2:
mEmulatedCameras[camera_id] =
new EmulatedFakeCamera2(camera_id, true,
&HAL_MODULE_INFO_SYM.common);
break;
case 3:
mEmulatedCameras[camera_id] =
new EmulatedFakeCamera3(camera_id, true,
&HAL_MODULE_INFO_SYM.common);
break;
default:
ALOGE("%s: Unknown back camera hal version requested: %d", __FUNCTION__,
getBackCameraHalVersion());
}
if (mEmulatedCameras[camera_id] != NULL) {
ALOGV("%s: Back camera device version is %d", __FUNCTION__,
getBackCameraHalVersion());
res = mEmulatedCameras[camera_id]->Initialize();
if (res != NO_ERROR) {
ALOGE("%s: Unable to intialize back camera %d: %s (%d)",
__FUNCTION__, camera_id, strerror(-res), res);
delete mEmulatedCameras[camera_id];
mEmulatedCameraNum--;
}
} else {
mEmulatedCameraNum--;
ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
}
}
if (isFrontFakeCameraEmulationOn()) {
/* Camera ID. */
const int camera_id = mEmulatedCameraNum;
/* Use fake camera to emulate front-facing camera. */
mEmulatedCameraNum++;
 /* Make sure that array is allocated (in case there were no 'qemu'
  * cameras created). */
if (mEmulatedCameras == NULL) {
mEmulatedCameras = new EmulatedBaseCamera*[mEmulatedCameraNum];
if (mEmulatedCameras == NULL) {
ALOGE("%s: Unable to allocate emulated camera array for %d entries",
__FUNCTION__, mEmulatedCameraNum);
return;
}
memset(mEmulatedCameras, 0,
mEmulatedCameraNum * sizeof(EmulatedBaseCamera*));
}
/* Create, and initialize the fake camera */
switch (getFrontCameraHalVersion()) {
case 1:
mEmulatedCameras[camera_id] =
new EmulatedFakeCamera(camera_id, false,
&HAL_MODULE_INFO_SYM.common);
break;
case 2:
mEmulatedCameras[camera_id] =
new EmulatedFakeCamera2(camera_id, false,
&HAL_MODULE_INFO_SYM.common);
break;
case 3:
mEmulatedCameras[camera_id] =
new EmulatedFakeCamera3(camera_id, false,
&HAL_MODULE_INFO_SYM.common);
break;
default:
ALOGE("%s: Unknown front camera hal version requested: %d",
__FUNCTION__,
getFrontCameraHalVersion());
}
if (mEmulatedCameras[camera_id] != NULL) {
ALOGV("%s: Front camera device version is %d", __FUNCTION__,
getFrontCameraHalVersion());
res = mEmulatedCameras[camera_id]->Initialize();
if (res != NO_ERROR) {
ALOGE("%s: Unable to intialize front camera %d: %s (%d)",
__FUNCTION__, camera_id, strerror(-res), res);
delete mEmulatedCameras[camera_id];
mEmulatedCameraNum--;
}
} else {
mEmulatedCameraNum--;
ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
}
}
ALOGE("%d cameras are being emulated. %d of them are fake cameras.",
mEmulatedCameraNum, mFakeCameraNum);
/* Create hotplug thread */
{
Vector<int> cameraIdVector;
for (int i = 0; i < mEmulatedCameraNum; ++i) {
cameraIdVector.push_back(i);
}
mHotplugThread = new EmulatedCameraHotplugThread(&cameraIdVector[0],
mEmulatedCameraNum);
mHotplugThread->run("EmulatedCameraHotplugThread");
}
mConstructedOK = true;
}
EmulatedCameraFactory::~EmulatedCameraFactory()
{
if (mEmulatedCameras != NULL) {
for (int n = 0; n < mEmulatedCameraNum; n++) {
if (mEmulatedCameras[n] != NULL) {
delete mEmulatedCameras[n];
}
}
delete[] mEmulatedCameras;
}
if (mHotplugThread != NULL) {
mHotplugThread->requestExit();
mHotplugThread->join();
}
}
/****************************************************************************
* Camera HAL API handlers.
*
* Each handler simply verifies existence of an appropriate EmulatedBaseCamera
* instance, and dispatches the call to that instance.
*
***************************************************************************/
int EmulatedCameraFactory::cameraDeviceOpen(int camera_id, hw_device_t** device)
{
ALOGV("%s: id = %d", __FUNCTION__, camera_id);
*device = NULL;
if (!isConstructedOK()) {
ALOGE("%s: EmulatedCameraFactory has failed to initialize", __FUNCTION__);
return -EINVAL;
}
if (camera_id < 0 || camera_id >= getEmulatedCameraNum()) {
ALOGE("%s: Camera id %d is out of bounds (%d)",
__FUNCTION__, camera_id, getEmulatedCameraNum());
return -ENODEV;
}
return mEmulatedCameras[camera_id]->connectCamera(device);
}
int EmulatedCameraFactory::getCameraInfo(int camera_id, struct camera_info* info)
{
ALOGV("%s: id = %d", __FUNCTION__, camera_id);
if (!isConstructedOK()) {
ALOGE("%s: EmulatedCameraFactory has failed to initialize", __FUNCTION__);
return -EINVAL;
}
if (camera_id < 0 || camera_id >= getEmulatedCameraNum()) {
ALOGE("%s: Camera id %d is out of bounds (%d)",
__FUNCTION__, camera_id, getEmulatedCameraNum());
return -ENODEV;
}
return mEmulatedCameras[camera_id]->getCameraInfo(info);
}
int EmulatedCameraFactory::setCallbacks(
const camera_module_callbacks_t *callbacks)
{
ALOGV("%s: callbacks = %p", __FUNCTION__, callbacks);
mCallbacks = callbacks;
return OK;
}
void EmulatedCameraFactory::getVendorTagOps(vendor_tag_ops_t* ops) {
ALOGV("%s: ops = %p", __FUNCTION__, ops);
// No vendor tags defined for emulator yet, so not touching ops
}
/****************************************************************************
* Camera HAL API callbacks.
***************************************************************************/
int EmulatedCameraFactory::device_open(const hw_module_t* module,
const char* name,
hw_device_t** device)
{
/*
* Simply verify the parameters, and dispatch the call inside the
* EmulatedCameraFactory instance.
*/
if (module != &HAL_MODULE_INFO_SYM.common) {
ALOGE("%s: Invalid module %p expected %p",
__FUNCTION__, module, &HAL_MODULE_INFO_SYM.common);
return -EINVAL;
}
if (name == NULL) {
ALOGE("%s: NULL name is not expected here", __FUNCTION__);
return -EINVAL;
}
return gEmulatedCameraFactory.cameraDeviceOpen(atoi(name), device);
}
int EmulatedCameraFactory::get_number_of_cameras(void)
{
return gEmulatedCameraFactory.getEmulatedCameraNum();
}
int EmulatedCameraFactory::get_camera_info(int camera_id,
struct camera_info* info)
{
return gEmulatedCameraFactory.getCameraInfo(camera_id, info);
}
int EmulatedCameraFactory::set_callbacks(
const camera_module_callbacks_t *callbacks)
{
return gEmulatedCameraFactory.setCallbacks(callbacks);
}
void EmulatedCameraFactory::get_vendor_tag_ops(vendor_tag_ops_t* ops)
{
gEmulatedCameraFactory.getVendorTagOps(ops);
}
int EmulatedCameraFactory::open_legacy(const struct hw_module_t* module,
const char* id, uint32_t halVersion, struct hw_device_t** device) {
// Not supporting legacy open
return -ENOSYS;
}
/********************************************************************************
* Internal API
*******************************************************************************/
/*
* Camera information tokens passed in response to the "list" factory query.
*/
/* Device name token. */
static const char lListNameToken[] = "name=";
/* Frame dimensions token. */
static const char lListDimsToken[] = "framedims=";
/* Facing direction token. */
static const char lListDirToken[] = "dir=";
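/* A hypothetical example of a single entry in the returned list, built from
 * the tokens above (actual values come from the emulator's camera service):
 *   name=/dev/video0 framedims=640x480,320x240 dir=back
 */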
void EmulatedCameraFactory::createQemuCameras()
{
/* Obtain camera list. */
char* camera_list = NULL;
status_t res = mQemuClient.listCameras(&camera_list);
/* Empty list, or list containing just an EOL means that there were no
* connected cameras found. */
if (res != NO_ERROR || camera_list == NULL || *camera_list == '\0' ||
*camera_list == '\n') {
if (camera_list != NULL) {
free(camera_list);
}
return;
}
/*
* Calculate number of connected cameras. Number of EOLs in the camera list
 * is the number of connected cameras.
*/
int num = 0;
const char* eol = strchr(camera_list, '\n');
while (eol != NULL) {
num++;
eol = strchr(eol + 1, '\n');
}
/* Allocate the array for emulated camera instances. Note that we allocate
* two more entries for back and front fake camera emulation. */
mEmulatedCameras = new EmulatedBaseCamera*[num + 2];
if (mEmulatedCameras == NULL) {
ALOGE("%s: Unable to allocate emulated camera array for %d entries",
          __FUNCTION__, num + 2);
free(camera_list);
return;
}
 memset(mEmulatedCameras, 0, sizeof(EmulatedBaseCamera*) * (num + 2));
/*
 * Iterate the list, creating and initializing emulated qemu cameras for each
* entry (line) in the list.
*/
int index = 0;
char* cur_entry = camera_list;
while (cur_entry != NULL && *cur_entry != '\0' && index < num) {
/* Find the end of the current camera entry, and terminate it with zero
* for simpler string manipulation. */
char* next_entry = strchr(cur_entry, '\n');
if (next_entry != NULL) {
*next_entry = '\0';
next_entry++; // Start of the next entry.
}
/* Find 'name', 'framedims', and 'dir' tokens that are required here. */
char* name_start = strstr(cur_entry, lListNameToken);
char* dim_start = strstr(cur_entry, lListDimsToken);
char* dir_start = strstr(cur_entry, lListDirToken);
if (name_start != NULL && dim_start != NULL && dir_start != NULL) {
/* Advance to the token values. */
name_start += strlen(lListNameToken);
dim_start += strlen(lListDimsToken);
dir_start += strlen(lListDirToken);
/* Terminate token values with zero. */
char* s = strchr(name_start, ' ');
if (s != NULL) {
*s = '\0';
}
s = strchr(dim_start, ' ');
if (s != NULL) {
*s = '\0';
}
s = strchr(dir_start, ' ');
if (s != NULL) {
*s = '\0';
}
/* Create and initialize qemu camera. */
EmulatedQemuCamera* qemu_cam =
new EmulatedQemuCamera(index, &HAL_MODULE_INFO_SYM.common);
if (NULL != qemu_cam) {
res = qemu_cam->Initialize(name_start, dim_start, dir_start);
if (res == NO_ERROR) {
mEmulatedCameras[index] = qemu_cam;
index++;
} else {
delete qemu_cam;
}
} else {
ALOGE("%s: Unable to instantiate EmulatedQemuCamera",
__FUNCTION__);
}
} else {
ALOGW("%s: Bad camera information: %s", __FUNCTION__, cur_entry);
}
cur_entry = next_entry;
}
mEmulatedCameraNum = index;
}
void EmulatedCameraFactory::waitForQemuSfFakeCameraPropertyAvailable() {
// Camera service may start running before qemu-props sets qemu.sf.fake_camera to
    // any of the following four values: "none,front,back,both"; so we need to wait.
// android/camera/camera-service.c
// bug: 30768229
int numAttempts = 100;
char prop[PROPERTY_VALUE_MAX];
bool timeout = true;
for (int i = 0; i < numAttempts; ++i) {
if (property_get("qemu.sf.fake_camera", prop, NULL) != 0 ) {
timeout = false;
break;
}
usleep(5000);
}
if (timeout) {
ALOGE("timeout (%dms) waiting for property qemu.sf.fake_camera to be set\n", 5 * numAttempts);
}
}
bool EmulatedCameraFactory::isBackFakeCameraEmulationOn()
{
    /* Defined by 'qemu.sf.fake_camera' boot property: if the property exists
     * and is set to 'both' or 'back', then the fake camera is used to emulate
     * the back camera. */
char prop[PROPERTY_VALUE_MAX];
if ((property_get("qemu.sf.fake_camera", prop, NULL) > 0) &&
(!strcmp(prop, "both") || !strcmp(prop, "back"))) {
return true;
} else {
return false;
}
}
int EmulatedCameraFactory::getBackCameraHalVersion()
{
    /* Defined by the 'qemu.sf.back_camera_hal' boot property: if the
     * property doesn't exist, it is assumed to be 1. */
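    /* For example (illustrative): setting qemu.sf.back_camera_hal to 3 makes
     * the factory create an EmulatedFakeCamera3 (HAL3) back camera. */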
char prop[PROPERTY_VALUE_MAX];
if (property_get("qemu.sf.back_camera_hal", prop, NULL) > 0) {
char *prop_end = prop;
int val = strtol(prop, &prop_end, 10);
if (*prop_end == '\0') {
return val;
}
// Badly formatted property, should just be a number
ALOGE("qemu.sf.back_camera_hal is not a number: %s", prop);
}
return 1;
}
bool EmulatedCameraFactory::isFrontFakeCameraEmulationOn()
{
    /* Defined by 'qemu.sf.fake_camera' boot property: if the property exists
     * and is set to 'both' or 'front', then the fake camera is used to emulate
     * the front camera. */
char prop[PROPERTY_VALUE_MAX];
if ((property_get("qemu.sf.fake_camera", prop, NULL) > 0) &&
(!strcmp(prop, "both") || !strcmp(prop, "front"))) {
return true;
} else {
return false;
}
}
int EmulatedCameraFactory::getFrontCameraHalVersion()
{
    /* Defined by the 'qemu.sf.front_camera_hal' boot property: if the
     * property doesn't exist, it is assumed to be 1. */
char prop[PROPERTY_VALUE_MAX];
if (property_get("qemu.sf.front_camera_hal", prop, NULL) > 0) {
char *prop_end = prop;
int val = strtol(prop, &prop_end, 10);
if (*prop_end == '\0') {
return val;
}
// Badly formatted property, should just be a number
ALOGE("qemu.sf.front_camera_hal is not a number: %s", prop);
}
return 1;
}
void EmulatedCameraFactory::onStatusChanged(int cameraId, int newStatus) {
EmulatedBaseCamera *cam = mEmulatedCameras[cameraId];
if (!cam) {
ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
return;
}
/**
* (Order is important)
* Send the callback first to framework, THEN close the camera.
*/
if (newStatus == cam->getHotplugStatus()) {
ALOGW("%s: Ignoring transition to the same status", __FUNCTION__);
return;
}
const camera_module_callbacks_t* cb = mCallbacks;
if (cb != NULL && cb->camera_device_status_change != NULL) {
cb->camera_device_status_change(cb, cameraId, newStatus);
}
if (newStatus == CAMERA_DEVICE_STATUS_NOT_PRESENT) {
cam->unplugCamera();
} else if (newStatus == CAMERA_DEVICE_STATUS_PRESENT) {
cam->plugCamera();
}
}
/********************************************************************************
* Initializer for the static member structure.
*******************************************************************************/
/* Entry point for camera HAL API. */
struct hw_module_methods_t EmulatedCameraFactory::mCameraModuleMethods = {
open: EmulatedCameraFactory::device_open
};
}; /* namespace android */

View file

@ -0,0 +1,210 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_FACTORY_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_FACTORY_H
#include <utils/RefBase.h>
#include "EmulatedBaseCamera.h"
#include "QemuClient.h"
namespace android {
struct EmulatedCameraHotplugThread;
/*
* Contains declaration of a class EmulatedCameraFactory that manages cameras
* available for the emulation. A global instance of this class is statically
* instantiated and initialized when camera emulation HAL is loaded.
*/
/* Class EmulatedCameraFactory manages cameras available for the emulation.
*
* When the global static instance of this class is created on the module load,
* it enumerates cameras available for the emulation by connecting to the
* emulator's 'camera' service. For every camera found out there it creates an
 * instance of an appropriate class, and stores it in an array of emulated
* cameras. In addition to the cameras reported by the emulator, a fake camera
* emulator is always created, so there is always at least one camera that is
* available.
*
* Instance of this class is also used as the entry point for the camera HAL API,
* including:
* - hw_module_methods_t::open entry point
* - camera_module_t::get_number_of_cameras entry point
* - camera_module_t::get_camera_info entry point
*
*/
class EmulatedCameraFactory {
public:
/* Constructs EmulatedCameraFactory instance.
* In this constructor the factory will create and initialize a list of
* emulated cameras. All errors that occur on this constructor are reported
* via mConstructedOK data member of this class.
*/
EmulatedCameraFactory();
/* Destructs EmulatedCameraFactory instance. */
~EmulatedCameraFactory();
/****************************************************************************
* Camera HAL API handlers.
***************************************************************************/
public:
/* Opens (connects to) a camera device.
* This method is called in response to hw_module_methods_t::open callback.
*/
int cameraDeviceOpen(int camera_id, hw_device_t** device);
/* Gets emulated camera information.
* This method is called in response to camera_module_t::get_camera_info callback.
*/
int getCameraInfo(int camera_id, struct camera_info *info);
/* Sets emulated camera callbacks.
* This method is called in response to camera_module_t::set_callbacks callback.
*/
int setCallbacks(const camera_module_callbacks_t *callbacks);
/* Fill in vendor tags for the module
* This method is called in response to camera_module_t::get_vendor_tag_ops callback.
*/
void getVendorTagOps(vendor_tag_ops_t* ops);
/****************************************************************************
* Camera HAL API callbacks.
***************************************************************************/
public:
/* camera_module_t::get_number_of_cameras callback entry point. */
static int get_number_of_cameras(void);
/* camera_module_t::get_camera_info callback entry point. */
static int get_camera_info(int camera_id, struct camera_info *info);
/* camera_module_t::set_callbacks callback entry point. */
static int set_callbacks(const camera_module_callbacks_t *callbacks);
/* camera_module_t::get_vendor_tag_ops callback entry point */
static void get_vendor_tag_ops(vendor_tag_ops_t* ops);
/* camera_module_t::open_legacy callback entry point */
static int open_legacy(const struct hw_module_t* module, const char* id,
uint32_t halVersion, struct hw_device_t** device);
private:
/* hw_module_methods_t::open callback entry point. */
static int device_open(const hw_module_t* module,
const char* name,
hw_device_t** device);
/****************************************************************************
* Public API.
***************************************************************************/
public:
/* Gets fake camera orientation. */
int getFakeCameraOrientation() {
/* TODO: Have a boot property that controls that. */
return 0;
}
/* Gets qemu camera orientation. */
int getQemuCameraOrientation() {
/* TODO: Have a boot property that controls that. */
return 0;
}
/* Gets number of emulated cameras.
*/
int getEmulatedCameraNum() const {
return mEmulatedCameraNum;
}
/* Checks whether or not the constructor has succeeded.
*/
bool isConstructedOK() const {
return mConstructedOK;
}
void onStatusChanged(int cameraId, int newStatus);
/****************************************************************************
* Private API
***************************************************************************/
private:
/* Populates emulated cameras array with cameras that are available via
 * 'camera' service in the emulator. For each such camera an instance of
 * EmulatedQemuCamera will be created and added to the mEmulatedCameras
* array.
*/
void createQemuCameras();
 /* Waits until qemu-props has completed setup; times out after 500ms. */
void waitForQemuSfFakeCameraPropertyAvailable();
/* Checks if fake camera emulation is on for the camera facing back. */
bool isBackFakeCameraEmulationOn();
/* Gets camera device version number to use for back camera emulation */
int getBackCameraHalVersion();
/* Checks if fake camera emulation is on for the camera facing front. */
bool isFrontFakeCameraEmulationOn();
/* Gets camera device version number to use for front camera emulation */
int getFrontCameraHalVersion();
/****************************************************************************
* Data members.
***************************************************************************/
private:
/* Connection to the camera service in the emulator. */
FactoryQemuClient mQemuClient;
/* Array of cameras available for the emulation. */
EmulatedBaseCamera** mEmulatedCameras;
/* Number of emulated cameras (including the fake ones). */
int mEmulatedCameraNum;
/* Number of emulated fake cameras. */
int mFakeCameraNum;
/* Flags whether or not constructor has succeeded. */
bool mConstructedOK;
/* Camera callbacks (for status changing) */
const camera_module_callbacks_t* mCallbacks;
/* Hotplug thread (to call onStatusChanged) */
sp<EmulatedCameraHotplugThread> mHotplugThread;
public:
/* Contains device open entry point, as required by HAL API. */
static struct hw_module_methods_t mCameraModuleMethods;
};
}; /* namespace android */
/* References the global EmulatedCameraFactory instance. */
extern android::EmulatedCameraFactory gEmulatedCameraFactory;
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_FACTORY_H */

View file

@ -0,0 +1,48 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of the camera HAL layer in the system running
* under the emulator.
*
 * This file contains only the required HAL header, which directs all the API calls
 * to the EmulatedCameraFactory class implementation, which is responsible for
* managing emulated cameras.
*/
#include "EmulatedCameraFactory.h"
/*
* Required HAL header.
*/
camera_module_t HAL_MODULE_INFO_SYM = {
common: {
tag: HARDWARE_MODULE_TAG,
module_api_version: CAMERA_MODULE_API_VERSION_2_3,
hal_api_version: HARDWARE_HAL_API_VERSION,
id: CAMERA_HARDWARE_MODULE_ID,
name: "Emulated Camera Module",
author: "The Android Open Source Project",
methods: &android::EmulatedCameraFactory::mCameraModuleMethods,
dso: NULL,
reserved: {0},
},
get_number_of_cameras: android::EmulatedCameraFactory::get_number_of_cameras,
get_camera_info: android::EmulatedCameraFactory::get_camera_info,
set_callbacks: android::EmulatedCameraFactory::set_callbacks,
get_vendor_tag_ops: android::EmulatedCameraFactory::get_vendor_tag_ops,
open_legacy: android::EmulatedCameraFactory::open_legacy
};
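/* The camera service loads this module through libhardware's hw_get_module()
 * using CAMERA_HARDWARE_MODULE_ID, after which all module-level calls are
 * routed to the static EmulatedCameraFactory entry points above. */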

View file

@ -0,0 +1,372 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_HotplugThread"
#include <cutils/log.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/inotify.h>
#include "EmulatedCameraHotplugThread.h"
#include "EmulatedCameraFactory.h"
#define FAKE_HOTPLUG_FILE "/data/misc/media/emulator.camera.hotplug"
#define EVENT_SIZE (sizeof(struct inotify_event))
#define EVENT_BUF_LEN (1024*(EVENT_SIZE+16))
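// Room for up to 1024 queued inotify_event records (each padded with 16 bytes
// for a short name field); a single read() may return several events at once.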
#define SubscriberInfo EmulatedCameraHotplugThread::SubscriberInfo
namespace android {
EmulatedCameraHotplugThread::EmulatedCameraHotplugThread(
const int* cameraIdArray,
size_t size) :
Thread(/*canCallJava*/false) {
mRunning = true;
mInotifyFd = 0;
for (size_t i = 0; i < size; ++i) {
int id = cameraIdArray[i];
if (createFileIfNotExists(id)) {
mSubscribedCameraIds.push_back(id);
}
}
}
EmulatedCameraHotplugThread::~EmulatedCameraHotplugThread() {
}
status_t EmulatedCameraHotplugThread::requestExitAndWait() {
ALOGE("%s: Not implemented. Use requestExit + join instead",
__FUNCTION__);
return INVALID_OPERATION;
}
void EmulatedCameraHotplugThread::requestExit() {
Mutex::Autolock al(mMutex);
ALOGV("%s: Requesting thread exit", __FUNCTION__);
mRunning = false;
bool rmWatchFailed = false;
Vector<SubscriberInfo>::iterator it;
for (it = mSubscribers.begin(); it != mSubscribers.end(); ++it) {
if (inotify_rm_watch(mInotifyFd, it->WatchID) == -1) {
ALOGE("%s: Could not remove watch for camID '%d',"
" error: '%s' (%d)",
__FUNCTION__, it->CameraID, strerror(errno),
errno);
            rmWatchFailed = true;
} else {
ALOGV("%s: Removed watch for camID '%d'",
__FUNCTION__, it->CameraID);
}
}
if (rmWatchFailed) { // unlikely
// Give the thread a fighting chance to error out on the next
// read
if (close(mInotifyFd) == -1) {
ALOGE("%s: close failure error: '%s' (%d)",
__FUNCTION__, strerror(errno), errno);
}
}
ALOGV("%s: Request exit complete.", __FUNCTION__);
}
status_t EmulatedCameraHotplugThread::readyToRun() {
Mutex::Autolock al(mMutex);
mInotifyFd = -1;
do {
ALOGV("%s: Initializing inotify", __FUNCTION__);
mInotifyFd = inotify_init();
if (mInotifyFd == -1) {
ALOGE("%s: inotify_init failure error: '%s' (%d)",
__FUNCTION__, strerror(errno), errno);
mRunning = false;
break;
}
/**
* For each fake camera file, add a watch for when
* the file is closed (if it was written to)
*/
Vector<int>::const_iterator it, end;
it = mSubscribedCameraIds.begin();
end = mSubscribedCameraIds.end();
for (; it != end; ++it) {
int cameraId = *it;
if (!addWatch(cameraId)) {
mRunning = false;
break;
}
}
} while(false);
if (!mRunning) {
status_t err = -errno;
if (mInotifyFd != -1) {
close(mInotifyFd);
}
return err;
}
return OK;
}
bool EmulatedCameraHotplugThread::threadLoop() {
// If requestExit was already called, mRunning will be false
while (mRunning) {
char buffer[EVENT_BUF_LEN];
int length = TEMP_FAILURE_RETRY(
read(mInotifyFd, buffer, EVENT_BUF_LEN));
if (length < 0) {
ALOGE("%s: Error reading from inotify FD, error: '%s' (%d)",
__FUNCTION__, strerror(errno),
errno);
mRunning = false;
break;
}
ALOGV("%s: Read %d bytes from inotify FD", __FUNCTION__, length);
int i = 0;
while (i < length) {
inotify_event* event = (inotify_event*) &buffer[i];
if (event->mask & IN_IGNORED) {
Mutex::Autolock al(mMutex);
if (!mRunning) {
ALOGV("%s: Shutting down thread", __FUNCTION__);
break;
} else {
ALOGE("%s: File was deleted, aborting",
__FUNCTION__);
mRunning = false;
break;
}
} else if (event->mask & IN_CLOSE_WRITE) {
int cameraId = getCameraId(event->wd);
if (cameraId < 0) {
ALOGE("%s: Got bad camera ID from WD '%d",
__FUNCTION__, event->wd);
} else {
// Check the file for the new hotplug event
String8 filePath = getFilePath(cameraId);
/**
                     * NOTE: reading the file here does not trigger another
                     * inotify event for the same file: we open it read-only,
                     * while our watch only fires on close-after-write.
*/
int newStatus = readFile(filePath);
if (newStatus < 0) {
mRunning = false;
break;
}
int halStatus = newStatus ?
CAMERA_DEVICE_STATUS_PRESENT :
CAMERA_DEVICE_STATUS_NOT_PRESENT;
gEmulatedCameraFactory.onStatusChanged(cameraId,
halStatus);
}
} else {
ALOGW("%s: Unknown mask 0x%x",
__FUNCTION__, event->mask);
}
i += EVENT_SIZE + event->len;
}
}
if (!mRunning) {
close(mInotifyFd);
return false;
}
return true;
}
String8 EmulatedCameraHotplugThread::getFilePath(int cameraId) const {
return String8::format(FAKE_HOTPLUG_FILE ".%d", cameraId);
}
bool EmulatedCameraHotplugThread::createFileIfNotExists(int cameraId) const
{
String8 filePath = getFilePath(cameraId);
// make sure this file exists and we have access to it
int fd = TEMP_FAILURE_RETRY(
open(filePath.string(), O_WRONLY | O_CREAT | O_TRUNC,
/* mode = ug+rwx */ S_IRWXU | S_IRWXG ));
if (fd == -1) {
ALOGE("%s: Could not create file '%s', error: '%s' (%d)",
__FUNCTION__, filePath.string(), strerror(errno), errno);
return false;
}
// File has '1' by default since we are plugged in by default
if (TEMP_FAILURE_RETRY(write(fd, "1\n", /*count*/2)) == -1) {
ALOGE("%s: Could not write '1' to file '%s', error: '%s' (%d)",
__FUNCTION__, filePath.string(), strerror(errno), errno);
        close(fd);
        return false;
}
close(fd);
return true;
}
int EmulatedCameraHotplugThread::getCameraId(const String8& filePath) const {
Vector<int>::const_iterator it, end;
it = mSubscribedCameraIds.begin();
end = mSubscribedCameraIds.end();
for (; it != end; ++it) {
String8 camPath = getFilePath(*it);
if (camPath == filePath) {
return *it;
}
}
return NAME_NOT_FOUND;
}
int EmulatedCameraHotplugThread::getCameraId(int wd) const {
for (size_t i = 0; i < mSubscribers.size(); ++i) {
if (mSubscribers[i].WatchID == wd) {
return mSubscribers[i].CameraID;
}
}
return NAME_NOT_FOUND;
}
SubscriberInfo* EmulatedCameraHotplugThread::getSubscriberInfo(int cameraId)
{
for (size_t i = 0; i < mSubscribers.size(); ++i) {
if (mSubscribers[i].CameraID == cameraId) {
return &mSubscribers.editItemAt(i);
}
}
return NULL;
}
bool EmulatedCameraHotplugThread::addWatch(int cameraId) {
String8 camPath = getFilePath(cameraId);
int wd = inotify_add_watch(mInotifyFd,
camPath.string(),
IN_CLOSE_WRITE);
if (wd == -1) {
ALOGE("%s: Could not add watch for '%s', error: '%s' (%d)",
__FUNCTION__, camPath.string(), strerror(errno),
errno);
mRunning = false;
return false;
}
ALOGV("%s: Watch added for camID='%d', wd='%d'",
__FUNCTION__, cameraId, wd);
SubscriberInfo si = { cameraId, wd };
mSubscribers.push_back(si);
return true;
}
bool EmulatedCameraHotplugThread::removeWatch(int cameraId) {
SubscriberInfo* si = getSubscriberInfo(cameraId);
if (!si) return false;
if (inotify_rm_watch(mInotifyFd, si->WatchID) == -1) {
ALOGE("%s: Could not remove watch for camID '%d', error: '%s' (%d)",
__FUNCTION__, cameraId, strerror(errno),
errno);
return false;
}
Vector<SubscriberInfo>::iterator it;
for (it = mSubscribers.begin(); it != mSubscribers.end(); ++it) {
if (it->CameraID == cameraId) {
break;
}
}
if (it != mSubscribers.end()) {
mSubscribers.erase(it);
}
return true;
}
int EmulatedCameraHotplugThread::readFile(const String8& filePath) const {
int fd = TEMP_FAILURE_RETRY(
open(filePath.string(), O_RDONLY, /*mode*/0));
if (fd == -1) {
ALOGE("%s: Could not open file '%s', error: '%s' (%d)",
__FUNCTION__, filePath.string(), strerror(errno), errno);
return -1;
}
char buffer[1] = { '\0' }; // stays '\0' on EOF or read error
int length;
length = TEMP_FAILURE_RETRY(
read(fd, buffer, sizeof(buffer)));
int retval;
ALOGV("%s: Read file '%s', length='%d', buffer='%c'",
__FUNCTION__, filePath.string(), length, buffer[0]);
if (length <= 0) { // EOF or read error
retval = 0; // an empty (or unreadable) file is the same thing as 0
} else if (buffer[0] == '0') {
retval = 0;
} else { // anything non-empty that does not begin with '0'
retval = 1;
}
close(fd);
return retval;
}
} //namespace android
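
The thread above reacts to IN_CLOSE_WRITE on a per-camera status file, so a test harness with write access can simulate hotplug from outside the HAL. Below is a minimal sketch, not part of the HAL source; the path prefix is an assumption standing in for the FAKE_HOTPLUG_FILE constant that getFilePath() formats with the camera ID.

#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

// Hypothetical helper. Writing '0' or '1' and then closing the file
// generates the IN_CLOSE_WRITE event that threadLoop() turns into a
// CAMERA_DEVICE_STATUS_* callback.
static bool setFakeCameraPresent(int cameraId, bool present) {
    char path[256];
    // Assumed expansion of FAKE_HOTPLUG_FILE; adjust to the real constant.
    snprintf(path, sizeof(path),
             "/data/misc/media/emulator.camera.hotplug.%d", cameraId);
    int fd = open(path, O_WRONLY | O_TRUNC);
    if (fd == -1) return false;
    // readFile() treats an empty file or a leading '0' as NOT_PRESENT and
    // anything else as PRESENT.
    bool ok = write(fd, present ? "1\n" : "0\n", 2) == 2;
    close(fd); // the close after a write is what fires IN_CLOSE_WRITE
    return ok;
}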

View file

@ -0,0 +1,77 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_HOTPLUG_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_HOTPLUG_H
/**
 * This class emulates hotplug events by using inotify to watch a file
 * specific to a camera ID. When the file contents change between 1 and 0,
 * the hotplug status toggles between PRESENT and NOT_PRESENT.
 *
 * Refer to FAKE_HOTPLUG_FILE in EmulatedCameraHotplugThread.cpp
 */
#include "EmulatedCamera2.h"
#include <utils/String8.h>
#include <utils/Vector.h>
namespace android {
class EmulatedCameraHotplugThread : public Thread {
public:
EmulatedCameraHotplugThread(const int* cameraIdArray, size_t size);
~EmulatedCameraHotplugThread();
virtual void requestExit();
virtual status_t requestExitAndWait();
private:
virtual status_t readyToRun();
virtual bool threadLoop();
struct SubscriberInfo {
int CameraID;
int WatchID;
};
bool addWatch(int cameraId);
bool removeWatch(int cameraId);
SubscriberInfo* getSubscriberInfo(int cameraId);
int getCameraId(const String8& filePath) const;
int getCameraId(int wd) const;
String8 getFilePath(int cameraId) const;
int readFile(const String8& filePath) const;
bool createFileIfNotExists(int cameraId) const;
int mInotifyFd;
Vector<int> mSubscribedCameraIds;
Vector<SubscriberInfo> mSubscribers;
// The variables above are unguarded:
// -- accessed in the thread loop or in the constructor only
Mutex mMutex;
bool mRunning; // guarded by mMutex only where it matters
};
} // namespace android
#endif
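
A hedged usage sketch for this interface; the real wiring lives in EmulatedCameraFactory.cpp (not shown here), and the camera ID list below is illustrative only.

#include <utils/StrongPointer.h>
#include "EmulatedCameraHotplugThread.h"

static android::sp<android::EmulatedCameraHotplugThread> startHotplugWatcher() {
    // Two hypothetical fake-camera IDs to watch.
    static const int kCameraIds[] = { 0, 1 };
    android::sp<android::EmulatedCameraHotplugThread> thread =
        new android::EmulatedCameraHotplugThread(kCameraIds, 2);
    thread->run("EmulatedCameraHotplugThread"); // spawns readyToRun()/threadLoop()
    return thread;
}
// On shutdown: thread->requestExitAndWait();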

View file

@ -0,0 +1,97 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedFakeCamera that encapsulates
* functionality of a fake camera.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_FakeCamera"
#include <cutils/log.h>
#include <cutils/properties.h>
#include "EmulatedFakeCamera.h"
#include "EmulatedCameraFactory.h"
namespace android {
EmulatedFakeCamera::EmulatedFakeCamera(int cameraId,
bool facingBack,
struct hw_module_t* module)
: EmulatedCamera(cameraId, module),
mFacingBack(facingBack),
mFakeCameraDevice(this)
{
}
EmulatedFakeCamera::~EmulatedFakeCamera()
{
}
/****************************************************************************
* Public API overrides
***************************************************************************/
status_t EmulatedFakeCamera::Initialize()
{
status_t res = mFakeCameraDevice.Initialize();
if (res != NO_ERROR) {
return res;
}
const char* facing = mFacingBack ? EmulatedCamera::FACING_BACK :
EmulatedCamera::FACING_FRONT;
mParameters.set(EmulatedCamera::FACING_KEY, facing);
ALOGD("%s: Fake camera is facing %s", __FUNCTION__, facing);
mParameters.set(EmulatedCamera::ORIENTATION_KEY,
gEmulatedCameraFactory.getFakeCameraOrientation());
mParameters.set(CameraParameters::KEY_ROTATION,
gEmulatedCameraFactory.getFakeCameraOrientation());
res = EmulatedCamera::Initialize();
if (res != NO_ERROR) {
return res;
}
/*
* Parameters provided by the camera device.
*/
/* 352x288, 320x240 and 176x144 frame dimensions are required by
* the framework for video mode preview and video recording. */
mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
"640x480,352x288,320x240");
mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
"640x480,352x288,320x240,176x144");
mParameters.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
"640x480,352x288,320x240,176x144");
mParameters.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO,
"640x480");
mParameters.setPreviewSize(640, 480);
mParameters.setPictureSize(640, 480);
return NO_ERROR;
}
EmulatedCameraDevice* EmulatedFakeCamera::getCameraDevice()
{
return &mFakeCameraDevice;
}
}; /* namespace android */
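
The parameters set in Initialize() surface to clients through the standard CameraParameters getters. A short client-side sketch, illustrative and not part of this HAL:

#include <stdio.h>
#include <camera/CameraParameters.h>

static void logPreviewSetup(const android::CameraParameters& params) {
    int width = 0, height = 0;
    params.getPreviewSize(&width, &height); // 640x480 per Initialize() above
    const char* sizes =
        params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
    printf("preview %dx%d, supported: %s\n", width, height, sizes ? sizes : "");
}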

View file

@ -0,0 +1,74 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_H
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_H
/*
* Contains declaration of a class EmulatedFakeCamera that encapsulates
* functionality of a fake camera. This class is nothing more than a placeholder
* for EmulatedFakeCameraDevice instance.
*/
#include "EmulatedCamera.h"
#include "EmulatedFakeCameraDevice.h"
namespace android {
/* Encapsulates functionality of a fake camera.
* This class is nothing more than a placeholder for EmulatedFakeCameraDevice
* instance that emulates a fake camera device.
*/
class EmulatedFakeCamera : public EmulatedCamera {
public:
/* Constructs EmulatedFakeCamera instance. */
EmulatedFakeCamera(int cameraId, bool facingBack, struct hw_module_t* module);
/* Destructs EmulatedFakeCamera instance. */
~EmulatedFakeCamera();
/****************************************************************************
* EmulatedCamera virtual overrides.
***************************************************************************/
public:
/* Initializes EmulatedFakeCamera instance. */
status_t Initialize();
/****************************************************************************
* EmulatedCamera abstract API implementation.
***************************************************************************/
protected:
/* Gets the emulated camera device used by this instance of the emulated camera.
*/
EmulatedCameraDevice* getCameraDevice();
/****************************************************************************
* Data members.
***************************************************************************/
protected:
/* Facing back (true) or front (false) switch. */
bool mFacingBack;
/* Contained fake camera device object. */
EmulatedFakeCameraDevice mFakeCameraDevice;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_H */

File diff suppressed because it is too large

View file

@ -0,0 +1,431 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA2_H
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA2_H
/*
* Contains declaration of a class EmulatedFakeCamera2 that encapsulates
* functionality of a fake camera that implements version 2 of the camera device
* interface.
*/
#include "EmulatedCamera2.h"
#include "fake-pipeline2/Base.h"
#include "fake-pipeline2/Sensor.h"
#include "fake-pipeline2/JpegCompressor.h"
#include <utils/Condition.h>
#include <utils/KeyedVector.h>
#include <utils/String8.h>
#include <utils/String16.h>
namespace android {
/* Encapsulates functionality of an advanced fake camera. This camera contains
* a simple simulation of a scene, sensor, and image processing pipeline.
*/
class EmulatedFakeCamera2 : public EmulatedCamera2 {
public:
/* Constructs EmulatedFakeCamera instance. */
EmulatedFakeCamera2(int cameraId, bool facingBack, struct hw_module_t* module);
/* Destructs EmulatedFakeCamera instance. */
~EmulatedFakeCamera2();
/****************************************************************************
* EmulatedCamera2 virtual overrides.
***************************************************************************/
public:
/* Initializes EmulatedFakeCamera2 instance. */
status_t Initialize();
/****************************************************************************
* Camera Module API and generic hardware device API implementation
***************************************************************************/
public:
virtual status_t connectCamera(hw_device_t** device);
virtual status_t plugCamera();
virtual status_t unplugCamera();
virtual camera_device_status_t getHotplugStatus();
virtual status_t closeCamera();
virtual status_t getCameraInfo(struct camera_info *info);
/****************************************************************************
* EmulatedCamera2 abstract API implementation.
***************************************************************************/
protected:
/** Request input queue */
virtual int requestQueueNotify();
/** Count of requests in flight */
virtual int getInProgressCount();
/** Cancel all captures in flight */
//virtual int flushCapturesInProgress();
/** Construct default request */
virtual int constructDefaultRequest(
int request_template,
camera_metadata_t **request);
virtual int allocateStream(
uint32_t width,
uint32_t height,
int format,
const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id,
uint32_t *format_actual,
uint32_t *usage,
uint32_t *max_buffers);
virtual int registerStreamBuffers(
uint32_t stream_id,
int num_buffers,
buffer_handle_t *buffers);
virtual int releaseStream(uint32_t stream_id);
// virtual int allocateReprocessStream(
// uint32_t width,
// uint32_t height,
// uint32_t format,
// const camera2_stream_ops_t *stream_ops,
// uint32_t *stream_id,
// uint32_t *format_actual,
// uint32_t *usage,
// uint32_t *max_buffers);
virtual int allocateReprocessStreamFromStream(
uint32_t output_stream_id,
const camera2_stream_in_ops_t *stream_ops,
uint32_t *stream_id);
virtual int releaseReprocessStream(uint32_t stream_id);
virtual int triggerAction(uint32_t trigger_id,
int32_t ext1,
int32_t ext2);
/** Debug methods */
virtual int dump(int fd);
public:
/****************************************************************************
* Utility methods called by configure/readout threads and pipeline
***************************************************************************/
// Get information about a given stream. Will lock mMutex
const Stream &getStreamInfo(uint32_t streamId);
const ReprocessStream &getReprocessStreamInfo(uint32_t streamId);
// Notifies rest of camera subsystem of serious error
void signalError();
private:
/****************************************************************************
* Utility methods
***************************************************************************/
/** Construct static camera metadata, two-pass */
status_t constructStaticInfo(
camera_metadata_t **info,
bool sizeRequest) const;
/** Two-pass implementation of constructDefaultRequest */
status_t constructDefaultRequest(
int request_template,
camera_metadata_t **request,
bool sizeRequest) const;
/** Helper function for constructDefaultRequest */
static status_t addOrSize(camera_metadata_t *request,
bool sizeRequest,
size_t *entryCount,
size_t *dataCount,
uint32_t tag,
const void *entry_data,
size_t entry_count);
/** Determine if the stream id is listed in any currently-in-flight
* requests. Assumes mMutex is locked */
bool isStreamInUse(uint32_t streamId);
/** Determine if the reprocess stream id is listed in any
* currently-in-flight requests. Assumes mMutex is locked */
bool isReprocessStreamInUse(uint32_t streamId);
/****************************************************************************
* Pipeline controller threads
***************************************************************************/
class ConfigureThread: public Thread {
public:
ConfigureThread(EmulatedFakeCamera2 *parent);
~ConfigureThread();
status_t waitUntilRunning();
status_t newRequestAvailable();
status_t readyToRun();
bool isStreamInUse(uint32_t id);
int getInProgressCount();
private:
EmulatedFakeCamera2 *mParent;
static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
bool mRunning;
bool threadLoop();
bool setupCapture();
bool setupReprocess();
bool configureNextCapture();
bool configureNextReprocess();
bool getBuffers();
Mutex mInputMutex; // Protects mActive, mRequestCount
Condition mInputSignal;
bool mActive; // Whether we're waiting for input requests or actively
// working on them
size_t mRequestCount;
camera_metadata_t *mRequest;
Mutex mInternalsMutex; // Lock before accessing below members.
bool mWaitingForReadout;
bool mNextNeedsJpeg;
bool mNextIsCapture;
int32_t mNextFrameNumber;
int64_t mNextExposureTime;
int64_t mNextFrameDuration;
int32_t mNextSensitivity;
Buffers *mNextBuffers;
};
class ReadoutThread: public Thread, private JpegCompressor::JpegListener {
public:
ReadoutThread(EmulatedFakeCamera2 *parent);
~ReadoutThread();
status_t readyToRun();
// Input
status_t waitUntilRunning();
bool waitForReady(nsecs_t timeout);
void setNextOperation(bool isCapture,
camera_metadata_t *request,
Buffers *buffers);
bool isStreamInUse(uint32_t id);
int getInProgressCount();
private:
EmulatedFakeCamera2 *mParent;
bool mRunning;
bool threadLoop();
bool readyForNextCapture();
status_t collectStatisticsMetadata(camera_metadata_t *frame);
// Inputs
Mutex mInputMutex; // Protects mActive, mInFlightQueue, mRequestCount
Condition mInputSignal;
Condition mReadySignal;
bool mActive;
static const int kInFlightQueueSize = 4;
struct InFlightQueue {
bool isCapture;
camera_metadata_t *request;
Buffers *buffers;
} *mInFlightQueue;
size_t mInFlightHead;
size_t mInFlightTail;
size_t mRequestCount;
// Internals
Mutex mInternalsMutex;
bool mIsCapture;
camera_metadata_t *mRequest;
Buffers *mBuffers;
// Jpeg completion listeners
void onJpegDone(const StreamBuffer &jpegBuffer, bool success);
void onJpegInputDone(const StreamBuffer &inputBuffer);
nsecs_t mJpegTimestamp;
};
// 3A management thread (auto-exposure, focus, white balance)
class ControlThread: public Thread {
public:
ControlThread(EmulatedFakeCamera2 *parent);
~ControlThread();
status_t readyToRun();
status_t waitUntilRunning();
// Interpret request's control parameters and override
// capture settings as needed
status_t processRequest(camera_metadata_t *request);
status_t triggerAction(uint32_t msgType,
int32_t ext1, int32_t ext2);
private:
ControlThread(const ControlThread &t);
ControlThread& operator=(const ControlThread &t);
// Constants controlling fake 3A behavior
static const nsecs_t kControlCycleDelay;
static const nsecs_t kMinAfDuration;
static const nsecs_t kMaxAfDuration;
static const float kAfSuccessRate;
static const float kContinuousAfStartRate;
static const float kAeScanStartRate;
static const nsecs_t kMinAeDuration;
static const nsecs_t kMaxAeDuration;
static const nsecs_t kMinPrecaptureAeDuration;
static const nsecs_t kMaxPrecaptureAeDuration;
static const nsecs_t kNormalExposureTime;
static const nsecs_t kExposureJump;
static const nsecs_t kMinExposureTime;
EmulatedFakeCamera2 *mParent;
bool mRunning;
bool threadLoop();
Mutex mInputMutex; // Protects input methods
Condition mInputSignal;
// Trigger notifications
bool mStartAf;
bool mCancelAf;
bool mStartPrecapture;
// Latest state for 3A request fields
uint8_t mControlMode;
uint8_t mEffectMode;
uint8_t mSceneMode;
uint8_t mAfMode;
bool mAfModeChange;
uint8_t mAwbMode;
uint8_t mAeMode;
// Latest trigger IDs
int32_t mAfTriggerId;
int32_t mPrecaptureTriggerId;
// Current state for 3A algorithms
uint8_t mAfState;
uint8_t mAeState;
uint8_t mAwbState;
bool mAeLock;
// Current control parameters
nsecs_t mExposureTime;
// Private to threadLoop and its utility methods
nsecs_t mAfScanDuration;
nsecs_t mAeScanDuration;
bool mLockAfterPassiveScan;
// Utility methods for AF
int processAfTrigger(uint8_t afMode, uint8_t afState);
int maybeStartAfScan(uint8_t afMode, uint8_t afState);
int updateAfScan(uint8_t afMode, uint8_t afState, nsecs_t *maxSleep);
void updateAfState(uint8_t newState, int32_t triggerId);
// Utility methods for precapture trigger
int processPrecaptureTrigger(uint8_t aeMode, uint8_t aeState);
int maybeStartAeScan(uint8_t aeMode, bool aeLock, uint8_t aeState);
int updateAeScan(uint8_t aeMode, bool aeLock, uint8_t aeState,
nsecs_t *maxSleep);
void updateAeState(uint8_t newState, int32_t triggerId);
};
/****************************************************************************
* Static configuration information
***************************************************************************/
private:
static const uint32_t kMaxRawStreamCount = 1;
static const uint32_t kMaxProcessedStreamCount = 3;
static const uint32_t kMaxJpegStreamCount = 1;
static const uint32_t kMaxReprocessStreamCount = 2;
static const uint32_t kMaxBufferCount = 4;
static const uint32_t kAvailableFormats[];
static const uint32_t kAvailableRawSizes[];
static const uint64_t kAvailableRawMinDurations[];
static const uint32_t kAvailableProcessedSizesBack[];
static const uint32_t kAvailableProcessedSizesFront[];
static const uint64_t kAvailableProcessedMinDurations[];
static const uint32_t kAvailableJpegSizesBack[];
static const uint32_t kAvailableJpegSizesFront[];
static const uint64_t kAvailableJpegMinDurations[];
/****************************************************************************
* Data members.
***************************************************************************/
protected:
/* Facing back (true) or front (false) switch. */
bool mFacingBack;
private:
bool mIsConnected;
int32_t mSensorWidth, mSensorHeight;
/** Stream manipulation */
uint32_t mNextStreamId;
uint32_t mRawStreamCount;
uint32_t mProcessedStreamCount;
uint32_t mJpegStreamCount;
uint32_t mNextReprocessStreamId;
uint32_t mReprocessStreamCount;
KeyedVector<uint32_t, Stream> mStreams;
KeyedVector<uint32_t, ReprocessStream> mReprocessStreams;
/** Simulated hardware interfaces */
sp<Sensor> mSensor;
sp<JpegCompressor> mJpegCompressor;
/** Pipeline control threads */
sp<ConfigureThread> mConfigureThread;
sp<ReadoutThread> mReadoutThread;
sp<ControlThread> mControlThread;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA2_H */
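
The readout thread above tracks work with a fixed-size in-flight queue (mInFlightQueue with mInFlightHead/mInFlightTail); its implementation is in EmulatedFakeCamera2.cpp, whose diff is suppressed below. A minimal sketch of the circular-index arithmetic such a queue implies, under the assumption of a classic head/tail ring buffer:

#include <cstddef>

struct Entry { bool isCapture; };
static const size_t kQueueSize = 4; // mirrors kInFlightQueueSize above
static Entry gQueue[kQueueSize];
static size_t gHead = 0; // next slot to read
static size_t gTail = 0; // next slot to write

// Returns false when full; one slot stays unused so full != empty.
static bool push(const Entry& e) {
    size_t nextTail = (gTail + 1) % kQueueSize;
    if (nextTail == gHead) return false;
    gQueue[gTail] = e;
    gTail = nextTail;
    return true;
}

static bool pop(Entry* out) {
    if (gHead == gTail) return false; // empty
    *out = gQueue[gHead];
    gHead = (gHead + 1) % kQueueSize;
    return true;
}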

File diff suppressed because it is too large

View file

@ -0,0 +1,294 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA3_H
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA3_H
/**
* Contains declaration of a class EmulatedCamera that encapsulates
* functionality of a fake camera that implements version 3 of the camera device
* interface.
*/
#include "EmulatedCamera3.h"
#include "fake-pipeline2/Base.h"
#include "fake-pipeline2/Sensor.h"
#include "fake-pipeline2/JpegCompressor.h"
#include <CameraMetadata.h>
#include <utils/SortedVector.h>
#include <utils/List.h>
#include <utils/Mutex.h>
using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
namespace android {
/**
* Encapsulates functionality for a v3 HAL camera which produces synthetic data.
*
* Note that EmulatedCameraFactory instantiates an object of this class just
* once, when EmulatedCameraFactory instance gets constructed. Connection to /
* disconnection from the actual camera device is handled by calls to
* connectDevice(), and closeCamera() methods of this class that are invoked in
* response to hw_module_methods_t::open, and camera_device::close callbacks.
*/
class EmulatedFakeCamera3 : public EmulatedCamera3,
private Sensor::SensorListener {
public:
EmulatedFakeCamera3(int cameraId, bool facingBack,
struct hw_module_t* module);
virtual ~EmulatedFakeCamera3();
/****************************************************************************
* EmulatedCamera3 virtual overrides
***************************************************************************/
public:
virtual status_t Initialize();
/****************************************************************************
* Camera module API and generic hardware device API implementation
***************************************************************************/
public:
virtual status_t connectCamera(hw_device_t** device);
virtual status_t closeCamera();
virtual status_t getCameraInfo(struct camera_info *info);
/****************************************************************************
* EmulatedCamera3 abstract API implementation
***************************************************************************/
protected:
virtual status_t configureStreams(
camera3_stream_configuration *streamList);
virtual status_t registerStreamBuffers(
const camera3_stream_buffer_set *bufferSet);
virtual const camera_metadata_t* constructDefaultRequestSettings(
int type);
virtual status_t processCaptureRequest(camera3_capture_request *request);
virtual status_t flush();
/** Debug methods */
virtual void dump(int fd);
private:
/**
* Get the requested capability set for this camera
*/
status_t getCameraCapabilities();
bool hasCapability(AvailableCapabilities cap);
/**
* Build the static info metadata buffer for this device
*/
status_t constructStaticInfo();
/**
* Run the fake 3A algorithms as needed. May override/modify settings
* values.
*/
status_t process3A(CameraMetadata &settings);
status_t doFakeAE(CameraMetadata &settings);
status_t doFakeAF(CameraMetadata &settings);
status_t doFakeAWB(CameraMetadata &settings);
void update3A(CameraMetadata &settings);
/** Signal from readout thread that it doesn't have anything to do */
void signalReadoutIdle();
/** Handle interrupt events from the sensor */
void onSensorEvent(uint32_t frameNumber, Event e, nsecs_t timestamp);
/****************************************************************************
* Static configuration information
***************************************************************************/
private:
static const uint32_t kMaxRawStreamCount = 1;
static const uint32_t kMaxProcessedStreamCount = 3;
static const uint32_t kMaxJpegStreamCount = 1;
static const uint32_t kMaxReprocessStreamCount = 2;
static const uint32_t kMaxBufferCount = 4;
// We need a positive stream ID to distinguish external buffers from
// sensor-generated buffers which use a nonpositive ID. Otherwise, HAL3 has
// no concept of a stream id.
static const uint32_t kGenericStreamId = 1;
static const int32_t kAvailableFormats[];
static const uint32_t kAvailableRawSizes[];
static const int64_t kSyncWaitTimeout = 10000000; // 10 ms
static const int32_t kMaxSyncTimeoutCount = 1000; // 1000 kSyncWaitTimeouts
static const uint32_t kFenceTimeoutMs = 2000; // 2 s
static const nsecs_t kJpegTimeoutNs = 5000000000l; // 5 s
/****************************************************************************
* Data members.
***************************************************************************/
/* HAL interface serialization lock. */
Mutex mLock;
/* Facing back (true) or front (false) switch. */
bool mFacingBack;
int32_t mSensorWidth;
int32_t mSensorHeight;
SortedVector<AvailableCapabilities> mCapabilities;
/**
* Cache for default templates. Once one is requested, the pointer must be
* valid at least until close() is called on the device
*/
camera_metadata_t *mDefaultTemplates[CAMERA3_TEMPLATE_COUNT];
/**
* Private stream information, stored in camera3_stream_t->priv.
*/
struct PrivateStreamInfo {
bool alive;
};
// Shortcut to the input stream
camera3_stream_t* mInputStream;
typedef List<camera3_stream_t*> StreamList;
typedef List<camera3_stream_t*>::iterator StreamIterator;
typedef Vector<camera3_stream_buffer> HalBufferVector;
// All streams, including input stream
StreamList mStreams;
// Cached settings from latest submitted request
CameraMetadata mPrevSettings;
/** Fake hardware interfaces */
sp<Sensor> mSensor;
sp<JpegCompressor> mJpegCompressor;
friend class JpegCompressor;
/** Processing thread for sending out results */
class ReadoutThread : public Thread, private JpegCompressor::JpegListener {
public:
ReadoutThread(EmulatedFakeCamera3 *parent);
~ReadoutThread();
struct Request {
uint32_t frameNumber;
CameraMetadata settings;
HalBufferVector *buffers;
Buffers *sensorBuffers;
};
/**
* Interface to parent class
*/
// Place request in the in-flight queue to wait for sensor capture
void queueCaptureRequest(const Request &r);
// Test if the readout thread is idle (no in-flight requests, not
// currently reading out anything
bool isIdle();
// Wait until isIdle is true
status_t waitForReadout();
private:
static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
static const nsecs_t kMaxWaitLoops = 1000;
static const size_t kMaxQueueSize = 2;
EmulatedFakeCamera3 *mParent;
Mutex mLock;
List<Request> mInFlightQueue;
Condition mInFlightSignal;
bool mThreadActive;
virtual bool threadLoop();
// Only accessed by threadLoop
Request mCurrentRequest;
// Jpeg completion callbacks
Mutex mJpegLock;
bool mJpegWaiting;
camera3_stream_buffer mJpegHalBuffer;
uint32_t mJpegFrameNumber;
virtual void onJpegDone(const StreamBuffer &jpegBuffer, bool success);
virtual void onJpegInputDone(const StreamBuffer &inputBuffer);
};
sp<ReadoutThread> mReadoutThread;
/** Fake 3A constants */
static const nsecs_t kNormalExposureTime;
static const nsecs_t kFacePriorityExposureTime;
static const int kNormalSensitivity;
static const int kFacePrioritySensitivity;
// Rate of converging AE to new target value, as fraction of difference between
// current and target value.
static const float kExposureTrackRate;
// Minimum duration for precapture state. May be longer if slow to converge
// to target exposure
static const int kPrecaptureMinFrames;
// How often to restart AE 'scanning'
static const int kStableAeMaxFrames;
// Maximum stop below 'normal' exposure time that we'll wander to while
// pretending to converge AE. In powers of 2. (-2 == 1/4 as bright)
static const float kExposureWanderMin;
// Maximum stop above 'normal' exposure time that we'll wander to while
// pretending to converge AE. In powers of 2. (2 == 4x as bright)
static const float kExposureWanderMax;
/** Fake 3A state */
uint8_t mControlMode;
bool mFacePriority;
uint8_t mAeState;
uint8_t mAfState;
uint8_t mAwbState;
uint8_t mAeMode;
uint8_t mAfMode;
uint8_t mAwbMode;
int mAeCounter;
nsecs_t mAeCurrentExposureTime;
nsecs_t mAeTargetExposureTime;
int mAeCurrentSensitivity;
};
} // namespace android
#endif // HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA3_H
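
The comments above pin down the fake AE model: each frame, the current exposure moves a fixed fraction (kExposureTrackRate) of the remaining distance to the target, wandering within kExposureWanderMin..kExposureWanderMax stops of normal. A sketch of the convergence step, with an assumed rate since the real constants are defined in EmulatedFakeCamera3.cpp:

#include <stdint.h>

typedef int64_t nsecs_t; // matches the Android typedef

static const float kAssumedTrackRate = 0.2f; // illustrative value only

static nsecs_t trackExposure(nsecs_t current, nsecs_t target) {
    // Move a fixed fraction of the remaining difference each frame.
    return current + (nsecs_t)((target - current) * kAssumedTrackRate);
}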

View file

@ -0,0 +1,472 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedFakeCameraDevice that encapsulates
* fake camera device.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_FakeDevice"
#include <cutils/log.h>
#include "EmulatedFakeCamera.h"
#include "EmulatedFakeCameraDevice.h"
#undef min
#undef max
#include <algorithm>
namespace android {
static const double kCheckXSpeed = 0.00000000096;
static const double kCheckYSpeed = 0.00000000032;
static const double kSquareXSpeed = 0.000000000096;
static const double kSquareYSpeed = 0.000000000160;
static const nsecs_t kSquareColorChangeIntervalNs = seconds(5);
EmulatedFakeCameraDevice::EmulatedFakeCameraDevice(EmulatedFakeCamera* camera_hal)
: EmulatedCameraDevice(camera_hal),
mBlackYUV(kBlack32),
mWhiteYUV(kWhite32),
mRedYUV(kRed8),
mGreenYUV(kGreen8),
mBlueYUV(kBlue8),
mSquareColor(&mRedYUV),
mLastRedrawn(0),
mLastColorChange(0),
mCheckX(0),
mCheckY(0),
mSquareX(0),
mSquareY(0),
mSquareXSpeed(kSquareXSpeed),
mSquareYSpeed(kSquareYSpeed)
#if EFCD_ROTATE_FRAME
, mLastRotatedAt(0),
mCurrentFrameType(0),
mCurrentColor(&mWhiteYUV)
#endif // EFCD_ROTATE_FRAME
{
// Darken the image at the default exposure compensation so that the
// effects of changing the exposure compensation can be seen.
mBlackYUV.Y = mBlackYUV.Y / 2;
mWhiteYUV.Y = mWhiteYUV.Y / 2;
mRedYUV.Y = mRedYUV.Y / 2;
mGreenYUV.Y = mGreenYUV.Y / 2;
mBlueYUV.Y = mBlueYUV.Y / 2;
}
EmulatedFakeCameraDevice::~EmulatedFakeCameraDevice()
{
}
/****************************************************************************
* Emulated camera device abstract interface implementation.
***************************************************************************/
status_t EmulatedFakeCameraDevice::connectDevice()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isInitialized()) {
ALOGE("%s: Fake camera device is not initialized.", __FUNCTION__);
return EINVAL;
}
if (isConnected()) {
ALOGW("%s: Fake camera device is already connected.", __FUNCTION__);
return NO_ERROR;
}
/* There is no device to connect to. */
mState = ECDS_CONNECTED;
return NO_ERROR;
}
status_t EmulatedFakeCameraDevice::disconnectDevice()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
ALOGW("%s: Fake camera device is already disconnected.", __FUNCTION__);
return NO_ERROR;
}
if (isStarted()) {
ALOGE("%s: Cannot disconnect from the started device.", __FUNCTION__);
return EINVAL;
}
/* There is no device to disconnect from. */
mState = ECDS_INITIALIZED;
return NO_ERROR;
}
status_t EmulatedFakeCameraDevice::startDevice(int width,
int height,
uint32_t pix_fmt)
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
ALOGE("%s: Fake camera device is not connected.", __FUNCTION__);
return EINVAL;
}
if (isStarted()) {
ALOGE("%s: Fake camera device is already started.", __FUNCTION__);
return EINVAL;
}
/* Initialize the base class. */
const status_t res =
EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
if (res == NO_ERROR) {
/* Calculate U/V panes inside the framebuffer. */
switch (mPixelFormat) {
case V4L2_PIX_FMT_YVU420:
mFrameVOffset = mYStride * mFrameHeight;
mFrameUOffset = mFrameVOffset + mUVStride * (mFrameHeight / 2);
mUVStep = 1;
break;
case V4L2_PIX_FMT_YUV420:
mFrameUOffset = mYStride * mFrameHeight;
mFrameVOffset = mFrameUOffset + mUVStride * (mFrameHeight / 2);
mUVStep = 1;
break;
case V4L2_PIX_FMT_NV21:
/* Interleaved UV pane, V first. */
mFrameVOffset = mYStride * mFrameHeight;
mFrameUOffset = mFrameVOffset + 1;
mUVStep = 2;
break;
case V4L2_PIX_FMT_NV12:
/* Interleaved UV pane, U first. */
mFrameUOffset = mYStride * mFrameHeight;
mFrameVOffset = mFrameUOffset + 1;
mUVStep = 2;
break;
default:
ALOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
reinterpret_cast<const char*>(&mPixelFormat));
return EINVAL;
}
mLastRedrawn = systemTime(SYSTEM_TIME_MONOTONIC);
mLastColorChange = mLastRedrawn;
/* Number of items in a single row inside U/V panes. */
mUVInRow = (width / 2) * mUVStep;
mState = ECDS_STARTED;
} else {
ALOGE("%s: commonStartDevice failed", __FUNCTION__);
}
return res;
}
status_t EmulatedFakeCameraDevice::stopDevice()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isStarted()) {
ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
return NO_ERROR;
}
EmulatedCameraDevice::commonStopDevice();
mState = ECDS_CONNECTED;
return NO_ERROR;
}
/****************************************************************************
* Worker thread management overrides.
***************************************************************************/
bool EmulatedFakeCameraDevice::produceFrame(void* buffer)
{
#if EFCD_ROTATE_FRAME
const int frame_type = rotateFrame();
switch (frame_type) {
case 0:
drawCheckerboard(buffer);
break;
case 1:
drawStripes(buffer);
break;
case 2:
drawSolid(buffer, mCurrentColor);
break;
}
#else
drawCheckerboard(buffer);
#endif // EFCD_ROTATE_FRAME
return true;
}
/****************************************************************************
* Fake camera device private API
***************************************************************************/
void EmulatedFakeCameraDevice::drawCheckerboard(void* buffer)
{
nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
nsecs_t elapsed = now - mLastRedrawn;
uint8_t* currentFrame = reinterpret_cast<uint8_t*>(buffer);
uint8_t* frameU = currentFrame + mFrameUOffset;
uint8_t* frameV = currentFrame + mFrameVOffset;
const int size = std::min(mFrameWidth, mFrameHeight) / 10;
bool black = true;
if (size == 0) {
// When this happens, it happens at a very high rate,
// so don't log any messages and just return.
return;
}
mCheckX += kCheckXSpeed * elapsed;
mCheckY += kCheckYSpeed * elapsed;
// Allow the X and Y values to transition across two checkerboard boxes
// before resetting it back. This allows for the gray to black transition.
// Note that this is in screen size independent coordinates so that frames
// will look similar regardless of resolution
if (mCheckX > 2.0) {
mCheckX -= 2.0;
}
if (mCheckY > 2.0) {
mCheckY -= 2.0;
}
// Are we in the gray or black zone?
if (mCheckX >= 1.0)
black = false;
if (mCheckY >= 1.0)
black = !black;
int county = static_cast<int>(mCheckY * size) % size;
int checkxremainder = static_cast<int>(mCheckX * size) % size;
YUVPixel adjustedWhite = YUVPixel(mWhiteYUV);
changeWhiteBalance(adjustedWhite.Y, adjustedWhite.U, adjustedWhite.V);
adjustedWhite.Y = changeExposure(adjustedWhite.Y);
YUVPixel adjustedBlack = YUVPixel(mBlackYUV);
adjustedBlack.Y = changeExposure(adjustedBlack.Y);
for (int y = 0; y < mFrameHeight; y++) {
int countx = checkxremainder;
bool current = black;
uint8_t* Y = currentFrame + mYStride * y;
uint8_t* U = frameU + mUVStride * (y / 2);
uint8_t* V = frameV + mUVStride * (y / 2);
for (int x = 0; x < mFrameWidth; x += 2) {
if (current) {
adjustedBlack.get(Y, U, V);
} else {
adjustedWhite.get(Y, U, V);
}
Y[1] = *Y;
Y += 2; U += mUVStep; V += mUVStep;
countx += 2;
if (countx >= size) {
countx = 0;
current = !current;
}
}
if (county++ >= size) {
county = 0;
black = !black;
}
}
/* Run the square. */
const int squareSize = std::min(mFrameWidth, mFrameHeight) / 4;
mSquareX += mSquareXSpeed * elapsed;
mSquareY += mSquareYSpeed * elapsed;
int squareX = mSquareX * mFrameWidth;
int squareY = mSquareY * mFrameHeight;
if (squareX + squareSize > mFrameWidth) {
mSquareXSpeed = -mSquareXSpeed;
double relativeWidth = static_cast<double>(squareSize) / mFrameWidth;
mSquareX -= 2.0 * (mSquareX + relativeWidth - 1.0);
squareX = mSquareX * mFrameWidth;
} else if (squareX < 0) {
mSquareXSpeed = -mSquareXSpeed;
mSquareX = -mSquareX;
squareX = mSquareX * mFrameWidth;
}
if (squareY + squareSize > mFrameHeight) {
mSquareYSpeed = -mSquareYSpeed;
double relativeHeight = static_cast<double>(squareSize) / mFrameHeight;
mSquareY -= 2.0 * (mSquareY + relativeHeight - 1.0);
squareY = mSquareY * mFrameHeight;
} else if (squareY < 0) {
mSquareYSpeed = -mSquareYSpeed;
mSquareY = -mSquareY;
squareY = mSquareY * mFrameHeight;
}
if (now - mLastColorChange > kSquareColorChangeIntervalNs) {
mLastColorChange = now;
mSquareColor = mSquareColor == &mRedYUV ? &mGreenYUV : &mRedYUV;
}
drawSquare(buffer, squareX, squareY, squareSize, mSquareColor);
mLastRedrawn = now;
}
void EmulatedFakeCameraDevice::drawSquare(void* buffer,
int x,
int y,
int size,
const YUVPixel* color)
{
uint8_t* currentFrame = reinterpret_cast<uint8_t*>(buffer);
uint8_t* frameU = currentFrame + mFrameUOffset;
uint8_t* frameV = currentFrame + mFrameVOffset;
const int square_xstop = std::min(mFrameWidth, x + size);
const int square_ystop = std::min(mFrameHeight, y + size);
uint8_t* Y_pos = currentFrame + y * mYStride + x;
YUVPixel adjustedColor = *color;
changeWhiteBalance(adjustedColor.Y, adjustedColor.U, adjustedColor.V);
// Draw the square.
for (; y < square_ystop; y++) {
const int iUV = (y / 2) * mUVStride + (x / 2) * mUVStep;
uint8_t* sqU = frameU + iUV;
uint8_t* sqV = frameV + iUV;
uint8_t* sqY = Y_pos;
for (int i = x; i < square_xstop; i += 2) {
adjustedColor.get(sqY, sqU, sqV);
*sqY = changeExposure(*sqY);
sqY[1] = *sqY;
sqY += 2; sqU += mUVStep; sqV += mUVStep;
}
Y_pos += mYStride;
}
}
#if EFCD_ROTATE_FRAME
void EmulatedFakeCameraDevice::drawSolid(void* buffer, YUVPixel* color)
{
YUVPixel adjustedColor = *color;
changeWhiteBalance(adjustedColor.Y, adjustedColor.U, adjustedColor.V);
/* All Ys are the same, will fill any alignment padding but that's OK */
memset(mCurrentFrame, changeExposure(adjustedColor.Y),
mFrameHeight * mYStride);
/* Fill U, and V panes. */
for (int y = 0; y < mFrameHeight / 2; ++y) {
uint8_t* U = mFrameU + y * mUVStride;
uint8_t* V = mFrameV + y * mUVStride;
for (int x = 0; x < mFrameWidth / 2; ++x, U += mUVStep, V += mUVStep) {
*U = adjustedColor.U;
*V = adjustedColor.V;
}
}
}
void EmulatedFakeCameraDevice::drawStripes(void* buffer)
{
/* Divide frame into 4 stripes. */
const int change_color_at = mFrameHeight / 4;
const int each_in_row = mUVInRow / mUVStep;
uint8_t* pY = mCurrentFrame;
for (int y = 0; y < mFrameHeight; y++, pY += mYStride) {
/* Select the color. */
YUVPixel* color;
const int color_index = y / change_color_at;
if (color_index == 0) {
/* White stripe on top. */
color = &mWhiteYUV;
} else if (color_index == 1) {
/* Then the red stripe. */
color = &mRedYUV;
} else if (color_index == 2) {
/* Then the green stripe. */
color = &mGreenYUV;
} else {
/* And the blue stripe at the bottom. */
color = &mBlueYUV;
}
/* Adjust a copy so the shared pixel values are not modified in place;
 * changeWhiteBalance() would otherwise compound on every row. */
YUVPixel adjustedColor = *color;
changeWhiteBalance(adjustedColor.Y, adjustedColor.U, adjustedColor.V);
/* All Ys at the row are the same. */
memset(pY, changeExposure(adjustedColor.Y), mFrameWidth);
/* Offset of the current row inside U/V panes. */
const int uv_off = (y / 2) * mUVStride;
/* Fill U, and V panes. */
uint8_t* U = mFrameU + uv_off;
uint8_t* V = mFrameV + uv_off;
for (int k = 0; k < each_in_row; k++, U += mUVStep, V += mUVStep) {
*U = adjustedColor.U;
*V = adjustedColor.V;
}
}
}
int EmulatedFakeCameraDevice::rotateFrame()
{
if ((systemTime(SYSTEM_TIME_MONOTONIC) - mLastRotatedAt) >= mRotateFreq) {
mLastRotatedAt = systemTime(SYSTEM_TIME_MONOTONIC);
mCurrentFrameType++;
if (mCurrentFrameType > 2) {
mCurrentFrameType = 0;
}
if (mCurrentFrameType == 2) {
ALOGD("********** Rotated to the SOLID COLOR frame **********");
/* Solid color: lets rotate color too. */
if (mCurrentColor == &mWhiteYUV) {
ALOGD("----- Painting a solid RED frame -----");
mCurrentColor = &mRedYUV;
} else if (mCurrentColor == &mRedYUV) {
ALOGD("----- Painting a solid GREEN frame -----");
mCurrentColor = &mGreenYUV;
} else if (mCurrentColor == &mGreenYUV) {
ALOGD("----- Painting a solid BLUE frame -----");
mCurrentColor = &mBlueYUV;
} else {
/* Back to white. */
ALOGD("----- Painting a solid WHITE frame -----");
mCurrentColor = &mWhiteYUV;
}
} else if (mCurrentFrameType == 0) {
ALOGD("********** Rotated to the CHECKERBOARD frame **********");
} else if (mCurrentFrameType == 1) {
ALOGD("********** Rotated to the STRIPED frame **********");
}
}
return mCurrentFrameType;
}
#endif // EFCD_ROTATE_FRAME
}; /* namespace android */
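
For reference, the plane-offset math in startDevice() works out as follows for a concrete case: a 640x480 NV21 frame with mYStride = 640 and mUVStride = 640 puts V at byte 640 * 480 = 307200, U one byte later, and steps U/V by 2 because the chroma pane is interleaved. A distilled sketch of that computation (names ours, not the HAL's):

struct Nv21Layout { int vOffset, uOffset, uvStep; };

static Nv21Layout nv21Layout(int yStride, int frameHeight) {
    Nv21Layout layout;
    layout.vOffset = yStride * frameHeight; // chroma pane follows the Y pane
    layout.uOffset = layout.vOffset + 1;    // V first, then U, interleaved
    layout.uvStep = 2;
    return layout;
}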

View file

@ -0,0 +1,185 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_DEVICE_H
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_DEVICE_H
/*
* Contains declaration of a class EmulatedFakeCameraDevice that encapsulates
* a fake camera device.
*/
#include "Converters.h"
#include "EmulatedCameraDevice.h"
/* This is used for debugging format / conversion issues. If EFCD_ROTATE_FRAME
 * is set to 0, the frame content will always be the "checkerboard". Otherwise,
 * if EFCD_ROTATE_FRAME is set to a non-zero value, the frame content will
 * "rotate" from a "checkerboard" frame to a "white/red/green/blue stripes"
 * frame, to a solid-color frame. Frame content rotation helps find bugs in
 * format conversions.
 */
#define EFCD_ROTATE_FRAME 0
namespace android {
class EmulatedFakeCamera;
/* Encapsulates a fake camera device.
* Fake camera device emulates a camera device by providing frames containing
* a black and white checker board, moving diagonally towards the 0,0 corner.
* There is also a green or red square that bounces inside the frame, changing
* its color every few seconds.
*/
class EmulatedFakeCameraDevice : public EmulatedCameraDevice {
public:
/* Constructs EmulatedFakeCameraDevice instance. */
explicit EmulatedFakeCameraDevice(EmulatedFakeCamera* camera_hal);
/* Destructs EmulatedFakeCameraDevice instance. */
~EmulatedFakeCameraDevice();
/***************************************************************************
* Emulated camera device abstract interface implementation.
* See declarations of these methods in EmulatedCameraDevice class for
* information on each of these methods.
**************************************************************************/
public:
/* Connects to the camera device.
* Since there is no real device to connect to, this method does nothing,
* but changes the state.
*/
status_t connectDevice();
/* Disconnects from the camera device.
* Since there is no real device to disconnect from, this method does
* nothing, but changes the state.
*/
status_t disconnectDevice();
/* Starts the camera device. */
status_t startDevice(int width, int height, uint32_t pix_fmt);
/* Stops the camera device. */
status_t stopDevice();
/***************************************************************************
* Worker thread management overrides.
* See declarations of these methods in EmulatedCameraDevice class for
* information on each of these methods.
**************************************************************************/
protected:
/* Implementation of the frame production routine. */
bool produceFrame(void* buffer) override;
/****************************************************************************
* Fake camera device private API
***************************************************************************/
private:
/* Draws a black and white checker board in |buffer| with the assumption
* that the size of buffer matches the current frame buffer size. */
void drawCheckerboard(void* buffer);
/* Draws a square of the given color in the current frame buffer.
* Param:
* x, y - Coordinates of the top left corner of the square in the buffer.
* size - Size of the square's side.
* color - Square's color.
*/
void drawSquare(void* buffer, int x, int y, int size, const YUVPixel* color);
#if EFCD_ROTATE_FRAME
void drawSolid(void* buffer, YUVPixel* color);
void drawStripes(void* buffer);
int rotateFrame();
#endif // EFCD_ROTATE_FRAME
/****************************************************************************
* Fake camera device data members
***************************************************************************/
private:
/*
* Pixel colors in YUV format used when drawing the checker board.
*/
YUVPixel mBlackYUV;
YUVPixel mWhiteYUV;
YUVPixel mRedYUV;
YUVPixel mGreenYUV;
YUVPixel mBlueYUV;
YUVPixel* mSquareColor;
/* Last time the frame has been redrawn. */
nsecs_t mLastRedrawn;
/*
* Precalculated values related to U/V panes.
*/
/* U pane inside the framebuffer. */
ptrdiff_t mFrameUOffset;
/* V pane inside the framebuffer. */
ptrdiff_t mFrameVOffset;
/* Defines byte distance between adjacent U, and V values. */
int mUVStep;
/* Defines number of Us and Vs in a row inside the U/V panes.
* Note that if U/V panes are interleaved, this value reflects the total
* number of both, Us and Vs in a single row in the interleaved UV pane. */
int mUVInRow;
/*
* Checkerboard drawing related stuff
*/
nsecs_t mLastColorChange;
double mCheckX;
double mCheckY;
double mSquareX;
double mSquareY;
double mSquareXSpeed;
double mSquareYSpeed;
#if EFCD_ROTATE_FRAME
/* Frame rotation frequency in nanosec (currently - 3 sec) */
static const nsecs_t mRotateFreq = 3000000000LL;
/* Last time the frame has rotated. */
nsecs_t mLastRotatedAt;
/* Type of the frame to display in the current rotation:
* 0 - Checkerboard.
* 1 - White/Red/Green/Blue horizontal stripes.
* 2 - Solid color. */
int mCurrentFrameType;
/* Color to use to paint the solid color frame. Colors will rotate between
* white, red, green, and blue each time rotation comes to the solid color
* frame. */
YUVPixel* mCurrentColor;
#endif // EFCD_ROTATE_FRAME
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_DEVICE_H */

View file

@ -0,0 +1,168 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedQemuCamera that encapsulates
* functionality of an emulated camera connected to the host.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_QemuCamera"
#include <cutils/log.h>
#include "EmulatedQemuCamera.h"
#include "EmulatedCameraFactory.h"
#undef min
#undef max
#include <sstream>
#include <string>
#include <vector>
namespace android {
EmulatedQemuCamera::EmulatedQemuCamera(int cameraId, struct hw_module_t* module)
: EmulatedCamera(cameraId, module),
mQemuCameraDevice(this)
{
}
EmulatedQemuCamera::~EmulatedQemuCamera()
{
}
/****************************************************************************
* EmulatedCamera virtual overrides.
***************************************************************************/
status_t EmulatedQemuCamera::Initialize(const char* device_name,
const char* frame_dims,
const char* facing_dir)
{
ALOGV("%s:\n Name=%s\n Facing '%s'\n Dimensions=%s",
__FUNCTION__, device_name, facing_dir, frame_dims);
/* Save dimensions. */
mFrameDims = frame_dims;
/* Initialize camera device. */
status_t res = mQemuCameraDevice.Initialize(device_name);
if (res != NO_ERROR) {
return res;
}
/* Initialize base class. */
res = EmulatedCamera::Initialize();
if (res != NO_ERROR) {
return res;
}
/*
* Set customizable parameters.
*/
using Size = std::pair<int, int>;
std::vector<Size> resolutions;
std::stringstream ss(frame_dims);
std::string input;
while (std::getline(ss, input, ',')) {
int width = 0;
int height = 0;
char none = 0;
/* Expect exactly two conversions: that means there was nothing after
 * the height, and we don't want any trailing characters. Otherwise we
 * just ignore this entry. */
if (sscanf(input.c_str(), "%dx%d%c", &width, &height, &none) == 2) {
resolutions.push_back(Size(width, height));
ALOGE("%s: %dx%d", __FUNCTION__, width, height);
}
}
/* The Android framework contains a wrapper around the v1 Camera API so that
* it can be used with API v2. This wrapper attempts to figure out the
* sensor resolution of the camera by looking at the resolution with the
* largest area and infer that the dimensions of that resolution must also
* be the size of the camera sensor. Any resolution with a dimension that
* exceeds the sensor size will be rejected so Camera API calls will start
* failing. To work around this we remove any resolutions with at least one
* dimension exceeding that of the max area resolution. */
/* First find the resolution with the maximum area, the "sensor size" */
int maxArea = 0;
int maxAreaWidth = 0;
int maxAreaHeight = 0;
for (const auto& res : resolutions) {
int area = res.first * res.second;
if (area > maxArea) {
maxArea = area;
maxAreaWidth = res.first;
maxAreaHeight = res.second;
}
}
/* Next remove any resolution with a dimension exceeding the sensor size. */
for (auto res = resolutions.begin(); res != resolutions.end(); ) {
if (res->first > maxAreaWidth || res->second > maxAreaHeight) {
/* Width and/or height larger than sensor, remove it */
res = resolutions.erase(res);
} else {
++res;
}
}
if (resolutions.empty()) {
ALOGE("%s: Qemu camera has no valid resolutions", __FUNCTION__);
return EINVAL;
}
/* Next rebuild the frame size string for the camera parameters */
std::stringstream sizesStream;
for (size_t i = 0; i < resolutions.size(); ++i) {
if (i != 0) {
sizesStream << ',';
}
sizesStream << resolutions[i].first << 'x' << resolutions[i].second;
}
std::string sizes = sizesStream.str();
mParameters.set(EmulatedCamera::FACING_KEY, facing_dir);
mParameters.set(EmulatedCamera::ORIENTATION_KEY,
gEmulatedCameraFactory.getQemuCameraOrientation());
mParameters.set(CameraParameters::KEY_ROTATION,
gEmulatedCameraFactory.getQemuCameraOrientation());
mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
sizes.c_str());
mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
sizes.c_str());
/*
* Use first dimension reported by the device to set current preview and
* picture sizes.
*/
int x = resolutions[0].first;
int y = resolutions[0].second;
mParameters.setPreviewSize(x, y);
mParameters.setPictureSize(x, y);
ALOGV("%s: Qemu camera %s is initialized. Current frame is %dx%d",
__FUNCTION__, device_name, x, y);
return NO_ERROR;
}
EmulatedCameraDevice* EmulatedQemuCamera::getCameraDevice()
{
return &mQemuCameraDevice;
}
}; /* namespace android */
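
The sensor-size workaround above can be exercised in isolation. A self-contained sketch of the same parse-and-filter logic (the function name is ours, not the HAL's):

#include <cstdio>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

static std::vector<std::pair<int, int> > filterResolutions(
        const std::string& frameDims) {
    std::vector<std::pair<int, int> > sizes;
    std::stringstream ss(frameDims);
    std::string token;
    int maxW = 0, maxH = 0, maxArea = 0;
    while (std::getline(ss, token, ',')) {
        int w = 0, h = 0;
        char trailing = 0;
        // Exactly two conversions means no trailing characters.
        if (sscanf(token.c_str(), "%dx%d%c", &w, &h, &trailing) != 2) continue;
        sizes.push_back(std::make_pair(w, h));
        if (w * h > maxArea) { maxArea = w * h; maxW = w; maxH = h; }
    }
    // Drop anything wider or taller than the implied sensor size.
    for (std::vector<std::pair<int, int> >::iterator it = sizes.begin();
            it != sizes.end();) {
        if (it->first > maxW || it->second > maxH) it = sizes.erase(it);
        else ++it;
    }
    return sizes;
}
// e.g. "640x480,1280x720,1600x300" keeps 640x480 and 1280x720: the
// 1600-wide entry exceeds the 1280x720 max-area ("sensor") resolution.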

View file

@ -0,0 +1,73 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_H
#define HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_H
/*
* Contains declaration of a class EmulatedQemuCamera that encapsulates
* functionality of an emulated camera connected to the host.
*/
#include "EmulatedCamera.h"
#include "EmulatedQemuCameraDevice.h"
namespace android {
/* Encapsulates functionality of an emulated camera connected to the host.
*/
class EmulatedQemuCamera : public EmulatedCamera {
public:
/* Constructs EmulatedQemuCamera instance. */
EmulatedQemuCamera(int cameraId, struct hw_module_t* module);
/* Destructs EmulatedQemuCamera instance. */
~EmulatedQemuCamera();
/***************************************************************************
* EmulatedCamera virtual overrides.
**************************************************************************/
public:
/* Initializes EmulatedQemuCamera instance. */
status_t Initialize(const char* device_name,
const char* frame_dims,
const char* facing_dir);
/***************************************************************************
* EmulatedCamera abstract API implementation.
**************************************************************************/
protected:
/* Gets the emulated camera device used by this instance of the emulated camera.
*/
EmulatedCameraDevice* getCameraDevice();
/***************************************************************************
* Data members.
**************************************************************************/
protected:
/* Contained qemu camera device object. */
EmulatedQemuCameraDevice mQemuCameraDevice;
/* Supported frame dimensions reported by the camera device. */
String8 mFrameDims;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_H */

View file

@ -0,0 +1,55 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedQemuCamera2 that encapsulates
* functionality of a host webcam with further processing to simulate the
* capabilities of a v2 camera device.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_QemuCamera2"
#include <cutils/log.h>
#include <cutils/properties.h>
#include "EmulatedQemuCamera2.h"
#include "EmulatedCameraFactory.h"
namespace android {
EmulatedQemuCamera2::EmulatedQemuCamera2(int cameraId,
bool facingBack,
struct hw_module_t* module)
: EmulatedCamera2(cameraId, module),
mFacingBack(facingBack)
{
ALOGD("Constructing emulated qemu camera 2 facing %s",
facingBack ? "back" : "front");
}
EmulatedQemuCamera2::~EmulatedQemuCamera2()
{
}
/****************************************************************************
* Public API overrides
***************************************************************************/
status_t EmulatedQemuCamera2::Initialize()
{
return NO_ERROR;
}
}; /* namespace android */

View file

@ -0,0 +1,66 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA2_H
#define HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA2_H
/*
* Contains declaration of a class EmulatedQemuCamera2 that encapsulates
* functionality of a host webcam with added processing to implement version 2
* of the camera device interface.
*/
#include "EmulatedCamera2.h"
namespace android {
/* Encapsulates functionality of an advanced fake camera based on real host camera data.
*/
class EmulatedQemuCamera2 : public EmulatedCamera2 {
public:
/* Constructs EmulatedQemuCamera2 instance. */
EmulatedQemuCamera2(int cameraId, bool facingBack, struct hw_module_t* module);
/* Destructs EmulatedQemuCamera2 instance. */
~EmulatedQemuCamera2();
/****************************************************************************
* EmulatedCamera2 virtual overrides.
***************************************************************************/
public:
/* Initializes EmulatedQemuCamera2 instance. */
status_t Initialize();
/****************************************************************************
* EmulatedCamera abstract API implementation.
***************************************************************************/
protected:
/****************************************************************************
* Data members.
***************************************************************************/
protected:
/* Facing back (true) or front (false) switch. */
bool mFacingBack;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA2_H */

View file

@ -0,0 +1,311 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class EmulatedQemuCameraDevice that encapsulates
* an emulated camera device connected to the host.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_QemuDevice"
#include <cutils/log.h>
#include "EmulatedQemuCamera.h"
#include "EmulatedQemuCameraDevice.h"
namespace android {
EmulatedQemuCameraDevice::EmulatedQemuCameraDevice(EmulatedQemuCamera* camera_hal)
: EmulatedCameraDevice(camera_hal),
mQemuClient()
{
}
EmulatedQemuCameraDevice::~EmulatedQemuCameraDevice()
{
}
/****************************************************************************
* Public API
***************************************************************************/
status_t EmulatedQemuCameraDevice::Initialize(const char* device_name)
{
/* Connect to the service. */
char connect_str[256];
snprintf(connect_str, sizeof(connect_str), "name=%s", device_name);
status_t res = mQemuClient.connectClient(connect_str);
if (res != NO_ERROR) {
return res;
}
/* Initialize base class. */
res = EmulatedCameraDevice::Initialize();
if (res == NO_ERROR) {
ALOGV("%s: Connected to the emulated camera service '%s'",
__FUNCTION__, device_name);
mDeviceName = device_name;
} else {
mQemuClient.queryDisconnect();
}
return res;
}
/****************************************************************************
* Emulated camera device abstract interface implementation.
***************************************************************************/
status_t EmulatedQemuCameraDevice::connectDevice()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isInitialized()) {
ALOGE("%s: Qemu camera device is not initialized.", __FUNCTION__);
return EINVAL;
}
if (isConnected()) {
ALOGW("%s: Qemu camera device '%s' is already connected.",
__FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
/* Connect to the camera device via emulator. */
const status_t res = mQemuClient.queryConnect();
if (res == NO_ERROR) {
ALOGV("%s: Connected to device '%s'",
__FUNCTION__, (const char*)mDeviceName);
mState = ECDS_CONNECTED;
} else {
ALOGE("%s: Connection to device '%s' failed",
__FUNCTION__, (const char*)mDeviceName);
}
return res;
}
status_t EmulatedQemuCameraDevice::disconnectDevice()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
ALOGW("%s: Qemu camera device '%s' is already disconnected.",
__FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
if (isStarted()) {
ALOGE("%s: Cannot disconnect from the started device '%s.",
__FUNCTION__, (const char*)mDeviceName);
return EINVAL;
}
/* Disconnect from the camera device via emulator. */
const status_t res = mQemuClient.queryDisconnect();
if (res == NO_ERROR) {
ALOGV("%s: Disonnected from device '%s'",
__FUNCTION__, (const char*)mDeviceName);
mState = ECDS_INITIALIZED;
} else {
ALOGE("%s: Disconnection from device '%s' failed",
__FUNCTION__, (const char*)mDeviceName);
}
return res;
}
status_t EmulatedQemuCameraDevice::startDevice(int width,
int height,
uint32_t pix_fmt)
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
ALOGE("%s: Qemu camera device '%s' is not connected.",
__FUNCTION__, (const char*)mDeviceName);
return EINVAL;
}
if (isStarted()) {
ALOGW("%s: Qemu camera device '%s' is already started.",
__FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
status_t res = EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
if (res != NO_ERROR) {
ALOGE("%s: commonStartDevice failed", __FUNCTION__);
return res;
}
/* Allocate preview frame buffer. */
/* TODO: Watch out for preview format changes! At this point we implement
 * RGB32 only. */
mPreviewFrames[0].resize(mTotalPixels);
mPreviewFrames[1].resize(mTotalPixels);
mFrameBufferPairs[0].first = mFrameBuffers[0].data();
mFrameBufferPairs[0].second = mPreviewFrames[0].data();
mFrameBufferPairs[1].first = mFrameBuffers[1].data();
mFrameBufferPairs[1].second = mPreviewFrames[1].data();
/* Start the actual camera device. */
res = mQemuClient.queryStart(mPixelFormat, mFrameWidth, mFrameHeight);
if (res == NO_ERROR) {
ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
__FUNCTION__, (const char*)mDeviceName,
reinterpret_cast<const char*>(&mPixelFormat),
mFrameWidth, mFrameHeight);
mState = ECDS_STARTED;
} else {
ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
__FUNCTION__, (const char*)mDeviceName,
reinterpret_cast<const char*>(&pix_fmt), width, height);
}
return res;
}
status_t EmulatedQemuCameraDevice::stopDevice()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
if (!isStarted()) {
ALOGW("%s: Qemu camera device '%s' is not started.",
__FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
/* Stop the actual camera device. */
status_t res = mQemuClient.queryStop();
if (res == NO_ERROR) {
mPreviewFrames[0].clear();
mPreviewFrames[1].clear();
// No need to keep all that memory around as capacity; shrink it.
mPreviewFrames[0].shrink_to_fit();
mPreviewFrames[1].shrink_to_fit();
EmulatedCameraDevice::commonStopDevice();
mState = ECDS_CONNECTED;
ALOGV("%s: Qemu camera device '%s' is stopped",
__FUNCTION__, (const char*)mDeviceName);
} else {
ALOGE("%s: Unable to stop device '%s'",
__FUNCTION__, (const char*)mDeviceName);
}
return res;
}
/****************************************************************************
* EmulatedCameraDevice virtual overrides
***************************************************************************/
status_t EmulatedQemuCameraDevice::getCurrentFrame(void* buffer,
uint32_t pixelFormat) {
if (!isStarted()) {
ALOGE("%s: Device is not started", __FUNCTION__);
return EINVAL;
}
if (buffer == nullptr) {
ALOGE("%s: Invalid buffer provided", __FUNCTION__);
return EINVAL;
}
FrameLock lock(*this);
const void* primary = mCameraThread->getPrimaryBuffer();
auto frameBufferPair = reinterpret_cast<const FrameBufferPair*>(primary);
uint8_t* frame = frameBufferPair->first;
if (frame == nullptr) {
ALOGE("%s: No frame", __FUNCTION__);
return EINVAL;
}
return getCurrentFrameImpl(reinterpret_cast<const uint8_t*>(frame),
reinterpret_cast<uint8_t*>(buffer),
pixelFormat);
}
status_t EmulatedQemuCameraDevice::getCurrentPreviewFrame(void* buffer) {
if (!isStarted()) {
ALOGE("%s: Device is not started", __FUNCTION__);
return EINVAL;
}
if (buffer == nullptr) {
ALOGE("%s: Invalid buffer provided", __FUNCTION__);
return EINVAL;
}
FrameLock lock(*this);
const void* primary = mCameraThread->getPrimaryBuffer();
auto frameBufferPair = reinterpret_cast<const FrameBufferPair*>(primary);
uint32_t* previewFrame = frameBufferPair->second;
if (previewFrame == nullptr) {
ALOGE("%s: No frame", __FUNCTION__);
return EINVAL;
}
memcpy(buffer, previewFrame, mTotalPixels * 4);
return NO_ERROR;
}
const void* EmulatedQemuCameraDevice::getCurrentFrame() {
if (mCameraThread.get() == nullptr) {
return nullptr;
}
const void* primary = mCameraThread->getPrimaryBuffer();
auto frameBufferPair = reinterpret_cast<const FrameBufferPair*>(primary);
uint8_t* frame = frameBufferPair->first;
return frame;
}
/****************************************************************************
* Worker thread management overrides.
***************************************************************************/
bool EmulatedQemuCameraDevice::produceFrame(void* buffer)
{
auto frameBufferPair = reinterpret_cast<FrameBufferPair*>(buffer);
uint8_t* rawFrame = frameBufferPair->first;
uint32_t* previewFrame = frameBufferPair->second;
status_t query_res = mQemuClient.queryFrame(rawFrame, previewFrame,
mFrameBufferSize,
mTotalPixels * 4,
mWhiteBalanceScale[0],
mWhiteBalanceScale[1],
mWhiteBalanceScale[2],
mExposureCompensation);
if (query_res != NO_ERROR) {
ALOGE("%s: Unable to get current video frame: %s",
__FUNCTION__, strerror(query_res));
return false;
}
return true;
}
void* EmulatedQemuCameraDevice::getPrimaryBuffer() {
return &mFrameBufferPairs[0];
}
void* EmulatedQemuCameraDevice::getSecondaryBuffer() {
return &mFrameBufferPairs[1];
}
}; /* namespace android */

View file

@ -0,0 +1,138 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_DEVICE_H
#define HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_DEVICE_H
/*
* Contains declaration of a class EmulatedQemuCameraDevice that encapsulates
* an emulated camera device connected to the host.
*/
#include "EmulatedCameraDevice.h"
#include "QemuClient.h"
namespace android {
class EmulatedQemuCamera;
/* Encapsulates an emulated camera device connected to the host.
*/
class EmulatedQemuCameraDevice : public EmulatedCameraDevice {
public:
/* Constructs EmulatedQemuCameraDevice instance. */
explicit EmulatedQemuCameraDevice(EmulatedQemuCamera* camera_hal);
/* Destructs EmulatedQemuCameraDevice instance. */
~EmulatedQemuCameraDevice();
/***************************************************************************
* Public API
**************************************************************************/
public:
/* Initializes EmulatedQemuCameraDevice instance.
* Param:
* device_name - Name of the camera device connected to the host. The name
* that is used here must have been reported by the 'factory' camera
* service when it listed camera devices connected to the host.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
status_t Initialize(const char* device_name);
/***************************************************************************
* Emulated camera device abstract interface implementation.
* See declarations of these methods in EmulatedCameraDevice class for
* information on each of these methods.
**************************************************************************/
public:
/* Connects to the camera device. */
status_t connectDevice();
/* Disconnects from the camera device. */
status_t disconnectDevice();
/* Starts capturing frames from the camera device. */
status_t startDevice(int width, int height, uint32_t pix_fmt);
/* Stops capturing frames from the camera device. */
status_t stopDevice();
/***************************************************************************
* EmulatedCameraDevice virtual overrides
* See declarations of these methods in EmulatedCameraDevice class for
* information on each of these methods.
**************************************************************************/
public:
/* Copy the current frame to |buffer| */
status_t getCurrentFrame(void* buffer, uint32_t pixelFormat) override;
/* Copy the current preview frame to |buffer| */
status_t getCurrentPreviewFrame(void* buffer) override;
/* Get a pointer to the current frame, lock it first using FrameLock in
* EmulatedCameraDevice class */
const void* getCurrentFrame() override;
/***************************************************************************
* Worker thread management overrides.
* See declarations of these methods in EmulatedCameraDevice class for
* information on each of these methods.
**************************************************************************/
protected:
/* Implementation of the frame production routine. */
bool produceFrame(void* buffer) override;
void* getPrimaryBuffer() override;
void* getSecondaryBuffer() override;
/***************************************************************************
* Qemu camera device data members
**************************************************************************/
private:
/* Qemu client that is used to communicate with the 'emulated camera'
* service, created for this instance in the emulator. */
CameraQemuClient mQemuClient;
/* Name of the camera device connected to the host. */
String8 mDeviceName;
/* Current preview framebuffer. */
std::vector<uint32_t> mPreviewFrames[2];
/* Since the Qemu camera needs to keep track of two buffers per frame we
* use a pair here. One frame is the camera frame and the other is the
* preview frame. These are in different formats and instead of converting
* them in the guest it's more efficient to have the host provide the same
* frame in two different formats. The first buffer in the pair is the raw
* frame and the second buffer is the RGB encoded frame. The downside of
* this is that we need to override the getCurrentFrame and
* getCurrentPreviewFrame methods to extract the correct buffer from this
* pair. */
using FrameBufferPair = std::pair<uint8_t*, uint32_t*>;
FrameBufferPair mFrameBufferPairs[2];
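/* As an illustrative example: for a 640x480 RGB32 preview, each pair holds
 * a pointer to the raw camera frame (mFrameBufferSize bytes) and a pointer
 * to the matching 640x480 preview pixels (mTotalPixels * 4 bytes);
 * produceFrame() fills both sides in a single queryFrame() round trip. */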
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_DEVICE_H */

View file

@ -0,0 +1,413 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Exif"
#include <cutils/log.h>
#include <inttypes.h>
#include <math.h>
#include <stdint.h>
#include <CameraParameters.h>
using ::android::hardware::camera::common::V1_0::helper::CameraParameters;
using ::android::hardware::camera::common::V1_0::helper::Size;
#include "Exif.h"
#include <libexif/exif-data.h>
#include <libexif/exif-entry.h>
#include <libexif/exif-ifd.h>
#include <libexif/exif-tag.h>
#include <string>
#include <vector>
// For GPS timestamping we want to ensure we use a 64-bit time value. 32-bit
// platforms provide time64_t for this, while on 64-bit platforms time_t is
// already 64 bits wide.
#if defined(__LP64__)
#include <time.h>
using Timestamp = time_t;
#define TIMESTAMP_TO_TM(timestamp, tm) gmtime_r(timestamp, tm)
#else
#include <time64.h>
using Timestamp = time64_t;
#define TIMESTAMP_TO_TM(timestamp, tm) gmtime64_r(timestamp, tm)
#endif
namespace android {
// A prefix that is used for tags with the "undefined" format to indicate that
// the contents are ASCII encoded. See the user comment section of the EXIF spec
// for more details http://www.exif.org/Exif2-2.PDF
static const unsigned char kAsciiPrefix[] = {
0x41, 0x53, 0x43, 0x49, 0x49, 0x00, 0x00, 0x00 // "ASCII\0\0\0"
};
// Remove an existing EXIF entry from |exifData| if it exists. This is useful
// when replacing existing data; it's easier to remove the entry and
// re-allocate it than to adjust the amount of allocated data.
static void removeExistingEntry(ExifData* exifData, ExifIfd ifd, int tag) {
ExifEntry* entry = exif_content_get_entry(exifData->ifd[ifd],
static_cast<ExifTag>(tag));
if (entry) {
exif_content_remove_entry(exifData->ifd[ifd], entry);
}
}
static ExifEntry* allocateEntry(int tag,
ExifFormat format,
unsigned int numComponents) {
ExifMem* mem = exif_mem_new_default();
ExifEntry* entry = exif_entry_new_mem(mem);
unsigned int size = numComponents * exif_format_get_size(format);
entry->data = reinterpret_cast<unsigned char*>(exif_mem_alloc(mem, size));
entry->size = size;
entry->tag = static_cast<ExifTag>(tag);
entry->components = numComponents;
entry->format = format;
exif_mem_unref(mem);
return entry;
}
// Create an entry and place it in |exifData|. The entry is initialized with
// an array of floats from |values|.
template<size_t N>
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag,
const float (&values)[N],
float denominator = 1000.0) {
removeExistingEntry(exifData, ifd, tag);
ExifByteOrder byteOrder = exif_data_get_byte_order(exifData);
ExifEntry* entry = allocateEntry(tag, EXIF_FORMAT_RATIONAL, N);
exif_content_add_entry(exifData->ifd[ifd], entry);
unsigned int rationalSize = exif_format_get_size(EXIF_FORMAT_RATIONAL);
for (size_t i = 0; i < N; ++i) {
ExifRational rational = {
static_cast<uint32_t>(values[i] * denominator),
static_cast<uint32_t>(denominator)
};
exif_set_rational(&entry->data[i * rationalSize], byteOrder, rational);
}
// Unref entry after changing owner to the ExifData struct
exif_entry_unref(entry);
return true;
}
// Create an entry with a single float |value| in it and place it in |exifData|
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag,
const float value,
float denominator = 1000.0) {
float values[1] = { value };
// Recycling functions is good for the environment
return createEntry(exifData, ifd, tag, values, denominator);
}
// Create an entry and place it in |exifData|. The entry contains the raw data
// pointed to by |data| of length |size|.
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag,
const unsigned char* data,
size_t size,
ExifFormat format = EXIF_FORMAT_UNDEFINED) {
removeExistingEntry(exifData, ifd, tag);
ExifEntry* entry = allocateEntry(tag, format, size);
memcpy(entry->data, data, size);
exif_content_add_entry(exifData->ifd[ifd], entry);
// Unref entry after changing owner to the ExifData struct
exif_entry_unref(entry);
return true;
}
// Create an entry and place it in |exifData|. The entry is initialized with
// the string provided in |value|.
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag,
const char* value) {
unsigned int length = strlen(value) + 1;
const unsigned char* data = reinterpret_cast<const unsigned char*>(value);
return createEntry(exifData, ifd, tag, data, length, EXIF_FORMAT_ASCII);
}
// Create an entry and place it in |exifData|. The entry is initialized with a
// single byte in |value|.
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag,
uint8_t value) {
return createEntry(exifData, ifd, tag, &value, 1, EXIF_FORMAT_BYTE);
}
// Create an entry and place it in |exifData|. The entry is default-initialized
// by the exif library based on |tag|.
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag) {
removeExistingEntry(exifData, ifd, tag);
ExifEntry* entry = exif_entry_new();
exif_content_add_entry(exifData->ifd[ifd], entry);
exif_entry_initialize(entry, static_cast<ExifTag>(tag));
// Unref entry after changing owner to the ExifData struct
exif_entry_unref(entry);
return true;
}
// Create an entry with a single EXIF LONG (32-bit value) and place it in
// |exifData|.
static bool createEntry(ExifData* exifData,
ExifIfd ifd,
int tag,
int value) {
removeExistingEntry(exifData, ifd, tag);
ExifByteOrder byteOrder = exif_data_get_byte_order(exifData);
ExifEntry* entry = allocateEntry(tag, EXIF_FORMAT_LONG, 1);
exif_content_add_entry(exifData->ifd[ifd], entry);
exif_set_long(entry->data, byteOrder, value);
// Unref entry after changing owner to the ExifData struct
exif_entry_unref(entry);
return true;
}
static bool getCameraParam(const CameraParameters& parameters,
const char* parameterKey,
const char** outValue) {
const char* value = parameters.get(parameterKey);
if (value) {
*outValue = value;
return true;
}
return false;
}
static bool getCameraParam(const CameraParameters& parameters,
const char* parameterKey,
float* outValue) {
const char* value = parameters.get(parameterKey);
if (value) {
*outValue = parameters.getFloat(parameterKey);
return true;
}
return false;
}
static bool getCameraParam(const CameraParameters& parameters,
const char* parameterKey,
int64_t* outValue) {
const char* value = parameters.get(parameterKey);
if (value) {
char dummy = 0;
// Attempt to scan an extra character and then make sure it was not
// scanned by checking that the return value indicates only one item.
// This way we fail on any trailing characters
if (sscanf(value, "%" SCNd64 "%c", outValue, &dummy) == 1) {
return true;
}
}
return false;
}
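// For example, scanning "1234" matches only the integer and returns 1
// (accepted), while scanning "1234x" also matches the trailing %c and
// returns 2 (rejected).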
// Convert a GPS coordinate represented as a decimal degree value to sexagesimal
// GPS coordinates comprised of <degrees> <minutes>' <seconds>"
static void convertGpsCoordinate(float degrees, float (*result)[3]) {
float absDegrees = fabs(degrees);
// First value is degrees without any decimal digits
(*result)[0] = floor(absDegrees);
// Subtract degrees so we only have the fraction left, then multiply by
// 60 to get the minutes
float minutes = (absDegrees - (*result)[0]) * 60.0f;
(*result)[1] = floor(minutes);
// Same thing for seconds but here we store seconds with the fraction
float seconds = (minutes - (*result)[1]) * 60.0f;
(*result)[2] = seconds;
}
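// For example, degrees = 37.7749 converts to 37 degrees, 46 minutes and
// 29.64 seconds: 0.7749 * 60 = 46.494 minutes, and the remaining 0.494
// fraction yields 0.494 * 60 = 29.64 seconds.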
// Convert a UNIX epoch timestamp to a timestamp comprised of three floats for
// hour, minute and second, and a date part that is represented as a string.
static bool convertTimestampToTimeAndDate(int64_t timestamp,
float (*timeValues)[3],
std::string* date) {
Timestamp time = timestamp;
struct tm utcTime;
if (TIMESTAMP_TO_TM(&time, &utcTime) == nullptr) {
ALOGE("Could not decompose timestamp into components");
return false;
}
(*timeValues)[0] = utcTime.tm_hour;
(*timeValues)[1] = utcTime.tm_min;
(*timeValues)[2] = utcTime.tm_sec;
char buffer[64] = {};
if (strftime(buffer, sizeof(buffer), "%Y:%m:%d", &utcTime) == 0) {
ALOGE("Could not construct date string from timestamp");
return false;
}
*date = buffer;
return true;
}
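// For example, timestamp 0 (the UNIX epoch) decomposes into the time values
// {0, 0, 0} and the date string "1970:01:01".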
ExifData* createExifData(const CameraParameters& params) {
ExifData* exifData = exif_data_new();
exif_data_set_option(exifData, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
exif_data_set_data_type(exifData, EXIF_DATA_TYPE_COMPRESSED);
exif_data_set_byte_order(exifData, EXIF_BYTE_ORDER_INTEL);
// Create mandatory exif fields and set their default values
exif_data_fix(exifData);
float triplet[3];
float floatValue = 0.0f;
const char* stringValue;
int64_t degrees;
// Datetime, creating and initializing a datetime tag will automatically
// set the current date and time in the tag so just do that.
createEntry(exifData, EXIF_IFD_0, EXIF_TAG_DATE_TIME);
// Make and model
createEntry(exifData, EXIF_IFD_0, EXIF_TAG_MAKE, "Emulator-Goldfish");
createEntry(exifData, EXIF_IFD_0, EXIF_TAG_MODEL, "Emulator-Goldfish");
// Picture size
int width = -1, height = -1;
params.getPictureSize(&width, &height);
if (width >= 0 && height >= 0) {
createEntry(exifData, EXIF_IFD_EXIF,
EXIF_TAG_PIXEL_X_DIMENSION, width);
createEntry(exifData, EXIF_IFD_EXIF,
EXIF_TAG_PIXEL_Y_DIMENSION, height);
}
// Orientation
if (getCameraParam(params,
CameraParameters::KEY_ROTATION,
&degrees)) {
// Exif orientation values, please refer to
// http://www.exif.org/Exif2-2.PDF, Section 4.6.4-A-Orientation
// Or these websites:
// http://sylvana.net/jpegcrop/exif_orientation.html
// http://www.impulseadventure.com/photo/exif-orientation.html
enum {
EXIF_ROTATE_CAMERA_CW0 = 1,
EXIF_ROTATE_CAMERA_CW90 = 6,
EXIF_ROTATE_CAMERA_CW180 = 3,
EXIF_ROTATE_CAMERA_CW270 = 8,
};
uint16_t exifOrien = 1;
switch (degrees) {
case 0:
exifOrien = EXIF_ROTATE_CAMERA_CW0;
break;
case 90:
exifOrien = EXIF_ROTATE_CAMERA_CW90;
break;
case 180:
exifOrien = EXIF_ROTATE_CAMERA_CW180;
break;
case 270:
exifOrien = EXIF_ROTATE_CAMERA_CW270;
break;
}
createEntry(exifData, EXIF_IFD_0, EXIF_TAG_ORIENTATION, exifOrien);
}
// Focal length
if (getCameraParam(params,
CameraParameters::KEY_FOCAL_LENGTH,
&floatValue)) {
createEntry(exifData, EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, floatValue);
}
// GPS latitude and reference, reference indicates sign, store unsigned
if (getCameraParam(params,
CameraParameters::KEY_GPS_LATITUDE,
&floatValue)) {
convertGpsCoordinate(floatValue, &triplet);
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_LATITUDE, triplet);
const char* ref = floatValue < 0.0f ? "S" : "N";
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_LATITUDE_REF, ref);
}
// GPS longitude and reference, reference indicates sign, store unsigned
if (getCameraParam(params,
CameraParameters::KEY_GPS_LONGITUDE,
&floatValue)) {
convertGpsCoordinate(floatValue, &triplet);
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_LONGITUDE, triplet);
const char* ref = floatValue < 0.0f ? "W" : "E";
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_LONGITUDE_REF, ref);
}
// GPS altitude and reference, reference indicates sign, store unsigned
if (getCameraParam(params,
CameraParameters::KEY_GPS_ALTITUDE,
&floatValue)) {
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_ALTITUDE,
static_cast<float>(fabs(floatValue)));
// 1 indicates below sea level, 0 indicates above sea level
uint8_t ref = floatValue < 0.0f ? 1 : 0;
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_ALTITUDE_REF, ref);
}
// GPS timestamp and datestamp
int64_t timestamp = 0;
if (getCameraParam(params,
CameraParameters::KEY_GPS_TIMESTAMP,
&timestamp)) {
std::string date;
if (convertTimestampToTimeAndDate(timestamp, &triplet, &date)) {
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_TIME_STAMP,
triplet, 1.0f);
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_DATE_STAMP,
date.c_str());
}
}
// GPS processing method
if (getCameraParam(params,
CameraParameters::KEY_GPS_PROCESSING_METHOD,
&stringValue)) {
std::vector<unsigned char> data;
// Because this is a tag with an undefined format it has to be prefixed
// with the encoding type. Insert an ASCII prefix first, then the
// actual string. Undefined tags do not have to be null terminated.
data.insert(data.end(),
std::begin(kAsciiPrefix),
std::end(kAsciiPrefix));
data.insert(data.end(), stringValue, stringValue + strlen(stringValue));
createEntry(exifData, EXIF_IFD_GPS, EXIF_TAG_GPS_PROCESSING_METHOD,
&data[0], data.size());
}
return exifData;
}
void freeExifData(ExifData* exifData) {
exif_data_free(exifData);
}
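/* A minimal usage sketch; the parameter values below are illustrative only:
 *
 *   CameraParameters params;
 *   params.setPictureSize(640, 480);
 *   params.set(CameraParameters::KEY_GPS_LATITUDE, "37.7749");
 *   ExifData* exif = createExifData(params);
 *   // ... attach |exif| to a JPEG, e.g. via NV21JpegCompressor ...
 *   freeExifData(exif);
 */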
} // namespace android

View file

@ -0,0 +1,43 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GOLDFISH_CAMERA_EXIF_H
#define GOLDFISH_CAMERA_EXIF_H
struct _ExifData;
typedef struct _ExifData ExifData;
// CameraParameters is now declared in a HAL name space,
// ::android::hardware::camera::common::V1_0::helper. So remember to include
// its declaration in <CameraParameters.h> before this header.
namespace android {
//class CameraParameters;
/* Create an EXIF data structure based on camera parameters. This includes
* things like GPS information that has been set by the camera client.
*/
ExifData* createExifData(const CameraParameters& parameters);
/* Free EXIF data created in the createExifData call */
void freeExifData(ExifData* exifData);
} // namespace android
#endif // GOLDFISH_CAMERA_EXIF_H

View file

@ -0,0 +1,43 @@
#ifndef EMU_CAMERA_GRALLOC_MODULE_H
#define EMU_CAMERA_GRALLOC_MODULE_H
#include <cutils/log.h>
#include <hardware/gralloc.h>
class GrallocModule
{
public:
static GrallocModule &getInstance() {
static GrallocModule instance;
return instance;
}
int lock(buffer_handle_t handle,
int usage, int l, int t, int w, int h, void **vaddr) {
return mModule->lock(mModule, handle, usage, l, t, w, h, vaddr);
}
#ifdef GRALLOC_MODULE_API_VERSION_0_2
int lock_ycbcr(buffer_handle_t handle,
int usage, int l, int t, int w, int h,
struct android_ycbcr *ycbcr) {
return mModule->lock_ycbcr(mModule, handle, usage, l, t, w, h, ycbcr);
}
#endif
int unlock(buffer_handle_t handle) {
return mModule->unlock(mModule, handle);
}
private:
GrallocModule() {
const hw_module_t *module = NULL;
int ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module);
if (ret) {
ALOGE("%s: Failed to get gralloc module: %d", __FUNCTION__, ret);
}
mModule = reinterpret_cast<const gralloc_module_t*>(module);
}
const gralloc_module_t *mModule;
};
#endif

View file

@ -0,0 +1,94 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class NV21JpegCompressor that encapsulates a
* converter between NV21 and JPEG formats.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_JPEG"
#include <cutils/log.h>
#include <assert.h>
#include <dlfcn.h>
#include "JpegCompressor.h"
namespace android {
void* NV21JpegCompressor::mDl = NULL;
static void* getSymbol(void* dl, const char* signature) {
void* res = dlsym(dl, signature);
assert (res != NULL);
return res;
}
typedef void (*InitFunc)(JpegStub* stub);
typedef void (*CleanupFunc)(JpegStub* stub);
typedef int (*CompressFunc)(JpegStub* stub, const void* image,
int width, int height, int quality, ExifData* exifData);
typedef void (*GetCompressedImageFunc)(JpegStub* stub, void* buff);
typedef size_t (*GetCompressedSizeFunc)(JpegStub* stub);
NV21JpegCompressor::NV21JpegCompressor()
{
const char dlName[] = "/vendor/lib/hw/camera.goldfish.jpeg.so";
if (mDl == NULL) {
mDl = dlopen(dlName, RTLD_NOW);
}
assert(mDl != NULL);
InitFunc f = (InitFunc)getSymbol(mDl, "JpegStub_init");
(*f)(&mStub);
}
NV21JpegCompressor::~NV21JpegCompressor()
{
CleanupFunc f = (CleanupFunc)getSymbol(mDl, "JpegStub_cleanup");
(*f)(&mStub);
}
/****************************************************************************
* Public API
***************************************************************************/
status_t NV21JpegCompressor::compressRawImage(const void* image,
int width,
int height,
int quality,
ExifData* exifData)
{
CompressFunc f = (CompressFunc)getSymbol(mDl, "JpegStub_compress");
return (status_t)(*f)(&mStub, image, width, height, quality, exifData);
}
size_t NV21JpegCompressor::getCompressedSize()
{
GetCompressedSizeFunc f = (GetCompressedSizeFunc)getSymbol(mDl,
"JpegStub_getCompressedSize");
return (*f)(&mStub);
}
void NV21JpegCompressor::getCompressedImage(void* buff)
{
GetCompressedImageFunc f = (GetCompressedImageFunc)getSymbol(mDl,
"JpegStub_getCompressedImage");
(*f)(&mStub, buff);
}
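/* A typical call sequence, sketched with illustrative values (|nv21| and
 * |jpeg| are hypothetical caller-owned buffers, 640x480 at quality 90):
 *
 *   NV21JpegCompressor compressor;
 *   if (compressor.compressRawImage(nv21, 640, 480, 90, NULL) == NO_ERROR) {
 *       const size_t size = compressor.getCompressedSize();
 *       // ... allocate |jpeg| with at least |size| bytes ...
 *       compressor.getCompressedImage(jpeg);
 *   }
 */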
}; /* namespace android */

View file

@ -0,0 +1,93 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H
#define HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H
/*
* Contains declaration of a class NV21JpegCompressor that encapsulates a
* converter between NV21 and JPEG formats.
*/
#include "jpeg-stub/JpegStub.h"
#include <utils/threads.h>
namespace android {
/* Encapsulates a converter between NV21 and JPEG formats.
*/
class NV21JpegCompressor
{
public:
/* Constructs JpegCompressor instance. */
NV21JpegCompressor();
/* Destructs JpegCompressor instance. */
~NV21JpegCompressor();
/****************************************************************************
* Public API
***************************************************************************/
public:
/* Compresses raw NV21 image into a JPEG.
* The compressed image will be saved in mStream member of this class. Use
* getCompressedSize method to obtain buffer size of the compressed image,
* and getCompressedImage to copy out the compressed image.
* Param:
* image - Raw NV21 image.
* width, height - Image dimensions.
* quality - JPEG quality.
* exifData - an EXIF data structure to attach to the image, may be null
* Return:
* NO_ERROR on success, or an appropriate error status.
*
*/
status_t compressRawImage(const void* image,
int width,
int height,
int quality,
ExifData* exifData);
/* Get size of the compressed JPEG buffer.
* This method must be called only after a successful completion of
* compressRawImage call.
* Return:
* Size of the compressed JPEG buffer.
*/
size_t getCompressedSize();
/* Copies out compressed JPEG buffer.
* This method must be called only after a successful completion of
* compressRawImage call.
* Param:
* buff - Buffer where to copy the JPEG. Must be large enough to contain the
* entire image.
*/
void getCompressedImage(void* buff);
/****************************************************************************
* Class data
***************************************************************************/
private:
// library handle to dlopen
static void* mDl;
JpegStub mStub;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H */

View file

@ -0,0 +1,196 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of a class PreviewWindow that encapsulates
* functionality of a preview window set via set_preview_window camera HAL API.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Preview"
#include <cutils/log.h>
#include "EmulatedCameraDevice.h"
#include "PreviewWindow.h"
#include "GrallocModule.h"
namespace android {
PreviewWindow::PreviewWindow()
: mPreviewWindow(NULL),
mPreviewFrameWidth(0),
mPreviewFrameHeight(0),
mPreviewEnabled(false)
{
}
PreviewWindow::~PreviewWindow()
{
}
/****************************************************************************
* Camera API
***************************************************************************/
status_t PreviewWindow::setPreviewWindow(struct preview_stream_ops* window,
int preview_fps)
{
ALOGV("%s: current: %p -> new: %p", __FUNCTION__, mPreviewWindow, window);
status_t res = NO_ERROR;
Mutex::Autolock locker(&mObjectLock);
/* Reset preview info. */
mPreviewFrameWidth = mPreviewFrameHeight = 0;
if (window != NULL) {
/* The CPU will write each frame to the preview window buffer.
* Note that we delay setting preview window buffer geometry until
* frames start to come in. */
res = window->set_usage(window, GRALLOC_USAGE_SW_WRITE_OFTEN);
if (res != NO_ERROR) {
window = NULL;
res = -res; // set_usage returns a negative errno.
ALOGE("%s: Error setting preview window usage %d -> %s",
__FUNCTION__, res, strerror(res));
}
}
mPreviewWindow = window;
return res;
}
status_t PreviewWindow::startPreview()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
mPreviewEnabled = true;
return NO_ERROR;
}
void PreviewWindow::stopPreview()
{
ALOGV("%s", __FUNCTION__);
Mutex::Autolock locker(&mObjectLock);
mPreviewEnabled = false;
}
/****************************************************************************
* Public API
***************************************************************************/
void PreviewWindow::onNextFrameAvailable(nsecs_t timestamp,
EmulatedCameraDevice* camera_dev)
{
int res;
Mutex::Autolock locker(&mObjectLock);
if (!isPreviewEnabled() || mPreviewWindow == NULL) {
return;
}
/* Make sure that preview window dimensions are OK with the camera device */
if (adjustPreviewDimensions(camera_dev)) {
/* Need to set / adjust buffer geometry for the preview window.
* Note that in the emulator preview window uses only RGB for pixel
* formats. */
ALOGV("%s: Adjusting preview windows %p geometry to %dx%d",
__FUNCTION__, mPreviewWindow, mPreviewFrameWidth,
mPreviewFrameHeight);
res = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
mPreviewFrameWidth,
mPreviewFrameHeight,
HAL_PIXEL_FORMAT_RGBA_8888);
if (res != NO_ERROR) {
ALOGE("%s: Error in set_buffers_geometry %d -> %s",
__FUNCTION__, -res, strerror(-res));
return;
}
}
/*
* Push new frame to the preview window.
*/
/* Dequeue preview window buffer for the frame. */
buffer_handle_t* buffer = NULL;
int stride = 0;
res = mPreviewWindow->dequeue_buffer(mPreviewWindow, &buffer, &stride);
if (res != NO_ERROR || buffer == NULL) {
ALOGE("%s: Unable to dequeue preview window buffer: %d -> %s",
__FUNCTION__, -res, strerror(-res));
return;
}
/* Let the preview window lock the buffer. */
res = mPreviewWindow->lock_buffer(mPreviewWindow, buffer);
if (res != NO_ERROR) {
ALOGE("%s: Unable to lock preview window buffer: %d -> %s",
__FUNCTION__, -res, strerror(-res));
mPreviewWindow->cancel_buffer(mPreviewWindow, buffer);
return;
}
/* Now let the graphics framework lock the buffer and provide us with the
 * framebuffer data address. */
void* img = NULL;
res = GrallocModule::getInstance().lock(
*buffer, GRALLOC_USAGE_SW_WRITE_OFTEN,
0, 0, mPreviewFrameWidth, mPreviewFrameHeight, &img);
if (res != NO_ERROR) {
ALOGE("%s: gralloc.lock failure: %d -> %s",
__FUNCTION__, res, strerror(res));
mPreviewWindow->cancel_buffer(mPreviewWindow, buffer);
return;
}
/* Frames come in YV12/NV12/NV21 format. Since the preview window doesn't
 * support those formats, we need to obtain the frame in RGB32. */
res = camera_dev->getCurrentPreviewFrame(img);
if (res == NO_ERROR) {
/* Show it. */
mPreviewWindow->set_timestamp(mPreviewWindow, timestamp);
mPreviewWindow->enqueue_buffer(mPreviewWindow, buffer);
} else {
ALOGE("%s: Unable to obtain preview frame: %d", __FUNCTION__, res);
mPreviewWindow->cancel_buffer(mPreviewWindow, buffer);
}
GrallocModule::getInstance().unlock(*buffer);
}
/***************************************************************************
* Private API
**************************************************************************/
bool PreviewWindow::adjustPreviewDimensions(EmulatedCameraDevice* camera_dev)
{
/* Match the cached frame dimensions against the actual ones. */
if (mPreviewFrameWidth == camera_dev->getFrameWidth() &&
mPreviewFrameHeight == camera_dev->getFrameHeight()) {
/* They match. */
return false;
}
/* They don't match: adjust the cache. */
mPreviewFrameWidth = camera_dev->getFrameWidth();
mPreviewFrameHeight = camera_dev->getFrameHeight();
return true;
}
}; /* namespace android */

View file

@ -0,0 +1,149 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_PREVIEW_WINDOW_H
#define HW_EMULATOR_CAMERA_PREVIEW_WINDOW_H
/*
* Contains declaration of a class PreviewWindow that encapsulates functionality
* of a preview window set via set_preview_window camera HAL API.
*/
namespace android {
class EmulatedCameraDevice;
/* Encapsulates functionality of a preview window set via set_preview_window
* camera HAL API.
*
* Objects of this class are contained in EmulatedCamera objects, and handle
* relevant camera API callbacks.
*/
class PreviewWindow {
public:
/* Constructs PreviewWindow instance. */
PreviewWindow();
/* Destructs PreviewWindow instance. */
~PreviewWindow();
/***************************************************************************
* Camera API
**************************************************************************/
public:
/* Actual handler for camera_device_ops_t::set_preview_window callback.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::set_preview_window callback.
* Param:
* window - Preview window to set. This parameter might be NULL, which
* indicates preview window reset.
* preview_fps - Preview's frame frequency. This parameter determines when
* a frame received via onNextFrameAvailable call will be pushed to
* the preview window. If 'window' parameter passed to this method is
* NULL, this parameter is ignored.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
status_t setPreviewWindow(struct preview_stream_ops* window,
int preview_fps);
/* Starts the preview.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::start_preview callback.
*/
status_t startPreview();
/* Stops the preview.
* This method is called by the containing emulated camera object when it is
* handling the camera_device_ops_t::stop_preview callback.
*/
void stopPreview();
/* Checks if preview is enabled. */
inline bool isPreviewEnabled()
{
return mPreviewEnabled;
}
/****************************************************************************
* Public API
***************************************************************************/
public:
/* Next frame is available in the camera device.
* This is a notification callback that is invoked by the camera device when
* a new frame is available. The frame is available through the |camera_dev|
* object. Remember to use an EmulatedCameraDevice::FrameLock object to
* protect access to the frame while using it.
* Note that most likely this method is called in context of a worker thread
* that camera device has created for frame capturing.
* Param:
* timestamp - Frame's timestamp.
* camera_dev - Camera device instance that delivered the frame.
*/
void onNextFrameAvailable(nsecs_t timestamp,
EmulatedCameraDevice* camera_dev);
/***************************************************************************
* Private API
**************************************************************************/
protected:
/* Adjusts cached dimensions of the preview window frame according to the
* frame dimensions used by the camera device.
*
* When preview is started, the dimensions of the frames that are going to
 * be displayed are not yet known, and they may even change on the fly. So,
 * in order to always stay in sync with the frame dimensions, this method is
 * called for each frame passed to the onNextFrameAvailable method, and it
 * adjusts the frame dimensions used by the preview window accordingly.
 * Note that this method must be called while the object is locked.
* Param:
* camera_dev - Camera device providing the frames displayed in the preview
* window.
* Return:
* true if cached dimensions have been adjusted, or false if cached
* dimensions match device's frame dimensions.
*/
bool adjustPreviewDimensions(EmulatedCameraDevice* camera_dev);
/***************************************************************************
* Data members
**************************************************************************/
protected:
/* Locks this instance for data changes. */
Mutex mObjectLock;
/* Preview window instance. */
preview_stream_ops* mPreviewWindow;
/*
* Cached preview window frame dimensions.
*/
int mPreviewFrameWidth;
int mPreviewFrameHeight;
/* Preview status. */
bool mPreviewEnabled;
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_PREVIEW_WINDOW_H */

View file

@ -0,0 +1,560 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Contains implementation of classes that encapsulate connection to camera
* services in the emulator via qemu pipe.
*/
#define LOG_NDEBUG 1
#define LOG_TAG "EmulatedCamera_QemuClient"
#include <cutils/log.h>
#include "EmulatedCamera.h"
#include "QemuClient.h"
#define LOG_QUERIES 0
#if LOG_QUERIES
#define LOGQ(...) ALOGD(__VA_ARGS__)
#else
#define LOGQ(...) (void(0))
#endif // LOG_QUERIES
#define QEMU_PIPE_DEBUG LOGQ
#include "qemu_pipe.h"
namespace android {
/****************************************************************************
* Qemu query
***************************************************************************/
QemuQuery::QemuQuery()
: mQuery(mQueryPrealloc),
mQueryDeliveryStatus(NO_ERROR),
mReplyBuffer(NULL),
mReplyData(NULL),
mReplySize(0),
mReplyDataSize(0),
mReplyStatus(0)
{
*mQuery = '\0';
}
QemuQuery::QemuQuery(const char* query_string)
: mQuery(mQueryPrealloc),
mQueryDeliveryStatus(NO_ERROR),
mReplyBuffer(NULL),
mReplyData(NULL),
mReplySize(0),
mReplyDataSize(0),
mReplyStatus(0)
{
mQueryDeliveryStatus = QemuQuery::createQuery(query_string, NULL);
}
QemuQuery::QemuQuery(const char* query_name, const char* query_param)
: mQuery(mQueryPrealloc),
mQueryDeliveryStatus(NO_ERROR),
mReplyBuffer(NULL),
mReplyData(NULL),
mReplySize(0),
mReplyDataSize(0),
mReplyStatus(0)
{
mQueryDeliveryStatus = QemuQuery::createQuery(query_name, query_param);
}
QemuQuery::~QemuQuery()
{
QemuQuery::resetQuery();
}
status_t QemuQuery::createQuery(const char* name, const char* param)
{
/* Reset from the previous use. */
resetQuery();
/* Query name cannot be NULL or an empty string. */
if (name == NULL || *name == '\0') {
ALOGE("%s: NULL or an empty string is passed as query name.",
__FUNCTION__);
mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
const size_t name_len = strlen(name);
const size_t param_len = (param != NULL) ? strlen(param) : 0;
const size_t required = strlen(name) + (param_len ? (param_len + 2) : 1);
if (required > sizeof(mQueryPrealloc)) {
/* Preallocated buffer was too small. Allocate a bigger query buffer. */
mQuery = new char[required];
if (mQuery == NULL) {
ALOGE("%s: Unable to allocate %zu bytes for query buffer",
__FUNCTION__, required);
mQueryDeliveryStatus = ENOMEM;
return ENOMEM;
}
}
/* At this point mQuery buffer is big enough for the query. */
if (param_len) {
sprintf(mQuery, "%s %s", name, param);
} else {
memcpy(mQuery, name, name_len + 1);
}
return NO_ERROR;
}
status_t QemuQuery::completeQuery(status_t status)
{
/* Save query completion status. */
mQueryDeliveryStatus = status;
if (mQueryDeliveryStatus != NO_ERROR) {
return mQueryDeliveryStatus;
}
/* Make sure the reply buffer contains at least 'ok' or 'ko'.
 * Note that the 'ok'/'ko' prefix is always 3 characters long: if there is
 * more data in the reply, that data is separated from the prefix with a ':'.
 * If there is no more data in the reply, the prefix is zero-terminated, and
 * the terminator is included in the reply. */
if (mReplyBuffer == NULL || mReplySize < 3) {
ALOGE("%s: Invalid reply to the query", __FUNCTION__);
mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
/* Check the reply status. */
if (!memcmp(mReplyBuffer, "ok", 2)) {
mReplyStatus = 1;
} else if (!memcmp(mReplyBuffer, "ko", 2)) {
mReplyStatus = 0;
} else {
ALOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
/* See if there is reply data that follows. */
if (mReplySize > 3) {
/* There are extra data. Make sure they are separated from the status
* with a ':' */
if (mReplyBuffer[2] != ':') {
ALOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
mReplyData = mReplyBuffer + 3;
mReplyDataSize = mReplySize - 3;
} else {
/* Make sure reply buffer containing just 'ok'/'ko' ends with
* zero-terminator. */
if (mReplyBuffer[2] != '\0') {
ALOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
}
return NO_ERROR;
}
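/* To illustrate the reply shapes handled above (examples only): "ok\0" or
 * "ko\0" completes with no extra data, while "ok:<data>" or "ko:<data>"
 * completes with mReplyData pointing at <data>; any other prefix fails with
 * EINVAL. */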
void QemuQuery::resetQuery()
{
if (mQuery != NULL && mQuery != mQueryPrealloc) {
delete[] mQuery;
}
mQuery = mQueryPrealloc;
mQueryDeliveryStatus = NO_ERROR;
if (mReplyBuffer != NULL) {
free(mReplyBuffer);
mReplyBuffer = NULL;
}
mReplyData = NULL;
mReplySize = mReplyDataSize = 0;
mReplyStatus = 0;
}
/****************************************************************************
* Qemu client base
***************************************************************************/
/* Camera service name. */
const char QemuClient::mCameraServiceName[] = "camera";
QemuClient::QemuClient()
: mPipeFD(-1)
{
}
QemuClient::~QemuClient()
{
if (mPipeFD >= 0) {
close(mPipeFD);
}
}
/****************************************************************************
* Qemu client API
***************************************************************************/
status_t QemuClient::connectClient(const char* param)
{
ALOGV("%s: '%s'", __FUNCTION__, param ? param : "");
/* Make sure that client is not connected already. */
if (mPipeFD >= 0) {
ALOGE("%s: Qemu client is already connected", __FUNCTION__);
return EINVAL;
}
/* Select one of the two: 'factory', or 'emulated camera' service */
if (param == NULL || *param == '\0') {
/* No parameters: connect to the factory service. */
char pipe_name[512];
snprintf(pipe_name, sizeof(pipe_name), "qemud:%s",
mCameraServiceName);
mPipeFD = qemu_pipe_open(pipe_name);
} else {
/* Six characters for 'qemud:', one extra ':' separating the service name
 * from the parameters, plus the terminating zero. This is required by the
 * pipe protocol. */
char* connection_str = new char[strlen(mCameraServiceName) +
strlen(param) + 8];
sprintf(connection_str, "qemud:%s:%s", mCameraServiceName, param);
mPipeFD = qemu_pipe_open(connection_str);
delete[] connection_str;
}
if (mPipeFD < 0) {
ALOGE("%s: Unable to connect to the camera service '%s': %s",
__FUNCTION__, param ? param : "Factory", strerror(errno));
return errno ? errno : EINVAL;
}
return NO_ERROR;
}
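/* For instance (with an illustrative parameter value): connecting with
 * param "name=front" opens the pipe "qemud:camera:name=front", while a NULL
 * param opens the factory service pipe "qemud:camera". */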
void QemuClient::disconnectClient()
{
ALOGV("%s", __FUNCTION__);
if (mPipeFD >= 0) {
close(mPipeFD);
mPipeFD = -1;
}
}
status_t QemuClient::sendMessage(const void* data, size_t data_size)
{
if (mPipeFD < 0) {
ALOGE("%s: Qemu client is not connected", __FUNCTION__);
return EINVAL;
}
const size_t written = TEMP_FAILURE_RETRY(write(mPipeFD, data, data_size));
if (written == data_size) {
return NO_ERROR;
} else {
ALOGE("%s: Error sending data via qemu pipe: '%s'",
__FUNCTION__, strerror(errno));
return errno ? errno : EIO;
}
}
status_t QemuClient::receiveMessage(void** data, size_t* data_size)
{
*data = NULL;
*data_size = 0;
if (mPipeFD < 0) {
ALOGE("%s: Qemu client is not connected", __FUNCTION__);
return EINVAL;
}
/* The service replies to a query by sending the payload size first, and
 * then the payload itself. Note that the payload size is sent as a string
 * of 8 characters representing the payload size in hexadecimal. Note also
 * that the string doesn't contain a zero-terminator. */
size_t payload_size;
char payload_size_str[9];
int rd_res = TEMP_FAILURE_RETRY(read(mPipeFD, payload_size_str, 8));
if (rd_res != 8) {
ALOGE("%s: Unable to obtain payload size: %s",
__FUNCTION__, strerror(errno));
return errno ? errno : EIO;
}
/* Convert payload size. */
errno = 0;
payload_size_str[8] = '\0';
payload_size = strtol(payload_size_str, NULL, 16);
if (errno) {
ALOGE("%s: Invalid payload size '%s'", __FUNCTION__, payload_size_str);
return EIO;
}
/* Allocate payload data buffer, and read the payload there. */
*data = malloc(payload_size);
if (*data == NULL) {
ALOGE("%s: Unable to allocate %zu bytes payload buffer",
__FUNCTION__, payload_size);
return ENOMEM;
}
rd_res = TEMP_FAILURE_RETRY(read(mPipeFD, *data, payload_size));
if (static_cast<size_t>(rd_res) == payload_size) {
*data_size = payload_size;
return NO_ERROR;
} else {
ALOGE("%s: Read size %d doesnt match expected payload size %zu: %s",
__FUNCTION__, rd_res, payload_size, strerror(errno));
free(*data);
*data = NULL;
return errno ? errno : EIO;
}
}
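/* As an example of the wire format parsed above: a reply carrying 16
 * payload bytes arrives as the 8 hex characters "00000010" immediately
 * followed by the 16 payload bytes. */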
status_t QemuClient::doQuery(QemuQuery* query)
{
/* Make sure that query has been successfully constructed. */
if (query->mQueryDeliveryStatus != NO_ERROR) {
ALOGE("%s: Query is invalid", __FUNCTION__);
return query->mQueryDeliveryStatus;
}
LOGQ("Send query '%s'", query->mQuery);
/* Send the query. */
status_t res = sendMessage(query->mQuery, strlen(query->mQuery) + 1);
if (res == NO_ERROR) {
/* Read the response. */
res = receiveMessage(reinterpret_cast<void**>(&query->mReplyBuffer),
&query->mReplySize);
if (res == NO_ERROR) {
LOGQ("Response to query '%s': Status = '%.2s', %d bytes in response",
query->mQuery, query->mReplyBuffer, query->mReplySize);
} else {
ALOGE("%s Response to query '%s' has failed: %s",
__FUNCTION__, query->mQuery, strerror(res));
}
} else {
ALOGE("%s: Send query '%s' failed: %s",
__FUNCTION__, query->mQuery, strerror(res));
}
/* Complete the query, and return its completion handling status. */
const status_t res1 = query->completeQuery(res);
ALOGE_IF(res1 != NO_ERROR && res1 != res,
"%s: Error %d in query '%s' completion",
__FUNCTION__, res1, query->mQuery);
return res1;
}
/****************************************************************************
* Qemu client for the 'factory' service.
***************************************************************************/
/*
* Factory service queries.
*/
/* Queries list of cameras connected to the host. */
const char FactoryQemuClient::mQueryList[] = "list";
FactoryQemuClient::FactoryQemuClient()
: QemuClient()
{
}
FactoryQemuClient::~FactoryQemuClient()
{
}
status_t FactoryQemuClient::listCameras(char** list)
{
ALOGV("%s", __FUNCTION__);
QemuQuery query(mQueryList);
if (doQuery(&query) || !query.isQuerySucceeded()) {
ALOGE("%s: List cameras query failed: %s", __FUNCTION__,
query.mReplyData ? query.mReplyData : "No error message");
return query.getCompletionStatus();
}
/* Make sure there is a list returned. */
if (query.mReplyDataSize == 0) {
ALOGE("%s: No camera list is returned.", __FUNCTION__);
return EINVAL;
}
/* Copy the list over. */
*list = (char*)malloc(query.mReplyDataSize);
if (*list != NULL) {
memcpy(*list, query.mReplyData, query.mReplyDataSize);
ALOGD("Emulated camera list: %s", *list);
return NO_ERROR;
} else {
ALOGE("%s: Unable to allocate %zu bytes",
__FUNCTION__, query.mReplyDataSize);
return ENOMEM;
}
}
/****************************************************************************
* Qemu client for an 'emulated camera' service.
***************************************************************************/
/*
* Emulated camera queries
*/
/* Connect to the camera device. */
const char CameraQemuClient::mQueryConnect[] = "connect";
/* Disconnect from the camera device. */
const char CameraQemuClient::mQueryDisconnect[] = "disconnect";
/* Start capturing video from the camera device. */
const char CameraQemuClient::mQueryStart[] = "start";
/* Stop capturing video from the camera device. */
const char CameraQemuClient::mQueryStop[] = "stop";
/* Get next video frame from the camera device. */
const char CameraQemuClient::mQueryFrame[] = "frame";
CameraQemuClient::CameraQemuClient()
: QemuClient()
{
}
CameraQemuClient::~CameraQemuClient()
{
}
status_t CameraQemuClient::queryConnect()
{
ALOGV("%s", __FUNCTION__);
QemuQuery query(mQueryConnect);
doQuery(&query);
const status_t res = query.getCompletionStatus();
ALOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
"No error message");
return res;
}
status_t CameraQemuClient::queryDisconnect()
{
ALOGV("%s", __FUNCTION__);
QemuQuery query(mQueryDisconnect);
doQuery(&query);
const status_t res = query.getCompletionStatus();
ALOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
"No error message");
return res;
}
status_t CameraQemuClient::queryStart(uint32_t pixel_format,
int width,
int height)
{
ALOGV("%s", __FUNCTION__);
char query_str[256];
snprintf(query_str, sizeof(query_str), "%s dim=%dx%d pix=%d",
mQueryStart, width, height, pixel_format);
QemuQuery query(query_str);
doQuery(&query);
const status_t res = query.getCompletionStatus();
ALOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
"No error message");
return res;
}
status_t CameraQemuClient::queryStop()
{
ALOGV("%s", __FUNCTION__);
QemuQuery query(mQueryStop);
doQuery(&query);
const status_t res = query.getCompletionStatus();
ALOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
"No error message");
return res;
}
status_t CameraQemuClient::queryFrame(void* vframe,
void* pframe,
size_t vframe_size,
size_t pframe_size,
float r_scale,
float g_scale,
float b_scale,
float exposure_comp)
{
ALOGV("%s", __FUNCTION__);
char query_str[256];
snprintf(query_str, sizeof(query_str), "%s video=%zu preview=%zu whiteb=%g,%g,%g expcomp=%g",
mQueryFrame, (vframe && vframe_size) ? vframe_size : 0,
(pframe && pframe_size) ? pframe_size : 0, r_scale, g_scale, b_scale,
exposure_comp);
QemuQuery query(query_str);
doQuery(&query);
const status_t res = query.getCompletionStatus();
    if (res != NO_ERROR) {
ALOGE("%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
"No error message");
return res;
}
/* Copy requested frames. */
size_t cur_offset = 0;
const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);
/* Video frame is always first. */
if (vframe != NULL && vframe_size != 0) {
/* Make sure that video frame is in. */
if ((query.mReplyDataSize - cur_offset) >= vframe_size) {
memcpy(vframe, frame, vframe_size);
cur_offset += vframe_size;
} else {
ALOGE("%s: Reply %zu bytes is to small to contain %zu bytes video frame",
__FUNCTION__, query.mReplyDataSize - cur_offset, vframe_size);
return EINVAL;
}
}
if (pframe != NULL && pframe_size != 0) {
/* Make sure that preview frame is in. */
if ((query.mReplyDataSize - cur_offset) >= pframe_size) {
memcpy(pframe, frame + cur_offset, pframe_size);
cur_offset += pframe_size;
} else {
ALOGE("%s: Reply %zu bytes is to small to contain %zu bytes preview frame",
__FUNCTION__, query.mReplyDataSize - cur_offset, pframe_size);
return EINVAL;
}
}
return NO_ERROR;
}
}; /* namespace android */

View file

@ -0,0 +1,435 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_QEMU_CLIENT_H
#define HW_EMULATOR_CAMERA_QEMU_CLIENT_H
/*
* Contains declaration of classes that encapsulate connection to camera services
* in the emulator via qemu pipe.
*/
namespace android {
/****************************************************************************
* Qemu query
***************************************************************************/
/* Encapsulates a query to the emulator.
* Guest exchanges data with the emulator via queries sent over the qemu pipe.
 * The queries as well as the replies to them are all strings (except for the
 * 'frame' query, where the reply is a framebuffer).
* Each query is formatted as such:
*
* "<query name>[ <parameters>]",
*
* where <query name> is a string representing query name, and <parameters> are
* optional parameters for the query. If parameters are present, they must be
* separated from the query name with a single space, and they must be formatted
* as such:
*
* "<name1>=<value1> <name2>=<value2> ... <nameN>=<valueN>"
*
* I.e.:
* - Every parameter must have a name, and a value.
* - Name and value must be separated with '='.
* - No spaces are allowed around '=' separating name and value.
* - Parameters must be separated with a single space character.
* - No '=' character is allowed in name and in value.
*
* There are certain restrictions on strings used in the query:
* - Spaces are allowed only as separators.
* - '=' are allowed only to divide parameter names from parameter values.
*
* Emulator replies to each query in two chunks:
* - 8 bytes encoding the payload size as a string containing hexadecimal
* representation of the payload size value. This is done in order to simplify
* dealing with different endianness on the host, and on the guest.
* - Payload, whose size is defined by the first chunk.
*
* Every payload always begins with two characters, encoding the result of the
* query:
* - 'ok' Encoding the success
* - 'ko' Encoding a failure.
 * After that, the payload may carry optional data. If it does, a ':' character
 * separates the data from the query result. If the payload carries only the
 * result, it ends with a zero-terminator. So the 'ok'/'ko' prefix is always 3
 * bytes long: it includes either a zero-terminator, if there is no data, or a
 * ':' separator.
*/
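/* Illustrative example of a single exchange (the values below are made up for
 * clarity, not taken from a real trace):
 *
 *   Guest sends:     "start dim=640x480 pix=842094169"
 *   Emulator sends:  "00000003"   - 8 hex chars: the payload is 3 bytes long
 *                    "ok\0"       - result prefix with a zero-terminator
 *
 * A failing query carrying an error message would instead have a payload such
 * as "ko:Invalid parameter", where ':' separates the result from the data.
 */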
class QemuQuery {
public:
/* Constructs an uninitialized QemuQuery instance. */
QemuQuery();
/* Constructs and initializes QemuQuery instance for a query.
* Param:
* query_string - Query string. This constructor can also be used to
* construct a query that doesn't have parameters. In this case query
* name can be passed as a parameter here.
*/
explicit QemuQuery(const char* query_string);
/* Constructs and initializes QemuQuery instance for a query with parameters.
* Param:
* query_name - Query name.
* query_param - Query parameters. Can be NULL.
*/
QemuQuery(const char* query_name, const char* query_param);
/* Destructs QemuQuery instance. */
~QemuQuery();
/****************************************************************************
* Public API
***************************************************************************/
/* Creates new query.
* Note: this method will reset this instance prior to creating a new query
* in order to discard possible "leftovers" from the previous query.
* Param:
* query_name - Query name.
* query_param - Query parameters. Can be NULL.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
status_t createQuery(const char* name, const char* param);
/* Completes the query after a reply from the emulator.
* This method will parse the reply buffer, and calculate the final query
* status, which depends not only on the transport success / failure, but
* also on 'ok' / 'ko' in the reply buffer.
* Param:
     *  status - Query delivery status. This status doesn't necessarily reflect
     *      the final query status (which is defined by the 'ok'/'ko' prefix in
     *      the reply buffer). This status simply states whether or not the
     *      query has been sent, and a reply has been received successfully. However, if
* this status indicates a failure, it means that the entire query has
* failed.
* Return:
* NO_ERROR on success, or an appropriate error status on failure. Note that
* status returned here just signals whether or not the method has succeeded.
* Use isQuerySucceeded() / getCompletionStatus() methods of this class to
* check the final query status.
*/
status_t completeQuery(status_t status);
/* Resets the query from a previous use. */
void resetQuery();
/* Checks if query has succeeded.
* Note that this method must be called after completeQuery() method of this
* class has been executed.
*/
inline bool isQuerySucceeded() const {
return mQueryDeliveryStatus == NO_ERROR && mReplyStatus != 0;
}
/* Gets final completion status of the query.
* Note that this method must be called after completeQuery() method of this
* class has been executed.
* Return:
* NO_ERROR if query has succeeded, or an appropriate error status on query
* failure.
*/
inline status_t getCompletionStatus() const {
if (mQueryDeliveryStatus == NO_ERROR) {
if (mReplyStatus) {
return NO_ERROR;
} else {
return EINVAL;
}
} else {
return mQueryDeliveryStatus;
}
}
/****************************************************************************
 * Public data members
***************************************************************************/
public:
/* Query string. */
char* mQuery;
/* Query delivery status. */
status_t mQueryDeliveryStatus;
/* Reply buffer */
char* mReplyBuffer;
/* Reply data (past 'ok'/'ko'). If NULL, there were no data in reply. */
char* mReplyData;
/* Reply buffer size. */
size_t mReplySize;
/* Reply data size. */
size_t mReplyDataSize;
/* Reply status: 1 - ok, 0 - ko. */
int mReplyStatus;
/****************************************************************************
 * Protected data members
***************************************************************************/
protected:
/* Preallocated buffer for small queries. */
char mQueryPrealloc[256];
};
/****************************************************************************
* Qemu client base
***************************************************************************/
/* Encapsulates a connection to the 'camera' service in the emulator via qemu
* pipe.
*/
class QemuClient {
public:
/* Constructs QemuClient instance. */
QemuClient();
/* Destructs QemuClient instance. */
virtual ~QemuClient();
/****************************************************************************
* Qemu client API
***************************************************************************/
public:
/* Connects to the 'camera' service in the emulator via qemu pipe.
* Param:
* param - Parameters to pass to the camera service. There are two types of
* camera services implemented by the emulator. The first one is a
     *      'camera factory' type of service that provides a list of cameras
     *      connected to the host. The other is an 'emulated camera' type of
     *      service that provides an interface to a camera connected to the
     *      host. At connection time the emulator distinguishes between the two
     *      by looking at the connection parameters: no parameters means a
     *      connection to the 'factory' service, while a connection with
     *      parameters means a connection to an 'emulated camera' service,
     *      where the camera is identified by one of the connection parameters.
     *      So, passing NULL or an empty string to this method will establish a
     *      connection with the 'factory' service, while a non-empty string
     *      will establish a connection with an 'emulated camera' service.
     *      Parameters defining the emulated camera must be formatted as such:
*
* "name=<device name> [inp_channel=<input channel #>]",
*
* where 'device name' is a required parameter defining name of the
* camera device, and 'input channel' is an optional parameter (positive
* integer), defining the input channel to use on the camera device.
* Note that device name passed here must have been previously obtained
* from the factory service using 'list' query.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
virtual status_t connectClient(const char* param);
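    /* For example (the device name here is hypothetical):
     *
     *   connectClient(NULL);                          // 'factory' service
     *   connectClient("name=webcam0");                // 'emulated camera'
     *   connectClient("name=webcam0 inp_channel=1");  // with input channel
     */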
/* Disconnects from the service. */
virtual void disconnectClient();
/* Sends data to the service.
* Param:
* data, data_size - Data to send.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
virtual status_t sendMessage(const void* data, size_t data_size);
/* Receives data from the service.
* This method assumes that data to receive will come in two chunks: 8
* characters encoding the payload size in hexadecimal string, followed by
     * the payload (if any).
* This method will allocate data buffer where to receive the response.
* Param:
* data - Upon success contains address of the allocated data buffer with
* the data received from the service. The caller is responsible for
* freeing allocated data buffer.
* data_size - Upon success contains size of the data received from the
* service.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
virtual status_t receiveMessage(void** data, size_t* data_size);
/* Sends a query, and receives a response from the service.
* Param:
* query - Query to send to the service. When this method returns, the query
* is completed, and all its relevant data members are properly initialized.
* Return:
* NO_ERROR on success, or an appropriate error status on failure. Note that
* status returned here is not the final query status. Use isQuerySucceeded(),
* or getCompletionStatus() method on the query object to see if it has
* succeeded. However, if this method returns a failure, it means that the
* query has failed, and there is no guarantee that its data members are
* properly initialized (except for the 'mQueryDeliveryStatus', which is
* always in the proper state).
*/
virtual status_t doQuery(QemuQuery* query);
/****************************************************************************
* Data members
***************************************************************************/
protected:
/* Qemu pipe handle. */
int mPipeFD;
private:
/* Camera service name. */
static const char mCameraServiceName[];
};
/****************************************************************************
* Qemu client for the 'factory' service.
***************************************************************************/
/* Encapsulates QemuClient for the 'factory' service. */
class FactoryQemuClient : public QemuClient {
public:
/* Constructs FactoryQemuClient instance. */
FactoryQemuClient();
/* Destructs FactoryQemuClient instance. */
~FactoryQemuClient();
/****************************************************************************
* Public API
***************************************************************************/
public:
/* Lists camera devices connected to the host.
* Param:
* list - Upon success contains a list of cameras connected to the host. The
* list returned here is represented as a string, containing multiple
* lines separated with '\n', where each line represents a camera. Each
* camera line is formatted as such:
*
* "name=<device name> channel=<num> pix=<num> framedims=<dimensions>\n"
*
* Where:
* - 'name' is the name of the camera device attached to the host. This
* name must be used for subsequent connection to the 'emulated camera'
* service for that camera.
* - 'channel' - input channel number (positive int) to use to communicate
* with the camera.
* - 'pix' - pixel format (a "fourcc" uint), chosen for the video frames
* by the camera service.
* - 'framedims' contains a list of frame dimensions supported by the
     *        camera for the chosen pixel format. Each entry in the list is of the form
* '<width>x<height>', where 'width' and 'height' are numeric values
* for width and height of a supported frame dimension. Entries in
* this list are separated with ',' with no spaces between the entries.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
status_t listCameras(char** list);
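    /* For example, a host with a single camera might produce a list such as
     * (illustrative values only):
     *
     *   "name=webcam0 channel=0 pix=842094169 framedims=640x480,352x288\n"
     */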
/****************************************************************************
* Names of the queries available for the emulated camera factory.
***************************************************************************/
private:
/* List cameras connected to the host. */
static const char mQueryList[];
};
/****************************************************************************
* Qemu client for an 'emulated camera' service.
***************************************************************************/
/* Encapsulates QemuClient for an 'emulated camera' service.
*/
class CameraQemuClient : public QemuClient {
public:
/* Constructs CameraQemuClient instance. */
CameraQemuClient();
/* Destructs CameraQemuClient instance. */
~CameraQemuClient();
/****************************************************************************
* Public API
***************************************************************************/
public:
/* Queries camera connection.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
status_t queryConnect();
/* Queries camera disconnection.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
status_t queryDisconnect();
/* Queries camera to start capturing video.
* Param:
* pixel_format - Pixel format that is used by the client to push video
* frames to the camera framework.
* width, height - Frame dimensions, requested by the framework.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
status_t queryStart(uint32_t pixel_format, int width, int height);
/* Queries camera to stop capturing video.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
status_t queryStop();
/* Queries camera for the next video frame.
* Param:
* vframe, vframe_size - Define buffer, allocated to receive a video frame.
* Any of these parameters can be 0, indicating that the caller is
* interested only in preview frame.
* pframe, pframe_size - Define buffer, allocated to receive a preview frame.
* Any of these parameters can be 0, indicating that the caller is
* interested only in video frame.
* r_scale, g_scale, b_scale - White balance scale.
     *  exposure_comp - Exposure compensation.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
status_t queryFrame(void* vframe,
void* pframe,
size_t vframe_size,
size_t pframe_size,
float r_scale,
float g_scale,
float b_scale,
float exposure_comp);
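    /* For example, a preview-only request could look like this (illustrative
     * buffer names and values):
     *
     *   queryFrame(NULL, previewBuf, 0, previewBufSize, 1.0, 1.0, 1.0, 0.0);
     */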
/****************************************************************************
* Names of the queries available for the emulated camera.
***************************************************************************/
private:
/* Connect to the camera. */
static const char mQueryConnect[];
/* Disconnect from the camera. */
static const char mQueryDisconnect[];
/* Start video capturing. */
static const char mQueryStart[];
/* Stop video capturing. */
static const char mQueryStop[];
/* Query frame(s). */
static const char mQueryFrame[];
};
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_QEMU_CLIENT_H */

View file

@ -0,0 +1,170 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Thumbnail.h"
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Thumbnail"
#include <cutils/log.h>
#include <libexif/exif-data.h>
#include <libyuv.h>
#include "JpegCompressor.h"
#include <vector>
/*
* The NV21 format is a YUV format with an 8-bit Y-component and the U and V
* components are stored as 8 bits each but they are shared between a block of
* 2x2 pixels. So when calculating bits per pixel the 16 bits of U and V are
* shared between 4 pixels leading to 4 bits of U and V per pixel. Together
 * with the 8 bits of Y this gives us 12 bits per pixel.
*
* The components are not grouped by pixels but separated into one Y-plane and
* one interleaved U and V-plane. The first half of the byte sequence is all of
* the Y data laid out in a linear fashion. After that the interleaved U and V-
* plane starts with one byte of V followed by one byte of U followed by one
* byte of V and so on. Each byte of U or V is associated with a 2x2 pixel block
* in a linear fashion.
*
* For an 8 by 4 pixel image the layout would be:
*
* +-----+-----+-----+-----+-----+-----+-----+-----+
* | Y0 | Y1 | Y2 | Y3 | Y4 | Y5 | Y6 | Y7 |
* +-----+-----+-----+-----+-----+-----+-----+-----+
* | Y8 | Y9 | Y10 | Y11 | Y12 | Y13 | Y14 | Y15 |
* +-----+-----+-----+-----+-----+-----+-----+-----+
* | Y16 | Y17 | Y18 | Y19 | Y20 | Y21 | Y22 | Y23 |
* +-----+-----+-----+-----+-----+-----+-----+-----+
* | Y24 | Y25 | Y26 | Y27 | Y28 | Y29 | Y30 | Y31 |
* +-----+-----+-----+-----+-----+-----+-----+-----+
* | V0 | U0 | V1 | U1 | V2 | U2 | V3 | U3 |
* +-----+-----+-----+-----+-----+-----+-----+-----+
* | V4 | U4 | V5 | U5 | V6 | U6 | V7 | U7 |
* +-----+-----+-----+-----+-----+-----+-----+-----+
*
* In this image V0 and U0 are the V and U components for the 2x2 block of
* pixels whose Y components are Y0, Y1, Y8 and Y9. V1 and U1 are matched with
* the Y components Y2, Y3, Y10, Y11, and so on for that row. For the next row
* of V and U the V4 and U4 components would be paired with Y16, Y17, Y24 and
* Y25.
*/
namespace android {
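// Illustrative sketch of the layout described above (not used by the code
// below; the function name is hypothetical): locate the V and U bytes for the
// pixel at (x, y) in a width x height NV21 buffer.
static inline void nv21ChromaOffsets(int x, int y, int width, int height,
                                     size_t* vOffset, size_t* uOffset) {
    // The interleaved V/U plane starts right after width * height bytes of Y.
    const size_t uvPlaneStart = static_cast<size_t>(width) * height;
    // Each V/U byte pair covers a 2x2 block of pixels.
    const size_t pairIndex =
            static_cast<size_t>(y / 2) * (width / 2) + (x / 2);
    *vOffset = uvPlaneStart + pairIndex * 2;      // V comes first in NV21
    *uOffset = uvPlaneStart + pairIndex * 2 + 1;  // then U
}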
static bool createRawThumbnail(const unsigned char* sourceImage,
int sourceWidth, int sourceHeight,
int thumbnailWidth, int thumbnailHeight,
std::vector<unsigned char>* thumbnail) {
    // Deinterleave the interleaved VU data into separate U and V planes,
    // because libyuv requires the planes to be separate when scaling
const size_t sourceUVPlaneSize = (sourceWidth * sourceHeight) / 4;
// Put both U and V planes in one buffer, one after the other, to reduce
// memory fragmentation and number of allocations
std::vector<unsigned char> sourcePlanes(sourceUVPlaneSize * 2);
const unsigned char* ySourcePlane = sourceImage;
unsigned char* uSourcePlane = &sourcePlanes[0];
unsigned char* vSourcePlane = &sourcePlanes[sourceUVPlaneSize];
for (size_t i = 0; i < sourceUVPlaneSize; ++i) {
vSourcePlane[i] = sourceImage[sourceWidth * sourceHeight + i * 2 + 0];
uSourcePlane[i] = sourceImage[sourceWidth * sourceHeight + i * 2 + 1];
}
// Create enough space in the output vector for the result
thumbnail->resize((thumbnailWidth * thumbnailHeight * 12) / 8);
    // The downscaled U and V planes will also be separate planes instead of
    // interleaved; allocate space for them here
const size_t destUVPlaneSize = (thumbnailWidth * thumbnailHeight) / 4;
std::vector<unsigned char> destPlanes(destUVPlaneSize * 2);
unsigned char* yDestPlane = &(*thumbnail)[0];
unsigned char* uDestPlane = &destPlanes[0];
unsigned char* vDestPlane = &destPlanes[destUVPlaneSize];
// The strides for the U and V planes are half the width because the U and V
// components are common to 2x2 pixel blocks
int result = libyuv::I420Scale(ySourcePlane, sourceWidth,
uSourcePlane, sourceWidth / 2,
vSourcePlane, sourceWidth / 2,
sourceWidth, sourceHeight,
yDestPlane, thumbnailWidth,
uDestPlane, thumbnailWidth / 2,
vDestPlane, thumbnailWidth / 2,
thumbnailWidth, thumbnailHeight,
libyuv::kFilterBilinear);
if (result != 0) {
ALOGE("Unable to create thumbnail, downscaling failed with error: %d",
result);
return false;
}
// Now we need to interleave the downscaled U and V planes into the
// output buffer to make it NV21 encoded
const size_t uvPlanesOffset = thumbnailWidth * thumbnailHeight;
for (size_t i = 0; i < destUVPlaneSize; ++i) {
(*thumbnail)[uvPlanesOffset + i * 2 + 0] = vDestPlane[i];
(*thumbnail)[uvPlanesOffset + i * 2 + 1] = uDestPlane[i];
}
return true;
}
bool createThumbnail(const unsigned char* sourceImage,
int sourceWidth, int sourceHeight,
int thumbWidth, int thumbHeight, int quality,
ExifData* exifData) {
if (thumbWidth <= 0 || thumbHeight <= 0) {
ALOGE("%s: Invalid thumbnail width=%d or height=%d, must be > 0",
__FUNCTION__, thumbWidth, thumbHeight);
return false;
}
// First downscale the source image into a thumbnail-sized raw image
std::vector<unsigned char> rawThumbnail;
if (!createRawThumbnail(sourceImage, sourceWidth, sourceHeight,
thumbWidth, thumbHeight, &rawThumbnail)) {
// The thumbnail function will log an appropriate error if needed
return false;
}
// And then compress it into JPEG format without any EXIF data
NV21JpegCompressor compressor;
status_t result = compressor.compressRawImage(&rawThumbnail[0],
thumbWidth, thumbHeight,
quality, nullptr /* EXIF */);
if (result != NO_ERROR) {
ALOGE("%s: Unable to compress thumbnail", __FUNCTION__);
return false;
}
// And finally put it in the EXIF data. This transfers ownership of the
// malloc'd memory to the EXIF data structure. As long as the EXIF data
    // structure is freed using the EXIF library, this memory will be freed too.
exifData->size = compressor.getCompressedSize();
exifData->data = reinterpret_cast<unsigned char*>(malloc(exifData->size));
if (exifData->data == nullptr) {
ALOGE("%s: Unable to allocate %u bytes of memory for thumbnail",
__FUNCTION__, exifData->size);
exifData->size = 0;
return false;
}
compressor.getCompressedImage(exifData->data);
return true;
}
} // namespace android

View file

@ -0,0 +1,37 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GOLDFISH_CAMERA_THUMBNAIL_H
#define GOLDFISH_CAMERA_THUMBNAIL_H
struct _ExifData;
typedef struct _ExifData ExifData;
namespace android {
/* Create a thumbnail from NV21 source data in |sourceImage| with the given
* dimensions. The resulting thumbnail is JPEG compressed and a pointer and size
* is placed in |exifData| which takes ownership of the allocated memory.
*/
bool createThumbnail(const unsigned char* sourceImage,
int sourceWidth, int sourceHeight,
int thumbnailWidth, int thumbnailHeight, int quality,
ExifData* exifData);
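/* Hedged usage sketch (the buffer name and dimensions are illustrative):
 *
 *   ExifData* exif = exif_data_new();
 *   if (createThumbnail(nv21Frame, 640, 480, 160, 120, 90, exif)) {
 *       // exif->data now holds the compressed thumbnail; freeing the
 *       // ExifData through the libexif API releases it as well.
 *   }
 */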
} // namespace android
#endif // GOLDFISH_CAMERA_THUMBNAIL_H

View file

@ -0,0 +1,92 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "WorkerThread.h"
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_WorkerThread"
#include <cutils/log.h>
#include <algorithm>
namespace android {
WorkerThread::WorkerThread(const char* threadName,
EmulatedCameraDevice* cameraDevice,
EmulatedCamera* cameraHAL)
: Thread(true), // Callbacks may involve Java calls.
mCameraDevice(cameraDevice),
mCameraHAL(cameraHAL),
mRunning(false),
mThreadName(threadName) {
}
status_t WorkerThread::startThread(bool oneBurst) {
ALOGV("Starting worker thread, oneBurst=%s", oneBurst ? "true" : "false");
mOneBurst = oneBurst;
{
Mutex::Autolock lock(mRunningMutex);
mRunning = true;
}
return run(mThreadName, ANDROID_PRIORITY_URGENT_DISPLAY, 0);
}
status_t WorkerThread::stopThread() {
ALOGV("%s: Stopping worker thread...", __FUNCTION__);
Mutex::Autolock lock(mRunningMutex);
mRunning = false;
mRunningCondition.signal();
return NO_ERROR;
}
status_t WorkerThread::wakeThread() {
ALOGV("%s: Waking emulated camera device's worker thread...", __FUNCTION__);
mRunningCondition.signal();
return NO_ERROR;
}
status_t WorkerThread::joinThread() {
return join();
}
status_t WorkerThread::readyToRun()
{
    return onThreadStart();
}
bool WorkerThread::threadLoop() {
if (inWorkerThread() && !mOneBurst) {
/* Only return true if we're running. If mRunning has been set to false
* we fall through to ensure that onThreadExit is called. */
Mutex::Autolock lock(mRunningMutex);
if (mRunning) {
return true;
}
}
onThreadExit();
ALOGV("%s: Exiting thread, mOneBurst=%s",
__FUNCTION__, mOneBurst ? "true" : "false");
return false;
}
} // namespace android

View file

@ -0,0 +1,108 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HW_EMULATOR_CAMERA_WORKER_THREAD_H
#define HW_EMULATOR_CAMERA_WORKER_THREAD_H
#include <utils/Thread.h>
namespace android {
class EmulatedCamera;
class EmulatedCameraDevice;
class WorkerThread : public Thread {
public:
WorkerThread(const char* threadName,
EmulatedCameraDevice* camera_dev,
EmulatedCamera* cameraHAL);
virtual ~WorkerThread() {}
/* Starts the thread
* Param:
     *  oneBurst - Controls how many times the thread loop should run. If
     *      this parameter is 'true', the thread routine will run only once.
     *      If this parameter is 'false', the thread routine will run until
     *      the stopThread method is called. See startWorkerThread for more
     *      info.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
status_t startThread(bool oneBurst);
    /* Stops the thread. This only requests that the thread exit; the method
     * returns right after the request has been made. Use joinThread to wait
     * for the thread to exit. */
status_t stopThread();
/* Wake a thread that's currently waiting to timeout or to be awoken */
status_t wakeThread();
/* Join the thread, waits until the thread exits before returning. */
status_t joinThread();
protected:
/* Perform whatever work should be done in the worker thread. A subclass
* needs to implement this.
* Return:
* true To continue thread loop, or false to exit the thread loop and
* terminate the thread.
*/
virtual bool inWorkerThread() = 0;
/* This provides an opportunity for a subclass to perform some operation
* when the thread starts. This is run on the newly started thread. If this
* returns an error the thread will exit and inWorkerThread will never be
* called.
*/
virtual status_t onThreadStart() { return NO_ERROR; }
/* This provides an opportunity for a subclass to perform some operation
* when the thread exits. This is run on the worker thread. By default this
* does nothing.
*/
virtual void onThreadExit() { }
/* Containing camera device object. */
EmulatedCameraDevice* mCameraDevice;
/* The camera HAL from the camera device object */
EmulatedCamera* mCameraHAL;
/* Controls number of times the thread loop runs.
* See startThread for more information. */
bool mOneBurst;
    /* Running condition and mutex; use these to sleep the thread. The
     * supporting functions use them to signal wake-ups and exits. */
Condition mRunningCondition;
Mutex mRunningMutex;
bool mRunning;
private:
    /* Overridden base class method.
     * It is overridden in order to provide one-time initialization just
* prior to starting the thread routine.
*/
status_t readyToRun() final;
/* Implements abstract method of the base Thread class. */
bool threadLoop() final;
const char* mThreadName;
};
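/* A minimal subclass sketch (illustrative only; the class name and body are
 * hypothetical):
 *
 *   class SampleWorker : public WorkerThread {
 *     public:
 *       SampleWorker(EmulatedCameraDevice* dev, EmulatedCamera* hal)
 *           : WorkerThread("Sample_Worker", dev, hal) {}
 *     protected:
 *       bool inWorkerThread() override {
 *           // Produce one frame here; returning true keeps the loop going
 *           // unless the thread was started with oneBurst == true.
 *           return true;
 *       }
 *   };
 */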
} // namespace android
#endif // HW_EMULATOR_CAMERA_WORKER_THREAD_H

View file

@ -0,0 +1,64 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This file includes various basic structures that are needed by multiple parts
* of the fake camera 2 implementation.
*/
#ifndef HW_EMULATOR_CAMERA2_BASE_H
#define HW_EMULATOR_CAMERA2_BASE_H
#include <hardware/camera2.h>
#include <utils/Vector.h>
namespace android {
/* Internal structure for passing buffers across threads */
struct StreamBuffer {
// Positive numbers are output streams
// Negative numbers are input reprocess streams
    // Zero is an auxiliary buffer
int streamId;
uint32_t width, height;
uint32_t format;
uint32_t dataSpace;
uint32_t stride;
buffer_handle_t *buffer;
uint8_t *img;
};
typedef Vector<StreamBuffer> Buffers;
struct Stream {
const camera2_stream_ops_t *ops;
uint32_t width, height;
int32_t format;
uint32_t stride;
};
struct ReprocessStream {
const camera2_stream_in_ops_t *ops;
uint32_t width, height;
int32_t format;
uint32_t stride;
// -1 if the reprocessing stream is independent
int32_t sourceStreamId;
};
} // namespace android
#endif

View file

@ -0,0 +1,292 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera2_JpegCompressor"
#include <utils/Log.h>
#include "JpegCompressor.h"
#include "../EmulatedFakeCamera2.h"
#include "../EmulatedFakeCamera3.h"
namespace android {
JpegCompressor::JpegCompressor():
Thread(false),
mIsBusy(false),
mSynchronous(false),
mBuffers(NULL),
mListener(NULL) {
}
JpegCompressor::~JpegCompressor() {
Mutex::Autolock lock(mMutex);
}
status_t JpegCompressor::reserve() {
Mutex::Autolock busyLock(mBusyMutex);
if (mIsBusy) {
ALOGE("%s: Already processing a buffer!", __FUNCTION__);
return INVALID_OPERATION;
}
mIsBusy = true;
return OK;
}
status_t JpegCompressor::start(Buffers *buffers, JpegListener *listener) {
if (listener == NULL) {
ALOGE("%s: NULL listener not allowed!", __FUNCTION__);
return BAD_VALUE;
}
Mutex::Autolock lock(mMutex);
{
Mutex::Autolock busyLock(mBusyMutex);
if (!mIsBusy) {
ALOGE("Called start without reserve() first!");
return INVALID_OPERATION;
}
mSynchronous = false;
mBuffers = buffers;
mListener = listener;
}
status_t res;
res = run("EmulatedFakeCamera2::JpegCompressor");
if (res != OK) {
ALOGE("%s: Unable to start up compression thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
delete mBuffers;
}
return res;
}
status_t JpegCompressor::compressSynchronous(Buffers *buffers) {
status_t res;
Mutex::Autolock lock(mMutex);
{
Mutex::Autolock busyLock(mBusyMutex);
if (mIsBusy) {
ALOGE("%s: Already processing a buffer!", __FUNCTION__);
return INVALID_OPERATION;
}
mIsBusy = true;
mSynchronous = true;
mBuffers = buffers;
}
res = compress();
cleanUp();
return res;
}
status_t JpegCompressor::cancel() {
requestExitAndWait();
return OK;
}
status_t JpegCompressor::readyToRun() {
return OK;
}
bool JpegCompressor::threadLoop() {
status_t res;
ALOGV("%s: Starting compression thread", __FUNCTION__);
res = compress();
mListener->onJpegDone(mJpegBuffer, res == OK);
cleanUp();
return false;
}
status_t JpegCompressor::compress() {
// Find source and target buffers. Assumes only one buffer matches
// each condition!
    mFoundJpeg = false;
    mFoundAux = false;
for (size_t i = 0; i < mBuffers->size(); i++) {
const StreamBuffer &b = (*mBuffers)[i];
if (b.format == HAL_PIXEL_FORMAT_BLOB) {
mJpegBuffer = b;
mFoundJpeg = true;
} else if (b.streamId <= 0) {
mAuxBuffer = b;
mFoundAux = true;
}
if (mFoundJpeg && mFoundAux) break;
}
if (!mFoundJpeg || !mFoundAux) {
ALOGE("%s: Unable to find buffers for JPEG source/destination",
__FUNCTION__);
return BAD_VALUE;
}
// Set up error management
mJpegErrorInfo = NULL;
JpegError error;
error.parent = this;
mCInfo.err = jpeg_std_error(&error);
mCInfo.err->error_exit = jpegErrorHandler;
jpeg_create_compress(&mCInfo);
if (checkError("Error initializing compression")) return NO_INIT;
// Route compressed data straight to output stream buffer
JpegDestination jpegDestMgr;
jpegDestMgr.parent = this;
jpegDestMgr.init_destination = jpegInitDestination;
jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
jpegDestMgr.term_destination = jpegTermDestination;
mCInfo.dest = &jpegDestMgr;
// Set up compression parameters
mCInfo.image_width = mAuxBuffer.width;
mCInfo.image_height = mAuxBuffer.height;
mCInfo.input_components = 3;
mCInfo.in_color_space = JCS_RGB;
jpeg_set_defaults(&mCInfo);
if (checkError("Error configuring defaults")) return NO_INIT;
// Do compression
jpeg_start_compress(&mCInfo, TRUE);
if (checkError("Error starting compression")) return NO_INIT;
size_t rowStride = mAuxBuffer.stride * 3;
const size_t kChunkSize = 32;
while (mCInfo.next_scanline < mCInfo.image_height) {
JSAMPROW chunk[kChunkSize];
for (size_t i = 0 ; i < kChunkSize; i++) {
chunk[i] = (JSAMPROW)
(mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
}
jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
if (checkError("Error while compressing")) return NO_INIT;
if (exitPending()) {
ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
return TIMED_OUT;
}
}
jpeg_finish_compress(&mCInfo);
if (checkError("Error while finishing compression")) return NO_INIT;
// All done
return OK;
}
bool JpegCompressor::isBusy() {
Mutex::Autolock busyLock(mBusyMutex);
return mIsBusy;
}
bool JpegCompressor::isStreamInUse(uint32_t id) {
Mutex::Autolock lock(mBusyMutex);
if (mBuffers && mIsBusy) {
for (size_t i = 0; i < mBuffers->size(); i++) {
if ( (*mBuffers)[i].streamId == (int)id ) return true;
}
}
return false;
}
bool JpegCompressor::waitForDone(nsecs_t timeout) {
Mutex::Autolock lock(mBusyMutex);
while (mIsBusy) {
status_t res = mDone.waitRelative(mBusyMutex, timeout);
if (res != OK) return false;
}
return true;
}
bool JpegCompressor::checkError(const char *msg) {
if (mJpegErrorInfo) {
char errBuffer[JMSG_LENGTH_MAX];
mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
ALOGE("%s: %s: %s",
__FUNCTION__, msg, errBuffer);
mJpegErrorInfo = NULL;
return true;
}
return false;
}
void JpegCompressor::cleanUp() {
status_t res;
jpeg_destroy_compress(&mCInfo);
Mutex::Autolock lock(mBusyMutex);
if (mFoundAux) {
if (mAuxBuffer.streamId == 0) {
delete[] mAuxBuffer.img;
} else if (!mSynchronous) {
mListener->onJpegInputDone(mAuxBuffer);
}
}
if (!mSynchronous) {
delete mBuffers;
}
mBuffers = NULL;
mIsBusy = false;
mDone.signal();
}
void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
JpegError *error = static_cast<JpegError*>(cinfo->err);
error->parent->mJpegErrorInfo = cinfo;
}
void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
ALOGV("%s: Setting destination to %p, size %zu",
__FUNCTION__, dest->parent->mJpegBuffer.img, kMaxJpegSize);
dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer.img);
dest->free_in_buffer = kMaxJpegSize;
}
boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
ALOGE("%s: JPEG destination buffer overflow!",
__FUNCTION__);
return true;
}
void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
ALOGV("%s: Done writing JPEG data. %zu bytes left in buffer",
__FUNCTION__, cinfo->dest->free_in_buffer);
}
JpegCompressor::JpegListener::~JpegListener() {
}
} // namespace android

View file

@ -0,0 +1,124 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This class simulates a hardware JPEG compressor. It receives image buffers
* in RGBA_8888 format, processes them in a worker thread, and then pushes them
* out to their destination stream.
*/
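/* A hedged usage sketch of the asynchronous path (the caller, buffer and
 * listener names are hypothetical):
 *
 *   sp<JpegCompressor> compressor(new JpegCompressor());
 *   if (compressor->reserve() == OK) {
 *       // start() takes ownership of |buffers| and reports completion
 *       // through |listener|->onJpegDone().
 *       compressor->start(buffers, listener);
 *   }
 */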
#ifndef HW_EMULATOR_CAMERA2_JPEG_H
#define HW_EMULATOR_CAMERA2_JPEG_H
#include "utils/Thread.h"
#include "utils/Mutex.h"
#include "utils/Timers.h"
#include "Base.h"
#include <stdio.h>
extern "C" {
#include <jpeglib.h>
}
namespace android {
class JpegCompressor: private Thread, public virtual RefBase {
public:
JpegCompressor();
~JpegCompressor();
struct JpegListener {
// Called when JPEG compression has finished, or encountered an error
virtual void onJpegDone(const StreamBuffer &jpegBuffer,
bool success) = 0;
// Called when the input buffer for JPEG is not needed any more,
// if the buffer came from the framework.
virtual void onJpegInputDone(const StreamBuffer &inputBuffer) = 0;
virtual ~JpegListener();
};
// Start compressing COMPRESSED format buffers; JpegCompressor takes
// ownership of the Buffers vector.
    // reserve() must be called first.
status_t start(Buffers *buffers, JpegListener *listener);
// Compress and block until buffer is complete.
status_t compressSynchronous(Buffers *buffers);
status_t cancel();
bool isBusy();
bool isStreamInUse(uint32_t id);
bool waitForDone(nsecs_t timeout);
// Reserve the compressor for a later start() call.
status_t reserve();
// TODO: Measure this
static const size_t kMaxJpegSize = 300000;
private:
Mutex mBusyMutex;
bool mIsBusy;
Condition mDone;
bool mSynchronous;
Mutex mMutex;
Buffers *mBuffers;
JpegListener *mListener;
StreamBuffer mJpegBuffer, mAuxBuffer;
bool mFoundJpeg, mFoundAux;
jpeg_compress_struct mCInfo;
struct JpegError : public jpeg_error_mgr {
JpegCompressor *parent;
};
j_common_ptr mJpegErrorInfo;
struct JpegDestination : public jpeg_destination_mgr {
JpegCompressor *parent;
};
static void jpegErrorHandler(j_common_ptr cinfo);
static void jpegInitDestination(j_compress_ptr cinfo);
static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
static void jpegTermDestination(j_compress_ptr cinfo);
bool checkError(const char *msg);
status_t compress();
void cleanUp();
/**
* Inherited Thread virtual overrides
*/
private:
virtual status_t readyToRun();
virtual bool threadLoop();
};
} // namespace android
#endif

View file

@ -0,0 +1,478 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Scene"
#include <utils/Log.h>
#include <stdlib.h>
#include <cmath>
#include "Scene.h"
// TODO: This should probably be done host-side in OpenGL for speed and better
// quality
namespace android {
// Define single-letter shortcuts for scene definition, for directly indexing
// mCurrentColors
#define G (Scene::GRASS * Scene::NUM_CHANNELS)
#define S (Scene::GRASS_SHADOW * Scene::NUM_CHANNELS)
#define H (Scene::HILL * Scene::NUM_CHANNELS)
#define W (Scene::WALL * Scene::NUM_CHANNELS)
#define R (Scene::ROOF * Scene::NUM_CHANNELS)
#define D (Scene::DOOR * Scene::NUM_CHANNELS)
#define C (Scene::CHIMNEY * Scene::NUM_CHANNELS)
#define I (Scene::WINDOW * Scene::NUM_CHANNELS)
#define U (Scene::SUN * Scene::NUM_CHANNELS)
#define K (Scene::SKY * Scene::NUM_CHANNELS)
#define M (Scene::MOON * Scene::NUM_CHANNELS)
const int Scene::kSceneWidth = 20;
const int Scene::kSceneHeight = 20;
const uint8_t Scene::kScene[Scene::kSceneWidth * Scene::kSceneHeight] = {
// 5 10 15 20
K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K, // 5
K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,H,H,H,
K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,C,C,H,H,H,
K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,C,C,H,H,H,
H,K,K,K,K,K,H,R,R,R,R,R,R,R,R,R,R,R,R,H, // 10
H,K,K,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
H,H,H,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
H,H,H,K,K,H,H,H,W,W,W,W,W,W,W,W,W,W,H,H,
S,S,S,G,G,S,S,S,W,W,W,W,W,W,W,W,W,W,S,S,
S,G,G,G,G,S,S,S,W,I,I,W,D,D,W,I,I,W,S,S, // 15
G,G,G,G,G,G,S,S,W,I,I,W,D,D,W,I,I,W,S,S,
G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G,
G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G, // 20
// 5 10 15 20
};
#undef G
#undef S
#undef H
#undef W
#undef R
#undef D
#undef C
#undef I
#undef U
#undef K
#undef M
Scene::Scene(
int sensorWidthPx,
int sensorHeightPx,
float sensorSensitivity):
mSensorWidth(sensorWidthPx),
mSensorHeight(sensorHeightPx),
mHour(12),
mExposureDuration(0.033f),
mSensorSensitivity(sensorSensitivity)
{
// Map scene to sensor pixels
if (mSensorWidth > mSensorHeight) {
mMapDiv = (mSensorWidth / (kSceneWidth + 1) ) + 1;
} else {
mMapDiv = (mSensorHeight / (kSceneHeight + 1) ) + 1;
}
mOffsetX = (kSceneWidth * mMapDiv - mSensorWidth) / 2;
mOffsetY = (kSceneHeight * mMapDiv - mSensorHeight) / 2;
// Assume that sensor filters are sRGB primaries to start
mFilterR[0] = 3.2406f; mFilterR[1] = -1.5372f; mFilterR[2] = -0.4986f;
mFilterGr[0] = -0.9689f; mFilterGr[1] = 1.8758f; mFilterGr[2] = 0.0415f;
mFilterGb[0] = -0.9689f; mFilterGb[1] = 1.8758f; mFilterGb[2] = 0.0415f;
mFilterB[0] = 0.0557f; mFilterB[1] = -0.2040f; mFilterB[2] = 1.0570f;
}
Scene::~Scene() {
}
void Scene::setColorFilterXYZ(
float rX, float rY, float rZ,
float grX, float grY, float grZ,
float gbX, float gbY, float gbZ,
float bX, float bY, float bZ) {
mFilterR[0] = rX; mFilterR[1] = rY; mFilterR[2] = rZ;
mFilterGr[0] = grX; mFilterGr[1] = grY; mFilterGr[2] = grZ;
mFilterGb[0] = gbX; mFilterGb[1] = gbY; mFilterGb[2] = gbZ;
mFilterB[0] = bX; mFilterB[1] = bY; mFilterB[2] = bZ;
}
void Scene::setHour(int hour) {
ALOGV("Hour set to: %d", hour);
mHour = hour % 24;
}
int Scene::getHour() {
return mHour;
}
void Scene::setExposureDuration(float seconds) {
mExposureDuration = seconds;
}
void Scene::calculateScene(nsecs_t time) {
// Calculate time fractions for interpolation
int timeIdx = mHour / kTimeStep;
int nextTimeIdx = (timeIdx + 1) % (24 / kTimeStep);
const nsecs_t kOneHourInNsec = 1e9 * 60 * 60;
nsecs_t timeSinceIdx = (mHour - timeIdx * kTimeStep) * kOneHourInNsec + time;
float timeFrac = timeSinceIdx / (float)(kOneHourInNsec * kTimeStep);
// Determine overall sunlight levels
float sunLux =
kSunlight[timeIdx] * (1 - timeFrac) +
kSunlight[nextTimeIdx] * timeFrac;
ALOGV("Sun lux: %f", sunLux);
float sunShadeLux = sunLux * (kDaylightShadeIllum / kDirectSunIllum);
// Determine sun/shade illumination chromaticity
float currentSunXY[2];
float currentShadeXY[2];
const float *prevSunXY, *nextSunXY;
const float *prevShadeXY, *nextShadeXY;
if (kSunlight[timeIdx] == kSunsetIllum ||
kSunlight[timeIdx] == kTwilightIllum) {
prevSunXY = kSunsetXY;
prevShadeXY = kSunsetXY;
} else {
prevSunXY = kDirectSunlightXY;
prevShadeXY = kDaylightXY;
}
if (kSunlight[nextTimeIdx] == kSunsetIllum ||
kSunlight[nextTimeIdx] == kTwilightIllum) {
nextSunXY = kSunsetXY;
nextShadeXY = kSunsetXY;
} else {
nextSunXY = kDirectSunlightXY;
nextShadeXY = kDaylightXY;
}
currentSunXY[0] = prevSunXY[0] * (1 - timeFrac) +
nextSunXY[0] * timeFrac;
currentSunXY[1] = prevSunXY[1] * (1 - timeFrac) +
nextSunXY[1] * timeFrac;
currentShadeXY[0] = prevShadeXY[0] * (1 - timeFrac) +
nextShadeXY[0] * timeFrac;
currentShadeXY[1] = prevShadeXY[1] * (1 - timeFrac) +
nextShadeXY[1] * timeFrac;
ALOGV("Sun XY: %f, %f, Shade XY: %f, %f",
currentSunXY[0], currentSunXY[1],
currentShadeXY[0], currentShadeXY[1]);
    // Converting from xyY to XYZ:
// X = Y / y * x
// Y = Y
// Z = Y / y * (1 - x - y);
float sunXYZ[3] = {
sunLux / currentSunXY[1] * currentSunXY[0],
sunLux,
sunLux / currentSunXY[1] *
(1 - currentSunXY[0] - currentSunXY[1])
};
float sunShadeXYZ[3] = {
sunShadeLux / currentShadeXY[1] * currentShadeXY[0],
sunShadeLux,
sunShadeLux / currentShadeXY[1] *
(1 - currentShadeXY[0] - currentShadeXY[1])
};
ALOGV("Sun XYZ: %f, %f, %f",
sunXYZ[0], sunXYZ[1], sunXYZ[2]);
ALOGV("Sun shade XYZ: %f, %f, %f",
sunShadeXYZ[0], sunShadeXYZ[1], sunShadeXYZ[2]);
// Determine moonlight levels
float moonLux =
kMoonlight[timeIdx] * (1 - timeFrac) +
kMoonlight[nextTimeIdx] * timeFrac;
float moonShadeLux = moonLux * (kDaylightShadeIllum / kDirectSunIllum);
float moonXYZ[3] = {
moonLux / kMoonlightXY[1] * kMoonlightXY[0],
moonLux,
moonLux / kMoonlightXY[1] *
(1 - kMoonlightXY[0] - kMoonlightXY[1])
};
float moonShadeXYZ[3] = {
moonShadeLux / kMoonlightXY[1] * kMoonlightXY[0],
moonShadeLux,
moonShadeLux / kMoonlightXY[1] *
(1 - kMoonlightXY[0] - kMoonlightXY[1])
};
// Determine starlight level
const float kClearNightXYZ[3] = {
kClearNightIllum / kMoonlightXY[1] * kMoonlightXY[0],
kClearNightIllum,
kClearNightIllum / kMoonlightXY[1] *
(1 - kMoonlightXY[0] - kMoonlightXY[1])
};
// Calculate direct and shaded light
float directIllumXYZ[3] = {
sunXYZ[0] + moonXYZ[0] + kClearNightXYZ[0],
sunXYZ[1] + moonXYZ[1] + kClearNightXYZ[1],
sunXYZ[2] + moonXYZ[2] + kClearNightXYZ[2],
};
float shadeIllumXYZ[3] = {
kClearNightXYZ[0],
kClearNightXYZ[1],
kClearNightXYZ[2]
};
shadeIllumXYZ[0] += (mHour < kSunOverhead) ? sunXYZ[0] : sunShadeXYZ[0];
shadeIllumXYZ[1] += (mHour < kSunOverhead) ? sunXYZ[1] : sunShadeXYZ[1];
shadeIllumXYZ[2] += (mHour < kSunOverhead) ? sunXYZ[2] : sunShadeXYZ[2];
// Moon up period covers 23->0 transition, shift for simplicity
int adjHour = (mHour + 12) % 24;
int adjMoonOverhead = (kMoonOverhead + 12 ) % 24;
shadeIllumXYZ[0] += (adjHour < adjMoonOverhead) ?
moonXYZ[0] : moonShadeXYZ[0];
shadeIllumXYZ[1] += (adjHour < adjMoonOverhead) ?
moonXYZ[1] : moonShadeXYZ[1];
shadeIllumXYZ[2] += (adjHour < adjMoonOverhead) ?
moonXYZ[2] : moonShadeXYZ[2];
ALOGV("Direct XYZ: %f, %f, %f",
directIllumXYZ[0],directIllumXYZ[1],directIllumXYZ[2]);
ALOGV("Shade XYZ: %f, %f, %f",
shadeIllumXYZ[0], shadeIllumXYZ[1], shadeIllumXYZ[2]);
for (int i = 0; i < NUM_MATERIALS; i++) {
        // Converting from xyY to XYZ:
// X = Y / y * x
// Y = Y
// Z = Y / y * (1 - x - y);
float matXYZ[3] = {
kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
kMaterials_xyY[i][0],
kMaterials_xyY[i][2],
kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
(1 - kMaterials_xyY[i][0] - kMaterials_xyY[i][1])
};
if (kMaterialsFlags[i] == 0 || kMaterialsFlags[i] & kSky) {
matXYZ[0] *= directIllumXYZ[0];
matXYZ[1] *= directIllumXYZ[1];
matXYZ[2] *= directIllumXYZ[2];
} else if (kMaterialsFlags[i] & kShadowed) {
matXYZ[0] *= shadeIllumXYZ[0];
matXYZ[1] *= shadeIllumXYZ[1];
matXYZ[2] *= shadeIllumXYZ[2];
        } // else if (kMaterialsFlags[i] & kSelfLit), do nothing
ALOGV("Mat %d XYZ: %f, %f, %f", i, matXYZ[0], matXYZ[1], matXYZ[2]);
float luxToElectrons = mSensorSensitivity * mExposureDuration /
(kAperture * kAperture);
mCurrentColors[i*NUM_CHANNELS + 0] =
(mFilterR[0] * matXYZ[0] +
mFilterR[1] * matXYZ[1] +
mFilterR[2] * matXYZ[2])
* luxToElectrons;
mCurrentColors[i*NUM_CHANNELS + 1] =
(mFilterGr[0] * matXYZ[0] +
mFilterGr[1] * matXYZ[1] +
mFilterGr[2] * matXYZ[2])
* luxToElectrons;
mCurrentColors[i*NUM_CHANNELS + 2] =
(mFilterGb[0] * matXYZ[0] +
mFilterGb[1] * matXYZ[1] +
mFilterGb[2] * matXYZ[2])
* luxToElectrons;
mCurrentColors[i*NUM_CHANNELS + 3] =
(mFilterB[0] * matXYZ[0] +
mFilterB[1] * matXYZ[1] +
mFilterB[2] * matXYZ[2])
* luxToElectrons;
ALOGV("Color %d RGGB: %d, %d, %d, %d", i,
mCurrentColors[i*NUM_CHANNELS + 0],
mCurrentColors[i*NUM_CHANNELS + 1],
mCurrentColors[i*NUM_CHANNELS + 2],
mCurrentColors[i*NUM_CHANNELS + 3]);
}
// Shake viewpoint; horizontal and vertical sinusoids at roughly
// human handshake frequencies
mHandshakeX =
( kFreq1Magnitude * std::sin(kHorizShakeFreq1 * timeSinceIdx) +
kFreq2Magnitude * std::sin(kHorizShakeFreq2 * timeSinceIdx) ) *
mMapDiv * kShakeFraction;
mHandshakeY =
( kFreq1Magnitude * std::sin(kVertShakeFreq1 * timeSinceIdx) +
kFreq2Magnitude * std::sin(kVertShakeFreq2 * timeSinceIdx) ) *
mMapDiv * kShakeFraction;
// Set starting pixel
setReadoutPixel(0,0);
}
void Scene::setReadoutPixel(int x, int y) {
mCurrentX = x;
mCurrentY = y;
mSubX = (x + mOffsetX + mHandshakeX) % mMapDiv;
mSubY = (y + mOffsetY + mHandshakeY) % mMapDiv;
mSceneX = (x + mOffsetX + mHandshakeX) / mMapDiv;
mSceneY = (y + mOffsetY + mHandshakeY) / mMapDiv;
mSceneIdx = mSceneY * kSceneWidth + mSceneX;
mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
}
const uint32_t* Scene::getPixelElectrons() {
const uint32_t *pixel = mCurrentSceneMaterial;
mCurrentX++;
mSubX++;
if (mCurrentX >= mSensorWidth) {
mCurrentX = 0;
mCurrentY++;
if (mCurrentY >= mSensorHeight) mCurrentY = 0;
setReadoutPixel(mCurrentX, mCurrentY);
} else if (mSubX > mMapDiv) {
mSceneIdx++;
mSceneX++;
mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
mSubX = 0;
}
return pixel;
}
// Handshake model constants.
// Frequencies measured in a nanosecond timebase
const float Scene::kHorizShakeFreq1 = 2 * M_PI * 2 / 1e9; // 2 Hz
const float Scene::kHorizShakeFreq2 = 2 * M_PI * 13 / 1e9; // 13 Hz
const float Scene::kVertShakeFreq1 = 2 * M_PI * 3 / 1e9; // 3 Hz
const float Scene::kVertShakeFreq2 = 2 * M_PI * 11 / 1e9; // 11 Hz
const float Scene::kFreq1Magnitude = 5;
const float Scene::kFreq2Magnitude = 1;
const float Scene::kShakeFraction = 0.03; // As a fraction of a scene tile
// RGB->YUV, Jpeg standard
const float Scene::kRgb2Yuv[12] = {
0.299f, 0.587f, 0.114f, 0.f,
-0.16874f, -0.33126f, 0.5f, -128.f,
0.5f, -0.41869f, -0.08131f, -128.f,
};
// Aperture of imaging lens
const float Scene::kAperture = 2.8;
// Sun illumination levels through the day
const float Scene::kSunlight[24/kTimeStep] =
{
0, // 00:00
0,
0,
kTwilightIllum, // 06:00
kDirectSunIllum,
kDirectSunIllum,
kDirectSunIllum, // 12:00
kDirectSunIllum,
kDirectSunIllum,
kSunsetIllum, // 18:00
kTwilightIllum,
0
};
// Moon illumination levels through the day
const float Scene::kMoonlight[24/kTimeStep] =
{
kFullMoonIllum, // 00:00
kFullMoonIllum,
0,
0, // 06:00
0,
0,
0, // 12:00
0,
0,
0, // 18:00
0,
kFullMoonIllum
};
const int Scene::kSunOverhead = 12;
const int Scene::kMoonOverhead = 0;
// Used for sun illumination levels
const float Scene::kDirectSunIllum = 100000;
const float Scene::kSunsetIllum = 400;
const float Scene::kTwilightIllum = 4;
// Used for moon illumination levels
const float Scene::kFullMoonIllum = 1;
// Other illumination levels
const float Scene::kDaylightShadeIllum = 20000;
const float Scene::kClearNightIllum = 2e-3;
const float Scene::kStarIllum = 2e-6;
const float Scene::kLivingRoomIllum = 50;
const float Scene::kIncandescentXY[2] = { 0.44757f, 0.40745f};
const float Scene::kDirectSunlightXY[2] = { 0.34842f, 0.35161f};
const float Scene::kDaylightXY[2] = { 0.31271f, 0.32902f};
const float Scene::kNoonSkyXY[2] = { 0.346f, 0.359f};
const float Scene::kMoonlightXY[2] = { 0.34842f, 0.35161f};
const float Scene::kSunsetXY[2] = { 0.527f, 0.413f};
const uint8_t Scene::kSelfLit = 0x01;
const uint8_t Scene::kShadowed = 0x02;
const uint8_t Scene::kSky = 0x04;
// For non-self-lit materials, the Y component is normalized with 1=full
// reflectance; for self-lit materials, it's the constant illuminance in lux.
const float Scene::kMaterials_xyY[Scene::NUM_MATERIALS][3] = {
{ 0.3688f, 0.4501f, .1329f }, // GRASS
{ 0.3688f, 0.4501f, .1329f }, // GRASS_SHADOW
{ 0.3986f, 0.5002f, .4440f }, // HILL
{ 0.3262f, 0.5040f, .2297f }, // WALL
{ 0.4336f, 0.3787f, .1029f }, // ROOF
{ 0.3316f, 0.2544f, .0639f }, // DOOR
{ 0.3425f, 0.3577f, .0887f }, // CHIMNEY
{ kIncandescentXY[0], kIncandescentXY[1], kLivingRoomIllum }, // WINDOW
{ kDirectSunlightXY[0], kDirectSunlightXY[1], kDirectSunIllum }, // SUN
{ kNoonSkyXY[0], kNoonSkyXY[1], kDaylightShadeIllum / kDirectSunIllum }, // SKY
{ kMoonlightXY[0], kMoonlightXY[1], kFullMoonIllum } // MOON
};
const uint8_t Scene::kMaterialsFlags[Scene::NUM_MATERIALS] = {
0,
kShadowed,
kShadowed,
kShadowed,
kShadowed,
kShadowed,
kShadowed,
kSelfLit,
kSelfLit,
kSky,
kSelfLit,
};
} // namespace android
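// Editor's sketch (not part of the original file): the xyY -> XYZ conversion
// used in calculateScene() above, pulled out so it can be verified in
// isolation. The helper name xyYToXYZ is ours.
#include <cstdio>

static void xyYToXYZ(float x, float y, float bigY, float outXYZ[3]) {
    outXYZ[0] = bigY / y * x;               // X = Y / y * x
    outXYZ[1] = bigY;                       // Y = Y
    outXYZ[2] = bigY / y * (1.f - x - y);   // Z = Y / y * (1 - x - y)
}

int main() {
    float xyz[3];
    xyYToXYZ(0.31271f, 0.32902f, 1.f, xyz); // kDaylightXY at unit luminance
    std::printf("XYZ = %f %f %f\n", xyz[0], xyz[1], xyz[2]);
    return 0;
}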

View file

@ -0,0 +1,191 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* The Scene class implements a simple physical simulation of a scene, using the
* CIE 1931 colorspace to represent light in physical units (lux).
*
* It's fairly approximate, but does provide a scene with realistic, widely
* variable illumination levels and colors over time.
*
*/
#ifndef HW_EMULATOR_CAMERA2_SCENE_H
#define HW_EMULATOR_CAMERA2_SCENE_H
#include "utils/Timers.h"
namespace android {
class Scene {
public:
Scene(int sensorWidthPx,
int sensorHeightPx,
float sensorSensitivity);
~Scene();
// Set the filter coefficients for the red, green, and blue filters on the
// sensor. Used as an optimization to pre-calculate various illuminance
// values. Two different green filters can be provided, to account for
// possible cross-talk on a Bayer sensor. Must be called before
// calculateScene.
void setColorFilterXYZ(
float rX, float rY, float rZ,
float grX, float grY, float grZ,
float gbX, float gbY, float gbZ,
float bX, float bY, float bZ);
// Set time of day (24-hour clock). This controls the general light levels
// in the scene. Must be called before calculateScene
void setHour(int hour);
// Get current hour
int getHour();
// Set the duration of exposure for determining luminous exposure.
// Must be called before calculateScene
void setExposureDuration(float seconds);
// Calculate scene information for current hour and the time offset since
// the hour. Must be called at least once before calling getPixelElectrons().
// Resets pixel readout location to 0,0
void calculateScene(nsecs_t time);
// Set sensor pixel readout location.
void setReadoutPixel(int x, int y);
// Get sensor response in physical units (electrons) for light hitting the
// current readout pixel, after passing through color filters. The readout
// pixel will be auto-incremented. The returned array can be indexed with
// ColorChannels.
const uint32_t* getPixelElectrons();
enum ColorChannels {
R = 0,
Gr,
Gb,
B,
Y,
Cb,
Cr,
NUM_CHANNELS
};
private:
// Sensor color filtering coefficients in XYZ
float mFilterR[3];
float mFilterGr[3];
float mFilterGb[3];
float mFilterB[3];
int mOffsetX, mOffsetY;
int mMapDiv;
int mHandshakeX, mHandshakeY;
int mSensorWidth;
int mSensorHeight;
int mCurrentX;
int mCurrentY;
int mSubX;
int mSubY;
int mSceneX;
int mSceneY;
int mSceneIdx;
uint32_t *mCurrentSceneMaterial;
int mHour;
float mExposureDuration;
float mSensorSensitivity;
enum Materials {
GRASS = 0,
GRASS_SHADOW,
HILL,
WALL,
ROOF,
DOOR,
CHIMNEY,
WINDOW,
SUN,
SKY,
MOON,
NUM_MATERIALS
};
uint32_t mCurrentColors[NUM_MATERIALS*NUM_CHANNELS];
/**
* Constants for scene definition. These are approximate to varying degrees.
*/
// Fake handshake parameters. Two shake frequencies per axis, plus magnitude
// as a fraction of a scene tile, and relative magnitudes for the frequencies
static const float kHorizShakeFreq1;
static const float kHorizShakeFreq2;
static const float kVertShakeFreq1;
static const float kVertShakeFreq2;
static const float kFreq1Magnitude;
static const float kFreq2Magnitude;
static const float kShakeFraction;
// RGB->YUV conversion
static const float kRgb2Yuv[12];
// Aperture of imaging lens
static const float kAperture;
// Sun, moon illuminance levels in 2-hour increments. These don't match any
// real day anywhere.
static const uint32_t kTimeStep = 2;
static const float kSunlight[];
static const float kMoonlight[];
static const int kSunOverhead;
static const int kMoonOverhead;
// Illumination levels for various conditions, in lux
static const float kDirectSunIllum;
static const float kDaylightShadeIllum;
static const float kSunsetIllum;
static const float kTwilightIllum;
static const float kFullMoonIllum;
static const float kClearNightIllum;
static const float kStarIllum;
static const float kLivingRoomIllum;
// Chromaticity of various illumination sources
static const float kIncandescentXY[2];
static const float kDirectSunlightXY[2];
static const float kDaylightXY[2];
static const float kNoonSkyXY[2];
static const float kMoonlightXY[2];
static const float kSunsetXY[2];
static const uint8_t kSelfLit;
static const uint8_t kShadowed;
static const uint8_t kSky;
static const float kMaterials_xyY[NUM_MATERIALS][3];
static const uint8_t kMaterialsFlags[NUM_MATERIALS];
static const int kSceneWidth;
static const int kSceneHeight;
static const uint8_t kScene[];
};
} // namespace android
#endif // HW_EMULATOR_CAMERA2_SCENE_H
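// Editor's sketch (not part of the original header): a plausible call
// sequence for the Scene API above, assuming it is compiled inside this
// source tree; the filter coefficients are illustrative, not calibrated.
#include <cstdint>
#include "Scene.h"

namespace android {

void exampleSceneUsage(nsecs_t now) {
    Scene scene(640, 480, 200.f);              // 640x480 px, 200 e-/lux-s
    scene.setColorFilterXYZ(0.9f, 0.3f, 0.0f,  // R response in X, Y, Z
                            0.3f, 0.9f, 0.1f,  // Gr
                            0.3f, 0.9f, 0.1f,  // Gb
                            0.0f, 0.1f, 0.9f); // B
    scene.setHour(12);                         // noon light levels
    scene.setExposureDuration(1.f / 30.f);     // ~33 ms exposure
    scene.calculateScene(now);                 // also resets readout to (0,0)
    const uint32_t* px = scene.getPixelElectrons();
    uint32_t redElectrons = px[Scene::R];      // indexed by ColorChannels
    (void) redElectrons;
}

} // namespace android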

View file

@ -0,0 +1,605 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera2_Sensor"
#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif
#include <utils/Log.h>
#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include "system/camera_metadata.h"
namespace android {
//const nsecs_t Sensor::kExposureTimeRange[2] =
// {1000L, 30000000000L} ; // 1 us - 30 sec
//const nsecs_t Sensor::kFrameDurationRange[2] =
// {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kExposureTimeRange[2] =
{1000L, 300000000L} ; // 1 us - 0.3 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
{33331760L, 300000000L}; // ~1/30 s - 0.3 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;
const uint8_t Sensor::kColorFilterArrangement =
ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;
// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;
const float Sensor::kElectronsPerLuxSecond =
Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
* Sensor::kVoltsPerLuxSecond;
const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
Sensor::kSaturationElectrons;
const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
Sensor::kReadNoiseStddevBeforeGain *
Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
Sensor::kReadNoiseStddevAfterGain *
Sensor::kReadNoiseStddevAfterGain;
const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;
/** A few utility functions for math, normal distributions */
// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
// Modifier is based on IEEE floating-point representation; the
// manipulations boil down to finding approximate log2, dividing by two, and
// then inverting the log2. A bias is added to make the relative error
// symmetric about the real answer.
const int32_t modifier = 0x1FBB4000;
int32_t r_i = *(int32_t*)(&r);
r_i = (r_i >> 1) + modifier;
return *(float*)(&r_i);
}
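// Editor's sketch (not part of the original file): the casts in
// sqrtf_approx() type-pun through pointers, which is formally undefined
// behavior in C++; the same bit trick via memcpy is well-defined and
// compiles to identical code. Requires <cstring>; the name is ours.
static float sqrtf_approx_safe(float r) {
    const int32_t modifier = 0x1FBB4000;
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i)); // read the IEEE-754 bit pattern
    r_i = (r_i >> 1) + modifier;   // approximate log2, halve, invert
    memcpy(&r, &r_i, sizeof(r));
    return r;
}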
Sensor::Sensor(uint32_t width, uint32_t height):
Thread(false),
mResolution{width, height},
mActiveArray{0, 0, width, height},
mRowReadoutTime(kFrameDurationRange[0] / height),
mGotVSync(false),
mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
mFrameDuration(kFrameDurationRange[0]),
mGainFactor(kDefaultSensitivity),
mNextBuffers(NULL),
mFrameNumber(0),
mCapturedBuffers(NULL),
mListener(NULL),
mScene(width, height, kElectronsPerLuxSecond)
{
ALOGV("Sensor created with pixel array %d x %d", width, height);
}
Sensor::~Sensor() {
shutDown();
}
status_t Sensor::startUp() {
ALOGV("%s: E", __FUNCTION__);
int res;
mCapturedBuffers = NULL;
res = run("EmulatedFakeCamera2::Sensor",
ANDROID_PRIORITY_URGENT_DISPLAY);
if (res != OK) {
ALOGE("Unable to start up sensor capture thread: %d", res);
}
return res;
}
status_t Sensor::shutDown() {
ALOGV("%s: E", __FUNCTION__);
int res;
res = requestExitAndWait();
if (res != OK) {
ALOGE("Unable to shut down sensor capture thread: %d", res);
}
return res;
}
Scene &Sensor::getScene() {
return mScene;
}
void Sensor::setExposureTime(uint64_t ns) {
Mutex::Autolock lock(mControlMutex);
ALOGVV("Exposure set to %f", ns/1000000.f);
mExposureTime = ns;
}
void Sensor::setFrameDuration(uint64_t ns) {
Mutex::Autolock lock(mControlMutex);
ALOGVV("Frame duration set to %f", ns/1000000.f);
mFrameDuration = ns;
}
void Sensor::setSensitivity(uint32_t gain) {
Mutex::Autolock lock(mControlMutex);
ALOGVV("Gain set to %d", gain);
mGainFactor = gain;
}
void Sensor::setDestinationBuffers(Buffers *buffers) {
Mutex::Autolock lock(mControlMutex);
mNextBuffers = buffers;
}
void Sensor::setFrameNumber(uint32_t frameNumber) {
Mutex::Autolock lock(mControlMutex);
mFrameNumber = frameNumber;
}
bool Sensor::waitForVSync(nsecs_t reltime) {
int res;
Mutex::Autolock lock(mControlMutex);
mGotVSync = false;
res = mVSync.waitRelative(mControlMutex, reltime);
if (res != OK && res != TIMED_OUT) {
ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
return false;
}
return mGotVSync;
}
bool Sensor::waitForNewFrame(nsecs_t reltime,
nsecs_t *captureTime) {
Mutex::Autolock lock(mReadoutMutex);
uint8_t *ret;
if (mCapturedBuffers == NULL) {
int res;
res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
if (res == TIMED_OUT) {
return false;
} else if (res != OK || mCapturedBuffers == NULL) {
ALOGE("Error waiting for sensor readout signal: %d", res);
return false;
}
}
mReadoutComplete.signal();
*captureTime = mCaptureTime;
mCapturedBuffers = NULL;
return true;
}
Sensor::SensorListener::~SensorListener() {
}
void Sensor::setSensorListener(SensorListener *listener) {
Mutex::Autolock lock(mControlMutex);
mListener = listener;
}
status_t Sensor::readyToRun() {
ALOGV("Starting up sensor thread");
mStartupTime = systemTime();
mNextCaptureTime = 0;
mNextCapturedBuffers = NULL;
return OK;
}
bool Sensor::threadLoop() {
/**
* Sensor capture operation main loop.
*
* Stages are out-of-order relative to a single frame's processing, but
* in-order in time.
*/
/**
* Stage 1: Read in latest control parameters
*/
uint64_t exposureDuration;
uint64_t frameDuration;
uint32_t gain;
Buffers *nextBuffers;
uint32_t frameNumber;
SensorListener *listener = NULL;
{
Mutex::Autolock lock(mControlMutex);
exposureDuration = mExposureTime;
frameDuration = mFrameDuration;
gain = mGainFactor;
nextBuffers = mNextBuffers;
frameNumber = mFrameNumber;
listener = mListener;
// Don't reuse a buffer set
mNextBuffers = NULL;
// Signal VSync for start of readout
ALOGVV("Sensor VSync");
mGotVSync = true;
mVSync.signal();
}
/**
* Stage 3: Read out latest captured image
*/
Buffers *capturedBuffers = NULL;
nsecs_t captureTime = 0;
nsecs_t startRealTime = systemTime();
// Stagefright cares about system time for timestamps, so base simulated
// time on that.
nsecs_t simulatedTime = startRealTime;
nsecs_t frameEndRealTime = startRealTime + frameDuration;
nsecs_t frameReadoutEndRealTime = startRealTime +
mRowReadoutTime * mResolution[1];
if (mNextCapturedBuffers != NULL) {
ALOGVV("Sensor starting readout");
// Pretend we're doing readout now; will signal once enough time has elapsed
capturedBuffers = mNextCapturedBuffers;
captureTime = mNextCaptureTime;
}
simulatedTime += mRowReadoutTime + kMinVerticalBlank;
// TODO: Move this signal to another thread to simulate readout
// time properly
if (capturedBuffers != NULL) {
ALOGVV("Sensor readout complete");
Mutex::Autolock lock(mReadoutMutex);
if (mCapturedBuffers != NULL) {
ALOGV("Waiting for readout thread to catch up!");
mReadoutComplete.wait(mReadoutMutex);
}
mCapturedBuffers = capturedBuffers;
mCaptureTime = captureTime;
mReadoutAvailable.signal();
capturedBuffers = NULL;
}
/**
* Stage 2: Capture new image
*/
mNextCaptureTime = simulatedTime;
mNextCapturedBuffers = nextBuffers;
if (mNextCapturedBuffers != NULL) {
if (listener != NULL) {
listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
mNextCaptureTime);
}
ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
(float)exposureDuration/1e6, gain);
mScene.setExposureDuration((float)exposureDuration/1e9);
mScene.calculateScene(mNextCaptureTime);
// Might be adding more buffers, so size isn't constant
for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
const StreamBuffer &b = (*mNextCapturedBuffers)[i];
ALOGVV("Sensor capturing buffer %d: stream %d,"
" %d x %d, format %x, stride %d, buf %p, img %p",
i, b.streamId, b.width, b.height, b.format, b.stride,
b.buffer, b.img);
switch(b.format) {
case HAL_PIXEL_FORMAT_RAW16:
captureRaw(b.img, gain, b.stride);
break;
case HAL_PIXEL_FORMAT_RGB_888:
captureRGB(b.img, gain, b.stride);
break;
case HAL_PIXEL_FORMAT_RGBA_8888:
captureRGBA(b.img, gain, b.stride);
break;
case HAL_PIXEL_FORMAT_BLOB:
if (b.dataSpace != HAL_DATASPACE_DEPTH) {
// Add auxiliary buffer of the right size
// Assumes only one BLOB (JPEG) buffer in
// mNextCapturedBuffers
StreamBuffer bAux;
bAux.streamId = 0;
bAux.width = b.width;
bAux.height = b.height;
bAux.format = HAL_PIXEL_FORMAT_RGB_888;
bAux.stride = b.width;
bAux.buffer = NULL;
// TODO: Reuse these
bAux.img = new uint8_t[b.width * b.height * 3];
mNextCapturedBuffers->push_back(bAux);
} else {
captureDepthCloud(b.img);
}
break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
captureNV21(b.img, gain, b.stride);
break;
case HAL_PIXEL_FORMAT_YV12:
// TODO:
ALOGE("%s: Format %x is TODO", __FUNCTION__, b.format);
break;
case HAL_PIXEL_FORMAT_Y16:
captureDepth(b.img, gain, b.stride);
break;
default:
ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
b.format);
break;
}
}
}
ALOGVV("Sensor vertical blanking interval");
nsecs_t workDoneRealTime = systemTime();
const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
timespec t;
t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
int ret;
do {
ret = nanosleep(&t, &t);
} while (ret != 0);
}
nsecs_t endRealTime = systemTime();
ALOGVV("Frame cycle took %d ms, target %d ms",
(int)((endRealTime - startRealTime)/1000000),
(int)(frameDuration / 1000000));
return true;
}
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
float totalGain = gain/100.0 * kBaseGainFactor;
float noiseVarGain = totalGain * totalGain;
float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
+ kReadNoiseVarAfterGain;
int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
mScene.setReadoutPixel(0,0);
for (unsigned int y = 0; y < mResolution[1]; y++ ) {
int *bayerRow = bayerSelect + (y & 0x1) * 2;
uint16_t *px = (uint16_t*)img + y * stride;
for (unsigned int x = 0; x < mResolution[0]; x++) {
uint32_t electronCount;
electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
// TODO: Better pixel saturation curve?
electronCount = (electronCount < kSaturationElectrons) ?
electronCount : kSaturationElectrons;
// TODO: Better A/D saturation curve?
uint16_t rawCount = electronCount * totalGain;
rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
// Calculate noise value
// TODO: Use more-correct Gaussian instead of uniform noise
float photonNoiseVar = electronCount * noiseVarGain;
float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
// Scaled to roughly match gaussian/uniform noise stddev
float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
rawCount += kBlackLevel;
rawCount += noiseStddev * noiseSample;
*px++ = rawCount;
}
// TODO: Handle this better
//simulatedTime += mRowReadoutTime;
}
ALOGVV("Raw sensor image captured");
}
void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
float totalGain = gain/100.0 * kBaseGainFactor;
// In fixed-point math, calculate total scaling from electrons to 8bpp
int scale64x = 64 * totalGain * 255 / kMaxRawValue;
uint32_t inc = ceil( (float) mResolution[0] / stride);
for (unsigned int y = 0, outY = 0; y < mResolution[1]; y+=inc, outY++ ) {
uint8_t *px = img + outY * stride * 4;
mScene.setReadoutPixel(0, y);
for (unsigned int x = 0; x < mResolution[0]; x+=inc) {
uint32_t rCount, gCount, bCount;
// TODO: Perfect demosaicing is a cheat
const uint32_t *pixel = mScene.getPixelElectrons();
rCount = pixel[Scene::R] * scale64x;
gCount = pixel[Scene::Gr] * scale64x;
bCount = pixel[Scene::B] * scale64x;
*px++ = rCount < 255*64 ? rCount / 64 : 255;
*px++ = gCount < 255*64 ? gCount / 64 : 255;
*px++ = bCount < 255*64 ? bCount / 64 : 255;
*px++ = 255;
for (unsigned int j = 1; j < inc; j++)
mScene.getPixelElectrons();
}
// TODO: Handle this better
//simulatedTime += mRowReadoutTime;
}
ALOGVV("RGBA sensor image captured");
}
void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
float totalGain = gain/100.0 * kBaseGainFactor;
// In fixed-point math, calculate total scaling from electrons to 8bpp
int scale64x = 64 * totalGain * 255 / kMaxRawValue;
uint32_t inc = ceil( (float) mResolution[0] / stride);
for (unsigned int y = 0, outY = 0; y < mResolution[1]; y += inc, outY++ ) {
mScene.setReadoutPixel(0, y);
uint8_t *px = img + outY * stride * 3;
for (unsigned int x = 0; x < mResolution[0]; x += inc) {
uint32_t rCount, gCount, bCount;
// TODO: Perfect demosaicing is a cheat
const uint32_t *pixel = mScene.getPixelElectrons();
rCount = pixel[Scene::R] * scale64x;
gCount = pixel[Scene::Gr] * scale64x;
bCount = pixel[Scene::B] * scale64x;
*px++ = rCount < 255*64 ? rCount / 64 : 255;
*px++ = gCount < 255*64 ? gCount / 64 : 255;
*px++ = bCount < 255*64 ? bCount / 64 : 255;
for (unsigned int j = 1; j < inc; j++)
mScene.getPixelElectrons();
}
// TODO: Handle this better
//simulatedTime += mRowReadoutTime;
}
ALOGVV("RGB sensor image captured");
}
void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) {
float totalGain = gain/100.0 * kBaseGainFactor;
// Using fixed-point math with 6 bits of fractional precision.
// In fixed-point math, calculate total scaling from electrons to 8bpp
const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
// In fixed-point math, saturation point of sensor after gain
const int saturationPoint = 64 * 255;
// Fixed-point coefficients for RGB-YUV transform
// Based on JFIF RGB->YUV transform.
// Cb/Cr offset scaled by 64x twice since they're applied post-multiply
const int rgbToY[] = {19, 37, 7};
const int rgbToCb[] = {-10,-21, 32, 524288};
const int rgbToCr[] = {32,-26, -5, 524288};
// Scale back to 8bpp non-fixed-point
const int scaleOut = 64;
const int scaleOutSq = scaleOut * scaleOut; // after multiplies
// inc = how many pixels to skip while reading every next pixel
// horizontally.
uint32_t inc = ceil( (float) mResolution[0] / stride);
// outH = projected vertical resolution based on stride.
uint32_t outH = mResolution[1] / inc;
for (unsigned int y = 0, outY = 0;
y < mResolution[1]; y+=inc, outY++) {
uint8_t *pxY = img + outY * stride;
uint8_t *pxVU = img + (outH + outY / 2) * stride;
mScene.setReadoutPixel(0,y);
for (unsigned int outX = 0; outX < stride; outX++) {
int32_t rCount, gCount, bCount;
// TODO: Perfect demosaicing is a cheat
const uint32_t *pixel = mScene.getPixelElectrons();
rCount = pixel[Scene::R] * scale64x;
rCount = rCount < saturationPoint ? rCount : saturationPoint;
gCount = pixel[Scene::Gr] * scale64x;
gCount = gCount < saturationPoint ? gCount : saturationPoint;
bCount = pixel[Scene::B] * scale64x;
bCount = bCount < saturationPoint ? bCount : saturationPoint;
*pxY++ = (rgbToY[0] * rCount +
rgbToY[1] * gCount +
rgbToY[2] * bCount) / scaleOutSq;
if (outY % 2 == 0 && outX % 2 == 0) {
*pxVU++ = (rgbToCr[0] * rCount +
rgbToCr[1] * gCount +
rgbToCr[2] * bCount +
rgbToCr[3]) / scaleOutSq;
*pxVU++ = (rgbToCb[0] * rCount +
rgbToCb[1] * gCount +
rgbToCb[2] * bCount +
rgbToCb[3]) / scaleOutSq;
}
for (unsigned int j = 1; j < inc; j++)
mScene.getPixelElectrons();
}
}
ALOGVV("NV21 sensor image captured");
}
void Sensor::captureDepth(uint8_t *img, uint32_t gain, uint32_t stride) {
float totalGain = gain/100.0 * kBaseGainFactor;
// In fixed-point math, calculate scaling factor to 13bpp millimeters
int scale64x = 64 * totalGain * 8191 / kMaxRawValue;
uint32_t inc = ceil( (float) mResolution[0] / stride);
for (unsigned int y = 0, outY = 0; y < mResolution[1]; y += inc, outY++ ) {
mScene.setReadoutPixel(0, y);
uint16_t *px = ((uint16_t*)img) + outY * stride;
for (unsigned int x = 0; x < mResolution[0]; x += inc) {
uint32_t depthCount;
// TODO: Make up real depth scene instead of using green channel
// as depth
const uint32_t *pixel = mScene.getPixelElectrons();
depthCount = pixel[Scene::Gr] * scale64x;
*px++ = depthCount < 8191*64 ? depthCount / 64 : 0;
for (unsigned int j = 1; j < inc; j++)
mScene.getPixelElectrons();
}
// TODO: Handle this better
//simulatedTime += mRowReadoutTime;
}
ALOGVV("Depth sensor image captured");
}
void Sensor::captureDepthCloud(uint8_t *img) {
android_depth_points *cloud = reinterpret_cast<android_depth_points*>(img);
cloud->num_points = 16;
// TODO: Create point cloud values that match RGB scene
const int FLOATS_PER_POINT = 4;
const float JITTER_STDDEV = 0.1f;
for (size_t y = 0, i = 0; y < 4; y++) {
for (size_t x = 0; x < 4; x++, i++) {
float randSampleX = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
randSampleX *= JITTER_STDDEV;
float randSampleY = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
randSampleY *= JITTER_STDDEV;
float randSampleZ = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
randSampleZ *= JITTER_STDDEV;
cloud->xyzc_points[i * FLOATS_PER_POINT + 0] = x - 1.5f + randSampleX;
cloud->xyzc_points[i * FLOATS_PER_POINT + 1] = y - 1.5f + randSampleY;
cloud->xyzc_points[i * FLOATS_PER_POINT + 2] = 3.f + randSampleZ;
cloud->xyzc_points[i * FLOATS_PER_POINT + 3] = 0.8f;
}
}
ALOGVV("Depth point cloud captured");
}
} // namespace android
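// Editor's sketch (not part of the original file): a standalone check of the
// fixed-point RGB->Y scaling used in captureNV21() above. rgbToY holds the
// JFIF luma weights (0.299, 0.587, 0.114) scaled by 64, the pixel counts
// carry another 64x, so the product is divided by 64*64.
#include <cstdio>

int main() {
    const int rgbToY[3] = {19, 37, 7};  // ~0.297, 0.578, 0.109 in Q6
    const int scaleOutSq = 64 * 64;
    int r = 200 * 64, g = 150 * 64, b = 50 * 64; // saturation clamp omitted
    int y = (rgbToY[0] * r + rgbToY[1] * g + rgbToY[2] * b) / scaleOutSq;
    std::printf("Y = %d\n", y); // prints 151; exact JFIF math gives ~153.6
    return 0;
}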

View file

@ -0,0 +1,247 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This class is a simple simulation of a typical CMOS cellphone imager chip,
* which outputs 12-bit Bayer-mosaic raw images.
*
* Unlike most real image sensors, this one's native color space is linear sRGB.
*
* The sensor is abstracted as operating as a pipeline 3 stages deep;
* conceptually, each frame to be captured goes through these three stages. The
* processing step for the sensor is marked off by vertical sync signals, which
* indicate the start of readout of the oldest frame. The interval between
* processing steps depends on the frame duration of the frame currently being
* captured. The stages are 1) configure, 2) capture, and 3) readout. During
* configuration, the sensor's registers for settings such as exposure time,
* frame duration, and gain are set for the next frame to be captured. In stage
* 2, the image data for the frame is actually captured by the sensor. Finally,
* in stage 3, the just-captured data is read out and sent to the rest of the
* system.
*
* The sensor is assumed to be rolling-shutter, so low-numbered rows of the
* sensor are exposed earlier in time than larger-numbered rows, with the time
* offset between each row being equal to the row readout time.
*
* The characteristics of this sensor don't correspond to any actual sensor,
* but are not far off typical sensors.
*
* Example timing diagram, with three frames:
* Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
* Frame 2: Frame duration 75 ms, exposure time 65 ms.
* Legend:
* C = update sensor registers for frame
* v = row in reset (vertical blanking interval)
* E = row capturing image data
* R = row being read out
* | = vertical sync signal
*time(ms)| 0 55 105 155 230 270
* Frame 0| :configure : capture : readout : : :
* Row # | ..|CCCC______|_________|_________| : :
* 0 | :\ \vvvvvEEEER \ : :
* 500 | : \ \vvvvvEEEER \ : :
* 1000 | : \ \vvvvvEEEER \ : :
* 1500 | : \ \vvvvvEEEER \ : :
* 2000 | : \__________\vvvvvEEEER_________\ : :
* Frame 1| : configure capture readout : :
* Row # | : |CCCC_____|_________|______________| :
* 0 | : :\ \vvvvvEEEER \ :
* 500 | : : \ \vvvvvEEEER \ :
* 1000 | : : \ \vvvvvEEEER \ :
* 1500 | : : \ \vvvvvEEEER \ :
* 2000 | : : \_________\vvvvvEEEER______________\ :
* Frame 2| : : configure capture readout:
* Row # | : : |CCCC_____|______________|_______|...
* 0 | : : :\ \vEEEEEEEEEEEEER \
* 500 | : : : \ \vEEEEEEEEEEEEER \
* 1000 | : : : \ \vEEEEEEEEEEEEER \
* 1500 | : : : \ \vEEEEEEEEEEEEER \
* 2000 | : : : \_________\vEEEEEEEEEEEEER_______\
*/
#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
#define HW_EMULATOR_CAMERA2_SENSOR_H
#include "utils/Thread.h"
#include "utils/Mutex.h"
#include "utils/Timers.h"
#include "Scene.h"
#include "Base.h"
namespace android {
class EmulatedFakeCamera2;
class Sensor: private Thread, public virtual RefBase {
public:
// width: Width of pixel array
// height: Height of pixel array
Sensor(uint32_t width, uint32_t height);
~Sensor();
/*
* Power control
*/
status_t startUp();
status_t shutDown();
/*
* Access to scene
*/
Scene &getScene();
/*
* Controls that can be updated every frame
*/
void setExposureTime(uint64_t ns);
void setFrameDuration(uint64_t ns);
void setSensitivity(uint32_t gain);
// Buffer must be at least stride*height*2 bytes in size
void setDestinationBuffers(Buffers *buffers);
// To simplify tracking sensor's current frame
void setFrameNumber(uint32_t frameNumber);
/*
* Controls that cause reconfiguration delay
*/
void setBinning(int horizontalFactor, int verticalFactor);
/*
* Synchronizing with sensor operation (vertical sync)
*/
// Wait until the sensor outputs its next vertical sync signal, meaning it
// is starting readout of its latest frame of data. Returns true if vertical
// sync is signaled, false if the wait timed out.
bool waitForVSync(nsecs_t reltime);
// Wait until a new frame has been read out, and then return the time
// capture started. May return immediately if a new frame has been pushed
// since the last wait for a new frame. Returns true if new frame is
// returned, false if timed out.
bool waitForNewFrame(nsecs_t reltime,
nsecs_t *captureTime);
/*
* Interrupt event servicing from the sensor. Only triggers for sensor
* cycles that have valid buffers to write to.
*/
struct SensorListener {
enum Event {
EXPOSURE_START, // Start of exposure
};
virtual void onSensorEvent(uint32_t frameNumber, Event e,
nsecs_t timestamp) = 0;
virtual ~SensorListener();
};
void setSensorListener(SensorListener *listener);
/**
* Static sensor characteristics
*/
const uint32_t mResolution[2];
const uint32_t mActiveArray[4];
static const nsecs_t kExposureTimeRange[2];
static const nsecs_t kFrameDurationRange[2];
static const nsecs_t kMinVerticalBlank;
static const uint8_t kColorFilterArrangement;
// Output image data characteristics
static const uint32_t kMaxRawValue;
static const uint32_t kBlackLevel;
// Sensor sensitivity, approximate
static const float kSaturationVoltage;
static const uint32_t kSaturationElectrons;
static const float kVoltsPerLuxSecond;
static const float kElectronsPerLuxSecond;
static const float kBaseGainFactor;
static const float kReadNoiseStddevBeforeGain; // In electrons
static const float kReadNoiseStddevAfterGain; // In raw digital units
static const float kReadNoiseVarBeforeGain;
static const float kReadNoiseVarAfterGain;
// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t mRowReadoutTime;
static const int32_t kSensitivityRange[2];
static const uint32_t kDefaultSensitivity;
private:
Mutex mControlMutex; // Lock before accessing control parameters
// Start of control parameters
Condition mVSync;
bool mGotVSync;
uint64_t mExposureTime;
uint64_t mFrameDuration;
uint32_t mGainFactor;
Buffers *mNextBuffers;
uint32_t mFrameNumber;
// End of control parameters
Mutex mReadoutMutex; // Lock before accessing readout variables
// Start of readout variables
Condition mReadoutAvailable;
Condition mReadoutComplete;
Buffers *mCapturedBuffers;
nsecs_t mCaptureTime;
SensorListener *mListener;
// End of readout variables
// Time of sensor startup, used for simulation zero-time point
nsecs_t mStartupTime;
/**
* Inherited Thread virtual overrides, and members only used by the
* processing thread
*/
private:
virtual status_t readyToRun();
virtual bool threadLoop();
nsecs_t mNextCaptureTime;
Buffers *mNextCapturedBuffers;
Scene mScene;
void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
void captureNV21(uint8_t *img, uint32_t gain, uint32_t stride);
void captureDepth(uint8_t *img, uint32_t gain, uint32_t stride);
void captureDepthCloud(uint8_t *img);
};
} // namespace android
#endif // HW_EMULATOR_CAMERA2_SENSOR_H
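// Editor's sketch (not part of the original header): one frame driven
// through the Sensor above, in the spirit of EmulatedFakeCamera2. Buffer
// allocation and error paths are elided; the timeouts are illustrative.
#include "Sensor.h"

namespace android {

void exampleOneFrame(Sensor& sensor, Buffers* buffers) {
    sensor.setExposureTime(10000000ULL);   // 10 ms, in nanoseconds
    sensor.setFrameDuration(33331760ULL);  // ~30 fps
    sensor.setSensitivity(100);            // ISO 100 equivalent
    sensor.setDestinationBuffers(buffers); // consumed at the next VSync
    if (!sensor.waitForVSync(100000000)) return; // 100 ms timeout
    nsecs_t captureTime = 0;
    if (sensor.waitForNewFrame(200000000, &captureTime)) {
        // |buffers| now holds the frame, timestamped at captureTime
    }
}

} // namespace android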

View file

@ -0,0 +1,76 @@
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# JPEG stub#####################################################################
ifneq ($(TARGET_BUILD_PDK),true)
include $(CLEAR_VARS)
LOCAL_VENDOR_MODULE := true
jpeg_module_relative_path := hw
jpeg_cflags := -fno-short-enums -DQEMU_HARDWARE
jpeg_cflags += -Wno-unused-parameter
jpeg_clang_flags += -Wno-c++11-narrowing
jpeg_shared_libraries := \
libcutils \
libexif \
libjpeg \
liblog
jpeg_c_includes := external/libjpeg-turbo \
external/libexif \
frameworks/native/include
jpeg_src := \
Compressor.cpp \
JpegStub.cpp
# goldfish build ###############################################################
LOCAL_MODULE_RELATIVE_PATH := ${jpeg_module_relative_path}
LOCAL_CFLAGS += ${jpeg_cflags}
LOCAL_CLANG_CFLAGS += ${jpeg_clang_flags}
LOCAL_SHARED_LIBRARIES := ${jpeg_shared_libraries}
LOCAL_C_INCLUDES += ${jpeg_c_includes}
LOCAL_SRC_FILES := ${jpeg_src}
LOCAL_MODULE := camera.goldfish.jpeg
include $(BUILD_SHARED_LIBRARY)
# ranchu build #################################################################
include ${CLEAR_VARS}
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE := camera.ranchu.jpeg
LOCAL_MODULE_RELATIVE_PATH := ${jpeg_module_relative_path}
LOCAL_CFLAGS += ${jpeg_cflags}
LOCAL_CLANG_CFLAGS += ${jpeg_clang_flags}
LOCAL_SHARED_LIBRARIES := ${jpeg_shared_libraries}
LOCAL_C_INCLUDES += ${jpeg_c_includes}
LOCAL_SRC_FILES := ${jpeg_src}
include $(BUILD_SHARED_LIBRARY)
endif # !PDK

View file

@ -0,0 +1,234 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Compressor.h"
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_JPEGStub_Compressor"
#include <cutils/log.h>
#include <libexif/exif-data.h>
Compressor::Compressor() {
}
bool Compressor::compress(const unsigned char* data,
int width, int height, int quality,
ExifData* exifData) {
if (!configureCompressor(width, height, quality)) {
// The method will have logged a more detailed error message than we can
// provide here so just return.
return false;
}
return compressData(data, exifData);
}
const std::vector<unsigned char>& Compressor::getCompressedData() const {
return mDestManager.mBuffer;
}
bool Compressor::configureCompressor(int width, int height, int quality) {
mCompressInfo.err = jpeg_std_error(&mErrorManager);
// NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
// The compiler will not generate code to destroy them during the return
// below so they will leak. Additionally, do not place any calls to libjpeg
// that can fail above this line or any error will cause undefined behavior.
if (setjmp(mErrorManager.mJumpBuffer)) {
// This is where the error handler will jump in case setup fails
// The error manager will ALOG an appropriate error message
return false;
}
jpeg_create_compress(&mCompressInfo);
mCompressInfo.image_width = width;
mCompressInfo.image_height = height;
mCompressInfo.input_components = 3;
mCompressInfo.in_color_space = JCS_YCbCr;
jpeg_set_defaults(&mCompressInfo);
jpeg_set_quality(&mCompressInfo, quality, TRUE);
// It may seem weird to set color space here again but this will also set
// other fields. These fields might be overwritten by jpeg_set_defaults
jpeg_set_colorspace(&mCompressInfo, JCS_YCbCr);
mCompressInfo.raw_data_in = TRUE;
mCompressInfo.dct_method = JDCT_IFAST;
// Set sampling factors
mCompressInfo.comp_info[0].h_samp_factor = 2;
mCompressInfo.comp_info[0].v_samp_factor = 2;
mCompressInfo.comp_info[1].h_samp_factor = 1;
mCompressInfo.comp_info[1].v_samp_factor = 1;
mCompressInfo.comp_info[2].h_samp_factor = 1;
mCompressInfo.comp_info[2].v_samp_factor = 1;
mCompressInfo.dest = &mDestManager;
return true;
}
static void deinterleave(const uint8_t* vuPlanar, std::vector<uint8_t>& uRows,
std::vector<uint8_t>& vRows, int rowIndex, int width,
int height, int stride) {
int numRows = (height - rowIndex) / 2;
if (numRows > 8) numRows = 8;
for (int row = 0; row < numRows; ++row) {
int offset = ((rowIndex >> 1) + row) * stride;
const uint8_t* vu = vuPlanar + offset;
for (int i = 0; i < (width >> 1); ++i) {
int index = row * (width >> 1) + i;
uRows[index] = vu[1];
vRows[index] = vu[0];
vu += 2;
}
}
}
bool Compressor::compressData(const unsigned char* data, ExifData* exifData) {
const uint8_t* y[16];
const uint8_t* cb[8];
const uint8_t* cr[8];
const uint8_t** planes[3] = { y, cb, cr };
int i, offset;
int width = mCompressInfo.image_width;
int height = mCompressInfo.image_height;
const uint8_t* yPlanar = data;
const uint8_t* vuPlanar = data + (width * height);
std::vector<uint8_t> uRows(8 * (width >> 1));
std::vector<uint8_t> vRows(8 * (width >> 1));
// NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
// The compiler will not generate code to destroy them during the return
// below so they will leak. Additionally, do not place any calls to libjpeg
// that can fail above this line or any error will cause undefined behavior.
if (setjmp(mErrorManager.mJumpBuffer)) {
// This is where the error handler will jump in case compression fails
// The error manager will ALOG an appropriate error message
return false;
}
jpeg_start_compress(&mCompressInfo, TRUE);
attachExifData(exifData);
// process 16 lines of Y and 8 lines of U/V each time.
while (mCompressInfo.next_scanline < mCompressInfo.image_height) {
// Deinterleave U and V
deinterleave(vuPlanar, uRows, vRows, mCompressInfo.next_scanline,
width, height, width);
// The JPEG library ignores rows whose indices are greater than the image height.
for (i = 0; i < 16; i++) {
// y row
y[i] = yPlanar + (mCompressInfo.next_scanline + i) * width;
// construct u row and v row
if ((i & 1) == 0) {
// height and width are both halved because of downsampling
offset = (i >> 1) * (width >> 1);
cb[i/2] = &uRows[offset];
cr[i/2] = &vRows[offset];
}
}
jpeg_write_raw_data(&mCompressInfo, const_cast<JSAMPIMAGE>(planes), 16);
}
jpeg_finish_compress(&mCompressInfo);
jpeg_destroy_compress(&mCompressInfo);
return true;
}
bool Compressor::attachExifData(ExifData* exifData) {
if (exifData == nullptr) {
// This is not an error, we don't require EXIF data
return true;
}
// Save the EXIF data to memory
unsigned char* rawData = nullptr;
unsigned int size = 0;
exif_data_save_data(exifData, &rawData, &size);
if (rawData == nullptr) {
ALOGE("Failed to create EXIF data block");
return false;
}
jpeg_write_marker(&mCompressInfo, JPEG_APP0 + 1, rawData, size);
free(rawData);
return true;
}
Compressor::ErrorManager::ErrorManager() {
error_exit = &onJpegError;
}
void Compressor::ErrorManager::onJpegError(j_common_ptr cinfo) {
// NOTE! Do not construct any non-trivial objects in this method at the top
// scope. Their destructors will not be called. If you do need such an
// object create a local scope that does not include the longjmp call,
// that ensures the object is destroyed before longjmp is called.
ErrorManager* errorManager = reinterpret_cast<ErrorManager*>(cinfo->err);
// Format and log error message
char errorMessage[JMSG_LENGTH_MAX];
(*errorManager->format_message)(cinfo, errorMessage);
errorMessage[sizeof(errorMessage) - 1] = '\0';
ALOGE("JPEG compression error: %s", errorMessage);
jpeg_destroy(cinfo);
// And through the looking glass we go
longjmp(errorManager->mJumpBuffer, 1);
}
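// Editor's sketch (not part of the original file): the setjmp/longjmp
// recovery pattern used by the error manager above, reduced to a minimal
// standalone form. The names gJumpBuffer/runGuardedStep are ours.
static jmp_buf gJumpBuffer;

static void stepThatFails() {
    longjmp(gJumpBuffer, 1); // plays the role of onJpegError()
}

static bool runGuardedStep() {
    // As the warnings above stress: no non-trivial C++ objects may live in
    // this scope, because longjmp skips their destructors.
    if (setjmp(gJumpBuffer)) {
        return false; // we land here after the longjmp
    }
    stepThatFails();
    return true; // unreachable in this sketch
}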
Compressor::DestinationManager::DestinationManager() {
init_destination = &initDestination;
empty_output_buffer = &emptyOutputBuffer;
term_destination = &termDestination;
}
void Compressor::DestinationManager::initDestination(j_compress_ptr cinfo) {
auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
// Start out with some arbitrary but not too large buffer size
manager->mBuffer.resize(16 * 1024);
manager->next_output_byte = &manager->mBuffer[0];
manager->free_in_buffer = manager->mBuffer.size();
}
boolean Compressor::DestinationManager::emptyOutputBuffer(
j_compress_ptr cinfo) {
auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
// Keep doubling the size of the buffer for a very low, amortized
// performance cost of the allocations
size_t oldSize = manager->mBuffer.size();
manager->mBuffer.resize(oldSize * 2);
manager->next_output_byte = &manager->mBuffer[oldSize];
manager->free_in_buffer = manager->mBuffer.size() - oldSize;
return manager->free_in_buffer != 0;
}
void Compressor::DestinationManager::termDestination(j_compress_ptr cinfo) {
auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
// Resize down to the exact size of the output, that is remove as many
// bytes as there are left in the buffer
manager->mBuffer.resize(manager->mBuffer.size() - manager->free_in_buffer);
}
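// Editor's sketch (not part of the original file): the NV21 chroma layout
// handled by deinterleave() above. NV21 stores a full-resolution Y plane
// followed by a half-resolution plane of interleaved V,U pairs, while
// libjpeg's raw-data mode wants separate U and V rows. Names are ours.
#include <cstdint>
#include <vector>

static void splitVURow(const uint8_t* vu, int width,
                       std::vector<uint8_t>& uRow,
                       std::vector<uint8_t>& vRow) {
    uRow.resize(width / 2);
    vRow.resize(width / 2);
    for (int i = 0; i < width / 2; ++i) {
        vRow[i] = vu[2 * i];     // V comes first in NV21
        uRow[i] = vu[2 * i + 1]; // then U
    }
}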

View file

@ -0,0 +1,77 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GOLDFISH_CAMERA_JPEG_STUB_COMPRESSOR_H
#define GOLDFISH_CAMERA_JPEG_STUB_COMPRESSOR_H
#include <setjmp.h>
#include <stdlib.h>
extern "C" {
#include <jpeglib.h>
#include <jerror.h>
}
#include <vector>
struct _ExifData;
typedef _ExifData ExifData;
class Compressor {
public:
Compressor();
/* Compress |data| which represents raw NV21 encoded data of dimensions
* |width| * |height|. |exifData| is optional EXIF data that will be
* attached to the compressed data if present, set to null if not needed.
*/
bool compress(const unsigned char* data,
int width, int height, int quality,
ExifData* exifData);
/* Get a reference to the compressed data, this will return an empty vector
* if compress has not been called yet
*/
const std::vector<unsigned char>& getCompressedData() const;
private:
struct DestinationManager : jpeg_destination_mgr {
DestinationManager();
static void initDestination(j_compress_ptr cinfo);
static boolean emptyOutputBuffer(j_compress_ptr cinfo);
static void termDestination(j_compress_ptr cinfo);
std::vector<unsigned char> mBuffer;
};
struct ErrorManager : jpeg_error_mgr {
ErrorManager();
static void onJpegError(j_common_ptr cinfo);
jmp_buf mJumpBuffer;
};
jpeg_compress_struct mCompressInfo;
DestinationManager mDestManager;
ErrorManager mErrorManager;
bool configureCompressor(int width, int height, int quality);
bool compressData(const unsigned char* data, ExifData* exifData);
bool attachExifData(ExifData* exifData);
};
#endif // GOLDFISH_CAMERA_JPEG_STUB_COMPRESSOR_H
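// Editor's sketch (not part of the original header): typical use of the
// Compressor class above on one NV21 frame; the frame is left zeroed and
// the quality value is arbitrary.
#include <vector>
#include "Compressor.h"

bool exampleCompress(int width, int height) {
    std::vector<unsigned char> nv21(width * height * 3 / 2); // Y + VU planes
    Compressor compressor;
    if (!compressor.compress(nv21.data(), width, height, 90, nullptr)) {
        return false; // the error manager has already logged the cause
    }
    return !compressor.getCompressedData().empty();
}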

View file

@ -0,0 +1,67 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "JpegStub.h"
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_JPEGStub"
#include <errno.h>
#include <cutils/log.h>
#include <stdlib.h>
#include "Compressor.h"
extern "C" void JpegStub_init(JpegStub* stub) {
stub->mCompressor = static_cast<void*>(new Compressor());
}
extern "C" void JpegStub_cleanup(JpegStub* stub) {
delete reinterpret_cast<Compressor*>(stub->mCompressor);
stub->mCompressor = nullptr;
}
extern "C" int JpegStub_compress(JpegStub* stub,
const void* buffer,
int width,
int height,
int quality,
ExifData* exifData)
{
Compressor* compressor = reinterpret_cast<Compressor*>(stub->mCompressor);
if (compressor->compress(reinterpret_cast<const unsigned char*>(buffer),
width, height, quality, exifData)) {
ALOGV("%s: Compressed JPEG: %d[%dx%d] -> %zu bytes",
__FUNCTION__, (width * height * 12) / 8,
width, height, compressor->getCompressedData().size());
return 0;
}
ALOGE("%s: JPEG compression failed", __FUNCTION__);
return errno ? errno : EINVAL;
}
extern "C" void JpegStub_getCompressedImage(JpegStub* stub, void* buff) {
Compressor* compressor = reinterpret_cast<Compressor*>(stub->mCompressor);
const std::vector<unsigned char>& data = compressor->getCompressedData();
memcpy(buff, &data[0], data.size());
}
extern "C" size_t JpegStub_getCompressedSize(JpegStub* stub) {
Compressor* compressor = reinterpret_cast<Compressor*>(stub->mCompressor);
return compressor->getCompressedData().size();
}
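// Editor's sketch (not part of the original file): the C-style lifecycle of
// the stub above as a caller would drive it; error handling is trimmed.
#include <vector>

bool exampleStubUsage(const void* nv21Frame, int width, int height) {
    JpegStub stub;
    JpegStub_init(&stub);
    bool ok = JpegStub_compress(&stub, nv21Frame, width, height,
                                90 /* quality */, nullptr) == 0;
    if (ok) {
        std::vector<unsigned char> jpeg(JpegStub_getCompressedSize(&stub));
        JpegStub_getCompressedImage(&stub, jpeg.data());
    }
    JpegStub_cleanup(&stub);
    return ok;
}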

View file

@ -0,0 +1,43 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef JPEGSTUB_H_
#define JPEGSTUB_H_
#include <stddef.h>
struct _ExifData;
typedef _ExifData ExifData;
extern "C" {
struct JpegStub {
void* mCompressor;
};
void JpegStub_init(JpegStub* stub);
void JpegStub_cleanup(JpegStub* stub);
int JpegStub_compress(JpegStub* stub,
const void* image,
int width,
int height,
int quality,
ExifData* exifData);
void JpegStub_getCompressedImage(JpegStub* stub, void* buff);
size_t JpegStub_getCompressedSize(JpegStub* stub);
};
#endif // JPEGSTUB_H_

View file

@ -0,0 +1,88 @@
<?xml version="1.0" encoding="utf-8" ?>
<!-- Copyright (C) 2012 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
<!DOCTYPE MediaCodecs [
<!ELEMENT Include EMPTY>
<!ATTLIST Include href CDATA #REQUIRED>
<!ELEMENT MediaCodecs (Decoders|Encoders|Include)*>
<!ELEMENT Decoders (MediaCodec|Include)*>
<!ELEMENT Encoders (MediaCodec|Include)*>
<!ELEMENT MediaCodec (Type|Quirk|Include)*>
<!ATTLIST MediaCodec name CDATA #REQUIRED>
<!ATTLIST MediaCodec type CDATA>
<!ELEMENT Type EMPTY>
<!ATTLIST Type name CDATA #REQUIRED>
<!ELEMENT Quirk EMPTY>
<!ATTLIST Quirk name CDATA #REQUIRED>
]>
There's a simple and a complex syntax to declare the availability of a
media codec:
A codec that properly follows the OpenMax spec and therefore doesn't have any
quirks and that only supports a single content type can be declared like so:
<MediaCodec name="OMX.foo.bar" type="something/interesting" />
If a codec has quirks OR supports multiple content types, the following syntax
can be used:
<MediaCodec name="OMX.foo.bar" >
<Type name="something/interesting" />
<Type name="something/else" />
...
<Quirk name="requires-allocate-on-input-ports" />
<Quirk name="requires-allocate-on-output-ports" />
<Quirk name="output-buffers-are-unreadable" />
</MediaCodec>
Only the three quirks included above are recognized at this point:
"requires-allocate-on-input-ports"
must be advertised if the component does not properly support specification
of input buffers using the OMX_UseBuffer(...) API but instead requires
OMX_AllocateBuffer to be used.
"requires-allocate-on-output-ports"
must be advertised if the component does not properly support specification
of output buffers using the OMX_UseBuffer(...) API but instead requires
OMX_AllocateBuffer to be used.
"output-buffers-are-unreadable"
must be advertised if the emitted output buffers of a decoder component
are not readable, i.e. they use a custom format while advertising one of
the official OMX colorspace constants.
Clients of such decoders will not be able to access the decoded data,
naturally making the component much less useful. The only use for
a component with this quirk is to render the output to the screen.
Audio decoders MUST NOT advertise this quirk.
Video decoders that advertise this quirk must be accompanied by a
corresponding color space converter for thumbnail extraction,
matching surfaceflinger support that can render the custom format to
a texture and possibly other code, so just DON'T USE THIS QUIRK.
-->
<MediaCodecs>
<Settings>
<Setting name="max-video-encoder-input-buffers" value="12" />
</Settings>
<Include href="media_codecs_google_audio.xml" />
<Include href="media_codecs_google_telephony.xml" />
<Include href="media_codecs_google_video.xml" />
</MediaCodecs>

View file

@ -0,0 +1,106 @@
<?xml version="1.0" encoding="utf-8" ?>
<!-- Copyright (C) 2014 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<Included>
<Decoders>
<MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es">
<!-- profiles and levels: ProfileSimple : Level3 -->
<Limit name="size" min="2x2" max="352x288" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Limit name="blocks-per-second" range="12-11880" />
<Limit name="bitrate" range="1-384000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.h263.decoder" type="video/3gpp">
<!-- profiles and levels: ProfileBaseline : Level30, ProfileBaseline : Level45
ProfileISWV2 : Level30, ProfileISWV2 : Level45 -->
<Limit name="size" min="2x2" max="352x288" />
<Limit name="alignment" value="2x2" />
<Limit name="bitrate" range="1-384000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.decoder" type="video/avc">
<!-- profiles and levels: ProfileHigh : Level41 -->
<Limit name="size" min="16x16" max="1920x1088" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Limit name="blocks-per-second" range="1-244800" />
<Limit name="bitrate" range="1-12000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
<Limit name="block-count" range="1-139264" />
<Limit name="blocks-per-second" range="1-2000000" />
<Limit name="bitrate" range="1-10000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8">
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Limit name="blocks-per-second" range="1-1000000" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9">
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Limit name="blocks-per-second" range="1-500000" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
</Decoders>
<Encoders>
<MediaCodec name="OMX.google.h263.encoder" type="video/3gpp">
<!-- profiles and levels: ProfileBaseline : Level45 -->
<Limit name="size" min="176x144" max="176x144" />
<Limit name="alignment" value="16x16" />
<Limit name="bitrate" range="1-128000" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.encoder" type="video/avc">
<!-- profiles and levels: ProfileBaseline : Level41 -->
<Limit name="size" min="16x16" max="1920x1088" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Limit name="blocks-per-second" range="1-244800" />
<!-- Changed range from 12000000 to 20000000 for b/31648354 -->
<Limit name="bitrate" range="1-20000000" />
<Feature name="intra-refresh" />
</MediaCodec>
<MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es">
<!-- profiles and levels: ProfileCore : Level2 -->
<Limit name="size" min="16x16" max="176x144" />
<Limit name="alignment" value="16x16" />
<Limit name="block-size" value="16x16" />
<Limit name="blocks-per-second" range="12-1485" />
<Limit name="bitrate" range="1-64000" />
</MediaCodec>
<MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8">
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
</Encoders>
</Included>

View file

@ -0,0 +1,109 @@
<?xml version="1.0" encoding="utf-8" ?>
<!-- Copyright 2015 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
This file was generated from running the following tests:
module CtsVideoTestCases test android.video.cts.VideoEncoderDecoderTest
module CtsMediaTestCases test android.media.cts.VideoDecoderPerfTest
System: z840
The results were fed through a script similar to get_achievable_rates.py:
https://source.android.com/devices/media/oem.html
-->
<MediaCodecs>
<Encoders>
<MediaCodec name="OMX.google.h263.encoder" type="video/3gpp" update="true">
<!-- 3 runs, min 849 max 1008 gmean 943 -->
<Limit name="measured-frame-rate-176x144" range="849-1008" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.encoder" type="video/avc" update="true">
<!-- 3 runs, min 496 max 629 gmean 565 -->
<Limit name="measured-frame-rate-320x240" range="496-629" />
<!-- 2 runs, min 197 max 203 gmean 201 -->
<Limit name="measured-frame-rate-720x480" range="197-203" />
<!-- 2 runs, min 93 max 97 gmean 95 -->
<Limit name="measured-frame-rate-1280x720" range="93-97" />
<!-- 2 runs, min 45 max 47 gmean 46 -->
<Limit name="measured-frame-rate-1920x1080" range="45-47" />
</MediaCodec>
<MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es" update="true">
<!-- 3 runs, min 881 max 1142 gmean 994 -->
<Limit name="measured-frame-rate-176x144" range="881-1142" />
</MediaCodec>
<MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8" update="true">
<!-- 3 runs, min 249 max 285 gmean 264 -->
<Limit name="measured-frame-rate-320x180" range="249-285" />
<!-- 3 runs, min 104 max 115 gmean 109 -->
<Limit name="measured-frame-rate-640x360" range="104-115" />
<!-- 3 runs, min 34 max 35 gmean 34 -->
<Limit name="measured-frame-rate-1280x720" range="34-35" />
<!-- 3 runs, min 26 max 29 gmean 27 -->
<Limit name="measured-frame-rate-1920x1080" range="26-29" />
</MediaCodec>
</Encoders>
<Decoders>
<MediaCodec name="OMX.google.h263.decoder" type="video/3gpp" update="true">
<!-- 3 runs, min 1246 max 1390 gmean 1342 -->
<Limit name="measured-frame-rate-176x144" range="1246-1390" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.decoder" type="video/avc" update="true">
<!-- 5 runs, min 299 max 629 gmean 567 -->
<Limit name="measured-frame-rate-320x240" range="299-629" />
<!-- 4 runs, min 215 max 250 gmean 232 -->
<Limit name="measured-frame-rate-720x480" range="215-250" />
<!-- 4 runs, min 75 max 85 gmean 78 -->
<Limit name="measured-frame-rate-1280x720" range="75-85" />
<!-- 4 runs, min 31 max 34 gmean 33 -->
<Limit name="measured-frame-rate-1920x1080" range="31-34" />
</MediaCodec>
<MediaCodec name="OMX.google.hevc.decoder" type="video/hevc" update="true">
<!-- 4 runs, min 754 max 817 gmean 775 -->
<Limit name="measured-frame-rate-352x288" range="754-817" />
<!-- 4 runs, min 323 max 394 gmean 373 -->
<Limit name="measured-frame-rate-640x360" range="323-394" />
<!-- 4 runs, min 349 max 372 gmean 358 -->
<Limit name="measured-frame-rate-720x480" range="349-372" />
<!-- 4 runs, min 144 max 157 gmean 151 -->
<Limit name="measured-frame-rate-1280x720" range="144-157" />
<!-- 4 runs, min 74 max 85 gmean 80 -->
<Limit name="measured-frame-rate-1920x1080" range="74-85" />
</MediaCodec>
<MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es" update="true">
<!-- 4 runs, min 1439 max 1625 gmean 1523 -->
<Limit name="measured-frame-rate-176x144" range="1439-1625" />
</MediaCodec>
<MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8" update="true">
<!-- 3 runs, min 1129 max 1261 gmean 1190 -->
<Limit name="measured-frame-rate-320x180" range="1129-1261" />
<!-- 3 runs, min 471 max 525 gmean 504 -->
<Limit name="measured-frame-rate-640x360" range="471-525" />
<!-- 3 runs, min 126 max 145 gmean 132 -->
<Limit name="measured-frame-rate-1280x720" range="126-145" />
<!-- 3 runs, min 48 max 51 gmean 49 -->
<Limit name="measured-frame-rate-1920x1080" range="48-51" />
</MediaCodec>
<MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9" update="true">
<!-- 2 runs, min 968 max 1101 gmean 1044 -->
<Limit name="measured-frame-rate-320x180" range="968-1101" />
<!-- 3 runs, min 291 max 338 gmean 319 -->
<Limit name="measured-frame-rate-640x360" range="291-338" />
<!-- These values come from buildbots -->
<Limit name="measured-frame-rate-1280x720" range="280-400" />
<!-- Buildbots measure ~180 on the first run and ~230 on the second run -->
<Limit name="measured-frame-rate-1920x1080" range="178-240" />
</MediaCodec>
</Decoders>
</MediaCodecs>
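
The measured-frame-rate ranges above are the data behind MediaCodecInfo.VideoCapabilities.getAchievableFrameRatesFor() (API 23+). A short sketch, assuming the named codec exists on the device; the helper name queryAchievable is illustrative:

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.util.Range;

// Illustrative helper: looks up the published achievable frame-rate range
// for a codec at a given resolution, or returns null if none is declared.
public final class AchievableRates {
    public static Range<Double> queryAchievable(String codecName, String mime,
                                                int width, int height) {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (!info.getName().equals(codecName)) continue;
            MediaCodecInfo.VideoCapabilities video =
                    info.getCapabilitiesForType(mime).getVideoCapabilities();
            return video.getAchievableFrameRatesFor(width, height);
        }
        return null;
    }
}

For example, queryAchievable("OMX.google.vp8.decoder", "video/x-vnd.on2.vp8", 1280, 720) should report a range consistent with the measured-frame-rate-1280x720 entry above (126-145).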

View file

@ -0,0 +1,368 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2010 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!DOCTYPE MediaSettings [
<!ELEMENT MediaSettings (CamcorderProfiles+,
EncoderOutputFileFormat+,
VideoEncoderCap+,
AudioEncoderCap+,
VideoDecoderCap,
AudioDecoderCap)>
<!ELEMENT CamcorderProfiles (EncoderProfile+, ImageEncoding+, ImageDecoding, Camera?)>
<!ATTLIST CamcorderProfiles cameraId (0|1|2|3|4|5|6) #REQUIRED>
<!ELEMENT EncoderProfile (Video, Audio)>
<!ATTLIST EncoderProfile quality (qvga|timelapseqcif) #REQUIRED>
<!ATTLIST EncoderProfile fileFormat (mp4|3gp) #REQUIRED>
<!ATTLIST EncoderProfile duration (30|60) #REQUIRED>
<!ELEMENT Video EMPTY>
<!ATTLIST Video codec (h264|h263|m4v) #REQUIRED>
<!ATTLIST Video bitRate CDATA #REQUIRED>
<!ATTLIST Video width CDATA #REQUIRED>
<!ATTLIST Video height CDATA #REQUIRED>
<!ATTLIST Video frameRate CDATA #REQUIRED>
<!ELEMENT Audio EMPTY>
<!ATTLIST Audio codec (amrnb|amrwb|aac) #REQUIRED>
<!ATTLIST Audio bitRate CDATA #REQUIRED>
<!ATTLIST Audio sampleRate CDATA #REQUIRED>
<!ATTLIST Audio channels (1|2) #REQUIRED>
<!ELEMENT ImageEncoding EMPTY>
<!ATTLIST ImageEncoding quality (95|90|80|70|60|50|40) #REQUIRED>
<!ELEMENT ImageDecoding EMPTY>
<!ATTLIST ImageDecoding memCap CDATA #REQUIRED>
<!ELEMENT Camera EMPTY>
<!ELEMENT EncoderOutputFileFormat EMPTY>
<!ATTLIST EncoderOutputFileFormat name (mp4|3gp) #REQUIRED>
<!ELEMENT VideoEncoderCap EMPTY>
<!ATTLIST VideoEncoderCap name (h264|h263|m4v|wmv) #REQUIRED>
<!ATTLIST VideoEncoderCap enabled (true|false) #REQUIRED>
<!ATTLIST VideoEncoderCap minBitRate CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap maxBitRate CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap minFrameWidth CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap maxFrameWidth CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap minFrameHeight CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap maxFrameHeight CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap minFrameRate CDATA #REQUIRED>
<!ATTLIST VideoEncoderCap maxFrameRate CDATA #REQUIRED>
<!ELEMENT AudioEncoderCap EMPTY>
<!ATTLIST AudioEncoderCap name (amrnb|amrwb|aac|wma) #REQUIRED>
<!ATTLIST AudioEncoderCap enabled (true|false) #REQUIRED>
<!ATTLIST AudioEncoderCap minBitRate CDATA #REQUIRED>
<!ATTLIST AudioEncoderCap maxBitRate CDATA #REQUIRED>
<!ATTLIST AudioEncoderCap minSampleRate CDATA #REQUIRED>
<!ATTLIST AudioEncoderCap maxSampleRate CDATA #REQUIRED>
<!ATTLIST AudioEncoderCap minChannels (1|2) #REQUIRED>
<!ATTLIST AudioEncoderCap maxChannels (1|2) #REQUIRED>
<!ELEMENT VideoDecoderCap EMPTY>
<!ATTLIST VideoDecoderCap name (wmv) #REQUIRED>
<!ATTLIST VideoDecoderCap enabled (true|false) #REQUIRED>
<!ELEMENT AudioDecoderCap EMPTY>
<!ATTLIST AudioDecoderCap name (wma) #REQUIRED>
<!ATTLIST AudioDecoderCap enabled (true|false) #REQUIRED>
]>
<!--
This file declares the multimedia profiles and capabilities
of an Android-powered device.
-->
<MediaSettings>
<!-- Each camcorder profile defines a set of predefined configuration parameters -->
<CamcorderProfiles cameraId="0">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="24" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<CamcorderProfiles cameraId="1">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="15" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<CamcorderProfiles cameraId="2">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="15" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<CamcorderProfiles cameraId="3">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="15" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<CamcorderProfiles cameraId="4">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="15" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<CamcorderProfiles cameraId="5">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="15" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<CamcorderProfiles cameraId="6">
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
<Video codec="m4v"
bitRate="128000"
width="320"
height="240"
frameRate="15" />
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
<Video codec="h264"
bitRate="192000"
width="176"
height="144"
frameRate="30" />
<!-- audio setting is ignored -->
<Audio codec="amrnb"
bitRate="12200"
sampleRate="8000"
channels="1" />
</EncoderProfile>
<ImageEncoding quality="95" />
<ImageEncoding quality="80" />
<ImageEncoding quality="70" />
<ImageDecoding memCap="20000000" />
</CamcorderProfiles>
<EncoderOutputFileFormat name="3gp" />
<EncoderOutputFileFormat name="mp4" />
<!--
If a codec is not enabled, it is invisible to applications:
they can neither use the codec nor query its capabilities.
-->
<VideoEncoderCap name="h264" enabled="true"
minBitRate="64000" maxBitRate="192000"
minFrameWidth="176" maxFrameWidth="640"
minFrameHeight="144" maxFrameHeight="480"
minFrameRate="15" maxFrameRate="30" />
<VideoEncoderCap name="h263" enabled="true"
minBitRate="64000" maxBitRate="192000"
minFrameWidth="176" maxFrameWidth="640"
minFrameHeight="144" maxFrameHeight="480"
minFrameRate="15" maxFrameRate="30" />
<VideoEncoderCap name="m4v" enabled="true"
minBitRate="64000" maxBitRate="192000"
minFrameWidth="176" maxFrameWidth="640"
minFrameHeight="144" maxFrameHeight="480"
minFrameRate="15" maxFrameRate="30" />
<AudioEncoderCap name="aac" enabled="true"
minBitRate="8000" maxBitRate="96000"
minSampleRate="8000" maxSampleRate="48000"
minChannels="1" maxChannels="1" />
<AudioEncoderCap name="amrwb" enabled="true"
minBitRate="6600" maxBitRate="23050"
minSampleRate="16000" maxSampleRate="16000"
minChannels="1" maxChannels="1" />
<AudioEncoderCap name="amrnb" enabled="true"
minBitRate="5525" maxBitRate="12200"
minSampleRate="8000" maxSampleRate="8000"
minChannels="1" maxChannels="1" />
<!--
FIXME:
Decoder capabilities are not checked at present; we only check
whether Windows Media is visible to TEST applications. No checks
are performed for other applications.
-->
<VideoDecoderCap name="wmv" enabled="false"/>
<AudioDecoderCap name="wma" enabled="false"/>
</MediaSettings>
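
The CamcorderProfiles declared above back android.media.CamcorderProfile. A minimal Java sketch, assuming a device whose media_profiles.xml contains the qvga entries above; ProfileReader and configure are illustrative names, the CamcorderProfile and MediaRecorder calls are standard SDK APIs:

import android.media.CamcorderProfile;
import android.media.MediaRecorder;

// Illustrative reader: fetches the quality="qvga" profile for a camera and
// applies it to a MediaRecorder in one call.
public final class ProfileReader {
    public static void configure(MediaRecorder recorder, int cameraId) {
        if (!CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA)) {
            return; // this camera declares no qvga profile
        }
        CamcorderProfile p = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_QVGA);
        // For cameraId=0 above: 320x240 @ 24 fps, 128 kbps m4v video,
        // 12.2 kbps 8 kHz mono AMR-NB audio, mp4 container.
        recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        recorder.setProfile(p); // applies container, codecs, and rates together
    }
}

Note that setProfile() must be called after the audio and video sources are set, which is why the sketch orders the calls that way.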