upload android base code part4

This commit is contained in:
August 2018-08-08 17:00:29 +08:00
parent b9e30e05b1
commit 78ea2404cd
23455 changed files with 5250148 additions and 0 deletions

View file

@ -0,0 +1,16 @@
# Copyright 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Only descend into the Easel camera makefiles on devices that ship the
# Easel coprocessor (BOARD_USES_EASEL set by the device's BoardConfig.mk).
ifeq ($(BOARD_USES_EASEL),true)
include $(call all-subdir-makefiles)
endif

View file

@ -0,0 +1,126 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HDR_PLUS_PROFILER_H
#define HDR_PLUS_PROFILER_H

// Profiling is compiled out unless the build explicitly enables it.
#ifndef ENABLE_HDRPLUS_PROFILER
#define ENABLE_HDRPLUS_PROFILER 0
#endif

#if !ENABLE_HDRPLUS_PROFILER

// If profiler is not enabled, make every macro a noop.
#define DECLARE_PROFILER_TIMER(_var, _description)
#define START_PROFILER_TIMER(_var) do {} while(0)
#define END_PROFILER_TIMER(_var) do {} while(0)
#define SCOPE_PROFILER_TIMER(_description) do {} while(0)

#else

#include <string>

/*
 * Declare a profiler timer.
 *
 * _var is the variable that will be declared as a timer.
 * _description is the description for this timer. It will be used when logging the timer duration.
 */
#define DECLARE_PROFILER_TIMER(_var, _description) pbcamera::TimerLogger _var = {_description}

/*
 * Start a timer.
 *
 * _var is a timer declared with DECLARE_PROFILER_TIMER.
 */
#define START_PROFILER_TIMER(_var) ((_var).start())

/*
 * End a timer and log the duration since last start.
 *
 * _var is a timer declared with DECLARE_PROFILER_TIMER.
 */
#define END_PROFILER_TIMER(_var) ((_var).end())

/*
 * Declare a scope timer that starts now and ends when it goes out of scope.
 *
 * _description is the description for this timer. It will be used when logging the timer duration.
 */
#define SCOPE_PROFILER_TIMER(_description) pbcamera::ScopeTimerLogger _timer(_description)

namespace pbcamera {

// Prefix used in every profiler log line so entries are easy to grep.
#define TIMER_TAG "[PROFILE_TIMER]"
/**
 * TimerLogger measures the wall-clock time between a start() and the next
 * end() call and writes the result to the log, tagged with TIMER_TAG and the
 * timer's name. Uses CLOCK_BOOTTIME so time spent in suspend is included.
 */
class TimerLogger {
public:
    TimerLogger(const char *name) : mName(name), mInvalid(true) {};

    // Capture the start timestamp. The timer becomes invalid if the clock
    // read fails.
    void start() {
        mInvalid = (clock_gettime(kClockId, &mStartTime) != 0);
    }

    // Log the elapsed time since the last successful start(). Logs an error
    // instead if start() failed (or was never called) or the clock read fails.
    void end() {
        if (mInvalid) {
            ALOGE("%s <%s> start time is invalid.", TIMER_TAG, mName.c_str());
            return;
        }
        struct timespec now;
        mInvalid = (clock_gettime(kClockId, &now) != 0);
        if (mInvalid) {
            ALOGE("%s <%s> end time is invalid.", TIMER_TAG, mName.c_str());
            return;
        }
        int64_t elapsedNs = toNs(now) - toNs(mStartTime);
        ALOGI("%s <%s> took %f ms.", TIMER_TAG, mName.c_str(),
                static_cast<float>(elapsedNs) / kNsPerMs);
    }

private:
    // Collapse a timespec into a single nanosecond count.
    static int64_t toNs(const struct timespec &t) {
        return static_cast<int64_t>(t.tv_sec) * kNsPerSec + t.tv_nsec;
    }

    const static int64_t kNsPerSec = 1000000000;
    const static int64_t kNsPerMs = 1000000;
    const static clockid_t kClockId = CLOCK_BOOTTIME;

    std::string mName;
    struct timespec mStartTime;
    bool mInvalid;
};
/**
 * ScopeTimerLogger provides a timer to log the duration of the instance lifetime.
 *
 * The wrapped TimerLogger is started in the constructor and ended (which
 * logs the elapsed time) in the destructor, RAII-style.
 */
class ScopeTimerLogger {
public:
    ScopeTimerLogger(const char *name) : mTimerLogger(name) { mTimerLogger.start(); };
    virtual ~ScopeTimerLogger() { mTimerLogger.end(); };
private:
    TimerLogger mTimerLogger;
};
} // namespace pbcamera
#endif // !ENABLE_HDRPLUS_PROFILER
#endif // HDR_PLUS_PROFILER_H

View file

@ -0,0 +1,538 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HDR_PLUS_TYPES_H
#define HDR_PLUS_TYPES_H
#include <array>
#include <stdint.h>
#include <string>
#include <vector>
namespace pbcamera {
// This file defines the common types used in HDR+ client and HDR+ service API.
typedef int32_t status_t;
/*
* ImageConfiguration and PlaneConfiguration define the layout of a buffer.
* The following is an example of a NV21 buffer.
*
* <-------Y stride (in bytes)------->
* <----width (in pixels)---->
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . ^ ^
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . height Y scanline
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . (in lines) (in lines)
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
* Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . . v |
* . . . . . . . . . . . . . . . . . . |
* . . . . . . . . . . . . . . . . . . v
* <------V/U stride (in bytes)------>
* V U V U V U V U V U V U V U . . . . ^
* V U V U V U V U V U V U V U . . . . |
* V U V U V U V U V U V U V U . . . . |
* V U V U V U V U V U V U V U . . . . V/U scanline
* V U V U V U V U V U V U V U . . . . (in lines)
* . . . . . . . . . . . . . . . . . . |
* . . . . . . . . . . . . . . . . . . v
* . . . . . . . . . . . . . . . . . . -> Image padding.
*/
// PlaneConfiguration describes the memory layout of one image plane.
struct PlaneConfiguration {
    // Number of bytes in each line including padding.
    uint32_t stride;
    // Number of lines vertically including padding.
    uint32_t scanline;

    PlaneConfiguration() : stride(0), scanline(0) {};

    // Two plane configurations are equal when both layout fields match.
    bool operator==(const PlaneConfiguration &other) const {
        if (stride != other.stride) return false;
        return scanline == other.scanline;
    }

    bool operator!=(const PlaneConfiguration &other) const {
        return !(*this == other);
    }
};
// ImageConfiguration describes a whole image: its dimensions, pixel format,
// per-plane layout, and trailing padding.
struct ImageConfiguration {
    // Image width.
    uint32_t width;
    // Image height.
    uint32_t height;
    // Image format.
    int format;
    // Configuration for each plane.
    std::vector<PlaneConfiguration> planes;
    // Number of padded bytes after the last plane.
    uint32_t padding;

    ImageConfiguration() : width(0), height(0), format(0), padding(0) {};

    // Field-by-field equality, including every plane.
    bool operator==(const ImageConfiguration &other) const {
        if (width != other.width || height != other.height) return false;
        if (format != other.format) return false;
        return planes == other.planes && padding == other.padding;
    }

    bool operator!=(const ImageConfiguration &other) const {
        return !(*this == other);
    }
};
/*
 * StreamConfiguration defines a stream's configuration, such as its image buffer resolution, used
 * during stream configuration.
 */
struct StreamConfiguration {
    /*
     * Unique ID of the stream. Each stream must have an unique ID so it can be used to identify
     * the output streams of a StreamBuffer in CaptureRequest.
     */
    // NOTE(review): unlike ImageConfiguration, this struct has no default
    // constructor, so `id` is uninitialized until the caller sets it —
    // confirm all producers assign it before use.
    uint32_t id;
    // Image configuration.
    ImageConfiguration image;

    bool operator==(const StreamConfiguration &other) const {
        return id == other.id &&
               image == other.image;
    }

    bool operator!=(const StreamConfiguration &other) const {
        return !(*this == other);
    }
};
/*
 * SensorMode contains the sensor mode information.
 */
struct SensorMode {
    // Usually 0 is back camera and 1 is front camera.
    uint32_t cameraId;
    // Pixel array resolution.
    uint32_t pixelArrayWidth;
    uint32_t pixelArrayHeight;
    // Active array resolution.
    uint32_t activeArrayWidth;
    uint32_t activeArrayHeight;
    // Sensor output pixel clock.
    uint32_t outputPixelClkHz;
    // Sensor timestamp offset due to gyro calibration. When comparing timestamps between AP and
    // Easel, this offset should be subtracted from AP timestamp.
    int64_t timestampOffsetNs;
    // Sensor timestamp offset due to sensor cropping. When comparing timestamps between AP and
    // Easel, this offset should be subtracted from AP timestamp.
    int64_t timestampCropOffsetNs;
    // Sensor output format as defined in android_pixel_format.
    int format;

    // Zero-initialize every field. The original constructor left
    // timestampOffsetNs, timestampCropOffsetNs, and format uninitialized,
    // which made default-constructed instances carry garbage offsets.
    SensorMode() : cameraId(0), pixelArrayWidth(0), pixelArrayHeight(0), activeArrayWidth(0),
                   activeArrayHeight(0), outputPixelClkHz(0), timestampOffsetNs(0),
                   timestampCropOffsetNs(0), format(0) {};
};
/*
 * InputConfiguration defines the input configuration for HDR+ service.
 *
 * Exactly one of sensorMode / streamConfig is meaningful, selected by
 * isSensorInput.
 */
struct InputConfiguration {
    // Whether the input frames come from sensor MIPI or AP. If true, HDR+ service will get input
    // frames from sensor and sensorMode contains the sensor mode information. If false, HDR+
    // service will get input frames from AP and streamConfig contains the input stream
    // configuration.
    bool isSensorInput;
    // Sensor mode if isSensorInput is true.
    SensorMode sensorMode;
    // Input stream configuration if isSensorInput is false.
    StreamConfiguration streamConfig;

    // Defaults to AP input; the caller must fill in the matching member.
    InputConfiguration() : isSensorInput(false) {};
};
/*
 * StreamBuffer defines a buffer in a stream.
 *
 * Plain data holder; all fields are filled by the owner of the buffer.
 * Note there is no constructor, so fields are uninitialized by default.
 */
struct StreamBuffer {
    // ID of the stream that this buffer belongs to.
    uint32_t streamId;
    // DMA buffer fd for this buffer if it's an ION buffer.
    int32_t dmaBufFd;
    // Pointer to the data of this buffer. Not owned by this struct.
    void* data;
    // Size of the allocated data.
    uint32_t dataSize;
};
/*
 * CaptureRequest defines a capture request that HDR+ client sends to HDR+ service.
 */
struct CaptureRequest {
    /*
     * ID of the capture request. Each capture request must have an unique ID. When HDR+ service
     * sends a CaptureResult to HDR+ client for this request, CaptureResult.requestId will be
     * assigned to this ID.
     */
    uint32_t id;
    /*
     * Output buffers of the request. The buffers will be filled with captured image when HDR+
     * service sends the output buffers in CaptureResult.
     */
    std::vector<StreamBuffer> outputBuffers;
};
// Util functions used in StaticMetadata and FrameMetadata.
// Forward declarations only; the definitions are at the end of this header.
namespace metadatautils {
template<typename T>
void appendValueToString(std::string *strOut, const char* key, T value);

template<typename T>
void appendVectorOrArrayToString(std::string *strOut, T values);

template<typename T>
void appendVectorOrArrayToString(std::string *strOut, const char* key, T values);

template<typename T, size_t SIZE>
void appendVectorArrayToString(std::string *strOut, const char* key,
        std::vector<std::array<T, SIZE>> values);

template<typename T, size_t SIZE>
void appendArrayArrayToString(std::string *strOut, const char* key,
        std::array<T, SIZE> values);
} // namespace metadatautils

// Debug flags carried in StaticMetadata.debugParams (a bitmask).
// NOTE(review): "GCAME" looks like a typo for "GCAM", but these names are
// part of the public API, so they are kept as-is.
static const uint32_t DEBUG_PARAM_NONE = 0u;
static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_METERING = (1u);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD = (1u << 1);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_TEXT = (1u << 2);
/*
 * StaticMetadata defines a camera device's characteristics.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusService and deserialization in
 * MessengerListenerFromHdrPlusClient should also be updated.
 */
struct StaticMetadata {
    // NOTE(review): there is no constructor, so scalar fields are
    // uninitialized until the producer fills them — confirm all callers do.
    // The following are from Android Camera Metadata
    uint8_t flashInfoAvailable; // android.flash.info.available
    std::array<int32_t, 2> sensitivityRange; // android.sensor.info.sensitivityRange
    int32_t maxAnalogSensitivity; // android.sensor.maxAnalogSensitivity
    std::array<int32_t, 2> pixelArraySize; // android.sensor.info.pixelArraySize
    std::array<int32_t, 4> activeArraySize; // android.sensor.info.activeArraySize
    std::vector<std::array<int32_t, 4>> opticalBlackRegions; // android.sensor.opticalBlackRegions
    // android.scaler.availableStreamConfigurations
    std::vector<std::array<int32_t, 4>> availableStreamConfigurations;
    uint8_t referenceIlluminant1; // android.sensor.referenceIlluminant1
    uint8_t referenceIlluminant2; // android.sensor.referenceIlluminant2
    std::array<float, 9> calibrationTransform1; // android.sensor.calibrationTransform1
    std::array<float, 9> calibrationTransform2; // android.sensor.calibrationTransform2
    std::array<float, 9> colorTransform1; // android.sensor.colorTransform1
    std::array<float, 9> colorTransform2; // android.sensor.colorTransform2
    int32_t whiteLevel; // android.sensor.info.whiteLevel
    uint8_t colorFilterArrangement; // android.sensor.info.colorFilterArrangement
    std::vector<float> availableApertures; // android.lens.info.availableApertures
    std::vector<float> availableFocalLengths; // android.lens.info.availableFocalLengths
    std::array<int32_t, 2> shadingMapSize; // android.lens.info.shadingMapSize
    uint8_t focusDistanceCalibration; // android.lens.info.focusDistanceCalibration
    std::array<int32_t, 2> aeCompensationRange; // android.control.aeCompensationRange
    float aeCompensationStep; // android.control.aeCompensationStep
    uint32_t debugParams; // Bitmask of the DEBUG_PARAM_* flags declared above.

    // Convert this static metadata to a string and append it to the specified string.
    // Each field is rendered as a "name: value" line. No-op when strOut is null.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendValueToString(strOut, "flashInfoAvailable", flashInfoAvailable);
        metadatautils::appendVectorOrArrayToString(strOut, "sensitivityRange", sensitivityRange);
        metadatautils::appendValueToString(strOut, "maxAnalogSensitivity", maxAnalogSensitivity);
        metadatautils::appendVectorOrArrayToString(strOut, "pixelArraySize", pixelArraySize);
        metadatautils::appendVectorOrArrayToString(strOut, "activeArraySize", activeArraySize);
        metadatautils::appendVectorArrayToString(strOut, "opticalBlackRegions",
                opticalBlackRegions);
        metadatautils::appendVectorArrayToString(strOut, "availableStreamConfigurations",
                availableStreamConfigurations);
        metadatautils::appendValueToString(strOut, "referenceIlluminant1", referenceIlluminant1);
        metadatautils::appendValueToString(strOut, "referenceIlluminant2", referenceIlluminant2);
        metadatautils::appendVectorOrArrayToString(strOut, "calibrationTransform1",
                calibrationTransform1);
        metadatautils::appendVectorOrArrayToString(strOut, "calibrationTransform2",
                calibrationTransform2);
        metadatautils::appendVectorOrArrayToString(strOut, "colorTransform1", colorTransform1);
        metadatautils::appendVectorOrArrayToString(strOut, "colorTransform2", colorTransform2);
        metadatautils::appendValueToString(strOut, "whiteLevel", whiteLevel);
        metadatautils::appendValueToString(strOut, "colorFilterArrangement",
                colorFilterArrangement);
        metadatautils::appendVectorOrArrayToString(strOut, "availableApertures",
                availableApertures);
        metadatautils::appendVectorOrArrayToString(strOut, "availableFocalLengths",
                availableFocalLengths);
        metadatautils::appendVectorOrArrayToString(strOut, "shadingMapSize", shadingMapSize);
        metadatautils::appendValueToString(strOut, "focusDistanceCalibration",
                focusDistanceCalibration);
        metadatautils::appendVectorOrArrayToString(strOut, "aeCompensationRange",
                aeCompensationRange);
        metadatautils::appendValueToString(strOut, "aeCompensationStep",
                aeCompensationStep);
        metadatautils::appendValueToString(strOut, "debugParams", debugParams);
    }
};
/*
 * FrameMetadata defines properties of a frame captured on AP.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusService and deserialization in
 * MessengerListenerFromHdrPlusClient should also be updated.
 */
struct FrameMetadata {
    int64_t easelTimestamp; // Easel timestamp
    // The following are from Android Camera Metadata
    int64_t exposureTime; // android.sensor.exposureTime
    int32_t sensitivity; // android.sensor.sensitivity
    int32_t postRawSensitivityBoost; // android.control.postRawSensitivityBoost
    uint8_t flashMode; // android.flash.mode
    std::array<float, 4> colorCorrectionGains; // android.colorCorrection.gains
    std::array<float, 9> colorCorrectionTransform; // android.colorCorrection.transform
    std::array<float, 3> neutralColorPoint; // android.sensor.neutralColorPoint
    int64_t timestamp; // android.sensor.timestamp
    uint8_t blackLevelLock; // android.blackLevel.lock
    uint8_t faceDetectMode; // android.statistics.faceDetectMode
    std::vector<int32_t> faceIds; // android.statistics.faceIds
    std::vector<std::array<int32_t, 6>> faceLandmarks; // android.statistics.faceLandmarks
    std::vector<std::array<int32_t, 4>> faceRectangles; // android.statistics.faceRectangles
    std::vector<uint8_t> faceScores; // android.statistics.faceScores
    uint8_t sceneFlicker; // android.statistics.sceneFlicker
    std::array<std::array<double, 2>, 4> noiseProfile; // android.sensor.noiseProfile
    std::array<float, 4> dynamicBlackLevel; // android.sensor.dynamicBlackLevel
    std::vector<float> lensShadingMap; // android.statistics.lensShadingMap
    float focusDistance; // android.lens.focusDistance
    int32_t aeExposureCompensation; // android.control.aeExposureCompensation
    uint8_t aeMode; // android.control.aeMode
    uint8_t aeLock; // android.control.aeLock
    uint8_t aeState; // android.control.aeState
    uint8_t aePrecaptureTrigger; // android.control.aePrecaptureTrigger
    std::vector<std::array<int32_t, 5>> aeRegions; // android.control.aeRegions

    // Convert this frame metadata to a string and append it to the specified string.
    // Each field is rendered as a "name: value" line. No-op when strOut is null.
    // (Note: noiseProfile is intentionally emitted before sceneFlicker.)
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
        metadatautils::appendValueToString(strOut, "exposureTime", exposureTime);
        metadatautils::appendValueToString(strOut, "sensitivity", sensitivity);
        metadatautils::appendValueToString(strOut, "postRawSensitivityBoost",
                postRawSensitivityBoost);
        metadatautils::appendValueToString(strOut, "flashMode", flashMode);
        metadatautils::appendVectorOrArrayToString(strOut, "colorCorrectionGains",
                colorCorrectionGains);
        metadatautils::appendVectorOrArrayToString(strOut, "colorCorrectionTransform",
                colorCorrectionTransform);
        metadatautils::appendVectorOrArrayToString(strOut, "neutralColorPoint", neutralColorPoint);
        metadatautils::appendValueToString(strOut, "timestamp", timestamp);
        metadatautils::appendValueToString(strOut, "blackLevelLock", blackLevelLock);
        metadatautils::appendValueToString(strOut, "faceDetectMode", faceDetectMode);
        metadatautils::appendVectorOrArrayToString(strOut, "faceIds", faceIds);
        metadatautils::appendVectorArrayToString(strOut, "faceLandmarks", faceLandmarks);
        metadatautils::appendVectorArrayToString(strOut, "faceRectangles", faceRectangles);
        metadatautils::appendVectorOrArrayToString(strOut, "faceScores", faceScores);
        metadatautils::appendArrayArrayToString(strOut, "noiseProfile", noiseProfile);
        metadatautils::appendValueToString(strOut, "sceneFlicker", sceneFlicker);
        metadatautils::appendVectorOrArrayToString(strOut, "dynamicBlackLevel", dynamicBlackLevel);
        metadatautils::appendVectorOrArrayToString(strOut, "lensShadingMap", lensShadingMap);
        metadatautils::appendValueToString(strOut, "focusDistance", focusDistance);
        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
        metadatautils::appendValueToString(strOut, "aeMode", aeMode);
        metadatautils::appendValueToString(strOut, "aeLock", aeLock);
        metadatautils::appendValueToString(strOut, "aeState", aeState);
        metadatautils::appendValueToString(strOut, "aePrecaptureTrigger", aePrecaptureTrigger);
        metadatautils::appendVectorArrayToString(strOut, "aeRegions", aeRegions);
    }
};
/*
 * RequestMetadata defines the properties for a capture request.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
 * MessengerListenerFromHdrPlusService should also be updated.
 */
struct RequestMetadata {
    std::array<int32_t, 4> cropRegion; // android.scaler.cropRegion (x_min, y_min, width, height)
    int32_t aeExposureCompensation; // android.control.aeExposureCompensation
    bool postviewEnable; // com.google.nexus.experimental2017.stats.postview_enable
    bool continuousCapturing; // Whether to capture RAW while HDR+ processing.

    // Convert this request metadata to a string and append it to the specified string.
    // No-op when strOut is null.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendVectorOrArrayToString(strOut, "cropRegion", cropRegion);
        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
        metadatautils::appendValueToString(strOut, "postviewEnable", postviewEnable);
        metadatautils::appendValueToString(strOut, "continuousCapturing", continuousCapturing);
    }
};
/*
 * ResultMetadata defines a process frame's properties that have been modified due to processing.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
 * MessengerListenerFromHdrPlusService should also be updated.
 */
struct ResultMetadata {
    int64_t easelTimestamp; // Easel timestamp of SOF of the base frame.
    int64_t timestamp; // android.sensor.timestamp. AP timestamp of exposure start of the base
                       // frame.
    std::string makernote; // Obfuscated capture information.

    // Convert this result metadata to a string and append it to the specified string.
    // Only the makernote's size is logged, not its (obfuscated) content.
    // No-op when strOut is null.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
        metadatautils::appendValueToString(strOut, "timestamp", timestamp);
        metadatautils::appendValueToString(strOut, "makernote", makernote.size());
    }
};
/*
 * CaptureResult defines a capture result that HDR+ service returns to HDR+ client.
 */
struct CaptureResult {
    /*
     * ID of the CaptureRequest that this capture result corresponds to. It can be used to match
     * the original CaptureRequest when the HDR+ client receives this result.
     */
    uint32_t requestId;
    /*
     * Output buffers filled with processed frame by HDR+ service.
     */
    std::vector<StreamBuffer> outputBuffers;
    /*
     * Result metadata including modified properties due to processing.
     */
    ResultMetadata metadata;
};
// Util functions used in StaticMetadata and FrameMetadata.
namespace metadatautils {
/*
 * Append a key and a value to a string, formatted as "key: value\n".
 *
 * strOut is the string to append to; ignored when null.
 * key is the name of the data.
 * value is the value of the data (anything std::to_string accepts).
 */
template<typename T>
void appendValueToString(std::string *strOut, const char* key, T value) {
    if (strOut == nullptr) return;
    strOut->append(key);
    strOut->append(": ");
    strOut->append(std::to_string(value));
    strOut->append("\n");
}
/*
 * Append a vector or an array of values to a string as a comma-separated
 * list ("v0, v1, v2"); appends nothing for an empty container.
 *
 * strOut is the string to append to; ignored when null.
 * values is a vector or an array containing values to append to the string.
 */
template<typename T>
void appendVectorOrArrayToString(std::string *strOut, T values) {
    if (strOut == nullptr) return;
    const char *separator = "";
    for (size_t i = 0; i < values.size(); i++) {
        (*strOut) += separator;
        (*strOut) += std::to_string(values[i]);
        separator = ", ";
    }
}
/*
 * Append a key and a vector or an array of values to a string, formatted as
 * "key: v0, v1, v2\n".
 *
 * strOut is the string to append to; ignored when null.
 * key is the name of the data.
 * values is a vector or an array containing values to append to the string.
 */
template<typename T>
void appendVectorOrArrayToString(std::string *strOut, const char* key, T values) {
    if (strOut == nullptr) return;
    strOut->append(key).append(": ");
    appendVectorOrArrayToString(strOut, values);
    strOut->append("\n");
}
/*
 * Append a key and a vector of arrays to a string: the key, then each inner
 * array as a comma-separated list, with ", " between arrays, ending in "\n".
 *
 * strOut is the string to append to; ignored when null.
 * key is the name of the data.
 * values is a vector of arrays containing values to append to the string.
 */
template<typename T, size_t SIZE>
void appendVectorArrayToString(std::string *strOut, const char* key,
        std::vector<std::array<T, SIZE>> values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    const char *separator = "";
    for (const auto &inner : values) {
        (*strOut) += separator;
        appendVectorOrArrayToString(strOut, inner);
        separator = ", ";
    }
    (*strOut) += "\n";
}
/*
 * Append a key and an array of arrays to a string: the key, then each inner
 * array as a comma-separated list, with ", " between arrays, ending in "\n".
 *
 * strOut is the string to append to; ignored when null.
 * key is the name of the data.
 * values is an array of arrays containing values to append to the string.
 */
template<typename T, size_t SIZE>
void appendArrayArrayToString(std::string *strOut, const char* key,
        std::array<T, SIZE> values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    const char *separator = "";
    for (const auto &inner : values) {
        (*strOut) += separator;
        appendVectorOrArrayToString(strOut, inner);
        separator = ", ";
    }
    (*strOut) += "\n";
}
} // namespace metadatautils
} // namespace pbcamera
#endif // HDR_PLUS_TYPES_H

View file

@ -0,0 +1,56 @@
# Copyright 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOCAL_PATH:= $(call my-dir)

#
# libhdrplusclient
#
include $(CLEAR_VARS)

LOCAL_SRC_FILES := \
    EaselManagerClient.cpp \
    HdrPlusClientUtils.cpp

LOCAL_SHARED_LIBRARIES := liblog

# For AOSP builds, use dummy implementation.
# (vendor/google_easel is only present in internal builds; when it is absent,
# USE_DUMMY_IMPL=1 compiles the stub paths instead of linking the real impl.)
ifeq ($(wildcard vendor/google_easel),)
    LOCAL_CFLAGS += -DUSE_DUMMY_IMPL=1
else
    LOCAL_CFLAGS += -DUSE_DUMMY_IMPL=0
    LOCAL_SHARED_LIBRARIES += libhdrplusclientimpl
endif

LOCAL_HEADER_LIBRARIES := \
    libsystem_headers \
    libutils_headers

LOCAL_EXPORT_HEADER_LIBRARY_HEADERS := \
    libutils_headers

LOCAL_C_INCLUDES += \
    $(LOCAL_PATH)/include \
    hardware/google/easel/camera/include

# Treat all warnings as errors for this library.
LOCAL_CFLAGS += -Wall -Wextra -Werror

LOCAL_EXPORT_C_INCLUDE_DIRS += \
    $(LOCAL_PATH)/include \
    hardware/google/easel/camera/include

LOCAL_MODULE:= libhdrplusclient
LOCAL_PROPRIETARY_MODULE := true
LOCAL_MODULE_OWNER := google

include $(BUILD_SHARED_LIBRARY)

View file

@ -0,0 +1,37 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "EaselManagerClient"
#include <log/log.h>
#include <memory>
#include "EaselManagerClient.h"
#if !USE_DUMMY_IMPL
#include "EaselManagerClientImpl.h"
#endif
namespace android {
// Factory for the EaselManagerClient.
// Returns the real implementation when built against vendor/google_easel,
// or nullptr in AOSP (dummy) builds — callers must handle a null return.
std::unique_ptr<EaselManagerClient> EaselManagerClient::create() {
#if USE_DUMMY_IMPL
    return nullptr;
#else
    return std::make_unique<EaselManagerClientImpl>();
#endif
}
} // namespace android

View file

@ -0,0 +1,197 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "HdrPlusClientUtils"
#include <log/log.h>
#include <fstream>
#include <inttypes.h>
#include <system/graphics.h>
#include "HdrPlusClientUtils.h"
namespace android {
namespace hdrplus_client_utils {
// Get the RGB values of the pixel at (x, y).
//
// Supports NV21 (HAL_PIXEL_FORMAT_YCrCb_420_SP, interleaved VU chroma) and
// packed RGB_888 buffers. Returns OK on success, BAD_VALUE for an
// unsupported format or a plane count that does not match the format.
// No bounds checking is performed: (x, y) must lie inside the image.
static status_t getRgb(uint8_t *r, uint8_t *g, uint8_t* b, uint32_t x, uint32_t y,
        const pbcamera::StreamConfiguration &streamConfig,
        const pbcamera::StreamBuffer &buffer) {
    switch (streamConfig.image.format) {
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        {
            // Check the stream configuration has two planes.
            if (streamConfig.image.planes.size() != 2) {
                ALOGE("%s: NV21 should have 2 planes but it has %zu", __FUNCTION__,
                        streamConfig.image.planes.size());
                return BAD_VALUE;
            }
            // Find the indices of Y, V, and U in the buffer. The VU plane
            // starts after scanline*stride bytes of Y; chroma is subsampled
            // 2x2, and (x & ~0x1) selects the V byte of the VU pair.
            uint32_t yIndex = y * streamConfig.image.planes[0].stride + x;
            uint32_t vIndex = streamConfig.image.planes[0].scanline *
                              streamConfig.image.planes[0].stride +
                              (y / 2) * streamConfig.image.planes[1].stride + (x & ~0x1);
            uint32_t uIndex = vIndex + 1;
            // Convert YUV to RGB.
            // NOTE(review): these coefficients differ slightly from the
            // nominal BT.601 matrix (e.g. the small U contribution to R);
            // presumably a tuned conversion — confirm before reusing.
            int32_t yc = ((uint8_t*)buffer.data)[yIndex];
            int32_t vc = ((uint8_t*)buffer.data)[vIndex] - 128;
            int32_t uc = ((uint8_t*)buffer.data)[uIndex] - 128;
            *r = std::min(std::max(yc + 0.003036f * uc + 1.399457f * vc, 0.0f), 255.0f);
            *g = std::min(std::max(yc - 0.344228f * uc - 0.717202f * vc, 0.0f), 255.0f);
            *b = std::min(std::max(yc + 1.772431f * uc - 0.006137f * vc, 0.0f), 255.0f);
            return OK;
        }
        case HAL_PIXEL_FORMAT_RGB_888:
        {
            // Check the stream configuration has 1 plane.
            if (streamConfig.image.planes.size() != 1) {
                ALOGE("%s: RGB_888 should have 1 plane but it has %zu", __FUNCTION__,
                        streamConfig.image.planes.size());
                return BAD_VALUE;
            }
            // 3 bytes per pixel, R then G then B.
            uint32_t offset = y * streamConfig.image.planes[0].stride + x * 3;
            *r = ((uint8_t*)buffer.data)[offset];
            *g = ((uint8_t*)buffer.data)[offset + 1];
            *b = ((uint8_t*)buffer.data)[offset + 2];
            return OK;
        }
        default:
            ALOGE("%s: Format %d is not supported.", __FUNCTION__, streamConfig.image.format);
            return BAD_VALUE;
    }
}
// Write the image in the given buffer to a binary PPM (P6) file.
//
// filename is the destination path; streamConfig describes the buffer layout
// (only YCrCb_420_SP and RGB_888 are supported). Returns OK on success,
// BAD_VALUE for unsupported formats, NO_INIT if the file cannot be opened.
status_t writePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
        const pbcamera::StreamBuffer &buffer) {
    if (streamConfig.image.format != HAL_PIXEL_FORMAT_YCrCb_420_SP &&
            streamConfig.image.format != HAL_PIXEL_FORMAT_RGB_888) {
        ALOGE("%s: format 0x%x is not supported.", __FUNCTION__, streamConfig.image.format);
        return BAD_VALUE;
    }
    std::ofstream outfile(filename, std::ios::binary);
    if (!outfile.is_open()) {
        ALOGE("%s: Opening file (%s) failed.", __FUNCTION__, filename.data());
        return NO_INIT;
    }
    uint32_t width = streamConfig.image.width;
    uint32_t height = streamConfig.image.height;
    // Write headers of the ppm file: magic, dimensions, and maxval (255),
    // each separated by whitespace; the final space ends the header.
    outfile << "P6";
    outfile << " " << std::to_string(width) << " " << std::to_string(height) << " 255 ";
    // Write RGB values of the image as raw bytes, row by row.
    uint8_t r, g, b;
    for (uint32_t y = 0; y < height; y++) {
        for (uint32_t x = 0; x < width; x++) {
            status_t res = getRgb(&r, &g, &b, x, y, streamConfig, buffer);
            if (res != OK) {
                ALOGE("%s: Getting RGB failed: %s (%d).", __FUNCTION__, strerror(-res), res);
                return res;
            }
            outfile << r << g << b;
        }
    }
    ALOGD("%s: Saved file: %s", __FUNCTION__, filename.data());
    outfile.close();
    return OK;
}
/*
 * Compare the image in the given buffer against a golden PPM (P6) file.
 *
 * filename is the path to the golden .ppm file (as written by writePpm).
 * streamConfig describes the buffer layout; only HAL_PIXEL_FORMAT_YCrCb_420_SP
 * is supported here. buffer holds the image to compare. diffRatio, if not
 * null, receives the sum of absolute per-channel differences normalized by
 * width * height * 3 * 256 (0.0 means identical).
 *
 * Returns OK on success, BAD_VALUE for an unsupported format or malformed /
 * truncated golden file, NO_INIT if the file cannot be opened.
 */
status_t comparePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
        const pbcamera::StreamBuffer &buffer, float *diffRatio) {
    if (streamConfig.image.format != HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        ALOGE("%s: format 0x%x is not supported.", __FUNCTION__, streamConfig.image.format);
        return BAD_VALUE;
    }
    std::ifstream ifile(filename, std::ios::binary);
    if (!ifile.is_open()) {
        ALOGE("%s: Opening file (%s) failed.", __FUNCTION__, filename.data());
        return NO_INIT;
    }
    std::string s;
    // Read headers of the ppm file.
    ifile >> s;
    if (s != "P6") {
        ALOGE("%s: Invalid PPM file header: %s", __FUNCTION__, s.c_str());
        return BAD_VALUE;
    }
    // Read width and height.
    ifile >> s;
    uint32_t width = std::stoul(s);
    ifile >> s;
    uint32_t height = std::stoul(s);
    if (width != streamConfig.image.width || height != streamConfig.image.height) {
        ALOGE("%s: Image resolution doesn't match. image %dx%d ppm %dx%d",
                __FUNCTION__, streamConfig.image.width, streamConfig.image.height,
                width, height);
        return BAD_VALUE;
    }
    // Maxval must be 255 for 1-byte-per-channel P6 data.
    ifile >> s;
    if (s != "255") {
        ALOGE("%s: Expecting 255 but got %s", __FUNCTION__, s.c_str());
        return BAD_VALUE;
    }
    char c;
    // Consume the single whitespace byte separating the header from the
    // binary pixel data.
    ifile.get(c);
    // Accumulate absolute per-channel differences over all pixels.
    uint8_t r, g, b;
    uint64_t diff = 0;
    for (uint32_t y = 0; y < height; y++) {
        for (uint32_t x = 0; x < width; x++) {
            status_t res = getRgb(&r, &g, &b, x, y, streamConfig, buffer);
            if (res != OK) {
                ALOGE("%s: Getting RGB failed: %s (%d).", __FUNCTION__, strerror(-res), res);
                return res;
            }
            // Read r, g, b from the golden image. The bytes must be widened
            // through uint8_t first: "char" may be signed, so casting a raw
            // byte above 127 directly to int32_t would produce a negative
            // value and corrupt the accumulated difference. Also fail cleanly
            // if the golden file is shorter than the header claims.
            uint8_t golden[3];
            for (uint32_t i = 0; i < 3; i++) {
                if (!ifile.get(c)) {
                    ALOGE("%s: Golden file (%s) ended prematurely.", __FUNCTION__,
                            filename.data());
                    return BAD_VALUE;
                }
                golden[i] = static_cast<uint8_t>(c);
            }
            diff += abs(static_cast<int32_t>(golden[0]) - r);
            diff += abs(static_cast<int32_t>(golden[1]) - g);
            diff += abs(static_cast<int32_t>(golden[2]) - b);
        }
    }
    if (diffRatio != nullptr) {
        *diffRatio = diff / (static_cast<float>(width) * height * 3 * 256);
    }
    return OK;
}
} // hdrplus_client_utils
} // namespace android

View file

@ -0,0 +1,155 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef EASEL_MANAGER_CLIENT_H
#define EASEL_MANAGER_CLIENT_H
#include <future>
#include <utils/Errors.h>
#include <utils/Mutex.h>
#define FW_VER_SIZE 24
namespace android {
class EaselManagerClientListener;
class HdrPlusClient;
class HdrPlusClientListener;
/*
 * EaselManagerClient manages the Easel life cycle (power, suspend/resume, MIPI)
 * and is the factory for HDR+ clients that run on Easel.
 */
class EaselManagerClient {
public:
    // Create a concrete EaselManagerClient instance.
    static std::unique_ptr<EaselManagerClient> create();

    EaselManagerClient() = default;
    virtual ~EaselManagerClient() = default;

    /*
     * Return if Easel is present on the device.
     *
     * If Easel is not present, all other calls to HdrPlusClient are invalid.
     */
    virtual bool isEaselPresentOnDevice() const = 0;

    /*
     * Open Easel manager client.
     *
     * This will power on Easel and initialize Easel manager client.
     */
    virtual status_t open() = 0;

    /*
     * Suspend Easel.
     *
     * Put Easel on suspend mode.
     */
    virtual status_t suspend() = 0;

    /*
     * Resume Easel.
     *
     * Resume Easel from suspend mode.
     *
     * listener will be invoked for Easel status.
     */
    virtual status_t resume(EaselManagerClientListener *listener) = 0;

    /*
     * Retrieve Easel firmware version.
     *
     * Firmware version string is added to image exif.
     *
     * fwVersion is the destination buffer — presumably at least FW_VER_SIZE
     * bytes; confirm against the implementation.
     */
    virtual status_t getFwVersion(char *fwVersion) = 0;

    /*
     * Start MIPI with an output pixel lock rate for a camera.
     *
     * Can be called when Easel is powered on or resumed, for Easel to start sending sensor data
     * to AP.
     *
     * cameraId is the camera ID to start MIPI for.
     * outputPixelClkHz is the output pixel rate.
     * enableCapture is whether to enable MIPI capture on Easel.
     */
    virtual status_t startMipi(uint32_t cameraId, uint32_t outputPixelClkHz,
            bool enableCapture) = 0;

    /*
     * Stop MIPI for a camera.
     *
     * cameraId is the camera is ID to stop MIPI for.
     */
    virtual status_t stopMipi(uint32_t cameraId) = 0;

    /*
     * Open an HDR+ client asynchronously.
     *
     * Open an HDR+ client asynchronously. When an HDR+ client is opened,
     * HdrPlusClientListener::onOpened() will be invoked with the created HDR+ client. If opening
     * an HDR+ client failed, HdrPlusClientListener::onOpenFailed() will be invoked with the error.
     * If this method returns an error, HdrPlusClientListener::onOpenFailed() will not be invoked.
     *
     * listener is an HDR+ client listener.
     *
     * Returns:
     *  OK:             if initiating opening an HDR+ client asynchronously was successful.
     *                  HdrPlusClientListener::onOpened() or HdrPlusClientListener::onOpenFailed()
     *                  will be invoked when opening an HDR+ client completed.
     *  ALREADY_EXISTS: if there is already a pending HDR+ client being opened.
     */
    virtual status_t openHdrPlusClientAsync(HdrPlusClientListener *listener) = 0;

    /*
     * Open an HDR+ client synchronously and block until it completes.
     *
     * listener is an HDR+ client listener.
     * client is an output parameter for created HDR+ client.
     *
     * Returns:
     *  OK:         on success.
     *  -EEXIST:    if an HDR+ client is already opened.
     *  -ENODEV:    if opening an HDR+ failed due to a serious error.
     */
    virtual status_t openHdrPlusClient(HdrPlusClientListener *listener,
            std::unique_ptr<HdrPlusClient> *client) = 0;

    /*
     * Close an HDR+ client.
     *
     * client is the HDR+ client to be closed.
     */
    virtual void closeHdrPlusClient(std::unique_ptr<HdrPlusClient> client) = 0;

private:
    // Disallow copy and assign.
    EaselManagerClient(const EaselManagerClient&) = delete;
    void operator=(const EaselManagerClient&) = delete;
};
/*
 * EaselManagerClientListener defines callbacks that will be invoked by EaselManagerClient.
 */
class EaselManagerClientListener {
public:
    virtual ~EaselManagerClientListener() = default;

    // Invoked when Easel encountered a fatal error. Client should shut down Easel.
    // errMsg describes the failure.
    virtual void onEaselFatalError(std::string errMsg) = 0;
};
} // namespace android
#endif // EASEL_MANAGER_CLIENT_H

View file

@ -0,0 +1,160 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HDR_PLUS_CLIENT_H
#define HDR_PLUS_CLIENT_H
#include "CameraMetadata.h"
#include "hardware/camera3.h"
#include "HdrPlusClientListener.h"
#include "HdrPlusTypes.h"
using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
namespace android {
/**
 * HdrPlusClient
 *
 * HdrPlusClient class can be used to connect to HDR+ service to perform HDR+ processing on
 * Easel. All methods are pure virtual; concrete implementations are provided
 * elsewhere and obtained through EaselManagerClient.
 */
class HdrPlusClient {
public:
    // HdrPlusClientListener is the listener to receive callbacks from HDR+ client. The listener
    // must be valid during the life cycle of HdrPlusClient.
    HdrPlusClient(HdrPlusClientListener *) {};
    /*
     * The recommended way to create an HdrPlusClient instance is via
     * EaselManagerClient::openHdrPlusClientAsync() or EaselManagerClient::openHdrPlusClient().
     * EaselManagerClient will make sure Easel is in a valid state to open an HDR+ client. To close
     * an HdrPlusClient, use EaselManagerClient::closeHdrPlusClient.
     */
    virtual ~HdrPlusClient() {};
    /*
     * Connect to HDR+ service.
     *
     * If EaselManagerClient is used to create the HdrPlusClient, it is already connected.
     *
     * Returns:
     *  0:        on success.
     *  -EEXIST:  if it's already connected.
     *  -ENODEV:  if connecting failed due to a serious error.
     */
    virtual status_t connect() = 0;
    /*
     * Set the static metadata of current camera device.
     *
     * Must be called after connect() and before configuring streams.
     *
     * staticMetadata is the static metadata of current camera device.
     *
     * Returns:
     *  0:        on success.
     *  -ENODEV:  if HDR+ service is not connected.
     */
    virtual status_t setStaticMetadata(const camera_metadata_t &staticMetadata) = 0;
    /*
     * Configure streams.
     *
     * Must be called when configuration changes including input (sensor) resolution and format, and
     * output resolutions and formats.
     *
     * inputConfig contains the information about the input frames or sensor configurations.
     * outputConfigs is a vector of output stream configurations.
     *
     * Returns:
     *  0:        on success.
     *  -EINVAL:  if outputConfigs is empty or the configurations are not supported.
     *  -ENODEV:  if HDR+ service is not connected.
     */
    virtual status_t configureStreams(const pbcamera::InputConfiguration &inputConfig,
            const std::vector<pbcamera::StreamConfiguration> &outputConfigs) = 0;
    /*
     * Enable or disable ZSL HDR+ mode.
     *
     * When ZSL HDR+ mode is enabled, Easel will capture ZSL RAW buffers. ZSL HDR+ mode should be
     * disabled to reduce power consumption when HDR+ processing is not necessary, e.g in video
     * mode.
     *
     * enabled is a flag indicating whether to enable ZSL HDR+ mode.
     *
     * Returns:
     *  0:        on success.
     *  -ENODEV:  if HDR+ service is not connected, or streams are not configured.
     */
    virtual status_t setZslHdrPlusMode(bool enabled) = 0;
    /*
     * Submit a capture request for HDR+ outputs.
     *
     * For each output buffer in CaptureRequest, it will be returned in a CaptureResult via
     * HdrPlusClientListener::onCaptureResult(). HdrPlusClientListener::onCaptureResult() may be
     * invoked multiple times to return all output buffers in one CaptureRequest. Each output
     * buffer will be returned in CaptureResult only once.
     *
     * request is a CaptureRequest containing output buffers to be filled by HDR+ service.
     * requestMetadata is the metadata for this request.
     *
     * Returns:
     *  0:        on success.
     *  -EINVAL:  if the request is invalid such as containing invalid stream IDs.
     */
    virtual status_t submitCaptureRequest(pbcamera::CaptureRequest *request,
            const CameraMetadata &requestMetadata) = 0;
    /*
     * Send an input buffer to HDR+ service. This is used when HDR+ service's input buffers come
     * from the client rather than MIPI.
     *
     * inputBuffer is the input buffer to send to HDR+ service. After this method returns, the
     *             buffer has been copied (with DMA) to HDR+ service and the caller has the
     *             ownership of the buffer.
     * timestampNs is the timestamp of the input buffer, in nanoseconds.
     */
    virtual void notifyInputBuffer(const pbcamera::StreamBuffer &inputBuffer,
            int64_t timestampNs) = 0;
    /*
     * Notify about result metadata of a frame that AP captured. This may be called multiple times
     * for a frame to send multiple partial metadata and lastMetadata must be false except for the
     * last partial metadata. When there is only one metadata for a frame, lastMetadata must be
     * true.
     *
     * frameNumber is a unique frame number that the metadata belong to.
     * resultMetadata is the result metadata of a frame that AP captured.
     * lastMetadata is a flag indicating whether this is the last metadata for the frame number.
     */
    virtual void notifyFrameMetadata(uint32_t frameNumber, const camera_metadata_t &resultMetadata,
            bool lastMetadata=true) = 0;
    /*
     * Notify Easel has encountered a fatal error and HDR+ client should stop sending messages
     * to Easel.
     *
     * NOTE(review): the method name is misspelled ("nofity" instead of "notify");
     * it is kept as-is because renaming would break every existing override/caller.
     */
    virtual void nofityEaselFatalError() = 0;

private:
    // Disallow copy and assign.
    HdrPlusClient(const HdrPlusClient&) = delete;
    void operator=(const HdrPlusClient&) = delete;
};
} // namespace android
#endif // HDR_PLUS_CLIENT_H

View file

@ -0,0 +1,93 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HDR_PLUS_CLIENT_LISTENER_H
#define HDR_PLUS_CLIENT_LISTENER_H
#include "hardware/camera3.h"
#include "HdrPlusTypes.h"
namespace android {
class HdrPlusClient;
/*
 * HdrPlusClientListener defines callbacks that will be invoked by HdrPlusClient for events like
 * returning capture results. Implementations must outlive the HdrPlusClient that
 * was registered with them.
 */
class HdrPlusClientListener {
public:
    virtual ~HdrPlusClientListener() {};
    /*
     * Invoked when an HDR+ client is opened successfully via
     * EaselManagerClient::openHdrPlusClientAsync. Ownership of the client is
     * transferred to the listener.
     */
    virtual void onOpened(std::unique_ptr<HdrPlusClient> client) = 0;
    /*
     * Invoked when opening an HDR+ client failed via EaselManagerClient::openHdrPlusClientAsync.
     *
     * err is
     *  -EEXIST:    if an HDR+ client is already opened.
     *  -ENODEV:    if opening an HDR+ failed due to a serious error.
     */
    virtual void onOpenFailed(status_t err) = 0;
    /*
     * Invoked when HDR+ client is in a fatal error state. After receiving this error, calls to HDR+
     * client will not succeed and HDR+ client should be closed.
     */
    virtual void onFatalError() = 0;
    /*
     * Invoked when a CaptureResult, containing a subset or all output buffers for a CaptureRequest,
     * is received. This may be invoked multiple times for one CaptureRequest but each CaptureResult
     * will contain distinct output buffers that have not been received yet.
     */
    virtual void onCaptureResult(pbcamera::CaptureResult *result,
            const camera_metadata_t &resultMetadata) = 0;
    /*
     * Invoked when a failed CaptureResult, containing a subset or all output buffers for a
     * CaptureRequest, is received. Output buffers in a failed capture result may contain garbage
     * data. This may be invoked multiple times for one CaptureRequest but each CaptureResult
     * will contain distinct output buffers that have not been received yet.
     */
    virtual void onFailedCaptureResult(pbcamera::CaptureResult *failedResult) = 0;
    /*
     * Invoked when HDR+ processing has started for a request. requestId is the ID of the request.
     * apSensorTimestampNs is the AP sensor timestamp of the base frame, in nanoseconds.
     */
    virtual void onShutter(uint32_t requestId, int64_t apSensorTimestampNs) = 0;
    /*
     * Invoked when Easel is ready to take another HDR+ request.
     */
    virtual void onNextCaptureReady(uint32_t requestId) = 0;
    /*
     * Invoked when the postview for a request is ready.
     *
     * postview carries the postview image bytes; width/height/stride describe its
     * layout and format is its pixel format.
     */
    virtual void onPostview(uint32_t requestId, std::unique_ptr<std::vector<uint8_t>> postview,
            uint32_t width, uint32_t height, uint32_t stride, int32_t format) = 0;
};
} // namespace android
#endif // HDR_PLUS_CLIENT_LISTENER_H

View file

@ -0,0 +1,63 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HDR_PLUS_CLIENT_UTILS_H
#define HDR_PLUS_CLIENT_UTILS_H
#include <utils/Errors.h>
#include "HdrPlusTypes.h"
namespace android {
namespace hdrplus_client_utils {
/*
* Write the image buffer to a .ppm file.
*
* filename is the filename of the .ppm file and should include ".ppm" in the end.
* streamConfig is the stream configuration of the buffer.
* buffer is the buffer to be saved to a .ppm file.
*
* Returns
* OK: if the file is saved successfully.
 * BAD_VALUE: if the format is not supported or the stream configuration is invalid.
* NO_INIT: if it failed to open the file.
*/
status_t writePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
const pbcamera::StreamBuffer &buffer);
/*
* Compare the image buffer against a golden .ppm file.
*
* filename is the filename of the .ppm file and should include ".ppm" in the end.
* streamConfig is the stream configuration of the buffer.
* buffer is the buffer to be compared.
* diffRatio will be the difference ratio between the image buffer and the golden ppm file.
* It's calculated as sum(R, G, B diffs in each pixel) / (width * height * 256 * 3)
*
* Returns
* OK: if the comparison completed successfully.
 * BAD_VALUE: if the format is not supported or the stream configuration is invalid, or the
* file cannot be parsed correctly.
* NO_INIT: if it failed to open the file.
*/
status_t comparePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
const pbcamera::StreamBuffer &buffer, float *diffRatio);
} // hdrplus_client_utils
} // namespace android
#endif // HDR_PLUS_CLIENT_UTILS_H