In 《Android 源码 Camera2 预览流程分析二》 (Part 2 of this series), the stream was started by calling QCamera3Channel::start(). For the stream created with the HAL_PIXEL_FORMAT_YCbCr_420_888 format, the concrete QCamera3Channel implementation is QCamera3RegularChannel.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

int32_t QCamera3RegularChannel::start()
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;

    if (0 < mMemory.getCnt()) {
        rc = QCamera3Channel::start();
    }

    return rc;
}
  1. Start the streams; each stream is a QCamera3Stream, added earlier via addStream();
  2. Start the channel.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

int32_t QCamera3Channel::start()
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;

    if (m_numStreams > 1) {
        ALOGE("%s: bundle not supported", __func__);
    } else if (m_numStreams == 0) {
        return NO_INIT;
    }

    if (m_bIsActive) {
        ALOGD("%s: Attempt to start active channel", __func__);
        return rc;
    }

    for (int i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->start();
        }
    }
    rc = m_camOps->start_channel(m_camHandle, m_handle);

    if (rc != NO_ERROR) {
        for (int i = 0; i < m_numStreams; i++) {
            if (mStreams[i] != NULL) {
                mStreams[i]->stop();
            }
        }
    } else {
        m_bIsActive = true;
    }

    return rc;
}

Start the stream. This launches the main stream thread, which handles all stream-related work.

  1. Initialize the QCameraQueue (mDataQ);
  2. Launch the stream processing thread, which runs the dataProcRoutine routine.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Stream.cpp

int32_t QCamera3Stream::start()
{
    int32_t rc = 0;

    mDataQ.init();
    rc = mProcTh.launch(dataProcRoutine, this);
    return rc;
}

launch() calls pthread_create to create the thread and start it running.

device/moto/shamu/camera/QCamera2/util/QCameraCmdThread.cpp

int32_t QCameraCmdThread::launch(void *(*start_routine)(void *),
                                 void *user_data)
{
    /* launch the thread */
    pthread_create(&cmd_pid,
                   NULL,
                   start_routine,
                   user_data);
    return NO_ERROR;
}

dataProcRoutine is the function that processes data on the main stream thread. It waits for notifications of new commands in the cmd queue; when the camera_cmd_type_t is CAMERA_CMD_TYPE_DO_NEXT_JOB, it dequeues a frame from the QCameraQueue and invokes the function pointed to by mDataCB.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Stream.cpp

void *QCamera3Stream::dataProcRoutine(void *data)
{
    int running = 1;
    int ret;
    QCamera3Stream *pme = (QCamera3Stream *)data;
    QCameraCmdThread *cmdThread = &pme->mProcTh;

    cmdThread->setName("cam_stream_proc");
    CDBG("%s: E", __func__);
    do {
        do {
            ret = cam_sem_wait(&cmdThread->cmd_sem);
            if (ret != 0 && errno != EINVAL) {
                ALOGE("%s: cam_sem_wait error (%s)",
                      __func__, strerror(errno));
                return NULL;
            }
        } while (ret != 0);

        // we got notified about a new cmd available in the cmd queue
        camera_cmd_type_t cmd = cmdThread->getCmd();
        switch (cmd) {
        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
            {
                CDBG("%s: Do next job", __func__);
                mm_camera_super_buf_t *frame =
                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
                if (NULL != frame) {
                    if (pme->mDataCB != NULL) {
                        pme->mDataCB(frame, pme, pme->mUserData);
                    } else {
                        // no data cb routine, return the buf here
                        pme->bufDone(frame->bufs[0]->buf_idx);
                    }
                }
            }
            break;
        case CAMERA_CMD_TYPE_EXIT:
            CDBG_HIGH("%s: Exit", __func__);
            /* flush the data buf queue */
            pme->mDataQ.flush();
            running = 0;
            break;
        default:
            break;
        }
    } while (running);
    CDBG("%s: X", __func__);
    return NULL;
}
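For orientation, the loop above is fed by a classic queue-plus-semaphore hand-off: a producer (in the HAL this is the stream's data-notify path, which enqueues the frame into mDataQ and posts CAMERA_CMD_TYPE_DO_NEXT_JOB to mProcTh, presumably through a sendCmd()-style helper on QCameraCmdThread) wakes the thread via the command semaphore. Below is a minimal, self-contained sketch of that pattern; the types and names are hypothetical stand-ins, not the HAL implementation.

// Illustrative producer/consumer sketch of the cmd-thread pattern used by
// QCamera3Stream (hypothetical names; not the HAL code itself).
#include <pthread.h>
#include <semaphore.h>
#include <queue>
#include <mutex>
#include <cstdio>

enum cmd_type { CMD_DO_NEXT_JOB, CMD_EXIT };

struct CmdThread {
    sem_t cmd_sem;                 // counts pending commands, like cam_sem_t
    std::mutex lock;
    std::queue<cmd_type> cmds;     // stands in for the cmd queue
    std::queue<int> dataQ;         // stands in for mDataQ (frame ids here)

    void sendCmd(cmd_type c) {     // producer side: enqueue + wake the thread
        { std::lock_guard<std::mutex> l(lock); cmds.push(c); }
        sem_post(&cmd_sem);
    }
    cmd_type getCmd() {            // consumer side: called after sem_wait
        std::lock_guard<std::mutex> l(lock);
        cmd_type c = cmds.front(); cmds.pop(); return c;
    }
};

static void *procRoutine(void *arg) {          // mirrors dataProcRoutine
    CmdThread *t = static_cast<CmdThread *>(arg);
    for (;;) {
        sem_wait(&t->cmd_sem);                 // block until a cmd is posted
        cmd_type cmd = t->getCmd();
        if (cmd == CMD_EXIT) break;
        int frame;                             // DO_NEXT_JOB: drain one frame
        { std::lock_guard<std::mutex> l(t->lock); frame = t->dataQ.front(); t->dataQ.pop(); }
        printf("processing frame %d\n", frame);    // stands in for mDataCB(...)
    }
    return nullptr;
}

int main() {
    CmdThread t;
    sem_init(&t.cmd_sem, 0, 0);
    pthread_t tid;
    pthread_create(&tid, nullptr, procRoutine, &t);
    { std::lock_guard<std::mutex> l(t.lock); t.dataQ.push(42); }   // a frame arrives
    t.sendCmd(CMD_DO_NEXT_JOB);                                    // wake the worker
    t.sendCmd(CMD_EXIT);                                           // shut it down
    pthread_join(tid, nullptr);
    sem_destroy(&t.cmd_sem);
    return 0;
}

In the real HAL, CAMERA_CMD_TYPE_EXIT is posted when the stream is stopped, which is why dataProcRoutine flushes mDataQ before leaving its loop.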

The stream callback routine.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
                QCamera3Stream *stream, void *userdata)
{
    QCamera3Channel *channel = (QCamera3Channel *)userdata;
    if (channel == NULL) {
        ALOGE("%s: invalid channel pointer", __func__);
        return;
    }
    channel->streamCbRoutine(super_frame, stream);
}
  1. Validate the parameters;
  2. Fill in a camera3_stream_buffer_t structure in preparation for the callback into the framework;
  3. Invoke the function pointed to by mChannelCB, which in practice is QCamera3HardwareInterface::captureResultCb.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

void QCamera3RegularChannel::streamCbRoutine(
                    mm_camera_super_buf_t *super_frame,
                    QCamera3Stream *stream)
{
    ATRACE_CALL();
    //FIXME Q Buf back in case of error?
    uint8_t frameIndex;
    buffer_handle_t *resultBuffer;
    int32_t resultFrameNumber;
    camera3_stream_buffer_t result;

    if (NULL == stream) {
        ALOGE("%s: Invalid stream", __func__);
        return;
    }

    if (!super_frame) {
        ALOGE("%s: Invalid Super buffer", __func__);
        return;
    }

    if (super_frame->num_bufs != 1) {
        ALOGE("%s: Multiple streams are not supported", __func__);
        return;
    }
    if (super_frame->bufs[0] == NULL) {
        ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
              __func__);
        return;
    }

    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
    if (frameIndex >= mNumBufs) {
        ALOGE("%s: Error, Invalid index for buffer", __func__);
        stream->bufDone(frameIndex);
        return;
    }

    // Issue the framework callback with the following data
    resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
    resultFrameNumber = mMemory.getFrameNumber(frameIndex);

    result.stream = mCamera3Stream;
    result.buffer = resultBuffer;
    result.status = CAMERA3_BUFFER_STATUS_OK;
    result.acquire_fence = -1;
    result.release_fence = -1;

    int32_t rc = stream->bufRelease(frameIndex);
    if (NO_ERROR != rc) {
        ALOGE("%s: Error %d releasing stream buffer %d",
              __func__, rc, frameIndex);
    }

    rc = mMemory.unregisterBuffer(frameIndex);
    if (NO_ERROR != rc) {
        ALOGE("%s: Error %d unregistering stream buffer %d",
              __func__, rc, frameIndex);
    }

    if (0 <= resultFrameNumber) {
        mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, mUserData);
    } else {
        ALOGE("%s: Bad frame number", __func__);
    }

    free(super_frame);
    return;
}

The callback handler for all channels (both stream data and metadata).

device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp

void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
                camera3_stream_buffer_t *buffer,
                uint32_t frame_number, void *userdata)
{
    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
    if (hw == NULL) {
        ALOGE("%s: Invalid hw %p", __func__, hw);
        return;
    }

    hw->captureResultCb(metadata, buffer, frame_number);
    return;
}

Here the focus is on the handleBufferWithLock function, which handles image buffer callbacks with the mMutex lock held.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp

void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    /* Assume flush() is called before any reprocessing. Send the notify
       and result immediately upon receipt of any callback. */
    if (mLoopBackResult) {
        /* Send notify */
        camera3_notify_msg_t notify_msg;
        notify_msg.type = CAMERA3_MSG_SHUTTER;
        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        /* Send capture result */
        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
        free(mLoopBackResult);
        mLoopBackResult = NULL;
    }

    if (metadata_buf)
        handleMetadataWithLock(metadata_buf);
    else
        handleBufferWithLock(buffer, frame_number);

    pthread_mutex_unlock(&mMutex);
}

If the frame number is not in the pending request list, process_capture_result is called directly to deliver the buffer.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp

void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list, directly
    // send the buffer to the framework and update the pending buffers map.
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number) {
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests' frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if ((m->stream_ID == streamID) && (m->frame_number == frame_number)) {
                buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if (i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = capture_time;

            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
                }
            }

            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            bool notifyNow = true;
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = mPendingRequestsList.erase(i);
                mPendingRequest--;
            } else {
                // Cache the reprocessing result for later use
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}

When the camera device is opened, camera3_callback_ops::process_capture_result is assigned, so the process_capture_result call above actually ends up in the sProcessCaptureResult function.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

Camera3Device::Camera3Device(int id):
        mId(id),
        mIsConstrainedHighSpeedConfiguration(false),
        mHal3Device(NULL),
        mStatus(STATUS_UNINITIALIZED),
        mStatusWaiters(0),
        mUsePartialResult(false),
        mNumPartialResults(1),
        mNextResultFrameNumber(0),
        mNextReprocessResultFrameNumber(0),
        mNextShutterFrameNumber(0),
        mNextReprocessShutterFrameNumber(0),
        mListener(NULL)
{
    ATRACE_CALL();
    camera3_callback_ops::notify = &sNotify;
    camera3_callback_ops::process_capture_result = &sProcessCaptureResult;
    ALOGV("%s: Created device for camera %d", __FUNCTION__, id);
}

Static callback forwarding methods from the HAL to the instance.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb,
        const camera3_capture_result *result) {
    Camera3Device *d =
            const_cast<Camera3Device*>(static_cast<const Camera3Device*>(cb));

    d->processCaptureResult(result);
}
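The static_cast back to Camera3Device works because Camera3Device derives from camera3_callback_ops and registers itself with the HAL during initialization (Camera3Device::initialize() hands `this` to the HAL through the device's initialize() op), so the cb pointer the HAL later passes back really is the Camera3Device object. The following self-contained sketch illustrates that trampoline pattern with hypothetical names; it is not the framework code itself.

// Illustrative sketch of the callback-ops pattern used by Camera3Device
// (hypothetical names; simplified to a single callback).
#include <cstdio>

struct callback_ops {                       // plain C table, like camera3_callback_ops
    void (*process_capture_result)(const callback_ops *cb, int frame_number);
};

class Device : public callback_ops {        // the device *is* a callback_ops
public:
    Device() { process_capture_result = &sProcessCaptureResult; }
    void processCaptureResult(int frame_number) {
        printf("got capture result for frame %d\n", frame_number);
    }
private:
    // Static trampoline: the HAL only knows the C struct, so cast back here.
    static void sProcessCaptureResult(const callback_ops *cb, int frame_number) {
        Device *d = const_cast<Device *>(static_cast<const Device *>(cb));
        d->processCaptureResult(frame_number);
    }
};

int main() {
    Device dev;
    // What initialize() effectively hands to the HAL is this pointer:
    const callback_ops *halSideOps = &dev;
    // The HAL later invokes the callback through the C table:
    halSideOps->process_capture_result(halSideOps, 1);
    return 0;
}

sNotify forwards shutter and error notifications in exactly the same way.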

The callback method for the camera HAL device. The focus of the analysis here is the returnOutputBuffers(...) path.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
    ATRACE_CALL();

    status_t res;

    uint32_t frameNumber = result->frame_number;
    if (result->result == NULL && result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d",
                frameNumber);
        return;
    }

    // For HAL3.2 or above, if the HAL doesn't support partial results, it must
    // always set partial_result to 1 when metadata is included in this result.
    if (!mUsePartialResult &&
            mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 &&
            result->result != NULL &&
            result->partial_result != 1) {
        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
                " if partial result is not supported",
                frameNumber, result->partial_result);
        return;
    }

    bool isPartialResult = false;
    CameraMetadata collectedPartialResult;
    CaptureResultExtras resultExtras;
    bool hasInputBufferInRequest = false;

    // Get the shutter timestamp and resultExtras from the list of in-flight
    // requests, where they were added by the shutter notify for this frame.
    // If the shutter timestamp hasn't been received yet, append the output
    // buffers to the in-flight request; they will be returned when the
    // shutter timestamp arrives. Update the in-flight status and remove the
    // in-flight entry if all result data and the shutter timestamp have been
    // received.
    nsecs_t shutterTimestamp = 0;

    {
        Mutex::Autolock l(mInFlightLock);
        ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
        if (idx == NAME_NOT_FOUND) {
            SET_ERR("Unknown frame number for capture result: %d",
                    frameNumber);
            return;
        }
        InFlightRequest &request = mInFlightMap.editValueAt(idx);
        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                ", frameNumber = %" PRId64 ", burstId = %" PRId32
                ", partialResultCount = %d",
                __FUNCTION__, request.resultExtras.requestId,
                request.resultExtras.frameNumber, request.resultExtras.burstId,
                result->partial_result);
        // Always update the partial count to the latest one if it's not 0
        // (buffer-only results). When the framework aggregates adjacent
        // partial results into one, the latest partial count will be used.
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;

        // Check if this result carries only partial metadata
        if (mUsePartialResult && result->result != NULL) {
            if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
                if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
                    SET_ERR("Result is malformed for frame %d: partial_result %u must be  in"
                            " the range of [1, %d] when metadata is included in the result",
                            frameNumber, result->partial_result, mNumPartialResults);
                    return;
                }
                isPartialResult = (result->partial_result < mNumPartialResults);
                if (isPartialResult) {
                    request.partialResult.collectedResult.append(result->result);
                }
            } else {
                camera_metadata_ro_entry_t partialResultEntry;
                res = find_camera_metadata_ro_entry(result->result,
                        ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
                if (res != NAME_NOT_FOUND &&
                        partialResultEntry.count > 0 &&
                        partialResultEntry.data.u8[0] ==
                        ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
                    // A partial result. Flag this as such, and collect this
                    // set of metadata into the in-flight entry.
                    isPartialResult = true;
                    request.partialResult.collectedResult.append(
                        result->result);
                    request.partialResult.collectedResult.erase(
                        ANDROID_QUIRKS_PARTIAL_RESULT);
                }
            }

            if (isPartialResult) {
                // Fire off a 3A-only result if possible
                if (!request.partialResult.haveSent3A) {
                    request.partialResult.haveSent3A =
                            processPartial3AResult(frameNumber,
                                    request.partialResult.collectedResult,
                                    request.resultExtras);
                }
            }
        }

        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;

        // Did we get the (final) result metadata for this capture?
        if (result->result != NULL && !isPartialResult) {
            if (request.haveResultMetadata) {
                SET_ERR("Called multiple times with metadata for frame %d",
                        frameNumber);
                return;
            }
            if (mUsePartialResult &&
                    !request.partialResult.collectedResult.isEmpty()) {
                collectedPartialResult.acquire(
                    request.partialResult.collectedResult);
            }
            request.haveResultMetadata = true;
        }

        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            } else {
                ALOGW("%s: Input buffer should be NULL if there is no input"
                        " buffer sent in the request",
                        __FUNCTION__);
            }
        }
        request.numBuffersLeft -= numBuffersReturned;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
            return;
        }

        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(result->result,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res == OK && entry.count == 1) {
            request.sensorTimestamp = entry.data.i64[0];
        }

        // If the shutter event hasn't been received yet, append the output
        // buffers to the in-flight request. Otherwise, return the output
        // buffers to the streams.
        if (shutterTimestamp == 0) {
            request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        } else {
            returnOutputBuffers(result->output_buffers,
                result->num_output_buffers, shutterTimestamp);
        }

        if (result->result != NULL && !isPartialResult) {
            if (shutterTimestamp == 0) {
                request.pendingMetadata = result->result;
                request.partialResult.collectedResult = collectedPartialResult;
            } else {
                CameraMetadata metadata;
                metadata = result->result;
                sendCaptureResult(metadata, request.resultExtras,
                    collectedPartialResult, frameNumber, hasInputBufferInRequest,
                    request.aeTriggerCancelOverride);
            }
        }

        removeInFlightRequestIfReadyLocked(idx);
    } // scope for mInFlightLock

    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
            // Note: stream may be deallocated at this point, if this buffer was the
            // last reference to it.
            if (res != OK) {
                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                      "  its stream:%s (%d)",  __FUNCTION__,
                      frameNumber, strerror(-res), res);
            }
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                    " buffer sent in the request, skipping input buffer return.",
                    __FUNCTION__);
        }
    }
}

returnOutputBuffers first obtains the Camera3Stream object and then calls its returnBuffer method. (Camera3Stream derives from the camera3_stream HAL struct, so Camera3Stream::cast() is essentially a static cast of the HAL stream pointer back to the wrapper object.)

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

void Camera3Device::returnOutputBuffers(
        const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
        nsecs_t timestamp) {
    for (size_t i = 0; i < numBuffers; i++)
    {
        Camera3Stream *stream = Camera3Stream::cast(outputBuffers[i].stream);
        status_t res = stream->returnBuffer(outputBuffers[i], timestamp);
        // Note: the stream may be deallocated at this point if this buffer
        // was the last reference to it.
        if (res != OK) {
            ALOGE("Can't return buffer to its stream: %s (%d)",
                strerror(-res), res);
        }
    }
}

Here returnBufferLocked is called to continue returning the buffer.

frameworks/av/services/camera/libcameraservice/device3/Camera3Stream.cpp

status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
        nsecs_t timestamp) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);

    /**
     * TODO: Check that the state is valid first.
     *
     * <HAL3.2 IN_CONFIG and IN_RECONFIG in addition to CONFIGURED.
     * >= HAL3.2 CONFIGURED only
     *
     * Do this for getBuffer as well.
     */
    status_t res = returnBufferLocked(buffer, timestamp);
    if (res == OK) {
        fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
    }

    // Even if returning the buffer failed, we still want to signal whoever is
    // waiting for the buffer to be returned.
    mOutputBufferReturnedSignal.signal();

    return res;
}

The Camera3OutputStream object was created in 《Android 源码 Camera2 预览流程分析一》 (Part 1 of this series). Here the returnAnyBufferLocked function is called.

frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp

status_t Camera3OutputStream::returnBufferLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp) {
    ATRACE_CALL();

    status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;

    return OK;
}

The key method to look at here is returnBufferCheckedLocked.

frameworks/av/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp

status_t Camera3IOStreamBase::returnAnyBufferLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp,
        bool output) {
    status_t res;

    // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
    // decrementing the internal refcount next. In case this is the last ref, we
    // might get destructed on the decStrong(), so keep an sp around until the
    // end of the call - otherwise have to sprinkle the decStrong on all exit
    // points.
    sp<Camera3IOStreamBase> keepAlive(this);
    decStrong(this);

    if ((res = returnBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    sp<Fence> releaseFence;
    res = returnBufferCheckedLocked(buffer, timestamp, output,
                                    &releaseFence);
    // Res may be an error, but we still want to decrement our owned count
    // to enable clean shutdown. So we'll just return the error but otherwise
    // carry on

    if (releaseFence != 0) {
        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
    }

    if (output) {
        mHandoutOutputBufferCount--;
    }

    mHandoutTotalBufferCount--;
    if (mHandoutTotalBufferCount == 0 && mState != STATE_IN_CONFIG &&
            mState != STATE_IN_RECONFIG && mState != STATE_PREPARING) {
        /**
         * Avoid a spurious IDLE->ACTIVE->IDLE transition when using buffers
         * before/after register_stream_buffers during initial configuration
         * or re-configuration, or during prepare pre-allocation
         */
        ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__,
                mId);
        sp<StatusTracker> statusTracker = mStatusTracker.promote();
        if (statusTracker != 0) {
            statusTracker->markComponentIdle(mStatusId, mCombinedFence);
        }
    }

    mBufferReturnedSignal.signal();

    if (output) {
        mLastTimestamp = timestamp;
    }

    return res;
}

Looking back again at 《Android 源码 Camera2 预览流程分析二》 (Part 2 of this series): this is where the buffer is queued to the consumer-facing window via queueBuffer(), and the camera frame really starts to be consumed.

frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp

status_t Camera3OutputStream::returnBufferCheckedLocked(
            const camera3_stream_buffer &buffer,
            nsecs_t timestamp,
            bool output,
            /*out*/
            sp<Fence> *releaseFenceOut) {

    (void)output;
    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring
     * during queueBuffer (this thread will enter
     * StreamingProcessor::onFrameAvailable)
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    mLock.unlock();

    /**
     * Return the buffer back to the ANativeWindow
     */
    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
        // Cancel the buffer
        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                container_of(buffer.buffer, ANativeWindowBuffer, handle),
                anwReleaseFence);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }

        // Set the timestamp
        res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                  __FUNCTION__, mId, strerror(-res), res);
            return res;
        }

        // queueBuffer to the consumer-facing window
        res = currentConsumer->queueBuffer(currentConsumer.get(),
                container_of(buffer.buffer, ANativeWindowBuffer, handle),
                anwReleaseFence);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error queueing buffer to native window: "
                  "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }
    }
    mLock.lock();

    // Once a valid buffer has been returned to the queue, it can no longer be
    // dequeued all at once for pre-allocation.
    if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    if (res != OK) {
        close(anwReleaseFence);
    }

    *releaseFenceOut = releaseFence;

    return res;
}

Now let's continue with the analysis of starting the channel.

  1. Find the mm_camera_obj_t object by the camera handle;
  2. Call mm_camera_start_channel for further processing.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c

static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
                                            uint32_t ch_id)
{
    int32_t rc = -1;
    mm_camera_obj_t * my_obj = NULL;

    pthread_mutex_lock(&g_intf_lock);
    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);

    if (my_obj) {
        pthread_mutex_lock(&my_obj->cam_lock);
        pthread_mutex_unlock(&g_intf_lock);
        rc = mm_camera_start_channel(my_obj, ch_id);
    } else {
        pthread_mutex_unlock(&g_intf_lock);
    }
    CDBG("%s :X rc = %d", __func__, rc);
    return rc;
}
  1. Look up the mm_channel_t object;
  2. Call mm_channel_fsm_fn for further processing.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c

int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
                                uint32_t ch_id)
{
    int32_t rc = -1;
    mm_channel_t * ch_obj =
        mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (NULL != ch_obj) {
        pthread_mutex_lock(&ch_obj->ch_lock);
        pthread_mutex_unlock(&my_obj->cam_lock);

        rc = mm_channel_fsm_fn(ch_obj,
                               MM_CHANNEL_EVT_START,
                               NULL,
                               NULL);
    } else {
        pthread_mutex_unlock(&my_obj->cam_lock);
    }

    return rc;
}

At this point the mm_channel_t state is MM_CHANNEL_STATE_STOPPED, so the state machine dispatches to mm_channel_fsm_fn_stopped.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
                          mm_channel_evt_type_t evt,
                          void * in_val,
                          void * out_val)
{
    int32_t rc = -1;

    CDBG("%s : E state = %d", __func__, my_obj->state);
    switch (my_obj->state) {
    ......
    case MM_CHANNEL_STATE_STOPPED:
        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
        break;
    ......
    default:
        CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
        break;
    }

    /* unlock ch_lock */
    pthread_mutex_unlock(&my_obj->ch_lock);
    CDBG("%s : X rc = %d", __func__, rc);
    return rc;
}
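For reference, the states this FSM switches over come from mm_camera.h. Roughly (reproduced from memory; treat the exact names and comments as approximate):

/* Approximate shape of the channel state enum in
 * QCamera2/stack/mm-camera-interface/inc/mm_camera.h (from memory). */
typedef enum {
    MM_CHANNEL_STATE_NOTUSED = 0,   /* not used */
    MM_CHANNEL_STATE_STOPPED,       /* stopped */
    MM_CHANNEL_STATE_ACTIVE,        /* active, at least one stream started */
    MM_CHANNEL_STATE_PAUSED,        /* paused */
    MM_CHANNEL_STATE_MAX
} mm_channel_state_type_t;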
  1. Call mm_channel_start to actually start the channel;
  2. The mm_channel_t state is then changed to MM_CHANNEL_STATE_ACTIVE.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
                                  mm_channel_evt_type_t evt,
                                  void * in_val,
                                  void * out_val)
{
    int32_t rc = 0;
    CDBG("%s : E evt = %d", __func__, evt);
    switch (evt) {
    ......
    case MM_CHANNEL_EVT_START:
        {
            rc = mm_channel_start(my_obj);
            /* the first stream is started in the stopped state,
             * then the channel moves to the active state */
            if (0 == rc) {
                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
            }
        }
        break;
    ......
    default:
        CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
                   __func__, my_obj->state, evt);
        break;
    }
    CDBG("%s : E rc = %d", __func__, rc);
    return rc;
}

Core steps:

  1. Launch the cb thread and the cmd thread;
  2. Start all of the streams in the channel.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

int32_t mm_channel_start(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i, j;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_start = 0;
    mm_stream_t *s_obj = NULL;
    int meta_stream_idx = 0;

    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
        if (my_obj->streams[i].my_hdl > 0) {
            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
                                                          my_obj->streams[i].my_hdl);
            if (NULL != s_obj) {
                /* remember the metadata stream index */
                if (s_obj->stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
                    meta_stream_idx = num_streams_to_start;
                }
                s_objs[num_streams_to_start++] = s_obj;
            }
        }
    }

    if (meta_stream_idx > 0) {
        /* always start the metadata stream first,
         * so swap its stream object with the first one */
        s_obj = s_objs[0];
        s_objs[0] = s_objs[meta_stream_idx];
        s_objs[meta_stream_idx] = s_obj;
    }

    if (NULL != my_obj->bundle.super_buf_notify_cb) {
        /* the cb needs to be sent upward, so launch the threads */
        /* init the superbuf queue */
        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
        my_obj->bundle.superbuf_queue.num_streams = num_streams_to_start;
        my_obj->bundle.superbuf_queue.expected_frame_id = 0;
        my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
        my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_num_frames = 0;

        for (i = 0; i < num_streams_to_start; i++) {
            /* set the bundled flag for the stream */
            s_objs[i]->is_bundled = 1;
            /* init the bundled streams (originally the invalid value -1) */
            my_obj->bundle.superbuf_queue.bundled_streams[i] = s_objs[i]->my_hdl;
        }

        /* launch the cb thread for dispatching super bufs through the cb */
        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
                                    mm_channel_dispatch_super_buf,
                                    (void*)my_obj);

        /* launch the cmd thread for the super buf dataCB */
        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
                                    mm_channel_process_stream_buf,
                                    (void*)my_obj);

        /* set the flag to TRUE */
        my_obj->bundle.is_active = TRUE;
    }

    for (i = 0; i < num_streams_to_start; i++) {
        /* all streams within a channel should be started at the same time */
        if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
            CDBG_ERROR("%s: stream already started idx(%d)", __func__, i);
            rc = -1;
            break;
        }

        /* allocate buf */
        rc = mm_stream_fsm_fn(s_objs[i],
                              MM_STREAM_EVT_GET_BUF,
                              NULL,
                              NULL);
        if (0 != rc) {
            CDBG_ERROR("%s: get buf failed at idx(%d)", __func__, i);
            break;
        }

        /* register buf */
        rc = mm_stream_fsm_fn(s_objs[i],
                              MM_STREAM_EVT_REG_BUF,
                              NULL,
                              NULL);
        if (0 != rc) {
            CDBG_ERROR("%s: reg buf failed at idx(%d)", __func__, i);
            break;
        }

        /* start the stream */
        rc = mm_stream_fsm_fn(s_objs[i],
                              MM_STREAM_EVT_START,
                              NULL,
                              NULL);
        if (0 != rc) {
            CDBG_ERROR("%s: start stream failed at idx(%d)", __func__, i);
            break;
        }
    }

    /* error handling */
    if (0 != rc) {
        for (j = 0; j <= i; j++) {
            /* stop the stream */
            mm_stream_fsm_fn(s_objs[j],
                             MM_STREAM_EVT_STOP,
                             NULL,
                             NULL);

            /* unregister buf */
            mm_stream_fsm_fn(s_objs[j],
                             MM_STREAM_EVT_UNREG_BUF,
                             NULL,
                             NULL);

            /* put buf back */
            mm_stream_fsm_fn(s_objs[j],
                             MM_STREAM_EVT_PUT_BUF,
                             NULL,
                             NULL);
        }

        /* destroy the super buf cmd thread */
        if (TRUE == my_obj->bundle.is_active) {
            /* first stop bundle thread */
            mm_camera_cmd_thread_release(&my_obj->cmd_thread);
            mm_camera_cmd_thread_release(&my_obj->cb_thread);

            /* deinit the superbuf queue */
            mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);

            /* memset the bundle info */
            memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
        }
    }
    my_obj->bWaitForPrepSnapshotDone = 0;

    return rc;
}
