
Android O Camera Analysis (Part 2): (API 1) open, startPreview, and related flows

Contents

  • Camera.open() flow notes
  • Camera.setPreviewDisplay() flow notes
  • Camera.startPreview() flow
    • m_stateMachine state machine analysis
    • VIDIOC_DQBUF analysis
  • Summary

Camera.open() flow notes

Camera.open() flow:

frameworks/base/core/java/android/hardware/Camera.java
open()
--->return new Camera(cameraId);
    --->cameraInitNormal(cameraId);
        --->cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);
            --->looper = Looper.myLooper()
            --->String packageName = ActivityThread.currentOpPackageName();
            --->return native_setup(new WeakReference<Camera>(this), cameraId, halVersion, packageName);    //JNI call into android_hardware_Camera_native_setup() in frameworks/base/core/jni/android_hardware_Camera.cpp
                --->const char16_t *rawClientName = reinterpret_cast<const char16_t*>(env->GetStringChars(clientPackageName, NULL));
                --->if (halVersion == CAMERA_HAL_API_VERSION_NORMAL_CONNECT)
                --->camera = Camera::connect(cameraId, clientName, Camera::USE_CALLING_UID, Camera::USE_CALLING_PID);
                    --->return CameraBaseT::connect(cameraId, clientPackageName, clientUid, clientPid);    //lands in CameraBase.cpp
                        --->sp<TCam> c = new TCam(cameraId);    // TCam = Camera
                        --->sp<TCamCallbacks> cl = c;
                        --->const sp<::android::hardware::ICameraService> cs = getCameraService();    //obtain the CameraService
                        --->TCamConnectService fnConnectService = TCamTraits::fnConnectService;    //TCamTraits = CameraTraits<Camera>, defined in Camera.cpp; fnConnectService = &::android::hardware::ICameraService::connect;
                        --->(cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid, clientPid, /*out*/ &c->mCamera);    //this is effectively CameraService::connect()
                            --->connectHelper<ICameraClient,Client>(cameraClient, id, CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, clientPid, API_1, false, false,/*out*/client);
                                --->makeClient(&client, ...)
                                    --->sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                                    --->*client = new CameraClient(cameraService, tmp, packageName, cameraIdToInt(cameraId), facing, clientPid, clientUid, getpid(), legacyMode);
                                        --->Client(cameraService, cameraClient, clientPackageName, String8::format("%d", cameraId), cameraFacing, clientPid, clientUid, servicePid)
                                            --->mRemoteCallback = cameraClient;    
                                --->client->initialize(mCameraProviderManager);      // mCameraProviderManager is key; it is set in CameraService::enumerateProviders()
                                    --->CameraClient::initialize(providerPtr)
                                        --->mHardware = new CameraHardwareInterface(camera_device_name);
                                        --->mHardware->initialize(manager);
                                            --->return manager->openSession(mName.string(), this, &mHidlDevice); // manager = CameraProviderManager, this = CameraHardwareInterface , mHidlDevice = CameraDevice
                                                    --->CameraProviderManager::openSession(id, callback, session)
                                                        --->auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
                                                            --->for (auto& provider : mProviders)        //iterate over mProviders; this is key, see the mProviders notes below
                                                                --->for (auto& deviceInfo : provider->mDevices)        //mDevices is populated in addDevice
                                                                    --->return deviceInfo.get();
                                                        --->auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
                                                        --->deviceInfo1->mInterface->open(callback);  //mInterface is actually the CameraDevice object; see the notes on module and CameraDevice
                                                            --->CameraDevice::open(callback)
                                                                --->mModule->getCameraInfo(mCameraIdInt, &info);        //calls the HAL get_camera_info function
                                                                --->mModule->open(mCameraId.c_str(), (hw_device_t **)&mDevice);       //calls the HAL open function to obtain the device; see "HAL layer analysis"; mDevice = mCameraDevice
                                                                --->initHalPreviewWindow();    //fill in the preview-related function pointers
                                                                        --->mHalPreviewWindow.cancel_buffer = sCancelBuffer;
                                                                        --->mHalPreviewWindow.lock_buffer = sLockBuffer;
                                                                        --->mHalPreviewWindow.dequeue_buffer = sDequeueBuffer;
                                                                        --->mHalPreviewWindow.enqueue_buffer = sEnqueueBuffer;
                                                                        --->mHalPreviewWindow.set_buffer_count = sSetBufferCount;
                                                                        --->mHalPreviewWindow.set_buffers_geometry = sSetBuffersGeometry;
                                                                        --->mHalPreviewWindow.set_crop = sSetCrop;
                                                                        --->mHalPreviewWindow.set_timestamp = sSetTimestamp;
                                                                        --->mHalPreviewWindow.set_usage = sSetUsage;
                                                                        --->mHalPreviewWindow.set_swap_interval = sSetSwapInterval;
                                                                    --->mHalPreviewWindow.get_min_undequeued_buffer_count = sGetMinUndequeuedBufferCount;
                                                                --->mDeviceCallback = callback;
                                                                --->mQDeviceCallback = vendor::qti::hardware::camera::device::V1_0::IQCameraDeviceCallback::castFrom(callback);    //vendor-layer callback
                                                                --->mDevice->ops->set_callbacks(mDevice, sNotifyCb, sDataCb, sDataCbTimestamp, sGetMemory, this);    //mDevice is the device obtained by opening the HAL
                                                        --->*session = deviceInfo1->mInterface;    // mInterface is the CameraDevice object
                                        --->mHardware->setCallbacks(notifyCallback, dataCallback, dataCallbackTimestamp,  handleCallbackTimestampBatch, (void *)(uintptr_t)mCameraId);
                                            --->mNotifyCb = notify_cb;
                                            --->mDataCb = data_cb;
                                            --->mDataCbTimestamp = data_cb_timestamp;
                                            --->mDataCbTimestampBatch = data_cb_timestamp_batch;
                                            --->mCbUser = user;
                                            --->*device = client;
                        --->IInterface::asBinder(c->mCamera)->linkToDeath(c);    //register a binder death notification
                        --->return c;    // c = the Camera object
                --->jclass clazz = env->GetObjectClass(thiz);
                --->sp<JNICameraContext> context = new JNICameraContext(env, weak_this, clazz, camera);    
                    --->mCameraJObjectWeak = env->NewGlobalRef(weak_this);
                    --->mCameraJClass = (jclass)env->NewGlobalRef(clazz);
                    --->mCamera = camera;
                --->context->incStrong((void*)android_hardware_Camera_native_setup);
                --->camera->setListener(context);
                --->env->SetLongField(thiz, fields.context, (jlong)context.get());
                --->CameraInfo cameraInfo;
                --->status_t rc = Camera::getCameraInfo(cameraId, &cameraInfo);
                --->switch (cameraInfo.orientation) {    //Update default display orientation in case the sensor is reverse-landscape
                --->case 180:
                --->defaultOrientation = 180;
                --->camera->sendCommand(CAMERA_CMD_SET_DISPLAY_ORIENTATION, defaultOrientation, 0);    //Camera::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
                    --->sp <::android::hardware::ICamera> c = mCamera;    // mCamera is the CameraClient obtained from CameraService::connect()
                    --->c->sendCommand(cmd, arg1, arg2);    //this calls CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
                        --->orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT);
                        --->mOrientation = orientation;
                        --->if (mPreviewWindow != 0)
                        --->mHardware->setPreviewTransform(mOrientation);    //this calls CameraHardwareInterface::setPreviewTransform(int transform)
                            --->mPreviewTransform = transform;
                            --->if (mPreviewWindow != nullptr)
                            --->native_window_set_buffers_transform(mPreviewWindow.get(), mPreviewTransform);
                                --->window->perform(window, NATIVE_WINDOW_SET_BUFFERS_TRANSFORM, transform);
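
The part of this flow that is easy to miss is how initHalPreviewWindow() fills mHalPreviewWindow with plain C function pointers (sDequeueBuffer, sEnqueueBuffer, ...) so that the HAL can drive the preview window without knowing about C++ objects. Below is a minimal sketch of that static-trampoline pattern; preview_ops, its user field and the class name are simplified stand-ins for illustration, not the actual preview_stream_ops definition in AOSP.

#include <cstdio>

// Simplified stand-in for a preview_stream_ops-style hook table (hypothetical).
struct preview_ops {
    void *user;                                  // back-pointer to the owning object (invented for this sketch)
    int (*dequeue_buffer)(preview_ops *w, int *buffer_id);
    int (*enqueue_buffer)(preview_ops *w, int buffer_id);
};

class PreviewWindowOwner {
public:
    PreviewWindowOwner() {
        // Equivalent of initHalPreviewWindow(): fill the table with static trampolines.
        mOps.user = this;
        mOps.dequeue_buffer = sDequeueBuffer;
        mOps.enqueue_buffer = sEnqueueBuffer;
    }
    preview_ops *ops() { return &mOps; }

private:
    // Static trampolines: recover "this" from the table and forward to the member function.
    static int sDequeueBuffer(preview_ops *w, int *buffer_id) {
        return static_cast<PreviewWindowOwner *>(w->user)->dequeueBuffer(buffer_id);
    }
    static int sEnqueueBuffer(preview_ops *w, int buffer_id) {
        return static_cast<PreviewWindowOwner *>(w->user)->enqueueBuffer(buffer_id);
    }

    int dequeueBuffer(int *buffer_id) { *buffer_id = 0; return 0; }
    int enqueueBuffer(int buffer_id)  { std::printf("display buffer %d\n", buffer_id); return 0; }

    preview_ops mOps{};
};

int main() {
    PreviewWindowOwner owner;
    int id = -1;
    owner.ops()->dequeue_buffer(owner.ops(), &id);   // HAL-side call goes through the table
    owner.ops()->enqueue_buffer(owner.ops(), id);
    return 0;
}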
           

Camera.setPreviewDisplay() flow notes

frameworks/base/core/java/android/hardware/Camera.java
setPreviewDisplay(SurfaceHolder holder)
--->setPreviewSurface(holder.getSurface());    // JNI call into android_hardware_Camera_setPreviewSurface(JNIEnv *env, jobject thiz, jobject jSurface)
    --->sp<Camera> camera = get_native_camera(env, thiz, NULL);
        --->JNICameraContext* context = reinterpret_cast<JNICameraContext*>(env->GetLongField(thiz, fields.context));    //get the context created during open
        --->camera = context->getCamera();    //get the camera set up during open
        --->return camera;
    --->sp<IGraphicBufferProducer> gbp;
    --->sp<Surface> surface;
    --->surface = android_view_Surface_getSurface(env, jSurface);
        --->sp<Surface> sur;
        --->sur = reinterpret_cast<Surface *>(env->GetLongField(surfaceObj, gSurfaceClassInfo.mNativeObject));
        --->return sur;
    --->gbp = surface->getIGraphicBufferProducer();
        --->return mGraphicBufferProducer;
    --->camera->setPreviewTarget(gbp)
        --->sp <::android::hardware::ICamera> c = mCamera;    //mCamera is the CameraClient obtained from CameraService::connect(&mCamera)
        --->c->setPreviewTarget(bufferProducer);    //CameraClient::setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
            --->sp<IBinder> binder;
            --->binder = IInterface::asBinder(bufferProducer);
            --->window = new Surface(bufferProducer, /*controlledByApp*/ true);
            --->return setPreviewWindow(binder, window);
                --->native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
                    --->return window->perform(window, NATIVE_WINDOW_API_CONNECT, api);    // api = NATIVE_WINDOW_API_CAMERA; display-related, not analyzed here
                --->disconnectWindow(mPreviewWindow);    //disconnect the old mPreviewWindow
                --->mSurface = binder;
                --->mPreviewWindow = window;    //use the new mPreviewWindow
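
Putting the trace above together: the app-side Surface only contributes its IGraphicBufferProducer, and the camera service rebuilds a Surface around it and connects it as NATIVE_WINDOW_API_CAMERA. A minimal sketch of that step is below, assuming an AOSP build environment; attachPreviewTarget is a hypothetical helper name, and locking, disconnection of the old window, and error cleanup are omitted.

#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <system/window.h>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>

using namespace android;

// Sketch of what CameraClient::setPreviewTarget() boils down to (simplified).
status_t attachPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer,
                             sp<ANativeWindow>* outWindow) {
    if (bufferProducer == nullptr) return BAD_VALUE;

    // Wrap the producer coming from the app's Surface in a service-side Surface.
    sp<ANativeWindow> window = new Surface(bufferProducer, /*controlledByApp*/ true);

    // Declare the camera API as the producer of this window's buffers.
    status_t err = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
    if (err != OK) return err;

    *outWindow = window;   // this is what ends up as CameraClient::mPreviewWindow
    return OK;
}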
           

Camera.startPreview() flow

frameworks/base/core/java/android/hardware/Camera.java
startPreview()
--->android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
    --->sp<Camera> camera = get_native_camera(env, thiz, NULL);
        --->JNICameraContext* context = reinterpret_cast<JNICameraContext*>(env->GetLongField(thiz, fields.context));    //get the context created during open
        --->camera = context->getCamera();    //get the camera set up during open
        --->return camera;
    --->camera->startPreview()    //Camera::startPreview()
        --->sp <::android::hardware::ICamera> c = mCamera;    // mCamera is obtained from CameraService::connect(&mCamera), so mCamera = CameraClient (a proxy object)
        --->c->startPreview();    // this calls CameraClient::startPreview()
            --->return startCameraMode(CAMERA_PREVIEW_MODE);
                --->return startPreviewMode();
                    --->mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);    //scale to fit the window size
                    --->mHardware->setPreviewTransform(mOrientation);    //set the rotation orientation
                    --->mHardware->setPreviewWindow(mPreviewWindow);    //this calls CameraHardwareInterface::setPreviewWindow()
                        --->mPreviewWindow = buf;    // note: these mPreviewWindow members have different scopes; this one is CameraHardwareInterface::mPreviewWindow, the one above is CameraClient::mPreviewWindow
                        --->return mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr);    //mHidlDevice is the CameraDevice
                            --->mHalPreviewWindow.mPreviewCallback = window;
                            --->mDevice->ops->set_preview_window(mDevice, (window == nullptr) ? nullptr : &mHalPreviewWindow)    //mDevice is the device obtained by opening the HAL; this calls QCamera2HardwareInterface::set_preview_window()
                                --->QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
                                --->hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
                                    --->m_stateMachine.procAPI(api, api_payload);    //update the state machine; see "m_stateMachine state machine analysis"
                                        --->qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
                                        --->node->cmd = QCAMERA_SM_CMD_TYPE_API;
                                        --->node->evt = evt;
                                        --->node->evt_payload = api_payload;
                                        --->api_queue.enqueue((void *)node)    //enqueue the node
                                        --->cam_sem_post(&cmd_sem);    //notify the processing thread
                    --->mHardware->startPreview();    //this calls CameraHardwareInterface::startPreview()
                        --->return mHidlDevice->startPreview();    //mHidlDevice is the CameraDevice
                            --->mDevice->ops->start_preview(mDevice)    //mDevice is the device obtained by opening the HAL; this calls QCamera2HardwareInterface::start_preview()
                                --->QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
                                --->qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
                                --->hw->processAPI(evt, NULL);
                                    --->m_stateMachine.procAPI(api, api_payload);    //update the state machine; see "m_stateMachine state machine analysis"
                                        --->pme->stateMachine(node->evt, node->evt_payload);    //note: this runs on another thread: QCameraStateMachine::stateMachine()
                                            --->switch (m_state) {
                                            --->case QCAMERA_SM_STATE_PREVIEW_STOPPED:    //this is the initial state
                                            --->procEvtPreviewStoppedState(evt, payload);
                                                --->switch (evt) {
                                                --->case QCAMERA_SM_EVT_START_PREVIEW:
                                                --->m_parent->preparePreview();    // m_parent = the QCamera2HardwareInterface object
                                                    --->addChannel(QCAMERA_CH_TYPE_PREVIEW);
                                                        --->addPreviewChannel();
                                                            --->QCameraChannel *pChannel = NULL;
                                                            --->pChannel = new QCameraChannel(handle, mCameraHandle->ops);// mCameraHandle->ops = mm_camera_ops, see "HAL layer analysis"
                                                                --->m_camHandle = cam_handle;
                                                                --->m_camOps = cam_ops;    //m_camOps = mm_camera_ops
                                                            --->pChannel->init(NULL, NULL, NULL);
                                                                --->m_handle = m_camOps->add_channel(m_camHandle, attr, dataCB, userData);    // m_camOps = mm_camera_ops, so this calls mm_camera_intf_add_channel()
                                                                    --->mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
                                                                    --->ch_obj = &my_obj->ch[ch_idx];
                                                                        --->mm_channel_init(ch_obj, attr, channel_cb, userdata);
                                                                            --->mm_camera_poll_thread_launch(&my_obj->poll_thread[0], MM_CAMERA_POLL_TYPE_DATA);    //launch a thread; one of its jobs is to invoke the callback when the camera has data
                                                                                --->poll_cb->pfds[0] = -1;
                                                                                --->poll_cb->pfds[1] = -1;
                                                                                --->pipe(poll_cb->pfds);
                                                                                --->pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);    //create the thread
                                                                                    --->return mm_camera_poll_fn(poll_cb);
                                                                                        --->do {
                                                                                        --->poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);    //wait in poll
                                                                                        --->if (poll_cb->poll_fds[0].revents & POLLIN) {
                                                                                        --->mm_camera_poll_proc_pipe(poll_cb);    //adding a poll fd takes this branch
                                                                                            --->read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
                                                                                            --->poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
                                                                                            --->poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
                                                                                            --->poll_cb->num_fds++;
                                                                                        --->}
                                                                                        --->if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) && (poll_cb->poll_fds[i].revents & POLLIN)) {
                                                                                        --->poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);    //when the camera has data this branch runs; see "VIDIOC_DQBUF analysis"
                                                                                        --->}
                                                            --->addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA, metadata_stream_cb_routine, this);
                                                            --->addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW, preview_stream_cb_routine, this);
                                                                --->pChannel->addStream(*this, pStreamInfo, NULL, &padding_info, main_cb, userData, bDynAllocBuf, deferAllocation, ROTATE_0, cam_type);
                                                                    --->QCameraStream *pStream = new QCameraStream(allocator, m_camHandle, ch_handle, m_camOps, paddingInfo, bDeffAlloc, online_rotation);
                                                                        --->mCamOps(camOps)    //mCamOps = mm_camera_ops
                                                                        --->mMemVtbl.user_data = this;
                                                                        --->mMemVtbl.get_bufs = get_bufs;
                                                                        --->mMemVtbl.put_bufs = put_bufs;
                                                                    --->pStream->init(streamInfoBuf, miscBuf, stream_cb, userdata, bDynAllocBuf);    // stream_cb = preview_stream_cb_routine    
                                                                        --->mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
                                                                        --->configStream()
                                                                            --->stream_config.mem_vtbl = mMemVtbl;
                                                                            --->stream_config.stream_cb = dataNotifyCB;
                                                                            --->stream_config.userdata = this;
                                                                            --->mCamOps->config_stream(mCamHandle, mChannelHandle, mHandle, &stream_config);    //mCamOps = mm_camera_ops, so this calls mm_camera_intf_config_stream
                                                                                --->my_obj = mm_camera_util_get_camera_by_handler(handle);
                                                                                --->mm_camera_config_stream(my_obj, chid, strid, config);
                                                                                    --->mm_channel_t * ch_obj =  mm_camera_util_get_channel_by_handler(my_obj, ch_id);
                                                                                    --->payload.stream_id = stream_id;
                                                                                    --->payload.config = config;
                                                                                        --->mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_CONFIG_STREAM, (void *)&payload, NULL);
                                                                                            --->mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
                                                                                                --->mm_evt_paylod_config_stream_t *payload = (mm_evt_paylod_config_stream_t *)in_val;
                                                                                                --->mm_channel_config_stream(my_obj, payload->stream_id, payload->config);
                                                                                                    --->mm_stream_t *stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
                                                                                                    --->mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_SET_FMT, (void *)config, NULL);
                                                                                                        --->mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
                                                                                                            --->mm_camera_stream_config_t *config = (mm_camera_stream_config_t *)in_val;
                                                                                                            --->mm_stream_config(my_obj, config);
                                                                                                                --->my_obj->buf_cb[cb_index].cb = config->stream_cb;    // stream_cb = dataNotifyCB
                                                                                                                --->my_obj->buf_cb[cb_index].user_data = config->userdata;    // userdata = this
                                                                                                                --->my_obj->mem_vtbl = config->mem_vtbl;    //mem_vtbl = mMemVtbl
                                                                                                                --->mm_stream_set_fmt(my_obj);
                                                                                                                    --->struct msm_v4l2_format_data msm_fmt;
                                                                                                                    --->msm_fmt.width = (unsigned int)my_obj->stream_info->dim.width;    //set the image width
                                                                                                                    --->msm_fmt.height = (unsigned int)my_obj->stream_info->dim.height;    //set the image height
                                                                                                                    --->msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);    //get the V4L2 pixel format
                                                                                                                    --->memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
                                                                                                                    --->ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);    //set the format
                                                                        --->mDataCB = stream_cb;    // mDataCB = QCamera2HardwareInterface::preview_stream_cb_routine()
                                                                    --->mStreams.add(pStream);    
                                                            --->m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
                                                    --->addChannel(QCAMERA_CH_TYPE_ZSL);    //similar to the above, not analyzed here
                                                --->m_parent->startPreview();    // m_parent = the QCamera2HardwareInterface object
                                                    --->startChannel(QCAMERA_CH_TYPE_PREVIEW);
                                                        --->m_channels[ch_type]->start();    // ch_type = QCAMERA_CH_TYPE_PREVIEW
                                                            --->mStreams[i]->start();    //start the stream
                                                                --->mDataQ.init();
                                                                --->mProcTh.launch(dataProcRoutine, this);    //launch a thread
                                                                    --->QCameraStream *pme = (QCameraStream *)data;
                                                                    --->QCameraCmdThread *cmdThread = &pme->mProcTh;
                                                                    --->do {
                                                                    --->cam_sem_wait(&cmdThread->cmd_sem);    //wait for a command; see "VIDIOC_DQBUF analysis"
                                                                    --->camera_cmd_type_t cmd = cmdThread->getCmd();    //take out the command
                                                                    --->case CAMERA_CMD_TYPE_DO_NEXT_JOB:
                                                                    --->mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)pme->mDataQ.dequeue();    //dequeue one frame from the queue
                                                                    --->pme->mDataCB(frame, pme, pme->mUserData);    //this calls QCamera2HardwareInterface::preview_stream_cb_routine()
                                                                        --->QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;    //userdata = pme->mUserData
                                                                        --->QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;    //super_frame = frame
                                                                        --->mm_camera_buf_def_t *dumpBuffer = NULL;
                                                                        --->mm_camera_buf_def_t *frame = super_frame->bufs[0];
                                                                        --->uint32_t idx = frame->buf_idx;    //get the buffer index
                                                                        --->memory->setBufferStatus(frame->buf_idx, STATUS_ACTIVE);
                                                                        --->memory->enqueueBuffer(idx);    //enqueue the buffer by its index; this is where the frame gets displayed
                                                                            --->mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);    //mWindow = mHalPreviewWindow; display-related, not analyzed here
                                                                        --->stream->bufDone((uint32_t)dequeuedIdx);    //release the buffer: return the dequeued buffer back to the driver
                                                                            --->mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);    // mCamOps = mm_camera_ops, so this calls mm_camera_intf_qbuf()
                                                                                --->strid = get_main_camera_handle(buf->stream_id);
                                                                                --->my_obj = mm_camera_util_get_camera_by_handler(handle);
                                                                                --->mm_camera_qbuf(my_obj, chid, buf);
                                                                                    --->ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
                                                                                    --->mm_channel_qbuf(ch_obj, buf);
                                                                                        --->mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);    //the my_obj used below is obtained here
                                                                                        --->mm_stream_fsm_fn(s_obj, MM_STREAM_EVT_QBUF, (void *)buf, NULL);
                                                                                            --->mm_stream_fsm_active(my_obj, evt, in_val, out_val);
                                                                                                --->mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
                                                                                                    --->mm_stream_qbuf(my_obj, frame);
                                                                                                        --->struct v4l2_buffer buffer;
                                                                                                        --->buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                                                                                                        --->buffer.memory = V4L2_MEMORY_USERPTR;
                                                                                                        --->buffer.index = (__u32)buf->buf_idx;
                                                                                                        --->buffer.length = (__u32)length;
                                                                                                        --->ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);    //ends up calling ioctl(VIDIOC_QBUF)
                                                                --->m_camOps->start_channel(m_camHandle, m_handle);    //m_camOps = mm_camera_ops, so this calls mm_camera_intf_start_channel()
                                                                --->uint32_t handle = get_main_camera_handle(camera_handle);
                                                                --->my_obj = mm_camera_util_get_camera_by_handler(handle);
                                                                --->mm_camera_start_channel(my_obj, chid);
                                                                    --->mm_channel_t * ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
                                                                    --->mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_START, NULL, NULL);
                                                                            --->mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);    //not started yet, so the state is MM_CHANNEL_STATE_STOPPED
                                                                            --->mm_channel_start(my_obj);
                                                                                    --->mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_GET_BUF, NULL, NULL);    //allocate buffers; at this point only the buffer size and related info are set up
                                                                                    --->mm_stream_init_bufs(my_obj);
                                                                                    --->mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_REG_BUF, NULL, NULL);    //register the buffers
                                                                                    --->mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
                                                                                        --->mm_stream_reg_buf(my_obj);
                                                                                            --->mm_stream_request_buf(my_obj);
                                                                                                --->struct v4l2_requestbuffers bufreq;
                                                                                                --->bufreq.count = buf_num;
                                                                                                --->bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                                                                                                --->bufreq.memory = V4L2_MEMORY_USERPTR;
                                                                                                    --->ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);    //call ioctl(VIDIOC_REQBUFS) to request buffer slots from the driver (with V4L2_MEMORY_USERPTR the buffer memory itself is supplied from user space)
                                                                                                --->mm_stream_qbuf(my_obj, &my_obj->buf[i]);    //for the first call, see "VIDIOC_DQBUF analysis"
                                                                                                --->buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                                                                                                --->buffer.memory = V4L2_MEMORY_USERPTR;
                                                                                                --->buffer.index = (__u32)buf->buf_idx;
                                                                                                    --->ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);    //call ioctl(VIDIOC_QBUF) to queue the buffer into the driver's queue
                                                                                    --->mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_START, NULL, NULL);    //start
                                                                                    --->mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
                                                                                        --->my_obj->state = MM_STREAM_STATE_ACTIVE;
                                                                                            --->mm_camera_cmd_thread_launch(&my_obj->cmd_thread, mm_stream_dispatch_app_data, (void *)my_obj);    //launch a thread
                                                                                                --->my_obj->buf_cb[i].cb(&super_buf, my_obj->buf_cb[i].user_data);    //this calls dataNotifyCB
                                                                                                --->QCameraStream* stream = (QCameraStream *)userdata;
                                                                                                --->mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
                                                                                                --->*frame = *recvd_frame;
                                                                                                --->stream->processDataNotify(frame);
                                                                                                    --->mDataQ.enqueue((void *)frame)
                                                                                                        --->mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);    //wake up the dataProcRoutine thread
                                                                                        --->mm_stream_streamon(my_obj);
                                                                                            --->enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                                                                                                --->ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);    //call ioctl(VIDIOC_STREAMON) to start streaming
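
The QCamera trace above ultimately reduces to the standard V4L2 capture sequence: VIDIOC_S_FMT, VIDIOC_REQBUFS, VIDIOC_QBUF for each buffer, then VIDIOC_STREAMON. The standalone sketch below shows that sequence in the same multi-planar USERPTR mode; the device node, resolution, pixel format, and buffer count are placeholders, and error handling is minimal.

#include <cstdlib>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main() {
    int fd = open("/dev/video0", O_RDWR);              // placeholder video node
    if (fd < 0) return 1;

    // 1. Set the capture format (mirrors mm_stream_set_fmt).
    v4l2_format fmt{};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    fmt.fmt.pix_mp.width = 1280;
    fmt.fmt.pix_mp.height = 720;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV21;
    fmt.fmt.pix_mp.num_planes = 1;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) return 1;

    // 2. Request buffer slots (mirrors mm_stream_request_buf).
    v4l2_requestbuffers req{};
    req.count  = 4;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    req.memory = V4L2_MEMORY_USERPTR;                  // buffer memory is supplied by user space
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) return 1;

    // 3. Queue each buffer into the driver (mirrors mm_stream_qbuf).
    const size_t planeSize = 1280 * 720 * 3 / 2;       // NV21 size for the chosen geometry
    for (unsigned i = 0; i < req.count; i++) {
        void *mem = std::malloc(planeSize);            // stays alive for the whole streaming session
        v4l2_plane planes[1]{};
        planes[0].m.userptr = reinterpret_cast<unsigned long>(mem);
        planes[0].length = planeSize;

        v4l2_buffer buf{};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory = V4L2_MEMORY_USERPTR;
        buf.index = i;
        buf.m.planes = planes;
        buf.length = 1;                                // number of planes
        if (ioctl(fd, VIDIOC_QBUF, &buf) < 0) return 1;
    }

    // 4. Start streaming (mirrors mm_stream_streamon).
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0) return 1;

    // ... frames are then collected with poll() + VIDIOC_DQBUF (see the next sections).
    close(fd);
    return 0;
}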

           

m_stateMachine state machine analysis

QCamera2HardwareInterface *hw = new QCamera2HardwareInterface((uint32_t)camera_id);
--->m_stateMachine(this)    //m_stateMachine is a QCameraStateMachine member, constructed with the QCamera2HardwareInterface object
    --->api_queue()
    --->evt_queue()
    --->m_parent = ctrl;    //ctrl = the QCamera2HardwareInterface object
    --->cam_sem_init(&cmd_sem, 0);
    --->pthread_create(&cmd_pid, NULL, smEvtProcRoutine, this);    // this = the QCameraStateMachine object
        --->QCameraStateMachine *pme = (QCameraStateMachine *)data;
        --->do {
        --->cam_sem_wait(&pme->cmd_sem);    //wait for a command
        --->qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();    //dequeue a node from the queue
        --->pme->stateMachine(node->evt, node->evt_payload);    //ultimately dispatches to this function
        --->} while (1)
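
The pattern here is a classic command queue plus semaphore plus worker thread: procAPI() enqueues a node and posts the semaphore, and smEvtProcRoutine() loops, dequeuing nodes and dispatching them through the current state. A minimal sketch of that pattern is below; std::mutex/std::condition_variable stand in for cam_sem_t and QCameraQueue, and the event and state names are illustrative only.

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <queue>
#include <thread>

enum SmEvt { EVT_SET_PREVIEW_WINDOW, EVT_START_PREVIEW, EVT_EXIT };

struct SmNode { SmEvt evt; void *payload; };

class StateMachine {
public:
    StateMachine() : mWorker(&StateMachine::loop, this) {}
    ~StateMachine() { procAPI(EVT_EXIT, nullptr); mWorker.join(); }

    // Called on the API thread: enqueue a node and wake the worker (cam_sem_post).
    void procAPI(SmEvt evt, void *payload) {
        std::lock_guard<std::mutex> lk(mLock);
        mQueue.push({evt, payload});
        mCond.notify_one();
    }

private:
    // Worker thread: wait (cam_sem_wait), dequeue, dispatch by current state.
    void loop() {
        for (;;) {
            std::unique_lock<std::mutex> lk(mLock);
            mCond.wait(lk, [this] { return !mQueue.empty(); });
            SmNode node = mQueue.front();
            mQueue.pop();
            lk.unlock();
            if (node.evt == EVT_EXIT) return;
            dispatch(node);
        }
    }

    void dispatch(const SmNode &node) {
        // procEvtPreviewStoppedState() and friends would switch on mState here.
        std::printf("state %d handling event %d\n", mState, node.evt);
        if (node.evt == EVT_START_PREVIEW) mState = 1;  // e.g. PREVIEWING
    }

    int mState = 0;                                     // e.g. PREVIEW_STOPPED
    std::queue<SmNode> mQueue;
    std::mutex mLock;
    std::condition_variable mCond;
    std::thread mWorker;
};

int main() {
    StateMachine sm;
    sm.procAPI(EVT_SET_PREVIEW_WINDOW, nullptr);
    sm.procAPI(EVT_START_PREVIEW, nullptr);
    return 0;
}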
           

VIDIOC_DQBUF analysis

mm_stream_qbuf()    //after the buffers are allocated for the kernel, VIDIOC_QBUF is called in a loop to queue each buffer onto the driver's list; this shows the first-time case
--->mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0], idx, my_obj->my_hdl, my_obj->fd, mm_stream_data_notify, (void*)my_obj, mm_camera_async_call);    //register my_obj->fd with the poll thread
    --->poll_cb->poll_entries[idx].fd = fd;
    --->poll_cb->poll_entries[idx].handler = handler;
    --->poll_cb->poll_entries[idx].notify_cb = notify_cb;
    --->poll_cb->poll_entries[idx].user_data = userdata;
    --->mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
        --->write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
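
mm_camera_poll_sig_async() works because the poll loop always watches one end of a pipe: writing to the other end wakes poll() so the thread can rebuild its fd set. A minimal sketch of that self-pipe pattern is below; the class and method names are illustrative, not the actual mm-camera-interface code, and the data-fd handling is reduced to a placeholder comment.

#include <poll.h>
#include <unistd.h>
#include <thread>
#include <vector>

class PollThread {
public:
    PollThread() {
        pipe(mPipe);                                       // mPipe[0] is watched by poll(), mPipe[1] is the wakeup side
        mFds.push_back({mPipe[0], POLLIN, 0});
        mThread = std::thread(&PollThread::loop, this);
    }
    ~PollThread() {
        int quitFd = -1;
        write(mPipe[1], &quitFd, sizeof(quitFd));          // ask the loop to exit
        mThread.join();
        close(mPipe[0]);
        close(mPipe[1]);
    }

    // Counterpart of mm_camera_poll_thread_add_poll_fd(): publish a new data fd
    // by writing it through the pipe, which wakes the poll loop.
    void addFd(int fd) { write(mPipe[1], &fd, sizeof(fd)); }

private:
    void loop() {
        for (;;) {
            poll(mFds.data(), mFds.size(), -1);            // block until the pipe or a data fd is ready
            if (mFds[0].revents & POLLIN) {                // pipe command: update the fd set
                int fd = -1;
                read(mPipe[0], &fd, sizeof(fd));
                if (fd < 0) return;                        // quit request
                mFds.push_back({fd, POLLIN, 0});           // start polling the new data fd
            }
            for (size_t i = 1; i < mFds.size(); i++) {
                if (mFds[i].revents & POLLIN) {
                    // A data fd is readable: this is where notify_cb
                    // (mm_stream_data_notify) would be invoked.
                }
            }
        }
    }

    int mPipe[2] = {-1, -1};
    std::vector<pollfd> mFds;
    std::thread mThread;
};

int main() {
    PollThread pt;
    pt.addFd(STDIN_FILENO);                                // example: watch stdin as a stand-in for a stream fd
    return 0;                                              // destructor stops and joins the poll thread
}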



Invoking the notify_cb callback:
mm_stream_data_notify(void* user_data)    //invoked when the driver has data; note: a dedicated thread is responsible for invoking this callback
--->mm_stream_t *my_obj = (mm_stream_t*)user_data;
--->mm_stream_read_msm_frame(my_obj, &buf_info, (uint8_t)length);    //read one frame of data
    --->struct v4l2_buffer vb;
    --->vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    --->vb.memory = V4L2_MEMORY_USERPTR;
    --->vb.length = num_planes;
    --->ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);    //call VIDIOC_DQBUF to take a buffer off the driver's queue
    --->buf_info->buf = &my_obj->buf[idx];
    --->buf_info->frame_idx = vb.sequence;
    --->buf_info->buf->buf_idx = idx;
    --->buf_info->buf->frame_idx = vb.sequence;
--->mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
    --->node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
    --->node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
    --->node->u.buf = *buf_info;
    --->cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);    //insert into the queue
    --->cam_sem_post(&(my_obj->cmd_thread.cmd_sem));    //wake up the mm_stream_dispatch_app_data thread
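
For completeness, the sketch below shows roughly what mm_stream_read_msm_frame() and the later bufDone() path do with the V4L2 fd: dequeue a filled multi-planar buffer with VIDIOC_DQBUF, let the consumer use it, then hand it back with VIDIOC_QBUF. It assumes a capture node already configured and streaming as in the earlier REQBUFS/QBUF/STREAMON sketch; readOneFrame is a hypothetical helper name.

#include <cstring>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

// Returns the index of the dequeued buffer, or -1 on error.
int readOneFrame(int fd, unsigned num_planes) {
    v4l2_plane planes[VIDEO_MAX_PLANES];
    std::memset(planes, 0, sizeof(planes));

    v4l2_buffer vb{};
    vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    vb.memory = V4L2_MEMORY_USERPTR;
    vb.m.planes = planes;
    vb.length = num_planes;
    if (ioctl(fd, VIDIOC_DQBUF, &vb) < 0)       // take a filled buffer off the driver queue
        return -1;

    // ... process/display the frame held in buffer vb.index here (the HAL hands it
    //     to the preview stream callback through the cmd thread) ...

    if (ioctl(fd, VIDIOC_QBUF, &vb) < 0)        // return the buffer to the driver (bufDone)
        return -1;
    return static_cast<int>(vb.index);
}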
           

Summary

The amount of code involved in these flows is simply enormous, so instead of pasting the code itself I have pasted my notes, which already cover the flows in detail.
Comparing API 1 with API 2, it is obvious that the API 1 code is much simpler and the flow is much clearer.