| 简单概括为 | Java app 呼叫 ② Jni ,Jni调用各种.so :libandroid_runtime.so ---> libcamera_client.so ---> Binder IPC---> libcameraservice.so ---> libcamera.so 注释:②请原谅我用【呼叫】这个动词,实在想不出更加形象的词汇了。 |
| 详细说明 | 1.打开linux kernel中的camera driver的设备文件,调用CameraHardwareInterface.h 中定义的openCameraHardware(),打开camera driver的设备文件(例如/dev/video0). 2.CameraHardwareInterface.h 中定义的 setParameters()函数,传参告诉camera HAL使用哪一个硬件摄像头,以及它工作的参数(size, format等等),并在HAL层分配存储preview数据的buffers(如果buffers是在linux kernel中的camera driver中分配的,并拿到这些buffers mmap后的地址指针). 3.如果不使用overlay那设置显示目标就在libcameraservice.so 中,不会进Camera HAL动态库.并将上一步拿到的preview数据buffers地址注册到surface中. 如果使用overlay那在libcameraservice.so 中会通过传进来的ISurface创建Overlay类的实例,然后调用CameraHardwareInterface.h 中定义的 setOverlay()设置到Camera HAL动态库中. 4.开始preview,调用到CameraHardwareInterface.h 中定义的 startPreview()函数.startPreviewMode会处理preview的显示介质,如果使用Overlay显示,会设置相应的Overlay,同时调用mHardware->startPreview()以启动preview;否则先调用mHardware->startPreview()启动preview,然后设置buffer:调用函数registerPreviewBuffers(),它会调用mHardware->getPreviewHeap(),从HAL层获得preview的buffer,将其设置给Surface去显示preview的结果。 |
| 123456 | struct overlay_control_context_t {struct overlay_control_device_t device;/* our private state goes below here */struct overlay_t* overlay_video1;//overlay1struct overlay_t* overlay_video2;//overlay2}; |
| void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType){ jbyteArray obj = NULL; // allocate Java byte array and copy data if (dataPtr != NULL) { ssize_t offset; size_t size; sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size); LOGV("postData: off=%d, size=%d", offset, size); uint8_t *heapBase = (uint8_t*)heap->base(); if (heapBase != NULL) { const jbyte* data = reinterpret_cast<const jbyte*>(heapBase + offset); obj = env->NewByteArray(size); if (obj == NULL) { LOGE("Couldn"t allocate byte array for JPEG data"); env->ExceptionClear(); } else { env->SetByteArrayRegion(obj, 0, size, data); } } else { LOGE("image heap is NULL"); } } // post image data to Java env->CallStaticVoidMethod(mCameraJClass, fields.post_event, mCameraJObjectWeak, msgType, 0, 0, obj); if (obj) { env->DeleteLocalRef(obj); }} |
| static Mutex sPostDataLock; // A mutex that synchronizes calls to sCameraPreviewArrayGlobalstatic jbyteArray sCameraPreviewArrayGlobal; // Buffer that is reusedstatic size_t sCameraPreviewArraySize=0; // Size of the buffer (or 0 if the buffer is not yet used) |
| void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType) { if (dataPtr != NULL) { ssize_t offset; size_t size; sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size); LOGV("postData: off=%d, size=%d", offset, size); uint8_t *heapBase = (uint8_t*)heap->base(); if (heapBase != NULL) { const jbyte* data = reinterpret_cast<const jbyte*>(heapBase + offset); //HACK if ((sCameraPreviewArraySize==0) || (sCameraPreviewArraySize!=size)) { if(sCameraPreviewArraySize!=0) env->DeleteGlobalRef(sCameraPreviewArrayGlobal); sCameraPreviewArraySize=size; jbyteArray mCameraPreviewArray = env->NewByteArray(size); sCameraPreviewArrayGlobal=(jbyteArray)env->NewGlobalRef(mCameraPreviewArray); env->DeleteLocalRef(mCameraPreviewArray); } if (sCameraPreviewArrayGlobal == NULL) { LOGE("Couldn"t allocate byte array for JPEG data"); env->ExceptionClear(); } else { env->SetByteArrayRegion(sCameraPreviewArrayGlobal, 0, size, data); } } else { LOGE("image heap is NULL"); } } // post image data to Java env->CallStaticVoidMethod(mCameraJClass, fields.post_event, mCameraJObjectWeak, msgType, 0, 0, sCameraPreviewArrayGlobal);} |