
Android Camera Analysis (Part 2)


In the previous chapter we saw how CameraService registers itself inside mediaserver and exposes the BpCameraService proxy interface. Now let's analyze how the client connects to the server, obtains the service, and opens the camera module.

The camera JNI file, android_hardware_Camera.cpp, provides the camera's initialization, connection, and related entry points.

frameworks/base/jni/android_hardware_Camera.cpp:

static void android_hardware_Camera_native_setup(/* ... */)
{
    sp<Camera> camera = Camera::connect(cameraId);
    // ...
}

So the class that the JNI layer actually hands to Java is our native Camera class. Let's analyze the Camera class next.

class Camera : public BnCameraClient, public IBinder::DeathRecipient
{
public:
            // construct a camera client from an existing remote
    static  sp<Camera>  create(const sp<ICamera>& camera);
    static  int32_t     getNumberOfCameras();
    static  status_t    getCameraInfo(int cameraId,
                                      struct CameraInfo* cameraInfo);
    static  sp<Camera>  connect(int cameraId);
            virtual     ~Camera();
            void        init();

            status_t    reconnect();
            void        disconnect();
            status_t    lock();
            status_t    unlock();

            status_t    getStatus() { return mStatus; }

            // pass the buffered Surface to the camera service
            status_t    setPreviewDisplay(const sp<Surface>& surface);

            // pass the buffered ISurfaceTexture to the camera service
            status_t    setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);

            // start preview mode, must call setPreviewDisplay first
            status_t    startPreview();

            // stop preview mode
            void        stopPreview();

            // get preview state
            bool        previewEnabled();

            // start recording mode, must call setPreviewDisplay first
            status_t    startRecording();

            // stop recording mode
            void        stopRecording();

            // get recording state
            bool        recordingEnabled();

            // release a recording frame
            void        releaseRecordingFrame(const sp<IMemory>& mem);

            // autoFocus - status returned from callback
            status_t    autoFocus();

            // cancel auto focus
            status_t    cancelAutoFocus();

            // take a picture - picture returned from callback
            status_t    takePicture(int msgType);

            // set preview/capture parameters - key/value pairs
            status_t    setParameters(const String8& params);

            // get preview/capture parameters - key/value pairs
            String8     getParameters() const;

            // send command to camera driver
            status_t    sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);

            // tell camera hal to store meta data or real YUV in video buffers.
            status_t    storeMetaDataInBuffers(bool enabled);

            void        setListener(const sp<CameraListener>& listener);
            void        setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
            void        setPreviewCallbackFlags(int preview_callback_flag);

            sp<ICameraRecordingProxy> getRecordingProxy();

    // ICameraClient interface
    virtual void        notifyCallback(int32_t msgType, int32_t ext, int32_t ext2);
    virtual void        dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
                                     camera_frame_metadata_t *metadata);
    virtual void        dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

    sp<ICamera>         remote();

    class RecordingProxy : public BnCameraRecordingProxy
    {
    public:
        RecordingProxy(const sp<Camera>& camera);

        // ICameraRecordingProxy interface
        virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
        virtual void stopRecording();
        virtual void releaseRecordingFrame(const sp<IMemory>& mem);

    private:
        sp<Camera>         mCamera;
    };

private:
                        Camera();
                        Camera(const Camera&);
                        Camera& operator=(const Camera);
                        virtual void binderDied(const wp<IBinder>& who);

            class DeathNotifier: public IBinder::DeathRecipient
            {
            public:
                DeathNotifier() {
                }

                virtual void binderDied(const wp<IBinder>& who);
            };

            static sp<DeathNotifier> mDeathNotifier;

            // helper function to obtain camera service handle
            static const sp<ICameraService>& getCameraService();

            sp<ICamera>         mCamera;
            status_t            mStatus;

            sp<CameraListener>  mListener;
            sp<ICameraRecordingProxyListener>  mRecordingProxyListener;

            friend class DeathNotifier;

            static  Mutex               mLock;
            static  sp<ICameraService>  mCameraService;
};

 

As the implementation class behind BnCameraClient, Camera not only offers the basic entry points such as create(), getNumberOfCameras() and connect(), but also implements the three callback functions notifyCallback(), dataCallback() and dataCallbackTimestamp(), which the camera service invokes over Binder through its BpCameraClient proxy.
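To see what happens on the client side when such a callback arrives, here is a condensed sketch of Camera::notifyCallback() (based on Camera.cpp, with logging trimmed): the Camera object simply forwards the event to whatever CameraListener the JNI layer registered.

void Camera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
{
    sp<CameraListener> listener;
    {
        // take a local reference under the lock, then call out without holding it
        Mutex::Autolock _l(mLock);
        listener = mListener;
    }
    if (listener != NULL) {
        listener->notify(msgType, ext1, ext2);
    }
}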

class ICameraClient: public IInterface
{
public:
    DECLARE_META_INTERFACE(CameraClient);

    virtual void            notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
    virtual void            dataCallback(int32_t msgType, const sp<IMemory>& data,
                                         camera_frame_metadata_t *metadata) = 0;
    virtual void            dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& data) = 0;
};


 

BnCameraClient handles the requests arriving from BpCameraClient and dispatches them to the implementation class Camera:

class BnCameraClient: public BnInterface<ICameraClient>
{
public:
    virtual status_t    onTransact( uint32_t code,
                                    const Parcel& data,
                                    Parcel* reply,
                                    uint32_t flags = 0);
};

status_t BnCameraClient::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case NOTIFY_CALLBACK: {
            LOGV("NOTIFY_CALLBACK");
            CHECK_INTERFACE(ICameraClient, data, reply);
            int32_t msgType = data.readInt32();
            int32_t ext1 = data.readInt32();
            int32_t ext2 = data.readInt32();
            notifyCallback(msgType, ext1, ext2);
            return NO_ERROR;
        } break;
        case DATA_CALLBACK: {
            LOGV("DATA_CALLBACK");
            CHECK_INTERFACE(ICameraClient, data, reply);
            int32_t msgType = data.readInt32();
            sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
            camera_frame_metadata_t *metadata = NULL;
            if (data.dataAvail() > 0) {
                metadata = new camera_frame_metadata_t;
                metadata->number_of_faces = data.readInt32();
                metadata->faces = (camera_face_t *) data.readInplace(
                        sizeof(camera_face_t) * metadata->number_of_faces);
            }
            dataCallback(msgType, imageData, metadata);
            if (metadata) delete metadata;
            return NO_ERROR;
        } break;
        case DATA_CALLBACK_TIMESTAMP: {
            LOGV("DATA_CALLBACK_TIMESTAMP");
            CHECK_INTERFACE(ICameraClient, data, reply);
            nsecs_t timestamp = data.readInt64();
            int32_t msgType = data.readInt32();
            sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
            dataCallbackTimestamp(timestamp, msgType, imageData);
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}

 


Likewise, BpCameraClient is the proxy end of this interface, used on the service side to deliver these callbacks to the client across Binder:

class BpCameraClient: public BpInterface<ICameraClient>
{
public:
    BpCameraClient(const sp<IBinder>& impl)
        : BpInterface<ICameraClient>(impl)
    {
    }

    // generic callback from camera service to app
    void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
    {
        LOGV("notifyCallback");
        Parcel data, reply;
        data.writeInterfaceToken(ICameraClient::getInterfaceDescriptor());
        data.writeInt32(msgType);
        data.writeInt32(ext1);
        data.writeInt32(ext2);
        remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
    }

    // generic data callback from camera service to app with image data
    void dataCallback(int32_t msgType, const sp<IMemory>& imageData,
                      camera_frame_metadata_t *metadata)
    {
        LOGV("dataCallback");
        Parcel data, reply;
        data.writeInterfaceToken(ICameraClient::getInterfaceDescriptor());
        data.writeInt32(msgType);
        data.writeStrongBinder(imageData->asBinder());
        if (metadata) {
            data.writeInt32(metadata->number_of_faces);
            data.write(metadata->faces, sizeof(camera_face_t) * metadata->number_of_faces);
        }
        remote()->transact(DATA_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
    }

    // generic data callback from camera service to app with image data
    void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& imageData)
    {
        LOGV("dataCallback");
        Parcel data, reply;
        data.writeInterfaceToken(ICameraClient::getInterfaceDescriptor());
        data.writeInt64(timestamp);
        data.writeInt32(msgType);
        data.writeStrongBinder(imageData->asBinder());
        remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
    }
};

 

Next, let's continue with the JNI layer shown earlier and see how it hooks up with the Camera class:

static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
    jobject weak_this, jint cameraId)
{
    sp<Camera> camera = Camera::connect(cameraId);

    if (camera == NULL) {
        jniThrowRuntimeException(env, "Fail to connect to camera service");
        return;
    }

    // make sure camera hardware is alive
    if (camera->getStatus() != NO_ERROR) {
        jniThrowRuntimeException(env, "Camera initialization failed");
        return;
    }

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        jniThrowRuntimeException(env, "Can't find android/hardware/Camera");
        return;
    }

    // We use a weak reference so the Camera object can be garbage collected.
    // The reference is only used as a proxy for callbacks.
    sp<JNICameraContext> context = new JNICameraContext(env, weak_this, clazz, camera);
    context->incStrong(thiz);
    camera->setListener(context);

    // save context in opaque field
    env->SetIntField(thiz, fields.context, (int)context.get());
}

 

Here, sp<Camera> camera = Camera::connect(cameraId) takes a cameraId and returns a camera client. With that camera we then create a JNICameraContext listener, which receives and handles the data and messages passed up by the lower-level camera callbacks.

sp<Camera> Camera::connect(int cameraId)
{
    sp<Camera> c = new Camera();
    const sp<ICameraService>& cs = getCameraService();
    if (cs != 0) {
        c->mCamera = cs->connect(c, cameraId);
    }

    if (c->mCamera != 0) {
        c->mCamera->asBinder()->linkToDeath(c);
        c->mStatus = NO_ERROR;
    } else {
        c.clear();
    }
    return c;
}

 

connect() first news a Camera instance c, then calls getCameraService(), which queries ServiceManager for the CameraService binder and wraps it in a BpCameraService proxy, cached in the static sp<ICameraService> mCameraService, roughly as sketched below.
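A condensed sketch of that lookup (based on Camera.cpp; logging abbreviated):

const sp<ICameraService>& Camera::getCameraService()
{
    Mutex::Autolock _l(mLock);
    if (mCameraService.get() == 0) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder;
        do {
            // the service registers itself as "media.camera" in mediaserver
            binder = sm->getService(String16("media.camera"));
            if (binder != 0) break;
            LOGW("CameraService not published, waiting...");
            usleep(500000); // retry every 0.5 s until the service comes up
        } while (true);
        if (mDeathNotifier == NULL) {
            mDeathNotifier = new DeathNotifier();
        }
        binder->linkToDeath(mDeathNotifier);
        mCameraService = interface_cast<ICameraService>(binder);
    }
    return mCameraService;
}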

The call cs->connect(c, cameraId) goes through the BpCameraService proxy; via Binder it actually lands in CameraService::connect() on the server side and comes back with an ICamera pointer. The proxy side is only a thin Parcel wrapper, sketched below.
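Roughly what the proxy method looks like (condensed from ICameraService.cpp):

// BpCameraService::connect(): marshal the ICameraClient binder and the camera id,
// transact CONNECT, and unmarshal the returned ICamera (a BpCamera on our side).
virtual sp<ICamera> connect(const sp<ICameraClient>& cameraClient, int cameraId)
{
    Parcel data, reply;
    data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
    data.writeStrongBinder(cameraClient->asBinder());
    data.writeInt32(cameraId);
    remote()->transact(BnCameraService::CONNECT, data, &reply);
    return interface_cast<ICamera>(reply.readStrongBinder());
}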

 

sp<ICamera> CameraService::connect(const sp<ICameraClient>& cameraClient, int cameraId)
{
    int callingPid = getCallingPid();
    sp<CameraHardwareInterface> hardware = NULL;
    sp<Client> client;

    Mutex::Autolock lock(mServiceLock);
    if (mClient[cameraId] != 0) {
        client = mClient[cameraId].promote();
        if (client != 0) {
            if (cameraClient->asBinder() == client->getCameraClient()->asBinder()) {
                LOG1("CameraService::connect X (pid %d) (the same client)",
                    callingPid);
                return client;
            } else {
                LOGW("CameraService::connect X (pid %d) rejected (existing client).",
                    callingPid);
                return NULL;
            }
        }
        mClient[cameraId].clear();
    }

    if (mBusy[cameraId]) {
        LOGW("CameraService::connect X (pid %d) rejected"
             " (camera %d is still busy).", callingPid, cameraId);
        return NULL;
    }

    struct camera_info info;
    if (mModule->get_camera_info(cameraId, &info) != OK) {
        LOGE("Invalid camera id %d", cameraId);
        return NULL;
    }

    char camera_device_name[10];
    snprintf(camera_device_name, sizeof(camera_device_name), "%d", cameraId);

    hardware = new CameraHardwareInterface(camera_device_name);
    if (hardware->initialize(&mModule->common) != OK) {
        hardware.clear();
        return NULL;
    }

    client = new Client(this, cameraClient, hardware, cameraId, info.facing, callingPid);
    mClient[cameraId] = client;
    LOG1("CameraService::connect X");
    return client;
}

Inside CameraService::connect(), the service first looks up the camera info for the given cameraId, then instantiates the camera HAL wrapper by creating an sp<CameraHardwareInterface> named hardware, calls hardware->initialize() to drop into the HAL layer and open the camera driver, and finally creates and returns an instance of the inner class Client.

class CameraHardwareInterface : public virtual RefBase {
public:
    CameraHardwareInterface(const char *name) : mDevice(0), mName(name) {}

    status_t initialize(hw_module_t *module)
    {
        int rc = module->methods->open(module, mName.string(),
                                       (hw_device_t **)&mDevice);
        initHalPreviewWindow();
        return rc;
    }

    status_t setPreviewWindow(const sp<ANativeWindow>& buf);
    // ... the remaining camera operations (startPreview(), takePicture(), ...)

private:
    camera_device_t *mDevice;   // handle returned by the HAL open() call
    String8          mName;     // camera id string, e.g. "0"
};

Note that in hardware->initialize(&mModule->common), mModule is a camera_module_t structure:

hardware/libhardware/include/hardware/camera.h:

typedef struct camera_module {
    hw_module_t common;
    int (*get_number_of_cameras)(void);
    int (*get_camera_info)(int camera_id, struct camera_info* info);
} camera_module_t;

But where does this mModule get initialized?

It turns out there is this function:

void CameraService::onFirstRef()
{
    BnCameraService::onFirstRef();
    hw_get_module(CAMERA_HARDWARE_MODULE_ID, (const hw_module_t **)&mModule);
}

But when is onFirstRef() itself called? Tracing into the base class RefBase, which CameraService ultimately inherits, we find it invoked from RefBase::incStrong():

void RefBase::incStrong(const void* id) const
{
    // ...
    refs->mBase->onFirstRef();
}

So onFirstRef() fires the first time the object gains a strong (sp) reference. Where, then, does CameraService pick up that first strong reference?

That happens on the server side: when mediaserver instantiates CameraService and publishes it to ServiceManager (as we saw in the previous chapter), the object is wrapped in an sp for the first time, onFirstRef() runs, and mModule gets initialized. By the time the client executes const sp<ICameraService>& cs = getCameraService(); and obtains its BpCameraService proxy, the HAL module handle on the service side is therefore already in place.

hw_get_module() obtains a handle to the camera HAL module; from here on, the camera hardware is driven through this mModule (a minimal usage sketch follows). We will dig into the HAL layer itself in the next chapter.
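As a quick illustration of what that handle provides (a minimal standalone sketch, not CameraService code; the helper function name is made up):

#include <hardware/hardware.h>
#include <hardware/camera.h>

// Load the camera HAL module the same way onFirstRef() does, then query it
// through the function pointers in the returned camera_module_t.
static int dumpCameraInfo()
{
    camera_module_t *module = NULL;
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                      (const hw_module_t **)&module) != 0) {
        return -1;                              // HAL library missing or failed to load
    }
    int numCameras = module->get_number_of_cameras();
    for (int i = 0; i < numCameras; i++) {
        struct camera_info info;
        module->get_camera_info(i, &info);      // facing and orientation per camera
    }
    return numCameras;
}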

 

Back to hardware->initialize(&mModule->common): inside it, the call to module->methods->open() is what actually opens the camera driver. On the HAL side, that open entry point is exported by the module roughly as sketched below.
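For orientation only, this is the usual shape of a camera HAL module exporting its open() method (a hypothetical skeleton, not any specific vendor's HAL; the sample_* functions are empty placeholders):

#include <hardware/camera.h>

// hw_get_module() resolves HAL_MODULE_INFO_SYM by symbol name after dlopen'ing
// the HAL library; CameraHardwareInterface::initialize() then calls methods->open().
static int sample_get_number_of_cameras(void)                         { return 1; }
static int sample_get_camera_info(int id, struct camera_info *info)   { return 0; }
static int sample_open(const hw_module_t *module, const char *id,
                       hw_device_t **device)                          { return 0; }

static struct hw_module_methods_t sample_methods = {
    .open = sample_open,
};

camera_module_t HAL_MODULE_INFO_SYM = {
    .common = {
        .tag     = HARDWARE_MODULE_TAG,
        .id      = CAMERA_HARDWARE_MODULE_ID,
        .name    = "Sample Camera HAL",
        .methods = &sample_methods,
    },
    .get_number_of_cameras = sample_get_number_of_cameras,
    .get_camera_info       = sample_get_camera_info,
};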

 

Next, let's look at CameraService's inner class Client:

 class Client : public BnCamera
    {
    public:
        // ICamera interface (see ICamera for details)
        virtual void            disconnect();
        virtual status_t        connect(const sp<ICameraClient>& client);
        virtual status_t        lock();
        virtual status_t        unlock();
        virtual status_t        setPreviewDisplay(const sp<Surface>& surface);
        virtual status_t        setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
        virtual void            setPreviewCallbackFlag(int flag);
        virtual status_t        startPreview();
        virtual void            stopPreview();
        virtual bool            previewEnabled();
        virtual status_t        storeMetaDataInBuffers(bool enabled);
        virtual status_t        startRecording();
        virtual void            stopRecording();
        virtual bool            recordingEnabled();
        virtual void            releaseRecordingFrame(const sp<IMemory>& mem);
        virtual status_t        autoFocus();
        virtual status_t        cancelAutoFocus();
        virtual status_t        takePicture(int msgType);
        virtual status_t        setParameters(const String8& params);
        virtual String8         getParameters() const;
        virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
    private:
        friend class CameraService;
                                Client(const sp<CameraService>& cameraService,
                                       const sp<ICameraClient>& cameraClient,
                                       const sp<CameraHardwareInterface>& hardware,
                                       int cameraId,
                                       int cameraFacing,
                                       int clientPid);
                                ~Client();

        // return our camera client
        const sp<ICameraClient>&    getCameraClient() { return mCameraClient; }

        // check whether the calling process matches mClientPid.
        status_t                checkPid() const;
        status_t                checkPidAndHardware() const;  // also check mHardware != 0

        // these are internal functions used to set up preview buffers
        status_t                registerPreviewBuffers();

        // camera operation mode
        enum camera_mode {
            CAMERA_PREVIEW_MODE   = 0,  // frame automatically released
            CAMERA_RECORDING_MODE = 1,  // frame has to be explicitly released by releaseRecordingFrame()
        };
        // these are internal functions used for preview/recording
        status_t                startCameraMode(camera_mode mode);
        status_t                startPreviewMode();
        status_t                startRecordingMode();

        // internal function used by sendCommand to enable/disable shutter sound.
        status_t                enableShutterSound(bool enable);

        // these are static callback functions
        static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
        static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
                                             camera_frame_metadata_t *metadata, void* user);
        static void             dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user);
        // convert client from cookie
        static sp<Client>       getClientFromCookie(void* user);
        // handlers for messages
        void                    handleShutter(void);
        void                    handlePreviewData(int32_t msgType, const sp<IMemory>& mem,
                                                  camera_frame_metadata_t *metadata);
        void                    handlePostview(const sp<IMemory>& mem);
        void                    handleRawPicture(const sp<IMemory>& mem);
        void                    handleCompressedPicture(const sp<IMemory>& mem);
        void                    handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2);
        void                    handleGenericData(int32_t msgType, const sp<IMemory>& dataPtr,
                                                  camera_frame_metadata_t *metadata);
        void                    handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

        void                    copyFrameAndPostCopiedFrame(
                                    int32_t msgType,
                                    const sp<ICameraClient>& client,
                                    const sp<IMemoryHeap>& heap,
                                    size_t offset, size_t size,
                                    camera_frame_metadata_t *metadata);

        int                     getOrientation(int orientation, bool mirror);

        status_t                setPreviewWindow(
                                    const sp<IBinder>& binder,
                                    const sp<ANativeWindow>& window);

        // these are initialized in the constructor.
        sp<CameraService>               mCameraService;  // immutable after constructor
        sp<ICameraClient>               mCameraClient;
        int                             mCameraId;       // immutable after constructor
        int                             mCameraFacing;   // immutable after constructor
        pid_t                           mClientPid;
        sp<CameraHardwareInterface>     mHardware;       // cleared after disconnect()
        int                             mPreviewCallbackFlag;
        int                             mOrientation;     // Current display orientation
        bool                            mPlayShutterSound;

        // Ensures atomicity among the public methods
        mutable Mutex                   mLock;
        // This is a binder of Surface or SurfaceTexture.
        sp<IBinder>                     mSurface;
        sp<ANativeWindow>               mPreviewWindow;

        // If the user want us to return a copy of the preview frame (instead
        // of the original one), we allocate mPreviewBuffer and reuse it if possible.
        sp<MemoryHeapBase>              mPreviewBuffer;

        // We need to avoid the deadlock when the incoming command thread and
        // the CameraHardwareInterface callback thread both want to grab mLock.
        // An extra flag is used to tell the callback thread that it should stop
        // trying to deliver the callback messages if the client is not
        // interested in it anymore. For example, if the client is calling
        // stopPreview(), the preview frame messages do not need to be delivered
        // anymore.

        // This function takes the same parameter as the enableMsgType() and
        // disableMsgType() functions in CameraHardwareInterface.
        void                    enableMsgType(int32_t msgType);
        void                    disableMsgType(int32_t msgType);
        volatile int32_t        mMsgEnabled;

        // This function keeps trying to grab mLock, or give up if the message
        // is found to be disabled. It returns true if mLock is grabbed.
        bool                    lockIfMessageWanted(int32_t msgType);
    };

As the declaration shows, Client derives from BnCamera, so this is yet another Binder interface.

class BnCamera : public BnInterface<ICamera>
{
public:
    virtual status_t onTransact(uint32_t code, const Parcel& data,
                                Parcel* reply, uint32_t flags = 0);
};

ICamera is the common parent of both BpCamera and BnCamera and defines the interface; BpCamera is the proxy class handed to the client, while the inner class CameraService::Client, as the BnCamera subclass, is what really implements every function ICamera declares. The mechanics are the same as for ICameraClient above, so we won't walk through them again; one representative proxy method is sketched below.
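For reference, a single BpCamera method looks roughly like this (condensed from ICamera.cpp):

// Same Parcel/transact pattern as BpCameraClient above, but travelling from the
// application process towards CameraService.
status_t BpCamera::startPreview()
{
    LOGV("startPreview");
    Parcel data, reply;
    data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
    remote()->transact(START_PREVIEW, data, &reply);
    return reply.readInt32();
}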

 

JNICameraContext

This is a listener class derived from CameraListener; it handles the data and messages delivered by the lower-level camera callbacks.

class CameraListener: virtual public RefBase
{
public:
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
    virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr,
                          camera_frame_metadata_t *metadata) = 0;
    virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
};

 

class JNICameraContext: public CameraListener
{
public:
    JNICameraContext(JNIEnv* env, jobject weak_this, jclass clazz, const sp<Camera>& camera);
    ~JNICameraContext() { release(); }
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr,
                          camera_frame_metadata_t *metadata);
    virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
    void postMetadata(JNIEnv *env, int32_t msgType, camera_frame_metadata_t *metadata);
    void addCallbackBuffer(JNIEnv *env, jbyteArray cbb, int msgType);
    void setCallbackMode(JNIEnv *env, bool installed, bool manualMode);
    sp<Camera> getCamera() { Mutex::Autolock _l(mLock); return mCamera; }
    bool isRawImageCallbackBufferAvailable() const;
    void release();

private:
    void copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType);
    void clearCallbackBuffers_l(JNIEnv *env, Vector<jbyteArray> *buffers);
    void clearCallbackBuffers_l(JNIEnv *env);
    jbyteArray getCallbackBuffer(JNIEnv *env, Vector<jbyteArray> *buffers, size_t bufferSize);

    jobject     mCameraJObjectWeak;     // weak reference to java object
    jclass      mCameraJClass;          // strong reference to java class
    sp<Camera>  mCamera;                // strong reference to native object
    jclass      mFaceClass;  // strong reference to Face class
    jclass      mRectClass;  // strong reference to Rect class
    Mutex       mLock;

    /*
     * Global reference application-managed raw image buffer queue.
     *
     * Manual-only mode is supported for raw image callbacks, which is
     * set whenever method addCallbackBuffer() with msgType =
     * CAMERA_MSG_RAW_IMAGE is called; otherwise, null is returned
     * with raw image callbacks.
     */
    Vector<jbyteArray> mRawImageCallbackBuffers;

    /*
     * Application-managed preview buffer queue and the flags
     * associated with the usage of the preview buffer callback.
     */
    Vector<jbyteArray> mCallbackBuffers; // Global reference application managed byte[]
    bool mManualBufferMode;              // Whether to use application managed buffers.
    bool mManualCameraCallbackSet;       // Whether the callback has been set, used to
                                         // reduce unnecessary calls to set the callback.
};
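How does such a listener get an event back into Java? A condensed sketch of JNICameraContext::notify() (from android_hardware_Camera.cpp; locking and error handling trimmed, and fields.post_event is the cached jmethodID of Camera.postEventFromNative()):

void JNICameraContext::notify(int32_t msgType, int32_t ext1, int32_t ext2)
{
    // re-enter the VM from the callback thread and post the event to the Java
    // Camera object through the static postEventFromNative() method
    JNIEnv *env = AndroidRuntime::getJNIEnv();
    env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
                              mCameraJObjectWeak, msgType, ext1, ext2, NULL);
}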

 

 

Let's summarize the flow.

Starting from the JNI entry point:

1) The static call sp<Camera> camera = Camera::connect(cameraId);

      Inside connect() we first new an sp<Camera> instance c, then obtain an sp<ICameraService> over Binder whose concrete type is BpCameraService. On the service side, CameraService::onFirstRef() has already used hw_get_module() to obtain the handle to the camera HAL module.

2) c->mCamera = cs->connect(c, cameraId)

    Through the BpCameraService proxy we invoke CameraService::connect(c, cameraId), which returns an ICamera pointer whose client-side type is BpCamera; it is stored in c->mCamera. From then on, everything we do with c ends up calling down into the HAL layer.

 

sp<Camera> camera;
{
    // internal member: sp<ICameraService> mCameraService   (the BpCameraService proxy)
    // internal member: sp<ICamera>        mCamera           (the BpCamera returned by connect())
}
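Putting it together, a call such as camera->startPreview() now travels Camera -> BpCamera -> BnCamera -> CameraService::Client -> CameraHardwareInterface -> camera HAL. A much-simplified sketch of the server-side end of that chain (the real Client::startPreview() goes through startCameraMode(), with pid and preview-window checks that are omitted here):

status_t CameraService::Client::startPreview()
{
    Mutex::Autolock lock(mLock);
    // pid check, preview-window binding and recording-mode handling omitted
    mHardware->setPreviewWindow(mPreviewWindow);
    return mHardware->startPreview();   // CameraHardwareInterface -> camera HAL
}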
