Step 1: startCamera
Entry point: WebcamManager's onCreate() in WebcamManager.java:
public void onCreate() {
    super.onCreate();
    Log.i(TAG, "Service starting");
    mWebcam = new NativeWebcam("/dev/video4");
}
====================================================
The NativeWebcam constructor then calls connect() : NativeWebcam.java
public NativeWebcam(String deviceName, int width, int height) {
    mWidth = width;
    mHeight = height;
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    connect(deviceName, mWidth, mHeight);
}
NativeWebcam's connect() checks that "/dev/video4" exists and is readable; if so, it calls startCamera():
private void connect(String deviceName, int width, int height) {
    boolean deviceReady = true;
    File deviceFile = new File(deviceName);
    if(deviceFile.exists()) {
        if(!deviceFile.canRead()) {
            Log.d(TAG, "Insufficient permissions on " + deviceName +
                    " -- does the app have the CAMERA permission?");
            deviceReady = false;
        }
    } else {
        Log.w(TAG, deviceName + " does not exist");
        deviceReady = false;
    }

    if(deviceReady) {
        Log.i(TAG, "Preparing camera with device name " + deviceName);
        startCamera(deviceName, width, height);
        // controlBrightnessContrass(10, 10 );
    }
}
The actual implementation of startCamera lives in webcam.c, hooked up via JNI:
private native int startCamera(String deviceName, int width, int height);

In webcam.c, Java_com_example_robortbead_Bead_startCamera() does three things:
step 1: call open_device()
    int result = open_device(dev_name, &DEVICE_DESCRIPTOR);
step 2: call init_device()
    result = init_device(DEVICE_DESCRIPTOR, width, height);
step 3: call start_capture()
    result = start_capture(DEVICE_DESCRIPTOR);
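The bodies of open_device(), init_device() and start_capture() aren't quoted in this post. As a rough sketch of what these three steps usually do in V4L2 capture code (the pixel format, buffer count and error handling here are my assumptions, not the project's exact code):

#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Sketch only: the real bodies live in the project's webcam.c / capture.c. */

/* step 1: open the device node and remember the descriptor */
static int open_device(const char* dev_name, int* fd) {
    *fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);
    return (*fd == -1) ? -1 : 0;
}

/* step 2: negotiate the pixel format and request mmap'ed buffers */
static int init_device(int fd, int width, int height) {
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;

    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;   /* matches yuyv422_to_argb() later */
    if(ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) return -1;

    memset(&req, 0, sizeof(req));
    req.count = 4;                                  /* BUFFER_COUNT in the post; 4 is a guess */
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;                  /* matches buf.memory in read_frame() */
    return ioctl(fd, VIDIOC_REQBUFS, &req);         /* mmap() of each buffer omitted here */
}

/* step 3: start streaming */
static int start_capture(int fd) {
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return ioctl(fd, VIDIOC_STREAMON, &type);
}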
Step 2: getFrame
The caller invokes WebcamManager's getFrame(), which first checks mWebcam.isAttached() and then calls mWebcam.getFrame():
public Bitmap getFrame() {
    if(!mWebcam.isAttached()) {
        stopSelf();
    }

    Bitmap bmp = mWebcam.getFrame();
    return bmp;
}
---------------------------------------------------------------
mWebcam's isAttached():
public boolean isAttached() {
    return cameraAttached();
}
cameraAttached is again native C code reached through JNI:
private native boolean cameraAttached();

Its implementation is trivial: it just checks whether the descriptor opened earlier is still valid (i.e. not -1):
jboolean Java_com_example_robortbead_Bead_cameraAttached(JNIEnv* env,
        jobject thiz) {
    return DEVICE_DESCRIPTOR != -1;
}
-----------------------------------------------------------------
Back to mWebcam.getFrame(); it simply calls loadNextFrame(mBitmap):
public Bitmap getFrame() {
    loadNextFrame(mBitmap);
    return mBitmap;
}

loadNextFrame is again native C code reached through JNI:
private native void loadNextFrame(Bitmap bitmap);
step 1: get the bitmap info (header) first:
    if((result = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed, error=%d", result);
        return;
    }

If the format is not RGBA_8888 it returns early, i.e. the bitmap passed in must be RGBA_8888:
    if(info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        return;
    }
step 2: lock the bitmap's pixel buffer to get the colors array (lockPixels allocates nothing new; it just hands back a pointer to the bitmap's own pixels):
    int* colors;
    if((result = AndroidBitmap_lockPixels(env, bitmap, (void*)&colors)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed, error=%d", result);
    }

    if(!RGB_BUFFER || !Y_BUFFER) {
        LOGE("Unable to load frame, buffers not initialized");
        return;
    }

step 3: call process_camera():
    process_camera(DEVICE_DESCRIPTOR, FRAME_BUFFERS, info.width, info.height,
            RGB_BUFFER, Y_BUFFER);
    // copy the data in RGB_BUFFER into the colors array
    int *lrgb = &RGB_BUFFER[0];
    int i;
    for(i = 0; i < info.width * info.height; i++) {
        *colors++ = *lrgb++;
    }

    // unlock the bitmap pixels again
    AndroidBitmap_unlockPixels(env, bitmap);
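RGB_BUFFER and Y_BUFFER are globals whose allocation the post never shows; since loadNextFrame() copies info.width * info.height ints out of RGB_BUFFER, they presumably get allocated during camera setup, roughly like this (hypothetical helper and sizes):

#include <stdlib.h>

/* Hypothetical sketch: one ARGB int per pixel for RGB_BUFFER;
 * Y_BUFFER is assumed to hold one luma value per pixel as well. */
int* RGB_BUFFER = NULL;
int* Y_BUFFER = NULL;

static int allocate_frame_buffers(int width, int height) {
    RGB_BUFFER = (int*) malloc(width * height * sizeof(int));
    Y_BUFFER   = (int*) malloc(width * height * sizeof(int));
    return (RGB_BUFFER && Y_BUFFER) ? 0 : -1;
}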
Next, look at process_camera() in capture.c.
It uses select() to wait for the device to become readable:
when the result is -1 (and errno is EINTR), it keeps waiting;
when the result is 0, the call timed out and it keeps trying;
otherwise the device is ready and a frame can be read.
for(;;) {
    fd_set fds;
    FD_ZERO(&fds);
    FD_SET(fd, &fds);

    struct timeval tv;
    tv.tv_sec = 2;
    tv.tv_usec = 0;

    int result = select(fd + 1, &fds, NULL, NULL, &tv);
    if(-1 == result) {
        if(EINTR == errno) {
            continue;
        }
        errnoexit("select");
    } else if(0 == result) {
        LOGE("select timeout");
    }

    if(read_frame(fd, frame_buffers, width, height, rgb_buffer, ybuf) == 1) {
        break;  // got a frame, leave the loop
    }
}
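Both this loop and read_frame() go through xioctl() and errnoexit(), which the post doesn't quote. In typical V4L2 sample code xioctl() is just ioctl() retried on EINTR, and errnoexit() logs errno and returns an error value; a minimal sketch under that assumption (reusing the file's LOGE macro):

#include <errno.h>
#include <string.h>
#include <sys/ioctl.h>

/* Sketch, assuming the usual V4L2-example helpers; the project's versions may differ. */

/* retry ioctl() if it was interrupted by a signal */
static int xioctl(int fd, int request, void* arg) {
    int r;
    do {
        r = ioctl(fd, request, arg);
    } while(r == -1 && errno == EINTR);
    return r;
}

/* log the failing call together with errno and report failure to the caller */
static int errnoexit(const char* s) {
    LOGE("%s error %d, %s", s, errno, strerror(errno));
    return -1;
}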
Next, read_frame():
It first uses xioctl(fd, VIDIOC_DQBUF, &buf) to dequeue a filled buffer from the driver,
then calls yuyv422_to_argb() to convert that buffer from YUYV to RGB and store it in rgb_buffer,
and finally calls xioctl(fd, VIDIOC_QBUF, &buf) to queue the buffer back to the driver.
int read_frame(int fd, buffer* frame_buffers, int width, int height,
        int* rgb_buffer, int* y_buffer) {
    struct v4l2_buffer buf;
    CLEAR(buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    if(-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
        switch(errno) {
            case EAGAIN:
                return 0;
            case EIO:
            default:
                return errnoexit("VIDIOC_DQBUF");
        }
    }

    assert(buf.index < BUFFER_COUNT);

    yuyv422_to_argb(frame_buffers[buf.index].start, width, height, rgb_buffer,
            y_buffer);

    if(-1 == xioctl(fd, VIDIOC_QBUF, &buf)) {
        return errnoexit("VIDIOC_QBUF");
    }

    return 1;
}
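yuyv422_to_argb() itself isn't quoted either. A minimal sketch of what a YUYV (YUY2) to ARGB_8888 conversion typically looks like, using integer BT.601-style math; the project's actual coefficients and its use of y_buffer may differ:

/* Sketch: convert one YUYV frame into ARGB ints for an RGBA_8888 bitmap.
 * Every 4 source bytes (Y0 U Y1 V) produce two output pixels. */
static void yuyv422_to_argb(unsigned char* src, int width, int height,
        int* rgb_buffer, int* y_buffer) {
    int frame_size = width * height * 2;   /* YUYV uses 2 bytes per pixel */
    int* out = rgb_buffer;
    int* yout = y_buffer;
    int i, j;
    for(i = 0; i < frame_size; i += 4) {
        int y[2], u, v, c, d, e, r, g, b;
        y[0] = src[i];
        u    = src[i + 1];
        y[1] = src[i + 2];
        v    = src[i + 3];
        d = u - 128;
        e = v - 128;
        for(j = 0; j < 2; j++) {
            c = y[j] - 16;
            r = (298 * c + 409 * e + 128) >> 8;
            g = (298 * c - 100 * d - 208 * e + 128) >> 8;
            b = (298 * c + 516 * d + 128) >> 8;
            if(r < 0) r = 0; else if(r > 255) r = 255;
            if(g < 0) g = 0; else if(g > 255) g = 255;
            if(b < 0) b = 0; else if(b > 255) b = 255;
            /* RGBA_8888 pixels are R,G,B,A bytes in memory; read as a
             * little-endian int that is A in the top byte and R in the bottom byte */
            *out++ = 0xff000000 | (b << 16) | (g << 8) | r;
            *yout++ = y[j];
        }
    }
}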