// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.opengl.GLES20;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

@JNINamespace("media")
public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
    static class CaptureCapability {
        public int mWidth = 0;
        public int mHeight = 0;
        public int mDesiredFps = 0;
    }

    // Certain devices don't support the YV12 preview format correctly: any
    // device running an OS older than JELLY_BEAN, plus a list of known-buggy
    // models even on JELLY_BEAN or newer. On those devices we request NV21
    // instead and convert each frame to YV12 ourselves. This is a temporary
    // workaround until device manufacturers fix the problem or we no longer
    // need to support those devices.
    private static class DeviceImageFormatHack {
        private static final String[] sBUGGY_DEVICE_LIST = {
            "SAMSUNG-SGH-I747",
        };

        static int getImageFormat() {
            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                return ImageFormat.NV21;
            }

            for (String buggyDevice : sBUGGY_DEVICE_LIST) {
                if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
                    return ImageFormat.NV21;
                }
            }

            return ImageFormat.YV12;
        }
    }
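
    // Note: getImageFormat() above is consulted once per allocate() call, via
    // calculateImageFormat(); frames that arrive as NV21 are converted to
    // YV12 in onPreviewFrame() before being handed to native code.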

    private Camera mCamera;
    // Guards mIsRunning: startCapture()/stopCapture() run on the capture
    // control thread while onPreviewFrame() runs on the camera event thread.
    private ReentrantLock mPreviewBufferLock = new ReentrantLock();
    private int mImageFormat = ImageFormat.YV12;
    private byte[] mColorPlane = null;
    private Context mContext = null;
    // True when native code has started capture.
    private boolean mIsRunning = false;

    // Number of buffers rotated through addCallbackBuffer()/onPreviewFrame();
    // the spare buffers let the camera keep capturing while one frame is
    // being consumed.
    private static final int NUM_CAPTURE_BUFFERS = 3;
    private int mExpectedFrameSize = 0;
    private int mId = 0;
    // Native callback context variable.
    private int mNativeVideoCaptureDeviceAndroid = 0;
    private int[] mGlTextures = null;
    private SurfaceTexture mSurfaceTexture = null;
    // Same value as GLES11Ext.GL_TEXTURE_EXTERNAL_OES.
    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    private int mCameraOrientation = 0;
    private int mCameraFacing = 0;
    private int mDeviceOrientation = 0;

    private CaptureCapability mCurrentCapability = null;
    private static final String TAG = "VideoCapture";

    @CalledByNative
    public static VideoCapture createVideoCapture(
            Context context, int id, int nativeVideoCaptureDeviceAndroid) {
        return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
    }

    public VideoCapture(
            Context context, int id, int nativeVideoCaptureDeviceAndroid) {
        mContext = context;
        mId = id;
        mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
    }

    // Returns true on success, false otherwise.
    @CalledByNative
    public boolean allocate(int width, int height, int frameRate) {
        Log.d(TAG, "allocate: requested width=" + width +
              ", height=" + height + ", frameRate=" + frameRate);
        try {
            mCamera = Camera.open(mId);
        } catch (RuntimeException ex) {
            Log.e(TAG, "allocate: Camera.open: " + ex);
            return false;
        }

        try {
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            Camera.getCameraInfo(mId, cameraInfo);
            mCameraOrientation = cameraInfo.orientation;
            mCameraFacing = cameraInfo.facing;
            mDeviceOrientation = getDeviceOrientation();
            Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation +
                  ", camera orientation=" + mCameraOrientation +
                  ", facing=" + mCameraFacing);

            Camera.Parameters parameters = mCamera.getParameters();

            // Calculate fps.
            List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
            if (listFpsRange.size() == 0) {
                Log.e(TAG, "allocate: no fps range found");
                return false;
            }
            // The Camera API reports fps ranges scaled by 1000, e.g. 30 fps
            // is 30000.
            int frameRateScaled = frameRate * 1000;
            // Use the first range as the default; round its scaled minimum up
            // to whole frames per second.
            int[] defaultFpsRange = listFpsRange.get(0);
            int fpsMin = defaultFpsRange[0];
            int fpsMax = defaultFpsRange[1];
            int newFrameRate = (fpsMin + 999) / 1000;
            // Prefer a range that actually contains the requested rate.
            for (int[] fpsRange : listFpsRange) {
                if (fpsRange[0] <= frameRateScaled &&
                        frameRateScaled <= fpsRange[1]) {
                    fpsMin = fpsRange[0];
                    fpsMax = fpsRange[1];
                    newFrameRate = frameRate;
                    break;
                }
            }
            frameRate = newFrameRate;
            Log.d(TAG, "allocate: fps set to " + frameRate);

            mCurrentCapability = new CaptureCapability();
            mCurrentCapability.mDesiredFps = frameRate;

            // Calculate size.
            List<Camera.Size> listCameraSize =
                    parameters.getSupportedPreviewSizes();
            int minDiff = Integer.MAX_VALUE;
            int matchedWidth = width;
            int matchedHeight = height;
            for (Camera.Size size : listCameraSize) {
                int diff = Math.abs(size.width - width) +
                           Math.abs(size.height - height);
                Log.d(TAG, "allocate: supported resolution (" +
                      size.width + ", " + size.height + "), diff=" + diff);
                // TODO(wjia): Remove this hack (forcing width to be a multiple
                // of 32) by supporting stride in video frame buffer.
                // Right now, VideoCaptureController requires compact YV12
                // (i.e., with no padding).
                if (diff < minDiff && (size.width % 32 == 0)) {
                    minDiff = diff;
                    matchedWidth = size.width;
                    matchedHeight = size.height;
                }
            }
            if (minDiff == Integer.MAX_VALUE) {
                Log.e(TAG, "allocate: cannot find a resolution whose width " +
                      "is a multiple of 32");
                return false;
            }
            mCurrentCapability.mWidth = matchedWidth;
            mCurrentCapability.mHeight = matchedHeight;
            Log.d(TAG, "allocate: matched width=" + matchedWidth +
                  ", height=" + matchedHeight);

            calculateImageFormat(matchedWidth, matchedHeight);

            parameters.setPreviewSize(matchedWidth, matchedHeight);
            parameters.setPreviewFormat(mImageFormat);
            parameters.setPreviewFpsRange(fpsMin, fpsMax);
            mCamera.setParameters(parameters);

            // Attach a dummy SurfaceTexture to the camera: Camera will not
            // deliver preview frames unless a preview surface or texture is
            // set, even though this class only consumes frames through the
            // callback buffers. Note that the GL calls below require a
            // current GL context on the calling thread.
            mGlTextures = new int[1];
            // Generate one texture pointer and bind it as an external texture.
            GLES20.glGenTextures(1, mGlTextures, 0);
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
            // No mip-mapping with camera source.
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            // Clamp to edge is the only supported wrap mode for external
            // textures.
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

            mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
            mSurfaceTexture.setOnFrameAvailableListener(null);

            mCamera.setPreviewTexture(mSurfaceTexture);

            // Hand the camera NUM_CAPTURE_BUFFERS buffers, each large enough
            // for one frame in the chosen format.
            int bufSize = matchedWidth * matchedHeight *
                          ImageFormat.getBitsPerPixel(mImageFormat) / 8;
            for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
                byte[] buffer = new byte[bufSize];
                mCamera.addCallbackBuffer(buffer);
            }
            mExpectedFrameSize = bufSize;
        } catch (IOException ex) {
            Log.e(TAG, "allocate: " + ex);
            return false;
        } catch (RuntimeException ex) {
            // Camera.getParameters()/setParameters() throw RuntimeException
            // on failure.
            Log.e(TAG, "allocate: " + ex);
            return false;
        }

        return true;
    }
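
    // Expected call sequence from the native side, as a sketch inferred from
    // the @CalledByNative entry points in this class (the native caller is
    // not shown in this file):
    //   VideoCapture capture =
    //           VideoCapture.createVideoCapture(context, id, nativeDevice);
    //   if (capture.allocate(640, 480, 30)) {
    //       capture.startCapture();
    //       // ... frames arrive via onPreviewFrame() ...
    //       capture.stopCapture();
    //   }
    //   capture.deallocate();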

    @CalledByNative
    public int queryWidth() {
        return mCurrentCapability.mWidth;
    }

    @CalledByNative
    public int queryHeight() {
        return mCurrentCapability.mHeight;
    }

    @CalledByNative
    public int queryFrameRate() {
        return mCurrentCapability.mDesiredFps;
    }

    @CalledByNative
    public int startCapture() {
        if (mCamera == null) {
            Log.e(TAG, "startCapture: camera is null");
            return -1;
        }

        mPreviewBufferLock.lock();
        try {
            if (mIsRunning) {
                return 0;
            }
            mIsRunning = true;
        } finally {
            mPreviewBufferLock.unlock();
        }
        mCamera.setPreviewCallbackWithBuffer(this);
        mCamera.startPreview();
        return 0;
    }
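
    // Once startPreview() is running, the camera fills the callback buffers
    // handed over in allocate() and delivers each frame to onPreviewFrame()
    // on the camera event thread.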

    @CalledByNative
    public int stopCapture() {
        if (mCamera == null) {
            Log.e(TAG, "stopCapture: camera is null");
            return 0;
        }

        mPreviewBufferLock.lock();
        try {
            if (!mIsRunning) {
                return 0;
            }
            mIsRunning = false;
        } finally {
            mPreviewBufferLock.unlock();
        }

        mCamera.stopPreview();
        mCamera.setPreviewCallbackWithBuffer(null);
        return 0;
    }

    @CalledByNative
    public void deallocate() {
        if (mCamera == null) {
            return;
        }

        stopCapture();
        try {
            mCamera.setPreviewTexture(null);
            if (mGlTextures != null) {
                GLES20.glDeleteTextures(1, mGlTextures, 0);
            }
            mCurrentCapability = null;
            mCamera.release();
            mCamera = null;
        } catch (IOException ex) {
            Log.e(TAG, "deallocate: failed to deallocate camera, " + ex);
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        mPreviewBufferLock.lock();
        try {
            if (!mIsRunning) {
                return;
            }
            if (data.length == mExpectedFrameSize) {
                int rotation = getDeviceOrientation();
                if (rotation != mDeviceOrientation) {
                    mDeviceOrientation = rotation;
                    Log.d(TAG,
                          "onPreviewFrame: device orientation=" +
                          mDeviceOrientation + ", camera orientation=" +
                          mCameraOrientation);
                }
                boolean flipVertical = false;
                boolean flipHorizontal = false;
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    // Invert the rotation for front-facing cameras and
                    // request a flip when the resulting rotation is 90 or
                    // 270 degrees.
                    rotation = (mCameraOrientation + rotation) % 360;
                    rotation = (360 - rotation) % 360;
                    flipHorizontal = (rotation == 270 || rotation == 90);
                    flipVertical = flipHorizontal;
                } else {
                    rotation = (mCameraOrientation - rotation + 360) % 360;
                }
                if (mImageFormat == ImageFormat.NV21) {
                    convertNV21ToYV12(data);
                }
                nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid,
                        data, mExpectedFrameSize,
                        rotation, flipVertical, flipHorizontal);
            }
        } finally {
            mPreviewBufferLock.unlock();
            // Return the buffer to the camera's rotation whether or not the
            // frame was forwarded.
            if (camera != null) {
                camera.addCallbackBuffer(data);
            }
        }
    }
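
    // Rotation example for the back-facing branch above: a camera whose
    // sensor is mounted at 90 degrees, on a device currently rotated to 270,
    // yields (90 - 270 + 360) % 360 = 180.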

    // TODO(wjia): investigate whether reading from texture could give better
    // performance and frame rate.
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) { }

    private static class ChromiumCameraInfo {
        private final int mId;
        private final Camera.CameraInfo mCameraInfo;

        private ChromiumCameraInfo(int index) {
            mId = index;
            mCameraInfo = new Camera.CameraInfo();
            Camera.getCameraInfo(index, mCameraInfo);
        }

        @CalledByNative("ChromiumCameraInfo")
        private static int getNumberOfCameras() {
            return Camera.getNumberOfCameras();
        }

        @CalledByNative("ChromiumCameraInfo")
        private static ChromiumCameraInfo getAt(int index) {
            return new ChromiumCameraInfo(index);
        }

        @CalledByNative("ChromiumCameraInfo")
        private int getId() {
            return mId;
        }

        @CalledByNative("ChromiumCameraInfo")
        private String getDeviceName() {
            return "camera " + mId + ", facing " +
                   (mCameraInfo.facing ==
                    Camera.CameraInfo.CAMERA_FACING_FRONT ? "front" : "back");
        }

        @CalledByNative("ChromiumCameraInfo")
        private int getOrientation() {
            return mCameraInfo.orientation;
        }
    }

    private native void nativeOnFrameAvailable(
            int nativeVideoCaptureDeviceAndroid,
            byte[] data,
            int length,
            int rotation,
            boolean flipVertical,
            boolean flipHorizontal);

    // Returns the rotation of the default display in degrees (0, 90, 180 or
    // 270), or 0 if no Context is available.
    private int getDeviceOrientation() {
        int orientation = 0;
        if (mContext != null) {
            WindowManager wm = (WindowManager) mContext.getSystemService(
                    Context.WINDOW_SERVICE);
            switch (wm.getDefaultDisplay().getRotation()) {
                case Surface.ROTATION_90:
                    orientation = 90;
                    break;
                case Surface.ROTATION_180:
                    orientation = 180;
                    break;
                case Surface.ROTATION_270:
                    orientation = 270;
                    break;
                case Surface.ROTATION_0:
                default:
                    orientation = 0;
                    break;
            }
        }
        return orientation;
    }

    // Chooses the preview format and, when NV21 is needed, allocates the
    // scratch plane used by convertNV21ToYV12().
    private void calculateImageFormat(int width, int height) {
        mImageFormat = DeviceImageFormatHack.getImageFormat();
        if (mImageFormat == ImageFormat.NV21) {
            // One chroma plane: (width / 2) * (height / 2) bytes.
            mColorPlane = new byte[width * height / 4];
        }
    }

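    // Buffer layouts for the in-place conversion below (width w, height h,
    // no row padding, which the width % 32 restriction in allocate() is
    // meant to guarantee):
    //   NV21: [Y: w*h bytes][V/U interleaved: w*h/2 bytes]
    //   YV12: [Y: w*h bytes][V plane: w*h/4 bytes][U plane: w*h/4 bytes]
    // The loop compacts the V samples in place right after the Y plane and
    // stages the U samples in mColorPlane, which is then appended.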
    private void convertNV21ToYV12(byte[] data) {
        final int ySize = mCurrentCapability.mWidth * mCurrentCapability.mHeight;
        final int uvSize = ySize / 4;
        for (int i = 0; i < uvSize; i++) {
            final int index = ySize + i * 2;
            data[ySize + i] = data[index];
            mColorPlane[i] = data[index + 1];
        }
        System.arraycopy(mColorPlane, 0, data, ySize + uvSize, uvSize);
    }
}