I am using the camera2 API. I need to take a photo in the service without a preview. It works, but the photos have a bad exposure. The pictures are very dark or sometimes very light. How can I fix my code so that the photos are of high quality? I'm using the front camera.
public class Camera2Service extends Service
{
    protected static final String TAG = "myLog";

    // getCamera() returns the first camera whose facing does NOT equal this value,
    // so with LENS_FACING_BACK here the service ends up on the FRONT camera,
    // which is what the author says they are using.
    protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;

    protected CameraDevice cameraDevice;
    protected CameraCaptureSession session;
    protected ImageReader imageReader;

    /** Device lifecycle events; a successful open kicks off session creation. */
    protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            Log.d(TAG, "CameraDevice.StateCallback onOpened");
            cameraDevice = camera;
            actOnReadyCameraDevice();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
            // A disconnected device must still be closed to release the hardware.
            camera.close();
            cameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            Log.e(TAG, "CameraDevice.StateCallback onError " + error);
            camera.close();
            cameraDevice = null;
        }
    };

    /** Session lifecycle events; onReady starts the repeating capture request. */
    protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onReady(CameraCaptureSession session) {
            Camera2Service.this.session = session;
            CaptureRequest request = createCaptureRequest();
            if (request == null) {
                return; // createCaptureRequest already logged the failure
            }
            try {
                // A repeating request keeps frames flowing so auto-exposure can
                // converge; a single cold capture comes out badly exposed.
                session.setRepeatingRequest(request, null, null);
            } catch (CameraAccessException e) {
                Log.e(TAG, "setRepeatingRequest failed", e);
            }
        }

        @Override
        public void onConfigured(CameraCaptureSession session) {
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
            Log.e(TAG, "CameraCaptureSession configuration failed");
        }
    };

    /** Drains each frame from the reader and hands it to processImage(). */
    protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Log.d(TAG, "onImageAvailable");
            Image img = reader.acquireLatestImage();
            if (img != null) {
                try {
                    processImage(img);
                } finally {
                    // Single close here; processImage must NOT close the image as
                    // well, otherwise it would be closed twice.
                    img.close();
                }
            }
        }
    };

    /**
     * Picks a camera, creates the JPEG ImageReader, then opens the device.
     * The reader is created BEFORE openCamera so its surface already exists by
     * the time onOpened fires and the capture session is configured.
     */
    public void readyCamera() {
        CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
        try {
            String pickedCamera = getCamera(manager);
            if (pickedCamera == null) {
                Log.e(TAG, "No suitable camera found");
                return;
            }
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            imageReader = ImageReader.newInstance(1920, 1088, ImageFormat.JPEG, 2 /* images buffered */);
            imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
            Log.d(TAG, "imageReader created");
            manager.openCamera(pickedCamera, cameraStateCallback, null);
        } catch (CameraAccessException e) {
            Log.e(TAG, "readyCamera failed", e);
        }
    }

    /**
     * Returns the id of the first camera whose lens facing differs from
     * {@link #CAMERACHOICE} (i.e. the front camera), or null if none is found.
     */
    public String getCamera(CameraManager manager) {
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                // LENS_FACING may be absent; unboxing a null Integer would NPE.
                Integer cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (cOrientation != null && cOrientation != CAMERACHOICE) {
                    return cameraId;
                }
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "getCamera failed", e);
        }
        return null;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Log.d(TAG, "onStartCommand flags " + flags + " startId " + startId);
        readyCamera();
        return super.onStartCommand(intent, flags, startId);
    }

    @Override
    public void onCreate() {
        Log.d(TAG,"onCreate service");
        super.onCreate();
    }

    /** Creates a capture session that streams straight into the ImageReader. */
    public void actOnReadyCameraDevice()
    {
        try {
            cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
        } catch (CameraAccessException e) {
            Log.e(TAG, "createCaptureSession failed", e);
        }
    }

    @Override
    public void onDestroy() {
        // Guard against onDestroy arriving before the session/device were created.
        if (session != null) {
            try {
                session.abortCaptures();
            } catch (CameraAccessException e) {
                Log.e(TAG, "abortCaptures failed", e);
            }
            session.close();
        }
        if (cameraDevice != null) {
            cameraDevice.close(); // leaking the device blocks other camera clients
        }
        super.onDestroy();
    }

    /**
     * Writes a JPEG frame to external storage. The caller retains ownership of
     * {@code image} and is responsible for closing it.
     */
    private void processImage(Image image) {
        if (image.getFormat() != ImageFormat.JPEG) {
            return;
        }
        // NOTE(review): fixed name means every frame overwrites the previous file.
        File file = new File(Environment.getExternalStorageDirectory() + "/Pictures/image.jpg");
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()]; // sized to hold the whole JPEG
        buffer.get(bytes); // copies image from buffer to byte array
        FileOutputStream output = null;
        try {
            output = new FileOutputStream(file);
            output.write(bytes); // write the byte array to file
            Log.d(TAG, "image saved: " + file);
        } catch (IOException e) {
            // FileNotFoundException is an IOException; one handler covers both.
            e.printStackTrace();
        } finally {
            if (null != output) {
                try {
                    output.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Builds the repeating request. An explicit CONTROL_MODE_AUTO enables the
     * 3A (auto exposure/focus/white-balance) routines; the original request had
     * no 3A setting at all, which per the discussion below is what produced the
     * badly exposed frames.
     *
     * @return the request, or null if the camera could not be accessed
     */
    protected CaptureRequest createCaptureRequest() {
        try {
            CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
            builder.addTarget(imageReader.getSurface());
            return builder.build();
        } catch (CameraAccessException e) {
            Log.e(TAG, "createCaptureRequest failed", e);
            return null;
        }
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}
Sergey, I copied your code and indeed I was able to reproduce the issue. I got totally black pictures out of Google Pixel 2 (Android 8.1).
However, I have successfully resolved the black-pic issue as follows:
First, in case anyone is wondering, you actually do NOT need any Activity, or any preview UI element as many other threads about the Camera API claim! That used to be true for the deprecated Camera v1 API. Now, with the new Camera v2 API, all I needed was a foreground service.
To start the capturing process, I used this code:
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest (CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
builder.set (CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
builder.set (CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
builder.addTarget (imageReader.getSurface ());
captureRequest = builder.build ();
Then, in ImageReader.onImageAvailable, I skipped the first N pictures (meaning I did not save them). I let the session run, and capture more pics without saving them.
That gave the camera enough time to gradually and automatically adjust its exposure parameters. Then, after the first N ignored photos, I saved a photo, which was correctly exposed — not black at all.
The value of the N constant will depend on characteristics of your hardware. So you will need to determine the ideal value of N experimentally for your hardware. You can also use histogram-based heuristic automation. At the beginning of experiments, don't be afraid to start saving only after hundreds of milliseconds of calibration have passed.
Finally, in a lot of similar threads people suggest just waiting e.g. 500 ms after creating the session and only then taking a single picture. That does not help. You really have to let the camera run and take many pictures rapidly (at the fastest rate possible). For that, simply use the setRepeatingRequest method (as in your original code).
Hope this helps. :)
EDITED TO ADD: When skipping the initial N pictures, you need to call the acquireLatestImage method of ImageReader for each of those skipped pictures too. Otherwise, it won't work.
Full original code with my changes incorporated that resolved the issue, tested and confirmed as working on Google Pixel 2, Android 8.1:
public class Camera2Service extends Service
{
    /** Milliseconds to let the repeating capture run before saving a frame,
     * giving auto-exposure time to converge. Tune per device. */
    protected static final int CAMERA_CALIBRATION_DELAY = 500;
    protected static final String TAG = "myLog";

    /** Lens facing to select in getCamera(); here the back camera. */
    protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;

    // Wall-clock time at which the repeating request started; frames arriving
    // before startTime + CAMERA_CALIBRATION_DELAY are discarded.
    protected static long cameraCaptureStartTime;

    protected CameraDevice cameraDevice;
    protected CameraCaptureSession session;
    protected ImageReader imageReader;

    /** Device lifecycle events; a successful open kicks off session creation. */
    protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            Log.d(TAG, "CameraDevice.StateCallback onOpened");
            cameraDevice = camera;
            actOnReadyCameraDevice();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
            // A disconnected device must still be closed to release the hardware.
            camera.close();
            cameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            Log.e(TAG, "CameraDevice.StateCallback onError " + error);
            camera.close();
            cameraDevice = null;
        }
    };

    /** Session lifecycle events; onReady starts the repeating capture request
     * and records the start of the calibration window. */
    protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onReady(CameraCaptureSession session) {
            Camera2Service.this.session = session;
            CaptureRequest request = createCaptureRequest();
            if (request == null) {
                return; // createCaptureRequest already logged the failure
            }
            try {
                session.setRepeatingRequest(request, null, null);
                cameraCaptureStartTime = System.currentTimeMillis();
            } catch (CameraAccessException e) {
                Log.e(TAG, "setRepeatingRequest failed", e);
            }
        }

        @Override
        public void onConfigured(CameraCaptureSession session) {
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
            Log.e(TAG, "CameraCaptureSession configuration failed");
        }
    };

    /**
     * Drains every frame (acquireLatestImage MUST be called even for frames we
     * discard, or the reader's buffers fill up) but only saves frames captured
     * after the calibration delay, when exposure has settled.
     */
    protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Log.d(TAG, "onImageAvailable");
            Image img = reader.acquireLatestImage();
            if (img != null) {
                try {
                    if (System.currentTimeMillis() > cameraCaptureStartTime + CAMERA_CALIBRATION_DELAY) {
                        processImage(img);
                    }
                } finally {
                    // Single close here; processImage must NOT close the image
                    // as well, otherwise it would be closed twice.
                    img.close();
                }
            }
        }
    };

    /**
     * Picks a camera, creates the JPEG ImageReader, then opens the device.
     * The reader is created BEFORE openCamera so its surface already exists by
     * the time onOpened fires and the capture session is configured.
     */
    public void readyCamera() {
        CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
        try {
            String pickedCamera = getCamera(manager);
            if (pickedCamera == null) {
                Log.e(TAG, "No suitable camera found");
                return;
            }
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            imageReader = ImageReader.newInstance(1920, 1088, ImageFormat.JPEG, 2 /* images buffered */);
            imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
            Log.d(TAG, "imageReader created");
            manager.openCamera(pickedCamera, cameraStateCallback, null);
        } catch (CameraAccessException e) {
            Log.e(TAG, "readyCamera failed", e);
        }
    }

    /**
     * Returns the id of the first camera whose lens facing equals
     * {@link #CAMERACHOICE}, or null if none is found.
     */
    public String getCamera(CameraManager manager) {
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                // LENS_FACING may be absent; unboxing a null Integer would NPE.
                Integer cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (cOrientation != null && cOrientation == CAMERACHOICE) {
                    return cameraId;
                }
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "getCamera failed", e);
        }
        return null;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Log.d(TAG, "onStartCommand flags " + flags + " startId " + startId);
        readyCamera();
        return super.onStartCommand(intent, flags, startId);
    }

    @Override
    public void onCreate() {
        Log.d(TAG,"onCreate service");
        super.onCreate();
    }

    /** Creates a capture session that streams straight into the ImageReader. */
    public void actOnReadyCameraDevice()
    {
        try {
            cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
        } catch (CameraAccessException e) {
            Log.e(TAG, "createCaptureSession failed", e);
        }
    }

    @Override
    public void onDestroy() {
        // Guard against onDestroy arriving before the session/device were created.
        if (session != null) {
            try {
                session.abortCaptures();
            } catch (CameraAccessException e) {
                Log.e(TAG, "abortCaptures failed", e);
            }
            session.close();
        }
        if (cameraDevice != null) {
            cameraDevice.close(); // leaking the device blocks other camera clients
        }
        super.onDestroy();
    }

    /**
     * Writes a JPEG frame to external storage. The caller retains ownership of
     * {@code image} and is responsible for closing it.
     */
    private void processImage(Image image) {
        if (image.getFormat() != ImageFormat.JPEG) {
            return;
        }
        // NOTE(review): fixed name means every frame overwrites the previous file.
        File file = new File(Environment.getExternalStorageDirectory() + "/Pictures/image.jpg");
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()]; // sized to hold the whole JPEG
        buffer.get(bytes); // copies image from buffer to byte array
        FileOutputStream output = null;
        try {
            output = new FileOutputStream(file);
            output.write(bytes); // write the byte array to file
            Log.d(TAG, "image saved: " + file);
        } catch (IOException e) {
            // FileNotFoundException is an IOException; one handler covers both.
            e.printStackTrace();
        } finally {
            if (null != output) {
                try {
                    output.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Builds the repeating request using the settings the answer above says it
     * actually tested (the previous listing still used a bare TEMPLATE_RECORD,
     * inconsistent with the described fix): a video-snapshot template with full
     * auto 3A and the flash disabled.
     *
     * @return the request, or null if the camera could not be accessed
     */
    protected CaptureRequest createCaptureRequest() {
        try {
            CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
            builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
            builder.addTarget(imageReader.getSurface());
            return builder.build();
        } catch (CameraAccessException e) {
            Log.e(TAG, "createCaptureRequest failed", e);
            return null;
        }
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}