Anonymous
Разное качество изображения в двух похожих приложениях для Android [закрыто]
Сообщение
Anonymous » 26 июн 2024, 10:41
Разное качество изображения
У меня есть два похожих приложения для Android, которые визуализируют изображение с камеры, однако одно показывает хорошее качество изображения (левое изображение), а другое — несколько более низкое качество (правое изображение).
Они оба используют ImageReader с форматом изображения YUV_420_888.
Мне удалось заставить одно из приложений выдавать изображения обоих качеств, внеся небольшое изменение в код. При удалении jpegReader.getSurface() из параметров функции cameraDevice.createCaptureSession я получаю изображение худшего качества (необработанное изображение?). Это означает, что добавление jpegReader.getSurface() включает какую-то дополнительную обработку.
Итак, мои вопросы:
Правда ли, что jpegReader улучшает качество изображения?
И как я могу получить качество обработанного изображения без использования jpegReader?
Код: Выделить всё
package com.mpgs2.basiccam;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.graphics.Bitmap;
import android.media.ImageReader;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Environment;
import android.provider.Settings;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import android.view.WindowManager;
import android.widget.ImageView;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.mpgs2.basiccam.reco.ProcessThread;
import com.mpgs2.basiccam.utils.ImageUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Calendar;
/**
 * Minimal Camera2 demo activity.
 *
 * <p>Opens the first camera, streams the preview to a {@link TextureView}, feeds a
 * downscaled YUV_420_888 stream into a {@code ProcessThread} for recognition, and
 * writes full-resolution JPEG frames to external storage.
 *
 * <p>NOTE(review): {@code jpegReader.getSurface()} is added as a target of the
 * *repeating* preview request, so the HAL produces a 4000x3000 JPEG for every
 * preview frame. Attaching a JPEG (still-capture) output also switches many HALs
 * into a higher-quality processing pipeline, which is presumably why image quality
 * changes when this surface is removed — confirm against the device's HAL behavior.
 */
public class MainActivity extends AppCompatActivity {
    private static final int REQUEST_CAMERA_PERMISSION = 200;

    private TextureView textureView;
    private CameraDevice cameraDevice;
    private CameraCaptureSession captureSession;
    private CaptureRequest.Builder previewRequestBuilder;
    private ImageReader jpegReader;   // full-resolution JPEG sink
    private ImageReader yuvReader;    // low-resolution YUV sink for recognition
    private Size previewSize;
    private Size processSize;
    private Size imageSize;
    private ProcessThread processThread;
    private int takePictureCount = 0; // alternates HDR/NOHDR file naming

    /** Opens the camera once the TextureView surface becomes available. */
    private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            openCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {}

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { return false; }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
            //Log.d("Main", "onSurfaceTextureUpdated");
        }
    };

    /** Starts the preview when the device opens; releases it on disconnect/error. */
    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            cameraDevice = camera;
            startPreview();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            camera.close();
            cameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            camera.close();
            cameraDevice = null;
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);
        textureView = findViewById(R.id.textureView);
        processSize = new Size(640, 480);
        processThread = new ProcessThread(this, processSize);
        processThread.start();
        // Request camera + storage permissions up front.
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED ||
                ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ||
                ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this, new String[]{
                    Manifest.permission.CAMERA,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE,
                    Manifest.permission.READ_EXTERNAL_STORAGE,
            }, REQUEST_CAMERA_PERMISSION);
        }
        // Android 11+ needs "all files access" to write directly under external storage.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
            if (!Environment.isExternalStorageManager()) {
                Intent intent = new Intent(Settings.ACTION_MANAGE_ALL_FILES_ACCESS_PERMISSION);
                startActivity(intent);
            }
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                openCamera();
            } else {
                finish(); // camera is essential; bail out without it
            }
        }
    }

    @Override
    protected void onResume() {
        Log.d("Main", "onResume");
        super.onResume();
        if (textureView.isAvailable()) {
            openCamera();
        } else {
            // Surface not ready yet; openCamera() will run from the listener.
            textureView.setSurfaceTextureListener(surfaceTextureListener);
        }
    }

    @Override
    protected void onPause() {
        Log.d("Main", "onPause");
        closeCamera();
        super.onPause();
    }

    // Reused buffers for YUV->ARGB conversion (allocated lazily, sized once).
    private byte[][] yuvBytes = new byte[3][];
    private int[] rgbBytes = null;
    private long lastTick = 0;

    /**
     * Copies each image plane into a reusable byte array.
     * Because of the variable row stride it's not possible to know in
     * advance the actual necessary dimensions of the yuv planes, so the
     * arrays are sized from the buffers' capacity on first use.
     */
    protected void fillBytes(final Image.Plane[] planes, final byte[][] yuvBytes) {
        for (int i = 0; i < planes.length; ++i) {
            final ByteBuffer buffer = planes[i].getBuffer();
            if (yuvBytes[i] == null) {
                yuvBytes[i] = new byte[buffer.capacity()];
            }
            buffer.get(yuvBytes[i]);
        }
    }

    /**
     * Converts a YUV frame to ARGB and hands it to the recognition thread,
     * throttled to at most one frame per second.
     */
    private void addImageToReco(Image image) {
        if (rgbBytes == null) {
            rgbBytes = new int[processSize.getWidth() * processSize.getHeight()];
        }
        Calendar cal1 = Calendar.getInstance();
        long nowTick = cal1.getTimeInMillis();
        if (nowTick - lastTick < 1000) return; // throttle: >= 1s between frames
        lastTick = nowTick;
        final Image.Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);
        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();
        ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                processSize.getWidth(),
                processSize.getHeight(),
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);
        processThread.addImage(rgbBytes);
    }

    /**
     * Creates both ImageReaders and opens the first camera.
     * The YUV reader feeds recognition; the JPEG reader saves frames to disk.
     */
    private void openCamera() {
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try {
            String cameraId = manager.getCameraIdList()[0];
            previewSize = new Size(640, 480);
            imageSize = new Size(4000, 3000);
            yuvReader = ImageReader.newInstance(processSize.getWidth(), processSize.getHeight(), ImageFormat.YUV_420_888, 2);
            yuvReader.setOnImageAvailableListener(reader -> {
                Image image = reader.acquireLatestImage();
                if (image != null) {
                    addImageToReco(image);
                    image.close();
                }
            }, null);
            jpegReader = ImageReader.newInstance(imageSize.getWidth(), imageSize.getHeight(), ImageFormat.JPEG, 1);
            jpegReader.setOnImageAvailableListener(reader -> {
                Image image = reader.acquireLatestImage();
                // FIX: acquireLatestImage() may return null (e.g. when frames are
                // dropped); the original code dereferenced it unconditionally.
                if (image == null) {
                    return;
                }
                ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                byte[] bytes = new byte[buffer.remaining()];
                buffer.get(bytes);
                String fileDir = getPictureFilePath();
                // FIX: try-with-resources — the original leaked the stream if
                // write() threw before close().
                try (FileOutputStream output = new FileOutputStream(fileDir)) {
                    output.write(bytes);
                } catch (IOException e) { // FileNotFoundException is an IOException
                    throw new RuntimeException(e);
                } finally {
                    image.close();
                }
                //Log.d("Main", "fire image available");
                takePictureCount++;
            }, null);
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            manager.openCamera(cameraId, stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Builds a repeating preview request targeting the preview surface, the YUV
     * reader and the JPEG reader, and starts the capture session.
     *
     * <p>NOTE(review): targeting the JPEG surface from the repeating request means
     * every preview frame is also encoded at full resolution — expensive, and it
     * changes the processing pipeline the HAL selects.
     */
    private void startPreview() {
        SurfaceTexture texture = textureView.getSurfaceTexture();
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Surface previewSurface = new Surface(texture);
        try {
            previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            previewRequestBuilder.addTarget(previewSurface);
            previewRequestBuilder.addTarget(yuvReader.getSurface());
            previewRequestBuilder.addTarget(jpegReader.getSurface());
            cameraDevice.createCaptureSession(Arrays.asList(previewSurface, yuvReader.getSurface(), jpegReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    captureSession = session;
                    try {
                        captureSession.setRepeatingRequest(previewRequestBuilder.build(), null, null);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    // FIX: was silently swallowed — at least record the failure.
                    Log.e("Main", "createCaptureSession: configuration failed");
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the output path for the next JPEG, alternating between HDR and
     * NOHDR name prefixes. Note: no '/' between the prefix and the timestamp, so
     * files land in /Movies as e.g. "HDR1719...jpg" — presumably intentional.
     */
    private String getPictureFilePath() {
        final File dir = Environment.getExternalStorageDirectory();
        Log.d("Main", "getVideoFilePath " + dir.getAbsolutePath());
        if (takePictureCount % 2 == 0)
            return (dir == null ? "" : (dir.getAbsolutePath() + "/Movies/HDR")) + System.currentTimeMillis() + ".jpg";
        else
            return (dir == null ? "" : (dir.getAbsolutePath() + "/Movies/NOHDR")) + System.currentTimeMillis() + ".jpg";
    }

    private void closeCaptureSession() {
        if (captureSession != null) {
            captureSession.close();
            captureSession = null;
        }
    }

    /** Releases the session, the device and both readers. */
    private void closeCamera() {
        closeCaptureSession();
        if (cameraDevice != null) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (jpegReader != null) {
            jpegReader.close();
            jpegReader = null;
        }
        // FIX: the original leaked yuvReader — only jpegReader was closed.
        if (yuvReader != null) {
            yuvReader.close();
            yuvReader = null;
        }
    }
}
Ваша помощь оценена по достоинству,
Подробнее здесь:
https://stackoverflow.com/questions/78665839/different-image-quality-of-two-similar-android-apps
1719387686
Anonymous
Разное качество изображения У меня есть два похожих приложения для Android, которые визуализируют изображение с камеры, однако одно показывает хорошее качество изображения (левое изображение), а другое — несколько более низкое качество (справа). image). Они оба используют ImageReader с форматом изображения YUV_420_888. Мне удалось создать одно из приложений для создания изображений обоих качеств, создав небольшое изменение в коде. При удалении jpegReader.getSurface() из параметров функции cameraDevice.createCaptureSession я получаю изображение нужного качества (необработанное изображение??). Это означает, что jpegReader.getSurface() добавляет какую-то обработку. Итак, мои вопросы: [list] [*]Это правда ли, что jpegReader улучшает качество изображения? [*]И как я могу получить качество обработанного изображения без использования jpegReader? [code]package com.mpgs2.basiccam; import androidx.appcompat.app.AppCompatActivity; import android.os.Bundle; import android.Manifest; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.ImageFormat; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.SurfaceTexture; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CaptureRequest; import android.media.Image; import android.graphics.Bitmap; import android.media.ImageReader; import android.media.MediaRecorder; import android.os.Build; import android.os.Environment; import android.provider.Settings; import android.util.Log; import android.util.Size; import android.view.Surface; import android.view.TextureView; import android.view.WindowManager; import android.widget.ImageView; import androidx.annotation.NonNull; import androidx.core.app.ActivityCompat; import 
androidx.core.content.ContextCompat; import com.mpgs2.basiccam.reco.ProcessThread; import com.mpgs2.basiccam.utils.ImageUtils; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Calendar; public class MainActivity extends AppCompatActivity { private static final int REQUEST_CAMERA_PERMISSION = 200; private TextureView textureView; private CameraDevice cameraDevice; private CameraCaptureSession captureSession; private CaptureRequest.Builder previewRequestBuilder; private ImageReader jpegReader; private ImageReader yuvReader; private Size previewSize; private Size processSize; private Size imageSize; private ProcessThread processThread; private int takePictureCount = 0; private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() { @Override public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { openCamera(); } @Override public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {} @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { return false; } @Override public void onSurfaceTextureUpdated(SurfaceTexture surface) { //Log.d("Main", "onSurfaceTextureUpdated"); } }; private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(@NonNull CameraDevice camera) { cameraDevice = camera; startPreview(); } @Override public void onDisconnected(@NonNull CameraDevice camera) { camera.close(); cameraDevice = null; } @Override public void onError(@NonNull CameraDevice camera, int error) { camera.close(); cameraDevice = null; } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); setContentView(R.layout.activity_main); textureView 
= findViewById(R.id.textureView); processSize = new Size(640, 480); processThread = new ProcessThread(this, processSize); processThread.start(); if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED || ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { ActivityCompat.requestPermissions(this, new String[]{ Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.READ_EXTERNAL_STORAGE, }, REQUEST_CAMERA_PERMISSION); } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { if(!Environment.isExternalStorageManager()){ Intent intent = new Intent(Settings.ACTION_MANAGE_ALL_FILES_ACCESS_PERMISSION); startActivity(intent); } } } @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (requestCode == REQUEST_CAMERA_PERMISSION) { if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { openCamera(); } else { finish(); } } } @Override protected void onResume() { Log.d("Main", "onResume"); super.onResume(); if (textureView.isAvailable()) { openCamera(); } else { textureView.setSurfaceTextureListener(surfaceTextureListener); } } @Override protected void onPause() { Log.d("Main", "onPause"); closeCamera(); super.onPause(); } private byte[][] yuvBytes = new byte[3][]; private int[] rgbBytes = null; private long lastTick = 0; protected void fillBytes(final Image.Plane[] planes, final byte[][] yuvBytes) { // Because of the variable row stride it's not possible to know in // advance the actual necessary dimensions of the yuv planes. 
for (int i = 0; i < planes.length; ++i) { final ByteBuffer buffer = planes[i].getBuffer(); if (yuvBytes[i] == null) { yuvBytes[i] = new byte[buffer.capacity()]; } buffer.get(yuvBytes[i]); } } private void addImageToReco(Image image) { if (rgbBytes == null) { rgbBytes = new int[processSize.getWidth() * processSize.getHeight()]; } Calendar cal1 = Calendar.getInstance(); long nowTick = cal1.getTimeInMillis(); if (nowTick - lastTick < 1000) return; lastTick = nowTick; final Image.Plane[] planes = image.getPlanes(); fillBytes(planes, yuvBytes); final int yRowStride = planes[0].getRowStride(); final int uvRowStride = planes[1].getRowStride(); final int uvPixelStride = planes[1].getPixelStride(); ImageUtils.convertYUV420ToARGB8888( yuvBytes[0], yuvBytes[1], yuvBytes[2], processSize.getWidth(), processSize.getHeight(), yRowStride, uvRowStride, uvPixelStride, rgbBytes); processThread.addImage(rgbBytes); } private void openCamera() { CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); try { String cameraId = manager.getCameraIdList()[0]; previewSize = new Size(640, 480); imageSize = new Size(4000, 3000); yuvReader = ImageReader.newInstance(processSize.getWidth(), processSize.getHeight(), ImageFormat.YUV_420_888, 2); yuvReader.setOnImageAvailableListener(reader -> { Image image = reader.acquireLatestImage(); if (image != null) { addImageToReco(image); image.close(); } }, null); jpegReader = ImageReader.newInstance(imageSize.getWidth(), imageSize.getHeight(), ImageFormat.JPEG, 1); jpegReader.setOnImageAvailableListener(reader -> { Image image = reader.acquireLatestImage(); ByteBuffer buffer = image.getPlanes()[0].getBuffer(); byte[] bytes = new byte[buffer.capacity()]; buffer.get(bytes); String fileDir = getPictureFilePath(); FileOutputStream output = null; try { output = new FileOutputStream(fileDir); output.write(bytes); output.close(); } catch (FileNotFoundException e) { throw new RuntimeException(e); } catch (IOException e) { throw new 
RuntimeException(e); } image.close(); //Log.d("Main", "fire image available"); takePictureCount++; }, null); if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { return; } manager.openCamera(cameraId, stateCallback, null); } catch (CameraAccessException e) { e.printStackTrace(); } } private void startPreview() { SurfaceTexture texture = textureView.getSurfaceTexture(); texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); Surface previewSurface = new Surface(texture); try { previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); previewRequestBuilder.addTarget(previewSurface); previewRequestBuilder.addTarget(yuvReader.getSurface()); previewRequestBuilder.addTarget(jpegReader.getSurface()); cameraDevice.createCaptureSession(Arrays.asList(previewSurface, yuvReader.getSurface(), jpegReader.getSurface()), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession session) { captureSession = session; try { captureSession.setRepeatingRequest(previewRequestBuilder.build(), null, null); } catch (CameraAccessException e) { e.printStackTrace(); } } @Override public void onConfigureFailed(@NonNull CameraCaptureSession session) {} }, null); } catch (CameraAccessException e) { e.printStackTrace(); } } private String getPictureFilePath() { final File dir = Environment.getExternalStorageDirectory(); Log.d("Main", "getVideoFilePath " + dir.getAbsolutePath()); if(takePictureCount % 2 == 0) return (dir == null ? "" : (dir.getAbsolutePath() + "/Movies/HDR")) + System.currentTimeMillis() + ".jpg"; else return (dir == null ? 
"" : (dir.getAbsolutePath() + "/Movies/NOHDR")) + System.currentTimeMillis() + ".jpg"; } private void closeCaptureSession() { if (captureSession != null) { captureSession.close(); captureSession = null; } } private void closeCamera() { closeCaptureSession(); if (cameraDevice != null) { cameraDevice.close(); cameraDevice = null; } if (jpegReader != null) { jpegReader.close(); jpegReader = null; } } } [/code] [/list] Ваша помощь оценена по достоинству, Подробнее здесь: [url]https://stackoverflow.com/questions/78665839/different-image-quality-of-two-similar-android-apps[/url]