Java examples of the class android.graphics.ImageFormat

ImageUtil.java (project: FamilyBond)

public static byte[] imageToByteArray(Image image) {
    byte[] data = null;
    if (image.getFormat() == ImageFormat.JPEG) {
        // JPEG frames arrive fully encoded in a single plane; copy the bytes out directly.
        Image.Plane[] planes = image.getPlanes();
        ByteBuffer buffer = planes[0].getBuffer();
        // Size the array from remaining() rather than capacity(): the buffer's limit marks
        // the end of the JPEG data, while capacity() may include unused trailing bytes.
        data = new byte[buffer.remaining()];
        buffer.get(data);
        return data;
    } else if (image.getFormat() == ImageFormat.YUV_420_888) {
        // Raw YUV frames are repacked to NV21 and then compressed to JPEG.
        data = NV21toJPEG(
                YUV_420_888toNV21(image),
                image.getWidth(), image.getHeight());
    }
    return data;
}
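The NV21toJPEG and YUV_420_888toNV21 helpers are not part of this snippet. A minimal sketch of NV21toJPEG, assuming it simply wraps android.graphics.YuvImage compression, could look like this:

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;

// Hypothetical helper: compresses an NV21 frame to a JPEG byte array via YuvImage.
private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    // Compress the full frame at quality 100 and return the encoded bytes.
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    return out.toByteArray();
}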
HomeActivity.java (project: Eye)
/**
 * Initialises the output surfaces for the camera's preview.
 * There will be two output surfaces:
 * 1) mSurfaceView : the surface that simply shows the preview frame.
 * 2) mImageReader : the surface that provides the actual pixel data
 *    of the preview frame.
 */
private void setupOutputSurfaces() {
    outputSurfaces = new ArrayList<>(2);

    // For the live preview.
    mSurfaceView.getHolder().setFixedSize(screenMaxX, screenMaxY);
    outputSurfaces.add(mSurfaceView.getHolder().getSurface());

    // For extracting the image.
    mImageReader = ImageReader.newInstance(screenMaxX, screenMaxY,
            ImageFormat.YUV_420_888, maxAcquired);
    mImageReader.setOnImageAvailableListener(getImageAvailableListener(), null);
    outputSurfaces.add(mImageReader.getSurface());
}
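getImageAvailableListener() is defined elsewhere in HomeActivity and is not shown here. A minimal sketch of such a listener, assuming the frame is simply converted and handed off for analysis, might look like:

import android.media.Image;
import android.media.ImageReader;

// Hypothetical listener factory: drains the newest frame and releases it promptly so the
// ImageReader's buffer queue (maxAcquired images) never stalls the camera.
private ImageReader.OnImageAvailableListener getImageAvailableListener() {
    return new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireLatestImage();
            if (image == null) {
                return; // no frame ready yet
            }
            try {
                // e.g. convert the YUV_420_888 frame to JPEG bytes, as in imageToByteArray above
                byte[] jpeg = imageToByteArray(image);
                // ... hand the bytes off to the preview-analysis pipeline ...
            } finally {
                image.close(); // always release the buffer back to the reader
            }
        }
    };
}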
Camera2Api23.java (project: PXLSRT)
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes first (available on API 23+).
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : outputSizes) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    // Fall back to the regular output sizes if no high-resolution sizes were reported.
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
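For context, a hedged sketch of how a caller might obtain the StreamConfigurationMap passed into this method (cameraManager, cameraId, sizeMap, and TAG are assumed names, not part of the original source):

try {
    CameraCharacteristics chars = cameraManager.getCameraCharacteristics(cameraId);
    StreamConfigurationMap map =
            chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map != null) {
        collectPictureSizes(sizeMap, map); // getHighResolutionOutputSizes requires API 23
    }
} catch (CameraAccessException e) {
    Log.w(TAG, "Unable to read camera characteristics", e);
}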
JavaCameraView.java (project: android-imaging-utils)
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
    return mRgba;
}
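mYuvFrameData is filled elsewhere in JavaCameraView from the raw preview bytes. A rough sketch of that pattern, with previewWidth, previewHeight, and previewBytes as assumed names:

// The NV21/YV12 preview bytes fill a single-channel Mat with height * 3 / 2 rows,
// which cvtColor then expands to a 4-channel RGBA Mat as in rgba() above.
Mat yuvFrameData = new Mat(previewHeight + previewHeight / 2, previewWidth, CvType.CV_8UC1);
yuvFrameData.put(0, 0, previewBytes); // previewBytes: byte[] from onPreviewFrame()
Mat rgba = new Mat();
Imgproc.cvtColor(yuvFrameData, rgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);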
RsUtil.java (project: hella-renderscript)
@RequiresApi(18)
public static Type createYuvType(RenderScript rs, int x, int y, int yuvFormat) {
    boolean supported = yuvFormat == ImageFormat.NV21 || yuvFormat == ImageFormat.YV12;
    if (Build.VERSION.SDK_INT >= 19) {
        supported |= yuvFormat == ImageFormat.YUV_420_888;
    }
    if (!supported) {
        throw new IllegalArgumentException("invalid yuv format: " + yuvFormat);
    }
    return new Type.Builder(rs, createYuvElement(rs)).setX(x).setY(y).setYuvFormat(yuvFormat)
            .create();
}
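createYuvElement() is not shown in this excerpt. A plausible sketch, assuming it wraps Element.createPixel with the YUV pixel kind, plus an example of building an Allocation from the resulting Type:

// Hypothetical companion to createYuvType(): the YUV element the Type is built from.
@RequiresApi(18)
public static Element createYuvElement(RenderScript rs) {
    return Element.createPixel(rs, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
}

// Example use: an Allocation sized for one NV21 preview frame.
// Allocation yuvAlloc = Allocation.createTyped(
//         rs, createYuvType(rs, width, height, ImageFormat.NV21), Allocation.USAGE_SCRIPT);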
CameraPreview.java (project: SmartMath)
public void setCamera(Camera camera) {
    mCamera = camera;
    if (mCamera != null) {
        Parameters parameters = mCamera.getParameters();
        mSupportedPreviewSizes = parameters.getSupportedPreviewSizes();
        List<Integer> formatsList = parameters.getSupportedPreviewFormats(); // supported preview formats; never null
        if (formatsList.contains(ImageFormat.NV21)) {
            parameters.setPreviewFormat(ImageFormat.NV21); // NV21 (YUV420SP) is also the default preview format
        }
        // Set the focus mode depending on what is supported; FOCUS_MODE_AUTO is preferred.
        // Supported modes do not need to be re-checked here: they were already tested in the main activity.
        if (msnFocusMode == 2) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
        } else if (msnFocusMode == 1) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
        } else {
            // Auto focus by default.
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }
        /*if ((parameters.getMaxExposureCompensation() != 0 || parameters.getMinExposureCompensation() != 0)
                && ActivitySettings.msnPhotoTakenFrom == 1) { // screen mode
            parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
        } */
        parameters.setExposureCompensation(0); // leave exposure unadjusted; screen mode brings little benefit
        List<String> scenesList = parameters.getSupportedSceneModes();
        if (scenesList != null && scenesList.contains(Camera.Parameters.SCENE_MODE_STEADYPHOTO)) {
            parameters.setSceneMode(Camera.Parameters.SCENE_MODE_STEADYPHOTO); // may crash on some devices
        }
        boolean bSuccessful = setCameraParams(mCamera, parameters);
        requestLayout();
    }
}
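setCameraParams() is referenced above but not included in this excerpt. A minimal sketch, assuming it only guards Camera.setParameters against the RuntimeException some devices throw:

// Hypothetical helper: applies the parameters and reports whether the camera accepted them.
private boolean setCameraParams(Camera camera, Camera.Parameters parameters) {
    try {
        camera.setParameters(parameters);
        return true;
    } catch (RuntimeException e) {
        Log.w("CameraPreview", "setParameters failed, keeping previous parameters", e);
        return false;
    }
}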
CameraHandler.java (project: androidthings-imageclassifier)
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);

    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);

    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
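mStateCallback is a field of CameraHandler that is not shown in this snippet. A hedged sketch of such a CameraDevice.StateCallback, with mCameraDevice as an assumed field:

// Hypothetical state callback: stores the opened CameraDevice and cleans up on
// disconnect or error.
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(CameraDevice cameraDevice) {
        mCameraDevice = cameraDevice;
        // From here a capture session can be created against mImageReader.getSurface().
    }

    @Override
    public void onDisconnected(CameraDevice cameraDevice) {
        cameraDevice.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(CameraDevice cameraDevice, int error) {
        cameraDevice.close();
        mCameraDevice = null;
        Log.w(TAG, "Camera device error: " + error);
    }
};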
CameraEngine.java (project: Fatigue-Detection)
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing = facingFront ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId = getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        // Camera.open() may return null if the camera is unavailable, so check before using it.
        if (camera != null) {
            camera.setPreviewCallbackWithBuffer(this);
            initRotateDegree(currentCameraId);
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList = mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList = mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList = mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);
            previewSize = choosePreferredSize(supportedPreviewSizesList, preferredRatio);
            Camera.Size photoSize = choosePreferredSize(supportedPictureSizesList, preferredRatio);
            frameHeight = previewSize.width;
            frameWidth = previewSize.height;
            Log.d(TAG, "openCamera: chose preview size " + previewSize.height + "x" + previewSize.width);
            mParams.setPreviewSize(frameHeight, frameWidth);
            mParams.setPictureSize(photoSize.width, photoSize.height);
            Log.d(TAG, "openCamera: chose photo size " + photoSize.height + "x" + photoSize.width);
            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);

            // Allocate a callback buffer large enough for one preview frame in the current format.
            int size = frameWidth * frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer == null || mBuffer.length != size)
                mBuffer = new byte[size];
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened = true;
        }
    }
}
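Because the engine registers itself with setPreviewCallbackWithBuffer(this), it also implements Camera.PreviewCallback. A simplified sketch of that callback, with the frame-chain handling omitted:

// Hedged sketch: receive one preview frame, process it, then return the buffer to the
// camera so the next frame can be delivered into it.
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // data holds one preview frame in the format chosen above (NV21 by default).
    // ... copy or analyse the frame here ...
    camera.addCallbackBuffer(data); // hand the buffer back for reuse
}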