Java code examples for the class android.graphics.YuvImage
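android.graphics.YuvImage wraps a YUV byte array (typically an NV21 camera preview frame) and can compress a rectangular region of it directly to JPEG via compressToJpeg(). The snippets below all follow that pattern; as a minimal self-contained sketch (the class and method names here are illustrative, not taken from any of the projects below):

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

import java.io.ByteArrayOutputStream;

public final class Nv21ToJpeg {
    // Compress a full NV21 preview frame to a JPEG byte array, or return null on failure.
    public static byte[] toJpeg(byte[] nv21, int width, int height, int quality) {
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        if (!yuv.compressToJpeg(new Rect(0, 0, width, height), quality, out)) {
            return null;
        }
        return out.toByteArray();
    }
}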

CameraPreview.java (project: ProjectOxford-Apps-MimickerAlarm)
@Override
// Decode the image data and rotate it to the proper orientation,
// then run the callback, if any, on the image to do post-processing.
protected Boolean doInBackground(Object... params) {
    byte[] data = (byte[]) params[0];
    Camera camera = (Camera) params[1];
    Camera.Parameters parameters = camera.getParameters();
    int format = parameters.getPreviewFormat();
    // YUV preview formats need to be compressed to JPEG before they can be decoded.
    // Note: YuvImage itself only accepts NV21 and YUY2.
    if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
        int w = parameters.getPreviewSize().width;
        int h = parameters.getPreviewSize().height;
        // Wrap the preview data in a YuvImage
        YuvImage yuv_image = new YuvImage(data, format, w, h, null);
        // Compress the YUV data to JPEG
        Rect rect = new Rect(0, 0, w, h);
        ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
        yuv_image.compressToJpeg(rect, 100, output_stream);
        byte[] imageBytes = output_stream.toByteArray();
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);

        Matrix transform = new Matrix();
        if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            transform.preScale(-1, 1);
        }
        transform.postRotate(mCameraRotation);
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), transform, true);

        if (mCapturedCapturedImageCallbackAsync != null) {
            mCapturedCapturedImageCallbackAsync.execute(bitmap);
        }
    }
    return null;
}
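A sketch of how a task like the one above could be handed a frame; mCamera and ProcessPreviewFrameTask are assumed names (the task class stands in for the AsyncTask shown), while the callback API itself is standard android.hardware.Camera:

// Hypothetical call site; ProcessPreviewFrameTask stands in for the task above.
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        new ProcessPreviewFrameTask().execute(data, camera);
    }
});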
TcpClient.java (project: driverless-rccar)
private byte[] preprocess(byte[] preview, int width, int height) {
    byte[] jpeg = null;
    YuvImage image = new YuvImage(preview, ImageFormat.NV21, width, height, null);
    Rect r = new Rect(0, 0, width, height);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    boolean ok = image.compressToJpeg(r, 100, baos);
    if (ok) {
        jpeg = baos.toByteArray();
    }
    return jpeg;
}
YuvImageSubject.java (project: truth-android)
public static SubjectFactory<YuvImageSubject, YuvImage> type() {
  return new SubjectFactory<YuvImageSubject, YuvImage>() {
    @Override
    public YuvImageSubject getSubject(FailureStrategy fs, YuvImage that) {
      return new YuvImageSubject(fs, that);
    }
  };
}
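The factory above targets the pre-1.0 Truth API (SubjectFactory/FailureStrategy). A hedged usage sketch; hasWidth and hasHeight are assumed assertion methods on YuvImageSubject, and nv21Data is a placeholder buffer:

import static com.google.common.truth.Truth.assert_;

import org.junit.Test;

@Test
public void yuvImageHasExpectedSize() {
    // nv21Data holds an NV21 frame for a 320x240 preview (placeholder).
    YuvImage image = new YuvImage(nv21Data, ImageFormat.NV21, 320, 240, null);
    assert_().about(YuvImageSubject.type()).that(image).hasWidth(320);
    assert_().about(YuvImageSubject.type()).that(image).hasHeight(240);
}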
Utils.java (project: ISeeU)
public static byte[] frameByteToJpegByte(byte[] data, Camera camera) {
    try {
        Camera.Parameters parameters = camera.getParameters();
        Camera.Size size = parameters.getPreviewSize();
        YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
                size.width, size.height, null);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        image.compressToJpeg(
                new Rect(0, 0, image.getWidth(), image.getHeight()), COMPRESS_QUALITY,
                outputStream);
        return outputStream.toByteArray();
    } catch (Exception e) {
        return null;
    }
}
CTandroidAV.java (project: cloudturbine)
byte[] jpegFromPreview(byte[] currentPreview) {

        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        Parameters parameters = mCamera.getParameters();
        Size size = parameters.getPreviewSize();
        YuvImage image = new YuvImage(currentPreview, parameters.getPreviewFormat(), size.width, size.height, null);

        image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), quality, baos);

        byte[] jpeg = baos.toByteArray();
        float rotation = (float) 0.;
        if (cameraId == 1 && mDisplay.getRotation() == Surface.ROTATION_0)
                rotation = (float) 270.;
        else if (cameraId == 0 && mDisplay.getRotation() == Surface.ROTATION_0)
                rotation = (float) 90.;

        if (debug) Log.i(TAG, "cameraId: " + cameraId + ", getRotation: " + mDisplay.getRotation() + ", rotation: " + rotation);

        if (rotation != 0.) {
            // This is the same image as the preview but in JPEG and not rotated
            Bitmap bitmap = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
            ByteArrayOutputStream rotatedStream = new ByteArrayOutputStream();

            // Rotate the Bitmap
            Matrix matrix = new Matrix();
            matrix.postRotate(rotation);

            // We rotate the same Bitmap
            bitmap = Bitmap.createBitmap(bitmap, 0, 0, image.getWidth(), image.getHeight(), matrix, false);

            // We dump the rotated Bitmap to the stream
            bitmap.compress(CompressFormat.JPEG, 50, rotatedStream);
            jpeg = rotatedStream.toByteArray();
            // whew
        }
        return jpeg;
}
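The hard-coded 90/270 values above only handle ROTATION_0. The general rotation of a preview frame can be derived from Camera.CameraInfo plus the display rotation, roughly following the Camera.setDisplayOrientation() documentation; a sketch, not the project's own code:

static int previewRotationDegrees(int cameraId, int displayRotation) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int degrees = 0;
    switch (displayRotation) {
        case Surface.ROTATION_0:   degrees = 0;   break;
        case Surface.ROTATION_90:  degrees = 90;  break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        // Front camera: also compensates for the mirror.
        return (360 - (info.orientation + degrees) % 360) % 360;
    }
    return (info.orientation - degrees + 360) % 360;
}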
MainActivity.java (project: cameraserve)
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
    previewStream.reset();
    Camera.Parameters p = camera.getParameters();

    int previewHeight = p.getPreviewSize().height,
        previewWidth = p.getPreviewSize().width;

    switch(rotationSteps) {
        case 1:
            bytes = Rotator.rotateYUV420Degree90(bytes, previewWidth, previewHeight);
            break;
        case 2:
            bytes = Rotator.rotateYUV420Degree180(bytes, previewWidth, previewHeight);
            break;
        case 3:
            bytes = Rotator.rotateYUV420Degree270(bytes, previewWidth, previewHeight);
            break;
    }

    if (rotationSteps == 1 || rotationSteps == 3) {
        int tmp = previewHeight;
        previewHeight = previewWidth;
        previewWidth = tmp;
    }

    int format = p.getPreviewFormat();
    new YuvImage(bytes, format, previewWidth, previewHeight, null)
            .compressToJpeg(new Rect(0, 0, previewWidth, previewHeight),
                    100, previewStream);

    setJpegFrame(previewStream);
}
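The Rotator helpers are not shown above. A commonly seen sketch of a 90-degree clockwise rotation for NV21 data, assumed (but not confirmed) to match what Rotator.rotateYUV420Degree90 does:

public static byte[] rotateNV21Degree90(byte[] data, int imageWidth, int imageHeight) {
    byte[] yuv = new byte[imageWidth * imageHeight * 3 / 2];
    // Rotate the Y plane: each output row is an input column read bottom-up.
    int i = 0;
    for (int x = 0; x < imageWidth; x++) {
        for (int y = imageHeight - 1; y >= 0; y--) {
            yuv[i++] = data[y * imageWidth + x];
        }
    }
    // Rotate the interleaved VU plane, keeping each V/U pair together.
    i = imageWidth * imageHeight * 3 / 2 - 1;
    for (int x = imageWidth - 1; x > 0; x -= 2) {
        for (int y = 0; y < imageHeight / 2; y++) {
            yuv[i--] = data[(imageWidth * imageHeight) + (y * imageWidth) + x];
            yuv[i--] = data[(imageWidth * imageHeight) + (y * imageWidth) + (x - 1)];
        }
    }
    return yuv;
}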
OdroidCamera.java (project: SecureSmartHome)
private void sendImage() {
    if (getContainer() == null || lastSnapshot == null) {
        return;
    }
    int width = params.getPreviewSize().width;
    int height = params.getPreviewSize().height;
    Rect rect = new Rect(0, 0, width, height);
    YuvImage yuvimage = new YuvImage(lastSnapshot, ImageFormat.NV21, width, height, null);

    try (ByteArrayOutputStream outStream = new ByteArrayOutputStream()) {
        yuvimage.compressToJpeg(rect, 80, outStream);
        byte[] jpegData = outStream.toByteArray();

        CameraPayload payload = new CameraPayload(getCameraID(), getModuleName());
        payload.setPicture(jpegData);
        Message reply = new Message(payload);
        requireComponent(OutgoingRouter.KEY).sendReply(getReplyToMessage(), reply);
        imageSent = true;

        //File file = new File(Environment.getExternalStorageDirectory().getPath(),
        //        "snapshot" + System.currentTimeMillis() + ".jpg");
        //FileOutputStream outstr = new FileOutputStream(file);
        //yuvimage.compressToJpeg(rect, 80, outstr);
    } catch (IOException e) {
        Log.e(TAG, "Could not compress image", e);
    }

    finish();
}
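The commented-out block hints at dumping the snapshot to external storage for debugging; a sketch of doing that with try-with-resources (the helper name is hypothetical, and it assumes WRITE_EXTERNAL_STORAGE is already granted):

// Hypothetical debug helper mirroring the commented-out code above.
private void dumpJpegForDebug(YuvImage yuvimage, Rect rect) {
    File file = new File(Environment.getExternalStorageDirectory(),
            "snapshot" + System.currentTimeMillis() + ".jpg");
    try (FileOutputStream outStream = new FileOutputStream(file)) {
        // Same 80% quality as the network path above.
        yuvimage.compressToJpeg(rect, 80, outStream);
    } catch (IOException e) {
        Log.e(TAG, "Could not write debug snapshot", e);
    }
}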
CameraPreviewCallback.java (project: RadicalRobotics2017)
@Override
public void onPreviewFrame(final byte[] data, Camera camera) {
    if (timestamp == 0) {
        timestamp = System.nanoTime();
    }
    if (timestamp + delay >= System.nanoTime()) {
        return;
    }

    if (extensibleCameraManager != null && context.cameraManager().getCamera() != null) {
        Camera.Parameters parameters = context.cameraManager().getCamera().getParameters();
        Camera.Size previewSize = parameters.getPreviewSize();
        YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
                previewSize.width, previewSize.height, null);

        synchronized (outputStream) {
            image.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 55, outputStream);

            if (jpeg == null) {
                jpeg = outputStream.toByteArray();
            } else {
                System.arraycopy(outputStream.toByteArray(), 0, jpeg, 0, jpeg.length);
            }
            outputStream.reset();
        }
        try {
            Bitmap bitmap = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
            if (bitmap != null) {
                extensibleCameraManager.addImage(bitmap);
            }
            timestamp = System.nanoTime();
        } catch (Exception e) {
            Log.e(TAG, e.getLocalizedMessage(), e);
        }

    }
}
VideoStreamingThread.java (project: faceswap)
@Override
protected byte[] doInBackground(Object... objs) {
    byte[] frame = (byte[]) objs[0];
    Parameters parameters = (Parameters) objs[1];
    if (frame_firstUpdateTime == 0) {
        frame_firstUpdateTime = System.currentTimeMillis();
    }
    frame_currentUpdateTime = System.currentTimeMillis();

    int datasize = 0;
    cameraImageSize = parameters.getPreviewSize();
    YuvImage image = new YuvImage(frame, parameters.getPreviewFormat(), cameraImageSize.width,
            cameraImageSize.height, null);
    ByteArrayOutputStream tmpBuffer = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, tmpBuffer);
    Log.d(LOG_TAG, "compression took: "
            + (System.currentTimeMillis()-frame_currentUpdateTime));
    synchronized (frameLock) {
        frameBuffer = tmpBuffer.toByteArray();
        frameGeneratedTime = System.currentTimeMillis();
        frameID++;
        frameLock.notify();
    }
    datasize = tmpBuffer.size();
    frame_count++;
    frame_totalsize += datasize;
    if (frame_count % 50 == 0) {
        Log.d(LOG_TAG, "(IMG)\t" +
                "BW: " + 8.0 * frame_totalsize / (frame_currentUpdateTime - frame_firstUpdateTime) / 1000 +
                " Mbps\tCurrent FPS: " + 8.0 * datasize / (frame_currentUpdateTime - frame_prevUpdateTime) / 1000 + " Mbps\t" +
                "FPS: " + 1000.0 * frame_count / (frame_currentUpdateTime - frame_firstUpdateTime));
    }
    frame_prevUpdateTime = frame_currentUpdateTime;
    return tmpBuffer.toByteArray();
}
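The frameLock/notify() handshake above implies a sender thread consuming frameBuffer; a sketch of what that consumer could look like, reusing the field names from the snippet (the project's actual consumer code is not shown):

// Hypothetical consumer of the frameBuffer produced above.
private byte[] waitForNextFrame(long lastSentFrameID) throws InterruptedException {
    synchronized (frameLock) {
        while (frameID <= lastSentFrameID) {
            frameLock.wait();
        }
        return frameBuffer;
    }
}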
AndroidCameraOutputMJPEG.java (project: osh-android)
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
    long timeStamp = SystemClock.elapsedRealtimeNanos();

    // select current buffer
    YuvImage yuvImg = (data == imgBuf1) ? yuvImg1 : yuvImg2;

    // compress as JPEG
    jpegBuf.reset();
    yuvImg.compressToJpeg(imgArea, 90, jpegBuf);

    // release buffer for next frame
    camera.addCallbackBuffer(data);

    // generate new data record
    DataBlock newRecord;
    if (latestRecord == null)
        newRecord = dataStruct.createDataBlock();
    else
        newRecord = latestRecord.renew();

    // set time stamp
    double samplingTime = getJulianTimeStamp(timeStamp);
    newRecord.setDoubleValue(0, samplingTime);

    // set encoded data
    AbstractDataBlock frameData = ((DataBlockMixed)newRecord).getUnderlyingObject()[1];
    frameData.setUnderlyingObject(jpegBuf.toByteArray());

    // send event
    latestRecord = newRecord;
    latestRecordTime = System.currentTimeMillis();
    eventHandler.publishEvent(new SensorDataEvent(latestRecordTime, AndroidCameraOutputMJPEG.this, latestRecord));          
}
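The imgBuf1/imgBuf2 and yuvImg1/yuvImg2 pair above implies a double-buffered preview configured elsewhere in the class; a sketch of what that setup could look like, with field names assumed from the callback:

// Hypothetical setup for the double-buffering used above; field names are assumptions.
void startPreview(Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    int bufSize = size.width * size.height
            * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;

    imgBuf1 = new byte[bufSize];
    imgBuf2 = new byte[bufSize];
    // Each YuvImage wraps one callback buffer, so no per-frame copy is needed.
    yuvImg1 = new YuvImage(imgBuf1, ImageFormat.NV21, size.width, size.height, null);
    yuvImg2 = new YuvImage(imgBuf2, ImageFormat.NV21, size.width, size.height, null);
    imgArea = new Rect(0, 0, size.width, size.height);
    jpegBuf = new ByteArrayOutputStream();

    camera.addCallbackBuffer(imgBuf1);
    camera.addCallbackBuffer(imgBuf2);
    camera.setPreviewCallbackWithBuffer(this);
    camera.startPreview();
}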

