Android利用SurfaceView顯示Camera圖像爬坑記(四)

2021-01-17 CSDN技術社區

最近幾天沒啥事研究了下,android自定義相機的實現,主要實現了通過surfaceview跟camera實現拍照,預覽,圖片的保存。

能夠進行前後攝像頭的切換,自動和手動聚焦,設置閃光燈模式 ,人臉識別等。可能有一些不是特別好的地方,歡迎指出

先上效果圖


1. 圖像預覽的 SurfaceView

package com.example.camera.preview;import android.content.Context;import android.graphics.PixelFormat;import android.hardware.Camera;import android.os.Handler;import android.util.AttributeSet;import android.view.SurfaceHolder;import android.view.SurfaceView;import com.example.camera.util.CameraUtil;/** * Created by renlei * DATE: 15-11-5 * Time: 下午4:52 */public class MySurfacePreview extends SurfaceView implements SurfaceHolder.Callback { private SurfaceHolder surfaceHolder; private Handler mHandler; public MySurfacePreview(Context context, AttributeSet attrs) { super(context, attrs); surfaceHolder = getHolder(); surfaceHolder.setFormat(PixelFormat.TRANSPARENT);//translucent半透明 transparent透明 surfaceHolder.addCallback(this); } @Override public void surfaceCreated(SurfaceHolder holder) { CameraUtil.getInstance().doOpenCamera(Camera.CameraInfo.CAMERA_FACING_BACK); } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { CameraUtil.getInstance().doStartPreview(surfaceHolder); if (mHandler != null) { mHandler.postDelayed(new Runnable() { @Override public void run() { mHandler.sendEmptyMessage(CameraUtil.PREVIEW_HAS_STARTED); } }, 1000); } } @Override public void surfaceDestroyed(SurfaceHolder holder) { CameraUtil.getInstance().doStopPreview(); } public void setmHandler(Handler handler) { this.mHandler = handler; }}

2. CameraActivity

package com.example.camera;

import android.app.Activity;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.RelativeLayout;

import com.example.camera.preview.MySurfacePreview;
import com.example.camera.util.CameraUtil;
import com.example.camera.util.FaceView;
import com.example.camera.util.GoogleDetectListenerImpl;

/**
 * Main camera screen: hosts the preview surface, a shutter button, flash-mode
 * and camera-switch toggles, a tap-to-focus indicator and a face-detection
 * overlay (FaceView).
 */
public class CameraActivity extends Activity {
    private MySurfacePreview mySurfacePreview;
    private ImageButton takeBtn;
    /** Focus indicator; repositioned under the user's tap in showFocusIcon(). */
    private FrameLayout focusLayout;
    private ImageView changeFlashModeIV;
    private ImageView swichCameraIV;
    /** Top settings bar; its height offsets the focus indicator's topMargin. */
    private RelativeLayout settingRl;
    /** Overlay that draws rectangles around detected faces. */
    private FaceView faceView;
    // Half of the focus indicator's measured width/height, cached so the
    // indicator can be centered on the tap point.
    int width;
    int height;

    /**
     * Called when the activity is first created.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        initView();
        bindListenre();
    }

    /** Looks up all views and pre-measures the focus indicator. */
    private void initView() {
        mySurfacePreview = (MySurfacePreview) findViewById(R.id.my_surfaceview);
        mySurfacePreview.setmHandler(mainHandler);
        takeBtn = (ImageButton) findViewById(R.id.take_btn);
        focusLayout = (FrameLayout) findViewById(R.id.camera_focus_layout);
        // Measure with UNSPECIFIED specs so the indicator reports its natural
        // size before it has been laid out (getWidth() would still be 0 here).
        int w = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        int h = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        Log.d("showFocusIcon initview", "w " + w + " h " + h);
        focusLayout.measure(w, h);
        width = focusLayout.getMeasuredWidth() / 2;
        height = focusLayout.getMeasuredHeight() / 2;
        Log.d("showFocusIcon initview", "focusLayout.getMeasuredWidth()/2" + focusLayout.getMeasuredWidth() / 2
                + "focusLayout.getMeasuredHeight()/2" + focusLayout.getMeasuredHeight() / 2);
        changeFlashModeIV = (ImageView) findViewById(R.id.flash_iv);
        swichCameraIV = (ImageView) findViewById(R.id.swich_camera_iv);
        settingRl = (RelativeLayout) findViewById(R.id.setting_rl);
        faceView = (FaceView) findViewById(R.id.face_view);
    }

    /** Wires up shutter, tap-to-focus, flash-mode and camera-switch handlers. */
    private void bindListenre() {
        takeBtn.setOnClickListener(new TakeBtnClickListener());
        mySurfacePreview.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                // Tap-to-focus only makes sense on the back camera; the front
                // camera here has no focus areas configured.
                if (CameraUtil.getInstance().getmCameraInfo().facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                    return gestureDetector.onTouchEvent(event);
                } else {
                    return false;
                }
            }
        });
        changeFlashModeIV.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                CameraUtil.getInstance().setFlashMode(changeFlashModeIV);
            }
        });
        swichCameraIV.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                changeCamera();
            }
        });
    }

    /** Shutter-button handler: triggers a capture. */
    private class TakeBtnClickListener implements View.OnClickListener {
        @Override
        public void onClick(View v) {
            CameraUtil.getInstance().doTakePic();
        }
    }

    // Detects single taps on the preview and turns them into a focus request
    // plus a visual focus indicator at the tap location.
    GestureDetector gestureDetector = new GestureDetector(new GestureDetector.OnGestureListener() {
        @Override
        public boolean onDown(MotionEvent e) {
            Log.d("MyGestureDetector", "onDown");
            // Must return true or the rest of the gesture is ignored.
            return true;
        }

        @Override
        public void onShowPress(MotionEvent e) {
            Log.d("MyGestureDetector", "onShowPress");
        }

        @Override
        public boolean onSingleTapUp(final MotionEvent e) {
            Log.d("MyGestureDetector", "onSingleTapUp");
            CameraUtil.getInstance().autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    if (success) {
                        Log.d("renlei", "聚焦成功");
                    } else {
                        Log.d("renlei", "聚焦失敗");
                    }
                    // Hide the indicator once focusing finishes either way.
                    focusLayout.setVisibility(View.GONE);
                }
            });
            CameraUtil.getInstance().setFocusArea(CameraActivity.this, e);
            showFocusIcon(e);
            return true;
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            Log.d("MyGestureDetector", "onScroll");
            return false;
        }

        @Override
        public void onLongPress(MotionEvent e) {
            Log.d("MyGestureDetector", "onLongPress");
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            Log.d("MyGestureDetector", "onFling");
            return false;
        }
    });

    /**
     * Centers the focus indicator on the tap point and makes it visible.
     * The +0.5 terms round to nearest; settingRl.getHeight() compensates for
     * the settings bar above the preview.
     */
    private void showFocusIcon(MotionEvent e) {
        int x = (int) e.getX();
        int y = (int) e.getY();
        RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) focusLayout.getLayoutParams();
        params.leftMargin = (int) (x - width + 0.5);
        params.topMargin = (int) (y - height + 0.5 + settingRl.getHeight());
        Log.d("showFocusIcon", "x" + x + "y" + y + "params.width" + params.width + "params.height" + params.height);
        // The LayoutParams object is mutated in place and is still attached to
        // the view, so requestLayout() alone is enough to apply the margins.
        focusLayout.requestLayout();
        focusLayout.setVisibility(View.VISIBLE);
        RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) focusLayout.getLayoutParams();
        Log.d("showFocusIcon", "x" + x + "y" + y + "params2.width" + params2.width + "params2.height" + params2.height);
    }

    /**
     * Switches between front and back cameras and updates the toggle icons.
     * The flash toggle is hidden for the front camera (no flash).
     */
    public void changeCamera() {
        CameraUtil.getInstance().doStopPreview();
        // Only two cameras are assumed: (id + 1) % 2 flips between them.
        int newCameraId = (CameraUtil.getInstance().getCameraId() + 1) % 2;
        CameraUtil.getInstance().doOpenCamera(newCameraId);
        CameraUtil.getInstance().doStartPreview(mySurfacePreview.getHolder());
        if (newCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            swichCameraIV.setImageResource(R.drawable.camera_setting_switch_back);
            changeFlashModeIV.setVisibility(View.VISIBLE);
        } else {
            swichCameraIV.setImageResource(R.drawable.camera_setting_switch_front);
            changeFlashModeIV.setVisibility(View.GONE);
        }
    }

    private MainHandler mainHandler = new MainHandler();

    /**
     * Starts Google's built-in face detection if the current camera supports
     * it (maxNumDetectedFaces > 0); results arrive via GoogleDetectListenerImpl.
     */
    private void startGoogleDetect() {
        Camera.Parameters parameters = CameraUtil.getInstance().getCameraParaters();
        Camera camera = CameraUtil.getInstance().getCamera();
        if (parameters.getMaxNumDetectedFaces() > 0) {
            if (faceView != null) {
                faceView.clearFaces();
                faceView.setVisibility(View.VISIBLE);
            }
            camera.setFaceDetectionListener(new GoogleDetectListenerImpl(CameraActivity.this, mainHandler));
            camera.startFaceDetection();
        }
    }

    /**
     * Receives PREVIEW_HAS_STARTED (kick off face detection) and
     * RECEIVE_FACE_MSG (forward detected faces to the overlay).
     */
    private class MainHandler extends Handler {
        @Override
        public void handleMessage(final Message msg) {
            int what = msg.what;
            switch (what) {
                case CameraUtil.PREVIEW_HAS_STARTED:
                    startGoogleDetect();
                    Log.e("renlei110", "開啟人臉識別");
                    break;
                case CameraUtil.RECEIVE_FACE_MSG:
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            Camera.Face[] faces = (Camera.Face[]) msg.obj;
                            faceView.setFaces(faces);
                            Log.e("renlei111", "收到人臉識別的信息");
                        }
                    });
                    break;
            }
            super.handleMessage(msg);
        }
    }
}

3. 保存圖片的工具類 ImageUtil

package com.example.camera.util;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.media.ExifInterface;import android.os.Environment;import android.util.Log;import java.io.File;import java.io.IOException;/** * Created by renlei * DATE: 15-11-5 * Time: 下午7:21 * Email: lei.ren@renren-inc.com */public class ImageUtil { public static void saveImage(File file,byte []data,String filePath){ BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; Bitmap tempBitmap = BitmapFactory.decodeFile(filePath,options); int degrees = getExifRotateDegree(filePath); } public static String getSaveImgePath(){ if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)){ String path = Environment.getExternalStorageDirectory().getPath()+"/renlei/"+System.currentTimeMillis()+".jpg"; File file = new File(path); if (!file.getParentFile().exists()){ file.getParentFile().mkdirs(); } return path; } return System.currentTimeMillis()+".jpg"; } public static int getExifRotateDegree(String path){ try { ExifInterface exifInterface = new ExifInterface(path); int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION,ExifInterface.ORIENTATION_NORMAL); int degrees = getExifRotateDegrees(orientation); Log.d("imageutil degrees",degrees+""); return degrees; } catch (IOException e) { e.printStackTrace(); } return 0; } public static int getExifRotateDegrees(int exifOrientation) { int degrees = 0; switch (exifOrientation) { case ExifInterface.ORIENTATION_NORMAL: degrees = 0; break; case ExifInterface.ORIENTATION_ROTATE_90: degrees = 90; break; case ExifInterface.ORIENTATION_ROTATE_180: degrees = 180; break; case ExifInterface.ORIENTATION_ROTATE_270: degrees = 270; break; } return degrees; }}<span style="color:#CC0000;"></span>

4 CameraUtil 這是最主要的一個類,其中包括了打開相機,開始預覽,結束預覽,設置閃光燈模式,聚焦等一系列的操作,代碼中都有注釋

package com.example.camera.util;import android.content.Context;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.graphics.PixelFormat;import android.graphics.Rect;import android.hardware.Camera;import android.os.Build;import android.util.Log;import android.view.MotionEvent;import android.view.SurfaceHolder;import android.widget.ImageView;import com.example.camera.R;import java.io.File;import java.io.FileOutputStream;import java.io.IOException;import java.util.ArrayList;import java.util.List;/** * Created by renlei * DATE: 15-11-5 * Time: 下午4:57 * Email: lei.ren@renren-inc.com */public class CameraUtil { private Camera mCamera; private static CameraUtil mCameraUtil; private boolean isPreview; private int cameraId = -1; //0表示後置,1表示前置 private Camera.CameraInfo mCameraInfo = new Camera.CameraInfo(); public static final int PREVIEW_HAS_STARTED = 110; public static final int RECEIVE_FACE_MSG = 111; public static synchronized CameraUtil getInstance() { if (mCameraUtil == null) { mCameraUtil = new CameraUtil(); } return mCameraUtil; } /** * 打開相機 * @param cameraId */ public void doOpenCamera(int cameraId) { Log.d("renlei", "open camera"+cameraId); try { this.cameraId = cameraId; mCamera = Camera.open(cameraId); Camera.getCameraInfo(cameraId, mCameraInfo);///這裡的mCamerainfo必須是new出來的,不能是個null } catch (Exception e) { e.printStackTrace(); } } /** * 開啟預覽 * @param holder */ public void doStartPreview(SurfaceHolder holder) { Log.d("CAmerautil","doStartPreview"); if (isPreview) { mCamera.stopPreview(); } if (mCamera != null) { Camera.Parameters parameters = mCamera.getParameters(); parameters.setPictureFormat(PixelFormat.JPEG);//設置照片拍攝後的保存格式 mCamera.setDisplayOrientation(90);//否則方向會有問題 if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {//前置與後置的不一樣,這裡暫時只設置前置的,後置的可以相應的去設置 parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); } 
printSupportPreviewSize(parameters); printSupportPictureSize(parameters); printSupportFocusMode(parameters);// parameters.setPictureSize(parameters.getPreviewSize().width,parameters.getPictureSize().height); //設置的這兩個size必須時支持的size大小,否則時不可以的,會出現setparameters錯誤 parameters.setPreviewSize(parameters.getSupportedPreviewSizes().get(0).width, parameters.getSupportedPreviewSizes().get(0).height); parameters.setPictureSize(parameters.getSupportedPictureSizes().get(0).width, parameters.getSupportedPictureSizes().get(0).height); mCamera.setParameters(parameters); Camera.Parameters mParams = mCamera.getParameters(); Log.i("renlei", "最終設置:PreviewSize--With = " + mParams.getPreviewSize().width + "Height = " + mParams.getPreviewSize().height); Log.i("renlei", "最終設置:PictureSize--With = " + mParams.getPictureSize().width + "Height = " + mParams.getPictureSize().height); try { mCamera.setPreviewDisplay(holder); mCamera.startPreview(); } catch (IOException e) { e.printStackTrace(); } isPreview = true; } } /** * 結束預覽 */ public void doStopPreview() { if (isPreview) { isPreview = false; mCamera.stopPreview(); mCamera.release(); mCamera = null; } } /** * 拍照 */ public void doTakePic() { if (isPreview && mCamera != null) { mCamera.takePicture(new ShutCallBackImpl(), null, new PicCallBacKImpl()); } } /** * 拍照時的動作 * 默認會有咔嚓一聲 */ private class ShutCallBackImpl implements Camera.ShutterCallback { @Override public void onShutter() { } } /** * 拍照後的最主要的返回 */ private class PicCallBacKImpl implements Camera.PictureCallback { @Override public void onPictureTaken(final byte[] data, Camera camera) { isPreview = false; new Thread(new Runnable() { @Override public void run() { String filePath = ImageUtil.getSaveImgePath(); File file = new File(filePath); FileOutputStream fos = null; try { fos = new FileOutputStream(file, true); fos.write(data); ImageUtil.saveImage(file, data, filePath); fos.close(); } catch (Exception e) { e.printStackTrace(); } } }).start(); mCamera.startPreview();//重新開啟預覽 ,不然不能繼續拍照 
isPreview = true; } } /** * 列印支持的previewSizes * * @param params */ public void printSupportPreviewSize(Camera.Parameters params) { List<Camera.Size> previewSizes = params.getSupportedPreviewSizes(); for (int i = 0; i < previewSizes.size(); i++) { Camera.Size size = previewSizes.get(i); Log.i("camerautil", "previewSizes:width = " + size.width + " height = " + size.height); } } /** * 列印支持的pictureSizes * * @param params */ public void printSupportPictureSize(Camera.Parameters params) { List<Camera.Size> pictureSizes = params.getSupportedPictureSizes(); for (int i = 0; i < pictureSizes.size(); i++) { Camera.Size size = pictureSizes.get(i); Log.i("camerautil", "pictureSizes:width = " + size.width + " height = " + size.height); } } /** * 點擊聚焦 * * @param autoFocusCallback * @return */ public boolean autoFocus(Camera.AutoFocusCallback autoFocusCallback) { Log.d("Camerrautil", "autoFouce"); Camera.Parameters parameters = mCamera.getParameters(); List<String> supportMode = parameters.getSupportedFocusModes(); if (supportMode.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { String focusMode = parameters.getFocusMode(); if (!Camera.Parameters.FOCUS_MODE_AUTO.equals(focusMode)) { parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); mCamera.setParameters(parameters); } if (autoFocusCallback != null) { mCamera.autoFocus(autoFocusCallback); } return true; } return false; } /** * 設置聚焦的區域 * @param mContext * @param event */ public void setFocusArea(Context mContext, MotionEvent event) { if (!CameraUtil.isSupportFocusArea() || mCamera == null) { return; } Camera.Parameters parameters = mCamera.getParameters(); int ax = (int) (2000f * event.getRawX() / mContext.getResources().getDisplayMetrics().widthPixels - 1000); int ay = (int) (2000f * event.getRawY() / mContext.getResources().getDisplayMetrics().heightPixels - 1000);// Log.d("renlei",parameters.getMaxNumFocusAreas()+""); int rawx = (int) event.getRawX(); int rawy = (int) event.getRawY(); Log.d("renlei", "widthpix" + 
mContext.getResources().getDisplayMetrics().widthPixels + "heightpix" + mContext.getResources().getDisplayMetrics().heightPixels); Log.d("renlei", "rawx" + rawx + "rawy" + rawy); //防止超出1000 ,-1000的範圍 if (ay > 900) { ay = 900; } else if (ay < -900) { ay = -900; } if (ax < -900) { ax = -900; } else if (ax > 900) { ax = 900; } Log.d("renlei09", "ax" + ax + "ay" + ay); Camera.Area area = new Camera.Area(new Rect(ax - 100, ay - 100, ax + 100, ay + 100), 1000); List<Camera.Area> areas = new ArrayList<Camera.Area>(); areas.add(area); parameters.setFocusAreas(areas); parameters.setMeteringAreas(areas); mCamera.setParameters(parameters); } /** * 是否符合設置對焦區域的SDK版本 * * @return */ public static boolean isSupportFocusArea() { return Build.VERSION.SDK_INT >= 14; } /** * 設置閃光燈的模式 * @param imageView */ public void setFlashMode(ImageView imageView) { Camera.Parameters parameters = mCamera.getParameters(); String flashMode = parameters.getFlashMode(); Log.d("setFlashMode ", flashMode); if (flashMode != null) { if (flashMode.equals(Camera.Parameters.FLASH_MODE_OFF)) { imageView.setImageResource(R.drawable.camera_setting_flash_on_normal); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_ON); } else if (flashMode.equals(Camera.Parameters.FLASH_MODE_ON)) { imageView.setImageResource(R.drawable.camera_setting_flash_auto_normal); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO); } else if (flashMode.equals(Camera.Parameters.FLASH_MODE_AUTO)) { parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); imageView.setImageResource(R.drawable.camera_setting_flash_off_normal); } else { imageView.setImageResource(R.drawable.camera_setting_flash_off_normal); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); } mCamera.setParameters(parameters); } } public int getCameraId() { return cameraId; } /** * 列印支持的聚焦模式 * * @param params */ public void printSupportFocusMode(Camera.Parameters params) { List<String> focusModes = params.getSupportedFocusModes(); for (String mode : 
focusModes) { Log.i("CameraUtil", "focusModes--" + mode); } } public Camera.CameraInfo getmCameraInfo(){ return mCameraInfo; } public Camera getCamera(){ return mCamera; } public Camera.Parameters getCameraParaters(){ if (mCamera!=null){ return mCamera.getParameters(); } return null; }}


通過 Google 的人臉識別接口來實現人臉識別

package com.example.camera.util;import android.content.Context;import android.hardware.Camera;import android.os.Handler;import android.os.Message;import android.util.Log;/** * Created by renlei * DATE: 15-11-10 * Time: 下午4:49 * Email: renlei0109@yeah.net */public class GoogleDetectListenerImpl implements Camera.FaceDetectionListener{ private Handler mHandler;///用於向主線程發送信息 private Context mContext; public GoogleDetectListenerImpl(Context mContext,Handler mHandler) { this.mHandler = mHandler; this.mContext = mContext; } @Override public void onFaceDetection(Camera.Face[] faces, Camera camera) { if (faces!=null){ Message msg = mHandler.obtainMessage(); msg.what = CameraUtil.RECEIVE_FACE_MSG; msg.obj = faces; msg.sendToTarget(); } }}

識別出來的人臉區域,通過FaceView來顯示

package com.example.camera.util;import android.content.Context;import android.graphics.*;import android.graphics.drawable.Drawable;import android.hardware.Camera;import android.util.AttributeSet;import android.util.Log;import android.view.View;import android.widget.ImageView;import com.example.camera.R;/** * Created by renlei * DATE: 15-11-11 * Time: 上午11:34 * Email: renlei0109@yeah.net */public class FaceView extends ImageView { private Context mContext; private Camera.Face[] mFaces; private Matrix mMatrix = new Matrix(); private boolean mirror; private Paint mLinePaint; private RectF rectF = new RectF(); private Drawable mFaceIndicator = null; public FaceView(Context context, AttributeSet attrs) { super(context, attrs); initPaint(); this.mContext = context; mFaceIndicator = mContext.getResources().getDrawable(R.drawable.ic_face_find_2); } public void setFaces(Camera.Face[] faces) { this.mFaces = faces; Log.d("Faceview", "invalidate");// ((View)getParent()).invalidate(); invalidate(); /*postInvalidate(); invalidate(); forceLayout(); requestLayout();*/ } public void clearFaces(){ mFaces = null; invalidate(); } @Override protected void onDraw(Canvas canvas) {// Log.d("Faceview", "onDraw"); if(mFaces == null || mFaces.length < 1){ return; } if (mFaces != null) { Log.d("renlei","onDraw"+mFaces.length); int id = CameraUtil.getInstance().getCameraId(); mirror = (id == Camera.CameraInfo.CAMERA_FACING_FRONT); canvas.save(); prepareMatrix(); mMatrix.postRotate(0); //Matrix.postRotate默認是順時針 canvas.rotate(-0); //Canvas.rotate()默認是逆時針 for (int i = 0; i < mFaces.length; i++) { rectF.set(mFaces[i].rect); mMatrix.mapRect(rectF); mFaceIndicator.setBounds(Math.round(rectF.left), Math.round(rectF.top), Math.round(rectF.right), Math.round(rectF.bottom)); mFaceIndicator.draw(canvas); } canvas.restore(); } super.onDraw(canvas); } /** * <p>Here is the matrix to convert driver coordinates to View coordinates * in pixels.</p> * <pre> * Matrix matrix = new Matrix(); * CameraInfo info = 
CameraHolder.instance().getCameraInfo()[cameraId]; * // Need mirror for front camera. * boolean mirror = (info.facing == CameraInfo.CAMERA_FACING_FRONT); * matrix.setScale(mirror ? -1 : 1, 1); * // This is the value for android.hardware.Camera.setDisplayOrientation. * matrix.postRotate(displayOrientation); * // Camera driver coordinates range from (-1000, -1000) to (1000, 1000). * // UI coordinates range from (0, 0) to (width, height). * matrix.postScale(view.getWidth() / 2000f, view.getHeight() / 2000f); * matrix.postTranslate(view.getWidth() / 2f, view.getHeight() / 2f); * </pre> */ private void prepareMatrix() { mMatrix.setScale(mirror ? -1 : 1, 1); mMatrix.postRotate(9); mMatrix.postScale(getWidth() / 2000f, getHeight() / 2000f); mMatrix.postTranslate(getWidth() / 2f, getHeight() / 2f); } private void initPaint() { mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);// int color = Color.rgb(0, 150, 255); int color = Color.rgb(98, 212, 68);// mLinePaint.setColor(Color.RED); mLinePaint.setColor(color); mLinePaint.setStyle(Paint.Style.STROKE); mLinePaint.setStrokeWidth(5f); mLinePaint.setAlpha(180); }}


工程下載地址:

http://download.csdn.net/detail/renlei0109/9280637

相關焦點

  • Python中如何利用Opencv打開視頻或圖像並用PyQt控制項顯示
    OpenCV是一個基於BSD許可(開源)發行的跨平臺計算機視覺庫,可以運行在Linux、Windows、Android和Mac OS作業系統上,使用起來十分方便,把它與PyQt結合起來,就可以順利的操作視頻、圖像了。具體安裝請自行百度,這裡介紹使用方法。
  • Android壓力測試Monkey工具
    下面是一個更為典型的命令行示例,它啟動指定的應用程式,並向其發送1500個偽隨機事件:$ adb shell monkey -p your.package.name -v 1500$ monkey -p(package的意思) 指定文件名 -v(測試的次數和頻率) number(次數)四、Monkey測試的一個實例
  • opencv-python獲取圖像:面向對象與面向過程
    這裡需要注意以下,opencv讀取圖片默認通道為BGR的格式,當在其他UI用戶界面顯示圖像時注意轉換一下通道順序,例如BGR轉換成RGB:Image1=cv2.cvtColor(image, cv2.COLOR_BGR2RGB)下面讀取一張圖片並顯示
  • PS第二十課:濾鏡——camera raw濾鏡調色
    camera raw濾鏡調色cameraraw濾鏡調色縮放工具和抓手工具②:是調節顯示的大小和位置,以便細節的調節。調整色前調色後這種調色要確定圖像的基本色調模糊濾鏡模糊的主要是選取或者圖像柔和,以像素點為單位,是圖片產生柔和的效果,掩蓋圖像的缺陷或達到特殊的效果。
  • AP0100CS圖像信號處理器之功能詳解(獨家整理)
    AP0100CS集成了Aptina的先進圖像處理管道(pipeline),具有令人驚嘆的視頻和低光照性能。藉助用於寬動態範圍圖像再現(rendering)的高級局部色調映射(Advanced Local Tone Mapping, ALTM)功能,即使在非常困難的高對比度照明條件下也能夠生成高質量的視頻。
  • 從一個億像素看camera sensor的發展
    小米最新的1個億將camera sensor的熱度推到了新高.而camera是其中少有的持續的顯性賣點.這也讓相關技術快速的進行迭代。Image sensor雖然是一個傳統的半導體器件,但也在巨大的市場需求下呈現了技術更新的快速化和多元化。
  • 約束布局不顯示 - CSDN
    utm_source=blogxgwz7為了提升效率,布局時使用ConstraintLayout約束布局,遇到問題:一行有多個文本TextView時,TextView內容疊加顯示。處理前與處理後效果對比:
  • android啟動頁設計專題及常見問題 - CSDN
    ><FrameLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:tools="http://schemas.android.com/tools" android:layout_width="match_parent" android:layout_height="match_parent
  • 這個AI系統僅憑光回聲就能得到3D圖像
    這幅圖像是由反射光所形成的亮點和暗點的集合。相比之下,一種名為「飛行時間相機(time-of-flight camera,ToF camera)」的設備可以為圖像加上深度信息,通過計算物體反射回來的一束光到達不同像素的精確時間來生成 3D 圖像。最近幾十年來,研究人員創造了很多精妙的方式,來使用單像素檢測器捕捉圖像。
  • 英語口語:Dashboard camera是什麼意思?
    英語口語:Dashboard camera是什麼意思?英語詞彙量大的學生,見到英語dashboard camera的第一個反應基本都是中文:行車記錄儀。見到英語dashboard camera為什麼不能把你學過的英語用起來,說出更多英語?For example:1.
  • Android OpenCV(五十一):圖像插值
    圖像插值何為插值?插值是離散函數逼近的重要方法,利用它可通過函數在有限個點處的取值狀況,估算出函數在其他點處的近似值。何為圖像插值?從低解析度圖像生成高解析度圖像的過程(放大),用以恢復圖像中所丟失的信息。
  • 如何從失焦的圖像中恢復景深並將圖像變清晰?
    我們現在才進入今天文章的核心二、把失焦模糊的圖像變清晰今天給大家介紹的是以色列理工大學的Anat Levin教授的經典論文Levin et al., 「Image and depth from a conventional camera with a coded aperture,」 SIGGRAPH
  • Android P 行為變更
    在開發者預覽版 1 (DP1) 中,您的應用可以繼續訪問此類限制接口,不過平臺會通過顯示 Toast 以及記錄日誌消息引起開發者注意。若您的應用顯示此類 Toast,您必須棄用限制接口並改用另一種實現策略。若您認為沒有可行的替代接口,請在本文下方留言,要求我們重新考慮該限制是否合理。
  • android系統桌面設置為背景的方法
    packagecom.yao_guet;importandroid.app.WallpaperManager;importandroid.content.Context;importandroid.os.IBinder;importandroid.util.AttributeSet;importandroid.util.Log
  • 15個優秀的開源項目,讓你輕鬆應對Android開發
    【公眾號回復「1024」,送你一個特別推送】聲明原創|本文為codeGoogler授權發布,未經允許請勿轉載一、huabanDemo-android
  • 360手機專家解密Android惡意軟體7大技術趨勢
    360手機安全專家 劉敦俊  IDC統計顯示,全球Android智慧型手機達到了4.971億臺,市場佔有率達到79.3%,位居第一。有數據顯示,2012-2013年安卓新增惡意軟體月度數量增長明顯。而二次打包傳播惡意軟體漸成規模。劉敦俊介紹,通過聚類分析發現大量熱門應用被植入惡意代碼活惡意代碼或惡意廣告,試圖混淆視聽,引誘用戶進行下載。在惡意應用的出口問題上,應用商店依舊是主要傳播途徑。
  • 三星全息顯示新專利曝光:用眼球追蹤取代傳統視差法,降低功耗
    11月21日消息,世界智慧財產權組織(WIPO)曝光了三星的一項專利,其中顯示三星似乎在研發一款高清全息顯示屏,其特點是功耗大幅降低並且能同時生成兩個視覺區域。專利中的屏顯技術將會通過眼球追蹤數據,投射兩個或更多的圖像、物體或場景,用戶在一定追蹤範圍內連續移動時,圖像的每張連續幀能夠持續以高解析度顯示。這項專利的一大特點是,它或被用於強化現有的顯示屏,提高其解析度、降低性能需求,原理是為用戶的特定視角生成對應的圖像,而不是一次性生成只有一個視角的圖像。
  • 日本團隊研發世界首款視頻會議實時換臉應用「xpression camera」
    東京2020年11月10日 /美通社/ -- 最近,日本EmbodyMe團隊發布了一款適用於疫情中各種視頻會議的實時換裝變臉應用「xpression camera」。
  • 深度學習下的醫學圖像分析(四)
    醫學圖像組成醫學圖像有四個關鍵成分——像素深度、光度表示、元數據和像素數據。(單色或彩色圖片)顯示。而核醫學圖像,比如正電子發射斷層圖像(PET)和單光子發射斷層圖像(SPECT),通常都是以彩色映射或調色板來顯示的。 「元數據」是用於描述圖形象的信息。它可能看起來會比較奇怪,但是在任何一個文件格式中,除了像素數據之外,圖像還有一些其他的相關信息。這