最近幾天研究了一下 Android 自定義相機的實現,主要通過 SurfaceView 跟 Camera 實現了拍照、預覽和圖片的保存。
能夠進行前後攝像頭的切換、自動和手動聚焦、設置閃光燈模式、人臉識別等。可能有一些不是特別好的地方,歡迎指出。
先上效果圖
1. 圖像預覽的 SurfaceView
package com.example.camera.preview;import android.content.Context;import android.graphics.PixelFormat;import android.hardware.Camera;import android.os.Handler;import android.util.AttributeSet;import android.view.SurfaceHolder;import android.view.SurfaceView;import com.example.camera.util.CameraUtil;/** * Created by renlei * DATE: 15-11-5 * Time: 下午4:52 */public class MySurfacePreview extends SurfaceView implements SurfaceHolder.Callback { private SurfaceHolder surfaceHolder; private Handler mHandler; public MySurfacePreview(Context context, AttributeSet attrs) { super(context, attrs); surfaceHolder = getHolder(); surfaceHolder.setFormat(PixelFormat.TRANSPARENT);//translucent半透明 transparent透明 surfaceHolder.addCallback(this); } @Override public void surfaceCreated(SurfaceHolder holder) { CameraUtil.getInstance().doOpenCamera(Camera.CameraInfo.CAMERA_FACING_BACK); } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { CameraUtil.getInstance().doStartPreview(surfaceHolder); if (mHandler != null) { mHandler.postDelayed(new Runnable() { @Override public void run() { mHandler.sendEmptyMessage(CameraUtil.PREVIEW_HAS_STARTED); } }, 1000); } } @Override public void surfaceDestroyed(SurfaceHolder holder) { CameraUtil.getInstance().doStopPreview(); } public void setmHandler(Handler handler) { this.mHandler = handler; }}
2CameraActivitypackage com.example.camera;import android.app.Activity;import android.hardware.Camera;import android.os.Bundle;import android.os.Handler;import android.os.Message;import android.util.Log;import android.view.GestureDetector;import android.view.MotionEvent;import android.view.View;import android.widget.FrameLayout;import android.widget.ImageButton;import android.widget.ImageView;import android.widget.RelativeLayout;import com.example.camera.preview.MySurfacePreview;import com.example.camera.util.CameraUtil;import com.example.camera.util.FaceView;import com.example.camera.util.GoogleDetectListenerImpl;public class CameraActivity extends Activity { private MySurfacePreview mySurfacePreview; private ImageButton takeBtn; private FrameLayout focusLayout; private ImageView changeFlashModeIV; private ImageView swichCameraIV; private RelativeLayout settingRl; private FaceView faceView; int width; int height; /** * Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); initView(); bindListenre(); } private void initView() { mySurfacePreview = (MySurfacePreview) findViewById(R.id.my_surfaceview); mySurfacePreview.setmHandler(mainHandler); takeBtn = (ImageButton) findViewById(R.id.take_btn); focusLayout = (FrameLayout) findViewById(R.id.camera_focus_layout); int w = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED); int h = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED); Log.d("showFocusIcon initview", "w " + w + " h " + h); focusLayout.measure(w, h); width = focusLayout.getMeasuredWidth() / 2; height = focusLayout.getMeasuredHeight() / 2; Log.d("showFocusIcon initview", "focusLayout.getMeasuredWidth()/2" + focusLayout.getMeasuredWidth() / 2 + "focusLayout.getMeasuredHeight()/2" + focusLayout.getMeasuredHeight() / 2); changeFlashModeIV = (ImageView) findViewById(R.id.flash_iv); swichCameraIV = (ImageView) 
findViewById(R.id.swich_camera_iv); settingRl = (RelativeLayout)findViewById(R.id.setting_rl); faceView = (FaceView)findViewById(R.id.face_view); } private void bindListenre() { takeBtn.setOnClickListener(new TakeBtnClickListener()); mySurfacePreview.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { if (CameraUtil.getInstance().getmCameraInfo().facing == Camera.CameraInfo.CAMERA_FACING_BACK) { return gestureDetector.onTouchEvent(event); }else { return false; } } }); changeFlashModeIV.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { CameraUtil.getInstance().setFlashMode(changeFlashModeIV); } }); swichCameraIV.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { changeCamera(); } }); } private class TakeBtnClickListener implements View.OnClickListener { @Override public void onClick(View v) { CameraUtil.getInstance().doTakePic(); } } GestureDetector gestureDetector = new GestureDetector(new GestureDetector.OnGestureListener() { @Override public boolean onDown(MotionEvent e) { Log.d("MyGestureDetector", "onDown"); return true; } @Override public void onShowPress(MotionEvent e) { Log.d("MyGestureDetector", "onShowPress"); } @Override public boolean onSingleTapUp(final MotionEvent e) { Log.d("MyGestureDetector", "onSingleTapUp"); CameraUtil.getInstance().autoFocus(new Camera.AutoFocusCallback() { @Override public void onAutoFocus(boolean success, Camera camera) { if (success) { Log.d("renlei", "聚焦成功"); } else { Log.d("renlei", "聚焦失敗"); } focusLayout.setVisibility(View.GONE); } }); CameraUtil.getInstance().setFocusArea(CameraActivity.this, e); showFocusIcon(e); return true; } @Override public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { Log.d("MyGestureDetector", "onScroll"); return false; } @Override public void onLongPress(MotionEvent e) { Log.d("MyGestureDetector", "onLongPress"); } @Override 
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { Log.d("MyGestureDetector", "onFling"); return false; } }); private void showFocusIcon(MotionEvent e) { int x = (int) e.getX(); int y = (int) e.getY(); RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) focusLayout.getLayoutParams(); params.leftMargin = (int) (x - width + 0.5); params.topMargin = (int) (y - height + 0.5+settingRl.getHeight());// Log.d("showFocusIcon","focusLayout.getMeasuredWidth()/2"+focusLayout.getMeasuredWidth()/2+"focusLayout.getMeasuredHeight()/2"+focusLayout.getMeasuredHeight()/2);// Log.d("showFocusIcon","focusLayout.getWidth()/2"+focusLayout.getWidth()/2+"focusLayout.getHeight()/2"+focusLayout.getHeight()/2); Log.d("showFocusIcon", "x" + x + "y" + y + "params.width" + params.width + "params.height" + params.height);// focusLayout.setLayoutParams(params); focusLayout.requestLayout();// focusLayout.setLayoutParams(params); focusLayout.setVisibility(View.VISIBLE); RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) focusLayout.getLayoutParams(); Log.d("showFocusIcon", "x" + x + "y" + y + "params2.width" + params2.width + "params2.height" + params2.height); } public void changeCamera() { CameraUtil.getInstance().doStopPreview(); int newCameraId = (CameraUtil.getInstance().getCameraId() + 1) % 2; CameraUtil.getInstance().doOpenCamera(newCameraId); CameraUtil.getInstance().doStartPreview(mySurfacePreview.getHolder()); if (newCameraId == Camera.CameraInfo.CAMERA_FACING_BACK){ swichCameraIV.setImageResource(R.drawable.camera_setting_switch_back); changeFlashModeIV .setVisibility(View.VISIBLE); }else { swichCameraIV.setImageResource(R.drawable.camera_setting_switch_front); changeFlashModeIV.setVisibility(View.GONE); } } private MainHandler mainHandler = new MainHandler(); private void startGoogleDetect(){ Camera.Parameters parameters = CameraUtil.getInstance().getCameraParaters(); Camera camera = 
CameraUtil.getInstance().getCamera(); if (parameters.getMaxNumDetectedFaces()>0){ if(faceView != null){ faceView.clearFaces(); faceView.setVisibility(View.VISIBLE); } camera.setFaceDetectionListener(new GoogleDetectListenerImpl(CameraActivity.this,mainHandler)); camera.startFaceDetection(); } } private class MainHandler extends Handler{ @Override public void handleMessage(final Message msg) { int what = msg.what; switch (what){ case CameraUtil.PREVIEW_HAS_STARTED: startGoogleDetect(); Log.e("renlei110","開啟人臉識別"); break; case CameraUtil.RECEIVE_FACE_MSG: runOnUiThread(new Runnable() { @Override public void run() { Camera.Face[]faces = (Camera.Face[]) msg.obj; faceView.setFaces(faces); Log.e("renlei111","收到人臉識別的信息"); } }); break; } super.handleMessage(msg); } }}
3保存圖片的工具類ImageUtilpackage com.example.camera.util;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.media.ExifInterface;import android.os.Environment;import android.util.Log;import java.io.File;import java.io.IOException;/** * Created by renlei * DATE: 15-11-5 * Time: 下午7:21 * Email: lei.ren@renren-inc.com */public class ImageUtil { public static void saveImage(File file,byte []data,String filePath){ BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; Bitmap tempBitmap = BitmapFactory.decodeFile(filePath,options); int degrees = getExifRotateDegree(filePath); } public static String getSaveImgePath(){ if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)){ String path = Environment.getExternalStorageDirectory().getPath()+"/renlei/"+System.currentTimeMillis()+".jpg"; File file = new File(path); if (!file.getParentFile().exists()){ file.getParentFile().mkdirs(); } return path; } return System.currentTimeMillis()+".jpg"; } public static int getExifRotateDegree(String path){ try { ExifInterface exifInterface = new ExifInterface(path); int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION,ExifInterface.ORIENTATION_NORMAL); int degrees = getExifRotateDegrees(orientation); Log.d("imageutil degrees",degrees+""); return degrees; } catch (IOException e) { e.printStackTrace(); } return 0; } public static int getExifRotateDegrees(int exifOrientation) { int degrees = 0; switch (exifOrientation) { case ExifInterface.ORIENTATION_NORMAL: degrees = 0; break; case ExifInterface.ORIENTATION_ROTATE_90: degrees = 90; break; case ExifInterface.ORIENTATION_ROTATE_180: degrees = 180; break; case ExifInterface.ORIENTATION_ROTATE_270: degrees = 270; break; } return degrees; }}<span style="color:#CC0000;"></span>
4 CameraUtil 這是最主要的一個類,其中包括了打開相機,開始預覽,結束預覽,設置閃光燈模式,聚焦等一系列的操作,代碼中都有注釋package com.example.camera.util;import android.content.Context;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.graphics.PixelFormat;import android.graphics.Rect;import android.hardware.Camera;import android.os.Build;import android.util.Log;import android.view.MotionEvent;import android.view.SurfaceHolder;import android.widget.ImageView;import com.example.camera.R;import java.io.File;import java.io.FileOutputStream;import java.io.IOException;import java.util.ArrayList;import java.util.List;/** * Created by renlei * DATE: 15-11-5 * Time: 下午4:57 * Email: lei.ren@renren-inc.com */public class CameraUtil { private Camera mCamera; private static CameraUtil mCameraUtil; private boolean isPreview; private int cameraId = -1; //0表示後置,1表示前置 private Camera.CameraInfo mCameraInfo = new Camera.CameraInfo(); public static final int PREVIEW_HAS_STARTED = 110; public static final int RECEIVE_FACE_MSG = 111; public static synchronized CameraUtil getInstance() { if (mCameraUtil == null) { mCameraUtil = new CameraUtil(); } return mCameraUtil; } /** * 打開相機 * @param cameraId */ public void doOpenCamera(int cameraId) { Log.d("renlei", "open camera"+cameraId); try { this.cameraId = cameraId; mCamera = Camera.open(cameraId); Camera.getCameraInfo(cameraId, mCameraInfo);///這裡的mCamerainfo必須是new出來的,不能是個null } catch (Exception e) { e.printStackTrace(); } } /** * 開啟預覽 * @param holder */ public void doStartPreview(SurfaceHolder holder) { Log.d("CAmerautil","doStartPreview"); if (isPreview) { mCamera.stopPreview(); } if (mCamera != null) { Camera.Parameters parameters = mCamera.getParameters(); parameters.setPictureFormat(PixelFormat.JPEG);//設置照片拍攝後的保存格式 mCamera.setDisplayOrientation(90);//否則方向會有問題 if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {//前置與後置的不一樣,這裡暫時只設置前置的,後置的可以相應的去設置 parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); 
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); } printSupportPreviewSize(parameters); printSupportPictureSize(parameters); printSupportFocusMode(parameters);// parameters.setPictureSize(parameters.getPreviewSize().width,parameters.getPictureSize().height); //設置的這兩個size必須時支持的size大小,否則時不可以的,會出現setparameters錯誤 parameters.setPreviewSize(parameters.getSupportedPreviewSizes().get(0).width, parameters.getSupportedPreviewSizes().get(0).height); parameters.setPictureSize(parameters.getSupportedPictureSizes().get(0).width, parameters.getSupportedPictureSizes().get(0).height); mCamera.setParameters(parameters); Camera.Parameters mParams = mCamera.getParameters(); Log.i("renlei", "最終設置:PreviewSize--With = " + mParams.getPreviewSize().width + "Height = " + mParams.getPreviewSize().height); Log.i("renlei", "最終設置:PictureSize--With = " + mParams.getPictureSize().width + "Height = " + mParams.getPictureSize().height); try { mCamera.setPreviewDisplay(holder); mCamera.startPreview(); } catch (IOException e) { e.printStackTrace(); } isPreview = true; } } /** * 結束預覽 */ public void doStopPreview() { if (isPreview) { isPreview = false; mCamera.stopPreview(); mCamera.release(); mCamera = null; } } /** * 拍照 */ public void doTakePic() { if (isPreview && mCamera != null) { mCamera.takePicture(new ShutCallBackImpl(), null, new PicCallBacKImpl()); } } /** * 拍照時的動作 * 默認會有咔嚓一聲 */ private class ShutCallBackImpl implements Camera.ShutterCallback { @Override public void onShutter() { } } /** * 拍照後的最主要的返回 */ private class PicCallBacKImpl implements Camera.PictureCallback { @Override public void onPictureTaken(final byte[] data, Camera camera) { isPreview = false; new Thread(new Runnable() { @Override public void run() { String filePath = ImageUtil.getSaveImgePath(); File file = new File(filePath); FileOutputStream fos = null; try { fos = new FileOutputStream(file, true); fos.write(data); ImageUtil.saveImage(file, data, filePath); fos.close(); } catch (Exception e) { e.printStackTrace(); 
} } }).start(); mCamera.startPreview();//重新開啟預覽 ,不然不能繼續拍照 isPreview = true; } } /** * 列印支持的previewSizes * * @param params */ public void printSupportPreviewSize(Camera.Parameters params) { List<Camera.Size> previewSizes = params.getSupportedPreviewSizes(); for (int i = 0; i < previewSizes.size(); i++) { Camera.Size size = previewSizes.get(i); Log.i("camerautil", "previewSizes:width = " + size.width + " height = " + size.height); } } /** * 列印支持的pictureSizes * * @param params */ public void printSupportPictureSize(Camera.Parameters params) { List<Camera.Size> pictureSizes = params.getSupportedPictureSizes(); for (int i = 0; i < pictureSizes.size(); i++) { Camera.Size size = pictureSizes.get(i); Log.i("camerautil", "pictureSizes:width = " + size.width + " height = " + size.height); } } /** * 點擊聚焦 * * @param autoFocusCallback * @return */ public boolean autoFocus(Camera.AutoFocusCallback autoFocusCallback) { Log.d("Camerrautil", "autoFouce"); Camera.Parameters parameters = mCamera.getParameters(); List<String> supportMode = parameters.getSupportedFocusModes(); if (supportMode.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { String focusMode = parameters.getFocusMode(); if (!Camera.Parameters.FOCUS_MODE_AUTO.equals(focusMode)) { parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); mCamera.setParameters(parameters); } if (autoFocusCallback != null) { mCamera.autoFocus(autoFocusCallback); } return true; } return false; } /** * 設置聚焦的區域 * @param mContext * @param event */ public void setFocusArea(Context mContext, MotionEvent event) { if (!CameraUtil.isSupportFocusArea() || mCamera == null) { return; } Camera.Parameters parameters = mCamera.getParameters(); int ax = (int) (2000f * event.getRawX() / mContext.getResources().getDisplayMetrics().widthPixels - 1000); int ay = (int) (2000f * event.getRawY() / mContext.getResources().getDisplayMetrics().heightPixels - 1000);// Log.d("renlei",parameters.getMaxNumFocusAreas()+""); int rawx = (int) event.getRawX(); int rawy = 
(int) event.getRawY(); Log.d("renlei", "widthpix" + mContext.getResources().getDisplayMetrics().widthPixels + "heightpix" + mContext.getResources().getDisplayMetrics().heightPixels); Log.d("renlei", "rawx" + rawx + "rawy" + rawy); //防止超出1000 ,-1000的範圍 if (ay > 900) { ay = 900; } else if (ay < -900) { ay = -900; } if (ax < -900) { ax = -900; } else if (ax > 900) { ax = 900; } Log.d("renlei09", "ax" + ax + "ay" + ay); Camera.Area area = new Camera.Area(new Rect(ax - 100, ay - 100, ax + 100, ay + 100), 1000); List<Camera.Area> areas = new ArrayList<Camera.Area>(); areas.add(area); parameters.setFocusAreas(areas); parameters.setMeteringAreas(areas); mCamera.setParameters(parameters); } /** * 是否符合設置對焦區域的SDK版本 * * @return */ public static boolean isSupportFocusArea() { return Build.VERSION.SDK_INT >= 14; } /** * 設置閃光燈的模式 * @param imageView */ public void setFlashMode(ImageView imageView) { Camera.Parameters parameters = mCamera.getParameters(); String flashMode = parameters.getFlashMode(); Log.d("setFlashMode ", flashMode); if (flashMode != null) { if (flashMode.equals(Camera.Parameters.FLASH_MODE_OFF)) { imageView.setImageResource(R.drawable.camera_setting_flash_on_normal); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_ON); } else if (flashMode.equals(Camera.Parameters.FLASH_MODE_ON)) { imageView.setImageResource(R.drawable.camera_setting_flash_auto_normal); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO); } else if (flashMode.equals(Camera.Parameters.FLASH_MODE_AUTO)) { parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); imageView.setImageResource(R.drawable.camera_setting_flash_off_normal); } else { imageView.setImageResource(R.drawable.camera_setting_flash_off_normal); parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); } mCamera.setParameters(parameters); } } public int getCameraId() { return cameraId; } /** * 列印支持的聚焦模式 * * @param params */ public void printSupportFocusMode(Camera.Parameters params) { List<String> focusModes = 
params.getSupportedFocusModes(); for (String mode : focusModes) { Log.i("CameraUtil", "focusModes--" + mode); } } public Camera.CameraInfo getmCameraInfo(){ return mCameraInfo; } public Camera getCamera(){ return mCamera; } public Camera.Parameters getCameraParaters(){ if (mCamera!=null){ return mCamera.getParameters(); } return null; }}
通過 Google 的人臉識別接口來實現人臉識別
package com.example.camera.util;import android.content.Context;import android.hardware.Camera;import android.os.Handler;import android.os.Message;import android.util.Log;/** * Created by renlei * DATE: 15-11-10 * Time: 下午4:49 * Email: renlei0109@yeah.net */public class GoogleDetectListenerImpl implements Camera.FaceDetectionListener{ private Handler mHandler;///用於向主線程發送信息 private Context mContext; public GoogleDetectListenerImpl(Context mContext,Handler mHandler) { this.mHandler = mHandler; this.mContext = mContext; } @Override public void onFaceDetection(Camera.Face[] faces, Camera camera) { if (faces!=null){ Message msg = mHandler.obtainMessage(); msg.what = CameraUtil.RECEIVE_FACE_MSG; msg.obj = faces; msg.sendToTarget(); } }}
識別出來的人臉區域,通過FaceView來顯示package com.example.camera.util;import android.content.Context;import android.graphics.*;import android.graphics.drawable.Drawable;import android.hardware.Camera;import android.util.AttributeSet;import android.util.Log;import android.view.View;import android.widget.ImageView;import com.example.camera.R;/** * Created by renlei * DATE: 15-11-11 * Time: 上午11:34 * Email: renlei0109@yeah.net */public class FaceView extends ImageView { private Context mContext; private Camera.Face[] mFaces; private Matrix mMatrix = new Matrix(); private boolean mirror; private Paint mLinePaint; private RectF rectF = new RectF(); private Drawable mFaceIndicator = null; public FaceView(Context context, AttributeSet attrs) { super(context, attrs); initPaint(); this.mContext = context; mFaceIndicator = mContext.getResources().getDrawable(R.drawable.ic_face_find_2); } public void setFaces(Camera.Face[] faces) { this.mFaces = faces; Log.d("Faceview", "invalidate");// ((View)getParent()).invalidate(); invalidate(); /*postInvalidate(); invalidate(); forceLayout(); requestLayout();*/ } public void clearFaces(){ mFaces = null; invalidate(); } @Override protected void onDraw(Canvas canvas) {// Log.d("Faceview", "onDraw"); if(mFaces == null || mFaces.length < 1){ return; } if (mFaces != null) { Log.d("renlei","onDraw"+mFaces.length); int id = CameraUtil.getInstance().getCameraId(); mirror = (id == Camera.CameraInfo.CAMERA_FACING_FRONT); canvas.save(); prepareMatrix(); mMatrix.postRotate(0); //Matrix.postRotate默認是順時針 canvas.rotate(-0); //Canvas.rotate()默認是逆時針 for (int i = 0; i < mFaces.length; i++) { rectF.set(mFaces[i].rect); mMatrix.mapRect(rectF); mFaceIndicator.setBounds(Math.round(rectF.left), Math.round(rectF.top), Math.round(rectF.right), Math.round(rectF.bottom)); mFaceIndicator.draw(canvas); } canvas.restore(); } super.onDraw(canvas); } /** * <p>Here is the matrix to convert driver coordinates to View coordinates * in pixels.</p> * <pre> * Matrix matrix = new Matrix(); 
* CameraInfo info = CameraHolder.instance().getCameraInfo()[cameraId]; * // Need mirror for front camera. * boolean mirror = (info.facing == CameraInfo.CAMERA_FACING_FRONT); * matrix.setScale(mirror ? -1 : 1, 1); * // This is the value for android.hardware.Camera.setDisplayOrientation. * matrix.postRotate(displayOrientation); * // Camera driver coordinates range from (-1000, -1000) to (1000, 1000). * // UI coordinates range from (0, 0) to (width, height). * matrix.postScale(view.getWidth() / 2000f, view.getHeight() / 2000f); * matrix.postTranslate(view.getWidth() / 2f, view.getHeight() / 2f); * </pre> */ private void prepareMatrix() { mMatrix.setScale(mirror ? -1 : 1, 1); mMatrix.postRotate(9); mMatrix.postScale(getWidth() / 2000f, getHeight() / 2000f); mMatrix.postTranslate(getWidth() / 2f, getHeight() / 2f); } private void initPaint() { mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);// int color = Color.rgb(0, 150, 255); int color = Color.rgb(98, 212, 68);// mLinePaint.setColor(Color.RED); mLinePaint.setColor(color); mLinePaint.setStyle(Paint.Style.STROKE); mLinePaint.setStrokeWidth(5f); mLinePaint.setAlpha(180); }}
http://download.csdn.net/detail/renlei0109/9280637