Building an Android Face Detection App with OpenCV
This article walks you step by step through using OpenCV to build a face detection app that runs on an Android phone. In fact it can detect more than faces: with the corresponding cascade classifiers it can also detect noses, mouths, eyes and ears (the demo below detects faces and noses). A lot of effort went into this article, so please like, bookmark and follow.
No pictures, no proof, so let me first show you the result of running the app.
Below, I will walk you through how to build the app shown above, step by step!
Step 1: Download and install Android Studio.
To make sure you can download the same version of Android Studio that I used, I have uploaded the installer to Weiyun. The link is: tools
Step 3: Create a new Android app project.
Step 4: Download OpenCV.
Download the OpenCV 4.5.4 Android SDK (opencv-4.5.4-android-sdk) and extract the archive after downloading.
Step 5: Import OpenCV.
Copy the sdk folder under opencv-4.5.4-android-sdk\OpenCV-android-sdk into the Android project you created in step 3, i.e. under D:\programming\MyApplication from the step 3 screenshot, and then rename the sdk folder to openCVsdk.
Open "Project" -> "settings.gradle" and add the line include ':openCVsdk' to the file.
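For reference, a minimal sketch of the include lines after this edit (whatever else Android Studio generated in settings.gradle stays as it is; the project name here is only a placeholder):

rootProject.name = "My Application"
include ':app'
include ':openCVsdk'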
Open "Project" -> "openCVsdk" -> "build.gradle".
Change apply plugin: 'kotlin-android' to //apply plugin: 'kotlin-android', i.e. comment that line out.
Change compileSdkVersion to 31, minSdkVersion to 21 and targetSdkVersion to 31.
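A sketch of the affected fragment of openCVsdk/build.gradle after these edits (only the relevant lines are shown; the exact surrounding contents depend on the OpenCV release):

apply plugin: 'com.android.library'
//apply plugin: 'kotlin-android'

android {
    compileSdkVersion 31

    defaultConfig {
        minSdkVersion 21
        targetSdkVersion 31
    }
}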
Create a new folder named jniLibs under app\src in the Android project folder, then copy everything from opencv-4.5.4-android-sdk\OpenCV-android-sdk\sdk\native\staticlibs in the extracted OpenCV folder into the jniLibs folder. (If the app later fails to find the OpenCV native library at runtime, copy the contents of sdk\native\libs instead; that folder holds the shared libopencv_java4.so libraries.)
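Two related points are easy to miss here. First, for the org.opencv imports in the MainActivity code below to compile, the app module has to depend on the openCVsdk module. Second, if Gradle does not pick up the jniLibs folder automatically, you can point it at the folder explicitly. A sketch of the corresponding additions to app\build.gradle (the sourceSets path assumes the folder was created at app\src\jniLibs as described above):

android {
    sourceSets {
        main {
            // native libraries copied from the OpenCV SDK
            jniLibs.srcDirs = ['src/jniLibs']
        }
    }
}

dependencies {
    // OpenCV Java classes from the module imported in step 5
    implementation project(':openCVsdk')
}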
Next, edit "Project" -> "app" -> "main" -> "res" -> "layout" -> "activity_main.xml". The layout must contain an ImageView with id imageView and two Buttons with ids select_btn and process_btn, because the MainActivity code below looks these views up with findViewById.
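A minimal layout sketch that provides those ids, assuming a simple vertical arrangement; the button texts and sizes are placeholders rather than the author's original layout:

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <Button
        android:id="@+id/select_btn"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Select image" />

    <Button
        android:id="@+id/process_btn"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Detect" />

    <ImageView
        android:id="@+id/imageView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
</LinearLayout>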
Double-click "Project" -> "app" -> "main" -> "java" -> "com.example..." -> "MainActivity" and replace its contents with the code below (keep the first line of the original file, i.e. the package declaration). Also note that the code loads two cascade classifier files from res/raw: copy lbpcascade_frontalface.xml (shipped with the OpenCV Android SDK under sdk\etc\lbpcascades) and haarcascade_mcs_nose.xml (not included in that SDK package, so it has to be obtained separately, e.g. from an older OpenCV release) into app\src\main\res\raw, otherwise R.raw.lbpcascade_frontalface and R.raw.haarcascade_mcs_nose will not resolve.
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class MainActivity extends AppCompatActivity {
    private double max_size = 1024;
    private int PICK_IMAGE_REQUEST = 1;
    private ImageView myImageView;
    private Bitmap selectbp;

    private static final String TAG = "OCVSample::Activity";
    private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
    public static final int JAVA_DETECTOR = 0;
    public static final int NATIVE_DETECTOR = 1;

    private Mat mGray;
    private File mCascadeFile;
    private CascadeClassifier mJavaDetector, mNoseDetector;
    private int mDetectorType = JAVA_DETECTOR;
    private float mRelativeFaceSize = 0.2f;
    private int mAbsoluteFaceSize = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        staticLoadCVLibraries();
        myImageView = (ImageView) findViewById(R.id.imageView);
        myImageView.setScaleType(ImageView.ScaleType.FIT_CENTER);

        // button that opens the system image picker
        Button selectImageBtn = (Button) findViewById(R.id.select_btn);
        selectImageBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // makeText(MainActivity.this.getApplicationContext(), "start to browser image", Toast.LENGTH_SHORT).show();
                selectImage();
            }

            private void selectImage() {
                Intent intent = new Intent();
                intent.setType("image/*");
                intent.setAction(Intent.ACTION_GET_CONTENT);
                startActivityForResult(Intent.createChooser(intent, "选择图像..."), PICK_IMAGE_REQUEST);
            }
        });

        // button that runs the face / nose detection on the selected image
        Button processBtn = (Button) findViewById(R.id.process_btn);
        processBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // makeText(MainActivity.this.getApplicationContext(), "hello, image process", Toast.LENGTH_SHORT).show();
                convertGray();
            }
        });
    }

    // load the OpenCV native library packaged with the app
    private void staticLoadCVLibraries() {
        boolean load = OpenCVLoader.initDebug();
        if (load) {
            Log.i("CV", "Open CV Libraries loaded...");
        }
    }

    // convert the selected image to grayscale, then detect faces and noses and draw rectangles
    private void convertGray() {
        Mat src = new Mat();
        Mat temp = new Mat();
        Mat dst = new Mat();
        Utils.bitmapToMat(selectbp, src);
        Imgproc.cvtColor(src, temp, Imgproc.COLOR_BGRA2BGR);
        Log.i("CV", "image type:" + (temp.type() == CvType.CV_8UC3));
        Imgproc.cvtColor(temp, dst, Imgproc.COLOR_BGR2GRAY);
        Utils.matToBitmap(dst, selectbp);
        myImageView.setImageBitmap(selectbp);
        mGray = dst;

        mJavaDetector = loadDetector(R.raw.lbpcascade_frontalface, "lbpcascade_frontalface.xml");
        mNoseDetector = loadDetector(R.raw.haarcascade_mcs_nose, "haarcascade_mcs_nose.xml");

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
        }

        MatOfRect faces = new MatOfRect();
        if (mJavaDetector != null) {
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        }

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++) {
            Log.e(TAG, "start to detect nose!");
            // search for the nose only inside the detected face region
            Mat faceROI = mGray.submat(facesArray[i]);
            MatOfRect noses = new MatOfRect();
            mNoseDetector.detectMultiScale(faceROI, noses, 1.1, 2, 2, new Size(30, 30));
            Rect[] nosesArray = noses.toArray();
            if (nosesArray.length > 0) {
                // nose coordinates are relative to the face ROI, so offset them by the face's top-left corner
                Imgproc.rectangle(src,
                        new Point(facesArray[i].tl().x + nosesArray[0].tl().x, facesArray[i].tl().y + nosesArray[0].tl().y),
                        new Point(facesArray[i].tl().x + nosesArray[0].br().x, facesArray[i].tl().y + nosesArray[0].br().y),
                        FACE_RECT_COLOR, 3);
            }
            Imgproc.rectangle(src, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        }
        Utils.matToBitmap(src, selectbp);
        myImageView.setImageBitmap(selectbp);
    }

    // copy a cascade file from res/raw into app storage and load it into a CascadeClassifier
    private CascadeClassifier loadDetector(int rawID, String fileName) {
        CascadeClassifier classifier = null;
        try {
            // load cascade file from application resources
            InputStream is = getResources().openRawResource(rawID);
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, fileName);
            FileOutputStream os = new FileOutputStream(mCascadeFile);
            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();
            Log.e(TAG, "start to load file: " + mCascadeFile.getAbsolutePath());
            classifier = new CascadeClassifier(mCascadeFile.getAbsolutePath());
            if (classifier.empty()) {
                Log.e(TAG, "Failed to load cascade classifier");
                classifier = null;
            } else {
                Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
            }
            cascadeDir.delete();
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
        }
        return classifier;
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == PICK_IMAGE_REQUEST && resultCode == RESULT_OK
                && data != null && data.getData() != null) {
            Uri uri = data.getData();
            try {
                Log.d("image-tag", "start to decode selected image now...");
                // first pass: read only the image bounds to work out a down-sampling factor
                InputStream input = getContentResolver().openInputStream(uri);
                BitmapFactory.Options options = new BitmapFactory.Options();
                options.inJustDecodeBounds = true;
                BitmapFactory.decodeStream(input, null, options);
                int raw_width = options.outWidth;
                int raw_height = options.outHeight;
                int max = Math.max(raw_width, raw_height);
                int newWidth = raw_width;
                int newHeight = raw_height;
                int inSampleSize = 1;
                if (max > max_size) {
                    newWidth = raw_width / 2;
                    newHeight = raw_height / 2;
                    while ((newWidth / inSampleSize) > max_size || (newHeight / inSampleSize) > max_size) {
                        inSampleSize *= 2;
                    }
                }
                // second pass: decode the image at the reduced size
                options.inSampleSize = inSampleSize;
                options.inJustDecodeBounds = false;
                options.inPreferredConfig = Bitmap.Config.ARGB_8888;
                selectbp = BitmapFactory.decodeStream(getContentResolver().openInputStream(uri), null, options);
                myImageView.setImageBitmap(selectbp);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
Step 7: Connect your phone and run the app.
First, enable developer mode on your Android phone. Every brand does this differently, so search online for instructions, for example "how to enable developer mode on a Xiaomi phone".
Then connect the phone to your computer with a USB cable. Once the connection succeeds, Android Studio will show your phone model; in the screenshot below it shows "Xiaomi MI 8 UD", since the phone used in this example is a Xiaomi.
A lot of effort went into this article, so please like, bookmark and follow. If you run into problems, leave a comment below!