Emojify 人脸识别表情

效果图:

人脸识别成表情需要playservice服务支持

Gradle (app module build.gradle):

// BUG FIX: the original used typographic quotes (‘…‘), which are not valid
// Groovy string delimiters — Gradle would fail to parse this file.
dependencies {
    implementation fileTree(include: ['*.jar'], dir: 'libs')
    implementation 'com.android.support:appcompat-v7:28.0.0'
    implementation 'com.android.support:support-v4:28.0.0'
    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'com.android.support.test:runner:1.0.2'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
    implementation 'com.android.support:design:28.0.0'
    implementation 'com.google.android.gms:play-services-vision:10.2.0'
    implementation 'com.jakewharton:butterknife:8.8.1'
    annotationProcessor 'com.jakewharton:butterknife-compiler:8.8.1'
    implementation 'com.jakewharton.timber:timber:4.7.0'
}

activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen: a full-bleed ImageView for the photo, a title + "Go" button
     shown before a capture, and three FABs (clear / save / share) shown after. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/activity_main"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@color/colorPrimary"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    tools:context="com.example.admin.emojify.MainActivity">

    <ImageView
        android:id="@+id/image_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_margin="@dimen/view_margin"
        android:contentDescription="@string/imageview_description"
        android:scaleType="fitStart" />

    <TextView
        android:id="@+id/title_text_view"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_above="@+id/emojify_button"
        android:layout_centerHorizontal="true"
        android:layout_margin="@dimen/view_margin"
        android:text="@string/emojify_me"
        android:textAppearance="@style/TextAppearance.AppCompat.Display1" />

    <Button
        android:id="@+id/emojify_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_centerHorizontal="true"
        android:layout_centerVertical="true"
        android:text="@string/go"
        android:textAppearance="@style/TextAppearance.AppCompat.Display1" />

    <android.support.design.widget.FloatingActionButton
        android:id="@+id/clear_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentEnd="true"
        android:layout_alignParentRight="true"
        android:layout_alignParentTop="true"
        android:src="@drawable/ic_clear"
        android:visibility="gone"
        app:backgroundTint="@android:color/white"
        app:fabSize="mini" />

    <android.support.design.widget.FloatingActionButton
        android:id="@+id/save_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_alignParentEnd="true"
        android:layout_alignParentRight="true"
        android:layout_marginBottom="@dimen/fab_margins"
        android:layout_marginEnd="@dimen/fab_margins"
        android:layout_marginRight="@dimen/fab_margins"
        android:src="@drawable/ic_save"
        android:visibility="gone"
        app:backgroundTint="@android:color/white" />

    <android.support.design.widget.FloatingActionButton
        android:id="@+id/share_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_alignParentLeft="true"
        android:layout_alignParentStart="true"
        android:layout_marginBottom="@dimen/fab_margins"
        android:layout_marginLeft="@dimen/fab_margins"
        android:layout_marginStart="@dimen/fab_margins"
        android:src="@drawable/ic_share"
        android:visibility="gone"
        app:backgroundTint="@android:color/white" />

</RelativeLayout>

file_paths.xml
<?xml version="1.0" encoding="utf-8"?>
<!-- FileProvider path configuration: the external cache dir holds the
     temporary camera capture; Pictures/ holds saved, shareable results. -->
<paths xmlns:android="http://schemas.android.com/apk/res/android">
    <external-cache-path name="my_cache" path="." />
    <external-path name="my_images" path="Pictures/" />
</paths>

AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.admin.emojify">

    <!-- App cannot be installed on devices without a camera. -->
    <!-- BUG FIX: "android.hardware.camera2" is not a declarable <uses-feature>
         name; the documented feature for a camera is "android.hardware.camera".
         The invalid name made the requirement a no-op on filtering. -->
    <uses-feature
        android:name="android.hardware.camera"
        android:required="true" />

    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">

        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>

        <!-- Exposes the temp-capture and saved-image files to the camera app
             and to share targets via content:// URIs (see @xml/file_paths). -->
        <provider
            android:name="android.support.v4.content.FileProvider"
            android:authorities="com.example.android.fileprovider"
            android:exported="false"
            android:grantUriPermissions="true">
            <meta-data
                android:name="android.support.FILE_PROVIDER_PATHS"
                android:resource="@xml/file_paths" />
        </provider>
    </application>

</manifest>

BitmapUtils
package com.example.admin.emojify;

import android.content.Context;import android.content.Intent;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.net.Uri;import android.os.Environment;import android.support.v4.content.FileProvider;import android.util.DisplayMetrics;import android.view.WindowManager;import android.widget.Toast;

import java.io.File;import java.io.FileOutputStream;import java.io.IOException;import java.io.OutputStream;import java.text.SimpleDateFormat;import java.util.Date;import java.util.Locale;

public class BitmapUtils {

    private static final String FILE_PROVIDER_AUTHORITY = "com.example.android.fileprovider";

    static Bitmap resamplePic(Context context, String imagePath) {        // Get device screen size information        DisplayMetrics metrics = new DisplayMetrics();        WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);        manager.getDefaultDisplay().getMetrics(metrics);        int targetH = metrics.heightPixels;        int targetW = metrics.widthPixels;        // Get the dimensions of the original bitmap        BitmapFactory.Options bmOptions = new BitmapFactory.Options();        bmOptions.inJustDecodeBounds = true;        BitmapFactory.decodeFile(imagePath, bmOptions);        int photoW = bmOptions.outWidth;        int photoH = bmOptions.outHeight;        // Determine how much to scale down the image        int scaleFactor = Math.min(photoW / targetW, photoH / targetH);        // Decode the image file into a Bitmap sized to fill the View        bmOptions.inJustDecodeBounds = false;        bmOptions.inSampleSize = scaleFactor;        return BitmapFactory.decodeFile(imagePath);    }

    static File createTempImageFile(Context context) throws IOException {        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",                Locale.getDefault()).format(new Date());        String imageFileName = "JPEG_" + timeStamp + "_";        File storageDir = context.getExternalCacheDir();        return File.createTempFile(                imageFileName,  /* prefix */                ".jpg",         /* suffix */                storageDir      /* directory */        );    }

    static boolean deleteImageFile(Context context, String imagePath) {        File imageFile = new File(imagePath);        boolean deleted = imageFile.delete();        if (!deleted) {            String errorMessage = context.getString(R.string.error);            Toast.makeText(context, errorMessage, Toast.LENGTH_SHORT).show();        }        return deleted;    }

    //通知相册有新的图片    private static void galleryAddPic(Context context, String imagePath) {        Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);        File f = new File(imagePath);        Uri contentUri = Uri.fromFile(f);        mediaScanIntent.setData(contentUri);        context.sendBroadcast(mediaScanIntent);    }

    static String saveImage(Context context, Bitmap image) {        String savedImagePath = null;        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",                Locale.getDefault()).format(new Date());        String imageFileName = "JPEG_" + timeStamp + ".jpg";        File storageDir = new File(                Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES)                        + "/Emojify");        boolean success = true;        if (!storageDir.exists()) {            success = storageDir.mkdirs();        }        if (success) {            File imageFile = new File(storageDir, imageFileName);            savedImagePath = imageFile.getAbsolutePath();            try {                OutputStream fOut = new FileOutputStream(imageFile);                image.compress(Bitmap.CompressFormat.JPEG, 100, fOut);                fOut.close();            } catch (Exception e) {                e.printStackTrace();            }            galleryAddPic(context, savedImagePath);            String savedMessage = context.getString(R.string.saved_message, savedImagePath);            Toast.makeText(context, savedMessage, Toast.LENGTH_SHORT).show();

        }        return savedImagePath;    }

    static void shareImage(Context context, String imagePath) {        // Create the share intent and start the share activity        File imageFile = new File(imagePath);        Intent shareIntent = new Intent(Intent.ACTION_SEND);        shareIntent.setType("image/*");        Uri photoURI = FileProvider.getUriForFile(context, FILE_PROVIDER_AUTHORITY, imageFile);        shareIntent.putExtra(Intent.EXTRA_STREAM, photoURI);        context.startActivity(shareIntent);    }

}
Emojifier

package com.example.admin.emojify;

import android.content.Context;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.graphics.Canvas;import android.util.SparseArray;import android.widget.Toast;import com.google.android.gms.vision.Frame;import com.google.android.gms.vision.face.Face;import com.google.android.gms.vision.face.FaceDetector;import timber.log.Timber;

public class Emojifier {

    private static final float EMOJI_SCALE_FACTOR = .9f;    private static final double SMILING_PROB_THRESHOLD = .15;    private static final double EYE_OPEN_PROB_THRESHOLD = .5;

    static Bitmap detectFacesandOverlayEmoji(Context context, Bitmap picture) {        // Create the face detector, disable tracking and enable classifications        FaceDetector detector = new FaceDetector.Builder(context)                .setTrackingEnabled(false)                .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)                .build();

        // Build the frame        Frame frame = new Frame.Builder().setBitmap(picture).build();        // Detect the faces        SparseArray<Face> faces = detector.detect(frame);        // Log the number of faces        Timber.d( "detectFaces: number of faces = " + faces.size());        // Initialize result bitmap to original picture        Bitmap resultBitmap = picture;        // If there are no faces detected, show a Toast message        if (faces.size() == 0) {            Toast.makeText(context, R.string.no_faces_message, Toast.LENGTH_SHORT).show();        } else {

            // Iterate through the faces            for (int i = 0; i < faces.size(); ++i) {                Face face = faces.valueAt(i);

                Bitmap emojiBitmap;                switch (whichEmoji(face)) {                    case SMILE:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.smile);                        break;                    case FROWN:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.frown);                        break;                    case LEFT_WINK:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.leftwink);                        break;                    case RIGHT_WINK:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.rightwink);                        break;                    case LEFT_WINK_FROWN:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.leftwinkfrown);                        break;                    case RIGHT_WINK_FROWN:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.rightwinkfrown);                        break;                    case CLOSED_EYE_SMILE:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.closed_smile);                        break;                    case CLOSED_EYE_FROWN:                        emojiBitmap = BitmapFactory.decodeResource(context.getResources(),                                R.drawable.closed_frown);                        break;                    default:                        emojiBitmap = null;                        Toast.makeText(context, R.string.no_emoji, Toast.LENGTH_SHORT).show();                }

                // Add the emojiBitmap to the proper position in the original image                resultBitmap = addBitmapToFace(resultBitmap, emojiBitmap, face);            }        }

        // Release the detector        detector.release();        return resultBitmap;    }

    private static Emoji whichEmoji(Face face) {        // Log all the probabilities        Timber.d( "whichEmoji: smilingProb = " + face.getIsSmilingProbability());        Timber.d( "whichEmoji: leftEyeOpenProb = "                + face.getIsLeftEyeOpenProbability());        Timber.d( "whichEmoji: rightEyeOpenProb = "                + face.getIsRightEyeOpenProbability());

        boolean smiling = face.getIsSmilingProbability() > SMILING_PROB_THRESHOLD;        boolean leftEyeClosed = face.getIsLeftEyeOpenProbability() < EYE_OPEN_PROB_THRESHOLD;        boolean rightEyeClosed = face.getIsRightEyeOpenProbability() < EYE_OPEN_PROB_THRESHOLD;

        // Determine and log the appropriate emoji        Emoji emoji;        if(smiling) {            if (leftEyeClosed && !rightEyeClosed) {                emoji = Emoji.LEFT_WINK;            }  else if(rightEyeClosed && !leftEyeClosed){                emoji = Emoji.RIGHT_WINK;            } else if (leftEyeClosed){                emoji = Emoji.CLOSED_EYE_SMILE;            } else {                emoji = Emoji.SMILE;            }        } else {            if (leftEyeClosed && !rightEyeClosed) {                emoji = Emoji.LEFT_WINK_FROWN;            }  else if(rightEyeClosed && !leftEyeClosed){                emoji = Emoji.RIGHT_WINK_FROWN;            } else if (leftEyeClosed){                emoji = Emoji.CLOSED_EYE_FROWN;            } else {                emoji = Emoji.FROWN;            }        }        // Log the chosen Emoji        Timber.d( "whichEmoji: " + emoji.name());        return emoji;    }

    private static Bitmap addBitmapToFace(Bitmap backgroundBitmap, Bitmap emojiBitmap, Face face) {

        // Initialize the results bitmap to be a mutable copy of the original image        Bitmap resultBitmap = Bitmap.createBitmap(backgroundBitmap.getWidth(),                backgroundBitmap.getHeight(), backgroundBitmap.getConfig());

        // Scale the emoji so it looks better on the face        float scaleFactor = EMOJI_SCALE_FACTOR;

        // Determine the size of the emoji to match the width of the face and preserve aspect ratio        int newEmojiWidth = (int) (face.getWidth() * scaleFactor);        int newEmojiHeight = (int) (emojiBitmap.getHeight() *                newEmojiWidth / emojiBitmap.getWidth() * scaleFactor);

        // Scale the emoji        emojiBitmap = Bitmap.createScaledBitmap(emojiBitmap, newEmojiWidth, newEmojiHeight, false);

        // Determine the emoji position so it best lines up with the face        float emojiPositionX =                (face.getPosition().x + face.getWidth() / 2) - emojiBitmap.getWidth() / 2;        float emojiPositionY =                (face.getPosition().y + face.getHeight() / 2) - emojiBitmap.getHeight() / 3;

        // Create the canvas and draw the bitmaps to it        Canvas canvas = new Canvas(resultBitmap);        canvas.drawBitmap(backgroundBitmap, 0, 0, null);        canvas.drawBitmap(emojiBitmap, emojiPositionX, emojiPositionY, null);        return resultBitmap;    }

    private enum Emoji {        SMILE,        FROWN,        LEFT_WINK,        RIGHT_WINK,        LEFT_WINK_FROWN,        RIGHT_WINK_FROWN,        CLOSED_EYE_SMILE,        CLOSED_EYE_FROWN    }

}
 
MainActivity

package com.example.admin.emojify;

import android.Manifest;import android.content.DialogInterface;import android.content.Intent;import android.content.pm.PackageManager;import android.graphics.Bitmap;import android.net.Uri;import android.provider.MediaStore;import android.provider.Settings;import android.support.annotation.NonNull;

import android.support.design.widget.FloatingActionButton;import android.support.v4.app.ActivityCompat;import android.support.v4.content.ContextCompat;import android.support.v4.content.FileProvider;import android.support.v7.app.AlertDialog;import android.support.v7.app.AppCompatActivity;import android.os.Bundle;import android.view.View;import android.widget.Button;import android.widget.ImageView;import android.widget.TextView;import android.widget.Toast;

import java.io.File;import java.io.IOException;

import butterknife.BindView;import butterknife.ButterKnife;import butterknife.OnClick;import timber.log.Timber;

public class MainActivity extends AppCompatActivity {

    private static final int REQUEST_IMAGE_CAPTURE = 1;    private static final int REQUEST_STORAGE_PERMISSION = 1;    private static final String FILE_PROVIDER_AUTHORITY = "com.example.android.fileprovider";

    @BindView(R.id.image_view) ImageView mImageView;    @BindView(R.id.emojify_button) Button mEmojifyButton;    @BindView(R.id.share_button) FloatingActionButton mShareFab;    @BindView(R.id.save_button) FloatingActionButton mSaveFab;    @BindView(R.id.clear_button) FloatingActionButton mClearFab;    @BindView(R.id.title_text_view) TextView mTitleTextView;

    private String mTempPhotoPath;    private Bitmap mResultsBitmap;

    @Override    protected void onCreate(Bundle savedInstanceState) {        super.onCreate(savedInstanceState);        setContentView(R.layout.activity_main);        ButterKnife.bind(this);        Timber.plant(new Timber.DebugTree());    }

    @OnClick(R.id.emojify_button)    public void emojifyMe(){        if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE)                != PackageManager.PERMISSION_GRANTED) {            ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},                    REQUEST_STORAGE_PERMISSION);        } else {            // Launch the camera if the permission exists            launchCamera();

        }    }

    @Override    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {        switch (requestCode){            case REQUEST_STORAGE_PERMISSION:{                if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {                    launchCamera();                }else {                    Toast.makeText(MainActivity.this,getString(R.string.refused_authorization),Toast.LENGTH_SHORT).show();                    new AlertDialog.Builder(MainActivity.this)                            .setMessage(getString(R.string.need_permission))                            .setPositiveButton(getString(R.string.settings), new DialogInterface.OnClickListener() {                                @Override                                public void onClick(DialogInterface dialog, int which) {

                                    Uri uri=Uri.fromParts("package",getPackageName(),null);                                    Intent intent=new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS,uri);                                    startActivity(intent);                                }                            }).setNeutralButton(getString(R.string.cancel),null)                            .create()                            .show();                }break;

            }            }        }

    private void launchCamera() {        // Create the capture image intent        Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);

        // Ensure that there‘s a camera activity to handle the intent        if (takePictureIntent.resolveActivity(getPackageManager()) != null) {            // Create the temporary File where the photo should go            File photoFile = null;            try {                photoFile = BitmapUtils.createTempImageFile(this);            } catch (IOException ex) {                // Error occurred while creating the File                Timber.e("Create file failed");            }            // Continue only if the File was successfully created            if (photoFile != null) {                // Get the path of the temporary file                mTempPhotoPath = photoFile.getAbsolutePath();                // Get the content URI for the image file                Uri photoURI = FileProvider.getUriForFile(this,                        FILE_PROVIDER_AUTHORITY,                        photoFile);                // Add the URI so the camera can store the image                takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);                // Launch the camera activity                startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);            }        }    }

    @Override    protected void onActivityResult(int requestCode, int resultCode, Intent data) {        if (requestCode == REQUEST_IMAGE_CAPTURE&&resultCode==RESULT_OK){            mEmojifyButton.setVisibility(View.GONE);            mTitleTextView.setVisibility(View.GONE);            mSaveFab.setVisibility(View.VISIBLE);            mShareFab.setVisibility(View.VISIBLE);            mClearFab.setVisibility(View.VISIBLE);            mResultsBitmap = BitmapUtils.resamplePic(this, mTempPhotoPath);            mResultsBitmap = Emojifier.detectFacesandOverlayEmoji(this, mResultsBitmap);            mImageView.setImageBitmap(mResultsBitmap);        }else {            // Otherwise, delete the temporary image file            BitmapUtils.deleteImageFile(this, mTempPhotoPath);        }    }

@OnClick(R.id.save_button)public void save(View view){    BitmapUtils.deleteImageFile(this, mTempPhotoPath);    BitmapUtils.saveImage(this, mResultsBitmap);}

@OnClick(R.id.share_button)public void share(View view){    BitmapUtils.deleteImageFile(this, mTempPhotoPath);    BitmapUtils.saveImage(this, mResultsBitmap);    BitmapUtils.shareImage(this, mTempPhotoPath);}

@OnClick(R.id.clear_button)public void clear(View view){    mImageView.setImageResource(0);    mEmojifyButton.setVisibility(View.VISIBLE);    mTitleTextView.setVisibility(View.VISIBLE);    mShareFab.setVisibility(View.GONE);    mSaveFab.setVisibility(View.GONE);    mClearFab.setVisibility(View.GONE);    BitmapUtils.deleteImageFile(this, mTempPhotoPath);}

}

Github地址:https://github.com/NeoWu55/Emojify
 
 
 

原文地址:https://www.cnblogs.com/neowu/p/10927022.html

时间: 2024-10-10 16:43:44

Emojify 人脸识别表情的相关文章

实验报告: 人脸识别方法回顾与实验分析 【OpenCV测试方法源码】

趁着还未工作,先把过去做的东西整理下出来~   Github源码:https://github.com/Blz-Galaxy/OpenCV-Face-Recognition (涉及个人隐私,源码不包含测试样本,请谅解~) 对实验结果更感兴趣的朋友请直接看 第5章 [摘要]这是一篇关于人脸识别方法的实验报告.报告首先回顾了人脸识别研究的发展历程及基本分类:随后对人脸识别技术方法发展过程中一些经典的流行的方法进行了详细的阐述:最后作者通过设计实验对比了三种方法的识别效果并总结了人脸识别所面临的困难与

openCV+ASM+LBP+Gabor实现人脸识别(GT人脸库)

原理:使用GT人脸库做样本,VS2010下使用openCV2.44自带的Haar算法检測人脸区域,ASM Library特征检測,然后使用YCrCb颜色空间做肤色检測,再用LBP+Gabor小波提取特征,最小邻近距离做分类识别. 1.GT人脸库 Georgia Tech face database,网址:http://www.anefian.com/research/face_reco.htm GT人脸库包括50个人,每人15张不同角度.不同表情的正面照片. 图片为JPG格式,640*480,大

OpenCV — 人脸识别

前段时间弄过一下人脸识别相关的东西,记录一下 撰写不易,转载需注明出处:http://blog.csdn.net/jscese/article/details/54409627本文来自 [jscese]的博客! 概念 FaceDetect 人脸检测 在一张图像中判断是否存在人脸并找出人脸所在的位置 FaceRecognize 人脸识别 在人脸检测的基础上收集人脸数据集合进行处理保存信息,将输入人脸与保存的信息进行比对校验,得到是否为其中某个人脸 特征值 以某种特定规则对输入源进行处理得到具有唯一

人脸识别

一:发展历程 人脸识别系统的研究始于20世纪60年代,80年代后随着计算机技术和光学成像技术的发展得到提高,而真正进入初级的应用阶段则在90年后期,并且以美 国.德国和日本的技术实现为主:人脸识别系统成功的关键在于是否拥有尖端的核心算法,并使识别结果具有实用化的识别率和识别速度:“人脸识别系统”集成了 人工智能.机器识别.机器学习.模型理论.专家系统.视频图像处理等多种专业技术,同时需结合中间值处理的理论与实现,是生物特征识别的最新应用,其核心 技术的实现,展现了弱人工智能向强人工智能的转化.

人脸识别SDK小结

Face++人脸识别 进入官网 Face++ 致力于研发世界最好的人脸技术,提供免费的API和SDK供企业和开发者调用,更有灵活的定制化服务满足不同需求.已有多家公司使用Face++技术服务,完成包括人脸搜索.定位.识别.智能美化等功能.我们旨在为合作者提供完善的技术与维护服务. 百度媒体云人脸识别 进入官网 百度媒体云人脸识别服务,依托百度业界领先的人脸识别算法,提供了人脸检测.五官定位.人脸属性检测等功能.媒体云人脸识别服务通过提供一系列HTTP Restful API及跨终端平台SDK,实

人脸识别算法初次了解

这是转载别人的帖子,认为好,大家一块学习http://www.cnblogs.com/guoyiqi/archive/2011/07/28/2129300.html 前言 在写此文之前,先扯点东西.我一直在找一个东西,让我思考,让我久久的深陷当中,永久的,不断的思考.现在,我意识到,这个东西即是算法.我一直在找一家合适的公司,能让我的兴趣无比放肆的,自由驰骋. ok,由于在一家公司的面试过程中,面试官提到过这个人脸识别算法,由于在此之前,未曾有过了解,所以,特作此番学习与研究.有不论什么问题,欢

Android 实现人脸识别教程[运用虹软人脸识别SDK]

基于虹软人脸识别引擎,在Android平台上实现人脸识别功能,即使在离线的情况下依旧运行,不被人采集个人照片的感觉,还是爽爽的.经过整个测试过来,虹软的人脸识别还是很强大的,人脸检测可以控制在20ms之内,人脸识别大概在200ms左右.今天就来分享一下开发经验 项目的目标 我们需要实现一个人脸识别功能.简单来说,就是机的后置摄像头,识别摄像头中实时拍到的人脸信息,如果人库注册过,则显示识别后的人脸信息,如登记的名字:如果不在,提示未注册. 这个功能具有多个应用场景,比如,火车站或者打卡和门禁系统

OpenCV人脸识别Eigen算法源码分析

1 理论基础 学习Eigen人脸识别算法需要了解一下它用到的几个理论基础,现总结如下: 1.1 协方差矩阵 首先需要了解一下公式: 共公式可以看出:均值描述的是样本集合的平均值,而标准差描述的则是样本集合的各个样本点到均值的距离之平均.以一个国家国民收入为例,均值反映了平均收入,而均方差/方差则反映了贫富差距,如果两个国家国民收入均值相等,则标准差越大说明国家的国民收入越不均衡,贫富差距较大.以上公式都是用来描述一维数据量的,把方差公式推广到二维,则可得到协方差公式: 协方差表明了两个随机变量之

人脸识别必读的N篇文章

一,人脸检测/跟踪 人脸检测/跟踪的目的是在图像/视频中找到各个人脸所在的位置和大小:对于跟踪而言,还需要确定帧间不同人脸间的对应关系. 1, Robust Real-time Object Detection. Paul Viola, Michael Jones. IJCV 2004. 入选理由: Viola的人脸检测工作使得人脸检测真正变得实时可用.他们发表了一系列文章,这篇是引用率最高的一篇. 2, Fast rotation invariant multi-view face detec