Face recognition using EMGU CV 3.1.0

The function DetectAndRecognizeFaces is designed to detect a face and pass the detected image to the Recognise method, which returns the name of the recognized face based on its label. LoadTrainingSet fetches the training data from a SQL database, and LoadTrainedData trains the EigenFaceRecognizer.

The problem is that the prediction function never returns -1 for unknown faces; it always returns a match, even when the detected face is not present in the database.
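
As far as I understand it, EigenFaceRecognizer only reports a label of -1 when the distance of the best match is above the threshold supplied to its constructor, and with the parameterless constructor that threshold is effectively unlimited. A minimal sketch of what I mean, assuming the Emgu.CV.Face API in 3.1.0 (the helper name and the values 80 and 3500 are placeholders, not values from my project):

// using Emgu.CV.Face;
private FaceRecognizer CreateRecognizerWithThreshold()
{
    // Placeholder values: 80 principal components, 3500 as the distance threshold.
    // With the default constructor the threshold is double.MaxValue, so Predict()
    // never returns Label == -1; a finite threshold is what should make faces whose
    // best-match distance is too large come back as "unknown" (-1).
    return new EigenFaceRecognizer(80, 3500);
}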

Code:

private void DetectAndRecognizeFaces()
{
    Image<Gray, byte> grayframe = ImageFrame.Convert<Gray, byte>();

    //Assign user-defined Values to parameter variables:
    minNeighbors = int.Parse(comboBoxMinNeigh.Text);  // the 3rd parameter
    windowsSize = int.Parse(textBoxWinSize.Text);   // the 5th parameter
    scaleIncreaseRate = Double.Parse(comboBoxScIncRte.Text); //the 2nd parameter

    //detect faces in the gray-scale image; in EMGU 3.x DetectMultiScale returns a Rectangle[]
    var faces = haar.DetectMultiScale(grayframe, scaleIncreaseRate, minNeighbors, Size.Empty); //the actual face detection happens here

    MessageBox.Show("Total Faces Detected: " + faces.Length.ToString());

    Bitmap BmpInput = grayframe.ToBitmap();
    Bitmap ExtractedFace;   //empty
    Graphics grp;
    //MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);

    faceRecognizer.Load(recognizeFilePath);
    foreach (var face in faces)
    {
        t = t + 1;
        result = ImageFrame.Copy(face).Convert<Gray, byte>().Resize(100, 100, Inter.Cubic);

        //set the size of the empty box(ExtractedFace) which will later contain the detected face
        ExtractedFace = new Bitmap(face.Width, face.Height);

        //assign the empty box to graphics for painting
        grp = Graphics.FromImage(ExtractedFace);
        //graphics fills the empty box with exact pixels of the face to be extracted from input image
        grp.DrawImage(BmpInput, 0, 0, face, GraphicsUnit.Pixel);



        string name = Recognise(result);
        if (name == "Unknown")
        {
            ImageFrame.Draw(face, new Bgr(Color.Red), 3);

            MessageBox.Show("Face Name is: " + name.ToString());
            ImageFrame.Draw(name, new Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 0.5,
                new Bgr(0, 0, 255), 1, LineType.EightConnected, bottomLeftOrigin);
        }
        else
        {
            ImageFrame.Draw(face, new Bgr(Color.Green), 3);

            MessageBox.Show("Face Name is: " + name.ToString());
            ImageFrame.Draw(name, new Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 0.5,
                new Bgr(0, 255, 0), 1, LineType.EightConnected, bottomLeftOrigin);
        }
    }

    CamImageBox.Image = ImageFrame;
}

public string Recognise(Image<Gray, byte> Input_image, int Eigen_Thresh = -1)
{
    if (_IsTrained)
    {
        faceRecognizer.Load(recognizeFilePath);
        FaceRecognizer.PredictionResult ER = faceRecognizer.Predict(Input_image);

        if (ER.Label == -1)
        {
            Eigen_Label = "Unknown";
            Eigen_Distance = 0;
            return Eigen_Label;
        }
        else
        {
            Eigen_Label = Names_List[ER.Label];
            Eigen_Distance = (float)ER.Distance;
            if (Eigen_Thresh > -1) Eigen_threshold = Eigen_Thresh;

            //Only use the post-threshold rule if we are using an Eigen recognizer,
            //since the Fisher and LBPH thresholds set in the constructor work correctly
            switch (Recognizer_Type)
            {
                case ("EMGU.CV.EigenFaceRecognizer"):
                    if (Eigen_Distance > Eigen_threshold) return Eigen_Label;
                    else return "Unknown";
                case ("EMGU.CV.LBPHFaceRecognizer"):
                case ("EMGU.CV.FisherFaceRecognizer"):
                default:
                    return Eigen_Label; //the threshold set in training controls unknowns
            }
        }

    }
    else return "";
}
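
The optional Eigen_Thresh parameter lets the caller override Eigen_threshold at prediction time; a hypothetical call with an explicit value (2500 is purely illustrative, not a value from my project) would look like this:

// hypothetical override of the Eigen threshold for a single prediction
string name = Recognise(result, 2500);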

private void LoadTrainingSet()
{
    Bitmap bmpImage;

    for (int i = 0; i < totalRows; i++)
    {
        byte[] fetchedBytes = (byte[])dataTable.Rows[i]["FaceImage"];
        MemoryStream stream = new MemoryStream(fetchedBytes);
        //stream.Write(fetchedBytes, 0, fetchedBytes.Length);
        bmpImage = new Bitmap(stream);
        trainingImages.Add(new Emgu.CV.Image<Gray, Byte>(bmpImage).Resize(100, 100, Inter.Cubic));

        //string faceName = (string)dataTable.Rows[i]["FaceName"];
        int faceName = (int)dataTable.Rows[i]["FaceID"];
        NameLabels.Add(faceName);
        NameLable = (string)dataTable.Rows[i]["FaceName"];
        Names_List.Add(NameLable);
        //ContTrain = NameLabels[i];
    }
    LoadTrainedData();
}

public void LoadTrainedData()
{
    if (trainingImages.Count() != 0)
    {
        var faceImages = new Image<Gray, byte>[trainingImages.Count()];
        var facesIDs = new int[NameLabels.Count()];
        //var facesNames = new string[Names_List.Count()];
        //int[] faceLabels = new int[NameLabels.Count()];
        //MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
        for (int i = 0; i < trainingImages.Count(); i++)
        {
            faceImages[i] = trainingImages[i];
            facesIDs[i] = NameLabels[i];

        }

        try
        {
            faceRecognizer.Train(faceImages, facesIDs);
            faceRecognizer.Save(recognizeFilePath);
            _IsTrained = true;
        }
        catch (Exception error)
        {
            MessageBox.Show(error.ToString());
        }
    }

}