Face API Using ASP.Net MVC Part 2

// Open an existing file for reading; "using" disposes the stream when done
// (the original snippet was missing the semicolon and never disposed it)
using (var fStream = System.IO.File.OpenRead(FullImgPath))
{
    // Create Instance of Service Client by passing Servicekey as parameter in constructor
    var faceServiceClient = new FaceServiceClient(ServiceKey);

    // Call detection REST API, requesting landmarks plus selected face attributes
    Face[] faces = await faceServiceClient.DetectAsync(fStream, true, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });
}

Create and save cropped images of the detected faces.

// GUID-based file name for the cropped face image (the `as string` casts were redundant)
var croppedImg = Convert.ToString(Guid.NewGuid()) + ".jpeg";
var croppedImgPath = directory + '/' + croppedImg;                       // relative URL for the browser
var croppedImgFullPath = Path.Combine(Server.MapPath(directory), croppedImg); // physical path on disk

// Dispose the source bitmap deterministically — Image.FromFile keeps the
// uploaded file locked until the Bitmap is disposed.
using (var sourceBitmap = (Bitmap)Image.FromFile(FullImgPath))
{
    CroppedFace = CropBitmap(
                  sourceBitmap,
                  face.FaceRectangle.Left,
                  face.FaceRectangle.Top,
                  face.FaceRectangle.Width,
                  face.FaceRectangle.Height);
}

// Null-check BEFORE using the bitmap (the original saved first, then checked)
if (CroppedFace != null)
{
    CroppedFace.Save(croppedImgFullPath, ImageFormat.Jpeg);
    CroppedFace.Dispose();
}

Method that crops images according to the detected face rectangle values.

/// <summary>
/// Returns a new bitmap containing the region of <paramref name="bitmap"/>
/// described by the crop coordinates (the detected face rectangle).
/// </summary>
/// <param name="bitmap">Source image to crop.</param>
/// <param name="cropX">Left edge of the crop region, in pixels.</param>
/// <param name="cropY">Top edge of the crop region, in pixels.</param>
/// <param name="cropWidth">Width of the crop region, in pixels.</param>
/// <param name="cropHeight">Height of the crop region, in pixels.</param>
/// <returns>The cropped bitmap.</returns>
public Bitmap CropBitmap(Bitmap bitmap, int cropX, int cropY, int cropWidth, int cropHeight)
{
    // Crop Images
}

Finally, the full MVC controller:

/// <summary>
/// MVC controller that uploads an image, calls the Microsoft Face API to
/// detect faces, crops each detected face to its own JPEG, and returns the
/// results as JSON for the AngularJS front end.
/// </summary>
public class FaceDetectionController : Controller
{
    // Face API subscription key, read once from web.config appSettings.
    private static string ServiceKey = ConfigurationManager.AppSettings["FaceServiceKey"];

    // Relative folder where the uploaded image and cropped faces are stored.
    private static string directory = "../UploadedFiles";

    // Name of the most recently uploaded file.
    // NOTE(review): static mutable state is shared across ALL requests/users —
    // concurrent uploads will overwrite each other. Consider Session or TempData.
    private static string UplImageName = string.Empty;

    private ObservableCollection<vmFace> _detectedFaces = new ObservableCollection<vmFace>();
    private ObservableCollection<vmFace> _resultCollection = new ObservableCollection<vmFace>();

    // View models for each detected face (cropped image path + attributes).
    public ObservableCollection<vmFace> DetectedFaces
    {
        get { return _detectedFaces; }
    }

    // Face rectangles scaled for on-screen rendering.
    public ObservableCollection<vmFace> ResultCollection
    {
        get { return _resultCollection; }
    }

    // Width, in pixels, at which the query image is rendered in the browser.
    public int MaxImageSize
    {
        get { return 450; }
    }

    // GET: FaceDetection
    public ActionResult Index()
    {
        return View();
    }

    /// <summary>
    /// Saves the posted image file(s) to the upload folder under a GUID name
    /// and remembers the last one for the subsequent detection call.
    /// </summary>
    /// <returns>JSON with a status flag, message, and the stored file name.</returns>
    [HttpPost]
    public JsonResult SaveCandidateFiles()
    {
        string message = string.Empty, fileName = string.Empty, actualFileName = string.Empty;
        bool flag = false;

        // Requested file collection
        HttpFileCollection fileRequested = System.Web.HttpContext.Current.Request.Files;
        if (fileRequested != null && fileRequested.Count > 0)
        {
            // Ensure the upload folder exists, then remove stale files from it.
            CreateDirectory();
            ClearDirectory();

            for (int i = 0; i < fileRequested.Count; i++)
            {
                var file = Request.Files[i];
                actualFileName = file.FileName;

                // GUID file name avoids collisions and ignores any path the
                // client sent, preventing path-traversal via the file name.
                fileName = Guid.NewGuid() + Path.GetExtension(file.FileName);
                int size = file.ContentLength;

                try
                {
                    file.SaveAs(Path.Combine(Server.MapPath(directory), fileName));
                    message = "File uploaded successfully";
                    UplImageName = fileName;
                    flag = true;
                }
                catch (Exception ex)
                {
                    // Log instead of silently discarding the failure.
                    System.Diagnostics.Debug.WriteLine(ex);
                    message = "File upload failed! Please try again";
                }
            }
        }

        return new JsonResult
        {
            Data = new
            {
                Message = message,
                UplImageName = fileName,
                Status = flag
            }
        };
    }

    /// <summary>
    /// Runs face detection on the last uploaded image, saves a cropped JPEG per
    /// face, and returns face attributes plus scaled rectangles as JSON.
    /// </summary>
    [HttpGet]
    public async Task<dynamic> GetDetectedFaces()
    {
        ResultCollection.Clear();
        DetectedFaces.Clear();

        var DetectedResultsInText = "Detecting...";
        var FullImgPath = Path.Combine(Server.MapPath(directory), UplImageName);
        var QueryFaceImageUrl = directory + '/' + UplImageName; // relative URL for the browser

        if (!string.IsNullOrEmpty(UplImageName))
        {
            // Ensure the folder exists (defensive; upload normally created it).
            CreateDirectory();

            try
            {
                // Stream the uploaded image to the detection REST API.
                using (var fStream = System.IO.File.OpenRead(FullImgPath))
                {
                    // Original pixel dimensions, needed to scale rectangles for rendering.
                    var imageInfo = UIHelper.GetImageInfoForRendering(FullImgPath);

                    // Create instance of service client by passing the service key.
                    var faceServiceClient = new FaceServiceClient(ServiceKey);
                    Face[] faces = await faceServiceClient.DetectAsync(fStream, true, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });
                    DetectedResultsInText = string.Format("{0} face(s) has been detected!!", faces.Length);

                    // Load the source image ONCE and dispose it deterministically.
                    // The original re-opened it per face and never disposed it,
                    // which kept the upload file locked (hence the GC.Collect hack).
                    using (var sourceBitmap = (Bitmap)Image.FromFile(FullImgPath))
                    {
                        foreach (var face in faces)
                        {
                            // Create & save a cropped image per detected face.
                            var croppedImg = Convert.ToString(Guid.NewGuid()) + ".jpeg";
                            var croppedImgPath = directory + '/' + croppedImg;
                            var croppedImgFullPath = Path.Combine(Server.MapPath(directory), croppedImg);

                            using (Bitmap croppedFace = CropBitmap(
                                            sourceBitmap,
                                            face.FaceRectangle.Left,
                                            face.FaceRectangle.Top,
                                            face.FaceRectangle.Width,
                                            face.FaceRectangle.Height))
                            {
                                croppedFace.Save(croppedImgFullPath, ImageFormat.Jpeg);
                            }

                            DetectedFaces.Add(new vmFace()
                            {
                                ImagePath = FullImgPath,
                                FileName = croppedImg,
                                FilePath = croppedImgPath,
                                Left = face.FaceRectangle.Left,
                                Top = face.FaceRectangle.Top,
                                Width = face.FaceRectangle.Width,
                                Height = face.FaceRectangle.Height,
                                FaceId = face.FaceId.ToString(),
                                Gender = face.FaceAttributes.Gender,
                                Age = string.Format("{0:#} years old", face.FaceAttributes.Age),
                                IsSmiling = face.FaceAttributes.Smile > 0.0 ? "Smile" : "Not Smile",
                                Glasses = face.FaceAttributes.Glasses.ToString(),
                            });
                        }
                    }

                    // Convert detection result into UI binding objects for rendering.
                    var rectFaces = UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo);
                    foreach (var face in rectFaces)
                    {
                        ResultCollection.Add(face);
                    }
                }
            }
            catch (FaceAPIException ex)
            {
                // Log the API failure; the client still gets an empty result set.
                // NOTE(review): consider surfacing the error message to the caller.
                System.Diagnostics.Debug.WriteLine(ex);
            }
        }

        return new JsonResult
        {
            Data = new
            {
                QueryFaceImage = QueryFaceImageUrl,
                MaxImageSize = MaxImageSize,
                FaceInfo = DetectedFaces,
                FaceRectangles = ResultCollection,
                DetectedResults = DetectedResultsInText
            },
            JsonRequestBehavior = JsonRequestBehavior.AllowGet
        };
    }

    /// <summary>
    /// Returns a new bitmap containing the requested region of the source.
    /// Throws if the rectangle falls outside the bitmap bounds.
    /// </summary>
    public Bitmap CropBitmap(Bitmap bitmap, int cropX, int cropY, int cropWidth, int cropHeight)
    {
        Rectangle rect = new Rectangle(cropX, cropY, cropWidth, cropHeight);
        Bitmap cropped = bitmap.Clone(rect, bitmap.PixelFormat);
        return cropped;
    }

    /// <summary>
    /// Creates the upload folder if it does not already exist.
    /// </summary>
    public void CreateDirectory()
    {
        string physicalPath = Server.MapPath(directory);
        if (!System.IO.Directory.Exists(physicalPath))
        {
            try
            {
                Directory.CreateDirectory(physicalPath);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex);
            }
        }
    }

    /// <summary>
    /// Deletes every file in the upload folder. The GC.Collect hack from the
    /// original is gone: source bitmaps are now disposed deterministically,
    /// so the files are no longer locked when this runs.
    /// </summary>
    public void ClearDirectory()
    {
        DirectoryInfo dir = new DirectoryInfo(Server.MapPath(directory));
        try
        {
            foreach (FileInfo fi in dir.GetFiles())
            {
                fi.Delete();
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine(ex);
        }
    }
}

UI Helper:

/// <summary>
/// UI helper functions
/// </summary>
/// <summary>
/// UI helper functions
/// </summary>
internal static class UIHelper
{
    #region Methods

    /// <summary>
    /// Calculate the rendering face rectangle
    /// </summary>
    /// <param name="faces">Detected face from service</param>
    /// <param name="maxSize">Image rendering size</param>
    /// <param name="imageInfo">Image width and height</param>
    /// <returns>Face structure for rendering; empty when the image size is unknown</returns>
    public static IEnumerable<vmFace> CalculateFaceRectangleForRendering(IEnumerable<Microsoft.ProjectOxford.Face.Contract.Face> faces, int maxSize, Tuple<int, int> imageInfo)
    {
        var imageWidth = imageInfo.Item1;
        var imageHeight = imageInfo.Item2;

        // GetImageInfoForRendering returns (0, 0) on failure; the original then
        // divided by zero, producing Infinity/NaN scales and garbage rectangles.
        if (imageWidth <= 0 || imageHeight <= 0)
        {
            yield break;
        }

        // The image is rendered at maxSize pixels wide; scale every rectangle
        // from source-pixel coordinates to rendered coordinates.
        // (The original also computed an unused uiHeight — removed.)
        float scale = (float)maxSize / imageWidth;

        foreach (var face in faces)
        {
            yield return new vmFace()
            {
                FaceId = face.FaceId.ToString(),
                Left = (int)(face.FaceRectangle.Left * scale),
                Top = (int)(face.FaceRectangle.Top * scale),
                Height = (int)(face.FaceRectangle.Height * scale),
                Width = (int)(face.FaceRectangle.Width * scale),
            };
        }
    }

    /// <summary>
    /// Get image basic information for further rendering usage
    /// </summary>
    /// <param name="imageFilePath">Path to the image file</param>
    /// <returns>Image width and height, or (0, 0) when the file cannot be decoded</returns>
    public static Tuple<int, int> GetImageInfoForRendering(string imageFilePath)
    {
        try
        {
            using (var s = File.OpenRead(imageFilePath))
            {
                // Decode only the header frame; we never need the pixel data here.
                JpegBitmapDecoder decoder = new JpegBitmapDecoder(s, BitmapCreateOptions.None, BitmapCacheOption.None);
                var frame = decoder.Frames.First();

                // Store image width and height for following rendering
                return new Tuple<int, int>(frame.PixelWidth, frame.PixelHeight);
            }
        }
        catch
        {
            // Best-effort: callers treat (0, 0) as "size unknown".
            return new Tuple<int, int>(0, 0);
        }
    }
    #endregion Methods
}

MVC View:

@* Face Detection view: lets the user upload an image and renders the
   detected faces via the AngularJS controller "faceDetectionCtrl". *@
@{
    ViewBag.Title = "Face Detection";
}

<div ng-controller="faceDetectionCtrl">

    <h3>{{Title}}</h3>
    @* Upload progress indicator *@
    <div class="loadmore">
        <div ng-show="loaderMoreupl" ng-class="result">
            <img src="~/Content/ng-loader.gif" /> {{uplMessage}}
        </div>
    </div>
    <div class="clearfix"></div>
    <table style="width:100%">
        <tr>
            <th><h4>Select Query Face</h4></th>
            <th><h4>Detection Result</h4></th>
        </tr>
        <tr>
            <td style="width:60%" valign="top">
                @* File picker: selecting a file immediately uploads it via the scope method *@
                <form novalidate name="f1">
                    <input type="file" name="file" accept="image/*" onchange="angular.element(this).scope().selectCandidateFileforUpload(this.files)" required />
                </form>
                <div class="clearfix"></div>
                @* Detection progress indicator *@
                <div class="loadmore">
                    <div ng-show="loaderMore" ng-class="result">
                        <img src="~/Content/ng-loader.gif" /> {{faceMessage}}
                    </div>
                </div>
                <div class="clearfix"></div>
                @* The query image and face-rectangle overlays are injected here by the controller *@
                <div class="facePreview_thumb_big" id="faceCanvas"></div>
            </td>
            <td style="width:40%" valign="top">
                <p>{{DetectedResultsMessage}}</p>
                <hr />
                <div class="clearfix"></div>
                @* One thumbnail + attribute list per detected face *@
                <div class="facePreview_thumb_small">
                    <div ng-repeat="item in DetectedFaces" class="col-sm-12">
                        <div class="col-sm-3">
                            <img ng-src="{{item.FilePath}}" width="100" />
                        </div>
                        <div class="col-sm-8">
                            <ul>
                                @*<li>FaceId: {{item.FaceId}}</li>*@
                                <li>Age: {{item.Age}}</li>
                                <li>Gender: {{item.Gender}}</li>
                                <li>{{item.IsSmiling}}</li>
                                <li>{{item.Glasses}}</li>
                            </ul>
                        </div>
                        <div class="clearfix"></div>
                    </div>
                </div>
                <div ng-hide="DetectedFaces.length">No face detected!!</div>
            </td>
        </tr>
    </table>
</div>

@section NgScript{
    <script src="~/ScriptsNg/FaceDetectionCtrl.js"></script>
}

Angular Controller:

// AngularJS module for the face-detection page: uploads the selected image,
// asks the server for detection results, and draws rectangles over the faces.
angular.module('myFaceApp', [])
.controller('faceDetectionCtrl', function ($scope, FileUploadService) {

    $scope.Title = 'Microsoft FaceAPI - Face Detection';
    $scope.DetectedResultsMessage = 'No result found!!';
    $scope.SelectedFileForUpload = null;
    $scope.UploadedFiles = [];
    $scope.SimilarFace = [];
    $scope.FaceRectangles = [];
    $scope.DetectedFaces = [];

    // File select & save
    $scope.selectCandidateFileforUpload = function (file) {
        $scope.SelectedFileForUpload = file;
        $scope.loaderMoreupl = true;
        $scope.uplMessage = 'Uploading, please wait....!';
        $scope.result = "color-red";

        // Save file
        var uploaderUrl = "/FaceDetection/SaveCandidateFiles";
        var fileSave = FileUploadService.UploadFile($scope.SelectedFileForUpload, uploaderUrl);
        fileSave.then(function (response) {
            // BUGFIX: always stop the spinner and show the server message —
            // the original left the spinner running when Status was false.
            $scope.loaderMoreupl = false;
            $scope.uplMessage = response.data.Message;
            if (response.data.Status) {
                $scope.GetDetectedFaces();
                // Clear the file input so the same file can be re-selected.
                angular.forEach(angular.element("input[type='file']"), function (inputElem) {
                    angular.element(inputElem).val(null);
                });
                $scope.f1.$setPristine();
            }
        },
        function (error) {
            // BUGFIX: stop the spinner on HTTP failure as well.
            $scope.loaderMoreupl = false;
            console.warn("Error: " + error);
        });
    }

    // Get detected faces
    $scope.GetDetectedFaces = function () {
        $scope.loaderMore = true;
        $scope.faceMessage = 'Preparing, detecting faces, please wait....!';
        $scope.result = "color-red";

        var fileUrl = "/FaceDetection/GetDetectedFaces";
        var fileView = FileUploadService.GetUploadedFile(fileUrl);
        fileView.then(function (response) {
            $scope.QueryFace = response.data.QueryFaceImage;
            $scope.DetectedResultsMessage = response.data.DetectedResults;
            $scope.DetectedFaces = response.data.FaceInfo;
            $scope.FaceRectangles = response.data.FaceRectangles;
            $scope.loaderMore = false;

            // Reset previously rendered image and rectangles.
            $('#faceCanvas_img').remove();
            $('.divRectangle_box').remove();

            // Container the image and rectangles are appended to.
            var canvas = document.getElementById('faceCanvas');

            // Add the query image, scaled to the server-configured width.
            var elemImg = document.createElement("img");
            elemImg.setAttribute("src", $scope.QueryFace);
            elemImg.setAttribute("width", response.data.MaxImageSize);
            elemImg.id = 'faceCanvas_img';
            canvas.append(elemImg);

            // Draw one absolutely-positioned div per face rectangle.
            angular.forEach($scope.FaceRectangles, function (imgs, i) {
                var divRectangle = document.createElement('div');
                var width = imgs.Width;
                var height = imgs.Height;
                var top = imgs.Top;
                var left = imgs.Left;

                // Style the rectangle overlay.
                divRectangle.className = 'divRectangle_box';
                divRectangle.style.width = width + 'px';
                divRectangle.style.height = height + 'px';
                divRectangle.style.position = 'absolute';
                divRectangle.style.top = top + 'px';
                divRectangle.style.left = left + 'px';
                divRectangle.style.zIndex = '999';
                divRectangle.style.border = '1px solid #fff';
                divRectangle.style.margin = '0';
                divRectangle.id = 'divRectangle_' + (i + 1);

                canvas.append(divRectangle);
            });
        },
        function (error) {
            // BUGFIX: stop the detection spinner on HTTP failure.
            $scope.loaderMore = false;
            console.warn("Error: " + error);
        });
    };
})
.factory('FileUploadService', function ($http, $q) {
    // Thin wrapper around $http for multipart upload and JSON retrieval.
    var fact = {};
    fact.UploadFile = function (files, uploaderUrl) {
        var formData = new FormData();
        angular.forEach(files, function (f, i) {
            formData.append("file", files[i]);
        });
        var request = $http({
            method: "post",
            url: uploaderUrl,
            data: formData,
            withCredentials: true,
            // Let the browser set the multipart boundary itself.
            headers: { 'Content-Type': undefined },
            transformRequest: angular.identity
        });
        return request;
    }
    fact.GetUploadedFile = function (fileUrl) {
        return $http.get(fileUrl);
    }
    return fact;
})

Upload images to detect faces:

Browse Image from local folder to upload and detect faces.

Mark faces in the image:

Detected faces will be marked with white rectangles.

List detected faces with face information:

List and Separate the faces with detailed face information.

Summary:

You have just seen how to call the Face API to detect faces in an image. Hopefully this will help you make your applications smarter and more intelligent 🙂.

References:

Leave a Reply

Your email address will not be published. Required fields are marked *