//
//  VideoViewController.swift
//  extract_facial_expression
//
//  Created by Jerry kim on 2020/06/11.
//  Copyright © 2020 hoya. All rights reserved.
//

import UIKit
import AVFoundation
import ARKit

class VideoViewController: UIViewController, ARSCNViewDelegate {
    let videoPlayerView = UIView()
    var player: AVPlayer?
    
    /// One detectable expression and the number of frames it has been seen in.
    struct Emotion {
        let expression: String
        let count: Int
    }
    
    @IBOutlet var sceneView: ARSCNView!
    @IBOutlet weak var expressionLabel: UILabel!
    @IBOutlet weak var expression1: UILabel!    // smile
    @IBOutlet weak var expression2: UILabel!    // dumbfounded
    @IBOutlet weak var expression3: UILabel!    // surprise
    @IBOutlet weak var expression4: UILabel!    // nyah
    @IBOutlet weak var expression5: UILabel!    // awful
    @IBOutlet weak var expression6: UILabel!    // eye smile
    
    @IBOutlet weak var resultLabel: UILabel!    // result label
    
    var faceExpression = ""
    var count1 = 0  // smile
    var count2 = 0  // dumbfounded
    var count3 = 0  // surprise
    var count4 = 0  // nyah
    var count5 = 0  // awful
    var count6 = 0  // eye smile
    
    // Sample Videos
    let videos = ["http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerEscapes.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerFun.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/SubaruOutbackOnStreetAndDirt.mp4",
    "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/TearsOfSteel.mp4"]
    
    override func viewDidLoad() {
        super.viewDidLoad()
        videoPlayerView.backgroundColor = UIColor.white
        videoPlayerView.translatesAutoresizingMaskIntoConstraints = false
        // Pin the player view to the leading/trailing edges and the top of the
        // safe area, at 30% of the screen height.
        
        view.addSubview(videoPlayerView)
        
        videoPlayerView.leadingAnchor.constraint(equalTo: view.leadingAnchor).isActive = true
        videoPlayerView.trailingAnchor.constraint(equalTo: view.trailingAnchor).isActive = true
        videoPlayerView.heightAnchor.constraint(equalTo: view.heightAnchor, multiplier: 0.3).isActive = true
        videoPlayerView.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor).isActive = true
        
        view.sendSubviewToBack(videoPlayerView)
        
        // for ARSCNView
        guard ARFaceTrackingConfiguration.isSupported else {
            fatalError("Face tracking not available on this on this device model!")
        }
        sceneView.delegate = self
        sceneView.showsStatistics = true
        
    }
    
    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        // Lock to landscape-left for video playback
        return .landscapeLeft
    }
    
    func setupVideoPlayer() {   // Play a random sample video
        guard let urlString = videos.randomElement(), let url = URL(string: urlString) else {
            return
        }
        
        player = AVPlayer(url: url)
        
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = videoPlayerView.bounds
        videoPlayerView.layer.addSublayer(playerLayer)
        player?.play()
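        
        // Hypothetical addition (not in the original): loop the clip so face
        // tracking keeps collecting counts after the video ends. The observer
        // token is deliberately not retained in this sketch.
        NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime,
                                               object: player?.currentItem,
                                               queue: .main) { [weak self] _ in
            self?.player?.seek(to: .zero)
            self?.player?.play()
        }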
    }
    
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        
        // Create a session configuration
        let configuration = ARFaceTrackingConfiguration()

        // Run the view's session
        sceneView.session.run(configuration)
    }
    
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        
        // Pause the view's session
        sceneView.session.pause()
    }
    
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setupVideoPlayer()
    }
    
    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        // Draw the tracked face as a wireframe mesh.
        guard let device = sceneView.device,
              let faceMesh = ARSCNFaceGeometry(device: device) else { return nil }
        let node = SCNNode(geometry: faceMesh)
        node.geometry?.firstMaterial?.fillMode = .lines
        return node
    }
    
    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
            faceGeometry.update(from: faceAnchor.geometry)
            facePoseAnalyzer(anchor: faceAnchor)
            
            // UI must be touched on the main thread; this callback arrives on
            // the renderer's queue.
            DispatchQueue.main.async {
                self.expressionLabel.text = self.faceExpression
                self.expression1.text = String(self.count1)
                self.expression2.text = String(self.count2)
                self.expression3.text = String(self.count3)
                self.expression4.text = String(self.count4)
                self.expression5.text = String(self.count5)
                self.expression6.text = String(self.count6)
                self.extractMost()
            }
            
        }
    }
    
    func extractMost() {
        // Pick the expression detected in the most frames so far.
        let tallies = [
            Emotion(expression: "smile", count: self.count1),
            Emotion(expression: "dumbfounded", count: self.count2),
            Emotion(expression: "surprise", count: self.count3),
            Emotion(expression: "nyah", count: self.count4),
            Emotion(expression: "awful", count: self.count5),
            Emotion(expression: "eye smile", count: self.count6)
        ]
        let mostFrequent = tallies.max(by: { $0.count < $1.count })
        self.resultLabel.text = mostFrequent?.expression
    }
    
    func facePoseAnalyzer(anchor: ARFaceAnchor) {
        // Called once per updated frame on the render thread, so each count
        // ticks up for every frame the expression is held.
        let smileLeft = anchor.blendShapes[.mouthSmileLeft]
        let smileRight = anchor.blendShapes[.mouthSmileRight]
        let innerUp = anchor.blendShapes[.browInnerUp]
        let tongue = anchor.blendShapes[.tongueOut]
        let eyeBlinkLeft = anchor.blendShapes[.eyeBlinkLeft]
        let jawOpen = anchor.blendShapes[.jawOpen]
        
        let mouthFrownLeft = anchor.blendShapes[.mouthFrownLeft]
        let mouthFrownRight = anchor.blendShapes[.mouthFrownRight]
        
        var newFaceExpression = ""
    
        if ((smileLeft?.decimalValue ?? 0.0) + (smileRight?.decimalValue ?? 0.0)) > 0.9 {
            newFaceExpression = "😀"
            self.count1 = self.count1 + 1
        }
        
        if ((jawOpen?.decimalValue ?? 0.0) + (innerUp?.decimalValue ?? 0.0)) > 0.85 {
            newFaceExpression = "😧"
            self.count2 = self.count2 + 1
        }
        
     
        if innerUp?.decimalValue ?? 0.0 > 0.8 {
            newFaceExpression = "😳"
            self.count3 = self.count3 + 1
        }
        
        if tongue?.decimalValue ?? 0.0 > 0.08 {
            newFaceExpression = "😛"
            self.count4 = self.count4 + 1
        }
        
        if (mouthFrownLeft?.decimalValue ?? 0.0) > 0.3 || (mouthFrownRight?.decimalValue ?? 0.0) > 0.3 {
            newFaceExpression = "🤢"
            self.count5 = self.count5 + 1
        }
        
        
        if eyeBlinkLeft?.decimalValue ?? 0.0 > 0.5 {
            newFaceExpression = "😊"
            self.count6 = self.count6 + 1
        }
        
        if self.faceExpression != newFaceExpression {
            self.faceExpression = newFaceExpression
        }
        //        if cheekPuff?.decimalValue ?? 0.0 > 0.5 {
        //            newFaceExpression = "🤢"
        //            self.count5 = self.count5 + 1
        //        }

    }
    
    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user
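        // A minimal sketch (not in the original): surface the failure in an alert.
        // ARSession callbacks can arrive off the main thread, so hop to main first.
        DispatchQueue.main.async {
            let alert = UIAlertController(title: "AR Session Failed",
                                          message: error.localizedDescription,
                                          preferredStyle: .alert)
            alert.addAction(UIAlertAction(title: "OK", style: .default))
            self.present(alert, animated: true)
        }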
        
    }
    
    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay
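        // A minimal sketch (not in the original): reuse the on-screen expression
        // label as a lightweight interruption notice instead of a dedicated overlay.
        DispatchQueue.main.async {
            self.expressionLabel.text = "Session interrupted"
        }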
        
    }
    
    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required
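        // A minimal sketch (not in the original): restart tracking from scratch,
        // dropping the stale face anchor so expression counts stay consistent.
        sceneView.session.run(ARFaceTrackingConfiguration(),
                              options: [.resetTracking, .removeExistingAnchors])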
        
    }
}