Create a virtual background with ARKit

// AR scene view that renders the camera feed and SceneKit content (wired up in the storyboard).
@IBOutlet var sceneView: ARSCNView!
// Node that will carry the virtual-background plane; its geometry/material are assigned later.
var virtualBackgroundNode = SCNNode()

Keep only the people in front of the camera visible (People Occlusion)

// Run face tracking and, where the hardware supports it, enable person
// segmentation so real people are rendered in front of virtual content.
let configuration = ARFaceTrackingConfiguration()
let supportsPersonSegmentation = ARFaceTrackingConfiguration.supportsFrameSemantics(.personSegmentation)
if supportsPersonSegmentation {
    configuration.frameSemantics.insert(.personSegmentation)
} else {
    // Tell the user that People Occlusion is unavailable on this device.
    presentAlert(NSLocalizedString("You can not use People Occlusion in this device.", comment: ""))
}
sceneView.session.run(configuration, options: [])

Put a virtual background (SCNNode)

Create a virtual background

// Adjusted so that the aspect ratio of the image is not distorted.
let width = uiImage.size.width
let height = uiImage.size.height
let mediaAspectRatio = Double(width / height)

// Re-wrap the CGImage with a fixed .up orientation so the SpriteKit texture
// ignores the photo's EXIF orientation. Guard instead of force-unwrapping:
// uiImage.cgImage is nil for CIImage-backed images.
guard let cgImage = uiImage.cgImage else { return }
let newUiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .up)

// SpriteKit scene sized to preserve the image's aspect ratio (height fixed at 1000).
let skScene = SKScene(size: CGSize(width: 1000 * mediaAspectRatio, height: 1000))
let texture = SKTexture(image: newUiImage)
let skNode = SKSpriteNode(texture: texture)
// Center the sprite and stretch it to fill the scene.
skNode.position = CGPoint(x: skScene.size.width / 2.0, y: skScene.size.height / 2.0)
skNode.size = skScene.size
// Flip vertically: SpriteKit's y-axis points up, the texture's points down.
skNode.yScale = -1.0
skScene.addChild(skNode)
// Build a video-backed virtual background, adjusted so that the aspect
// ratio of the video is not distorted.
let avPlayer = AVPlayer(url: videoUrl)

// Read the video track's natural size and preferred transform so we can keep
// the aspect ratio and compensate for the recording orientation.
// (Original referenced an undefined `url`; the snippet's input is `videoUrl`.)
guard let track = AVURLAsset(url: videoUrl).tracks(withMediaType: .video).first else { return }
let size = track.naturalSize.applying(track.preferredTransform)
// resolution.0: absolute pixel size; resolution.1: orientation transform.
let resolution = (CGSize(width: abs(size.width), height: abs(size.height)), track.preferredTransform)
let width = resolution.0.width
let height = resolution.0.height
let mediaAspectRatio = Double(width / height)

// Keep the player alive at the end of the item so didPlayToEnd can loop it.
avPlayer.actionAtItemEnd = .none
NotificationCenter.default.addObserver(self,
                                       selector: #selector(ViewController.didPlayToEnd),
                                       // Typed constant instead of the stringly-typed name.
                                       name: .AVPlayerItemDidPlayToEndTime,
                                       object: avPlayer.currentItem)

// SpriteKit scene sized to preserve the video's aspect ratio (height fixed at 1000).
let skScene = SKScene(size: CGSize(width: 1000 * mediaAspectRatio, height: 1000))
// preferredTransform.b != 0 → the clip was recorded rotated 90°;
// a != 1 (with b == 0) → rotated 180°.
if resolution.1.b != 0 {
    skScene.size = CGSize(width: 1000, height: 1000)
    skScene.zRotation = .pi / 2
} else if resolution.1.a != 1.0 {
    skScene.zRotation = .pi
}

let skNode = SKVideoNode(avPlayer: avPlayer)
// Center the video node and stretch it to fill the scene.
skNode.position = CGPoint(x: skScene.size.width / 2.0, y: skScene.size.height / 2.0)
skNode.size = skScene.size
// Flip vertically to match SpriteKit's coordinate system.
skNode.yScale = -1.0
if resolution.1.b != 0 {
    skNode.zRotation = .pi / 2
} else if resolution.1.a != 1.0 {
    skNode.zRotation = .pi
}
skNode.play()
skScene.addChild(skNode)

// Unit plane textured with the SpriteKit scene; the aspect ratio is applied
// via the node's scale below. (Original passed a CGSize to SCNPlane, whose
// width/height parameters are CGFloats.)
virtualBackgroundNode.geometry = SCNPlane(width: 1.0, height: 1.0)
let material = SCNMaterial()
material.diffuse.contents = skScene
virtualBackgroundNode.geometry?.materials = [material]
virtualBackgroundNode.scale = SCNVector3(1.7 * mediaAspectRatio, 1.7, 1)
// Original added an undefined `node`; the node built here is virtualBackgroundNode.
sceneView.scene.rootNode.addChildNode(virtualBackgroundNode)

Put a virtual background in front of the camera

// Place the virtual background 10 units in front of the camera.
// NOTE(review): the original read pointOfView?.scale and force-unwrapped it;
// the camera's *position* is what the offset below needs.
guard let cameraPosition = sceneView.pointOfView?.position else { return }
let position = SCNVector3(cameraPosition.x, cameraPosition.y, cameraPosition.z - 10)
virtualBackgroundNode.position = position

Fix the position of the virtual background

--

--

Get the Medium app

A button that says 'Download on the App Store', and if clicked it will lead you to the iOS App store
A button that says 'Get it on, Google Play', and if clicked it will lead you to the Google Play store