I've followed along with this excellent tutorial from Yuma and have something working. Below is the code that I'm using in my view controller.
import UIKit
import SceneKit
import ARKit
import AVFoundation
import SpriteKit
/// Tracks a reference image with ARKit and, when the image is detected,
/// re-parents the "amy" container node onto the image anchor and maps an
/// MP4 video onto the container's "video" child node via a SpriteKit scene.
class ViewController: UIViewController, ARSCNViewDelegate {

    @IBOutlet var sceneView: ARSCNView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Set the view's delegate so renderer(_:didAdd:for:) fires when
        // ARKit detects one of the tracked images.
        sceneView.delegate = self

        // Show statistics such as fps and timing information.
        sceneView.showsStatistics = true

        // Load the bundled scene containing the "amy" container node.
        // Force-unwrap is acceptable for a bundled asset: a missing file is
        // a programmer error that should surface immediately in development.
        let scene = SCNScene(named: "art.scnassets/notebook.scn")!

        // Set the scene to the view.
        sceneView.scene = scene
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Configure image tracking using the reference images defined in
        // the "AR Resources" asset-catalog group.
        let configuration = ARImageTrackingConfiguration()
        guard let arImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            // Log instead of failing silently: a renamed/missing resource
            // group would otherwise leave the session unconfigured with no clue.
            print("ARImageTracking: could not load reference images from group 'AR Resources'")
            return
        }
        configuration.trackingImages = arImages

        // Run the view's session.
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the session so the camera stops while the view is offscreen.
        sceneView.session.pause()
    }

    /// Called when SceneKit adds a node for a newly detected anchor.
    /// NOTE(review): ARSCNViewDelegate callbacks arrive on the SceneKit
    /// rendering queue, not the main thread — keep UIKit work out of here.
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        guard anchor is ARImageAnchor else { return }

        // Amy image recognition. The container only lives at the root until
        // the first detection; after re-parenting, this guard returns early.
        guard let amyContainer = sceneView.scene.rootNode.childNode(withName: "amy", recursively: false) else { return }
        amyContainer.removeFromParentNode()
        node.addChildNode(amyContainer)
        amyContainer.isHidden = false

        // Video: guard instead of force-unwrapping so a missing/renamed
        // file logs a diagnostic rather than crashing.
        guard let videoURL = Bundle.main.url(forResource: "video", withExtension: "mp4") else {
            print("Video: 'video.mp4' not found in the main bundle")
            return
        }
        let videoPlayer = AVPlayer(url: videoURL)

        // Surface playback/decode failures. An MP4 encoded with a codec or
        // profile the device cannot decode renders as a blank white plane
        // with no error on screen; AVFoundation reports the failure
        // asynchronously on the player item, so observe it and log.
        NotificationCenter.default.addObserver(
            forName: .AVPlayerItemFailedToPlayToEndTime,
            object: videoPlayer.currentItem,
            queue: .main
        ) { notification in
            let error = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? Error
            print("Video playback failed: \(error?.localizedDescription ?? "unknown error")")
        }

        let videoScene = SKScene(size: CGSize(width: 900.0, height: 1400.0))
        let videoNode = SKVideoNode(avPlayer: videoPlayer)
        videoNode.position = CGPoint(x: videoScene.size.width / 2, y: videoScene.size.height / 2)
        videoNode.size = videoScene.size
        // SpriteKit's y-axis is inverted relative to the material's
        // texture space, so flip the node vertically.
        videoNode.yScale = -1
        videoNode.play()
        videoScene.addChild(videoNode)

        guard let video = amyContainer.childNode(withName: "video", recursively: true) else {
            // Log instead of failing silently so a renamed node in the
            // .scn file is easy to spot.
            print("Video: no child node named 'video' under the 'amy' container")
            return
        }
        video.geometry?.firstMaterial?.diffuse.contents = videoScene
    }
}
When I use the video that was supplied as part of the tutorial, everything works perfectly. But when I try to include a video that I've created, I only get a white box over the target image and no video.
I've triple-checked the file name, and I've tried exporting the video with the exact same dimensions, all with no luck. As a test I uploaded a copy of the working video under a different file name, updated the code to match, and it played — so I have to assume the problem is with the .mp4 I'm producing.
Are there particular rules or encoding settings required for MP4 files used this way — for example, a setting in Premiere Pro or HandBrake I should use? And how can I surface any internal playback errors through Xcode itself?
JavaScript questions and answers, JavaScript questions pdf, JavaScript question bank, JavaScript questions and answers pdf, mcq on JavaScript pdf, JavaScript questions and solutions, JavaScript mcq Test , Interview JavaScript questions, JavaScript Questions for Interview, JavaScript MCQ (Multiple Choice Questions)