1

I have been working on an ARKit app and I achieved my goal of detecting the pictures in the scene and playback video in the scene. The problem occurred is when I tried to fetch the image from the internet. * The image got detected and started playback (I was able to hear the audio) but never showed any video on the scene.(I reverted the code below to from where I started) * What I actually want is to update the reference images and the videos of the playback on the go when my app is in the App Store.

KINDLY TELL THE BEST SOLUTION... THANKS

Below is my complete code: import UIKit import SceneKit import ARKit import Alamofire import AlamofireImage

class ViewController: UIViewController, ARSCNViewDelegate {

@IBOutlet var sceneView: ARSCNView!

var imageServer = [UIImage]()

var trackedImages = Set<ARReferenceImage>()

let configuration = ARImageTrackingConfiguration()

let videoNode = SKVideoNode(url: URL(fileURLWithPath: "https://example.com/video1.mp4"))

override func viewDidLoad() {
    super.viewDidLoad()

    sceneView.delegate = self
    sceneView.showsStatistics = true

}


override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)

            fetchImage {
                self.configuration.trackingImages = self.trackedImages
                self.configuration.maximumNumberOfTrackedImages = 1
                self.sceneView.session.run(self.configuration)
            }

}

override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)

    sceneView.session.pause()
}


func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {

    let node = SCNNode()

    if let imageAnchor = anchor as? ARImageAnchor {

        videoNode.play()

        let videoScene = SKScene(size: CGSize(width: 480, height: 360))


        videoNode.position = CGPoint(x: videoScene.size.width / 2, y: videoScene.size.height / 2)

        videoNode.yScale = -1.0

        videoScene.addChild(videoNode)


        let plane = SCNPlane(width: imageAnchor.referenceImage.physicalSize.width, height: imageAnchor.referenceImage.physicalSize.height)

        plane.firstMaterial?.diffuse.contents = videoScene

        let planeNode = SCNNode(geometry: plane)

        planeNode.eulerAngles.x = -.pi / 2

        node.addChildNode(planeNode)

    }

    return node

}

func fetchImage(completion: @escaping ()->()) {
        Alamofire.request("https://example.com/four.png").responseImage { response in
            debugPrint(response)

            print(response.request as Any)
            print(response.response as Any)
            debugPrint(response.result)

            if let image = response.result.value {
                print("image downloaded: \(image)")

                self.imageServer.append(image)

                print("ImageServer append Successful")
                print("The new number of images = \(self.imageServer.count)")

            }
            completion()
        }
}


}
J Singh
  • 59
  • 11
  • https://stackoverflow.com/questions/49300249/add-image-to-ar-resources-on-the-fly-for-image-recognition ? – Prashant Tukadiya Oct 23 '18 at 04:42
  • Hello @PrashantTukadiya I have tried this approach but it failed for me and I was not able to find the reason that’s why I uploaded my code so that I could find the problem. It would be great if you can help out. Thanks a lot brother – J Singh Oct 23 '18 at 04:48

0 Answers0