I created a Hand Pose model using Create ML and integrated it into my SwiftUI app.
While coding, I referred to the Apple Developer documentation app for the necessary code.
However, when I ran the app on an iPhone 14, the camera didn’t display any effects or finger numbers as expected.
Here is my code, using ARKit and Core ML:
import SwiftUI
import ARKit
import SceneKit
import Vision
import CoreML

struct ARViewContainer: UIViewControllerRepresentable {
    let arViewController: ARViewController
    let model: modelHand

    func makeUIViewController(context: UIViewControllerRepresentableContext<ARViewContainer>) -> ARViewController {
        arViewController.model = model
        return arViewController
    }

    func updateUIViewController(_ uiViewController: ARViewController, context: UIViewControllerRepresentableContext<ARViewContainer>) {
    }
}

class ARViewController: UIViewController, ARSessionDelegate {
    var frameCounter = 0
    let handPosePredictionInterval = 10
    var model: modelHand!
    var effectNode: SCNNode?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Add the AR view and start world tracking
        let arView = ARSCNView(frame: view.bounds)
        view.addSubview(arView)
        let session = ARSession()
        session.delegate = self
        let configuration = ARWorldTrackingConfiguration()
        configuration.frameSemantics = .personSegmentationWithDepth
        arView.session.run(configuration)
    }

    // Runs a Vision hand pose request on each camera frame and feeds the keypoints to the Core ML model
    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        let pixelBuffer = frame.capturedImage
        let handPoseRequest = VNDetectHumanHandPoseRequest()
        handPoseRequest.maximumHandCount = 1
        handPoseRequest.revision = VNDetectHumanHandPoseRequestRevision1

        let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
        do {
            try handler.perform([handPoseRequest])
        } catch {
            assertionFailure("Hand Pose Request failed: \(error)")
        }

        guard let handPoses = handPoseRequest.results, !handPoses.isEmpty else {
            return
        }

        // Only run the classifier every handPosePredictionInterval frames
        frameCounter += 1
        if frameCounter % handPosePredictionInterval == 0 {
            if let handObservation = handPoses.first as? VNHumanHandPoseObservation {
                do {
                    let keypointsMultiArray = try handObservation.keypointsMultiArray()
                    let handPosePrediction = try model.prediction(poses: keypointsMultiArray)
                    let confidence = handPosePrediction.labelProbabilities[handPosePrediction.label]!
                    print("Confidence: \(confidence)")
                    if confidence > 0.9 {
                        print("Rendering hand pose effect: \(handPosePrediction.label)")
                        renderHandPoseEffect(name: handPosePrediction.label)
                    }
                } catch {
                    fatalError("Failed to perform hand pose prediction: \(error)")
                }
            }
        }
    }

    // Adds or removes the particle effect depending on the predicted pose label
    func renderHandPoseEffect(name: String) {
        switch name {
        case "One":
            print("Rendering effect for One")
            if effectNode == nil {
                effectNode = addParticleNode(for: "One")
            }
        default:
            print("Removing all particle nodes")
            removeAllParticleNode()
        }
    }

    func removeAllParticleNode() {
        effectNode?.removeFromParentNode()
        effectNode = nil
    }

    func addParticleNode(for poseName: String) -> SCNNode {
        print("Adding particle node for pose: \(poseName)")
        let particleNode = SCNNode()
        return particleNode
    }
}

struct ContentView: View {
    let model = modelHand()

    var body: some View {
        ARViewContainer(arViewController: ARViewController(), model: model)
    }
}
Add some constraints; also, the controller should be created in the make function.
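Roughly what that suggestion looks like (a minimal sketch, not tested against the original project; storing the ARSCNView in an arView property and the exact layout are assumptions, and modelHand is the generated Create ML class from the post):

import SwiftUI
import ARKit

struct ARViewContainer: UIViewControllerRepresentable {
    let model: modelHand

    func makeUIViewController(context: Context) -> ARViewController {
        // Build the controller here instead of injecting one from ContentView
        let controller = ARViewController()
        controller.model = model
        return controller
    }

    func updateUIViewController(_ uiViewController: ARViewController, context: Context) {}
}

class ARViewController: UIViewController {
    var model: modelHand!
    let arView = ARSCNView()

    override func viewDidLoad() {
        super.viewDidLoad()
        view.addSubview(arView)

        // Pin the ARSCNView to the edges of the controller's view with Auto Layout
        arView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            arView.topAnchor.constraint(equalTo: view.topAnchor),
            arView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
            arView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            arView.trailingAnchor.constraint(equalTo: view.trailingAnchor)
        ])

        arView.session.run(ARWorldTrackingConfiguration())
    }
}

With the controller built inside makeUIViewController, ContentView only needs to pass the model: ARViewContainer(model: model).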
I added the constraints, and the controller is now created in the make func; unfortunately, nothing happens on the camera:

func makeUIViewController(context: UIViewControllerRepresentableContext<ARViewContainer>) -> ARViewController {
    let controller = ARViewController()
    controller.model = model
    arViewController.arView = controller.arView
    return controller
}
Mmmm, that looks a little off…
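Two things stand out in that snippet. The line arViewController.arView = controller.arView still writes into the old injected arViewController, which the suggestion above was meant to remove, and the ARViewController shown in the question doesn't declare an arView property at all. Separately, and this is only a guess at the root cause, the posted viewDidLoad sets the delegate on a new ARSession() that is never started, while arView.session.run(configuration) runs the view's own session whose delegate is never set, so session(_:didUpdate:) would never be called and no frames would reach the model. A minimal sketch of that part of viewDidLoad, assuming arView is stored as a property on the controller as in the sketch above:

override func viewDidLoad() {
    super.viewDidLoad()
    arView.frame = view.bounds
    view.addSubview(arView)

    // Set the delegate on the session that actually runs, i.e. the view's own session,
    // rather than on a separate ARSession() that is never started
    arView.session.delegate = self

    let configuration = ARWorldTrackingConfiguration()
    configuration.frameSemantics = .personSegmentationWithDepth
    arView.session.run(configuration)
}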