Uses ARKit and RealityKit to visualize and arrange virtual furniture in real rooms (Swift)
```swift
import ARKit
import RealityKit
import Combine
import UIKit

class ARFurnitureViewController: UIViewController, ARSessionDelegate {

    // MARK: - UI Elements
    @IBOutlet var arView: ARView!
    @IBOutlet weak var statusLabel: UILabel!
    @IBOutlet weak var addButton: UIButton!

    // MARK: - AR Variables
    let coachingOverlay = ARCoachingOverlayView()
    var virtualObjectAnchor: AnchorEntity?
    var virtualObject: ModelEntity?       // The placed virtual furniture object
    var focusEntity: FocusEntity?         // Visual cue for surface detection/placement
    var dragStartPosition: SIMD3<Float>?
    private var gesturesInstalled = false // Guards against installing duplicate recognizers

    // MARK: - Model Loading
    let availableModels: [String] = ["chair_swan", "table_ikea", "vase_01"] // Example models

    // MARK: - Combine Variables
    private var cancellables: Set<AnyCancellable> = []

    // MARK: - Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()
        // Set the session's delegate so we receive per-frame updates and error callbacks.
        arView.session.delegate = self
        // Add the coaching overlay.
        setupCoachingOverlay()
        // Set up the focus entity (helps visual alignment).
        focusEntity = FocusEntity(on: arView, style: .classic(color: .yellow))
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // (Re)start the AR session whenever the view appears; it is paused on disappear.
        configureARKitSession()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the AR session when the view disappears.
        arView.session.pause()
    }

    // MARK: - ARKit Configuration
    func configureARKitSession() {
        // Disable ARView's automatic setup *before* running, so our configuration sticks.
        arView.automaticallyConfigureSession = false
        arView.debugOptions = [.showFeaturePoints] // Shows tracked feature points. Remove in production.
        let config = ARWorldTrackingConfiguration()
        config.planeDetection = [.horizontal, .vertical] // Detect horizontal and vertical planes
        arView.session.run(config)
    }

    // MARK: - Object Placement
    @IBAction func placeObject(_ sender: UIButton) {
        // 1. Check whether the focus entity is currently on a detected surface.
        guard let currentRaycastResult = focusEntity?.raycastResult else {
            statusLabel.text = "Please point at a detected surface."
            return
        }
        // 2. Load the 3D model.
        loadModel(named: availableModels.randomElement() ?? "chair_swan") { [weak self] modelEntity in
            guard let self = self, let modelEntity = modelEntity else {
                self?.statusLabel.text = "Failed to load model."
                return
            }
            // 3. Anchor the model at the raycast hit point.
            let anchor = AnchorEntity(world: currentRaycastResult.worldTransform)
            anchor.addChild(modelEntity)
            self.arView.scene.addAnchor(anchor)
            self.virtualObjectAnchor = anchor
            self.virtualObject = modelEntity
            self.statusLabel.text = "Object placed!"
            // 4. Add gestures for interaction.
            self.installGestures()
        }
    }

    func loadModel(named modelName: String, completion: @escaping (ModelEntity?) -> Void) {
        // 1. Get the model's URL. RealityKit loads .usdz files.
        guard let modelURL = Bundle.main.url(forResource: modelName, withExtension: "usdz") else {
            print("Failed to find model file: \(modelName).usdz")
            completion(nil)
            return
        }
        // 2. Asynchronously load the entity. Storing the subscription in `cancellables`
        //    keeps it alive until loading finishes.
        ModelEntity.loadModelAsync(contentsOf: modelURL)
            .sink(receiveCompletion: { loadCompletion in
                if case let .failure(error) = loadCompletion {
                    print("Unable to load model \(modelName): error = \(error)")
                    completion(nil)
                }
            }, receiveValue: { modelEntity in
                // 3. Scale the model (example scale; adjust per asset).
                modelEntity.scale = SIMD3<Float>(0.01, 0.01, 0.01)
                completion(modelEntity)
            })
            .store(in: &cancellables)
    }

    // MARK: - Gesture Recognition (Tap and Pan)
    func installGestures() {
        guard !gesturesInstalled else { return } // Install the recognizers only once
        gesturesInstalled = true
        let tap = UITapGestureRecognizer(target: self, action: #selector(handleTap(_:)))
        arView.addGestureRecognizer(tap)
        let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePan(_:)))
        arView.addGestureRecognizer(pan)
    }

    @objc func handleTap(_ sender: UITapGestureRecognizer) {
        let tapLocation = sender.location(in: arView)
        // Raycast against detected plane geometry at the tap location.
        let results = arView.raycast(from: tapLocation, allowing: .existingPlaneGeometry, alignment: .any)
        if let firstResult = results.first {
            // Reposition the object's anchor at the tapped surface point.
            virtualObjectAnchor?.move(to: firstResult.worldTransform, relativeTo: nil)
        }
    }

    @objc func handlePan(_ sender: UIPanGestureRecognizer) {
        guard let virtualObject = virtualObject else { return } // Nothing placed yet
        let location = sender.location(in: arView)
        switch sender.state {
        case .began:
            // Raycast at the start of the pan to record the initial world position.
            let results = arView.raycast(from: location, allowing: .existingPlaneGeometry, alignment: .any)
            if let firstResult = results.first {
                dragStartPosition = SIMD3<Float>(firstResult.worldTransform.columns.3.x,
                                                 firstResult.worldTransform.columns.3.y,
                                                 firstResult.worldTransform.columns.3.z)
            }
        case .changed:
            // Raycast during the pan and translate the object by the finger's world-space delta.
            let results = arView.raycast(from: location, allowing: .existingPlaneGeometry, alignment: .any)
            if let firstResult = results.first, let startPosition = dragStartPosition {
                let currentPosition = SIMD3<Float>(firstResult.worldTransform.columns.3.x,
                                                   firstResult.worldTransform.columns.3.y,
                                                   firstResult.worldTransform.columns.3.z)
                let translation = currentPosition - startPosition
                virtualObject.position += translation // Assumes a world-aligned anchor
                dragStartPosition = currentPosition
            }
        case .ended, .cancelled:
            dragStartPosition = nil
        default:
            break
        }
    }

    // MARK: - ARSessionDelegate
    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // Keep the focus entity glued to the surface under the screen center.
        let center = CGPoint(x: arView.bounds.midX, y: arView.bounds.midY)
        let results = arView.raycast(from: center, allowing: .estimatedPlane, alignment: .any)
        if let firstResult = results.first {
            focusEntity?.updateFocusEntity(raycastResult: firstResult)
        }
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user.
        statusLabel.text = "AR Session Failed: \(error.localizedDescription)"
    }

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted.
        statusLabel.text = "AR Session Interrupted."
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and remove existing anchors so tracking stays consistent.
        statusLabel.text = "AR Session Interruption Ended. Resetting."
        resetTracking()
    }

    func resetTracking() {
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal, .vertical]
        arView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }

    // MARK: - Coaching Overlay (Helps the user find surfaces)
    func setupCoachingOverlay() {
        coachingOverlay.translatesAutoresizingMaskIntoConstraints = false
        arView.addSubview(coachingOverlay)
        NSLayoutConstraint.activate([
            coachingOverlay.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            coachingOverlay.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            coachingOverlay.topAnchor.constraint(equalTo: view.topAnchor),
            coachingOverlay.bottomAnchor.constraint(equalTo: view.bottomAnchor)
        ])
        coachingOverlay.goal = .horizontalPlane
        coachingOverlay.session = arView.session
        coachingOverlay.activatesAutomatically = true
    }
}

// MARK: - FocusEntity (Visual cue for surface detection)
class FocusEntity: Entity, HasAnchoring {
    var raycastResult: ARRaycastResult?
    weak var arView: ARView?
    var style: Style = .classic(color: .yellow)

    enum Style {
        case classic(color: UIColor)
        case custom(modelName: String)
    }

    init(on arView: ARView, style: Style = .classic(color: .yellow)) {
        super.init()
        self.arView = arView
        self.style = style
        // Anchor at the world origin so `position` can be set in world coordinates.
        self.anchoring = AnchoringComponent(.world(transform: matrix_identity_float4x4))
        arView.scene.addAnchor(self)
    }

    required init() {
        super.init()
    }

    func updateFocusEntity(raycastResult: ARRaycastResult) {
        self.raycastResult = raycastResult
        self.position = SIMD3<Float>(raycastResult.worldTransform.columns.3.x,
                                     raycastResult.worldTransform.columns.3.y,
                                     raycastResult.worldTransform.columns.3.z)
        // Lazily create the visual indicator on the first result.
        if self.children.isEmpty {
            if case .classic(let color) = style {
                let sphere = MeshResource.generateSphere(radius: 0.01)
                let material = SimpleMaterial(color: color, isMetallic: false)
                let entity = ModelEntity(mesh: sphere, materials: [material])
                self.addChild(entity)
            }
            // Add more cases here for custom model loading if needed.
        }
    }
}
```
**Explanation:**
1. **UI Elements and ARView:**
* `ARView`: The primary view for displaying the augmented reality scene. It's linked to the `arView` outlet in the Storyboard.
* `statusLabel`: Displays status messages to the user (e.g., instructions, errors).
* `addButton`: Button to trigger the placement of a virtual furniture object.
2. **ARKit Setup:**
* **`configureARKitSession()`:** Sets up the `ARWorldTrackingConfiguration`.
* `planeDetection = [.horizontal, .vertical]`: Enables the detection of both horizontal and vertical planes. This is crucial for placing furniture on floors, walls, or tables.
* `arView.debugOptions = [.showFeaturePoints]`: Displays feature points that ARKit is tracking. This helps you understand how well ARKit is "seeing" the environment, but you should remove this in a production app.
* `arView.automaticallyConfigureSession = false`: We configure the session manually, set before `run(_:)` so `ARView` doesn't override our configuration. This gives us more control.
* **`setupCoachingOverlay()`:** Creates and adds an `ARCoachingOverlayView`. This provides visual cues that guide the user to move the device around until ARKit can detect a surface. The overlay has a goal (`.horizontalPlane` in this case), indicating the type of surface it's trying to help the user find. (A sketch of reacting to overlay activation via its delegate appears after this list.)
* **`session(_:didUpdate:)`, `session(_:didFailWithError:)`, `sessionWasInterrupted(_:)`, `sessionInterruptionEnded(_:)`:** `ARSessionDelegate` methods. The per-frame `didUpdate` callback raycasts from the screen center to keep the focus entity on the nearest surface; the others handle errors and interruptions, which is important for a robust AR experience. The `resetTracking()` function restarts the AR session after an interruption ends.
3. **Focus Entity**
* A custom class that provides visual feedback to the user about where ARKit is currently focusing.
* The `updateFocusEntity(raycastResult:)` method updates the focus entity's position from the latest raycast result; the view controller calls it every frame from `session(_:didUpdate:)` using a raycast from the screen center.
* The style can be configured to be a simple sphere or a custom model.
4. **Object Placement (`placeObject(_:)`)**
* This is the heart of the object placement logic.
* It first checks if a plane has been detected using `focusEntity?.raycastResult`.
* Then it calls `loadModel(named:)` to load a 3D model asynchronously.
* Creates an `AnchorEntity` (a fixed point in the AR world) from the raycast result's `worldTransform`. The raycast result describes where the ray intersects the detected surface; the anchor ensures that the object stays in place relative to the real world.
* Adds the `modelEntity` as a child of the `anchor`.
* Adds the `anchor` to the `arView.scene`.
* Calls `installGestures()` to enable tap and pan gestures for interacting with the placed object.
5. **Model Loading (`loadModel(named:)`)**
* Asynchronously loads a 3D model from a `.usdz` file. RealityKit prefers `.usdz` files, which are optimized for AR.
* Uses `ModelEntity.loadModelAsync(contentsOf:)` to load the model in the background.
* The `sink` method on the `Publisher` (returned by `loadModelAsync`) allows you to handle the result:
* `receiveCompletion`: Handles errors or the completion of the loading.
* `receiveValue`: Handles the successfully loaded `ModelEntity`.
* Scales the model down (example: `modelEntity.scale = SIMD3<Float>(0.01, 0.01, 0.01)`). You'll need to adjust the scale depending on the size of your 3D models.
* **Important:** Storing the `sink` subscription in the `cancellables` set keeps it alive until loading completes; without a retained reference, Combine tears the subscription down immediately and the model would never be delivered. (An async/await alternative to the Combine pipeline is sketched after this list.)
6. **Gestures**
* **`installGestures()`:** Adds a `UITapGestureRecognizer` and a `UIPanGestureRecognizer` to the `ARView`, guarded by a flag so repeated placements don't stack duplicate recognizers.
* **`handleTap(_:)`:** Raycasts against detected planes at the tap location and moves the virtual object's anchor to the hit point. This allows the user to reposition the object.
* **`handlePan(_:)`:** Handles pan gestures to drag the object across detected surfaces.
* It raycasts in the `began` state to record the starting world position of the drag.
* During the `changed` state, it computes the world-space translation from the previous position and applies it to the object. (RealityKit's built-in entity gestures, an alternative to this hand-rolled handler, are sketched right after this list.)
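For dragging, rotating, and scaling without hand-rolled recognizers, `ARView` also ships built-in entity gestures. A minimal sketch, assuming the controller above; the hypothetical `installBuiltInGestures(on:)` helper is not part of the original code:

```swift
// Built-in RealityKit gestures: translate, rotate, and scale a placed entity.
// Could be called instead of installGestures() after placement.
func installBuiltInGestures(on modelEntity: ModelEntity) {
    // Gesture hit-testing requires collision shapes on the entity.
    modelEntity.generateCollisionShapes(recursive: true)
    arView.installGestures([.translation, .rotation, .scale], for: modelEntity)
}
```

For most furniture-arrangement needs this replaces the manual pan handler; keep custom recognizers only for behavior the built-ins don't cover.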
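If you target iOS 15 or later, the Combine pipeline can be replaced with async/await. A sketch, assuming the iOS 15 async `ModelEntity(contentsOf:)` initializer is available:

```swift
// Async/await alternative to the Combine-based loader (assumes iOS 15+).
func loadModel(named modelName: String) async -> ModelEntity? {
    guard let modelURL = Bundle.main.url(forResource: modelName, withExtension: "usdz") else {
        print("Failed to find model file: \(modelName).usdz")
        return nil
    }
    // The async initializer loads off the main thread and throws on failure.
    let entity = try? await ModelEntity(contentsOf: modelURL)
    entity?.scale = SIMD3<Float>(0.01, 0.01, 0.01) // Same example scale as above
    return entity
}
```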
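To react when the coaching overlay takes over the screen (for example, hiding the placement button and focus indicator while coaching is active), one approach is to adopt `ARCoachingOverlayViewDelegate` and set `coachingOverlay.delegate = self` in `setupCoachingOverlay()`. A minimal sketch:

```swift
// Reacting to coaching overlay activation/deactivation.
extension ARFurnitureViewController: ARCoachingOverlayViewDelegate {
    func coachingOverlayViewWillActivate(_ coachingOverlayView: ARCoachingOverlayView) {
        // Coaching is showing; hide UI that competes with it.
        addButton.isHidden = true
        focusEntity?.isEnabled = false
    }

    func coachingOverlayViewDidDeactivate(_ coachingOverlayView: ARCoachingOverlayView) {
        // A surface was found; restore the placement UI.
        addButton.isHidden = false
        focusEntity?.isEnabled = true
    }

    func coachingOverlayViewDidRequestSessionReset(_ coachingOverlayView: ARCoachingOverlayView) {
        resetTracking()
    }
}
```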
**Important Considerations:**
* **`.usdz` Model Files:** You'll need to create or obtain `.usdz` versions of your furniture models. You can use Apple's Reality Converter app (available on macOS) to convert from other 3D model formats. Make sure your models are in your app's bundle.
* **Scaling:** The scale of your 3D models is very important. You'll need to experiment and adjust the scaling factor in the `loadModel(named:)` function to get the furniture to appear at a realistic size.
* **Coordinate Systems:** ARKit uses a right-handed coordinate system. Make sure your 3D models are oriented correctly.
* **Performance:** ARKit can be resource-intensive. Optimize your 3D models (reduce polygon count, use efficient textures) for better performance, especially on older devices.
* **Realism:** To enhance the realism, consider adding:
* **Shadows:** Realistic shadows can make virtual objects feel more grounded in the real world. RealityKit supports shadows.
* **Lighting:** ARKit can estimate the ambient lighting in the scene. Use this information to adjust the lighting of your virtual objects.
* **Occlusion:** Allow virtual objects to be hidden by real-world objects and people. This needs depth data (people occlusion on A12+ chips, scene meshes on LiDAR devices); a configuration sketch follows this list.
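A sketch of how those realism options map onto ARKit/RealityKit configuration. Each feature is hardware-dependent, so every option is guarded by a capability check; the `enableRealismOptions` helper name is illustrative, not part of the code above:

```swift
// Illustrative helper: opt into lighting estimation and occlusion where supported.
func enableRealismOptions(on arView: ARView, config: ARWorldTrackingConfiguration) {
    // Generated environment textures give virtual objects plausible reflections/lighting.
    config.environmentTexturing = .automatic

    // People occlusion (A12+ devices): real people can pass in front of virtual objects.
    if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentationWithDepth) {
        config.frameSemantics.insert(.personSegmentationWithDepth)
    }

    // Scene mesh occlusion (LiDAR devices): furniture hides behind real geometry.
    if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) {
        config.sceneReconstruction = .mesh
        arView.environment.sceneUnderstanding.options.insert(.occlusion)
    }

    // Note: RealityKit renders grounding shadows for anchored objects by default;
    // they can be turned off via arView.renderOptions.insert(.disableGroundingShadows).
    arView.session.run(config)
}
```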
To run this example:
1. **Create a new Xcode project:** Choose the "Augmented Reality App" template, with RealityKit as the content technology.
2. **Copy the code:** Replace the contents of `ViewController.swift` with the code provided.
3. **Set up the storyboard:** Add an `ARView`, a `UILabel`, and a `UIButton`, and connect them to the `arView`, `statusLabel`, and `addButton` outlets.
4. **Add `.usdz` models:** Add your `.usdz` model files to your project. Make sure they are added to the "Copy Bundle Resources" build phase. Update the `availableModels` array with the names of your model files (without the `.usdz` extension).
5. **Build and run:** Run the app on an ARKit-compatible iOS device. The simulator doesn't support ARKit.
6. **Permissions:** Add the `NSCameraUsageDescription` key to `Info.plist` with a short explanation of why the camera is needed; ARKit cannot start without camera access.
This provides a solid foundation for creating an AR furniture placement app. You can expand upon this by adding more features such as:
* Model selection UI.
* Object rotation and scaling gestures.
* Saving and loading scenes (an `ARWorldMap` persistence sketch follows this list).
* More advanced AR features like object occlusion.
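As a starting point for scene persistence, ARKit's `ARWorldMap` can be saved and restored. Note that the world map stores `ARAnchor`s and mapped feature points, not RealityKit entities, so you would also need your own bookkeeping (not shown here) recording which model was placed at which anchor. A sketch with hypothetical helper names:

```swift
// Save the session's current world map to disk (world tracking only).
func saveWorldMap(from session: ARSession, to url: URL) {
    session.getCurrentWorldMap { worldMap, error in
        guard let map = worldMap else {
            print("World map unavailable: \(String(describing: error))")
            return
        }
        if let data = try? NSKeyedArchiver.archivedData(withRootObject: map, requiringSecureCoding: true) {
            try? data.write(to: url)
        }
    }
}

// Restore a saved world map by feeding it back in as the session's starting point.
func restoreWorldMap(from url: URL, into arView: ARView) {
    guard let data = try? Data(contentsOf: url),
          let map = try? NSKeyedUnarchiver.unarchivedObject(ofClass: ARWorldMap.self, from: data) else {
        return
    }
    let config = ARWorldTrackingConfiguration()
    config.planeDetection = [.horizontal, .vertical]
    config.initialWorldMap = map
    arView.session.run(config, options: [.resetTracking, .removeExistingAnchors])
}
```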