realitykit-ar by dpearson2699/swift-ios-skills
npx skills add https://github.com/dpearson2699/swift-ios-skills --skill realitykit-ar

使用 RealityKit 进行渲染,ARKit 进行世界追踪,在 iOS 上构建 AR 体验。涵盖 RealityView、实体管理、光线投射、场景理解以及基于手势的交互。目标平台为 Swift 6.2 / iOS 26+。
在 Info.plist 中添加 NSCameraUsageDescription。RealityViewCameraContent 通过设备摄像头渲染(iOS 18+, macOS 15+)。AR 功能需要配备 A9 或更高芯片的设备。在呈现 AR 用户界面之前,务必验证设备支持。
import ARKit
guard ARWorldTrackingConfiguration.isSupported else {
showUnsupportedDeviceMessage()
return
}
广告位招租
在这里展示您的产品或服务
触达数万 AI 开发者,精准高效
| 类型 | 平台 | 作用 |
|---|---|---|
RealityView | iOS 18+, visionOS 1+ | 承载 RealityKit 内容的 SwiftUI 视图 |
RealityViewCameraContent | iOS 18+, macOS 15+ | 通过设备摄像头显示的内容 |
Entity | 所有平台 | 所有场景对象的基类 |
ModelEntity | 所有平台 | 具有可见 3D 模型的实体 |
AnchorEntity | 所有平台 | 将实体锚定到现实世界中的锚点 |
RealityView 是 RealityKit 的 SwiftUI 入口点。在 iOS 上,它提供 RealityViewCameraContent,通过设备摄像头进行 AR 渲染。
import SwiftUI
import RealityKit
struct ARExperienceView: View {
var body: some View {
RealityView { content in
// 在 iOS 上,content 是 RealityViewCameraContent
let sphere = ModelEntity(
mesh: .generateSphere(radius: 0.05),
materials: [SimpleMaterial(
color: .blue,
isMetallic: true
)]
)
sphere.position = [0, 0, -0.5] // 摄像头前方 50 厘米处
content.add(sphere)
}
}
}
使用 update 闭包来响应 SwiftUI 状态变化:
struct PlacementView: View {
@State private var modelColor: UIColor = .red
var body: some View {
RealityView { content in
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(
color: .red,
isMetallic: false
)]
)
box.name = "colorBox"
box.position = [0, 0, -0.5]
content.add(box)
} update: { content in
if let box = content.entities.first(
where: { $0.name == "colorBox" }
) as? ModelEntity {
box.model?.materials = [SimpleMaterial(
color: modelColor,
isMetallic: false
)]
}
}
Button("Change Color") {
modelColor = modelColor == .red ? .green : .red
}
}
}
异步加载 3D 模型以避免阻塞主线程:
RealityView { content in
if let robot = try? await ModelEntity(named: "robot") {
robot.position = [0, -0.2, -0.8]
robot.scale = [0.01, 0.01, 0.01]
content.add(robot)
}
}
// 立方体
let box = ModelEntity(
mesh: .generateBox(size: [0.1, 0.2, 0.1], cornerRadius: 0.005),
materials: [SimpleMaterial(color: .gray, isMetallic: true)]
)
// 球体
let sphere = ModelEntity(
mesh: .generateSphere(radius: 0.05),
materials: [SimpleMaterial(color: .blue, roughness: 0.2, isMetallic: true)]
)
// 平面
let plane = ModelEntity(
mesh: .generatePlane(width: 0.3, depth: 0.3),
materials: [SimpleMaterial(color: .green, isMetallic: false)]
)
实体使用 ECS(实体组件系统)架构。添加组件以赋予实体行为:
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .red, isMetallic: false)]
)
// 使其响应物理效果
box.components.set(PhysicsBodyComponent(
massProperties: .default,
material: .default,
mode: .dynamic
))
// 添加碰撞形状用于交互
box.components.set(CollisionComponent(
shapes: [.generateBox(size: [0.1, 0.1, 0.1])]
))
// 为手势启用输入目标
box.components.set(InputTargetComponent())
使用 AnchorEntity 将内容锚定到检测到的表面或世界位置:
RealityView { content in
// 锚定到水平表面
let floorAnchor = AnchorEntity(.plane(
.horizontal,
classification: .floor,
minimumBounds: [0.2, 0.2]
))
let model = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .orange, isMetallic: false)]
)
floorAnchor.addChild(model)
content.add(floorAnchor)
}
| 目标 | 描述 |
|---|---|
.plane(.horizontal, ...) | 水平表面(地板、桌子) |
.plane(.vertical, ...) | 垂直表面(墙壁) |
.plane(.any, ...) | 任何检测到的平面 |
.world(transform:) | 固定的世界空间位置 |
使用 RealityViewCameraContent 在 SwiftUI 视图坐标和 RealityKit 世界空间之间进行转换。与 SpatialTapGesture 结合使用,将对象放置在用户点击的检测表面上。
struct DraggableARView: View {
var body: some View {
RealityView { content in
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .blue, isMetallic: true)]
)
box.position = [0, 0, -0.5]
box.components.set(CollisionComponent(
shapes: [.generateBox(size: [0.1, 0.1, 0.1])]
))
box.components.set(InputTargetComponent())
box.name = "draggable"
content.add(box)
}
.gesture(
DragGesture()
.targetedToAnyEntity()
.onChanged { value in
let entity = value.entity
guard let parent = entity.parent else { return }
entity.position = value.convert(
value.location3D,
from: .local,
to: parent
)
}
)
}
}
.gesture(
SpatialTapGesture()
.targetedToAnyEntity()
.onEnded { value in
let tappedEntity = value.entity
highlightEntity(tappedEntity)
}
)
订阅场景更新事件以进行连续处理:
RealityView { content in
let entity = ModelEntity(
mesh: .generateSphere(radius: 0.05),
materials: [SimpleMaterial(color: .yellow, isMetallic: false)]
)
entity.position = [0, 0, -0.5]
content.add(entity)
_ = content.subscribe(to: SceneEvents.Update.self) { event in
let time = Float(event.deltaTime)
entity.position.y += sin(Float(Date().timeIntervalSince1970)) * time * 0.1
}
}
在 visionOS 上,ARKit 通过 ARKitSession、WorldTrackingProvider 和 PlaneDetectionProvider 提供了不同的 API 接口。这些 visionOS 特定的类型在 iOS 上不可用。在 iOS 上,RealityKit 通过 RealityViewCameraContent 自动处理世界追踪。
并非所有设备都支持 AR。显示一个没有任何反馈的黑色摄像头视图会让用户感到困惑。
// 错误 -- 没有设备检查
struct MyARView: View {
var body: some View {
RealityView { content in
// 在不支持的设备上静默失败
}
}
}
// 正确 -- 检查支持并显示备用视图
struct MyARView: View {
var body: some View {
if ARWorldTrackingConfiguration.isSupported {
RealityView { content in
// AR 内容
}
} else {
ContentUnavailableView(
"AR Not Supported",
systemImage: "arkit",
description: Text("This device does not support AR.")
)
}
}
}
在主线程上加载大型 USDZ 文件会导致掉帧和卡顿。RealityView 的 make 闭包是 async 的 -- 请使用它。
// 错误 -- 同步加载阻塞主线程
RealityView { content in
let model = try! Entity.load(named: "large-scene")
content.add(model)
}
// 正确 -- 异步加载
RealityView { content in
if let model = try? await ModelEntity(named: "large-scene") {
content.add(model)
}
}
手势仅对同时具有 CollisionComponent 和 InputTargetComponent 的实体有效。没有它们,点击和拖拽会穿透实体。
// 错误 -- 实体忽略手势
let box = ModelEntity(mesh: .generateBox(size: 0.1))
content.add(box)
// 正确 -- 添加碰撞和输入组件
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .red, isMetallic: false)]
)
box.components.set(CollisionComponent(
shapes: [.generateBox(size: [0.1, 0.1, 0.1])]
))
box.components.set(InputTargetComponent())
content.add(box)
update 闭包在每次 SwiftUI 状态变化时都会运行。在那里创建实体会在每次渲染过程中重复添加内容。
// 错误 -- 每次状态变化都重复创建实体
RealityView { content in
// 空
} update: { content in
let sphere = ModelEntity(mesh: .generateSphere(radius: 0.05))
content.add(sphere) // 每次更新都会再次添加
}
// 正确 -- 在 make 中创建,在 update 中修改
RealityView { content in
let sphere = ModelEntity(mesh: .generateSphere(radius: 0.05))
sphere.name = "mySphere"
content.add(sphere)
} update: { content in
if let sphere = content.entities.first(
where: { $0.name == "mySphere" }
) as? ModelEntity {
// 修改现有实体
sphere.position.y = newYPosition
}
}
iOS 上的 RealityKit 需要摄像头访问权限。如果用户拒绝授权,视图会显示一个没有任何解释的黑屏。
// 错误 -- 没有权限处理
RealityView { content in
// 如果摄像头被拒绝,则显示黑屏
}
// 正确 -- 检查并请求权限
struct ARContainerView: View {
@State private var cameraAuthorized = false
var body: some View {
Group {
if cameraAuthorized {
RealityView { content in
// AR 内容
}
} else {
ContentUnavailableView(
"Camera Access Required",
systemImage: "camera.fill",
description: Text("Enable camera in Settings to use AR.")
)
}
}
.task {
let status = AVCaptureDevice.authorizationStatus(for: .video)
if status == .authorized {
cameraAuthorized = true
} else if status == .notDetermined {
cameraAuthorized = await AVCaptureDevice
.requestAccess(for: .video)
}
}
}
}
检查清单:
- 在 Info.plist 中设置 NSCameraUsageDescription
- 在 make 闭包中异步加载 3D 模型
- 在 make 中创建实体,在 update 中修改实体(不在 update 中创建)
- 为可交互实体添加 CollisionComponent 和 InputTargetComponent
- 使用 SceneEvents.Update 订阅进行逐帧逻辑处理(而非 SwiftUI 计时器)
- 使用 ModelEntity(named:) 异步加载,而非 Entity.load(named:)
- 为实体设置名称,以便在 update 闭包中查找
另见:references/realitykit-patterns.md

每周安装量
333
代码仓库
GitHub 星标数
269
首次出现
2026年3月8日
安全审计
安装于
codex330
github-copilot327
amp327
cline327
kimi-cli327
gemini-cli327
Build AR experiences on iOS using RealityKit for rendering and ARKit for world tracking. Covers RealityView, entity management, raycasting, scene understanding, and gesture-based interactions. Targets Swift 6.2 / iOS 26+.
Add NSCameraUsageDescription to Info.plist. RealityViewCameraContent renders through the device camera (iOS 18+, macOS 15+). AR features require devices with an A9 chip or later. Always verify support before presenting AR UI.
import ARKit
guard ARWorldTrackingConfiguration.isSupported else {
showUnsupportedDeviceMessage()
return
}
| Type | Platform | Role |
|---|---|---|
RealityView | iOS 18+, visionOS 1+ | SwiftUI view that hosts RealityKit content |
RealityViewCameraContent | iOS 18+, macOS 15+ | Content displayed through the device camera |
Entity | All | Base class for all scene objects |
ModelEntity | All | Entity with a visible 3D model |
AnchorEntity | All | Tethers entities to a real-world anchor |
RealityView is the SwiftUI entry point for RealityKit. On iOS, it provides RealityViewCameraContent which renders through the device camera for AR.
import SwiftUI
import RealityKit
struct ARExperienceView: View {
var body: some View {
RealityView { content in
// content is RealityViewCameraContent on iOS
let sphere = ModelEntity(
mesh: .generateSphere(radius: 0.05),
materials: [SimpleMaterial(
color: .blue,
isMetallic: true
)]
)
sphere.position = [0, 0, -0.5] // 50cm in front of camera
content.add(sphere)
}
}
}
Use the update closure to respond to SwiftUI state changes:
struct PlacementView: View {
@State private var modelColor: UIColor = .red
var body: some View {
RealityView { content in
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(
color: .red,
isMetallic: false
)]
)
box.name = "colorBox"
box.position = [0, 0, -0.5]
content.add(box)
} update: { content in
if let box = content.entities.first(
where: { $0.name == "colorBox" }
) as? ModelEntity {
box.model?.materials = [SimpleMaterial(
color: modelColor,
isMetallic: false
)]
}
}
Button("Change Color") {
modelColor = modelColor == .red ? .green : .red
}
}
}
Load 3D models asynchronously to avoid blocking the main thread:
RealityView { content in
if let robot = try? await ModelEntity(named: "robot") {
robot.position = [0, -0.2, -0.8]
robot.scale = [0.01, 0.01, 0.01]
content.add(robot)
}
}
// Box
let box = ModelEntity(
mesh: .generateBox(size: [0.1, 0.2, 0.1], cornerRadius: 0.005),
materials: [SimpleMaterial(color: .gray, isMetallic: true)]
)
// Sphere
let sphere = ModelEntity(
mesh: .generateSphere(radius: 0.05),
materials: [SimpleMaterial(color: .blue, roughness: 0.2, isMetallic: true)]
)
// Plane
let plane = ModelEntity(
mesh: .generatePlane(width: 0.3, depth: 0.3),
materials: [SimpleMaterial(color: .green, isMetallic: false)]
)
Entities use an ECS (Entity Component System) architecture. Add components to give entities behavior:
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .red, isMetallic: false)]
)
// Make it respond to physics
box.components.set(PhysicsBodyComponent(
massProperties: .default,
material: .default,
mode: .dynamic
))
// Add collision shape for interaction
box.components.set(CollisionComponent(
shapes: [.generateBox(size: [0.1, 0.1, 0.1])]
))
// Enable input targeting for gestures
box.components.set(InputTargetComponent())
Use AnchorEntity to anchor content to detected surfaces or world positions:
RealityView { content in
// Anchor to a horizontal surface
let floorAnchor = AnchorEntity(.plane(
.horizontal,
classification: .floor,
minimumBounds: [0.2, 0.2]
))
let model = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .orange, isMetallic: false)]
)
floorAnchor.addChild(model)
content.add(floorAnchor)
}
| Target | Description |
|---|---|
.plane(.horizontal, ...) | Horizontal surfaces (floors, tables) |
.plane(.vertical, ...) | Vertical surfaces (walls) |
.plane(.any, ...) | Any detected plane |
.world(transform:) | Fixed world-space position |
Use RealityViewCameraContent to convert between SwiftUI view coordinates and RealityKit world space. Pair with SpatialTapGesture to place objects where the user taps on a detected surface.
struct DraggableARView: View {
var body: some View {
RealityView { content in
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .blue, isMetallic: true)]
)
box.position = [0, 0, -0.5]
box.components.set(CollisionComponent(
shapes: [.generateBox(size: [0.1, 0.1, 0.1])]
))
box.components.set(InputTargetComponent())
box.name = "draggable"
content.add(box)
}
.gesture(
DragGesture()
.targetedToAnyEntity()
.onChanged { value in
let entity = value.entity
guard let parent = entity.parent else { return }
entity.position = value.convert(
value.location3D,
from: .local,
to: parent
)
}
)
}
}
.gesture(
SpatialTapGesture()
.targetedToAnyEntity()
.onEnded { value in
let tappedEntity = value.entity
highlightEntity(tappedEntity)
}
)
Subscribe to scene update events for continuous processing:
RealityView { content in
let entity = ModelEntity(
mesh: .generateSphere(radius: 0.05),
materials: [SimpleMaterial(color: .yellow, isMetallic: false)]
)
entity.position = [0, 0, -0.5]
content.add(entity)
_ = content.subscribe(to: SceneEvents.Update.self) { event in
let time = Float(event.deltaTime)
entity.position.y += sin(Float(Date().timeIntervalSince1970)) * time * 0.1
}
}
On visionOS, ARKit provides a different API surface with ARKitSession, WorldTrackingProvider, and PlaneDetectionProvider. These visionOS-specific types are not available on iOS. On iOS, RealityKit handles world tracking automatically through RealityViewCameraContent.
Not all devices support AR. Showing a black camera view with no feedback confuses users.
// WRONG -- no device check
struct MyARView: View {
var body: some View {
RealityView { content in
// Fails silently on unsupported devices
}
}
}
// CORRECT -- check support and show fallback
struct MyARView: View {
var body: some View {
if ARWorldTrackingConfiguration.isSupported {
RealityView { content in
// AR content
}
} else {
ContentUnavailableView(
"AR Not Supported",
systemImage: "arkit",
description: Text("This device does not support AR.")
)
}
}
}
Loading large USDZ files on the main thread causes frame drops and hangs. The make closure of RealityView is async -- use it.
// WRONG -- synchronous load blocks the main thread
RealityView { content in
let model = try! Entity.load(named: "large-scene")
content.add(model)
}
// CORRECT -- async load
RealityView { content in
if let model = try? await ModelEntity(named: "large-scene") {
content.add(model)
}
}
Gestures only work on entities that have both CollisionComponent and InputTargetComponent. Without them, taps and drags pass through.
// WRONG -- entity ignores gestures
let box = ModelEntity(mesh: .generateBox(size: 0.1))
content.add(box)
// CORRECT -- add collision and input components
let box = ModelEntity(
mesh: .generateBox(size: 0.1),
materials: [SimpleMaterial(color: .red, isMetallic: false)]
)
box.components.set(CollisionComponent(
shapes: [.generateBox(size: [0.1, 0.1, 0.1])]
))
box.components.set(InputTargetComponent())
content.add(box)
The update closure runs on every SwiftUI state change. Creating entities there duplicates content on each render pass.
// WRONG -- duplicates entities on every state change
RealityView { content in
// empty
} update: { content in
let sphere = ModelEntity(mesh: .generateSphere(radius: 0.05))
content.add(sphere) // Added again on every update
}
// CORRECT -- create in make, modify in update
RealityView { content in
let sphere = ModelEntity(mesh: .generateSphere(radius: 0.05))
sphere.name = "mySphere"
content.add(sphere)
} update: { content in
if let sphere = content.entities.first(
where: { $0.name == "mySphere" }
) as? ModelEntity {
// Modify existing entity
sphere.position.y = newYPosition
}
}
RealityKit on iOS needs camera access. If the user denies permission, the view shows a black screen with no explanation.
// WRONG -- no permission handling
RealityView { content in
// Black screen if camera denied
}
// CORRECT -- check and request permission
struct ARContainerView: View {
@State private var cameraAuthorized = false
var body: some View {
Group {
if cameraAuthorized {
RealityView { content in
// AR content
}
} else {
ContentUnavailableView(
"Camera Access Required",
systemImage: "camera.fill",
description: Text("Enable camera in Settings to use AR.")
)
}
}
.task {
let status = AVCaptureDevice.authorizationStatus(for: .video)
if status == .authorized {
cameraAuthorized = true
} else if status == .notDetermined {
cameraAuthorized = await AVCaptureDevice
.requestAccess(for: .video)
}
}
}
}
Checklist:
- NSCameraUsageDescription set in Info.plist
- 3D models loaded asynchronously in the make closure
- Entities created in make, modified in update (not created in update)
- CollisionComponent and InputTargetComponent added to interactive entities
- SceneEvents.Update subscriptions used for per-frame logic (not SwiftUI timers)
- ModelEntity(named:) async loading, not Entity.load(named:)
- Entity names set so they can be found in the update closure
See also: references/realitykit-patterns.md

Weekly Installs
333
Repository
GitHub Stars
269
First Seen
Mar 8, 2026
Security Audits
Gen Agent Trust Hub: Pass · Socket: Pass · Snyk: Pass
Installed on
codex330
github-copilot327
amp327
cline327
kimi-cli327
gemini-cli327
React 组合模式指南:Vercel 组件架构最佳实践,提升代码可维护性
105,000 周安装
文案撰写指南:PAS/AIDA/BAB框架,提升营销转化率
326 周安装
SvelteKit 项目结构详解:文件命名、路由布局与错误处理最佳实践
326 周安装
Slack API 操作工具 - 消息管理、回应、置顶与成员信息查询 | OpenClaw 机器人集成
326 周安装
GrepAI 高级搜索选项:JSON/TOON 输出、紧凑模式与 AI 代理集成指南
326 周安装
Snowflake平台技能:使用CLI、Cortex AI函数和Snowpark构建AI数据云应用
326 周安装
AI代码实施计划编写工具 - 遵循TDD原则的详细开发任务分解指南
326 周安装
AnchorEntity | All | Tethers entities to a real-world anchor |
Prefer async ModelEntity(named:) loading over synchronous Entity.load(named:), and look entities up by name in the update closure.