spatial-developer by daffy0208/ai-dev-standards
npx skills add https://github.com/daffy0208/ai-dev-standards --skill spatial-developer
我帮助您构建 AR/VR 体验、空间界面和沉浸式 3D 应用程序。
WebXR 开发:
Vision Pro 开发:
3D Web:
npm install three @react-three/fiber @react-three/drei @react-three/xr
// components/VRScene.tsx
// Minimal WebXR VR scene: a lit cube plus controller and hand-tracking input.
// Fix: removed the unused `OrbitControls` import (never rendered in this scene).
'use client'
import { Canvas } from '@react-three/fiber'
import { VRButton, XR, Controllers, Hands } from '@react-three/xr'
import { Box } from '@react-three/drei'

export function VRScene() {
  return (
    <>
      {/* Button that requests an immersive-vr WebXR session */}
      <VRButton />
      <Canvas>
        <XR>
          {/* Basic lighting */}
          <ambientLight intensity={0.5} />
          <pointLight position={[10, 10, 10]} />
          {/* 3D Content: 1 m cube, 1 m up and 2 m in front of the viewer */}
          <Box position={[0, 1, -2]} args={[1, 1, 1]}>
            <meshStandardMaterial color="hotpink" />
          </Box>
          {/* VR Controllers */}
          <Controllers />
          {/* Hand Tracking */}
          <Hands />
        </XR>
      </Canvas>
    </>
  )
}
广告位招租
在这里展示您的产品或服务
触达数万 AI 开发者,精准高效
// components/ARScene.tsx
// AR scene: requests a hit-test-enabled AR session and renders a blue sphere
// at the most recently reported hit-test result.
'use client'
import { Canvas } from '@react-three/fiber'
import { ARButton, XR } from '@react-three/xr'
import { useState } from 'react'
export function ARScene() {
// Latest hit-test result; null until the first surface hit is reported.
const [hitTest, setHitTest] = useState(null)
return (
<>
{/* Button that requests an immersive-ar session; hit-test is mandatory,
    dom-overlay is requested opportunistically. */}
<ARButton
sessionInit={{
requiredFeatures: ['hit-test'],
optionalFeatures: ['dom-overlay']
}}
/>
<Canvas>
{/* NOTE(review): `onHitTest` is not a documented prop of <XR> in current
    @react-three/xr releases — hit testing is normally done with the
    `useHitTest` hook inside the XR tree. Verify against the installed
    library version. */}
<XR
onHitTest={(hitMatrix, hit) => {
setHitTest(hit)
}}
>
<ambientLight />
{/* NOTE(review): assumes the `hit` object exposes a `.position`; a raw
    WebXR XRHitTestResult does not — confirm the shape this callback
    actually provides. */}
{hitTest && (
<mesh position={hitTest.position}>
<sphereGeometry args={[0.1]} />
<meshStandardMaterial color="blue" />
</mesh>
)}
</XR>
</Canvas>
</>
)
}
// ContentView.swift
// visionOS root view: displays a small matte blue sphere in a RealityView,
// with Reset/Share placeholder actions in a bottom-ornament toolbar.
import SwiftUI
import RealityKit

struct ContentView: View {
    var body: some View {
        RealityView { content in
            // 10 cm sphere with a simple non-metallic blue material.
            let sphere = ModelEntity(
                mesh: .generateSphere(radius: 0.1),
                materials: [SimpleMaterial(color: .blue, isMetallic: false)]
            )
            content.add(sphere)
        }
        .toolbar {
            ToolbarItem(placement: .bottomOrnament) {
                HStack {
                    Button("Reset") {
                        // Reset scene
                    }
                    Button("Share") {
                        // SharePlay
                    }
                }
            }
        }
    }
}
'use client'
// Gaze-selection demo: highlights a cube green while it is hovered/gazed at.
// Fixes: `Vector3` was used without being imported (runtime ReferenceError),
// and `gazing` was set on pointer enter but never cleared, so the highlight
// stuck on forever after the first hover.
import { useXR } from '@react-three/xr'
import { useFrame } from '@react-three/fiber'
import { useState } from 'react'
import { Vector3 } from 'three'

export function GazeSelect() {
  const { player } = useXR()
  const [gazing, setGazing] = useState(false)
  useFrame(() => {
    // Raycast from camera (direction is illustrative — intersection logic
    // is left as an exercise below).
    const direction = player.camera.getWorldDirection(new Vector3())
    // Check intersection with objects
    // If gazing for 2 seconds, select
  })
  return (
    <mesh
      onPointerEnter={() => setGazing(true)}
      onPointerLeave={() => setGazing(false)}
    >
      <boxGeometry />
      <meshStandardMaterial
        color={gazing ? 'green' : 'white'}
      />
    </mesh>
  )
}
'use client'
// Listens for XR input gestures and logs them; renders a small red sphere
// as a visible anchor in the scene.
import { useXREvent } from '@react-three/xr'

export function GestureControl() {
  // 'squeeze' fires on a pinch / grip gesture.
  useXREvent('squeeze', () => {
    console.log('Pinch gesture detected')
    // Perform action
  })
  // 'select' fires on the primary input action (trigger / tap).
  useXREvent('select', () => {
    console.log('Select gesture')
  })
  return (
    <mesh>
      <sphereGeometry args={[0.05]} />
      <meshStandardMaterial color="red" />
    </mesh>
  )
}
'use client'
// Emits looping positional audio from a glowing yellow sphere in the scene.
import { PositionalAudio } from '@react-three/drei'
import { useRef } from 'react'

export function SpatialSound() {
  // Handle to the underlying THREE.PositionalAudio instance.
  const audioRef = useRef()
  return (
    <mesh position={[2, 1, -3]}>
      <sphereGeometry args={[0.2]} />
      <meshStandardMaterial color="yellow" emissive="yellow" />
      <PositionalAudio
        ref={audioRef}
        url="/sounds/ambient.mp3"
        distance={5}
        loop
        autoplay
      />
    </mesh>
  )
}
非常适合:
我将帮助您:
🥽 VR Experiences
📱 AR Applications
👋 Hand Tracking
🎧 Spatial Audio
🌐 WebXR Scenes
🍎 Vision Pro Apps
让我们共同构建空间计算的未来!
每周安装量
99
代码仓库
GitHub 星标数
22
首次出现
2026年1月20日
安全审计
安装于
opencode — 87
codex — 85
gemini-cli — 85
cursor — 80
github-copilot — 76
claude-code — 70
I help you build AR/VR experiences, spatial interfaces, and immersive 3D applications.
WebXR Development:
Vision Pro Development:
3D Web:
npm install three @react-three/fiber @react-three/drei @react-three/xr
// components/VRScene.tsx
// Minimal WebXR VR scene: a lit cube plus controller and hand-tracking input.
// Fix: removed the unused `OrbitControls` import (never rendered in this scene).
'use client'
import { Canvas } from '@react-three/fiber'
import { VRButton, XR, Controllers, Hands } from '@react-three/xr'
import { Box } from '@react-three/drei'

export function VRScene() {
  return (
    <>
      {/* Button that requests an immersive-vr WebXR session */}
      <VRButton />
      <Canvas>
        <XR>
          {/* Basic lighting */}
          <ambientLight intensity={0.5} />
          <pointLight position={[10, 10, 10]} />
          {/* 3D Content: 1 m cube, 1 m up and 2 m in front of the viewer */}
          <Box position={[0, 1, -2]} args={[1, 1, 1]}>
            <meshStandardMaterial color="hotpink" />
          </Box>
          {/* VR Controllers */}
          <Controllers />
          {/* Hand Tracking */}
          <Hands />
        </XR>
      </Canvas>
    </>
  )
}
// components/ARScene.tsx
// AR scene: requests a hit-test-enabled AR session and renders a blue sphere
// at the most recently reported hit-test result.
'use client'
import { Canvas } from '@react-three/fiber'
import { ARButton, XR } from '@react-three/xr'
import { useState } from 'react'
export function ARScene() {
// Latest hit-test result; null until the first surface hit is reported.
const [hitTest, setHitTest] = useState(null)
return (
<>
{/* Button that requests an immersive-ar session; hit-test is mandatory,
    dom-overlay is requested opportunistically. */}
<ARButton
sessionInit={{
requiredFeatures: ['hit-test'],
optionalFeatures: ['dom-overlay']
}}
/>
<Canvas>
{/* NOTE(review): `onHitTest` is not a documented prop of <XR> in current
    @react-three/xr releases — hit testing is normally done with the
    `useHitTest` hook inside the XR tree. Verify against the installed
    library version. */}
<XR
onHitTest={(hitMatrix, hit) => {
setHitTest(hit)
}}
>
<ambientLight />
{/* NOTE(review): assumes the `hit` object exposes a `.position`; a raw
    WebXR XRHitTestResult does not — confirm the shape this callback
    actually provides. */}
{hitTest && (
<mesh position={hitTest.position}>
<sphereGeometry args={[0.1]} />
<meshStandardMaterial color="blue" />
</mesh>
)}
</XR>
</Canvas>
</>
)
}
// ContentView.swift
// visionOS root view: displays a small matte blue sphere in a RealityView,
// with Reset/Share placeholder actions in a bottom-ornament toolbar.
import SwiftUI
import RealityKit

struct ContentView: View {
    var body: some View {
        RealityView { content in
            // 10 cm sphere with a simple non-metallic blue material.
            let sphere = ModelEntity(
                mesh: .generateSphere(radius: 0.1),
                materials: [SimpleMaterial(color: .blue, isMetallic: false)]
            )
            content.add(sphere)
        }
        .toolbar {
            ToolbarItem(placement: .bottomOrnament) {
                HStack {
                    Button("Reset") {
                        // Reset scene
                    }
                    Button("Share") {
                        // SharePlay
                    }
                }
            }
        }
    }
}
'use client'
// Gaze-selection demo: highlights a cube green while it is hovered/gazed at.
// Fixes: `Vector3` was used without being imported (runtime ReferenceError),
// and `gazing` was set on pointer enter but never cleared, so the highlight
// stuck on forever after the first hover.
import { useXR } from '@react-three/xr'
import { useFrame } from '@react-three/fiber'
import { useState } from 'react'
import { Vector3 } from 'three'

export function GazeSelect() {
  const { player } = useXR()
  const [gazing, setGazing] = useState(false)
  useFrame(() => {
    // Raycast from camera (direction is illustrative — intersection logic
    // is left as an exercise below).
    const direction = player.camera.getWorldDirection(new Vector3())
    // Check intersection with objects
    // If gazing for 2 seconds, select
  })
  return (
    <mesh
      onPointerEnter={() => setGazing(true)}
      onPointerLeave={() => setGazing(false)}
    >
      <boxGeometry />
      <meshStandardMaterial
        color={gazing ? 'green' : 'white'}
      />
    </mesh>
  )
}
'use client'
// Listens for XR input gestures and logs them; renders a small red sphere
// as a visible anchor in the scene.
import { useXREvent } from '@react-three/xr'

export function GestureControl() {
  // 'squeeze' fires on a pinch / grip gesture.
  useXREvent('squeeze', () => {
    console.log('Pinch gesture detected')
    // Perform action
  })
  // 'select' fires on the primary input action (trigger / tap).
  useXREvent('select', () => {
    console.log('Select gesture')
  })
  return (
    <mesh>
      <sphereGeometry args={[0.05]} />
      <meshStandardMaterial color="red" />
    </mesh>
  )
}
'use client'
// Emits looping positional audio from a glowing yellow sphere in the scene.
import { PositionalAudio } from '@react-three/drei'
import { useRef } from 'react'

export function SpatialSound() {
  // Handle to the underlying THREE.PositionalAudio instance.
  const audioRef = useRef()
  return (
    <mesh position={[2, 1, -3]}>
      <sphereGeometry args={[0.2]} />
      <meshStandardMaterial color="yellow" emissive="yellow" />
      <PositionalAudio
        ref={audioRef}
        url="/sounds/ambient.mp3"
        distance={5}
        loop
        autoplay
      />
    </mesh>
  )
}
Perfect for:
I'll help you:
🥽 VR Experiences
📱 AR Applications
👋 Hand Tracking
🎧 Spatial Audio
🌐 WebXR Scenes
🍎 Vision Pro Apps
Let's build the future of spatial computing!
Weekly Installs
99
Repository
GitHub Stars
22
First Seen
Jan 20, 2026
Security Audits
Gen Agent Trust Hub: Pass · Socket: Pass · Snyk: Pass
Installed on
opencode — 87
codex — 85
gemini-cli — 85
cursor — 80
github-copilot — 76
claude-code — 70
React 组合模式指南:Vercel 组件架构最佳实践,提升代码可维护性
120,000 周安装
Elastic Cloud 云网络安全策略管理:IP过滤器与VPC过滤器配置指南
205 周安装
YouTube频道API工具 - 免费获取频道ID、最新视频和分页列表 | TranscriptAPI
70 周安装
Firecrawl 网页抓取工具:AI 友好的 Markdown 转换与反爬虫处理
199 周安装
阿里云 OpenClaw 最小化冒烟测试设置指南 - 验证 CLI、插件与网关状态
199 周安装
移动端离线支持开发指南:React Native/iOS/Android离线存储与同步最佳实践
199 周安装
macOS日历管理脚本:自动化创建、管理Apple日历事件,支持自然语言解析
199 周安装